diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/.gitmodules rustc-1.75.0+dfsg0ubuntu1~bpo10/.gitmodules --- rustc-1.74.1+dfsg0ubuntu1~bpo10/.gitmodules 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/.gitmodules 2023-12-21 16:55:28.000000000 +0000 @@ -33,7 +33,7 @@ [submodule "src/llvm-project"] path = src/llvm-project url = https://github.com/rust-lang/llvm-project.git - branch = rustc/17.0-2023-09-19 + branch = rustc/17.0-2023-12-14 shallow = true [submodule "src/doc/embedded-book"] path = src/doc/embedded-book diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/Cargo.lock rustc-1.75.0+dfsg0ubuntu1~bpo10/Cargo.lock --- rustc-1.74.1+dfsg0ubuntu1~bpo10/Cargo.lock 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/Cargo.lock 2023-12-21 16:55:28.000000000 +0000 @@ -25,6 +25,17 @@ ] [[package]] +name = "aes" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac1f845298e95f983ff1944b728ae08b8cebab80d684f0a832ed0fc74dfa27e2" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] name = "ahash" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -132,9 +143,9 @@ [[package]] name = "anstyle" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" [[package]] name = "anstyle-parse" @@ -359,11 +370,11 @@ [[package]] name = "bytecount" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c676a478f63e9fa2dd5368a42f28bba0d6c560b775f38583c8bbaa7fcd67c9c" +checksum = "ad152d03a2c813c80bb94fedbf3a3f02b28f793e39e7c214c8a0bcc196343de7" dependencies = [ - "packed_simd_2", + "packed_simd", ] [[package]] @@ -391,7 +402,7 @@ name = "cargo-miri" version = "0.1.0" dependencies = [ - "cargo_metadata", + "cargo_metadata 0.18.0", "directories", "rustc-build-sysroot", "rustc_tools_util", @@ -424,6 +435,20 @@ ] [[package]] +name = "cargo_metadata" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb9ac64500cc83ce4b9f8dafa78186aa008c8dea77a09b94cd307fd0cd5022a8" +dependencies = [ + "camino", + "cargo-platform", + "semver", + "serde", + "serde_json", + "thiserror", +] + +[[package]] name = "cargotest2" version = "0.1.0" @@ -457,6 +482,16 @@ ] [[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + +[[package]] name = "clap" version = "4.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -467,6 +502,16 @@ ] [[package]] +name = "clap-cargo" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "383f21342a464d4af96e9a4cad22a0b4f2880d4a5b3bbf5c9654dd1d9a224ee4" +dependencies = [ + "anstyle", + "clap", +] + +[[package]] name = "clap_builder" version = "4.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -508,10 +553,13 @@ [[package]] name = "clippy" -version = "0.1.74" +version = "0.1.75" dependencies = [ + "anstream", + "clippy_config", "clippy_lints", "clippy_utils", + "color-print", "filetime", "futures", "if_chain", @@ -527,7 +575,17 @@ "tester", "tokio", "toml 0.7.5", - "ui_test 0.20.0", + "ui_test", + 
"walkdir", +] + +[[package]] +name = "clippy_config" +version = "0.1.75" +dependencies = [ + "rustc-semver", + "serde", + "toml 0.7.5", "walkdir", ] @@ -546,10 +604,11 @@ [[package]] name = "clippy_lints" -version = "0.1.74" +version = "0.1.75" dependencies = [ "arrayvec", - "cargo_metadata", + "cargo_metadata 0.15.4", + "clippy_config", "clippy_utils", "declare_clippy_lint", "if_chain", @@ -566,13 +625,15 @@ "unicode-normalization", "unicode-script", "url", + "walkdir", ] [[package]] name = "clippy_utils" -version = "0.1.74" +version = "0.1.75" dependencies = [ "arrayvec", + "clippy_config", "if_chain", "itertools", "rustc-semver", @@ -604,6 +665,27 @@ ] [[package]] +name = "color-print" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a858372ff14bab9b1b30ea504f2a4bc534582aee3e42ba2d41d2a7baba63d5d" +dependencies = [ + "color-print-proc-macro", +] + +[[package]] +name = "color-print-proc-macro" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57e37866456a721d0a404439a1adae37a31be4e0055590d053dfe6981e05003f" +dependencies = [ + "nom", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] name = "color-spantrace" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -640,9 +722,9 @@ [[package]] name = "compiler_builtins" -version = "0.1.101" +version = "0.1.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01a6d58e9c3408138099a396a98fd0d0e6cfb25d723594d2ae48b5004513fd5b" +checksum = "a3b73c3443a5fd2438d7ba4853c64e4c8efc2404a9e28a9234cc2d5eebc6c242" dependencies = [ "cc", "rustc-std-workspace-core", @@ -659,6 +741,7 @@ "getopts", "glob", "home", + "indexmap 2.0.0", "lazycell", "libc", "miow", @@ -933,7 +1016,7 @@ [[package]] name = "declare_clippy_lint" -version = "0.1.74" +version = "0.1.75" dependencies = [ "itertools", "quote", @@ -941,6 +1024,17 @@ ] [[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] name = "derive_builder" version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1014,11 +1108,11 @@ [[package]] name = "directories" -version = "4.0.1" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f51c5d4ddabd36886dd3e1438cb358cdcb0d7c499cb99cb4ac2e38e18b5cb210" +checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" dependencies = [ - "dirs-sys", + "dirs-sys 0.4.1", ] [[package]] @@ -1027,7 +1121,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" dependencies = [ - "dirs-sys", + "dirs-sys 0.3.7", ] [[package]] @@ -1052,6 +1146,18 @@ ] [[package]] +name = "dirs-sys" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" +dependencies = [ + "libc", + "option-ext", + "redox_users", + "windows-sys 0.48.0", +] + +[[package]] name = "dirs-sys-next" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1175,26 +1281,15 @@ [[package]] name = "errno" -version = "0.3.1" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" dependencies = [ - "errno-dragonfly", "libc", "windows-sys 0.48.0", ] [[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - -[[package]] name = "error_index_generator" version = "0.0.0" dependencies = [ @@ -1590,9 +1685,9 @@ [[package]] name = "hashbrown" -version = "0.14.0" +version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" dependencies = [ "ahash", "allocator-api2", @@ -1704,7 +1799,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6cb51c9a029ddc91b07a787f1d86b53ccfa49b0e86688c946ebe8d3555685dd7" dependencies = [ - "libm 0.2.7", + "libm", ] [[package]] @@ -1784,21 +1879,29 @@ [[package]] name = "icu_list" -version = "1.2.0" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd7ba7442d9235b689d4fdce17c452ea229934980fd81ba50cc28275752c9f90" +checksum = "dc1a44bbed77a7e7b555f9d7dd4b43f75ec1402b438a901d20451943d50cbd90" dependencies = [ "displaydoc", + "icu_list_data", + "icu_locid_transform", "icu_provider", "regex-automata 0.2.0", "writeable", ] [[package]] +name = "icu_list_data" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3237583f0cb7feafabb567c4492fe9ef1d2d4113f6a8798a923273ea5de996d" + +[[package]] name = "icu_locid" -version = "1.2.0" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3003f85dccfc0e238ff567693248c59153a46f4e6125ba4020b973cef4d1d335" +checksum = "f284eb342dc49d3e9d9f3b188489d76b5d22dfb1d1a5e0d1941811253bac625c" dependencies = [ "displaydoc", "litemap", @@ -1808,15 +1911,36 @@ ] [[package]] +name = "icu_locid_transform" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6551daf80882d8e68eee186cc19e132d8bde1b1f059a79b93384a5ca0e8fc5e7" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a741eba5431f75eb2f1f9022d3cffabcadda6771e54fb4e77c8ba8653e4da44" + +[[package]] name = "icu_provider" -version = "1.2.0" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8dc312a7b6148f7dfe098047ae2494d12d4034f48ade58d4f353000db376e305" +checksum = "68acdef80034b5e35d8524e9817479d389a4f9774f3f0cbe1bf3884d80fd5934" dependencies = [ "displaydoc", "icu_locid", "icu_provider_macros", "stable_deref_trait", + "tinystr", "writeable", "yoke", "zerofrom", @@ -1825,26 +1949,26 @@ [[package]] name = "icu_provider_adapters" -version = "1.2.0" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4ae1e2bd0c41728b77e7c46e9afdec5e2127d1eedacc684724667d50c126bd3" +checksum = "36b380ef2d3d93b015cd0563d7e0d005cc07f82a5503716dbc191798d0079e1d" dependencies = [ "icu_locid", + "icu_locid_transform", "icu_provider", "tinystr", - "yoke", "zerovec", ] [[package]] 
name = "icu_provider_macros" -version = "1.2.0" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b728b9421e93eff1d9f8681101b78fa745e0748c95c655c83f337044a7e10" +checksum = "2060258edfcfe32ca7058849bf0f146cb5c59aadbedf480333c0d0002f97bc99" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.29", ] [[package]] @@ -1909,7 +2033,7 @@ checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" dependencies = [ "equivalent", - "hashbrown 0.14.0", + "hashbrown 0.14.2", "rustc-rayon", "serde", ] @@ -1934,13 +2058,21 @@ checksum = "bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306" [[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "generic-array", +] + +[[package]] name = "installer" version = "0.0.0" dependencies = [ "anyhow", "clap", "flate2", - "num_cpus", "rayon", "tar", "walkdir", @@ -2009,9 +2141,9 @@ [[package]] name = "jemalloc-sys" -version = "0.5.3+5.3.0-patched" +version = "0.5.4+5.3.0-patched" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9bd5d616ea7ed58b571b2e209a65759664d7fb021a0819d7a790afc67e47ca1" +checksum = "ac6c1946e1cea1788cbfde01c993b52a10e2da07f4bac608228d1bed20bfebf2" dependencies = [ "cc", "libc", @@ -2100,9 +2232,9 @@ [[package]] name = "libc" -version = "0.2.148" +version = "0.2.150" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" +checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" dependencies = [ "rustc-std-workspace-core", ] @@ -2137,10 +2269,14 @@ ] [[package]] -name = "libm" -version = "0.1.4" +name = "libloading" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fc7aa29613bd6a620df431842069224d8bc9011086b1db4c0e0cd47fa03ec9a" +checksum = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] [[package]] name = "libm" @@ -2185,15 +2321,15 @@ [[package]] name = "linux-raw-sys" -version = "0.4.7" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a9bad9f94746442c783ca431b22403b519cd7fbeed0533fdd6328b2f2212128" +checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" [[package]] name = "litemap" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a04a5b2b6f54acba899926491d0a6c59d98012938ca2ab5befb281c034e8f94" +checksum = "77a1a2647d5b7134127971a6de0d533c49de2159167e7f259c427195f87168a1" [[package]] name = "lld-wrapper" @@ -2358,9 +2494,9 @@ [[package]] name = "minifier" -version = "0.2.2" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eb022374af2f446981254e6bf9efb6e2c9e1a53176d395fca02792fd4435729" +checksum = "95bbbf96b9ac3482c2a25450b67a15ed851319bc5fabf3b40742ea9066e84282" [[package]] name = "minimal-lexical" @@ -2404,6 +2540,7 @@ name = "miri" version = "0.1.0" dependencies = [ + "aes", "colored", "ctrlc", "env_logger 0.10.0", @@ -2411,7 +2548,7 @@ "lazy_static", "libc", "libffi", - "libloading", + "libloading 0.8.1", "log", "measureme", "rand", @@ -2419,7 +2556,7 @@ "rustc_version", "serde", "smallvec", - "ui_test 0.21.2", + "ui_test", ] [[package]] @@ 
-2501,6 +2638,7 @@ checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -2528,7 +2666,7 @@ "compiler_builtins", "crc32fast", "flate2", - "hashbrown 0.14.0", + "hashbrown 0.14.2", "indexmap 2.0.0", "memchr", "rustc-std-workspace-alloc", @@ -2624,6 +2762,7 @@ "serde", "serde_json", "sysinfo", + "tabled", "tar", "tempfile", "xz", @@ -2631,6 +2770,12 @@ ] [[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + +[[package]] name = "overload" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2643,13 +2788,13 @@ checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" [[package]] -name = "packed_simd_2" -version = "0.3.8" +name = "packed_simd" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1914cd452d8fccd6f9db48147b29fd4ae05bea9dc5d9ad578509f72415de282" +checksum = "1f9f08af0c877571712e2e3e686ad79efad9657dbf0f7c3c8ba943ff6c38932d" dependencies = [ "cfg-if", - "libm 0.1.4", + "num-traits", ] [[package]] @@ -2685,6 +2830,17 @@ ] [[package]] +name = "papergrid" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2ccbe15f2b6db62f9a9871642746427e297b0ceb85f9a7f1ee5ff47d184d0c8" +dependencies = [ + "bytecount", + "fnv", + "unicode-width", +] + +[[package]] name = "parking_lot" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2866,9 +3022,9 @@ [[package]] name = "portable-atomic" -version = "1.4.2" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f32154ba0af3a075eefa1eda8bb414ee928f62303a54ea85b8d6638ff1a6ee9e" +checksum = "3bccab0e7fd7cc19f820a1c8c91720af652d0c88dc9664dd72aef2614f04af3b" [[package]] name = "ppv-lite86" @@ -3042,9 +3198,9 @@ [[package]] name = "rayon" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" dependencies = [ "either", "rayon-core", @@ -3052,14 +3208,12 @@ [[package]] name = "rayon-core" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" dependencies = [ - "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus", ] [[package]] @@ -3340,6 +3494,7 @@ "rustc_macros", "rustc_serialize", "rustc_span", + "rustc_type_ir", "smallvec", "thin-vec", "tracing", @@ -3384,7 +3539,6 @@ "rustc_span", "rustc_target", "thin-vec", - "tracing", ] [[package]] @@ -3419,6 +3573,7 @@ dependencies = [ "icu_list", "icu_locid", + "icu_locid_transform", "icu_provider", "icu_provider_adapters", "zerovec", @@ -3484,6 +3639,7 @@ dependencies = [ "bitflags 1.3.2", "cstr", + "itertools", "libc", "measureme", "object", @@ -3548,6 +3704,7 @@ "serde_json", "smallvec", "tempfile", + "thin-vec", "thorin-dwp", "tracing", "windows", @@ -3584,7 +3741,6 @@ dependencies = [ "arrayvec", "bitflags 1.3.2", - "cfg-if", "elsa", "ena", "indexmap 2.0.0", @@ -3594,6 +3750,7 @@ "measureme", "memmap2", "parking_lot 0.12.1", + 
"portable-atomic", "rustc-hash", "rustc-rayon", "rustc-rayon-core", @@ -3656,7 +3813,6 @@ "rustc_monomorphize", "rustc_parse", "rustc_passes", - "rustc_plugin_impl", "rustc_privacy", "rustc_query_system", "rustc_resolve", @@ -3759,7 +3915,7 @@ [[package]] name = "rustc_fluent_macro" -version = "0.1.0" +version = "0.0.0" dependencies = [ "annotate-snippets", "fluent-bundle", @@ -3835,7 +3991,7 @@ [[package]] name = "rustc_hir_typeck" -version = "0.1.0" +version = "0.0.0" dependencies = [ "rustc_ast", "rustc_attr", @@ -3914,7 +4070,7 @@ name = "rustc_interface" version = "0.0.0" dependencies = [ - "libloading", + "libloading 0.7.4", "rustc-rayon", "rustc-rayon-core", "rustc_ast", @@ -3936,7 +4092,6 @@ "rustc_hir_analysis", "rustc_hir_typeck", "rustc_incremental", - "rustc_index", "rustc_lint", "rustc_macros", "rustc_metadata", @@ -3946,7 +4101,6 @@ "rustc_monomorphize", "rustc_parse", "rustc_passes", - "rustc_plugin_impl", "rustc_privacy", "rustc_query_impl", "rustc_query_system", @@ -3963,7 +4117,7 @@ [[package]] name = "rustc_lexer" -version = "0.1.0" +version = "0.0.0" dependencies = [ "expect-test", "unicode-properties", @@ -4032,12 +4186,12 @@ [[package]] name = "rustc_macros" -version = "0.1.0" +version = "0.0.0" dependencies = [ "proc-macro2", "quote", "syn 2.0.29", - "synstructure 0.13.0", + "synstructure", ] [[package]] @@ -4045,7 +4199,7 @@ version = "0.0.0" dependencies = [ "bitflags 1.3.2", - "libloading", + "libloading 0.7.4", "odht", "rustc_ast", "rustc_attr", @@ -4161,6 +4315,7 @@ "coverage_test_macros", "either", "itertools", + "rustc_arena", "rustc_ast", "rustc_attr", "rustc_const_eval", @@ -4190,7 +4345,6 @@ "rustc_errors", "rustc_fluent_macro", "rustc_hir", - "rustc_index", "rustc_macros", "rustc_middle", "rustc_session", @@ -4257,21 +4411,6 @@ ] [[package]] -name = "rustc_plugin_impl" -version = "0.0.0" -dependencies = [ - "libloading", - "rustc_ast", - "rustc_errors", - "rustc_fluent_macro", - "rustc_lint", - "rustc_macros", - "rustc_metadata", - "rustc_session", - "rustc_span", -] - -[[package]] name = "rustc_privacy" version = "0.0.0" dependencies = [ @@ -4295,13 +4434,11 @@ dependencies = [ "field-offset", "measureme", - "memoffset", "rustc-rayon-core", "rustc_data_structures", "rustc_errors", "rustc_hir", "rustc_index", - "rustc_macros", "rustc_middle", "rustc_query_system", "rustc_serialize", @@ -4403,13 +4540,12 @@ name = "rustc_smir" version = "0.0.0" dependencies = [ - "rustc_driver", + "rustc_data_structures", "rustc_hir", - "rustc_interface", "rustc_middle", - "rustc_session", "rustc_span", "rustc_target", + "scoped-tls", "stable_mir", "tracing", ] @@ -4418,7 +4554,6 @@ name = "rustc_span" version = "0.0.0" dependencies = [ - "cfg-if", "indexmap 2.0.0", "md-5", "rustc_arena", @@ -4442,9 +4577,7 @@ "rustc-demangle", "rustc_data_structures", "rustc_errors", - "rustc_fluent_macro", "rustc_hir", - "rustc_macros", "rustc_middle", "rustc_session", "rustc_span", @@ -4463,6 +4596,7 @@ "rustc_data_structures", "rustc_feature", "rustc_fs_util", + "rustc_index", "rustc_macros", "rustc_serialize", "rustc_span", @@ -4516,7 +4650,7 @@ [[package]] name = "rustc_transmute" -version = "0.1.0" +version = "0.0.0" dependencies = [ "itertools", "rustc_data_structures", @@ -4555,6 +4689,7 @@ version = "0.0.0" dependencies = [ "bitflags 1.3.2", + "derivative", "rustc_data_structures", "rustc_index", "rustc_macros", @@ -4578,6 +4713,7 @@ "arrayvec", "askama", "expect-test", + "indexmap 2.0.0", "itertools", "minifier", "once_cell", @@ -4648,21 +4784,20 @@ [[package]] name = 
"rustfmt-nightly" -version = "1.6.0" +version = "1.7.0" dependencies = [ "annotate-snippets", "anyhow", "bytecount", - "cargo_metadata", + "cargo_metadata 0.15.4", "clap", + "clap-cargo", "diff", "dirs", - "env_logger 0.10.0", "getopts", "ignore", "itertools", "lazy_static", - "log", "regex", "rustfmt-config_proc_macro", "serde", @@ -4670,16 +4805,18 @@ "term", "thiserror", "toml 0.7.5", + "tracing", + "tracing-subscriber", + "unicode-properties", "unicode-segmentation", "unicode-width", - "unicode_categories", ] [[package]] name = "rustix" -version = "0.38.14" +version = "0.38.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "747c788e9ce8e92b12cd485c49ddf90723550b654b32508f979b71a7b1ecda4f" +checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed" dependencies = [ "bitflags 2.4.0", "errno", @@ -4985,7 +5122,7 @@ "core", "dlmalloc", "fortanix-sgx-abi", - "hashbrown 0.14.0", + "hashbrown 0.14.2", "hermit-abi 0.3.2", "libc", "miniz_oxide", @@ -5098,18 +5235,6 @@ [[package]] name = "synstructure" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "unicode-xid", -] - -[[package]] -name = "synstructure" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06" @@ -5144,6 +5269,16 @@ ] [[package]] +name = "tabled" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d38d39c754ae037a9bc3ca1580a985db7371cd14f1229172d1db9093feb6739" +dependencies = [ + "papergrid", + "unicode-width", +] + +[[package]] name = "tar" version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -5295,7 +5430,7 @@ checksum = "4db52ee8fec06e119b692ef3dd2c4cf621a99204c1b8c47407870ed050305b9b" dependencies = [ "gimli", - "hashbrown 0.14.0", + "hashbrown 0.14.2", "object", "tracing", ] @@ -5324,7 +5459,7 @@ version = "0.1.0" dependencies = [ "cargo-platform", - "cargo_metadata", + "cargo_metadata 0.15.4", "ignore", "lazy_static", "miropt-test-tools", @@ -5367,9 +5502,9 @@ [[package]] name = "tinystr" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ac3f5b6856e931e15e07b478e98c8045239829a65f9156d4fa7e7788197a5ef" +checksum = "d5d0e245e80bdc9b4e5356fc45a72184abbc3861992603f515270e9340f5a219" dependencies = [ "displaydoc", "zerovec", @@ -5556,6 +5691,7 @@ "thread_local", "tracing", "tracing-core", + "tracing-log", ] [[package]] @@ -5621,33 +5757,6 @@ [[package]] name = "ui_test" -version = "0.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfd8fb9b15c8332cf51bfc2dc4830063b2446a9c9d732421b56f2478024a3971" -dependencies = [ - "annotate-snippets", - "anyhow", - "bstr", - "cargo-platform", - "cargo_metadata", - "color-eyre", - "colored", - "comma", - "crossbeam-channel", - "indicatif", - "lazy_static", - "levenshtein", - "prettydiff", - "regex", - "rustc_version", - "rustfix", - "serde", - "serde_json", - "tempfile", -] - -[[package]] -name = "ui_test" version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aaf4bf7c184b8dfc7a4d3b90df789b1eb992ee42811cd115f32a7a1eb781058d" @@ -5656,7 +5765,7 @@ "anyhow", "bstr", "cargo-platform", - "cargo_metadata", + "cargo_metadata 0.15.4", 
"color-eyre", "colored", "comma", @@ -5799,12 +5908,6 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] -name = "unicode_categories" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" - -[[package]] name = "unified-diff" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -5825,7 +5928,6 @@ name = "unwind" version = "0.0.0" dependencies = [ - "cc", "cfg-if", "compiler_builtins", "core", @@ -6198,9 +6300,9 @@ [[package]] name = "writeable" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60e49e42bdb1d5dc76f4cd78102f8f0714d32edfa3efb82286eb0f0b1fc0da0f" +checksum = "c0af0c3d13faebf8dda0b5256fa7096a2d5ccb662f7b9f54a40fe201077ab1c2" [[package]] name = "xattr" @@ -6260,9 +6362,9 @@ [[package]] name = "yoke" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1848075a23a28f9773498ee9a0f2cf58fcbad4f8c0ccf84a210ab33c6ae495de" +checksum = "61e38c508604d6bbbd292dadb3c02559aa7fff6b654a078a36217cad871636e4" dependencies = [ "serde", "stable_deref_trait", @@ -6272,42 +6374,42 @@ [[package]] name = "yoke-derive" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af46c169923ed7516eef0aa32b56d2651b229f57458ebe46b49ddd6efef5b7a2" +checksum = "d5e19fb6ed40002bab5403ffa37e53e0e56f914a4450c8765f533018db1db35f" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", - "synstructure 0.12.6", + "syn 2.0.29", + "synstructure", ] [[package]] name = "zerofrom" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df54d76c3251de27615dfcce21e636c172dafb2549cd7fd93e21c66f6ca6bea2" +checksum = "655b0814c5c0b19ade497851070c640773304939a6c0fd5f5fb43da0696d05b7" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4eae7c1f7d4b8eafce526bc0771449ddc2f250881ae31c50d22c032b5a1c499" +checksum = "e6a647510471d372f2e6c2e6b7219e44d8c574d24fdc11c610a61455782f18c3" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", - "synstructure 0.12.6", + "syn 2.0.29", + "synstructure", ] [[package]] name = "zerovec" -version = "0.9.4" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "198f54134cd865f437820aa3b43d0ad518af4e68ee161b444cdd15d8e567c8ea" +checksum = "1194130c5b155bf8ae50ab16c86ab758cd695cf9ad176d2f870b744cbdbb572e" dependencies = [ "yoke", "zerofrom", @@ -6316,14 +6418,13 @@ [[package]] name = "zerovec-derive" -version = "0.9.4" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "486558732d5dde10d0f8cb2936507c1bb21bc539d924c949baf5f36a58e51bac" +checksum = "acabf549809064225ff8878baedc4ce3732ac3b07e7c7ce6e5c2ccdbc485c324" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", - "synstructure 0.12.6", + "syn 2.0.29", ] [[package]] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/README.md rustc-1.75.0+dfsg0ubuntu1~bpo10/README.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/README.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/README.md 2023-12-21 16:55:28.000000000 +0000 @@ -11,6 +11,20 @@ If you wish to _contribute_ to the compiler, you 
should read [CONTRIBUTING.md](CONTRIBUTING.md) instead. +
+Table of content + +- [Quick Start](#quick-start) +- [Installing from Source](#installing-from-source) +- [Building Documentation](#building-documentation) +- [Notes](#notes) +- [Getting Help](#getting-help) +- [Contributing](#contributing) +- [License](#license) +- [Trademark](#trademark) + +
+ ## Quick Start Read ["Installation"] from [The Book]. @@ -116,7 +130,7 @@ #### Configure and Make This project provides a configure script and makefile (the latter of which just -invokes `x.py`). `./configure` is the recommended way to programatically +invokes `x.py`). `./configure` is the recommended way to programmatically generate a `config.toml`. `make` is not recommended (we suggest using `x.py` directly), but it is supported and we try not to break it unnecessarily. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/RELEASES.md rustc-1.75.0+dfsg0ubuntu1~bpo10/RELEASES.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/RELEASES.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/RELEASES.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,128 @@ +Version 1.75.0 (2023-12-28) +========================== + + + +Language +-------- + +- [Stabilize `async fn` and return-position `impl Trait` in traits.](https://github.com/rust-lang/rust/pull/115822/) +- [Allow function pointer signatures containing `&mut T` in `const` contexts.](https://github.com/rust-lang/rust/pull/116015/) +- [Match `usize`/`isize` exhaustively with half-open ranges.](https://github.com/rust-lang/rust/pull/116692/) +- [Guarantee that `char` has the same size and alignment as `u32`.](https://github.com/rust-lang/rust/pull/116894/) +- [Document that the null pointer has the 0 address.](https://github.com/rust-lang/rust/pull/116988/) +- [Allow partially moved values in `match`.](https://github.com/rust-lang/rust/pull/103208/) +- [Add notes about non-compliant FP behavior on 32bit x86 targets.](https://github.com/rust-lang/rust/pull/113053/) +- [Stabilize ratified RISC-V target features.](https://github.com/rust-lang/rust/pull/116485/) + + + +Compiler +-------- + +- [Rework negative coherence to properly consider impls that only partly overlap.](https://github.com/rust-lang/rust/pull/112875/) +- [Bump `COINDUCTIVE_OVERLAP_IN_COHERENCE` to deny, and warn in dependencies.](https://github.com/rust-lang/rust/pull/116493/) +- [Consider alias bounds when computing liveness in NLL.](https://github.com/rust-lang/rust/pull/116733/) +- [Add the V (vector) extension to the `riscv64-linux-android` target spec.](https://github.com/rust-lang/rust/pull/116618/) +- [Automatically enable cross-crate inlining for small functions](https://github.com/rust-lang/rust/pull/116505) +- Add several new tier 3 targets: + - [`csky-unknown-linux-gnuabiv2hf`](https://github.com/rust-lang/rust/pull/117049/) + - [`i586-unknown-netbsd`](https://github.com/rust-lang/rust/pull/117170/) + - [`mipsel-unknown-netbsd`](https://github.com/rust-lang/rust/pull/117356/) + +Refer to Rust's [platform support page][platform-support-doc] +for more information on Rust's tiered platform support. 
+ + + +Libraries +--------- + +- [Override `Waker::clone_from` to avoid cloning `Waker`s unnecessarily.](https://github.com/rust-lang/rust/pull/96979/) +- [Implement `BufRead` for `VecDeque`.](https://github.com/rust-lang/rust/pull/110604/) +- [Implement `FusedIterator` for `DecodeUtf16` when the inner iterator does.](https://github.com/rust-lang/rust/pull/110729/) +- [Implement `Not, Bit{And,Or}{,Assign}` for IP addresses.](https://github.com/rust-lang/rust/pull/113747/) +- [Implement `Default` for `ExitCode`.](https://github.com/rust-lang/rust/pull/114589/) +- [Guarantee representation of None in NPO](https://github.com/rust-lang/rust/pull/115333/) +- [Document when atomic loads are guaranteed read-only.](https://github.com/rust-lang/rust/pull/115577/) +- [Broaden the consequences of recursive TLS initialization.](https://github.com/rust-lang/rust/pull/116172/) +- [Windows: Support sub-millisecond sleep.](https://github.com/rust-lang/rust/pull/116461/) +- [Fix generic bound of `str::SplitInclusive`'s `DoubleEndedIterator` impl](https://github.com/rust-lang/rust/pull/100806/) +- [Fix exit status / wait status on non-Unix `cfg(unix)` platforms.](https://github.com/rust-lang/rust/pull/115108/) + + + +Stabilized APIs +--------------- + +- [`Atomic*::from_ptr`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicUsize.html#method.from_ptr) +- [`FileTimes`](https://doc.rust-lang.org/stable/std/fs/struct.FileTimes.html) +- [`FileTimesExt`](https://doc.rust-lang.org/stable/std/os/windows/fs/trait.FileTimesExt.html) +- [`File::set_modified`](https://doc.rust-lang.org/stable/std/fs/struct.File.html#method.set_modified) +- [`File::set_times`](https://doc.rust-lang.org/stable/std/fs/struct.File.html#method.set_times) +- [`IpAddr::to_canonical`](https://doc.rust-lang.org/stable/core/net/enum.IpAddr.html#method.to_canonical) +- [`Ipv6Addr::to_canonical`](https://doc.rust-lang.org/stable/core/net/struct.Ipv6Addr.html#method.to_canonical) +- [`Option::as_slice`](https://doc.rust-lang.org/stable/core/option/enum.Option.html#method.as_slice) +- [`Option::as_mut_slice`](https://doc.rust-lang.org/stable/core/option/enum.Option.html#method.as_mut_slice) +- [`pointer::byte_add`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.byte_add) +- [`pointer::byte_offset`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.byte_offset) +- [`pointer::byte_offset_from`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.byte_offset_from) +- [`pointer::byte_sub`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.byte_sub) +- [`pointer::wrapping_byte_add`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.wrapping_byte_add) +- [`pointer::wrapping_byte_offset`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.wrapping_byte_offset) +- [`pointer::wrapping_byte_sub`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.wrapping_byte_sub) + +These APIs are now stable in const contexts: + +- [`Ipv6Addr::to_ipv4_mapped`](https://doc.rust-lang.org/stable/core/net/struct.Ipv6Addr.html#method.to_ipv4_mapped) +- [`MaybeUninit::assume_init_read`](https://doc.rust-lang.org/stable/core/mem/union.MaybeUninit.html#method.assume_init_read) +- [`MaybeUninit::zeroed`](https://doc.rust-lang.org/stable/core/mem/union.MaybeUninit.html#method.zeroed) +- [`mem::discriminant`](https://doc.rust-lang.org/stable/core/mem/fn.discriminant.html) +- [`mem::zeroed`](https://doc.rust-lang.org/stable/core/mem/fn.zeroed.html) 
+ + + +Cargo +----- + +- [Add new packages to `[workspace.members]` automatically.](https://github.com/rust-lang/cargo/pull/12779/) +- [Allow version-less `Cargo.toml` manifests.](https://github.com/rust-lang/cargo/pull/12786/) +- [Make browser links out of HTML file paths.](https://github.com/rust-lang/cargo/pull/12889) + + + +Rustdoc +------- + +- [Accept less invalid Rust in rustdoc.](https://github.com/rust-lang/rust/pull/117450/) +- [Document lack of object safety on affected traits.](https://github.com/rust-lang/rust/pull/113241/) +- [Hide `#[repr(transparent)]` if it isn't part of the public ABI.](https://github.com/rust-lang/rust/pull/115439/) +- [Show enum discriminant if it is a C-like variant.](https://github.com/rust-lang/rust/pull/116142/) + + + +Compatibility Notes +------------------- + +- [FreeBSD targets now require at least version 12.](https://github.com/rust-lang/rust/pull/114521/) +- [Formally demote tier 2 MIPS targets to tier 3.](https://github.com/rust-lang/rust/pull/115238/) +- [Make misalignment a hard error in `const` contexts.](https://github.com/rust-lang/rust/pull/115524/) +- [Fix detecting references to packed unsized fields.](https://github.com/rust-lang/rust/pull/115583/) +- [Remove support for compiler plugins.](https://github.com/rust-lang/rust/pull/116412/) + + + +Internal Changes +---------------- + +These changes do not affect any public interfaces of Rust, but they represent +significant improvements to the performance or internals of rustc and related +tools. + +- [Optimize `librustc_driver.so` with BOLT.](https://github.com/rust-lang/rust/pull/116352/) +- [Enable parallel rustc front end in dev and nightly builds.](https://github.com/rust-lang/rust/pull/117435/) +- [Distribute `rustc-codegen-cranelift` as rustup component on the nightly channel.](https://github.com/rust-lang/rust/pull/81746/) + Version 1.74.1 (2023-12-07) =========================== @@ -14,11 +139,12 @@ -------- - [Codify that `std::mem::Discriminant` does not depend on any lifetimes in T](https://github.com/rust-lang/rust/pull/104299/) -- [Replace `private_in_public` lint with `private_interfaces` and `private_bounds` per RFC 2145](https://github.com/rust-lang/rust/pull/113126/) +- [Replace `private_in_public` lint with `private_interfaces` and `private_bounds` per RFC 2145.](https://github.com/rust-lang/rust/pull/113126/) Read more in [RFC 2145](https://rust-lang.github.io/rfcs/2145-type-privacy.html). 
- [Allow explicit `#[repr(Rust)]`](https://github.com/rust-lang/rust/pull/114201/) - [closure field capturing: don't depend on alignment of packed fields](https://github.com/rust-lang/rust/pull/115315/) - [Enable MIR-based drop-tracking for `async` blocks](https://github.com/rust-lang/rust/pull/107421/) +- [Stabilize `impl_trait_projections`](https://github.com/rust-lang/rust/pull/115659) @@ -53,8 +179,8 @@ - [`core::num::Saturating`](https://doc.rust-lang.org/stable/std/num/struct.Saturating.html) - [`impl From for std::process::Stdio`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStdout%3E-for-Stdio) - [`impl From for std::process::Stdio`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStderr%3E-for-Stdio) -- [`impl From for std::process::Child{Stdin, Stdout, Stderr}`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStderr%3E-for-Stdio) -- [`impl From for std::process::Child{Stdin, Stdout, Stderr}`](https://doc.rust-lang.org/stable/std/process/struct.Stdio.html#impl-From%3CStderr%3E-for-Stdio) +- [`impl From for std::process::Child{Stdin, Stdout, Stderr}`](https://doc.rust-lang.org/stable/std/process/struct.ChildStderr.html#impl-From%3COwnedHandle%3E-for-ChildStderr) +- [`impl From for std::process::Child{Stdin, Stdout, Stderr}`](https://doc.rust-lang.org/stable/std/process/struct.ChildStderr.html#impl-From%3COwnedFd%3E-for-ChildStderr) - [`std::ffi::OsString::from_encoded_bytes_unchecked`](https://doc.rust-lang.org/stable/std/ffi/struct.OsString.html#method.from_encoded_bytes_unchecked) - [`std::ffi::OsString::into_encoded_bytes`](https://doc.rust-lang.org/stable/std/ffi/struct.OsString.html#method.into_encoded_bytes) - [`std::ffi::OsStr::from_encoded_bytes_unchecked`](https://doc.rust-lang.org/stable/std/ffi/struct.OsStr.html#method.from_encoded_bytes_unchecked) @@ -77,17 +203,17 @@ Cargo ----- -- [fix: Set MSRV for internal packages](https://github.com/rust-lang/cargo/pull/12381/) -- [config: merge lists in precedence order](https://github.com/rust-lang/cargo/pull/12515/) -- [fix(update): Clarify meaning of --aggressive as --recursive](https://github.com/rust-lang/cargo/pull/12544/) -- [fix(update): Make `-p` more convenient by being positional](https://github.com/rust-lang/cargo/pull/12545/) -- [feat(help): Add styling to help output ](https://github.com/rust-lang/cargo/pull/12578/) -- [feat(pkgid): Allow incomplete versions when unambigious](https://github.com/rust-lang/cargo/pull/12614/) -- [feat: stabilize credential-process and registry-auth](https://github.com/rust-lang/cargo/pull/12649/) -- [feat(cli): Add '-n' to dry-run](https://github.com/rust-lang/cargo/pull/12660/) +- [In `Cargo.toml`, stabilize `[lints]`](https://github.com/rust-lang/cargo/pull/12648/) +- [Stabilize credential-process and registry-auth](https://github.com/rust-lang/cargo/pull/12649/) +- [Stabilize `--keep-going` build flag](https://github.com/rust-lang/cargo/pull/12568/) +- [Add styling to `--help` output](https://github.com/rust-lang/cargo/pull/12578/) +- [For `cargo clean`, add `--dry-run` flag and summary line at the end](https://github.com/rust-lang/cargo/pull/12638) +- [For `cargo update`, make `--package` more convenient by being positional](https://github.com/rust-lang/cargo/pull/12545/) +- [For `cargo update`, clarify meaning of --aggressive as --recursive](https://github.com/rust-lang/cargo/pull/12544/) +- [Add '-n' as an alias for `--dry-run`](https://github.com/rust-lang/cargo/pull/12660/) +- [Allow 
version-prefixes in pkgid's (e.g. `--package` flags) to resolve ambiguities](https://github.com/rust-lang/cargo/pull/12614/) +- [In `.cargo/config.toml`, merge lists in precedence order](https://github.com/rust-lang/cargo/pull/12515/) - [Add support for `target.'cfg(..)'.linker`](https://github.com/rust-lang/cargo/pull/12535/) -- [Stabilize `--keep-going`](https://github.com/rust-lang/cargo/pull/12568/) -- [feat: Stabilize lints](https://github.com/rust-lang/cargo/pull/12648/) @@ -95,7 +221,6 @@ ------- - [Add warning block support in rustdoc](https://github.com/rust-lang/rust/pull/106561/) -- [Accept additional user-defined syntax classes in fenced code blocks](https://github.com/rust-lang/rust/pull/110800/) - [rustdoc-search: add support for type parameters](https://github.com/rust-lang/rust/pull/112725/) - [rustdoc: show inner enum and struct in type definition for concrete type](https://github.com/rust-lang/rust/pull/114855/) @@ -108,6 +233,7 @@ - [make Cell::swap panic if the Cells partially overlap](https://github.com/rust-lang/rust/pull/114795/) - [Reject invalid crate names in `--extern`](https://github.com/rust-lang/rust/pull/116001/) - [Don't resolve generic impls that may be shadowed by dyn built-in impls](https://github.com/rust-lang/rust/pull/114941/) +- [The new `impl From<{&,&mut} [T; N]> for Vec` is known to cause some inference failures with overly-generic code.](https://github.com/rust-lang/rust/issues/117054) In those examples using the `tui` crate, the combination of `AsRef<_>` and `Into` leaves the middle type ambiguous, and the new `impl` adds another possibility, so it now requires an explicit type annotation. @@ -205,7 +331,6 @@ Cargo ----- -- [Encode URL params correctly for `SourceId` in `Cargo.lock`.](https://github.com/rust-lang/cargo/pull/12280/) - [Bail out an error when using `cargo::` in custom build script.](https://github.com/rust-lang/cargo/pull/12332/) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,16 +4,21 @@ edition = "2021" [dependencies] -rustc_driver = { path = "../rustc_driver" } -rustc_driver_impl = { path = "../rustc_driver_impl" } +# tidy-alphabetical-start # Make sure rustc_codegen_ssa ends up in the sysroot, because this # crate is intended to be used by codegen backends, which may not be in-tree. rustc_codegen_ssa = { path = "../rustc_codegen_ssa" } + +rustc_driver = { path = "../rustc_driver" } +rustc_driver_impl = { path = "../rustc_driver_impl" } + # Make sure rustc_smir ends up in the sysroot, because this -# crate is intended to be used by stable MIR consumers, which are not in-tree +# crate is intended to be used by stable MIR consumers, which are not in-tree. 
rustc_smir = { path = "../rustc_smir" } + stable_mir = { path = "../stable_mir" } +# tidy-alphabetical-end [dependencies.jemalloc-sys] version = "0.5.0" @@ -21,7 +26,9 @@ features = ['unprefixed_malloc_on_supported_platforms'] [features] +# tidy-alphabetical-start jemalloc = ['jemalloc-sys'] llvm = ['rustc_driver_impl/llvm'] max_level_info = ['rustc_driver_impl/max_level_info'] rustc_use_parallel_compiler = ['rustc_driver_impl/rustc_use_parallel_compiler'] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_abi/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_abi/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_abi/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_abi/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,21 +4,27 @@ edition = "2021" [dependencies] +# tidy-alphabetical-start bitflags = "1.2.1" -tracing = "0.1" rand = { version = "0.8.4", default-features = false, optional = true } rand_xoshiro = { version = "0.6.0", optional = true } rustc_data_structures = { path = "../rustc_data_structures", optional = true } rustc_index = { path = "../rustc_index", default-features = false } rustc_macros = { path = "../rustc_macros", optional = true } rustc_serialize = { path = "../rustc_serialize", optional = true } +tracing = "0.1" +# tidy-alphabetical-end [features] +# tidy-alphabetical-start default = ["nightly", "randomize"] -randomize = ["rand", "rand_xoshiro"] +# rust-analyzer depends on this crate and we therefore require it to built on a stable toolchain +# without depending on rustc_data_structures, rustc_macros and rustc_serialize nightly = [ "rustc_data_structures", "rustc_index/nightly", "rustc_macros", "rustc_serialize", ] +randomize = ["rand", "rand_xoshiro", "nightly"] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_abi/src/layout.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_abi/src/layout.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_abi/src/layout.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_abi/src/layout.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,21 +1,27 @@ -use super::*; -use std::fmt::Write; +use std::fmt::{self, Write}; +use std::ops::Deref; use std::{borrow::Borrow, cmp, iter, ops::Bound}; -#[cfg(feature = "randomize")] -use rand::{seq::SliceRandom, SeedableRng}; -#[cfg(feature = "randomize")] -use rand_xoshiro::Xoshiro128StarStar; - +use rustc_index::Idx; use tracing::debug; +use crate::{ + Abi, AbiAndPrefAlign, Align, FieldsShape, IndexSlice, IndexVec, Integer, LayoutS, Niche, + NonZeroUsize, Primitive, ReprOptions, Scalar, Size, StructKind, TagEncoding, TargetDataLayout, + Variants, WrappingRange, +}; + pub trait LayoutCalculator { type TargetDataLayoutRef: Borrow; fn delay_bug(&self, txt: String); fn current_data_layout(&self) -> Self::TargetDataLayoutRef; - fn scalar_pair(&self, a: Scalar, b: Scalar) -> LayoutS { + fn scalar_pair( + &self, + a: Scalar, + b: Scalar, + ) -> LayoutS { let dl = self.current_data_layout(); let dl = dl.borrow(); let b_align = b.align(dl); @@ -31,7 +37,7 @@ .max_by_key(|niche| niche.available(dl)); LayoutS { - variants: Variants::Single { index: FIRST_VARIANT }, + variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Arbitrary { offsets: [Size::ZERO, b_offset].into(), memory_index: [0, 1].into(), @@ -45,40 +51,45 @@ } } - fn univariant( + fn univariant< + 'a, + FieldIdx: Idx, + VariantIdx: Idx, + F: Deref> + 
fmt::Debug, + >( &self, dl: &TargetDataLayout, - fields: &IndexSlice>, + fields: &IndexSlice, repr: &ReprOptions, kind: StructKind, - ) -> Option { + ) -> Option> { let layout = univariant(self, dl, fields, repr, kind, NicheBias::Start); - // Enums prefer niches close to the beginning or the end of the variants so that other (smaller) - // data-carrying variants can be packed into the space after/before the niche. + // Enums prefer niches close to the beginning or the end of the variants so that other + // (smaller) data-carrying variants can be packed into the space after/before the niche. // If the default field ordering does not give us a niche at the front then we do a second - // run and bias niches to the right and then check which one is closer to one of the struct's - // edges. + // run and bias niches to the right and then check which one is closer to one of the + // struct's edges. if let Some(layout) = &layout { // Don't try to calculate an end-biased layout for unsizable structs, // otherwise we could end up with different layouts for - // Foo and Foo which would break unsizing + // Foo and Foo which would break unsizing. if !matches!(kind, StructKind::MaybeUnsized) { if let Some(niche) = layout.largest_niche { let head_space = niche.offset.bytes(); - let niche_length = niche.value.size(dl).bytes(); - let tail_space = layout.size.bytes() - head_space - niche_length; + let niche_len = niche.value.size(dl).bytes(); + let tail_space = layout.size.bytes() - head_space - niche_len; - // This may end up doing redundant work if the niche is already in the last field - // (e.g. a trailing bool) and there is tail padding. But it's non-trivial to get - // the unpadded size so we try anyway. + // This may end up doing redundant work if the niche is already in the last + // field (e.g. a trailing bool) and there is tail padding. But it's non-trivial + // to get the unpadded size so we try anyway. 
if fields.len() > 1 && head_space != 0 && tail_space > 0 { let alt_layout = univariant(self, dl, fields, repr, kind, NicheBias::End) .expect("alt layout should always work"); - let niche = alt_layout + let alt_niche = alt_layout .largest_niche .expect("alt layout should have a niche like the regular one"); - let alt_head_space = niche.offset.bytes(); - let alt_niche_len = niche.value.size(dl).bytes(); + let alt_head_space = alt_niche.offset.bytes(); + let alt_niche_len = alt_niche.value.size(dl).bytes(); let alt_tail_space = alt_layout.size.bytes() - alt_head_space - alt_niche_len; @@ -93,7 +104,7 @@ alt_layout: {}\n", layout.size.bytes(), head_space, - niche_length, + niche_len, tail_space, alt_head_space, alt_niche_len, @@ -114,11 +125,13 @@ layout } - fn layout_of_never_type(&self) -> LayoutS { + fn layout_of_never_type( + &self, + ) -> LayoutS { let dl = self.current_data_layout(); let dl = dl.borrow(); LayoutS { - variants: Variants::Single { index: FIRST_VARIANT }, + variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Primitive, abi: Abi::Uninhabited, largest_niche: None, @@ -129,10 +142,15 @@ } } - fn layout_of_struct_or_enum( + fn layout_of_struct_or_enum< + 'a, + FieldIdx: Idx, + VariantIdx: Idx, + F: Deref> + fmt::Debug, + >( &self, repr: &ReprOptions, - variants: &IndexSlice>>, + variants: &IndexSlice>, is_enum: bool, is_unsafe_cell: bool, scalar_valid_range: (Bound, Bound), @@ -140,7 +158,7 @@ discriminants: impl Iterator, dont_niche_optimize_enum: bool, always_sized: bool, - ) -> Option { + ) -> Option> { let dl = self.current_data_layout(); let dl = dl.borrow(); @@ -155,11 +173,11 @@ // but *not* an encoding of the discriminant (e.g., a tag value). // See issue #49298 for more details on the need to leave space // for non-ZST uninhabited data (mostly partial initialization). - let absent = |fields: &IndexSlice>| { - let uninhabited = fields.iter().any(|f| f.abi().is_uninhabited()); + let absent = |fields: &IndexSlice| { + let uninhabited = fields.iter().any(|f| f.abi.is_uninhabited()); // We cannot ignore alignment; that might lead us to entirely discard a variant and // produce an enum that is less aligned than it should be! - let is_1zst = fields.iter().all(|f| f.0.is_1zst()); + let is_1zst = fields.iter().all(|f| f.is_1zst()); uninhabited && is_1zst }; let (present_first, present_second) = { @@ -176,7 +194,7 @@ } // If it's a struct, still compute a layout so that we can still compute the // field offsets. - None => FIRST_VARIANT, + None => VariantIdx::new(0), }; let is_struct = !is_enum || @@ -279,12 +297,12 @@ // variant layouts, so we can't store them in the // overall LayoutS. Store the overall LayoutS // and the variant LayoutSs here until then. 
- struct TmpLayout { - layout: LayoutS, - variants: IndexVec, + struct TmpLayout { + layout: LayoutS, + variants: IndexVec>, } - let calculate_niche_filling_layout = || -> Option { + let calculate_niche_filling_layout = || -> Option> { if dont_niche_optimize_enum { return None; } @@ -322,13 +340,14 @@ let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap() ..=all_indices.rev().find(|v| needs_disc(*v)).unwrap(); - let count = niche_variants.size_hint().1.unwrap() as u128; + let count = + (niche_variants.end().index() as u128 - niche_variants.start().index() as u128) + 1; // Find the field with the largest niche let (field_index, niche, (niche_start, niche_scalar)) = variants[largest_variant_index] .iter() .enumerate() - .filter_map(|(j, field)| Some((j, field.largest_niche()?))) + .filter_map(|(j, field)| Some((j, field.largest_niche?))) .max_by_key(|(_, niche)| niche.available(dl)) .and_then(|(j, niche)| Some((j, niche, niche.reserve(dl, count)?)))?; let niche_offset = @@ -443,7 +462,7 @@ let discr_type = repr.discr_type(); let bits = Integer::from_attr(dl, discr_type).size().bits(); for (i, mut val) in discriminants { - if variants[i].iter().any(|f| f.abi().is_uninhabited()) { + if variants[i].iter().any(|f| f.abi.is_uninhabited()) { continue; } if discr_type.is_signed() { @@ -484,7 +503,7 @@ if repr.c() { for fields in variants { for field in fields { - prefix_align = prefix_align.max(field.align().abi); + prefix_align = prefix_align.max(field.align.abi); } } } @@ -503,9 +522,9 @@ // Find the first field we can't move later // to make room for a larger discriminant. for field_idx in st.fields.index_by_increasing_offset() { - let field = &field_layouts[FieldIdx::from_usize(field_idx)]; - if !field.0.is_1zst() { - start_align = start_align.min(field.align().abi); + let field = &field_layouts[FieldIdx::new(field_idx)]; + if !field.is_1zst() { + start_align = start_align.min(field.align.abi); break; } } @@ -520,6 +539,7 @@ // Align the maximum variant size to the largest alignment. size = size.align_to(align.abi); + // FIXME(oli-obk): deduplicate and harden these checks if size.bytes() >= dl.obj_size_bound() { return None; } @@ -587,7 +607,7 @@ let tag_mask = ity.size().unsigned_int_max(); let tag = Scalar::Initialized { - value: Int(ity, signed), + value: Primitive::Int(ity, signed), valid_range: WrappingRange { start: (min as u128 & tag_mask), end: (max as u128 & tag_mask), @@ -612,7 +632,7 @@ }; // We skip *all* ZST here and later check if we are good in terms of alignment. // This lets us handle some cases involving aligned ZST. - let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.0.is_zst()); + let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst()); let (field, offset) = match (fields.next(), fields.next()) { (None, None) => { common_prim_initialized_in_all_variants = false; @@ -624,7 +644,7 @@ break; } }; - let prim = match field.abi() { + let prim = match field.abi { Abi::Scalar(scalar) => { common_prim_initialized_in_all_variants &= matches!(scalar, Scalar::Initialized { .. }); @@ -655,7 +675,7 @@ // Common prim might be uninit. 
Scalar::Union { value: prim } }; - let pair = self.scalar_pair(tag, prim_scalar); + let pair = self.scalar_pair::(tag, prim_scalar); let pair_offsets = match pair.fields { FieldsShape::Arbitrary { ref offsets, ref memory_index } => { assert_eq!(memory_index.raw, [0, 1]); @@ -663,8 +683,8 @@ } _ => panic!(), }; - if pair_offsets[FieldIdx::from_u32(0)] == Size::ZERO - && pair_offsets[FieldIdx::from_u32(1)] == *offset + if pair_offsets[FieldIdx::new(0)] == Size::ZERO + && pair_offsets[FieldIdx::new(1)] == *offset && align == pair.align && size == pair.size { @@ -684,7 +704,8 @@ // Also do not overwrite any already existing "clever" ABIs. if variant.fields.count() > 0 && matches!(variant.abi, Abi::Aggregate { .. }) { variant.abi = abi; - // Also need to bump up the size and alignment, so that the entire value fits in here. + // Also need to bump up the size and alignment, so that the entire value fits + // in here. variant.size = cmp::max(variant.size, size); variant.align.abi = cmp::max(variant.align.abi, align.abi); } @@ -720,8 +741,9 @@ // pick the layout with the larger niche; otherwise, // pick tagged as it has simpler codegen. use cmp::Ordering::*; - let niche_size = - |tmp_l: &TmpLayout| tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl)); + let niche_size = |tmp_l: &TmpLayout| { + tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl)) + }; match (tl.layout.size.cmp(&nl.layout.size), niche_size(&tl).cmp(&niche_size(&nl))) { (Greater, _) => nl, (Equal, Less) => nl, @@ -741,11 +763,16 @@ Some(best_layout.layout) } - fn layout_of_union( + fn layout_of_union< + 'a, + FieldIdx: Idx, + VariantIdx: Idx, + F: Deref> + fmt::Debug, + >( &self, repr: &ReprOptions, - variants: &IndexSlice>>, - ) -> Option { + variants: &IndexSlice>, + ) -> Option> { let dl = self.current_data_layout(); let dl = dl.borrow(); let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align }; @@ -762,24 +789,24 @@ }; let mut size = Size::ZERO; - let only_variant = &variants[FIRST_VARIANT]; + let only_variant = &variants[VariantIdx::new(0)]; for field in only_variant { - if field.0.is_unsized() { + if field.is_unsized() { self.delay_bug("unsized field in union".to_string()); } - align = align.max(field.align()); - max_repr_align = max_repr_align.max(field.max_repr_align()); - size = cmp::max(size, field.size()); + align = align.max(field.align); + max_repr_align = max_repr_align.max(field.max_repr_align); + size = cmp::max(size, field.size); - if field.0.is_zst() { + if field.is_zst() { // Nothing more to do for ZST fields continue; } if let Ok(common) = common_non_zst_abi_and_align { // Discard valid range information and allow undef - let field_abi = field.abi().to_union(); + let field_abi = field.abi.to_union(); if let Some((common_abi, common_align)) = common { if common_abi != field_abi { @@ -790,15 +817,14 @@ // have the same alignment if !matches!(common_abi, Abi::Aggregate { .. 
}) { assert_eq!( - common_align, - field.align().abi, + common_align, field.align.abi, "non-Aggregate field with matching ABI but differing alignment" ); } } } else { // First non-ZST field: record its ABI and alignment - common_non_zst_abi_and_align = Ok(Some((field_abi, field.align().abi))); + common_non_zst_abi_and_align = Ok(Some((field_abi, field.align.abi))); } } } @@ -830,7 +856,7 @@ }; Some(LayoutS { - variants: Variants::Single { index: FIRST_VARIANT }, + variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Union(NonZeroUsize::new(only_variant.len())?), abi, largest_niche: None, @@ -848,14 +874,19 @@ End, } -fn univariant( +fn univariant< + 'a, + FieldIdx: Idx, + VariantIdx: Idx, + F: Deref> + fmt::Debug, +>( this: &(impl LayoutCalculator + ?Sized), dl: &TargetDataLayout, - fields: &IndexSlice>, + fields: &IndexSlice, repr: &ReprOptions, kind: StructKind, niche_bias: NicheBias, -) -> Option { +) -> Option> { let pack = repr.pack; let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align }; let mut max_repr_align = repr.align; @@ -868,15 +899,17 @@ // If `-Z randomize-layout` was enabled for the type definition we can shuffle // the field ordering to try and catch some code making assumptions about layouts - // we don't guarantee + // we don't guarantee. if repr.can_randomize_type_layout() && cfg!(feature = "randomize") { #[cfg(feature = "randomize")] { - // `ReprOptions.layout_seed` is a deterministic seed that we can use to - // randomize field ordering with - let mut rng = Xoshiro128StarStar::seed_from_u64(repr.field_shuffle_seed.as_u64()); + use rand::{seq::SliceRandom, SeedableRng}; + // `ReprOptions.layout_seed` is a deterministic seed we can use to randomize field + // ordering. + let mut rng = + rand_xoshiro::Xoshiro128StarStar::seed_from_u64(repr.field_shuffle_seed); - // Shuffle the ordering of the fields + // Shuffle the ordering of the fields. optimizing.shuffle(&mut rng); } // Otherwise we just leave things alone and actually optimize the type's fields @@ -884,35 +917,34 @@ // To allow unsizing `&Foo` -> `&Foo`, the layout of the struct must // not depend on the layout of the tail. let max_field_align = - fields_excluding_tail.iter().map(|f| f.align().abi.bytes()).max().unwrap_or(1); + fields_excluding_tail.iter().map(|f| f.align.abi.bytes()).max().unwrap_or(1); let largest_niche_size = fields_excluding_tail .iter() - .filter_map(|f| f.largest_niche()) + .filter_map(|f| f.largest_niche) .map(|n| n.available(dl)) .max() .unwrap_or(0); - // Calculates a sort key to group fields by their alignment or possibly some size-derived - // pseudo-alignment. - let alignment_group_key = |layout: Layout<'_>| { + // Calculates a sort key to group fields by their alignment or possibly some + // size-derived pseudo-alignment. + let alignment_group_key = |layout: &F| { if let Some(pack) = pack { - // return the packed alignment in bytes - layout.align().abi.min(pack).bytes() + // Return the packed alignment in bytes. + layout.align.abi.min(pack).bytes() } else { - // returns log2(effective-align). - // This is ok since `pack` applies to all fields equally. - // The calculation assumes that size is an integer multiple of align, except for ZSTs. - // - let align = layout.align().abi.bytes(); - let size = layout.size().bytes(); - let niche_size = layout.largest_niche().map(|n| n.available(dl)).unwrap_or(0); - // group [u8; 4] with align-4 or [u8; 6] with align-2 fields + // Returns `log2(effective-align)`. 
This is ok since `pack` applies to all + // fields equally. The calculation assumes that size is an integer multiple of + // align, except for ZSTs. + let align = layout.align.abi.bytes(); + let size = layout.size.bytes(); + let niche_size = layout.largest_niche.map(|n| n.available(dl)).unwrap_or(0); + // Group [u8; 4] with align-4 or [u8; 6] with align-2 fields. let size_as_align = align.max(size).trailing_zeros(); let size_as_align = if largest_niche_size > 0 { match niche_bias { - // Given `A(u8, [u8; 16])` and `B(bool, [u8; 16])` we want to bump the array - // to the front in the first case (for aligned loads) but keep the bool in front - // in the second case for its niches. + // Given `A(u8, [u8; 16])` and `B(bool, [u8; 16])` we want to bump the + // array to the front in the first case (for aligned loads) but keep + // the bool in front in the second case for its niches. NicheBias::Start => max_field_align.trailing_zeros().min(size_as_align), // When moving niches towards the end of the struct then for // A((u8, u8, u8, bool), (u8, bool, u8)) we want to keep the first tuple @@ -931,18 +963,18 @@ match kind { StructKind::AlwaysSized | StructKind::MaybeUnsized => { - // Currently `LayoutS` only exposes a single niche so sorting is usually sufficient - // to get one niche into the preferred position. If it ever supported multiple niches - // then a more advanced pick-and-pack approach could provide better results. - // But even for the single-niche cache it's not optimal. E.g. for - // A(u32, (bool, u8), u16) it would be possible to move the bool to the front - // but it would require packing the tuple together with the u16 to build a 4-byte - // group so that the u32 can be placed after it without padding. This kind - // of packing can't be achieved by sorting. + // Currently `LayoutS` only exposes a single niche so sorting is usually + // sufficient to get one niche into the preferred position. If it ever + // supported multiple niches then a more advanced pick-and-pack approach could + // provide better results. But even for the single-niche cache it's not + // optimal. E.g. for A(u32, (bool, u8), u16) it would be possible to move the + // bool to the front but it would require packing the tuple together with the + // u16 to build a 4-byte group so that the u32 can be placed after it without + // padding. This kind of packing can't be achieved by sorting. 
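The comments above explain why fields are grouped by an alignment-derived key: placing higher-alignment fields first removes padding, which is what the sort just below implements. A standalone sketch of the observable effect, not part of this patch:

use std::mem::size_of;

// Declared order is kept: u8 @ 0, 3 bytes padding, u32 @ 4, u16 @ 8, 2 bytes
// tail padding -> 12 bytes.
#[allow(dead_code)]
#[repr(C)]
struct COrder(u8, u32, u16);

// Default layout: the compiler is free to apply the alignment-based sort
// described above (e.g. u32, u16, u8 -> 8 bytes). The optimized size is
// current compiler behaviour, not a language guarantee.
#[allow(dead_code)]
struct RustOrder(u8, u32, u16);

fn main() {
    assert_eq!(size_of::<COrder>(), 12);
    println!("repr(C): {}, default: {}", size_of::<COrder>(), size_of::<RustOrder>());
}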
optimizing.sort_by_key(|&x| { - let f = fields[x]; - let field_size = f.size().bytes(); - let niche_size = f.largest_niche().map_or(0, |n| n.available(dl)); + let f = &fields[x]; + let field_size = f.size.bytes(); + let niche_size = f.largest_niche.map_or(0, |n| n.available(dl)); let niche_size_key = match niche_bias { // large niche first NicheBias::Start => !niche_size, @@ -950,8 +982,8 @@ NicheBias::End => niche_size, }; let inner_niche_offset_key = match niche_bias { - NicheBias::Start => f.largest_niche().map_or(0, |n| n.offset.bytes()), - NicheBias::End => f.largest_niche().map_or(0, |n| { + NicheBias::Start => f.largest_niche.map_or(0, |n| n.offset.bytes()), + NicheBias::End => f.largest_niche.map_or(0, |n| { !(field_size - n.value.size(dl).bytes() - n.offset.bytes()) }), }; @@ -975,8 +1007,8 @@ // And put the largest niche in an alignment group at the end // so it can be used as discriminant in jagged enums optimizing.sort_by_key(|&x| { - let f = fields[x]; - let niche_size = f.largest_niche().map_or(0, |n| n.available(dl)); + let f = &fields[x]; + let niche_size = f.largest_niche.map_or(0, |n| n.available(dl)); (alignment_group_key(f), niche_size) }); } @@ -1012,24 +1044,24 @@ )); } - if field.0.is_unsized() { + if field.is_unsized() { sized = false; } // Invariant: offset < dl.obj_size_bound() <= 1<<61 let field_align = if let Some(pack) = pack { - field.align().min(AbiAndPrefAlign::new(pack)) + field.align.min(AbiAndPrefAlign::new(pack)) } else { - field.align() + field.align }; offset = offset.align_to(field_align.abi); align = align.max(field_align); - max_repr_align = max_repr_align.max(field.max_repr_align()); + max_repr_align = max_repr_align.max(field.max_repr_align); debug!("univariant offset: {:?} field: {:#?}", offset, field); offsets[i] = offset; - if let Some(mut niche) = field.largest_niche() { + if let Some(mut niche) = field.largest_niche { let available = niche.available(dl); // Pick up larger niches. let prefer_new_niche = match niche_bias { @@ -1044,7 +1076,7 @@ } } - offset = offset.checked_add(field.size(), dl)?; + offset = offset.checked_add(field.size, dl)?; } // The unadjusted ABI alignment does not include repr(align), but does include repr(pack). @@ -1068,16 +1100,20 @@ inverse_memory_index.invert_bijective_mapping() } else { debug_assert!(inverse_memory_index.iter().copied().eq(fields.indices())); - inverse_memory_index.into_iter().map(FieldIdx::as_u32).collect() + inverse_memory_index.into_iter().map(|it| it.index() as u32).collect() }; let size = min_size.align_to(align.abi); + // FIXME(oli-obk): deduplicate and harden these checks + if size.bytes() >= dl.obj_size_bound() { + return None; + } let mut layout_of_single_non_zst_field = None; let mut abi = Abi::Aggregate { sized }; // Try to make this a Scalar/ScalarPair. if sized && size.bytes() > 0 { // We skip *all* ZST here and later check if we are good in terms of alignment. // This lets us handle some cases involving aligned ZST. - let mut non_zst_fields = fields.iter_enumerated().filter(|&(_, f)| !f.0.is_zst()); + let mut non_zst_fields = fields.iter_enumerated().filter(|&(_, f)| !f.is_zst()); match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) { // We have exactly one non-ZST field. @@ -1085,18 +1121,17 @@ layout_of_single_non_zst_field = Some(field); // Field fills the struct and it has a scalar or scalar pair ABI. 
- if offsets[i].bytes() == 0 && align.abi == field.align().abi && size == field.size() - { - match field.abi() { + if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size { + match field.abi { // For plain scalars, or vectors of them, we can't unpack // newtypes for `#[repr(C)]`, as that affects C ABIs. Abi::Scalar(_) | Abi::Vector { .. } if optimize => { - abi = field.abi(); + abi = field.abi; } // But scalar pairs are Rust-specific and get // treated as aggregates by C ABIs anyway. Abi::ScalarPair(..) => { - abi = field.abi(); + abi = field.abi; } _ => {} } @@ -1105,7 +1140,7 @@ // Two non-ZST fields, and they're both scalars. (Some((i, a)), Some((j, b)), None) => { - match (a.abi(), b.abi()) { + match (a.abi, b.abi) { (Abi::Scalar(a), Abi::Scalar(b)) => { // Order by the memory placement, not source order. let ((i, a), (j, b)) = if offsets[i] < offsets[j] { @@ -1113,7 +1148,7 @@ } else { ((j, b), (i, a)) }; - let pair = this.scalar_pair(a, b); + let pair = this.scalar_pair::(a, b); let pair_offsets = match pair.fields { FieldsShape::Arbitrary { ref offsets, ref memory_index } => { assert_eq!(memory_index.raw, [0, 1]); @@ -1121,8 +1156,8 @@ } _ => panic!(), }; - if offsets[i] == pair_offsets[FieldIdx::from_usize(0)] - && offsets[j] == pair_offsets[FieldIdx::from_usize(1)] + if offsets[i] == pair_offsets[FieldIdx::new(0)] + && offsets[j] == pair_offsets[FieldIdx::new(1)] && align == pair.align && size == pair.size { @@ -1138,13 +1173,13 @@ _ => {} } } - if fields.iter().any(|f| f.abi().is_uninhabited()) { + if fields.iter().any(|f| f.abi.is_uninhabited()) { abi = Abi::Uninhabited; } let unadjusted_abi_align = if repr.transparent() { match layout_of_single_non_zst_field { - Some(l) => l.unadjusted_abi_align(), + Some(l) => l.unadjusted_abi_align, None => { // `repr(transparent)` with all ZST fields. 
align.abi @@ -1155,7 +1190,7 @@ }; Some(LayoutS { - variants: Variants::Single { index: FIRST_VARIANT }, + variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Arbitrary { offsets, memory_index }, abi, largest_niche, @@ -1166,17 +1201,22 @@ }) } -fn format_field_niches( - layout: &LayoutS, - fields: &IndexSlice>, +fn format_field_niches< + 'a, + FieldIdx: Idx, + VariantIdx: Idx, + F: Deref> + fmt::Debug, +>( + layout: &LayoutS, + fields: &IndexSlice, dl: &TargetDataLayout, ) -> String { let mut s = String::new(); for i in layout.fields.index_by_increasing_offset() { let offset = layout.fields.offset(i); - let f = fields[i.into()]; - write!(s, "[o{}a{}s{}", offset.bytes(), f.align().abi.bytes(), f.size().bytes()).unwrap(); - if let Some(n) = f.largest_niche() { + let f = &fields[FieldIdx::new(i)]; + write!(s, "[o{}a{}s{}", offset.bytes(), f.align.abi.bytes(), f.size.bytes()).unwrap(); + if let Some(n) = f.largest_niche { write!( s, " n{}b{}s{}", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_abi/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_abi/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_abi/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_abi/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,23 +1,24 @@ -#![cfg_attr(feature = "nightly", feature(step_trait, rustc_attrs, min_specialization))] +#![cfg_attr(feature = "nightly", feature(step_trait))] #![cfg_attr(feature = "nightly", allow(internal_features))] +#![cfg_attr(all(not(bootstrap), feature = "nightly"), doc(rust_logo))] +#![cfg_attr(all(not(bootstrap), feature = "nightly"), feature(rustdoc_internals))] use std::fmt; -#[cfg(feature = "nightly")] -use std::iter::Step; use std::num::{NonZeroUsize, ParseIntError}; use std::ops::{Add, AddAssign, Mul, RangeInclusive, Sub}; use std::str::FromStr; use bitflags::bitflags; -use rustc_data_structures::intern::Interned; -use rustc_data_structures::stable_hasher::Hash64; +use rustc_index::{Idx, IndexSlice, IndexVec}; + #[cfg(feature = "nightly")] use rustc_data_structures::stable_hasher::StableOrd; -use rustc_index::{IndexSlice, IndexVec}; #[cfg(feature = "nightly")] use rustc_macros::HashStable_Generic; #[cfg(feature = "nightly")] use rustc_macros::{Decodable, Encodable}; +#[cfg(feature = "nightly")] +use std::iter::Step; mod layout; @@ -28,9 +29,6 @@ /// instead of implementing everything in `rustc_middle`. pub trait HashStableContext {} -use Integer::*; -use Primitive::*; - bitflags! { #[derive(Default)] #[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))] @@ -53,10 +51,11 @@ #[derive(Copy, Clone, Debug, Eq, PartialEq)] #[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))] pub enum IntegerType { - /// Pointer sized integer type, i.e. isize and usize. The field shows signedness, that - /// is, `Pointer(true)` is isize. + /// Pointer-sized integer type, i.e. `isize` and `usize`. The field shows signedness, e.g. + /// `Pointer(true)` means `isize`. Pointer(bool), - /// Fix sized integer type, e.g. i8, u32, i128 The bool field shows signedness, `Fixed(I8, false)` means `u8` + /// Fixed-sized integer type, e.g. `i8`, `u32`, `i128`. The bool field shows signedness, e.g. + /// `Fixed(I8, false)` means `u8`. Fixed(Integer, bool), } @@ -69,7 +68,7 @@ } } -/// Represents the repr options provided by the user, +/// Represents the repr options provided by the user. 
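`ReprOptions`, defined just below, carries the `field_shuffle_seed` (now a plain `u64`) that the layout code above feeds into `Xoshiro128StarStar` when `-Z randomize-layout` is enabled. A dependency-free sketch of the idea, not part of this patch, with a toy xorshift generator standing in for the real PRNG:

// Illustrative only: a per-type deterministic seed drives a Fisher-Yates
// shuffle, so randomized layouts differ between types but are reproducible.
fn xorshift64(state: &mut u64) -> u64 {
    // Marsaglia's xorshift64; good enough for an illustration.
    *state ^= *state << 13;
    *state ^= *state >> 7;
    *state ^= *state << 17;
    *state
}

fn shuffle<T>(items: &mut [T], seed: u64) {
    let mut state = seed | 1; // avoid the all-zero state
    // Fisher-Yates: swap each position with a random earlier-or-equal index.
    for i in (1..items.len()).rev() {
        let j = (xorshift64(&mut state) % (i as u64 + 1)) as usize;
        items.swap(i, j);
    }
}

fn main() {
    let mut fields = vec!["a", "b", "c", "d"];
    shuffle(&mut fields, 0xDEAD_BEEF);
    let mut again = vec!["a", "b", "c", "d"];
    shuffle(&mut again, 0xDEAD_BEEF);
    assert_eq!(fields, again); // same seed, same field order
    println!("{fields:?}");
}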
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)] #[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))] pub struct ReprOptions { @@ -79,12 +78,12 @@ pub flags: ReprFlags, /// The seed to be used for randomizing a type's layout /// - /// Note: This could technically be a `Hash128` which would + /// Note: This could technically be a `u128` which would /// be the "most accurate" hash as it'd encompass the item and crate /// hash without loss, but it does pay the price of being larger. /// Everything's a tradeoff, a 64-bit seed should be sufficient for our /// purposes (primarily `-Z randomize-layout`) - pub field_shuffle_seed: Hash64, + pub field_shuffle_seed: u64, } impl ReprOptions { @@ -139,7 +138,7 @@ } /// Returns `true` if this type is valid for reordering and `-Z randomize-layout` - /// was enabled for its declaration crate + /// was enabled for its declaration crate. pub fn can_randomize_type_layout(&self) -> bool { !self.inhibit_struct_field_reordering_opt() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT) @@ -217,7 +216,8 @@ } impl TargetDataLayout { - /// Parse data layout from an [llvm data layout string](https://llvm.org/docs/LangRef.html#data-layout) + /// Parse data layout from an + /// [llvm data layout string](https://llvm.org/docs/LangRef.html#data-layout) /// /// This function doesn't fill `c_enum_min_size` and it will always be `I32` since it can not be /// determined from llvm string. @@ -242,10 +242,11 @@ }; // Parse a size string. - let size = |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits); + let parse_size = + |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits); // Parse an alignment string. - let align = |s: &[&'a str], cause: &'a str| { + let parse_align = |s: &[&'a str], cause: &'a str| { if s.is_empty() { return Err(TargetDataLayoutErrors::MissingAlignment { cause }); } @@ -269,22 +270,22 @@ [p] if p.starts_with('P') => { dl.instruction_address_space = parse_address_space(&p[1..], "P")? } - ["a", ref a @ ..] => dl.aggregate_align = align(a, "a")?, - ["f32", ref a @ ..] => dl.f32_align = align(a, "f32")?, - ["f64", ref a @ ..] => dl.f64_align = align(a, "f64")?, + ["a", ref a @ ..] => dl.aggregate_align = parse_align(a, "a")?, + ["f32", ref a @ ..] => dl.f32_align = parse_align(a, "f32")?, + ["f64", ref a @ ..] => dl.f64_align = parse_align(a, "f64")?, // FIXME(erikdesjardins): we should be parsing nonzero address spaces // this will require replacing TargetDataLayout::{pointer_size,pointer_align} // with e.g. `fn pointer_size_in(AddressSpace)` [p @ "p", s, ref a @ ..] | [p @ "p0", s, ref a @ ..] => { - dl.pointer_size = size(s, p)?; - dl.pointer_align = align(a, p)?; + dl.pointer_size = parse_size(s, p)?; + dl.pointer_align = parse_align(a, p)?; } [s, ref a @ ..] if s.starts_with('i') => { let Ok(bits) = s[1..].parse::() else { - size(&s[1..], "i")?; // For the user error. + parse_size(&s[1..], "i")?; // For the user error. continue; }; - let a = align(a, s)?; + let a = parse_align(a, s)?; match bits { 1 => dl.i1_align = a, 8 => dl.i8_align = a, @@ -301,8 +302,8 @@ } } [s, ref a @ ..] 
if s.starts_with('v') => { - let v_size = size(&s[1..], "v")?; - let a = align(a, s)?; + let v_size = parse_size(&s[1..], "v")?; + let a = parse_align(a, s)?; if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) { v.1 = a; continue; @@ -339,6 +340,7 @@ #[inline] pub fn ptr_sized_integer(&self) -> Integer { + use Integer::*; match self.pointer_size.bits() { 16 => I16, 32 => I32, @@ -680,6 +682,7 @@ impl Align { pub const ONE: Align = Align { pow2: 0 }; + // LLVM has a maximal supported alignment of 2^29, we inherit that. pub const MAX: Align = Align { pow2: 29 }; #[inline] @@ -747,7 +750,6 @@ /// A pair of alignments, ABI-mandated and preferred. #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))] - pub struct AbiAndPrefAlign { pub abi: Align, pub pref: Align, @@ -773,7 +775,6 @@ /// Integers, also used for enum discriminants. #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))] - pub enum Integer { I8, I16, @@ -785,6 +786,7 @@ impl Integer { #[inline] pub fn size(self) -> Size { + use Integer::*; match self { I8 => Size::from_bytes(1), I16 => Size::from_bytes(2), @@ -805,6 +807,7 @@ } pub fn align(self, cx: &C) -> AbiAndPrefAlign { + use Integer::*; let dl = cx.data_layout(); match self { @@ -819,6 +822,7 @@ /// Returns the largest signed value that can be represented by this Integer. #[inline] pub fn signed_max(self) -> i128 { + use Integer::*; match self { I8 => i8::MAX as i128, I16 => i16::MAX as i128, @@ -831,6 +835,7 @@ /// Finds the smallest Integer type which can represent the signed value. #[inline] pub fn fit_signed(x: i128) -> Integer { + use Integer::*; match x { -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8, -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16, @@ -843,6 +848,7 @@ /// Finds the smallest Integer type which can represent the unsigned value. #[inline] pub fn fit_unsigned(x: u128) -> Integer { + use Integer::*; match x { 0..=0x0000_0000_0000_00ff => I8, 0..=0x0000_0000_0000_ffff => I16, @@ -854,6 +860,7 @@ /// Finds the smallest integer with the given alignment. pub fn for_align(cx: &C, wanted: Align) -> Option { + use Integer::*; let dl = cx.data_layout(); [I8, I16, I32, I64, I128].into_iter().find(|&candidate| { @@ -863,6 +870,7 @@ /// Find the largest integer with the given alignment or less. pub fn approximate_align(cx: &C, wanted: Align) -> Integer { + use Integer::*; let dl = cx.data_layout(); // FIXME(eddyb) maybe include I128 in the future, when it works everywhere. @@ -908,6 +916,7 @@ impl Primitive { pub fn size(self, cx: &C) -> Size { + use Primitive::*; let dl = cx.data_layout(); match self { @@ -922,6 +931,7 @@ } pub fn align(self, cx: &C) -> AbiAndPrefAlign { + use Primitive::*; let dl = cx.data_layout(); match self { @@ -937,8 +947,7 @@ } /// Inclusive wrap-around range of valid values, that is, if -/// start > end, it represents `start..=MAX`, -/// followed by `0..=end`. +/// start > end, it represents `start..=MAX`, followed by `0..=end`. 
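The `WrappingRange` documentation above (its i8 example continues just below) allows `start > end`, in which case the valid values wrap around the top of the type. A standalone sketch of that membership rule, not part of this patch; the `contains` helper here is purely illustrative:

#[derive(Clone, Copy)]
struct WrappingRange {
    start: u128,
    end: u128,
}

impl WrappingRange {
    fn contains(self, v: u128) -> bool {
        if self.start <= self.end {
            // Ordinary inclusive range.
            self.start <= v && v <= self.end
        } else {
            // Wrapped: start..=MAX, followed by 0..=end.
            v >= self.start || v <= self.end
        }
    }
}

fn main() {
    // For an i8-sized value, 254..=2 means {254, 255, 0, 1, 2}.
    let r = WrappingRange { start: 254, end: 2 };
    assert!(r.contains(255) && r.contains(0) && r.contains(2));
    assert!(!r.contains(100));
}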
/// /// That is, for an i8 primitive, a range of `254..=2` means following /// sequence: @@ -970,21 +979,21 @@ /// Returns `self` with replaced `start` #[inline(always)] - pub fn with_start(mut self, start: u128) -> Self { + fn with_start(mut self, start: u128) -> Self { self.start = start; self } /// Returns `self` with replaced `end` #[inline(always)] - pub fn with_end(mut self, end: u128) -> Self { + fn with_end(mut self, end: u128) -> Self { self.end = end; self } /// Returns `true` if `size` completely fills the range. #[inline] - pub fn is_full_for(&self, size: Size) -> bool { + fn is_full_for(&self, size: Size) -> bool { let max_value = size.unsigned_int_max(); debug_assert!(self.start <= max_value && self.end <= max_value); self.start == (self.end.wrapping_add(1) & max_value) @@ -1027,10 +1036,11 @@ impl Scalar { #[inline] pub fn is_bool(&self) -> bool { + use Integer::*; matches!( self, Scalar::Initialized { - value: Int(I8, false), + value: Primitive::Int(I8, false), valid_range: WrappingRange { start: 0, end: 1 } } ) @@ -1066,7 +1076,8 @@ } #[inline] - /// Allows the caller to mutate the valid range. This operation will panic if attempted on a union. + /// Allows the caller to mutate the valid range. This operation will panic if attempted on a + /// union. pub fn valid_range_mut(&mut self) -> &mut WrappingRange { match self { Scalar::Initialized { valid_range, .. } => valid_range, @@ -1074,7 +1085,8 @@ } } - /// Returns `true` if all possible numbers are valid, i.e `valid_range` covers the whole layout + /// Returns `true` if all possible numbers are valid, i.e `valid_range` covers the whole + /// layout. #[inline] pub fn is_always_valid(&self, cx: &C) -> bool { match *self { @@ -1093,36 +1105,11 @@ } } -rustc_index::newtype_index! { - /// The *source-order* index of a field in a variant. - /// - /// This is how most code after type checking refers to fields, rather than - /// using names (as names have hygiene complications and more complex lookup). - /// - /// Particularly for `repr(Rust)` types, this may not be the same as *layout* order. - /// (It is for `repr(C)` `struct`s, however.) - /// - /// For example, in the following types, - /// ```rust - /// # enum Never {} - /// # #[repr(u16)] - /// enum Demo1 { - /// Variant0 { a: Never, b: i32 } = 100, - /// Variant1 { c: u8, d: u64 } = 10, - /// } - /// struct Demo2 { e: u8, f: u16, g: u8 } - /// ``` - /// `b` is `FieldIdx(1)` in `VariantIdx(0)`, - /// `d` is `FieldIdx(1)` in `VariantIdx(1)`, and - /// `f` is `FieldIdx(1)` in `VariantIdx(0)`. - #[derive(HashStable_Generic)] - pub struct FieldIdx {} -} - +// NOTE: This struct is generic over the FieldIdx for rust-analyzer usage. /// Describes how the fields of a type are located in memory. #[derive(PartialEq, Eq, Hash, Clone, Debug)] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))] -pub enum FieldsShape { +pub enum FieldsShape { /// Scalar primitives and `!`, which never have fields. Primitive, @@ -1162,7 +1149,7 @@ }, } -impl FieldsShape { +impl FieldsShape { #[inline] pub fn count(&self) -> usize { match *self { @@ -1188,7 +1175,7 @@ assert!(i < count, "tried to access field {i} of array with {count} fields"); stride * i } - FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::from_usize(i)], + FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)], } } @@ -1200,7 +1187,7 @@ } FieldsShape::Union(_) | FieldsShape::Array { .. } => i, FieldsShape::Arbitrary { ref memory_index, .. 
} => { - memory_index[FieldIdx::from_usize(i)].try_into().unwrap() + memory_index[FieldIdx::new(i)].try_into().unwrap() } } } @@ -1216,7 +1203,7 @@ if let FieldsShape::Arbitrary { ref memory_index, .. } = *self { if use_small { for (field_idx, &mem_idx) in memory_index.iter_enumerated() { - inverse_small[mem_idx as usize] = field_idx.as_u32() as u8; + inverse_small[mem_idx as usize] = field_idx.index() as u8; } } else { inverse_big = memory_index.invert_bijective_mapping(); @@ -1229,7 +1216,7 @@ if use_small { inverse_small[i] as usize } else { - inverse_big[i as u32].as_usize() + inverse_big[i as u32].index() } } }) @@ -1252,7 +1239,6 @@ /// in terms of categories of C types there are ABI rules for. #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))] - pub enum Abi { Uninhabited, Scalar(Scalar), @@ -1373,9 +1359,10 @@ } } +// NOTE: This struct is generic over the FieldIdx and VariantIdx for rust-analyzer usage. #[derive(PartialEq, Eq, Hash, Clone, Debug)] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))] -pub enum Variants { +pub enum Variants { /// Single enum variants, structs/tuples, unions, and all non-ADTs. Single { index: VariantIdx }, @@ -1387,15 +1374,16 @@ /// For enums, the tag is the sole field of the layout. Multiple { tag: Scalar, - tag_encoding: TagEncoding, + tag_encoding: TagEncoding, tag_field: usize, - variants: IndexVec, + variants: IndexVec>, }, } +// NOTE: This struct is generic over the VariantIdx for rust-analyzer usage. #[derive(PartialEq, Eq, Hash, Clone, Debug)] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))] -pub enum TagEncoding { +pub enum TagEncoding { /// The tag directly stores the discriminant, but possibly with a smaller layout /// (so converting the tag to the discriminant can require sign extension). Direct, @@ -1457,17 +1445,19 @@ return None; } - // Extend the range of valid values being reserved by moving either `v.start` or `v.end` bound. - // Given an eventual `Option`, we try to maximize the chance for `None` to occupy the niche of zero. - // This is accomplished by preferring enums with 2 variants(`count==1`) and always taking the shortest path to niche zero. - // Having `None` in niche zero can enable some special optimizations. + // Extend the range of valid values being reserved by moving either `v.start` or `v.end` + // bound. Given an eventual `Option`, we try to maximize the chance for `None` to occupy + // the niche of zero. This is accomplished by preferring enums with 2 variants(`count==1`) + // and always taking the shortest path to niche zero. Having `None` in niche zero can + // enable some special optimizations. // // Bound selection criteria: // 1. Select closest to zero given wrapping semantics. // 2. Avoid moving past zero if possible. // - // In practice this means that enums with `count > 1` are unlikely to claim niche zero, since they have to fit perfectly. - // If niche zero is already reserved, the selection of bounds are of little interest. + // In practice this means that enums with `count > 1` are unlikely to claim niche zero, + // since they have to fit perfectly. If niche zero is already reserved, the selection of + // bounds are of little interest. let move_start = |v: WrappingRange| { let start = v.start.wrapping_sub(count) & max_value; Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) })) @@ -1501,38 +1491,21 @@ } } -rustc_index::newtype_index! { - /// The *source-order* index of a variant in a type. 
- /// - /// For enums, these are always `0..variant_count`, regardless of any - /// custom discriminants that may have been defined, and including any - /// variants that may end up uninhabited due to field types. (Some of the - /// variants may not be present in a monomorphized ABI [`Variants`], but - /// those skipped variants are always counted when determining the *index*.) - /// - /// `struct`s, `tuples`, and `unions`s are considered to have a single variant - /// with variant index zero, aka [`FIRST_VARIANT`]. - #[derive(HashStable_Generic)] - pub struct VariantIdx { - /// Equivalent to `VariantIdx(0)`. - const FIRST_VARIANT = 0; - } -} - +// NOTE: This struct is generic over the FieldIdx and VariantIdx for rust-analyzer usage. #[derive(PartialEq, Eq, Hash, Clone)] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))] -pub struct LayoutS { +pub struct LayoutS { /// Says where the fields are located within the layout. - pub fields: FieldsShape, + pub fields: FieldsShape, /// Encodes information about multi-variant layouts. /// Even with `Multiple` variants, a layout still has its own fields! Those are then /// shared between all variants. One of them will be the discriminant, - /// but e.g. generators can have more. + /// but e.g. coroutines can have more. /// /// To access all fields of this layout, both `fields` and the fields of the active variant /// must be taken into account. - pub variants: Variants, + pub variants: Variants, /// The `abi` defines how this data is passed between functions, and it defines /// value restrictions via `valid_range`. @@ -1561,13 +1534,13 @@ pub unadjusted_abi_align: Align, } -impl LayoutS { +impl LayoutS { pub fn scalar(cx: &C, scalar: Scalar) -> Self { let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar); let size = scalar.size(cx); let align = scalar.align(cx); LayoutS { - variants: Variants::Single { index: FIRST_VARIANT }, + variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Primitive, abi: Abi::Scalar(scalar), largest_niche, @@ -1579,7 +1552,11 @@ } } -impl fmt::Debug for LayoutS { +impl fmt::Debug for LayoutS +where + FieldsShape: fmt::Debug, + Variants: fmt::Debug, +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // This is how `Layout` used to print before it become // `Interned`. We print it like this to avoid having to update @@ -1607,61 +1584,6 @@ } } -#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable_Generic)] -#[rustc_pass_by_value] -pub struct Layout<'a>(pub Interned<'a, LayoutS>); - -impl<'a> fmt::Debug for Layout<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // See comment on `::fmt` above. - self.0.0.fmt(f) - } -} - -impl<'a> Layout<'a> { - pub fn fields(self) -> &'a FieldsShape { - &self.0.0.fields - } - - pub fn variants(self) -> &'a Variants { - &self.0.0.variants - } - - pub fn abi(self) -> Abi { - self.0.0.abi - } - - pub fn largest_niche(self) -> Option { - self.0.0.largest_niche - } - - pub fn align(self) -> AbiAndPrefAlign { - self.0.0.align - } - - pub fn size(self) -> Size { - self.0.0.size - } - - pub fn max_repr_align(self) -> Option { - self.0.0.max_repr_align - } - - pub fn unadjusted_abi_align(self) -> Align { - self.0.0.unadjusted_abi_align - } - - /// Whether the layout is from a type that implements [`std::marker::PointerLike`]. - /// - /// Currently, that means that the type is pointer-sized, pointer-aligned, - /// and has a scalar ABI. 
- pub fn is_pointer_like(self, data_layout: &TargetDataLayout) -> bool { - self.size() == data_layout.pointer_size - && self.align().abi == data_layout.pointer_align.abi - && matches!(self.abi(), Abi::Scalar(..)) - } -} - #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum PointerKind { /// Shared reference. `frozen` indicates the absence of any `UnsafeCell`. @@ -1681,7 +1603,7 @@ pub safe: Option, } -impl LayoutS { +impl LayoutS { /// Returns `true` if the layout corresponds to an unsized type. #[inline] pub fn is_unsized(&self) -> bool { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_arena/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_arena/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_arena/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_arena/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,4 +4,6 @@ edition = "2021" [dependencies] +# tidy-alphabetical-start smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_arena/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_arena/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_arena/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_arena/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,13 +11,13 @@ html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", test(no_crate_inject, attr(deny(warnings))) )] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature(core_intrinsics)] #![feature(dropck_eyepatch)] #![feature(new_uninit)] #![feature(maybe_uninit_slice)] -#![feature(min_specialization)] #![feature(decl_macro)] -#![feature(pointer_byte_offsets)] #![feature(rustc_attrs)] #![cfg_attr(test, feature(test))] #![feature(strict_provenance)] @@ -44,23 +44,6 @@ f() } -/// An arena that can hold objects of only one type. -pub struct TypedArena { - /// A pointer to the next object to be allocated. - ptr: Cell<*mut T>, - - /// A pointer to the end of the allocated area. When this pointer is - /// reached, a new chunk is allocated. - end: Cell<*mut T>, - - /// A vector of arena chunks. - chunks: RefCell>>, - - /// Marker indicating that dropping the arena causes its owned - /// instances of `T` to be dropped. - _own: PhantomData, -} - struct ArenaChunk { /// The raw storage for the arena chunk. storage: NonNull<[MaybeUninit]>, @@ -130,6 +113,23 @@ const PAGE: usize = 4096; const HUGE_PAGE: usize = 2 * 1024 * 1024; +/// An arena that can hold objects of only one type. +pub struct TypedArena { + /// A pointer to the next object to be allocated. + ptr: Cell<*mut T>, + + /// A pointer to the end of the allocated area. When this pointer is + /// reached, a new chunk is allocated. + end: Cell<*mut T>, + + /// A vector of arena chunks. + chunks: RefCell>>, + + /// Marker indicating that dropping the arena causes its owned + /// instances of `T` to be dropped. + _own: PhantomData, +} + impl Default for TypedArena { /// Creates a new `TypedArena`. fn default() -> TypedArena { @@ -144,77 +144,6 @@ } } -trait IterExt { - fn alloc_from_iter(self, arena: &TypedArena) -> &mut [T]; -} - -impl IterExt for I -where - I: IntoIterator, -{ - // This default collects into a `SmallVec` and then allocates by copying - // from it. 
The specializations below for types like `Vec` are more - // efficient, copying directly without the intermediate collecting step. - // This default could be made more efficient, like - // `DroplessArena::alloc_from_iter`, but it's not hot enough to bother. - #[inline] - default fn alloc_from_iter(self, arena: &TypedArena) -> &mut [T] { - let vec: SmallVec<[_; 8]> = self.into_iter().collect(); - vec.alloc_from_iter(arena) - } -} - -impl IterExt for std::array::IntoIter { - #[inline] - fn alloc_from_iter(self, arena: &TypedArena) -> &mut [T] { - let len = self.len(); - if len == 0 { - return &mut []; - } - // Move the content to the arena by copying and then forgetting it. - unsafe { - let start_ptr = arena.alloc_raw_slice(len); - self.as_slice().as_ptr().copy_to_nonoverlapping(start_ptr, len); - mem::forget(self); - slice::from_raw_parts_mut(start_ptr, len) - } - } -} - -impl IterExt for Vec { - #[inline] - fn alloc_from_iter(mut self, arena: &TypedArena) -> &mut [T] { - let len = self.len(); - if len == 0 { - return &mut []; - } - // Move the content to the arena by copying and then forgetting it. - unsafe { - let start_ptr = arena.alloc_raw_slice(len); - self.as_ptr().copy_to_nonoverlapping(start_ptr, len); - self.set_len(0); - slice::from_raw_parts_mut(start_ptr, len) - } - } -} - -impl IterExt for SmallVec { - #[inline] - fn alloc_from_iter(mut self, arena: &TypedArena) -> &mut [A::Item] { - let len = self.len(); - if len == 0 { - return &mut []; - } - // Move the content to the arena by copying and then forgetting it. - unsafe { - let start_ptr = arena.alloc_raw_slice(len); - self.as_ptr().copy_to_nonoverlapping(start_ptr, len); - self.set_len(0); - slice::from_raw_parts_mut(start_ptr, len) - } - } -} - impl TypedArena { /// Allocates an object in the `TypedArena`, returning a reference to it. #[inline] @@ -250,33 +179,55 @@ available_bytes >= additional_bytes } - /// Ensures there's enough space in the current chunk to fit `len` objects. #[inline] - fn ensure_capacity(&self, additional: usize) { - if !self.can_allocate(additional) { - self.grow(additional); - debug_assert!(self.can_allocate(additional)); - } - } - - #[inline] - unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T { + fn alloc_raw_slice(&self, len: usize) -> *mut T { assert!(mem::size_of::() != 0); assert!(len != 0); - self.ensure_capacity(len); + // Ensure the current chunk can fit `len` objects. + if !self.can_allocate(len) { + self.grow(len); + debug_assert!(self.can_allocate(len)); + } let start_ptr = self.ptr.get(); - // SAFETY: `self.ensure_capacity` makes sure that there is enough space - // for `len` elements. + // SAFETY: `can_allocate`/`grow` ensures that there is enough space for + // `len` elements. unsafe { self.ptr.set(start_ptr.add(len)) }; start_ptr } #[inline] pub fn alloc_from_iter>(&self, iter: I) -> &mut [T] { + // This implementation is entirely separate to + // `DroplessIterator::alloc_from_iter`, even though conceptually they + // are the same. + // + // `DroplessIterator` (in the fast case) writes elements from the + // iterator one at a time into the allocated memory. That's easy + // because the elements don't implement `Drop`. But for `TypedArena` + // they do implement `Drop`, which means that if the iterator panics we + // could end up with some allocated-but-uninitialized elements, which + // will then cause UB in `TypedArena::drop`. + // + // Instead we use an approach where any iterator panic will occur + // before the memory is allocated. 
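The comment above (continuing just below) explains the ordering that makes `alloc_from_iter` panic-safe: the iterator is drained into a temporary buffer before any arena memory is reserved. A standalone sketch of that ordering, not part of this patch:

// Illustrative stand-in only: `Bump` is not the real `TypedArena`; what
// matters is the ordering of steps, which mirrors the comment above.
struct Bump<T> {
    storage: Vec<T>, // stand-in for the arena's chunk memory
}

impl<T> Bump<T> {
    fn new() -> Self {
        Bump { storage: Vec::new() }
    }

    fn alloc_from_iter<I: IntoIterator<Item = T>>(&mut self, iter: I) -> &[T] {
        // Step 1: run the (possibly panicking) iterator to completion first,
        // into a buffer the arena does not own yet.
        let tmp: Vec<T> = iter.into_iter().collect();
        // Step 2: only fully-initialized values ever reach the arena, so a
        // panic in step 1 cannot leave uninitialized elements behind.
        let start = self.storage.len();
        self.storage.extend(tmp);
        &self.storage[start..]
    }
}

fn main() {
    let mut arena = Bump::new();
    let slice = arena.alloc_from_iter((0..4).map(|i| i * i));
    assert_eq!(slice, [0, 1, 4, 9].as_slice());
}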
This function is much less hot than + // `DroplessArena::alloc_from_iter`, so it doesn't need to be + // hyper-optimized. assert!(mem::size_of::() != 0); - iter.alloc_from_iter(self) + + let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect(); + if vec.is_empty() { + return &mut []; + } + // Move the content to the arena by copying and then forgetting it. + let len = vec.len(); + let start_ptr = self.alloc_raw_slice(len); + unsafe { + vec.as_ptr().copy_to_nonoverlapping(start_ptr, len); + vec.set_len(0); + slice::from_raw_parts_mut(start_ptr, len) + } } /// Grows the arena. @@ -407,6 +358,8 @@ #[inline] fn default() -> DroplessArena { DroplessArena { + // We set both `start` and `end` to 0 so that the first call to + // alloc() will trigger a grow(). start: Cell::new(ptr::null_mut()), end: Cell::new(ptr::null_mut()), chunks: Default::default(), @@ -415,9 +368,11 @@ } impl DroplessArena { + #[inline(never)] + #[cold] fn grow(&self, layout: Layout) { // Add some padding so we can align `self.end` while - // stilling fitting in a `layout` allocation. + // still fitting in a `layout` allocation. let additional = layout.size() + cmp::max(DROPLESS_ALIGNMENT, layout.align()) - 1; unsafe { @@ -441,7 +396,7 @@ let mut chunk = ArenaChunk::new(align_up(new_cap, PAGE)); self.start.set(chunk.start()); - // Align the end to DROPLESS_ALIGNMENT + // Align the end to DROPLESS_ALIGNMENT. let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT); // Make sure we don't go past `start`. This should not happen since the allocation @@ -454,55 +409,40 @@ } } - #[inline(never)] - #[cold] - fn grow_and_alloc_raw(&self, layout: Layout) -> *mut u8 { - self.grow(layout); - self.alloc_raw_without_grow(layout).unwrap() - } - - #[inline(never)] - #[cold] - fn grow_and_alloc(&self) -> *mut u8 { - self.grow_and_alloc_raw(Layout::new::()) - } - - /// Allocates a byte slice with specified layout from the current memory - /// chunk. Returns `None` if there is no free space left to satisfy the - /// request. - #[inline] - fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> { - let start = self.start.get().addr(); - let old_end = self.end.get(); - let end = old_end.addr(); - - // Align allocated bytes so that `self.end` stays aligned to DROPLESS_ALIGNMENT - let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT); - - // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT - unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) }; - - let new_end = align_down(end.checked_sub(bytes)?, layout.align()); - if start <= new_end { - let new_end = old_end.with_addr(new_end); - // `new_end` is aligned to DROPLESS_ALIGNMENT as `align_down` preserves alignment - // as both `end` and `bytes` are already aligned to DROPLESS_ALIGNMENT. - self.end.set(new_end); - Some(new_end) - } else { - None - } - } - #[inline] pub fn alloc_raw(&self, layout: Layout) -> *mut u8 { assert!(layout.size() != 0); - if let Some(a) = self.alloc_raw_without_grow(layout) { - return a; + + // This loop executes once or twice: if allocation fails the first + // time, the `grow` ensures it will succeed the second time. + loop { + let start = self.start.get().addr(); + let old_end = self.end.get(); + let end = old_end.addr(); + + // Align allocated bytes so that `self.end` stays aligned to + // DROPLESS_ALIGNMENT. + let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT); + + // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT. 
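The allocation loop above (continuing just below) bumps `self.end` downwards while keeping it aligned to `DROPLESS_ALIGNMENT`, using the crate's `align_up`/`align_down` helpers. A standalone sketch of that rounding arithmetic, not part of this patch, with made-up sizes and addresses:

// Illustrative re-implementations only; the values in `main` are examples.
fn align_up(value: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (value + align - 1) & !(align - 1)
}

fn align_down(value: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    value & !(align - 1)
}

fn main() {
    // Round a 13-byte request up to the 8-byte granule so `end` stays aligned.
    assert_eq!(align_up(13, 8), 16);
    // Carve the allocation from the top of the chunk: with end = 0x1000 the
    // new end (and the start of the object) lands at 0x0ff0.
    let end = 0x1000usize;
    let new_end = align_down(end - align_up(13, 8), 8);
    assert_eq!(new_end, 0x0ff0);
}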
+ unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) }; + + if let Some(sub) = end.checked_sub(bytes) { + let new_end = align_down(sub, layout.align()); + if start <= new_end { + let new_end = old_end.with_addr(new_end); + // `new_end` is aligned to DROPLESS_ALIGNMENT as `align_down` + // preserves alignment as both `end` and `bytes` are already + // aligned to DROPLESS_ALIGNMENT. + self.end.set(new_end); + return new_end; + } + } + + // No free space left. Allocate a new chunk to satisfy the request. + // On failure the grow will panic or abort. + self.grow(layout); } - // No free space left. Allocate a new chunk to satisfy the request. - // On failure the grow will panic or abort. - self.grow_and_alloc_raw(layout) } #[inline] @@ -510,13 +450,7 @@ assert!(!mem::needs_drop::()); assert!(mem::size_of::() != 0); - let mem = if let Some(a) = self.alloc_raw_without_grow(Layout::for_value::(&object)) { - a - } else { - // No free space left. Allocate a new chunk to satisfy the request. - // On failure the grow will panic or abort. - self.grow_and_alloc::() - } as *mut T; + let mem = self.alloc_raw(Layout::new::()) as *mut T; unsafe { // Write into uninitialized memory. @@ -713,10 +647,10 @@ } #[allow(clippy::mut_from_ref)] - pub fn alloc_from_iter<'a, T: ArenaAllocatable<'tcx, C>, C>( - &'a self, + pub fn alloc_from_iter, C>( + &self, iter: impl ::std::iter::IntoIterator, - ) -> &'a mut [T] { + ) -> &mut [T] { T::allocate_from_iter(self, iter) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,9 +3,8 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start bitflags = "1.2.1" memchr = "2.5.0" rustc_data_structures = { path = "../rustc_data_structures" } @@ -14,6 +13,9 @@ rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } +# For Mutability and Movability, which could be uplifted into a common crate. +rustc_type_ir = { path = "../rustc_type_ir" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } thin-vec = "0.2.12" tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast/src/ast.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast/src/ast.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast/src/ast.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast/src/ast.rs 2023-12-21 16:55:28.000000000 +0000 @@ -34,6 +34,7 @@ use rustc_span::source_map::{respan, Spanned}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; use rustc_span::{ErrorGuaranteed, Span, DUMMY_SP}; +pub use rustc_type_ir::{Movability, Mutability}; use std::fmt; use std::mem; use thin_vec::{thin_vec, ThinVec}; @@ -733,6 +734,8 @@ } /// All the different flavors of pattern that Rust recognizes. +// +// Adding a new variant? Please update `test_pat` in `tests/ui/macros/stringify.rs`. #[derive(Clone, Encodable, Decodable, Debug)] pub enum PatKind { /// Represents a wildcard pattern (`_`). @@ -800,57 +803,6 @@ MacCall(P), } -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Copy)] -#[derive(HashStable_Generic, Encodable, Decodable)] -pub enum Mutability { - // N.B. 
Order is deliberate, so that Not < Mut - Not, - Mut, -} - -impl Mutability { - pub fn invert(self) -> Self { - match self { - Mutability::Mut => Mutability::Not, - Mutability::Not => Mutability::Mut, - } - } - - /// Returns `""` (empty string) or `"mut "` depending on the mutability. - pub fn prefix_str(self) -> &'static str { - match self { - Mutability::Mut => "mut ", - Mutability::Not => "", - } - } - - /// Returns `"&"` or `"&mut "` depending on the mutability. - pub fn ref_prefix_str(self) -> &'static str { - match self { - Mutability::Not => "&", - Mutability::Mut => "&mut ", - } - } - - /// Returns `""` (empty string) or `"mutably "` depending on the mutability. - pub fn mutably_str(self) -> &'static str { - match self { - Mutability::Not => "", - Mutability::Mut => "mutably ", - } - } - - /// Return `true` if self is mutable - pub fn is_mut(self) -> bool { - matches!(self, Self::Mut) - } - - /// Return `true` if self is **not** mutable - pub fn is_not(self) -> bool { - matches!(self, Self::Not) - } -} - /// The kind of borrow in an `AddrOf` expression, /// e.g., `&place` or `&raw const place`. #[derive(Clone, Copy, PartialEq, Eq, Debug)] @@ -1017,6 +969,7 @@ } } +// Adding a new variant? Please update `test_stmt` in `tests/ui/macros/stringify.rs`. #[derive(Clone, Encodable, Decodable, Debug)] pub enum StmtKind { /// A local (let) binding. @@ -1282,7 +1235,7 @@ ExprKind::Closure(..) => ExprPrecedence::Closure, ExprKind::Block(..) => ExprPrecedence::Block, ExprKind::TryBlock(..) => ExprPrecedence::TryBlock, - ExprKind::Async(..) => ExprPrecedence::Async, + ExprKind::Gen(..) => ExprPrecedence::Gen, ExprKind::Await(..) => ExprPrecedence::Await, ExprKind::Assign(..) => ExprPrecedence::Assign, ExprKind::AssignOp(..) => ExprPrecedence::AssignOp, @@ -1395,6 +1348,7 @@ pub rest: StructRest, } +// Adding a new variant? Please update `test_expr` in `tests/ui/macros/stringify.rs`. #[derive(Clone, Encodable, Decodable, Debug)] pub enum ExprKind { /// An array (`[a, b, c, d]`) @@ -1451,11 +1405,9 @@ Closure(Box), /// A block (`'label: { ... }`). Block(P, Option::Bar = RhsTy`, suggest `A: Foo`. - if let TyKind::Path(Some(qself), full_path) = &predicate.lhs_ty.kind { - if let TyKind::Path(None, path) = &qself.ty.kind { - match &path.segments[..] { - [PathSegment { ident, args: None, .. }] => { - for param in &generics.params { - if param.ident == *ident { - let param = ident; - match &full_path.segments[qself.position..] { - [PathSegment { ident, args, .. }] => { - // Make a new `Path` from `foo::Bar` to `Foo`. - let mut assoc_path = full_path.clone(); - // Remove `Bar` from `Foo::Bar`. - assoc_path.segments.pop(); - let len = assoc_path.segments.len() - 1; - let gen_args = args.as_deref().cloned(); - // Build ``. - let arg = AngleBracketedArg::Constraint(AssocConstraint { - id: rustc_ast::node_id::DUMMY_NODE_ID, - ident: *ident, - gen_args, - kind: AssocConstraintKind::Equality { - term: predicate.rhs_ty.clone().into(), - }, - span: ident.span, - }); - // Add `` to `Foo`. 
- match &mut assoc_path.segments[len].args { - Some(args) => match args.deref_mut() { - GenericArgs::Parenthesized(_) => continue, - GenericArgs::AngleBracketed(args) => { - args.args.push(arg); - } - }, - empty_args => { - *empty_args = Some( - AngleBracketedArgs { - span: ident.span, - args: thin_vec![arg], - } - .into(), - ); - } - } - err.assoc = Some(errors::AssociatedSuggestion { - span: predicate.span, - ident: *ident, - param: *param, - path: pprust::path_to_string(&assoc_path), - }) - } - _ => {} - }; + if let TyKind::Path(Some(qself), full_path) = &predicate.lhs_ty.kind + && let TyKind::Path(None, path) = &qself.ty.kind + && let [PathSegment { ident, args: None, .. }] = &path.segments[..] + { + for param in &generics.params { + if param.ident == *ident + && let [PathSegment { ident, args, .. }] = &full_path.segments[qself.position..] + { + // Make a new `Path` from `foo::Bar` to `Foo`. + let mut assoc_path = full_path.clone(); + // Remove `Bar` from `Foo::Bar`. + assoc_path.segments.pop(); + let len = assoc_path.segments.len() - 1; + let gen_args = args.as_deref().cloned(); + // Build ``. + let arg = AngleBracketedArg::Constraint(AssocConstraint { + id: rustc_ast::node_id::DUMMY_NODE_ID, + ident: *ident, + gen_args, + kind: AssocConstraintKind::Equality { + term: predicate.rhs_ty.clone().into(), + }, + span: ident.span, + }); + // Add `` to `Foo`. + match &mut assoc_path.segments[len].args { + Some(args) => match args.deref_mut() { + GenericArgs::Parenthesized(_) => continue, + GenericArgs::AngleBracketed(args) => { + args.args.push(arg); } + }, + empty_args => { + *empty_args = Some( + AngleBracketedArgs { + span: ident.span, + args: thin_vec![arg], + } + .into(), + ); } } - _ => {} + err.assoc = Some(errors::AssociatedSuggestion { + span: predicate.span, + ident: *ident, + param: param.ident, + path: pprust::path_to_string(&assoc_path), + }) } } } @@ -1510,7 +1549,7 @@ features, extern_mod: None, in_trait_impl: false, - in_const_trait_impl: false, + in_const_trait_or_impl: false, has_proc_macro_decls: false, outer_impl_trait: None, disallow_tilde_const: None, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -271,7 +271,7 @@ #[diag(ast_passes_bad_c_variadic)] pub struct BadCVariadic { #[primary_span] - pub span: Span, + pub span: Vec, } #[derive(Diagnostic)] @@ -584,6 +584,17 @@ } #[derive(Diagnostic)] +#[diag(ast_passes_const_and_c_variadic)] +pub struct ConstAndCVariadic { + #[primary_span] + pub spans: Vec, + #[label(ast_passes_const)] + pub const_span: Span, + #[label(ast_passes_variadic)] + pub variadic_spans: Vec, +} + +#[derive(Diagnostic)] #[diag(ast_passes_pattern_in_foreign, code = "E0130")] pub struct PatternInForeign { #[primary_span] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/feature_gate.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/feature_gate.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/feature_gate.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/feature_gate.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,53 +10,54 @@ use rustc_span::Span; use rustc_target::spec::abi; use thin_vec::ThinVec; -use tracing::debug; 
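The feature_gate.rs hunk that follows replaces the old `gate_feature_fn!`/`gate_feature_post!` pair with smaller special-purpose macros (`gate!`, `gate_alt!`, `gate_multi!`, `gate_legacy!`). A dependency-free sketch of the common-case shape, not part of this patch, with `Features` and `report_unstable` as illustrative stand-ins for the compiler's real `Features` struct and `feature_err` diagnostics:

struct Features {
    never_type: bool,
    trait_alias: bool,
}

fn report_unstable(feature: &str, explain: &str) {
    eprintln!("error[E0658]: {explain} (enable `#![feature({feature})]`)");
}

// The common case: check one boolean feature flag, then report.
macro_rules! gate {
    ($features:expr, $feature:ident, $explain:expr) => {{
        if !$features.$feature {
            report_unstable(stringify!($feature), $explain);
        }
    }};
}

fn main() {
    let features = Features { never_type: false, trait_alias: true };
    gate!(features, never_type, "the `!` type is experimental"); // reports
    gate!(features, trait_alias, "trait aliases are experimental"); // silent
}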
use crate::errors; -macro_rules! gate_feature_fn { - ($visitor: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr, $help: expr) => {{ - let (visitor, has_feature, span, name, explain, help) = - (&*$visitor, $has_feature, $span, $name, $explain, $help); - let has_feature: bool = has_feature(visitor.features); - debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); - if !has_feature && !span.allows_unstable($name) { - feature_err(&visitor.sess.parse_sess, name, span, explain).help(help).emit(); +/// The common case. +macro_rules! gate { + ($visitor:expr, $feature:ident, $span:expr, $explain:expr) => {{ + if !$visitor.features.$feature && !$span.allows_unstable(sym::$feature) { + feature_err(&$visitor.sess.parse_sess, sym::$feature, $span, $explain).emit(); } }}; - ($visitor: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr) => {{ - let (visitor, has_feature, span, name, explain) = - (&*$visitor, $has_feature, $span, $name, $explain); - let has_feature: bool = has_feature(visitor.features); - debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); - if !has_feature && !span.allows_unstable($name) { - feature_err(&visitor.sess.parse_sess, name, span, explain).emit(); + ($visitor:expr, $feature:ident, $span:expr, $explain:expr, $help:expr) => {{ + if !$visitor.features.$feature && !$span.allows_unstable(sym::$feature) { + feature_err(&$visitor.sess.parse_sess, sym::$feature, $span, $explain) + .help($help) + .emit(); } }}; - (future_incompatible; $visitor: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr) => {{ - let (visitor, has_feature, span, name, explain) = - (&*$visitor, $has_feature, $span, $name, $explain); - let has_feature: bool = has_feature(visitor.features); - debug!( - "gate_feature(feature = {:?}, span = {:?}); has? {} (future_incompatible)", - name, span, has_feature - ); - if !has_feature && !span.allows_unstable($name) { - feature_warn(&visitor.sess.parse_sess, name, span, explain); +} + +/// The unusual case, where the `has_feature` condition is non-standard. +macro_rules! gate_alt { + ($visitor:expr, $has_feature:expr, $name:expr, $span:expr, $explain:expr) => {{ + if !$has_feature && !$span.allows_unstable($name) { + feature_err(&$visitor.sess.parse_sess, $name, $span, $explain).emit(); + } + }}; +} + +/// The case involving a multispan. +macro_rules! gate_multi { + ($visitor:expr, $feature:ident, $spans:expr, $explain:expr) => {{ + if !$visitor.features.$feature { + let spans: Vec<_> = + $spans.filter(|span| !span.allows_unstable(sym::$feature)).collect(); + if !spans.is_empty() { + feature_err(&$visitor.sess.parse_sess, sym::$feature, spans, $explain).emit(); + } } }}; } -macro_rules! gate_feature_post { - ($visitor: expr, $feature: ident, $span: expr, $explain: expr, $help: expr) => { - gate_feature_fn!($visitor, |x: &Features| x.$feature, $span, sym::$feature, $explain, $help) - }; - ($visitor: expr, $feature: ident, $span: expr, $explain: expr) => { - gate_feature_fn!($visitor, |x: &Features| x.$feature, $span, sym::$feature, $explain) - }; - (future_incompatible; $visitor: expr, $feature: ident, $span: expr, $explain: expr) => { - gate_feature_fn!(future_incompatible; $visitor, |x: &Features| x.$feature, $span, sym::$feature, $explain) - }; +/// The legacy case. +macro_rules! 
gate_legacy { + ($visitor:expr, $feature:ident, $span:expr, $explain:expr) => {{ + if !$visitor.features.$feature && !$span.allows_unstable(sym::$feature) { + feature_warn(&$visitor.sess.parse_sess, sym::$feature, $span, $explain); + } + }}; } pub fn check_attribute(attr: &ast::Attribute, sess: &Session, features: &Features) { @@ -78,7 +79,7 @@ match symbol_unescaped { // Stable sym::Rust | sym::C => {} - abi => gate_feature_post!( + abi => gate!( &self, const_extern_fn, span, @@ -129,14 +130,14 @@ fn visit_ty(&mut self, ty: &ast::Ty) { if let ast::TyKind::ImplTrait(..) = ty.kind { if self.in_associated_ty { - gate_feature_post!( + gate!( &self.vis, impl_trait_in_assoc_type, ty.span, "`impl Trait` in associated types is unstable" ); } else { - gate_feature_post!( + gate!( &self.vis, type_alias_impl_trait, ty.span, @@ -153,23 +154,16 @@ fn check_late_bound_lifetime_defs(&self, params: &[ast::GenericParam]) { // Check only lifetime parameters are present and that the lifetime // parameters that are present have no bounds. - let non_lt_param_spans: Vec<_> = params - .iter() - .filter_map(|param| match param.kind { - ast::GenericParamKind::Lifetime { .. } => None, - _ => Some(param.ident.span), - }) - .collect(); - // FIXME: gate_feature_post doesn't really handle multispans... - if !non_lt_param_spans.is_empty() && !self.features.non_lifetime_binders { - feature_err( - &self.sess.parse_sess, - sym::non_lifetime_binders, - non_lt_param_spans, - crate::fluent_generated::ast_passes_forbidden_non_lifetime_param, - ) - .emit(); - } + let non_lt_param_spans = params.iter().filter_map(|param| match param.kind { + ast::GenericParamKind::Lifetime { .. } => None, + _ => Some(param.ident.span), + }); + gate_multi!( + &self, + non_lifetime_binders, + non_lt_param_spans, + crate::fluent_generated::ast_passes_forbidden_non_lifetime_param + ); for param in params { if !param.bounds.is_empty() { let spans: Vec<_> = param.bounds.iter().map(|b| b.span()).collect(); @@ -188,48 +182,39 @@ .. }) = attr_info { - gate_feature_fn!(self, has_feature, attr.span, *name, *descr); + gate_alt!(self, has_feature(&self.features), *name, attr.span, *descr); } // Check unstable flavors of the `#[doc]` attribute. if attr.has_name(sym::doc) { for nested_meta in attr.meta_item_list().unwrap_or_default() { - macro_rules! gate_doc { ($($name:ident => $feature:ident)*) => { - $(if nested_meta.has_name(sym::$name) { - let msg = concat!("`#[doc(", stringify!($name), ")]` is experimental"); - gate_feature_post!(self, $feature, attr.span, msg); - })* + macro_rules! 
gate_doc { ($($s:literal { $($name:ident => $feature:ident)* })*) => { + $($(if nested_meta.has_name(sym::$name) { + let msg = concat!("`#[doc(", stringify!($name), ")]` is ", $s); + gate!(self, $feature, attr.span, msg); + })*)* }} gate_doc!( - cfg => doc_cfg - cfg_hide => doc_cfg_hide - masked => doc_masked - notable_trait => doc_notable_trait + "experimental" { + cfg => doc_cfg + cfg_hide => doc_cfg_hide + masked => doc_masked + notable_trait => doc_notable_trait + } + "meant for internal use only" { + keyword => rustdoc_internals + fake_variadic => rustdoc_internals + } ); - - if nested_meta.has_name(sym::keyword) { - let msg = "`#[doc(keyword)]` is meant for internal use only"; - gate_feature_post!(self, rustdoc_internals, attr.span, msg); - } - - if nested_meta.has_name(sym::fake_variadic) { - let msg = "`#[doc(fake_variadic)]` is meant for internal use only"; - gate_feature_post!(self, rustdoc_internals, attr.span, msg); - } } } if !attr.is_doc_comment() - && attr.get_normal_item().path.segments.len() == 2 - && attr.get_normal_item().path.segments[0].ident.name == sym::diagnostic + && let [seg, _] = attr.get_normal_item().path.segments.as_slice() + && seg.ident.name == sym::diagnostic && !self.features.diagnostic_namespace { let msg = "`#[diagnostic]` attribute name space is experimental"; - gate_feature_post!( - self, - diagnostic_namespace, - attr.get_normal_item().path.segments[0].ident.span, - msg - ); + gate!(self, diagnostic_namespace, seg.ident.span, msg); } // Emit errors for non-staged-api crates. @@ -255,12 +240,11 @@ ast::ItemKind::Fn(..) => { if attr::contains_name(&i.attrs, sym::start) { - gate_feature_post!( + gate!( &self, start, i.span, - "`#[start]` functions are experimental \ - and their signature may change \ + "`#[start]` functions are experimental and their signature may change \ over time" ); } @@ -270,7 +254,7 @@ for attr in attr::filter_by_name(&i.attrs, sym::repr) { for item in attr.meta_item_list().unwrap_or_else(ThinVec::new) { if item.has_name(sym::simd) { - gate_feature_post!( + gate!( &self, repr_simd, attr.span, @@ -283,7 +267,7 @@ ast::ItemKind::Impl(box ast::Impl { polarity, defaultness, of_trait, .. }) => { if let &ast::ImplPolarity::Negative(span) = polarity { - gate_feature_post!( + gate!( &self, negative_impls, span.to(of_trait.as_ref().map_or(span, |t| t.path.span)), @@ -293,12 +277,12 @@ } if let ast::Defaultness::Default(_) = defaultness { - gate_feature_post!(&self, specialization, i.span, "specialization is unstable"); + gate!(&self, specialization, i.span, "specialization is unstable"); } } ast::ItemKind::Trait(box ast::Trait { is_auto: ast::IsAuto::Yes, .. }) => { - gate_feature_post!( + gate!( &self, auto_traits, i.span, @@ -307,12 +291,12 @@ } ast::ItemKind::TraitAlias(..) => { - gate_feature_post!(&self, trait_alias, i.span, "trait aliases are experimental"); + gate!(&self, trait_alias, i.span, "trait aliases are experimental"); } ast::ItemKind::MacroDef(ast::MacroDef { macro_rules: false, .. }) => { let msg = "`macro` is experimental"; - gate_feature_post!(&self, decl_macro, i.span, msg); + gate!(&self, decl_macro, i.span, msg); } ast::ItemKind::TyAlias(box ast::TyAlias { ty: Some(ty), .. }) => { @@ -331,7 +315,7 @@ let link_name = attr::first_attr_value_str_by_name(&i.attrs, sym::link_name); let links_to_llvm = link_name.is_some_and(|val| val.as_str().starts_with("llvm.")); if links_to_llvm { - gate_feature_post!( + gate!( &self, link_llvm_intrinsics, i.span, @@ -340,7 +324,7 @@ } } ast::ForeignItemKind::TyAlias(..) 
=> { - gate_feature_post!(&self, extern_types, i.span, "extern types are experimental"); + gate!(&self, extern_types, i.span, "extern types are experimental"); } ast::ForeignItemKind::MacCall(..) => {} } @@ -356,7 +340,7 @@ self.check_late_bound_lifetime_defs(&bare_fn_ty.generic_params); } ast::TyKind::Never => { - gate_feature_post!(&self, never_type, ty.span, "the `!` type is experimental"); + gate!(&self, never_type, ty.span, "the `!` type is experimental"); } _ => {} } @@ -389,7 +373,7 @@ fn visit_expr(&mut self, e: &'a ast::Expr) { match e.kind { ast::ExprKind::TryBlock(_) => { - gate_feature_post!(&self, try_blocks, e.span, "`try` expression is experimental"); + gate!(&self, try_blocks, e.span, "`try` expression is experimental"); } _ => {} } @@ -405,7 +389,7 @@ _ => pat, }; if let PatKind::Range(Some(_), None, Spanned { .. }) = inner_pat.kind { - gate_feature_post!( + gate!( &self, half_open_range_patterns_in_slices, pat.span, @@ -415,15 +399,10 @@ } } PatKind::Box(..) => { - gate_feature_post!( - &self, - box_patterns, - pattern.span, - "box pattern syntax is experimental" - ); + gate!(&self, box_patterns, pattern.span, "box pattern syntax is experimental"); } PatKind::Range(_, Some(_), Spanned { node: RangeEnd::Excluded, .. }) => { - gate_feature_post!( + gate!( &self, exclusive_range_pattern, pattern.span, @@ -451,7 +430,7 @@ } if fn_kind.ctxt() != Some(FnCtxt::Foreign) && fn_kind.decl().c_variadic() { - gate_feature_post!(&self, c_variadic, span, "C-variadic functions are unstable"); + gate!(&self, c_variadic, span, "C-variadic functions are unstable"); } visit::walk_fn(self, fn_kind) @@ -463,14 +442,14 @@ && args.inputs.is_empty() && matches!(args.output, ast::FnRetTy::Default(..)) { - gate_feature_post!( + gate!( &self, return_type_notation, constraint.span, "return type notation is experimental" ); } else { - gate_feature_post!( + gate!( &self, associated_type_bounds, constraint.span, @@ -486,7 +465,7 @@ ast::AssocItemKind::Fn(_) => true, ast::AssocItemKind::Type(box ast::TyAlias { ty, .. }) => { if let (Some(_), AssocCtxt::Trait) = (ty, ctxt) { - gate_feature_post!( + gate!( &self, associated_type_defaults, i.span, @@ -502,11 +481,11 @@ }; if let ast::Defaultness::Default(_) = i.kind.defaultness() { // Limit `min_specialization` to only specializing functions. - gate_feature_fn!( + gate_alt!( &self, - |x: &Features| x.specialization || (is_fn && x.min_specialization), - i.span, + self.features.specialization || (is_fn && self.features.min_specialization), sym::specialization, + i.span, "specialization is unstable" ); } @@ -521,17 +500,17 @@ let spans = sess.parse_sess.gated_spans.spans.borrow(); macro_rules! 
gate_all { - ($gate:ident, $msg:literal, $help:literal) => { + ($gate:ident, $msg:literal) => { if let Some(spans) = spans.get(&sym::$gate) { for span in spans { - gate_feature_post!(&visitor, $gate, *span, $msg, $help); + gate!(&visitor, $gate, *span, $msg); } } }; - ($gate:ident, $msg:literal) => { + ($gate:ident, $msg:literal, $help:literal) => { if let Some(spans) = spans.get(&sym::$gate) { for span in spans { - gate_feature_post!(&visitor, $gate, *span, $msg); + gate!(&visitor, $gate, *span, $msg, $help); } } }; @@ -554,7 +533,12 @@ "consider removing `for<...>`" ); gate_all!(more_qualified_paths, "usage of qualified paths in this context is experimental"); - gate_all!(generators, "yield syntax is experimental"); + for &span in spans.get(&sym::yield_expr).iter().copied().flatten() { + if !span.at_least_rust_2024() { + gate!(&visitor, coroutines, span, "yield syntax is experimental"); + } + } + gate_all!(gen_blocks, "gen blocks are experimental"); gate_all!(raw_ref_op, "raw address of syntax is experimental"); gate_all!(const_trait_impl, "const trait impls are experimental"); gate_all!( @@ -585,7 +569,7 @@ macro_rules! gate_all_legacy_dont_use { ($gate:ident, $msg:literal) => { for span in spans.get(&sym::$gate).unwrap_or(&vec![]) { - gate_feature_post!(future_incompatible; &visitor, $gate, *span, $msg); + gate_legacy!(&visitor, $gate, *span, $msg); } }; } @@ -603,6 +587,7 @@ "exclusive range pattern syntax is experimental" ); gate_all_legacy_dont_use!(try_blocks, "`try` blocks are unstable"); + gate_all_legacy_dont_use!(auto_traits, "`auto` traits are unstable"); visit::walk_crate(&mut visitor, krate); } @@ -657,7 +642,7 @@ for (f1, f2) in rustc_feature::INCOMPATIBLE_FEATURES .iter() - .filter(|&&(f1, f2)| features.enabled(f1) && features.enabled(f2)) + .filter(|&&(f1, f2)| features.active(f1) && features.active(f2)) { if let Some((f1_name, f1_span)) = declared_features.clone().find(|(name, _)| name == f1) { if let Some((f2_name, f2_span)) = declared_features.clone().find(|(name, _)| name == f2) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_passes/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,6 +4,9 @@ //! //! The crate also contains other misc AST visitors, e.g. `node_count` and `show_span`. 
+#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature(box_patterns)] #![feature(if_let_guard)] #![feature(iter_is_partitioned)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,9 +3,9 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start rustc_ast = { path = "../rustc_ast" } rustc_span = { path = "../rustc_span" } thin-vec = "0.2.12" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,6 @@ +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] #![feature(associated_type_bounds)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/pprust/state/expr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/pprust/state/expr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/pprust/state/expr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/pprust/state/expr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -445,8 +445,8 @@ self.ibox(0); self.print_block_with_attrs(blk, attrs); } - ast::ExprKind::Async(capture_clause, blk) => { - self.word_nbsp("async"); + ast::ExprKind::Gen(capture_clause, blk, kind) => { + self.word_nbsp(kind.modifier()); self.print_capture_clause(*capture_clause); // cbox/ibox in analogy to the `ExprKind::Block` arm above self.cbox(0); @@ -673,7 +673,7 @@ fn print_capture_clause(&mut self, capture_clause: ast::CaptureBy) { match capture_clause { - ast::CaptureBy::Value => self.word_space("move"), + ast::CaptureBy::Value { .. } => self.word_space("move"), ast::CaptureBy::Ref => {} } } @@ -684,8 +684,8 @@ for piece in pieces { match piece { FormatArgsPiece::Literal(s) => { - for c in s.as_str().escape_debug() { - template.push(c); + for c in s.as_str().chars() { + template.extend(c.escape_debug()); if let '{' | '}' = c { template.push(c); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/pprust/state.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/pprust/state.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/pprust/state.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ast_pretty/src/pprust/state.rs 2023-12-21 16:55:28.000000000 +0000 @@ -146,37 +146,49 @@ s.s.eof() } -/// This makes printed token streams look slightly nicer, -/// and also addresses some specific regressions described in #63896 and #73345. -fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool { - if let TokenTree::Token(token, _) = prev { - // No space after these tokens, e.g. 
`x.y`, `$e` - // (The carets point to `prev`.) ^ ^ - if matches!(token.kind, token::Dot | token::Dollar) { - return false; - } - if let token::DocComment(comment_kind, ..) = token.kind { - return comment_kind != CommentKind::Line; - } - } - match tt { - // No space before these tokens, e.g. `foo,`, `println!`, `x.y` - // (The carets point to `token`.) ^ ^ ^ +/// Should two consecutive tokens be printed with a space between them? +/// +/// Note: some old proc macros parse pretty-printed output, so changes here can +/// break old code. For example: +/// - #63896: `#[allow(unused,` must be printed rather than `#[allow(unused ,` +/// - #73345: `#[allow(unused)] must be printed rather than `# [allow(unused)] +/// +fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool { + use token::*; + use Delimiter::*; + use TokenTree::Delimited as Del; + use TokenTree::Token as Tok; + + // Each match arm has one or more examples in comments. The default is to + // insert space between adjacent tokens, except for the cases listed in + // this match. + match (tt1, tt2) { + // No space after line doc comments. + (Tok(Token { kind: DocComment(CommentKind::Line, ..), .. }, _), _) => false, + + // `.` + ANYTHING: `x.y`, `tup.0` + // `$` + ANYTHING: `$e` + (Tok(Token { kind: Dot | Dollar, .. }, _), _) => false, + + // ANYTHING + `,`: `foo,` + // ANYTHING + `.`: `x.y`, `tup.0` + // ANYTHING + `!`: `foo! { ... }` // - // FIXME: having `Not` here works well for macro invocations like - // `println!()`, but is bad when `!` means "logical not" or "the never - // type", where the lack of space causes ugliness like this: - // `Fn() ->!`, `x =! y`, `if! x { f(); }`. - TokenTree::Token(token, _) => !matches!(token.kind, token::Comma | token::Not | token::Dot), - // No space before parentheses if preceded by these tokens, e.g. `foo(...)` - TokenTree::Delimited(_, Delimiter::Parenthesis, _) => { - !matches!(prev, TokenTree::Token(Token { kind: token::Ident(..), .. }, _)) - } - // No space before brackets if preceded by these tokens, e.g. `#[...]` - TokenTree::Delimited(_, Delimiter::Bracket, _) => { - !matches!(prev, TokenTree::Token(Token { kind: token::Pound, .. }, _)) - } - TokenTree::Delimited(..) => true, + // FIXME: Incorrect cases: + // - Logical not: `x =! y`, `if! x { f(); }` + // - Never type: `Fn() ->!` + (_, Tok(Token { kind: Comma | Dot | Not, .. }, _)) => false, + + // IDENT + `(`: `f(3)` + // + // FIXME: Incorrect cases: + // - Let: `let(a, b) = (1, 2)` + (Tok(Token { kind: Ident(..), .. }, _), Del(_, Parenthesis, _)) => false, + + // `#` + `[`: `#[attr]` + (Tok(Token { kind: Pound, .. 
}, _), Del(_, Bracket, _)) => false, + + _ => true, } } @@ -575,7 +587,7 @@ while let Some(tt) = iter.next() { self.print_tt(tt, convert_dollar_crate); if let Some(next) = iter.peek() { - if tt_prepend_space(next, tt) { + if space_between(tt, next) { self.space(); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_attr/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_attr/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_attr/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_attr/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,17 +3,17 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start +rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } -rustc_serialize = { path = "../rustc_serialize" } -rustc_errors = { path = "../rustc_errors" } -rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_span = { path = "../rustc_span" } rustc_data_structures = { path = "../rustc_data_structures" } +rustc_errors = { path = "../rustc_errors" } rustc_feature = { path = "../rustc_feature" } +rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_lexer = { path = "../rustc_lexer" } rustc_macros = { path = "../rustc_macros" } +rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } -rustc_ast = { path = "../rustc_ast" } +rustc_span = { path = "../rustc_span" } +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_attr/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_attr/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_attr/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_attr/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -58,6 +58,9 @@ attr_invalid_repr_hint_no_value = invalid representation hint: `{$name}` does not take a value +attr_invalid_since = + 'since' must be a Rust version number, such as "1.31.0" + attr_missing_feature = missing 'feature' diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/builtin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/builtin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/builtin.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/builtin.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,13 +3,14 @@ use rustc_ast::{self as ast, attr}; use rustc_ast::{Attribute, LitKind, MetaItem, MetaItemKind, MetaItemLit, NestedMetaItem, NodeId}; use rustc_ast_pretty::pprust; +use rustc_errors::ErrorGuaranteed; use rustc_feature::{find_gated_cfg, is_builtin_attr_name, Features, GatedCfg}; use rustc_macros::HashStable_Generic; use rustc_session::config::ExpectedValues; use rustc_session::lint::builtin::UNEXPECTED_CFGS; use rustc_session::lint::BuiltinLintDiagnostics; use rustc_session::parse::{feature_err, ParseSess}; -use rustc_session::Session; +use rustc_session::{RustcVersion, Session}; use rustc_span::hygiene::Transparency; use rustc_span::{symbol::sym, symbol::Symbol, Span}; use std::num::NonZeroU32; @@ -22,25 +23,10 @@ /// For more, see [this pull request](https://github.com/rust-lang/rust/pull/100591). 
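
The rustc_ast_pretty/src/pprust/state.rs hunk further up replaces `tt_prepend_space(next, prev)` with a pairwise `space_between(tt1, tt2)` that decides spacing by matching on the (previous, next) token pair in one place. A simplified standalone sketch of that pairwise decision follows, using a toy token type rather than rustc's `TokenTree`; the `Tok` enum and the example token stream are assumptions made for illustration only.

    // Toy model of the pairwise spacing decision made while pretty-printing tokens.
    #[derive(Clone, Copy)]
    enum Tok {
        Ident(&'static str),
        Punct(char), // `,` `.` `!` `#` `$` ...
        OpenParen,
        OpenBracket,
    }

    /// Should a space be printed between two consecutive tokens?
    fn space_between(prev: Tok, next: Tok) -> bool {
        match (prev, next) {
            // `x.y`, `$e`: nothing after `.` or `$` gets a space.
            (Tok::Punct('.' | '$'), _) => false,
            // `foo,`, `x.y`, `println!`: no space before `,`, `.`, `!`.
            (_, Tok::Punct(',' | '.' | '!')) => false,
            // `f(3)`: no space between an identifier and an opening parenthesis.
            (Tok::Ident(_), Tok::OpenParen) => false,
            // `#[attr]`: no space between `#` and `[`.
            (Tok::Punct('#'), Tok::OpenBracket) => false,
            _ => true,
        }
    }

    fn main() {
        // Roughly `#[allow(unused,` as a token stream.
        let toks = [
            Tok::Punct('#'),
            Tok::OpenBracket,
            Tok::Ident("allow"),
            Tok::OpenParen,
            Tok::Ident("unused"),
            Tok::Punct(','),
        ];
        for pair in toks.windows(2) {
            println!("space before next token? {}", space_between(pair[0], pair[1]));
        }
    }
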
pub const VERSION_PLACEHOLDER: &str = "CURRENT_RUSTC_VERSION"; -pub fn rust_version_symbol() -> Symbol { - let version = option_env!("CFG_RELEASE").unwrap_or(""); - Symbol::intern(&version) -} - pub fn is_builtin_attr(attr: &Attribute) -> bool { attr.is_doc_comment() || attr.ident().is_some_and(|ident| is_builtin_attr_name(ident.name)) } -enum AttrError { - MultipleItem(String), - UnknownMetaItem(String, &'static [&'static str]), - MissingSince, - NonIdentFeature, - MissingFeature, - MultipleStabilityLevels, - UnsupportedLiteral(UnsupportedLiteralReason, /* is_bytestr */ bool), -} - pub(crate) enum UnsupportedLiteralReason { Generic, CfgString, @@ -48,37 +34,6 @@ DeprecatedKvPair, } -fn handle_errors(sess: &ParseSess, span: Span, error: AttrError) { - match error { - AttrError::MultipleItem(item) => { - sess.emit_err(session_diagnostics::MultipleItem { span, item }); - } - AttrError::UnknownMetaItem(item, expected) => { - sess.emit_err(session_diagnostics::UnknownMetaItem { span, item, expected }); - } - AttrError::MissingSince => { - sess.emit_err(session_diagnostics::MissingSince { span }); - } - AttrError::NonIdentFeature => { - sess.emit_err(session_diagnostics::NonIdentFeature { span }); - } - AttrError::MissingFeature => { - sess.emit_err(session_diagnostics::MissingFeature { span }); - } - AttrError::MultipleStabilityLevels => { - sess.emit_err(session_diagnostics::MultipleStabilityLevels { span }); - } - AttrError::UnsupportedLiteral(reason, is_bytestr) => { - sess.emit_err(session_diagnostics::UnsupportedLiteral { - span, - reason, - is_bytestr, - start_point_span: sess.source_map().start_point(span), - }); - } - } -} - #[derive(Copy, Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] pub enum InlineAttr { None, @@ -162,7 +117,7 @@ is_soft: bool, /// If part of a feature is stabilized and a new feature is added for the remaining parts, /// then the `implied_by` attribute is used to indicate which now-stable feature previously - /// contained a item. + /// contained an item. /// /// ```pseudo-Rust /// #[unstable(feature = "foo", issue = "...")] @@ -184,13 +139,24 @@ /// `#[stable]` Stable { /// Rust release which stabilized this feature. - since: Symbol, + since: StableSince, /// Is this item allowed to be referred to on stable, despite being contained in unstable /// modules? allowed_through_unstable_modules: bool, }, } +/// Rust release in which a feature is stabilized. +#[derive(Encodable, Decodable, PartialEq, Copy, Clone, Debug, Eq, Hash)] +#[derive(HashStable_Generic)] +pub enum StableSince { + Version(RustcVersion), + /// Stabilized in the upcoming version, whatever number that is. + Current, + /// Failed to parse a stabilization version. + Err, +} + impl StabilityLevel { pub fn is_unstable(&self) -> bool { matches!(self, StabilityLevel::Unstable { .. 
}) @@ -241,7 +207,7 @@ sym::rustc_allowed_through_unstable_modules => allowed_through_unstable_modules = true, sym::unstable => { if stab.is_some() { - handle_errors(&sess.parse_sess, attr.span, AttrError::MultipleStabilityLevels); + sess.emit_err(session_diagnostics::MultipleStabilityLevels { span: attr.span }); break; } @@ -251,7 +217,7 @@ } sym::stable => { if stab.is_some() { - handle_errors(&sess.parse_sess, attr.span, AttrError::MultipleStabilityLevels); + sess.emit_err(session_diagnostics::MultipleStabilityLevels { span: attr.span }); break; } if let Some((feature, level)) = parse_stability(sess, attr) { @@ -295,7 +261,7 @@ sym::rustc_promotable => promotable = true, sym::rustc_const_unstable => { if const_stab.is_some() { - handle_errors(&sess.parse_sess, attr.span, AttrError::MultipleStabilityLevels); + sess.emit_err(session_diagnostics::MultipleStabilityLevels { span: attr.span }); break; } @@ -306,7 +272,7 @@ } sym::rustc_const_stable => { if const_stab.is_some() { - handle_errors(&sess.parse_sess, attr.span, AttrError::MultipleStabilityLevels); + sess.emit_err(session_diagnostics::MultipleStabilityLevels { span: attr.span }); break; } if let Some((feature, level)) = parse_stability(sess, attr) { @@ -340,7 +306,7 @@ for attr in attrs { if attr.has_name(sym::rustc_default_body_unstable) { if body_stab.is_some() { - handle_errors(&sess.parse_sess, attr.span, AttrError::MultipleStabilityLevels); + sess.emit_err(session_diagnostics::MultipleStabilityLevels { span: attr.span }); break; } @@ -353,83 +319,83 @@ body_stab } +fn insert_or_error(sess: &Session, meta: &MetaItem, item: &mut Option) -> Option<()> { + if item.is_some() { + sess.emit_err(session_diagnostics::MultipleItem { + span: meta.span, + item: pprust::path_to_string(&meta.path), + }); + None + } else if let Some(v) = meta.value_str() { + *item = Some(v); + Some(()) + } else { + sess.emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span }); + None + } +} + /// Read the content of a `stable`/`rustc_const_stable` attribute, and return the feature name and /// its stability information. fn parse_stability(sess: &Session, attr: &Attribute) -> Option<(Symbol, StabilityLevel)> { let meta = attr.meta()?; let MetaItem { kind: MetaItemKind::List(ref metas), .. 
} = meta else { return None }; - let insert_or_error = |meta: &MetaItem, item: &mut Option| { - if item.is_some() { - handle_errors( - &sess.parse_sess, - meta.span, - AttrError::MultipleItem(pprust::path_to_string(&meta.path)), - ); - return false; - } - if let Some(v) = meta.value_str() { - *item = Some(v); - true - } else { - sess.emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span }); - false - } - }; let mut feature = None; let mut since = None; for meta in metas { let Some(mi) = meta.meta_item() else { - handle_errors( - &sess.parse_sess, - meta.span(), - AttrError::UnsupportedLiteral(UnsupportedLiteralReason::Generic, false), - ); + sess.emit_err(session_diagnostics::UnsupportedLiteral { + span: meta.span(), + reason: UnsupportedLiteralReason::Generic, + is_bytestr: false, + start_point_span: sess.source_map().start_point(meta.span()), + }); return None; }; match mi.name_or_empty() { - sym::feature => { - if !insert_or_error(mi, &mut feature) { - return None; - } - } - sym::since => { - if !insert_or_error(mi, &mut since) { - return None; - } - } + sym::feature => insert_or_error(sess, mi, &mut feature)?, + sym::since => insert_or_error(sess, mi, &mut since)?, _ => { - handle_errors( - &sess.parse_sess, - meta.span(), - AttrError::UnknownMetaItem( - pprust::path_to_string(&mi.path), - &["feature", "since"], - ), - ); + sess.emit_err(session_diagnostics::UnknownMetaItem { + span: meta.span(), + item: pprust::path_to_string(&mi.path), + expected: &["feature", "since"], + }); return None; } } } - if let Some(s) = since && s.as_str() == VERSION_PLACEHOLDER { - since = Some(rust_version_symbol()); - } + let feature = match feature { + Some(feature) if rustc_lexer::is_ident(feature.as_str()) => Ok(feature), + Some(_bad_feature) => { + Err(sess.emit_err(session_diagnostics::NonIdentFeature { span: attr.span })) + } + None => Err(sess.emit_err(session_diagnostics::MissingFeature { span: attr.span })), + }; - match (feature, since) { - (Some(feature), Some(since)) => { + let since = if let Some(since) = since { + if since.as_str() == VERSION_PLACEHOLDER { + StableSince::Current + } else if let Some(version) = parse_version(since) { + StableSince::Version(version) + } else { + sess.emit_err(session_diagnostics::InvalidSince { span: attr.span }); + StableSince::Err + } + } else { + sess.emit_err(session_diagnostics::MissingSince { span: attr.span }); + StableSince::Err + }; + + match feature { + Ok(feature) => { let level = StabilityLevel::Stable { since, allowed_through_unstable_modules: false }; Some((feature, level)) } - (None, _) => { - handle_errors(&sess.parse_sess, attr.span, AttrError::MissingFeature); - None - } - _ => { - handle_errors(&sess.parse_sess, attr.span, AttrError::MissingSince); - None - } + Err(ErrorGuaranteed { .. }) => None, } } @@ -438,23 +404,6 @@ fn parse_unstability(sess: &Session, attr: &Attribute) -> Option<(Symbol, StabilityLevel)> { let meta = attr.meta()?; let MetaItem { kind: MetaItemKind::List(ref metas), .. 
} = meta else { return None }; - let insert_or_error = |meta: &MetaItem, item: &mut Option| { - if item.is_some() { - handle_errors( - &sess.parse_sess, - meta.span, - AttrError::MultipleItem(pprust::path_to_string(&meta.path)), - ); - return false; - } - if let Some(v) = meta.value_str() { - *item = Some(v); - true - } else { - sess.emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span }); - false - } - }; let mut feature = None; let mut reason = None; @@ -464,29 +413,20 @@ let mut implied_by = None; for meta in metas { let Some(mi) = meta.meta_item() else { - handle_errors( - &sess.parse_sess, - meta.span(), - AttrError::UnsupportedLiteral(UnsupportedLiteralReason::Generic, false), - ); + sess.emit_err(session_diagnostics::UnsupportedLiteral { + span: meta.span(), + reason: UnsupportedLiteralReason::Generic, + is_bytestr: false, + start_point_span: sess.source_map().start_point(meta.span()), + }); return None; }; match mi.name_or_empty() { - sym::feature => { - if !insert_or_error(mi, &mut feature) { - return None; - } - } - sym::reason => { - if !insert_or_error(mi, &mut reason) { - return None; - } - } + sym::feature => insert_or_error(sess, mi, &mut feature)?, + sym::reason => insert_or_error(sess, mi, &mut reason)?, sym::issue => { - if !insert_or_error(mi, &mut issue) { - return None; - } + insert_or_error(sess, mi, &mut issue)?; // These unwraps are safe because `insert_or_error` ensures the meta item // is a name/value pair string literal. @@ -515,31 +455,31 @@ } is_soft = true; } - sym::implied_by => { - if !insert_or_error(mi, &mut implied_by) { - return None; - } - } + sym::implied_by => insert_or_error(sess, mi, &mut implied_by)?, _ => { - handle_errors( - &sess.parse_sess, - meta.span(), - AttrError::UnknownMetaItem( - pprust::path_to_string(&mi.path), - &["feature", "reason", "issue", "soft", "implied_by"], - ), - ); + sess.emit_err(session_diagnostics::UnknownMetaItem { + span: meta.span(), + item: pprust::path_to_string(&mi.path), + expected: &["feature", "reason", "issue", "soft", "implied_by"], + }); return None; } } } - match (feature, reason, issue) { - (Some(feature), reason, Some(_)) => { - if !rustc_lexer::is_ident(feature.as_str()) { - handle_errors(&sess.parse_sess, attr.span, AttrError::NonIdentFeature); - return None; - } + let feature = match feature { + Some(feature) if rustc_lexer::is_ident(feature.as_str()) => Ok(feature), + Some(_bad_feature) => { + Err(sess.emit_err(session_diagnostics::NonIdentFeature { span: attr.span })) + } + None => Err(sess.emit_err(session_diagnostics::MissingFeature { span: attr.span })), + }; + + let issue = + issue.ok_or_else(|| sess.emit_err(session_diagnostics::MissingIssue { span: attr.span })); + + match (feature, issue) { + (Ok(feature), Ok(_)) => { let level = StabilityLevel::Unstable { reason: UnstableReason::from_opt_reason(reason), issue: issue_num, @@ -548,14 +488,7 @@ }; Some((feature, level)) } - (None, _, _) => { - handle_errors(&sess.parse_sess, attr.span, AttrError::MissingFeature); - return None; - } - _ => { - sess.emit_err(session_diagnostics::MissingIssue { span: attr.span }); - return None; - } + (Err(ErrorGuaranteed { .. }), _) | (_, Err(ErrorGuaranteed { .. 
})) => None, } } @@ -587,7 +520,11 @@ UNEXPECTED_CFGS, cfg.span, lint_node_id, - "unexpected `cfg` condition value", + if let Some(value) = cfg.value { + format!("unexpected `cfg` condition value: `{value}`") + } else { + format!("unexpected `cfg` condition value: (none)") + }, BuiltinLintDiagnostics::UnexpectedCfgValue( (cfg.name, cfg.name_span), cfg.value.map(|v| (v, cfg.value_span.unwrap())), @@ -599,7 +536,7 @@ UNEXPECTED_CFGS, cfg.span, lint_node_id, - "unexpected `cfg` condition name", + format!("unexpected `cfg` condition name: `{}`", cfg.name), BuiltinLintDiagnostics::UnexpectedCfgName( (cfg.name, cfg.name_span), cfg.value.map(|v| (v, cfg.value_span.unwrap())), @@ -627,24 +564,20 @@ } } -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -struct Version { - major: u16, - minor: u16, - patch: u16, -} - -fn parse_version(s: &str, allow_appendix: bool) -> Option { - let mut components = s.split('-'); +/// Parse a rustc version number written inside string literal in an attribute, +/// like appears in `since = "1.0.0"`. Suffixes like "-dev" and "-nightly" are +/// not accepted in this position, unlike when parsing CFG_RELEASE. +fn parse_version(s: Symbol) -> Option { + let mut components = s.as_str().split('-'); let d = components.next()?; - if !allow_appendix && components.next().is_some() { + if components.next().is_some() { return None; } let mut digits = d.splitn(3, '.'); let major = digits.next()?.parse().ok()?; let minor = digits.next()?.parse().ok()?; let patch = digits.next().unwrap_or("0").parse().ok()?; - Some(Version { major, minor, patch }) + Some(RustcVersion { major, minor, patch }) } /// Evaluate a cfg-like condition (with `any` and `all`), using `eval` to @@ -676,27 +609,27 @@ return false; } }; - let Some(min_version) = parse_version(min_version.as_str(), false) else { + let Some(min_version) = parse_version(*min_version) else { sess.emit_warning(session_diagnostics::UnknownVersionLiteral { span: *span }); return false; }; - let rustc_version = parse_version(env!("CFG_RELEASE"), true).unwrap(); // See https://github.com/rust-lang/rust/issues/64796#issuecomment-640851454 for details if sess.assume_incomplete_release { - rustc_version > min_version + RustcVersion::CURRENT > min_version } else { - rustc_version >= min_version + RustcVersion::CURRENT >= min_version } } ast::MetaItemKind::List(mis) => { for mi in mis.iter() { if !mi.is_meta_item() { - handle_errors( - sess, - mi.span(), - AttrError::UnsupportedLiteral(UnsupportedLiteralReason::Generic, false), - ); + sess.emit_err(session_diagnostics::UnsupportedLiteral { + span: mi.span(), + reason: UnsupportedLiteralReason::Generic, + is_bytestr: false, + start_point_span: sess.source_map().start_point(mi.span()), + }); return false; } } @@ -729,13 +662,16 @@ !eval_condition(mis[0].meta_item().unwrap(), sess, features, eval) } sym::target => { - if let Some(features) = features && !features.cfg_target_compact { + if let Some(features) = features + && !features.cfg_target_compact + { feature_err( sess, sym::cfg_target_compact, cfg.span, - "compact `cfg(target(..))` is experimental and subject to change" - ).emit(); + "compact `cfg(target(..))` is experimental and subject to change", + ) + .emit(); } mis.iter().fold(true, |res, mi| { @@ -761,14 +697,12 @@ true } MetaItemKind::NameValue(lit) if !lit.kind.is_str() => { - handle_errors( - sess, - lit.span, - AttrError::UnsupportedLiteral( - UnsupportedLiteralReason::CfgString, - lit.kind.is_bytestr(), - ), - ); + sess.emit_err(session_diagnostics::UnsupportedLiteral { + 
span: lit.span, + reason: UnsupportedLiteralReason::CfgString, + is_bytestr: lit.kind.is_bytestr(), + start_point_span: sess.source_map().start_point(lit.span), + }); true } ast::MetaItemKind::Word | ast::MetaItemKind::NameValue(..) => { @@ -786,17 +720,49 @@ #[derive(Copy, Debug, Encodable, Decodable, Clone, HashStable_Generic)] pub struct Deprecation { - pub since: Option<Symbol>, + pub since: DeprecatedSince, /// The note to issue a reason. pub note: Option<Symbol>, /// A text snippet used to completely replace any use of the deprecated item in an expression. /// /// This is currently unstable. pub suggestion: Option<Symbol>, +} + +/// Release in which an API is deprecated. +#[derive(Copy, Debug, Encodable, Decodable, Clone, HashStable_Generic)] +pub enum DeprecatedSince { + RustcVersion(RustcVersion), + /// Deprecated in the future ("to be determined"). + Future, + /// `feature(staged_api)` is off. Deprecation versions outside the standard + /// library are allowed to be arbitrary strings, for better or worse. + NonStandard(Symbol), + /// Deprecation version is unspecified but optional. + Unspecified, + /// Failed to parse a deprecation version, or the deprecation version is + /// unspecified and required. An error has already been emitted. + Err, +} + +impl Deprecation { + /// Whether an item marked with #[deprecated(since = "X")] is currently + /// deprecated (i.e., whether X is not greater than the current rustc + /// version). + pub fn is_in_effect(&self) -> bool { + match self.since { + DeprecatedSince::RustcVersion(since) => since <= RustcVersion::CURRENT, + DeprecatedSince::Future => false, + // The `since` field doesn't have semantic purpose without `#![staged_api]`. + DeprecatedSince::NonStandard(_) => true, + // Assume deprecation is in effect if "since" field is absent or invalid. + DeprecatedSince::Unspecified | DeprecatedSince::Err => true, + } + } - /// Whether to treat the since attribute as being a Rust version identifier - /// (rather than an opaque string). - pub is_since_rustc_version: bool, + pub fn is_since_rustc_version(&self) -> bool { + matches!(self.since, DeprecatedSince::RustcVersion(_)) + } } /// Finds the deprecation attribute. `None` if none exists.
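
With the rustc_attr hunks above, a `#[deprecated(since = "...")]` string is parsed up front (via the strict `parse_version`, which rejects pre-release suffixes) into a `DeprecatedSince` value and compared against the running compiler through `RustcVersion::CURRENT`. The standalone sketch below mimics that "is this deprecation in effect yet?" check with a hand-rolled version triple; the `Version` type and the hard-coded `CURRENT` constant are assumptions for illustration, not the compiler's types.

    // Simplified model of Deprecation::is_in_effect (not rustc code).
    #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Debug)]
    struct Version {
        major: u16,
        minor: u16,
        patch: u16,
    }

    // Stand-in for the compiler's own version.
    const CURRENT: Version = Version { major: 1, minor: 75, patch: 0 };

    enum DeprecatedSince {
        Version(Version),
        Future, // e.g. `since = "TBD"`
        Unspecified,
    }

    fn is_in_effect(since: &DeprecatedSince) -> bool {
        match since {
            // Deprecated only once the stated release is out.
            DeprecatedSince::Version(v) => *v <= CURRENT,
            DeprecatedSince::Future => false,
            // Without a usable version, conservatively treat it as already deprecated.
            DeprecatedSince::Unspecified => true,
        }
    }

    fn main() {
        let old = DeprecatedSince::Version(Version { major: 1, minor: 31, patch: 0 });
        let upcoming = DeprecatedSince::Version(Version { major: 1, minor: 99, patch: 0 });
        let tbd = DeprecatedSince::Future;
        let unversioned = DeprecatedSince::Unspecified;
        for d in [old, upcoming, tbd, unversioned] {
            println!("in effect: {}", is_in_effect(&d)); // true, false, false, true
        }
    }
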
@@ -825,11 +791,10 @@ MetaItemKind::List(list) => { let get = |meta: &MetaItem, item: &mut Option| { if item.is_some() { - handle_errors( - &sess.parse_sess, - meta.span, - AttrError::MultipleItem(pprust::path_to_string(&meta.path)), - ); + sess.emit_err(session_diagnostics::MultipleItem { + span: meta.span, + item: pprust::path_to_string(&meta.path), + }); return false; } if let Some(v) = meta.value_str() { @@ -837,16 +802,14 @@ true } else { if let Some(lit) = meta.name_value_literal() { - handle_errors( - &sess.parse_sess, - lit.span, - AttrError::UnsupportedLiteral( - UnsupportedLiteralReason::DeprecatedString, - lit.kind.is_bytestr(), - ), - ); + sess.emit_err(session_diagnostics::UnsupportedLiteral { + span: lit.span, + reason: UnsupportedLiteralReason::DeprecatedString, + is_bytestr: lit.kind.is_bytestr(), + start_point_span: sess.source_map().start_point(lit.span), + }); } else { - sess.emit_err(session_diagnostics::IncorrectMetaItem2 { + sess.emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span, }); } @@ -882,30 +845,25 @@ } } _ => { - handle_errors( - &sess.parse_sess, - meta.span(), - AttrError::UnknownMetaItem( - pprust::path_to_string(&mi.path), - if features.deprecated_suggestion { - &["since", "note", "suggestion"] - } else { - &["since", "note"] - }, - ), - ); + sess.emit_err(session_diagnostics::UnknownMetaItem { + span: meta.span(), + item: pprust::path_to_string(&mi.path), + expected: if features.deprecated_suggestion { + &["since", "note", "suggestion"] + } else { + &["since", "note"] + }, + }); continue 'outer; } }, NestedMetaItem::Lit(lit) => { - handle_errors( - &sess.parse_sess, - lit.span, - AttrError::UnsupportedLiteral( - UnsupportedLiteralReason::DeprecatedKvPair, - false, - ), - ); + sess.emit_err(session_diagnostics::UnsupportedLiteral { + span: lit.span, + reason: UnsupportedLiteralReason::DeprecatedKvPair, + is_bytestr: false, + start_point_span: sess.source_map().start_point(lit.span), + }); continue 'outer; } } @@ -913,22 +871,30 @@ } } - if is_rustc { - if since.is_none() { - handle_errors(&sess.parse_sess, attr.span, AttrError::MissingSince); - continue; + let since = if let Some(since) = since { + if since.as_str() == "TBD" { + DeprecatedSince::Future + } else if !is_rustc { + DeprecatedSince::NonStandard(since) + } else if let Some(version) = parse_version(since) { + DeprecatedSince::RustcVersion(version) + } else { + sess.emit_err(session_diagnostics::InvalidSince { span: attr.span }); + DeprecatedSince::Err } + } else if is_rustc { + sess.emit_err(session_diagnostics::MissingSince { span: attr.span }); + DeprecatedSince::Err + } else { + DeprecatedSince::Unspecified + }; - if note.is_none() { - sess.emit_err(session_diagnostics::MissingNote { span: attr.span }); - continue; - } + if is_rustc && note.is_none() { + sess.emit_err(session_diagnostics::MissingNote { span: attr.span }); + continue; } - depr = Some(( - Deprecation { since, note, suggestion, is_since_rustc_version: is_rustc }, - attr.span, - )); + depr = Some((Deprecation { since, note, suggestion }, attr.span)); } depr diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,6 +4,9 @@ //! The goal is to move the definition of `MetaItem` and things that don't need to be in `syntax` //! 
to this crate. +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] #![feature(let_chains)] #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] @@ -24,6 +27,6 @@ pub use rustc_ast::attr::*; -pub(crate) use rustc_ast::HashStableContext; +pub(crate) use rustc_session::HashStableContext; fluent_messages! { "../messages.ftl" } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/session_diagnostics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/session_diagnostics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/session_diagnostics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_attr/src/session_diagnostics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -165,15 +165,6 @@ pub span: Span, } -// FIXME: This diagnostic is identical to `IncorrectMetaItem`, barring the error code. Consider -// changing this to `IncorrectMetaItem`. See #51489. -#[derive(Diagnostic)] -#[diag(attr_incorrect_meta_item, code = "E0551")] -pub(crate) struct IncorrectMetaItem2 { - #[primary_span] - pub span: Span, -} - // FIXME: Why is this the same error code as `InvalidReprHintNoParen` and `InvalidReprHintNoValue`? // It is more similar to `IncorrectReprFormatGeneric`. #[derive(Diagnostic)] @@ -380,6 +371,13 @@ } #[derive(Diagnostic)] +#[diag(attr_invalid_since)] +pub(crate) struct InvalidSince { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] #[diag(attr_soft_no_args)] pub(crate) struct SoftNoArgs { #[primary_span] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,11 +4,16 @@ edition = "2021" [dependencies] +# tidy-alphabetical-start icu_list = "1.2" icu_locid = "1.2" +icu_locid_transform = "1.3.2" icu_provider = "1.2" icu_provider_adapters = "1.2" -zerovec = "0.9.4" +zerovec = "0.10.0" +# tidy-alphabetical-end [features] +# tidy-alphabetical-start rustc_use_parallel_compiler = ['icu_provider/sync'] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/mod.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -// @generated -type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: LocaleFallbackLikelySubtagsV1Marker as :: icu_provider :: DataMarker > :: Yokeable ; -pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> { - locale.is_empty().then(|| &UND) -} -static UND: DataStruct = include!("und.rs.data"); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/und.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/und.rs.data --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/und.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/und.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,66 +0,0 @@ -::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1 { - l2s: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap::from_parts_unchecked( - unsafe { - :: zerovec :: ZeroVec :: from_bytes_unchecked (b"am\0ar\0as\0be\0bg\0bgcbhobn\0brxchrcv\0doiel\0fa\0gu\0he\0hi\0hy\0ja\0ka\0kk\0km\0kn\0ko\0kokks\0ky\0lo\0maimk\0ml\0mn\0mnimr\0my\0ne\0or\0pa\0ps\0rajru\0sa\0satsd\0si\0sr\0ta\0te\0tg\0th\0ti\0tt\0uk\0ur\0yuezh\0") - }, - unsafe { - :: zerovec :: ZeroVec :: from_bytes_unchecked (b"EthiArabBengCyrlCyrlDevaDevaBengDevaCherCyrlDevaGrekArabGujrHebrDevaArmnJpanGeorCyrlKhmrKndaKoreDevaArabCyrlLaooDevaCyrlMlymCyrlBengDevaMymrDevaOryaGuruArabDevaCyrlDevaOlckArabSinhCyrlTamlTeluCyrlThaiEthiCyrlCyrlArabHantHans") - }, - ) - }, - lr2s: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap2d::from_parts_unchecked( - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked( - b"az\0ha\0kk\0ky\0mn\0ms\0pa\0sd\0sr\0tg\0uz\0yuezh\0", - ) - }, - unsafe { - :: zerovec :: ZeroVec :: from_bytes_unchecked (b"\x03\0\0\0\x05\0\0\0\t\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x0F\0\0\0\x13\0\0\0\x14\0\0\0\x16\0\0\0\x17\0\0\0&\0\0\0") - }, - unsafe { - :: zerovec :: ZeroVec :: from_bytes_unchecked (b"IQ\0IR\0RU\0CM\0SD\0AF\0CN\0IR\0MN\0CN\0TR\0CN\0CC\0PK\0IN\0ME\0RO\0RU\0TR\0PK\0AF\0CN\0CN\0AU\0BN\0GB\0GF\0HK\0ID\0MO\0PA\0PF\0PH\0SR\0TH\0TW\0US\0VN\0") - }, - unsafe { - :: zerovec :: ZeroVec :: from_bytes_unchecked (b"ArabArabCyrlArabArabArabArabArabArabArabLatnMongArabArabDevaLatnLatnLatnLatnArabArabCyrlHansHantHantHantHantHantHantHantHantHantHantHantHantHantHantHant") - }, - ) - }, - l2r: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap::from_parts_unchecked( - unsafe { - :: zerovec :: ZeroVec :: from_bytes_unchecked (b"af\0am\0ar\0as\0astaz\0be\0bg\0bgcbhobn\0br\0brxbs\0ca\0cebchrcs\0cv\0cy\0da\0de\0doidsbel\0en\0es\0et\0eu\0fa\0ff\0fi\0filfo\0fr\0ga\0gd\0gl\0gu\0ha\0he\0hi\0hr\0hsbhu\0hy\0ia\0id\0ig\0is\0it\0ja\0jv\0ka\0keakgpkk\0km\0kn\0ko\0kokks\0ky\0lo\0lt\0lv\0maimi\0mk\0ml\0mn\0mnimr\0ms\0my\0ne\0nl\0nn\0no\0or\0pa\0pcmpl\0ps\0pt\0qu\0rajrm\0ro\0ru\0sa\0satsc\0sd\0si\0sk\0sl\0so\0sq\0sr\0su\0sv\0sw\0ta\0te\0tg\0th\0ti\0tk\0to\0tr\0tt\0uk\0ur\0uz\0vi\0wo\0xh\0yo\0yrlyuezh\0zu\0") - }, - unsafe { - :: zerovec :: ZeroVec :: from_bytes_unchecked (b"ZA\0ET\0EG\0IN\0ES\0AZ\0BY\0BG\0IN\0IN\0BD\0FR\0IN\0BA\0ES\0PH\0US\0CZ\0RU\0GB\0DK\0DE\0IN\0DE\0GR\0US\0ES\0EE\0ES\0IR\0SN\0FI\0PH\0FO\0FR\0IE\0GB\0ES\0IN\0NG\0IL\0IN\0HR\0DE\0HU\0AM\x00001ID\0NG\0IS\0IT\0JP\0ID\0GE\0CV\0BR\0KZ\0KH\0IN\0KR\0IN\0IN\0KG\0LA\0LT\0LV\0IN\0NZ\0MK\0IN\0MN\0IN\0IN\0MY\0MM\0NP\0NL\0NO\0NO\0IN\0IN\0NG\0PL\0AF\0BR\0PE\0IN\0CH\0RO\0RU\0IN\0IN\0IT\0PK\0LK\0SK\0SI\0SO\0AL\0RS\0ID\0SE\0TZ\0IN\0IN\0TJ\0TH\0ET\0TM\0TO\0TR\0RU\0UA\0PK\0UZ\0VN\0SN\0ZA\0NG\0BR\0HK\0CN\0ZA\0") - }, - ) - }, - ls2r: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap2d::from_parts_unchecked( - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked( - b"az\0en\0ff\0kk\0ky\0mn\0pa\0sd\0tg\0uz\0yuezh\0", - ) - }, - unsafe { - :: zerovec :: ZeroVec :: from_bytes_unchecked (b"\x01\0\0\0\x02\0\0\0\x03\0\0\0\x04\0\0\0\x06\0\0\0\x07\0\0\0\x08\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x11\0\0\0") - }, - unsafe { - 
::zerovec::ZeroVec::from_bytes_unchecked( - b"ArabShawAdlmArabArabLatnMongArabDevaKhojSindArabArabHansBopoHanbHant", - ) - }, - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked( - b"IR\0GB\0GN\0CN\0CN\0TR\0CN\0PK\0IN\0IN\0IN\0PK\0AF\0CN\0TW\0TW\0TW\0", - ) - }, - ) - }, -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/mod.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -// @generated -pub mod likelysubtags_v1; -pub mod parents_v1; -pub mod supplement; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/mod.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -// @generated -type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: LocaleFallbackParentsV1Marker as :: icu_provider :: DataMarker > :: Yokeable ; -pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> { - locale.is_empty().then(|| &UND) -} -static UND: DataStruct = include!("und.rs.data"); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/und.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/und.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/und.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/und.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1 { - parents: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap::from_parts_unchecked( - unsafe { - :: zerovec :: VarZeroVec :: from_bytes_unchecked 
(b"\x84\0\0\0\0\0\x06\0\x0B\0\x10\0\x15\0\x1A\0\x1F\0$\0)\0.\x003\08\0=\0B\0G\0L\0Q\0V\0[\0`\0e\0j\0o\0t\0y\0~\0\x83\0\x88\0\x8D\0\x92\0\x97\0\x9C\0\xA1\0\xA6\0\xAB\0\xB0\0\xB5\0\xBA\0\xBF\0\xC4\0\xC9\0\xCE\0\xD3\0\xD8\0\xDD\0\xE2\0\xE7\0\xEC\0\xF1\0\xF6\0\xFB\0\0\x01\x05\x01\n\x01\x0F\x01\x14\x01\x19\x01\x1E\x01#\x01(\x01-\x012\x017\x01<\x01A\x01F\x01K\x01P\x01U\x01Z\x01_\x01d\x01i\x01n\x01s\x01x\x01}\x01\x82\x01\x87\x01\x8C\x01\x91\x01\x96\x01\x9B\x01\xA0\x01\xA5\x01\xAA\x01\xAF\x01\xB4\x01\xB9\x01\xBE\x01\xC3\x01\xC8\x01\xCD\x01\xD2\x01\xD7\x01\xDC\x01\xE1\x01\xE6\x01\xEB\x01\xF0\x01\xF5\x01\xFA\x01\xFF\x01\x04\x02\t\x02\x0E\x02\x13\x02\x18\x02\x1D\x02\"\x02'\x02,\x021\x026\x02;\x02@\x02G\x02I\x02K\x02M\x02R\x02W\x02\\\x02a\x02f\x02k\x02p\x02u\x02z\x02\x7F\x02\x84\x02\x89\x02en-150en-AGen-AIen-ATen-AUen-BBen-BEen-BMen-BSen-BWen-BZen-CCen-CHen-CKen-CMen-CXen-CYen-DEen-DGen-DKen-DMen-ERen-FIen-FJen-FKen-FMen-GBen-GDen-GGen-GHen-GIen-GMen-GYen-HKen-IEen-ILen-IMen-INen-IOen-JEen-JMen-KEen-KIen-KNen-KYen-LCen-LRen-LSen-MGen-MOen-MSen-MTen-MUen-MVen-MWen-MYen-NAen-NFen-NGen-NLen-NRen-NUen-NZen-PGen-PKen-PNen-PWen-RWen-SBen-SCen-SDen-SEen-SGen-SHen-SIen-SLen-SSen-SXen-SZen-TCen-TKen-TOen-TTen-TVen-TZen-UGen-VCen-VGen-VUen-WSen-ZAen-ZMen-ZWes-ARes-BOes-BRes-BZes-CLes-COes-CRes-CUes-DOes-ECes-GTes-HNes-MXes-NIes-PAes-PEes-PRes-PYes-SVes-USes-UYes-VEhi-Latnhtnbnnno-NOpt-AOpt-CHpt-CVpt-FRpt-GQpt-GWpt-LUpt-MOpt-MZpt-STpt-TLzh-Hant-MO") - }, - unsafe { - :: zerovec :: ZeroVec :: from_bytes_unchecked (b"en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419
es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419en\0\0\0\0\0\0\x01IN\0fr\0\0\0\0\0\0\x01HT\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0zh\0\x01Hant\x01HK\0") - }, - ) - }, -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/mod.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -// @generated -type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: CollationFallbackSupplementV1Marker as :: icu_provider :: DataMarker > :: Yokeable ; -pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> { - locale.is_empty().then(|| &UND) -} -static UND: DataStruct = include!("und.rs.data"); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/und.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/und.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/und.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/und.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -::icu_provider_adapters::fallback::provider::LocaleFallbackSupplementV1 { - parents: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap::from_parts_unchecked( - unsafe { ::zerovec::VarZeroVec::from_bytes_unchecked(b"\x01\0\0\0\0\0yue") }, - unsafe { ::zerovec::ZeroVec::from_bytes_unchecked(b"zh\0\x01Hant\0\0\0\0") }, - ) - }, - unicode_extension_defaults: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap2d::from_parts_unchecked( - unsafe { ::zerovec::ZeroVec::from_bytes_unchecked(b"co") }, - unsafe { ::zerovec::ZeroVec::from_bytes_unchecked(b"\x02\0\0\0") }, - unsafe { - ::zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x02\0zhzh-Hant") - }, - unsafe { - ::zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x06\0pinyinstroke") - }, - ) - }, -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/fallback/supplement/mod.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -// @generated -pub mod co_v1; diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/en.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/en.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/en.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/en.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", and ", 6u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", & ", 4u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" & ", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, -]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/es.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/es.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/es.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/es.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,116 +0,0 @@ -::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( - if 
cfg!(target_endian = "little") { - b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" - } else { - 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" - }, - ) - }, - pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - }), - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( - if cfg!(target_endian = "little") { - 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" - } else { - 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" - }, - ) - }, - pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - }), - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( - if cfg!(target_endian = "little") { - 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" - } else { - 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" - }, - ) - }, - pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - }), - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( - if cfg!(target_endian = "little") { - 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" - } else { - 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" - }, - ) - }, - pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - }), - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( - if cfg!(target_endian = "little") { - 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" - } else { - 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" - }, - ) - }, - pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - }), - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( - if cfg!(target_endian = "little") { - 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" - } else { - 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" - }, - ) - }, - pattern: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - }), - }, -]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/fr.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/fr.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/fr.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/fr.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), - 
special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, -]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/it.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/it.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/it.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/it.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - special_case: None, - }, -]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/ja.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/ja.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/ja.rs.data 2023-12-04 
19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/ja.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, -]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/mod.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -// @generated -type DataStruct = <::icu_list::provider::AndListV1Marker as ::icu_provider::DataMarker>::Yokeable; -pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> { - static KEYS: [&str; 12usize] = - ["en", "es", "fr", "it", "ja", "pt", "ru", "tr", "und", "zh", "zh-Hans", "zh-Hant"]; - static DATA: [&DataStruct; 12usize] = - [&EN, &ES, &FR, &IT, &JA, &PT, &RU, &TR, &UND, &ZH, &ZH, &ZH_HANT]; - KEYS.binary_search_by(|k| locale.strict_cmp(k.as_bytes()).reverse()) - .ok() - .map(|i| unsafe { *DATA.get_unchecked(i) }) -} -static EN: DataStruct = include!("en.rs.data"); -static ES: DataStruct = include!("es.rs.data"); -static FR: DataStruct = include!("fr.rs.data"); -static IT: DataStruct = include!("it.rs.data"); -static JA: DataStruct = include!("ja.rs.data"); -static PT: DataStruct = include!("pt.rs.data"); -static RU: DataStruct = include!("ru.rs.data"); -static TR: DataStruct = include!("tr.rs.data"); -static UND: DataStruct = 
include!("und.rs.data"); -static ZH_HANT: DataStruct = include!("zh-Hant.rs.data"); -static ZH: DataStruct = include!("zh.rs.data"); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/pt.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/pt.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/pt.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/pt.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, -]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/ru.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/ru.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/ru.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/ru.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), - special_case: 
None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, -]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/tr.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/tr.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/tr.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/tr.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - 
::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, -]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/und.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/und.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/und.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/und.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), - special_case: None, - }, -]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh-Hant.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh-Hant.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh-Hant.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh-Hant.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: 
::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), - special_case: None, - }, -]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh.rs.data rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh.rs.data --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh.rs.data 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh.rs.data 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: 
::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: ::icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), - special_case: None, - }, -]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/list/mod.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -// @generated -pub mod and_v1; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_likelysubtags_v1.data.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_likelysubtags_v1.data.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_likelysubtags_v1.data.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_likelysubtags_v1.data.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,40 @@ +// @generated +/// Implement `DataProvider` on the given struct using the data +/// hardcoded in this file. This allows the struct to be used with +/// `icu`'s `_unstable` constructors. +#[doc(hidden)] +#[macro_export] +macro_rules! 
__impl_fallback_likelysubtags_v1 { + ($ provider : ty) => { + #[clippy::msrv = "1.66"] + const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; + #[clippy::msrv = "1.66"] + impl $provider { + #[doc(hidden)] + pub const SINGLETON_FALLBACK_LIKELYSUBTAGS_V1: &'static ::Yokeable = &icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1 { + l2s: unsafe { + #[allow(unused_unsafe)] + zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"am\0ar\0as\0be\0bg\0bgcbhobn\0brxchrcv\0doiel\0fa\0gu\0he\0hi\0hy\0ja\0ka\0kk\0km\0kn\0ko\0kokks\0ky\0lo\0maimk\0ml\0mn\0mnimr\0my\0ne\0or\0pa\0ps\0rajru\0sa\0satsd\0si\0sr\0ta\0te\0tg\0th\0ti\0tt\0uk\0ur\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"EthiArabBengCyrlCyrlDevaDevaBengDevaCherCyrlDevaGrekArabGujrHebrDevaArmnJpanGeorCyrlKhmrKndaKoreDevaArabCyrlLaooDevaCyrlMlymCyrlBengDevaMymrDevaOryaGuruArabDevaCyrlDevaOlckArabSinhCyrlTamlTeluCyrlThaiEthiCyrlCyrlArabHantHans") }) + }, + lr2s: unsafe { + #[allow(unused_unsafe)] + zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"az\0ha\0kk\0ky\0mn\0ms\0pa\0sd\0sr\0tg\0uz\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x03\0\0\0\x05\0\0\0\t\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x0F\0\0\0\x13\0\0\0\x14\0\0\0\x16\0\0\0\x17\0\0\0&\0\0\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"IQ\0IR\0RU\0CM\0SD\0AF\0CN\0IR\0MN\0CN\0TR\0CN\0CC\0PK\0IN\0ME\0RO\0RU\0TR\0PK\0AF\0CN\0CN\0AU\0BN\0GB\0GF\0HK\0ID\0MO\0PA\0PF\0PH\0SR\0TH\0TW\0US\0VN\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ArabArabCyrlArabArabArabArabArabArabArabLatnMongArabArabDevaLatnLatnLatnLatnArabArabCyrlHansHantHantHantHantHantHantHantHantHantHantHantHantHantHantHant") }) + }, + l2r: unsafe { + #[allow(unused_unsafe)] + zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"af\0am\0ar\0as\0astaz\0be\0bg\0bgcbhobn\0br\0brxbs\0ca\0cebchrcs\0cv\0cy\0da\0de\0doidsbel\0en\0es\0et\0eu\0fa\0ff\0fi\0filfo\0fr\0ga\0gd\0gl\0gu\0ha\0he\0hi\0hr\0hsbhu\0hy\0ia\0id\0ig\0is\0it\0ja\0jv\0ka\0keakgpkk\0km\0kn\0ko\0kokks\0ky\0lo\0lt\0lv\0maimi\0mk\0ml\0mn\0mnimr\0ms\0my\0ne\0nl\0nn\0no\0or\0pa\0pcmpl\0ps\0pt\0qu\0rajrm\0ro\0ru\0sa\0satsc\0sd\0si\0sk\0sl\0so\0sq\0sr\0su\0sv\0sw\0ta\0te\0tg\0th\0ti\0tk\0to\0tr\0tt\0uk\0ur\0uz\0vi\0wo\0xh\0yo\0yrlyuezh\0zu\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ZA\0ET\0EG\0IN\0ES\0AZ\0BY\0BG\0IN\0IN\0BD\0FR\0IN\0BA\0ES\0PH\0US\0CZ\0RU\0GB\0DK\0DE\0IN\0DE\0GR\0US\0ES\0EE\0ES\0IR\0SN\0FI\0PH\0FO\0FR\0IE\0GB\0ES\0IN\0NG\0IL\0IN\0HR\0DE\0HU\0AM\x00001ID\0NG\0IS\0IT\0JP\0ID\0GE\0CV\0BR\0KZ\0KH\0IN\0KR\0IN\0IN\0KG\0LA\0LT\0LV\0IN\0NZ\0MK\0IN\0MN\0IN\0IN\0MY\0MM\0NP\0NL\0NO\0NO\0IN\0IN\0NG\0PL\0AF\0BR\0PE\0IN\0CH\0RO\0RU\0IN\0IN\0IT\0PK\0LK\0SK\0SI\0SO\0AL\0RS\0ID\0SE\0TZ\0IN\0IN\0TJ\0TH\0ET\0TM\0TO\0TR\0RU\0UA\0PK\0UZ\0VN\0SN\0ZA\0NG\0BR\0HK\0CN\0ZA\0") }) + }, + ls2r: unsafe { + #[allow(unused_unsafe)] + zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"az\0en\0ff\0kk\0ky\0mn\0pa\0sd\0tg\0uz\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x01\0\0\0\x02\0\0\0\x03\0\0\0\x04\0\0\0\x06\0\0\0\x07\0\0\0\x08\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x11\0\0\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ArabShawAdlmArabArabLatnMongArabDevaKhojSindArabArabHansBopoHanbHant") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"IR\0GB\0GN\0CN\0CN\0TR\0CN\0PK\0IN\0IN\0IN\0PK\0AF\0CN\0TW\0TW\0TW\0") }) + }, 
+ }; + } + #[clippy::msrv = "1.66"] + impl icu_provider::DataProvider for $provider { + fn load(&self, req: icu_provider::DataRequest) -> Result, icu_provider::DataError> { + if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_LIKELYSUBTAGS_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(::KEY, req)) } + } + } + }; +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_parents_v1.data.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_parents_v1.data.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_parents_v1.data.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_parents_v1.data.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,28 @@ +// @generated +/// Implement `DataProvider` on the given struct using the data +/// hardcoded in this file. This allows the struct to be used with +/// `icu`'s `_unstable` constructors. +#[doc(hidden)] +#[macro_export] +macro_rules! __impl_fallback_parents_v1 { + ($ provider : ty) => { + #[clippy::msrv = "1.66"] + const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; + #[clippy::msrv = "1.66"] + impl $provider { + #[doc(hidden)] + pub const SINGLETON_FALLBACK_PARENTS_V1: &'static ::Yokeable = &icu_locid_transform::provider::LocaleFallbackParentsV1 { + parents: unsafe { + #[allow(unused_unsafe)] + zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x84\0\0\0\0\0\x06\0\x0B\0\x10\0\x15\0\x1A\0\x1F\0$\0)\0.\x003\08\0=\0B\0G\0L\0Q\0V\0[\0`\0e\0j\0o\0t\0y\0~\0\x83\0\x88\0\x8D\0\x92\0\x97\0\x9C\0\xA1\0\xA6\0\xAB\0\xB0\0\xB5\0\xBA\0\xBF\0\xC4\0\xC9\0\xCE\0\xD3\0\xD8\0\xDD\0\xE2\0\xE7\0\xEC\0\xF1\0\xF6\0\xFB\0\0\x01\x05\x01\n\x01\x0F\x01\x14\x01\x19\x01\x1E\x01#\x01(\x01-\x012\x017\x01<\x01A\x01F\x01K\x01P\x01U\x01Z\x01_\x01d\x01i\x01n\x01s\x01x\x01}\x01\x82\x01\x87\x01\x8C\x01\x91\x01\x96\x01\x9B\x01\xA0\x01\xA5\x01\xAA\x01\xAF\x01\xB4\x01\xB9\x01\xBE\x01\xC3\x01\xC8\x01\xCD\x01\xD2\x01\xD7\x01\xDC\x01\xE1\x01\xE6\x01\xEB\x01\xF0\x01\xF5\x01\xFA\x01\xFF\x01\x04\x02\t\x02\x0E\x02\x13\x02\x18\x02\x1D\x02\"\x02'\x02,\x021\x026\x02;\x02@\x02G\x02I\x02K\x02M\x02R\x02W\x02\\\x02a\x02f\x02k\x02p\x02u\x02z\x02\x7F\x02\x84\x02\x89\x02en-150en-AGen-AIen-ATen-AUen-BBen-BEen-BMen-BSen-BWen-BZen-CCen-CHen-CKen-CMen-CXen-CYen-DEen-DGen-DKen-DMen-ERen-FIen-FJen-FKen-FMen-GBen-GDen-GGen-GHen-GIen-GMen-GYen-HKen-IEen-ILen-IMen-INen-IOen-JEen-JMen-KEen-KIen-KNen-KYen-LCen-LRen-LSen-MGen-MOen-MSen-MTen-MUen-MVen-MWen-MYen-NAen-NFen-NGen-NLen-NRen-NUen-NZen-PGen-PKen-PNen-PWen-RWen-SBen-SCen-SDen-SEen-SGen-SHen-SIen-SLen-SSen-SXen-SZen-TCen-TKen-TOen-TTen-TVen-TZen-UGen-VCen-VGen-VUen-WSen-ZAen-ZMen-ZWes-ARes-BOes-BRes-BZes-CLes-COes-CRes-CUes-DOes-ECes-GTes-HNes-MXes-NIes-PAes-PEes-PRes-PYes-SVes-USes-UYes-VEhi-Latnhtnbnnno-NOpt-AOpt-CHpt-CVpt-FRpt-GQpt-GWpt-LUpt-MOpt-MZpt-STpt-TLzh-Hant-MO") }, unsafe { 
zerovec::ZeroVec::from_bytes_unchecked(b"en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419en\0\0\0\0\0\0\x01IN\0fr\0\0\0\0\0\0\x01HT\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0zh\0\x01Hant\x01HK\0") }) + }, + }; + } + #[clippy::msrv = "1.66"] + impl icu_provider::DataProvider for $provider { + fn load(&self, req: icu_provider::DataRequest) -> Result, icu_provider::DataError> { + if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_PARENTS_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(::KEY, req)) } + } + } + }; +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_supplement_co_v1.data.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_supplement_co_v1.data.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_supplement_co_v1.data.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/fallback_supplement_co_v1.data.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,32 @@ +// @generated +/// Implement `DataProvider` on the given struct using the data +/// hardcoded in this file. This allows the struct to be used with +/// `icu`'s `_unstable` constructors. +#[doc(hidden)] +#[macro_export] +macro_rules! __impl_fallback_supplement_co_v1 { + ($ provider : ty) => { + #[clippy::msrv = "1.66"] + const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; + #[clippy::msrv = "1.66"] + impl $provider { + #[doc(hidden)] + pub const SINGLETON_FALLBACK_SUPPLEMENT_CO_V1: &'static <icu_locid_transform::provider::CollationFallbackSupplementV1Marker as icu_provider::DataMarker>::Yokeable = &icu_locid_transform::provider::LocaleFallbackSupplementV1 { + parents: unsafe { + #[allow(unused_unsafe)] + zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x01\0\0\0\0\0yue") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"zh\0\x01Hant\0\0\0\0") }) + }, + unicode_extension_defaults: unsafe { + #[allow(unused_unsafe)] + zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"co") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x02\0\0\0") }, unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x02\0zhzh-Hant") }, unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x06\0pinyinstroke") }) + }, + }; + } + #[clippy::msrv = "1.66"] + impl icu_provider::DataProvider<icu_locid_transform::provider::CollationFallbackSupplementV1Marker> for $provider { + fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_locid_transform::provider::CollationFallbackSupplementV1Marker>, icu_provider::DataError> { + if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_SUPPLEMENT_CO_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(<icu_locid_transform::provider::CollationFallbackSupplementV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) } + } + } + }; +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/list_and_v1.data.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/list_and_v1.data.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/list_and_v1.data.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros/list_and_v1.data.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,35 @@ +// @generated +/// Implement `DataProvider` on the given struct using the data +/// hardcoded in this file. This allows the struct to be used with +/// `icu`'s `_unstable` constructors. +#[doc(hidden)] +#[macro_export] +macro_rules!
__impl_list_and_v1 { + ($ provider : ty) => { + #[clippy::msrv = "1.66"] + const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; + #[clippy::msrv = "1.66"] + impl icu_provider::DataProvider for $provider { + fn load(&self, req: icu_provider::DataRequest) -> Result, icu_provider::DataError> { + static EN_001: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static EN_IN: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: 
icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", and ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static IT: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }]); + static PT: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None 
}, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static FR: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static TR: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: 
icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static ES: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }]); + static RU: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), 
special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static UND: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static EN: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", and ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", & ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: 
icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static HI_LATN: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", aur ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", aur ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }]); + static JA: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, 
icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }]); + static ZH_HK: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }]); + static ZH: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 
3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }]); + static ZH_HANT: ::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }]); + static VALUES: [&::Yokeable; 215usize] = [&EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_IN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &HI_LATN, &IT, &IT, &IT, &IT, &JA, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &RU, &RU, &RU, &RU, &RU, 
&RU, &TR, &TR, &UND, &ZH, &ZH_HK, &ZH, &ZH, &ZH, &ZH_HANT, &ZH_HK, &ZH]; + static KEYS: [&str; 215usize] = ["en", "en-001", "en-150", "en-AE", "en-AG", "en-AI", "en-AS", "en-AT", "en-AU", "en-BB", "en-BE", "en-BI", "en-BM", "en-BS", "en-BW", "en-BZ", "en-CA", "en-CC", "en-CH", "en-CK", "en-CM", "en-CX", "en-CY", "en-DE", "en-DG", "en-DK", "en-DM", "en-ER", "en-FI", "en-FJ", "en-FK", "en-FM", "en-GB", "en-GD", "en-GG", "en-GH", "en-GI", "en-GM", "en-GU", "en-GY", "en-HK", "en-IE", "en-IL", "en-IM", "en-IN", "en-IO", "en-JE", "en-JM", "en-KE", "en-KI", "en-KN", "en-KY", "en-LC", "en-LR", "en-LS", "en-MG", "en-MH", "en-MO", "en-MP", "en-MS", "en-MT", "en-MU", "en-MV", "en-MW", "en-MY", "en-NA", "en-NF", "en-NG", "en-NL", "en-NR", "en-NU", "en-NZ", "en-PG", "en-PH", "en-PK", "en-PN", "en-PR", "en-PW", "en-RW", "en-SB", "en-SC", "en-SD", "en-SE", "en-SG", "en-SH", "en-SI", "en-SL", "en-SS", "en-SX", "en-SZ", "en-TC", "en-TK", "en-TO", "en-TT", "en-TV", "en-TZ", "en-UG", "en-UM", "en-VC", "en-VG", "en-VI", "en-VU", "en-WS", "en-ZA", "en-ZM", "en-ZW", "es", "es-419", "es-AR", "es-BO", "es-BR", "es-BZ", "es-CL", "es-CO", "es-CR", "es-CU", "es-DO", "es-EA", "es-EC", "es-GQ", "es-GT", "es-HN", "es-IC", "es-MX", "es-NI", "es-PA", "es-PE", "es-PH", "es-PR", "es-PY", "es-SV", "es-US", "es-UY", "es-VE", "fr", "fr-BE", "fr-BF", "fr-BI", "fr-BJ", "fr-BL", "fr-CA", "fr-CD", "fr-CF", "fr-CG", "fr-CH", "fr-CI", "fr-CM", "fr-DJ", "fr-DZ", "fr-GA", "fr-GF", "fr-GN", "fr-GP", "fr-GQ", "fr-HT", "fr-KM", "fr-LU", "fr-MA", "fr-MC", "fr-MF", "fr-MG", "fr-ML", "fr-MQ", "fr-MR", "fr-MU", "fr-NC", "fr-NE", "fr-PF", "fr-PM", "fr-RE", "fr-RW", "fr-SC", "fr-SN", "fr-SY", "fr-TD", "fr-TG", "fr-TN", "fr-VU", "fr-WF", "fr-YT", "hi-Latn", "it", "it-CH", "it-SM", "it-VA", "ja", "pt", "pt-AO", "pt-CH", "pt-CV", "pt-GQ", "pt-GW", "pt-LU", "pt-MO", "pt-MZ", "pt-PT", "pt-ST", "pt-TL", "ru", "ru-BY", "ru-KG", "ru-KZ", "ru-MD", "ru-UA", "tr", "tr-CY", "und", "zh", "zh-HK", "zh-Hans", "zh-Hans-HK", "zh-Hans-MO", "zh-Hant", "zh-MO", "zh-SG"]; + if let Ok(payload) = KEYS.binary_search_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).map(|i| *unsafe { VALUES.get_unchecked(i) }) { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(payload)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::MissingLocale.with_req(::KEY, req)) } + } + } + }; +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/macros.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,46 @@ +// @generated +/// Marks a type as a data provider. You can then use macros like +/// `impl_core_helloworld_v1` to add implementations. +/// +/// ```ignore +/// struct MyProvider; +/// const _: () = { +/// include!("path/to/generated/macros.rs"); +/// make_provider!(MyProvider); +/// impl_core_helloworld_v1!(MyProvider); +/// } +/// ``` +#[doc(hidden)] +#[macro_export] +macro_rules! 
__make_provider {
+    ($ name : ty) => {
+        #[clippy::msrv = "1.66"]
+        impl $name {
+            #[doc(hidden)]
+            #[allow(dead_code)]
+            pub const MUST_USE_MAKE_PROVIDER_MACRO: () = ();
+        }
+    };
+}
+#[doc(inline)]
+pub use __make_provider as make_provider;
+#[macro_use]
+#[path = "macros/fallback_likelysubtags_v1.data.rs"]
+mod fallback_likelysubtags_v1;
+#[doc(inline)]
+pub use __impl_fallback_likelysubtags_v1 as impl_fallback_likelysubtags_v1;
+#[macro_use]
+#[path = "macros/fallback_parents_v1.data.rs"]
+mod fallback_parents_v1;
+#[doc(inline)]
+pub use __impl_fallback_parents_v1 as impl_fallback_parents_v1;
+#[macro_use]
+#[path = "macros/fallback_supplement_co_v1.data.rs"]
+mod fallback_supplement_co_v1;
+#[doc(inline)]
+pub use __impl_fallback_supplement_co_v1 as impl_fallback_supplement_co_v1;
+#[macro_use]
+#[path = "macros/list_and_v1.data.rs"]
+mod list_and_v1;
+#[doc(inline)]
+pub use __impl_list_and_v1 as impl_list_and_v1;
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/mod.rs
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/mod.rs 2023-12-04 19:48:34.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/data/mod.rs 2023-12-21 16:55:28.000000000 +0000
@@ -1,122 +1,31 @@
 // @generated
-#[clippy::msrv = "1.61"]
-mod fallback;
-#[clippy::msrv = "1.61"]
-mod list;
-#[clippy::msrv = "1.61"]
-use icu_provider::prelude::*;
-/// Implement [`DataProvider`] on the given struct using the data
-/// hardcoded in this module. This allows the struct to be used with
-/// `icu`'s `_unstable` constructors.
-///
-/// This macro can only be called from its definition-site, i.e. right
-/// after `include!`-ing the generated module.
-///
-/// ```compile_fail
-/// struct MyDataProvider;
-/// include!("/path/to/generated/mod.rs");
-/// impl_data_provider(MyDataProvider);
-/// ```
-#[allow(unused_macros)]
+include!("macros.rs");
 macro_rules!
impl_data_provider { - ($ provider : path) => { - #[clippy::msrv = "1.61"] - impl DataProvider<::icu_list::provider::AndListV1Marker> for $provider { - fn load(&self, req: DataRequest) -> Result, DataError> { - list::and_v1::lookup(&req.locale) - .map(zerofrom::ZeroFrom::zero_from) - .map(DataPayload::from_owned) - .map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) }) - .ok_or_else(|| DataErrorKind::MissingLocale.with_req(::icu_list::provider::AndListV1Marker::KEY, req)) - } - } - #[clippy::msrv = "1.61"] - impl DataProvider<::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker> for $provider { - fn load( - &self, - req: DataRequest, - ) -> Result, DataError> { - fallback::supplement::co_v1::lookup(&req.locale) - .map(zerofrom::ZeroFrom::zero_from) - .map(DataPayload::from_owned) - .map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) }) - .ok_or_else(|| { - DataErrorKind::MissingLocale - .with_req(::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker::KEY, req) - }) - } - } - #[clippy::msrv = "1.61"] - impl DataProvider<::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker> for $provider { - fn load( - &self, - req: DataRequest, - ) -> Result, DataError> { - fallback::likelysubtags_v1::lookup(&req.locale) - .map(zerofrom::ZeroFrom::zero_from) - .map(DataPayload::from_owned) - .map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) }) - .ok_or_else(|| { - DataErrorKind::MissingLocale - .with_req(::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker::KEY, req) - }) - } - } - #[clippy::msrv = "1.61"] - impl DataProvider<::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker> for $provider { - fn load( - &self, - req: DataRequest, - ) -> Result, DataError> { - fallback::parents_v1::lookup(&req.locale) - .map(zerofrom::ZeroFrom::zero_from) - .map(DataPayload::from_owned) - .map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) }) - .ok_or_else(|| { - DataErrorKind::MissingLocale.with_req(::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker::KEY, req) - }) - } - } + ($ provider : ty) => { + make_provider!($provider); + impl_fallback_likelysubtags_v1!($provider); + impl_fallback_parents_v1!($provider); + impl_fallback_supplement_co_v1!($provider); + impl_list_and_v1!($provider); }; } -/// Implement [`AnyProvider`] on the given struct using the data -/// hardcoded in this module. This allows the struct to be used with -/// `icu`'s `_any` constructors. -/// -/// This macro can only be called from its definition-site, i.e. right -/// after `include!`-ing the generated module. -/// -/// ```compile_fail -/// struct MyAnyProvider; -/// include!("/path/to/generated/mod.rs"); -/// impl_any_provider(MyAnyProvider); -/// ``` #[allow(unused_macros)] macro_rules! 
impl_any_provider { - ($ provider : path) => { - #[clippy::msrv = "1.61"] - impl AnyProvider for $provider { - fn load_any(&self, key: DataKey, req: DataRequest) -> Result { - const ANDLISTV1MARKER: ::icu_provider::DataKeyHash = ::icu_list::provider::AndListV1Marker::KEY.hashed(); - const COLLATIONFALLBACKSUPPLEMENTV1MARKER: ::icu_provider::DataKeyHash = - ::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker::KEY.hashed(); - const LOCALEFALLBACKLIKELYSUBTAGSV1MARKER: ::icu_provider::DataKeyHash = - ::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker::KEY.hashed(); - const LOCALEFALLBACKPARENTSV1MARKER: ::icu_provider::DataKeyHash = - ::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker::KEY.hashed(); + ($ provider : ty) => { + #[clippy::msrv = "1.66"] + impl icu_provider::AnyProvider for $provider { + fn load_any(&self, key: icu_provider::DataKey, req: icu_provider::DataRequest) -> Result { match key.hashed() { - ANDLISTV1MARKER => list::and_v1::lookup(&req.locale).map(AnyPayload::from_static_ref), - COLLATIONFALLBACKSUPPLEMENTV1MARKER => fallback::supplement::co_v1::lookup(&req.locale).map(AnyPayload::from_static_ref), - LOCALEFALLBACKLIKELYSUBTAGSV1MARKER => fallback::likelysubtags_v1::lookup(&req.locale).map(AnyPayload::from_static_ref), - LOCALEFALLBACKPARENTSV1MARKER => fallback::parents_v1::lookup(&req.locale).map(AnyPayload::from_static_ref), - _ => return Err(DataErrorKind::MissingDataKey.with_req(key, req)), + h if h == ::KEY.hashed() => icu_provider::DataProvider::::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response), + h if h == ::KEY.hashed() => icu_provider::DataProvider::::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response), + h if h == ::KEY.hashed() => icu_provider::DataProvider::::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response), + h if h == ::KEY.hashed() => icu_provider::DataProvider::::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response), + _ => Err(icu_provider::DataErrorKind::MissingDataKey.with_req(key, req)), } - .map(|payload| AnyResponse { payload: Some(payload), metadata: Default::default() }) - .ok_or_else(|| DataErrorKind::MissingLocale.with_req(key, req)) } } }; } -#[clippy::msrv = "1.61"] +#[clippy::msrv = "1.66"] pub struct BakedDataProvider; impl_data_provider!(BakedDataProvider); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_baked_icu_data/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -19,6 +19,10 @@ //! -k list/and@1 fallback/likelysubtags@1 fallback/parents@1 fallback/supplement/co@1 \ //! --cldr-tag latest --icuexport-tag latest -o src/data //! 
``` + +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] #![allow(elided_lifetimes_in_paths)] mod data { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,19 +3,16 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start either = "1.5.0" itertools = "0.10.1" -tracing = "0.1" polonius-engine = "0.13.0" -smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } +rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_graphviz = { path = "../rustc_graphviz" } rustc_hir = { path = "../rustc_hir" } -rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_index = { path = "../rustc_index" } rustc_infer = { path = "../rustc_infer" } rustc_lexer = { path = "../rustc_lexer" } @@ -24,7 +21,10 @@ rustc_mir_dataflow = { path = "../rustc_mir_dataflow" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_traits = { path = "../rustc_traits" } -rustc_span = { path = "../rustc_span" } +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -1,20 +1,20 @@ borrowck_assign_due_to_use_closure = assignment occurs due to use in closure -borrowck_assign_due_to_use_generator = - assign occurs due to use in generator +borrowck_assign_due_to_use_coroutine = + assign occurs due to use in coroutine borrowck_assign_part_due_to_use_closure = assignment to part occurs due to use in closure -borrowck_assign_part_due_to_use_generator = - assign to part occurs due to use in generator +borrowck_assign_part_due_to_use_coroutine = + assign to part occurs due to use in coroutine borrowck_borrow_due_to_use_closure = borrow occurs due to use in closure -borrowck_borrow_due_to_use_generator = - borrow occurs due to use in generator +borrowck_borrow_due_to_use_coroutine = + borrow occurs due to use in coroutine borrowck_calling_operator_moves_lhs = calling this operator moves the left-hand side @@ -142,11 +142,11 @@ *[false] moved } due to use in closure -borrowck_partial_var_move_by_use_in_generator = +borrowck_partial_var_move_by_use_in_coroutine = variable {$is_partial -> [true] partially moved *[false] moved - } due to use in generator + } due to use in coroutine borrowck_returned_async_block_escaped = returns an `async` block that contains a reference to a captured variable, which then escapes the closure body @@ -180,15 +180,15 @@ borrowck_use_due_to_use_closure = use occurs due to use in closure -borrowck_use_due_to_use_generator = - use 
occurs due to use in generator +borrowck_use_due_to_use_coroutine = + use occurs due to use in coroutine borrowck_used_impl_require_static = the used `impl` has a `'static` requirement borrowck_value_capture_here = value captured {$is_within -> - [true] here by generator + [true] here by coroutine *[false] here } @@ -207,8 +207,8 @@ borrowck_var_borrow_by_use_in_closure = borrow occurs due to use in closure -borrowck_var_borrow_by_use_in_generator = - borrow occurs due to use in generator +borrowck_var_borrow_by_use_in_coroutine = + borrow occurs due to use in coroutine borrowck_var_borrow_by_use_place_in_closure = {$is_single_var -> @@ -216,11 +216,11 @@ [false] borrows occur } due to use of {$place} in closure -borrowck_var_borrow_by_use_place_in_generator = +borrowck_var_borrow_by_use_place_in_coroutine = {$is_single_var -> *[true] borrow occurs [false] borrows occur - } due to use of {$place} in generator + } due to use of {$place} in coroutine borrowck_var_cannot_escape_closure = captured variable cannot escape `FnMut` closure body @@ -234,8 +234,8 @@ borrowck_var_first_borrow_by_use_place_in_closure = first borrow occurs due to use of {$place} in closure -borrowck_var_first_borrow_by_use_place_in_generator = - first borrow occurs due to use of {$place} in generator +borrowck_var_first_borrow_by_use_place_in_coroutine = + first borrow occurs due to use of {$place} in coroutine borrowck_var_here_captured = variable captured here @@ -244,8 +244,8 @@ borrowck_var_move_by_use_in_closure = move occurs due to use in closure -borrowck_var_move_by_use_in_generator = - move occurs due to use in generator +borrowck_var_move_by_use_in_coroutine = + move occurs due to use in coroutine borrowck_var_mutable_borrow_by_use_place_in_closure = mutable borrow occurs due to use of {$place} in closure @@ -253,5 +253,5 @@ borrowck_var_second_borrow_by_use_place_in_closure = second borrow occurs due to use of {$place} in closure -borrowck_var_second_borrow_by_use_place_in_generator = - second borrow occurs due to use of {$place} in generator +borrowck_var_second_borrow_by_use_place_in_coroutine = + second borrow occurs due to use of {$place} in coroutine diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/borrowck_errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/borrowck_errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/borrowck_errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/borrowck_errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -368,16 +368,17 @@ err } - pub(crate) fn cannot_borrow_across_generator_yield( + pub(crate) fn cannot_borrow_across_coroutine_yield( &self, span: Span, yield_span: Span, ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { + let coroutine_kind = self.body.coroutine.as_ref().unwrap().coroutine_kind; let mut err = struct_span_err!( self, span, E0626, - "borrow may still be in use when generator yields", + "borrow may still be in use when {coroutine_kind:#} yields", ); err.span_label(yield_span, "possible yield occurs here"); err diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/dataflow.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/dataflow.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/dataflow.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/dataflow.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,7 @@ #![deny(rustc::untranslatable_diagnostic)] 
 #![deny(rustc::diagnostic_outside_of_impl)]
 use rustc_data_structures::fx::FxIndexMap;
+use rustc_data_structures::graph::WithSuccessors;
 use rustc_index::bit_set::BitSet;
 use rustc_middle::mir::{
     self, BasicBlock, Body, CallReturnPlaces, Location, Place, TerminatorEdges,
@@ -222,6 +223,7 @@
     }
 }
 
+// This is `pub` because it's used by unstable external borrowck data users, see `consumers.rs`.
 pub fn calculate_borrows_out_of_scope_at_location<'tcx>(
     body: &Body<'tcx>,
     regioncx: &RegionInferenceContext<'tcx>,
@@ -238,15 +240,203 @@
     prec.borrows_out_of_scope_at_location
 }
 
+struct PoloniusOutOfScopePrecomputer<'a, 'tcx> {
+    visited: BitSet<mir::BasicBlock>,
+    visit_stack: Vec<mir::BasicBlock>,
+    body: &'a Body<'tcx>,
+    regioncx: &'a RegionInferenceContext<'tcx>,
+
+    loans_out_of_scope_at_location: FxIndexMap<Location, Vec<BorrowIndex>>,
+}
+
+impl<'a, 'tcx> PoloniusOutOfScopePrecomputer<'a, 'tcx> {
+    fn new(body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>) -> Self {
+        Self {
+            visited: BitSet::new_empty(body.basic_blocks.len()),
+            visit_stack: vec![],
+            body,
+            regioncx,
+            loans_out_of_scope_at_location: FxIndexMap::default(),
+        }
+    }
+}
+
+impl<'tcx> PoloniusOutOfScopePrecomputer<'_, 'tcx> {
+    /// Loans are in scope while they are live: whether they are contained within any live region.
+    /// In the location-insensitive analysis, a loan will be contained in a region if the issuing
+    /// region can reach it in the subset graph. So this is a reachability problem.
+    fn precompute_loans_out_of_scope(
+        &mut self,
+        loan_idx: BorrowIndex,
+        issuing_region: RegionVid,
+        loan_issued_at: Location,
+    ) {
+        let sccs = self.regioncx.constraint_sccs();
+        let universal_regions = self.regioncx.universal_regions();
+
+        // We first handle the cases where the loan doesn't go out of scope, depending on the issuing
+        // region's successors.
+        for successor in self.regioncx.region_graph().depth_first_search(issuing_region) {
+            // 1. Via applied member constraints
+            //
+            // The issuing region can flow into the choice regions, and they are either:
+            // - placeholders or free regions themselves,
+            // - or also transitively outlive a free region.
+            //
+            // That is to say, if there are applied member constraints here, the loan escapes the
+            // function and cannot go out of scope. We could early return here.
+            //
+            // For additional insurance via fuzzing and crater, we verify that the constraint's min
+            // choice indeed escapes the function. In the future, we could e.g. turn this check into
+            // a debug assert and early return as an optimization.
+            let scc = sccs.scc(successor);
+            for constraint in self.regioncx.applied_member_constraints(scc) {
+                if universal_regions.is_universal_region(constraint.min_choice) {
+                    return;
+                }
+            }
+
+            // 2. Via regions that are live at all points: placeholders and free regions.
+            //
+            // If the issuing region outlives such a region, its loan escapes the function and
+            // cannot go out of scope. We can early return.
+            if self.regioncx.is_region_live_at_all_points(successor) {
+                return;
+            }
+        }
+
+        let first_block = loan_issued_at.block;
+        let first_bb_data = &self.body.basic_blocks[first_block];
+
+        // The first block we visit is the one where the loan is issued, starting from the statement
+        // where the loan is issued: at `loan_issued_at`.
+        let first_lo = loan_issued_at.statement_index;
+        let first_hi = first_bb_data.statements.len();
+
+        if let Some(kill_location) =
+            self.loan_kill_location(loan_idx, loan_issued_at, first_block, first_lo, first_hi)
+        {
+            debug!("loan {:?} gets killed at {:?}", loan_idx, kill_location);
+            self.loans_out_of_scope_at_location.entry(kill_location).or_default().push(loan_idx);
+
+            // The loan dies within the first block, we're done and can early return.
+            return;
+        }
+
+        // The loan is not dead. Add successor BBs to the work list, if necessary.
+        for succ_bb in first_bb_data.terminator().successors() {
+            if self.visited.insert(succ_bb) {
+                self.visit_stack.push(succ_bb);
+            }
+        }
+
+        // We may end up visiting `first_block` again. This is not an issue: we know at this point
+        // that the loan is not killed in the `first_lo..=first_hi` range, so checking the
+        // `0..first_lo` range and the `0..first_hi` range gives the same result.
+        while let Some(block) = self.visit_stack.pop() {
+            let bb_data = &self.body[block];
+            let num_stmts = bb_data.statements.len();
+            if let Some(kill_location) =
+                self.loan_kill_location(loan_idx, loan_issued_at, block, 0, num_stmts)
+            {
+                debug!("loan {:?} gets killed at {:?}", loan_idx, kill_location);
+                self.loans_out_of_scope_at_location
+                    .entry(kill_location)
+                    .or_default()
+                    .push(loan_idx);
+
+                // The loan dies within this block, so we don't need to visit its successors.
+                continue;
+            }
+
+            // Add successor BBs to the work list, if necessary.
+            for succ_bb in bb_data.terminator().successors() {
+                if self.visited.insert(succ_bb) {
+                    self.visit_stack.push(succ_bb);
+                }
+            }
+        }
+
+        self.visited.clear();
+        assert!(self.visit_stack.is_empty(), "visit stack should be empty");
+    }
+
+    /// Returns the lowest statement in `start..=end`, where the loan goes out of scope, if any.
+    /// This is the statement where the issuing region can't reach any of the regions that are live
+    /// at this point.
+    fn loan_kill_location(
+        &self,
+        loan_idx: BorrowIndex,
+        loan_issued_at: Location,
+        block: BasicBlock,
+        start: usize,
+        end: usize,
+    ) -> Option<Location> {
+        for statement_index in start..=end {
+            let location = Location { block, statement_index };
+
+            // Check whether the issuing region can reach local regions that are live at this point:
+            // - a loan is always live at its issuing location because it can reach the issuing
+            //   region, which is always live at this location.
+            if location == loan_issued_at {
+                continue;
+            }
+
+            // - the loan goes out of scope at `location` if it's not contained within any regions
+            //   live at this point.
+            //
+            // FIXME: if the issuing region `i` can reach a live region `r` at point `p`, and `r` is
+            // live at point `q`, then it's guaranteed that `i` would reach `r` at point `q`.
+            // Reachability is location-insensitive, and we could take advantage of that, by jumping
+            // to a further point than just the next statement: we can jump to the furthest point
+            // within the block where `r` is live.
+            if self.regioncx.is_loan_live_at(loan_idx, location) {
+                continue;
+            }
+
+            // No live region is reachable from the issuing region: the loan is killed at this
+            // point.
+ return Some(location); + } + + None + } +} + impl<'a, 'tcx> Borrows<'a, 'tcx> { pub fn new( tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, - nonlexical_regioncx: &'a RegionInferenceContext<'tcx>, + regioncx: &'a RegionInferenceContext<'tcx>, borrow_set: &'a BorrowSet<'tcx>, ) -> Self { - let borrows_out_of_scope_at_location = - calculate_borrows_out_of_scope_at_location(body, nonlexical_regioncx, borrow_set); + let mut borrows_out_of_scope_at_location = + calculate_borrows_out_of_scope_at_location(body, regioncx, borrow_set); + + // The in-tree polonius analysis computes loans going out of scope using the set-of-loans + // model, and makes sure they're identical to the existing computation of the set-of-points + // model. + if tcx.sess.opts.unstable_opts.polonius.is_next_enabled() { + let mut polonius_prec = PoloniusOutOfScopePrecomputer::new(body, regioncx); + for (loan_idx, loan_data) in borrow_set.iter_enumerated() { + let issuing_region = loan_data.region; + let loan_issued_at = loan_data.reserve_location; + + polonius_prec.precompute_loans_out_of_scope( + loan_idx, + issuing_region, + loan_issued_at, + ); + } + + assert_eq!( + borrows_out_of_scope_at_location, polonius_prec.loans_out_of_scope_at_location, + "the loans out of scope must be the same as the borrows out of scope" + ); + + borrows_out_of_scope_at_location = polonius_prec.loans_out_of_scope_at_location; + } + Borrows { tcx, body, borrow_set, borrows_out_of_scope_at_location } } @@ -333,6 +523,13 @@ } } +/// Forward dataflow computation of the set of borrows that are in scope at a particular location. +/// - we gen the introduced loans +/// - we kill loans on locals going out of (regular) scope +/// - we kill the loans going out of their region's NLL scope: in NLL terms, the frontier where a +/// region stops containing the CFG points reachable from the issuing location. +/// - we also kill loans of conflicting places when overwriting a shared path: e.g. borrows of +/// `a.b.c` when `a` is overwritten. impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> { type Idx = BorrowIndex; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/def_use.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/def_use.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/def_use.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/def_use.rs 2023-12-21 16:55:28.000000000 +0000 @@ -44,7 +44,7 @@ PlaceContext::MutatingUse(MutatingUseContext::Projection) | // Borrows only consider their local used at the point of the borrow. - // This won't affect the results since we use this analysis for generators + // This won't affect the results since we use this analysis for coroutines // and we only care about the result at suspension points. Borrows cannot // cross suspension points so this behavior is unproblematic. 
PlaceContext::MutatingUse(MutatingUseContext::Borrow) | diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,7 +8,7 @@ use rustc_hir as hir; use rustc_hir::def::{DefKind, Res}; use rustc_hir::intravisit::{walk_block, walk_expr, Visitor}; -use rustc_hir::{AsyncGeneratorKind, GeneratorKind, LangItem}; +use rustc_hir::{CoroutineKind, CoroutineSource, LangItem}; use rustc_infer::traits::ObligationCause; use rustc_middle::hir::nested_filter::OnlyBodies; use rustc_middle::mir::tcx::PlaceTy; @@ -351,7 +351,9 @@ } // Check if we are in a situation of `ident @ ident` where we want to suggest // `ref ident @ ref ident` or `ref ident @ Struct { ref ident }`. - if let Some(subpat) = sub && self.pat.is_none() { + if let Some(subpat) = sub + && self.pat.is_none() + { self.visit_pat(subpat); if self.pat.is_some() { self.parent_pat = Some(p); @@ -370,7 +372,9 @@ let mut finder = ExpressionFinder { expr_span: move_span, expr: None, pat: None, parent_pat: None }; finder.visit_expr(expr); - if let Some(span) = span && let Some(expr) = finder.expr { + if let Some(span) = span + && let Some(expr) = finder.expr + { for (_, expr) in hir.parent_iter(expr.hir_id) { if let hir::Node::Expr(expr) = expr { if expr.span.contains(span) { @@ -425,10 +429,7 @@ Some(hir::intravisit::FnKind::Method(..)) => "method", Some(hir::intravisit::FnKind::Closure) => "closure", }; - span.push_span_label( - ident.span, - format!("in this {descr}"), - ); + span.push_span_label(ident.span, format!("in this {descr}")); err.span_note( span, format!( @@ -441,15 +442,16 @@ let ty = place.ty(self.body, self.infcx.tcx).ty; if let hir::Node::Expr(parent_expr) = parent && let hir::ExprKind::Call(call_expr, _) = parent_expr.kind - && let hir::ExprKind::Path( - hir::QPath::LangItem(LangItem::IntoIterIntoIter, _, _) - ) = call_expr.kind + && let hir::ExprKind::Path(hir::QPath::LangItem( + LangItem::IntoIterIntoIter, + _, + _, + )) = call_expr.kind { // Do not suggest `.clone()` in a `for` loop, we already suggest borrowing. - } else if let UseSpans::FnSelfUse { - kind: CallKind::Normal { .. }, - .. - } = move_spans { + } else if let UseSpans::FnSelfUse { kind: CallKind::Normal { .. }, .. } = + move_spans + { // We already suggest cloning for these cases in `explain_captures`. } else { self.suggest_cloning(err, ty, expr, move_span); @@ -602,10 +604,10 @@ if self.sugg_span.is_some() { return; } - if let hir::StmtKind::Local(hir::Local { - span, ty, init: None, .. - }) = &ex.kind && span.contains(self.decl_span) { - self.sugg_span = ty.map_or(Some(self.decl_span), |ty| Some(ty.span)); + if let hir::StmtKind::Local(hir::Local { span, ty, init: None, .. 
}) = &ex.kind + && span.contains(self.decl_span) + { + self.sugg_span = ty.map_or(Some(self.decl_span), |ty| Some(ty.span)); } hir::intravisit::walk_stmt(self, ex); } @@ -743,19 +745,14 @@ ".clone()".to_owned() }; if let Some(clone_trait_def) = tcx.lang_items().clone_trait() - && self.infcx - .type_implements_trait( - clone_trait_def, - [ty], - self.param_env, - ) + && self + .infcx + .type_implements_trait(clone_trait_def, [ty], self.param_env) .must_apply_modulo_regions() { let msg = if let ty::Adt(def, _) = ty.kind() - && [ - tcx.get_diagnostic_item(sym::Arc), - tcx.get_diagnostic_item(sym::Rc), - ].contains(&Some(def.did())) + && [tcx.get_diagnostic_item(sym::Arc), tcx.get_diagnostic_item(sym::Rc)] + .contains(&Some(def.did())) { "clone the value to increment its reference count" } else { @@ -851,7 +848,7 @@ move_spans.var_subdiag(None, &mut err, None, |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => MoveUseInGenerator { var_span }, + Some(_) => MoveUseInCoroutine { var_span }, None => MoveUseInClosure { var_span }, } }); @@ -897,7 +894,7 @@ let desc_place = self.describe_any_place(place.as_ref()); match kind { Some(_) => { - BorrowUsePlaceGenerator { place: desc_place, var_span, is_single_var: true } + BorrowUsePlaceCoroutine { place: desc_place, var_span, is_single_var: true } } None => BorrowUsePlaceClosure { place: desc_place, var_span, is_single_var: true }, } @@ -929,8 +926,8 @@ let borrow_spans = self.borrow_spans(span, location); let span = borrow_spans.args_or_use(); - let container_name = if issued_spans.for_generator() || borrow_spans.for_generator() { - "generator" + let container_name = if issued_spans.for_coroutine() || borrow_spans.for_coroutine() { + "coroutine" } else { "closure" }; @@ -1043,7 +1040,7 @@ |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => BorrowUsePlaceGenerator { + Some(_) => BorrowUsePlaceCoroutine { place: desc_place, var_span, is_single_var: true, @@ -1127,7 +1124,7 @@ borrow_spans.var_subdiag(None, &mut err, Some(gen_borrow_kind), |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => BorrowUsePlaceGenerator { + Some(_) => BorrowUsePlaceCoroutine { place: desc_place, var_span, is_single_var: false, @@ -1148,7 +1145,7 @@ let borrow_place_desc = self.describe_any_place(borrow_place.as_ref()); match kind { Some(_) => { - FirstBorrowUsePlaceGenerator { place: borrow_place_desc, var_span } + FirstBorrowUsePlaceCoroutine { place: borrow_place_desc, var_span } } None => FirstBorrowUsePlaceClosure { place: borrow_place_desc, var_span }, } @@ -1162,7 +1159,7 @@ |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => SecondBorrowUsePlaceGenerator { place: desc_place, var_span }, + Some(_) => SecondBorrowUsePlaceCoroutine { place: desc_place, var_span }, None => SecondBorrowUsePlaceClosure { place: desc_place, var_span }, } }, @@ -1328,42 +1325,160 @@ issue_span: Span, expr_span: Span, body_expr: Option<&'hir hir::Expr<'hir>>, - loop_bind: Option, + loop_bind: Option<&'hir Ident>, + loop_span: Option, + head_span: Option, + pat_span: Option, + head: Option<&'hir hir::Expr<'hir>>, } impl<'hir> Visitor<'hir> for ExprFinder<'hir> { fn visit_expr(&mut self, ex: &'hir hir::Expr<'hir>) { - if let hir::ExprKind::Loop(hir::Block{ stmts: [stmt, ..], ..}, _, hir::LoopSource::ForLoop, _) = ex.kind && - let hir::StmtKind::Expr(hir::Expr{ kind: hir::ExprKind::Match(call, [_, bind, ..], _), ..}) = 
stmt.kind && - let hir::ExprKind::Call(path, _args) = call.kind && - let hir::ExprKind::Path(hir::QPath::LangItem(LangItem::IteratorNext, _, _, )) = path.kind && - let hir::PatKind::Struct(path, [field, ..], _) = bind.pat.kind && - let hir::QPath::LangItem(LangItem::OptionSome, _, _) = path && - let PatField { pat: hir::Pat{ kind: hir::PatKind::Binding(_, _, ident, ..), .. }, ..} = field && - self.issue_span.source_equal(call.span) { - self.loop_bind = Some(ident.name); + // Try to find + // let result = match IntoIterator::into_iter() { + // mut iter => { + // [opt_ident]: loop { + // match Iterator::next(&mut iter) { + // None => break, + // Some() => , + // }; + // } + // } + // }; + // corresponding to the desugaring of a for loop `for in { }`. + if let hir::ExprKind::Call(path, [arg]) = ex.kind + && let hir::ExprKind::Path(hir::QPath::LangItem( + LangItem::IntoIterIntoIter, + _, + _, + )) = path.kind + && arg.span.contains(self.issue_span) + { + // Find `IntoIterator::into_iter()` + self.head = Some(arg); + } + if let hir::ExprKind::Loop( + hir::Block { stmts: [stmt, ..], .. }, + _, + hir::LoopSource::ForLoop, + _, + ) = ex.kind + && let hir::StmtKind::Expr(hir::Expr { + kind: hir::ExprKind::Match(call, [_, bind, ..], _), + span: head_span, + .. + }) = stmt.kind + && let hir::ExprKind::Call(path, _args) = call.kind + && let hir::ExprKind::Path(hir::QPath::LangItem(LangItem::IteratorNext, _, _)) = + path.kind + && let hir::PatKind::Struct(path, [field, ..], _) = bind.pat.kind + && let hir::QPath::LangItem(LangItem::OptionSome, pat_span, _) = path + && call.span.contains(self.issue_span) + { + // Find `` and the span for the whole `for` loop. + if let PatField { + pat: hir::Pat { kind: hir::PatKind::Binding(_, _, ident, ..), .. }, + .. + } = field + { + self.loop_bind = Some(ident); } + self.head_span = Some(*head_span); + self.pat_span = Some(pat_span); + self.loop_span = Some(stmt.span); + } - if let hir::ExprKind::MethodCall(body_call, _recv, ..) = ex.kind && - body_call.ident.name == sym::next && ex.span.source_equal(self.expr_span) { - self.body_expr = Some(ex); + if let hir::ExprKind::MethodCall(body_call, recv, ..) = ex.kind + && body_call.ident.name == sym::next + && recv.span.source_equal(self.expr_span) + { + self.body_expr = Some(ex); } hir::intravisit::walk_expr(self, ex); } } - let mut finder = - ExprFinder { expr_span: span, issue_span, loop_bind: None, body_expr: None }; + let mut finder = ExprFinder { + expr_span: span, + issue_span, + loop_bind: None, + body_expr: None, + head_span: None, + loop_span: None, + pat_span: None, + head: None, + }; finder.visit_expr(hir.body(body_id).value); - if let Some(loop_bind) = finder.loop_bind && - let Some(body_expr) = finder.body_expr && - let Some(def_id) = typeck_results.type_dependent_def_id(body_expr.hir_id) && - let Some(trait_did) = tcx.trait_of_item(def_id) && - tcx.is_diagnostic_item(sym::Iterator, trait_did) { - err.note(format!( - "a for loop advances the iterator for you, the result is stored in `{loop_bind}`." 
+ if let Some(body_expr) = finder.body_expr + && let Some(loop_span) = finder.loop_span + && let Some(def_id) = typeck_results.type_dependent_def_id(body_expr.hir_id) + && let Some(trait_did) = tcx.trait_of_item(def_id) + && tcx.is_diagnostic_item(sym::Iterator, trait_did) + { + if let Some(loop_bind) = finder.loop_bind { + err.note(format!( + "a for loop advances the iterator for you, the result is stored in `{}`", + loop_bind.name, + )); + } else { + err.note( + "a for loop advances the iterator for you, the result is stored in its pattern", + ); + } + let msg = "if you want to call `next` on a iterator within the loop, consider using \ + `while let`"; + if let Some(head) = finder.head + && let Some(pat_span) = finder.pat_span + && loop_span.contains(body_expr.span) + && loop_span.contains(head.span) + { + let sm = self.infcx.tcx.sess.source_map(); + + let mut sugg = vec![]; + if let hir::ExprKind::Path(hir::QPath::Resolved(None, _)) = head.kind { + // A bare path doesn't need a `let` assignment, it's already a simple + // binding access. + // As a new binding wasn't added, we don't need to modify the advancing call. + sugg.push((loop_span.with_hi(pat_span.lo()), format!("while let Some("))); + sugg.push(( + pat_span.shrink_to_hi().with_hi(head.span.lo()), + ") = ".to_string(), + )); + sugg.push((head.span.shrink_to_hi(), ".next()".to_string())); + } else { + // Needs a new a `let` binding. + let indent = if let Some(indent) = sm.indentation_before(loop_span) { + format!("\n{indent}") + } else { + " ".to_string() + }; + let Ok(head_str) = sm.span_to_snippet(head.span) else { + err.help(msg); + return; + }; + sugg.push(( + loop_span.with_hi(pat_span.lo()), + format!("let iter = {head_str};{indent}while let Some("), )); - err.help("if you want to call `next` on a iterator within the loop, consider using `while let`."); + sugg.push(( + pat_span.shrink_to_hi().with_hi(head.span.hi()), + ") = iter.next()".to_string(), + )); + // As a new binding was added, we should change how the iterator is advanced to + // use the newly introduced binding. + if let hir::ExprKind::MethodCall(_, recv, ..) = body_expr.kind + && let hir::ExprKind::Path(hir::QPath::Resolved(None, ..)) = recv.kind + { + // As we introduced a `let iter = ;`, we need to change where the + // already borrowed value was accessed from `.next()` to + // `iter.next()`. + sugg.push((recv.span, "iter".to_string())); + } + } + err.multipart_suggestion(msg, sugg, Applicability::MaybeIncorrect); + } else { + err.help(msg); + } } } @@ -1459,7 +1574,7 @@ // Get closure's arguments let ty::Closure(_, args) = typeck_results.expr_ty(closure_expr).kind() else { - /* hir::Closure can be a generator too */ + /* hir::Closure can be a coroutine too */ return; }; let sig = args.as_closure().sig(); @@ -1539,69 +1654,80 @@ fn visit_expr(&mut self, e: &'hir hir::Expr<'hir>) { if e.span.contains(self.capture_span) { if let hir::ExprKind::Closure(&hir::Closure { - movability: None, - body, - fn_arg_span, - fn_decl: hir::FnDecl{ inputs, .. }, - .. - }) = e.kind && - let Some(hir::Node::Expr(body )) = self.hir.find(body.hir_id) { - self.suggest_arg = "this: &Self".to_string(); - if inputs.len() > 0 { - self.suggest_arg.push_str(", "); - } - self.in_closure = true; - self.closure_arg_span = fn_arg_span; - self.visit_expr(body); - self.in_closure = false; + movability: None, + body, + fn_arg_span, + fn_decl: hir::FnDecl { inputs, .. }, + .. 
+ }) = e.kind + && let Some(hir::Node::Expr(body)) = self.hir.find(body.hir_id) + { + self.suggest_arg = "this: &Self".to_string(); + if inputs.len() > 0 { + self.suggest_arg.push_str(", "); + } + self.in_closure = true; + self.closure_arg_span = fn_arg_span; + self.visit_expr(body); + self.in_closure = false; } } if let hir::Expr { kind: hir::ExprKind::Path(path), .. } = e { - if let hir::QPath::Resolved(_, hir::Path { segments: [seg], ..}) = path && - seg.ident.name == kw::SelfLower && self.in_closure { - self.closure_change_spans.push(e.span); + if let hir::QPath::Resolved(_, hir::Path { segments: [seg], .. }) = path + && seg.ident.name == kw::SelfLower + && self.in_closure + { + self.closure_change_spans.push(e.span); } } hir::intravisit::walk_expr(self, e); } fn visit_local(&mut self, local: &'hir hir::Local<'hir>) { - if let hir::Pat { kind: hir::PatKind::Binding(_, hir_id, _ident, _), .. } = local.pat && - let Some(init) = local.init + if let hir::Pat { kind: hir::PatKind::Binding(_, hir_id, _ident, _), .. } = + local.pat + && let Some(init) = local.init { - if let hir::Expr { kind: hir::ExprKind::Closure(&hir::Closure { - movability: None, - .. - }), .. } = init && - init.span.contains(self.capture_span) { - self.closure_local_id = Some(*hir_id); + if let hir::Expr { + kind: hir::ExprKind::Closure(&hir::Closure { movability: None, .. }), + .. + } = init + && init.span.contains(self.capture_span) + { + self.closure_local_id = Some(*hir_id); } } hir::intravisit::walk_local(self, local); } fn visit_stmt(&mut self, s: &'hir hir::Stmt<'hir>) { - if let hir::StmtKind::Semi(e) = s.kind && - let hir::ExprKind::Call(hir::Expr { kind: hir::ExprKind::Path(path), ..}, args) = e.kind && - let hir::QPath::Resolved(_, hir::Path { segments: [seg], ..}) = path && - let Res::Local(hir_id) = seg.res && - Some(hir_id) == self.closure_local_id { - let (span, arg_str) = if args.len() > 0 { - (args[0].span.shrink_to_lo(), "self, ".to_string()) - } else { - let span = e.span.trim_start(seg.ident.span).unwrap_or(e.span); - (span, "(self)".to_string()) - }; - self.closure_call_changes.push((span, arg_str)); + if let hir::StmtKind::Semi(e) = s.kind + && let hir::ExprKind::Call( + hir::Expr { kind: hir::ExprKind::Path(path), .. }, + args, + ) = e.kind + && let hir::QPath::Resolved(_, hir::Path { segments: [seg], .. }) = path + && let Res::Local(hir_id) = seg.res + && Some(hir_id) == self.closure_local_id + { + let (span, arg_str) = if args.len() > 0 { + (args[0].span.shrink_to_lo(), "self, ".to_string()) + } else { + let span = e.span.trim_start(seg.ident.span).unwrap_or(e.span); + (span, "(self)".to_string()) + }; + self.closure_call_changes.push((span, arg_str)); } hir::intravisit::walk_stmt(self, s); } } - if let Some(hir::Node::ImplItem( - hir::ImplItem { kind: hir::ImplItemKind::Fn(_fn_sig, body_id), .. } - )) = hir.find(self.mir_hir_id()) && - let Some(hir::Node::Expr(expr)) = hir.find(body_id.hir_id) { + if let Some(hir::Node::ImplItem(hir::ImplItem { + kind: hir::ImplItemKind::Fn(_fn_sig, body_id), + .. 
+ })) = hir.find(self.mir_hir_id()) + && let Some(hir::Node::Expr(expr)) = hir.find(body_id.hir_id) + { let mut finder = ExpressionFinder { capture_span: *capture_kind_span, closure_change_spans: vec![], @@ -1822,7 +1948,7 @@ ( Some(name), BorrowExplanation::UsedLater(LaterUseKind::ClosureCapture, var_or_use_span, _), - ) if borrow_spans.for_generator() || borrow_spans.for_closure() => self + ) if borrow_spans.for_coroutine() || borrow_spans.for_closure() => self .report_escaping_closure_capture( borrow_spans, borrow_span, @@ -1847,7 +1973,7 @@ span, .. }, - ) if borrow_spans.for_generator() || borrow_spans.for_closure() => self + ) if borrow_spans.for_coroutine() || borrow_spans.for_closure() => self .report_escaping_closure_capture( borrow_spans, borrow_span, @@ -1950,8 +2076,8 @@ .unwrap_or_else(|| { match &self.infcx.tcx.def_kind(self.mir_def_id()) { DefKind::Closure => "enclosing closure", - DefKind::Generator => "enclosing generator", - kind => bug!("expected closure or generator, found {:?}", kind), + DefKind::Coroutine => "enclosing coroutine", + kind => bug!("expected closure or coroutine, found {:?}", kind), } .to_string() }) @@ -1985,7 +2111,7 @@ borrow_spans.args_subdiag(&mut err, |args_span| { crate::session_diagnostics::CaptureArgLabel::Capture { - is_within: borrow_spans.for_generator(), + is_within: borrow_spans.for_coroutine(), args_span, } }); @@ -2136,6 +2262,7 @@ current: usize, found: usize, prop_expr: Option<&'tcx hir::Expr<'tcx>>, + call: Option<&'tcx hir::Expr<'tcx>>, } impl<'tcx> Visitor<'tcx> for NestedStatementVisitor<'tcx> { @@ -2145,6 +2272,11 @@ self.current -= 1; } fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) { + if let hir::ExprKind::MethodCall(_, rcvr, _, _) = expr.kind { + if self.span == rcvr.span.source_callsite() { + self.call = Some(expr); + } + } if self.span == expr.span.source_callsite() { self.found = self.current; if self.prop_expr.is_none() { @@ -2168,25 +2300,43 @@ current: 0, found: 0, prop_expr: None, + call: None, }; visitor.visit_stmt(stmt); let typeck_results = self.infcx.tcx.typeck(self.mir_def_id()); - let expr_ty: Option> = visitor.prop_expr.map(|expr| typeck_results.expr_ty(expr).peel_refs()); + let expr_ty: Option> = + visitor.prop_expr.map(|expr| typeck_results.expr_ty(expr).peel_refs()); - let is_format_arguments_item = - if let Some(expr_ty) = expr_ty - && let ty::Adt(adt, _) = expr_ty.kind() { - self.infcx.tcx.lang_items().get(LangItem::FormatArguments) == Some(adt.did()) - } else { - false - }; + let is_format_arguments_item = if let Some(expr_ty) = expr_ty + && let ty::Adt(adt, _) = expr_ty.kind() + { + self.infcx.tcx.lang_items().get(LangItem::FormatArguments) + == Some(adt.did()) + } else { + false + }; if visitor.found == 0 && stmt.span.contains(proper_span) && let Some(p) = sm.span_to_margin(stmt.span) && let Ok(s) = sm.span_to_snippet(proper_span) { + if let Some(call) = visitor.call + && let hir::ExprKind::MethodCall(path, _, [], _) = call.kind + && path.ident.name == sym::iter + && let Some(ty) = expr_ty + { + err.span_suggestion_verbose( + path.ident.span, + format!( + "consider consuming the `{ty}` when turning it into an \ + `Iterator`", + ), + "into_iter".to_string(), + Applicability::MaybeIncorrect, + ); + } if !is_format_arguments_item { let addition = format!("let binding = {};\n{}", s, " ".repeat(p)); err.multipart_suggestion_verbose( @@ -2224,7 +2374,7 @@ borrow_spans.args_subdiag(&mut err, |args_span| { crate::session_diagnostics::CaptureArgLabel::Capture { - is_within: borrow_spans.for_generator(), + 
is_within: borrow_spans.for_coroutine(), args_span, } }); @@ -2340,11 +2490,17 @@ let (sugg_span, suggestion) = match tcx.sess.source_map().span_to_snippet(args_span) { Ok(string) => { - if string.starts_with("async ") { - let pos = args_span.lo() + BytePos(6); - (args_span.with_lo(pos).with_hi(pos), "move ") - } else if string.starts_with("async|") { - let pos = args_span.lo() + BytePos(5); + let coro_prefix = if string.starts_with("async") { + // `async` is 5 chars long. Not using `.len()` to avoid the cast from `usize` to `u32` + Some(5) + } else if string.starts_with("gen") { + // `gen` is 3 chars long + Some(3) + } else { + None + }; + if let Some(n) = coro_prefix { + let pos = args_span.lo() + BytePos(n); (args_span.with_lo(pos).with_hi(pos), " move") } else { (args_span.shrink_to_lo(), "move ") @@ -2352,14 +2508,19 @@ } Err(_) => (args_span, "move || "), }; - let kind = match use_span.generator_kind() { - Some(generator_kind) => match generator_kind { - GeneratorKind::Async(async_kind) => match async_kind { - AsyncGeneratorKind::Block => "async block", - AsyncGeneratorKind::Closure => "async closure", + let kind = match use_span.coroutine_kind() { + Some(coroutine_kind) => match coroutine_kind { + CoroutineKind::Gen(kind) => match kind { + CoroutineSource::Block => "gen block", + CoroutineSource::Closure => "gen closure", + _ => bug!("gen block/closure expected, but gen function found."), + }, + CoroutineKind::Async(async_kind) => match async_kind { + CoroutineSource::Block => "async block", + CoroutineSource::Closure => "async closure", _ => bug!("async block/closure expected, but async function found."), }, - GeneratorKind::Gen => "generator", + CoroutineKind::Coroutine => "coroutine", }, None => "closure", }; @@ -2388,7 +2549,7 @@ } ConstraintCategory::CallArgument(_) => { fr_name.highlight_region_name(&mut err); - if matches!(use_span.generator_kind(), Some(GeneratorKind::Async(_))) { + if matches!(use_span.coroutine_kind(), Some(CoroutineKind::Async(_))) { err.note( "async blocks are not executed immediately and must either take a \ reference or ownership of outside variables they use", @@ -2482,9 +2643,10 @@ /* Check if the mpi is initialized as an argument */ let mut is_argument = false; for arg in self.body.args_iter() { - let path = self.move_data.rev_lookup.find_local(arg); - if mpis.contains(&path) { - is_argument = true; + if let Some(path) = self.move_data.rev_lookup.find_local(arg) { + if mpis.contains(&path) { + is_argument = true; + } } } @@ -2656,7 +2818,7 @@ loan_spans.var_subdiag(None, &mut err, Some(loan.kind), |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => BorrowUseInGenerator { var_span }, + Some(_) => BorrowUseInCoroutine { var_span }, None => BorrowUseInClosure { var_span }, } }); @@ -2672,7 +2834,7 @@ loan_spans.var_subdiag(None, &mut err, Some(loan.kind), |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => BorrowUseInGenerator { var_span }, + Some(_) => BorrowUseInCoroutine { var_span }, None => BorrowUseInClosure { var_span }, } }); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs 2023-12-21 
16:55:28.000000000 +0000 @@ -76,10 +76,10 @@ expr_finder.visit_expr(body.value); if let Some(mut expr) = expr_finder.result { while let hir::ExprKind::AddrOf(_, _, inner) - | hir::ExprKind::Unary(hir::UnOp::Deref, inner) - | hir::ExprKind::Field(inner, _) - | hir::ExprKind::MethodCall(_, inner, _, _) - | hir::ExprKind::Index(inner, _, _) = &expr.kind + | hir::ExprKind::Unary(hir::UnOp::Deref, inner) + | hir::ExprKind::Field(inner, _) + | hir::ExprKind::MethodCall(_, inner, _, _) + | hir::ExprKind::Index(inner, _, _) = &expr.kind { expr = inner; } @@ -88,10 +88,7 @@ && let hir::def::Res::Local(hir_id) = p.res && let Some(hir::Node::Pat(pat)) = tcx.hir().find(hir_id) { - err.span_label( - pat.span, - format!("binding `{ident}` declared here"), - ); + err.span_label(pat.span, format!("binding `{ident}` declared here")); } } } @@ -185,7 +182,7 @@ // Otherwise, just report the whole type (and use // the intentionally fuzzy phrase "destructor") ty::Closure(..) => ("destructor", "closure".to_owned()), - ty::Generator(..) => ("destructor", "generator".to_owned()), + ty::Coroutine(..) => ("destructor", "coroutine".to_owned()), _ => ("destructor", format!("type `{}`", local_decl.ty)), }; @@ -419,7 +416,8 @@ if self.local_names[local].is_some() && let Some((WriteKind::StorageDeadOrDrop, place)) = kind_place && let Some(borrowed_local) = place.as_local() - && self.local_names[borrowed_local].is_some() && local != borrowed_local + && self.local_names[borrowed_local].is_some() + && local != borrowed_local { should_note_order = true; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,7 +8,7 @@ use rustc_errors::{Applicability, Diagnostic}; use rustc_hir as hir; use rustc_hir::def::{CtorKind, Namespace}; -use rustc_hir::GeneratorKind; +use rustc_hir::CoroutineKind; use rustc_index::IndexSlice; use rustc_infer::infer::LateBoundRegionConversionTime; use rustc_middle::mir::tcx::PlaceTy; @@ -46,6 +46,7 @@ mod region_errors; pub(crate) use bound_region_errors::{ToUniverseInfo, UniverseInfo}; +pub(crate) use move_errors::{IllegalMoveOriginKind, MoveError}; pub(crate) use mutability_errors::AccessKind; pub(crate) use outlives_suggestion::OutlivesSuggestionBuilder; pub(crate) use region_errors::{ErrorConstraintInfo, RegionErrorKind, RegionErrors}; @@ -369,7 +370,7 @@ ty::Array(ty, _) | ty::Slice(ty) => { self.describe_field_from_ty(ty, field, variant_index, including_tuple_field) } - ty::Closure(def_id, _) | ty::Generator(def_id, _, _) => { + ty::Closure(def_id, _) | ty::Coroutine(def_id, _, _) => { // We won't be borrowck'ing here if the closure came from another crate, // so it's safe to call `expect_local`. // @@ -470,7 +471,8 @@ } } - ty.print(printer).unwrap().into_buffer() + ty.print(&mut printer).unwrap(); + printer.into_buffer() } /// Returns the name of the provided `Ty` (that must be a reference)'s region with a @@ -492,7 +494,8 @@ bug!("ty for annotation of borrow region is not a reference"); }; - region.print(printer).unwrap().into_buffer() + region.print(&mut printer).unwrap(); + printer.into_buffer() } } @@ -501,8 +504,8 @@ pub(super) enum UseSpans<'tcx> { /// The access is caused by capturing a variable for a closure. 
ClosureUse { - /// This is true if the captured variable was from a generator. - generator_kind: Option, + /// This is true if the captured variable was from a coroutine. + coroutine_kind: Option, /// The span of the args of the closure, including the `move` keyword if /// it's present. args_span: Span, @@ -569,9 +572,9 @@ } } - pub(super) fn generator_kind(self) -> Option { + pub(super) fn coroutine_kind(self) -> Option { match self { - UseSpans::ClosureUse { generator_kind, .. } => generator_kind, + UseSpans::ClosureUse { coroutine_kind, .. } => coroutine_kind, _ => None, } } @@ -596,14 +599,14 @@ ) { use crate::InitializationRequiringAction::*; use CaptureVarPathUseCause::*; - if let UseSpans::ClosureUse { generator_kind, path_span, .. } = self { - match generator_kind { + if let UseSpans::ClosureUse { coroutine_kind, path_span, .. } = self { + match coroutine_kind { Some(_) => { err.subdiagnostic(match action { - Borrow => BorrowInGenerator { path_span }, - MatchOn | Use => UseInGenerator { path_span }, - Assignment => AssignInGenerator { path_span }, - PartialAssignment => AssignPartInGenerator { path_span }, + Borrow => BorrowInCoroutine { path_span }, + MatchOn | Use => UseInCoroutine { path_span }, + Assignment => AssignInCoroutine { path_span }, + PartialAssignment => AssignPartInCoroutine { path_span }, }); } None => { @@ -624,9 +627,9 @@ handler: Option<&rustc_errors::Handler>, err: &mut Diagnostic, kind: Option, - f: impl FnOnce(Option, Span) -> CaptureVarCause, + f: impl FnOnce(Option, Span) -> CaptureVarCause, ) { - if let UseSpans::ClosureUse { generator_kind, capture_kind_span, path_span, .. } = self { + if let UseSpans::ClosureUse { coroutine_kind, capture_kind_span, path_span, .. } = self { if capture_kind_span != path_span { err.subdiagnostic(match kind { Some(kd) => match kd { @@ -642,7 +645,7 @@ None => CaptureVarKind::Move { kind_span: capture_kind_span }, }); }; - let diag = f(generator_kind, path_span); + let diag = f(coroutine_kind, path_span); match handler { Some(hd) => err.eager_subdiagnostic(hd, diag), None => err.subdiagnostic(diag), @@ -653,15 +656,15 @@ /// Returns `false` if this place is not used in a closure. pub(super) fn for_closure(&self) -> bool { match *self { - UseSpans::ClosureUse { generator_kind, .. } => generator_kind.is_none(), + UseSpans::ClosureUse { coroutine_kind, .. } => coroutine_kind.is_none(), _ => false, } } - /// Returns `false` if this place is not used in a generator. - pub(super) fn for_generator(&self) -> bool { + /// Returns `false` if this place is not used in a coroutine. + pub(super) fn for_coroutine(&self) -> bool { match *self { - UseSpans::ClosureUse { generator_kind, .. } => generator_kind.is_some(), + UseSpans::ClosureUse { coroutine_kind, .. 
} => coroutine_kind.is_some(), _ => false, } } @@ -780,19 +783,15 @@ debug!("move_spans: moved_place={:?} location={:?} stmt={:?}", moved_place, location, stmt); if let StatementKind::Assign(box (_, Rvalue::Aggregate(kind, places))) = &stmt.kind - && let AggregateKind::Closure(def_id, _) | AggregateKind::Generator(def_id, _, _) = **kind + && let AggregateKind::Closure(def_id, _) | AggregateKind::Coroutine(def_id, _, _) = + **kind { debug!("move_spans: def_id={:?} places={:?}", def_id, places); let def_id = def_id.expect_local(); - if let Some((args_span, generator_kind, capture_kind_span, path_span)) = + if let Some((args_span, coroutine_kind, capture_kind_span, path_span)) = self.closure_span(def_id, moved_place, places) { - return ClosureUse { - generator_kind, - args_span, - capture_kind_span, - path_span, - }; + return ClosureUse { coroutine_kind, args_span, capture_kind_span, path_span }; } } @@ -804,11 +803,11 @@ | FakeReadCause::ForLet(Some(closure_def_id)) => { debug!("move_spans: def_id={:?} place={:?}", closure_def_id, place); let places = &[Operand::Move(place)]; - if let Some((args_span, generator_kind, capture_kind_span, path_span)) = + if let Some((args_span, coroutine_kind, capture_kind_span, path_span)) = self.closure_span(closure_def_id, moved_place, IndexSlice::from_raw(places)) { return ClosureUse { - generator_kind, + coroutine_kind, args_span, capture_kind_span, path_span, @@ -918,21 +917,21 @@ for stmt in statements.chain(maybe_additional_statement) { if let StatementKind::Assign(box (_, Rvalue::Aggregate(kind, places))) = &stmt.kind { - let (&def_id, is_generator) = match kind { + let (&def_id, is_coroutine) = match kind { box AggregateKind::Closure(def_id, _) => (def_id, false), - box AggregateKind::Generator(def_id, _, _) => (def_id, true), + box AggregateKind::Coroutine(def_id, _, _) => (def_id, true), _ => continue, }; let def_id = def_id.expect_local(); debug!( - "borrow_spans: def_id={:?} is_generator={:?} places={:?}", - def_id, is_generator, places + "borrow_spans: def_id={:?} is_coroutine={:?} places={:?}", + def_id, is_coroutine, places ); - if let Some((args_span, generator_kind, capture_kind_span, path_span)) = + if let Some((args_span, coroutine_kind, capture_kind_span, path_span)) = self.closure_span(def_id, Place::from(target).as_ref(), places) { - return ClosureUse { generator_kind, args_span, capture_kind_span, path_span }; + return ClosureUse { coroutine_kind, args_span, capture_kind_span, path_span }; } else { return OtherUse(use_span); } @@ -946,7 +945,7 @@ OtherUse(use_span) } - /// Finds the spans of a captured place within a closure or generator. + /// Finds the spans of a captured place within a closure or coroutine. 
/// The first span is the location of the use resulting in the capture kind of the capture /// The second span is the location the use resulting in the captured path of the capture fn closure_span( @@ -954,7 +953,7 @@ def_id: LocalDefId, target_place: PlaceRef<'tcx>, places: &IndexSlice>, - ) -> Option<(Span, Option, Span, Span)> { + ) -> Option<(Span, Option, Span, Span)> { debug!( "closure_span: def_id={:?} target_place={:?} places={:?}", def_id, target_place, places @@ -972,11 +971,11 @@ { debug!("closure_span: found captured local {:?}", place); let body = self.infcx.tcx.hir().body(body); - let generator_kind = body.generator_kind(); + let coroutine_kind = body.coroutine_kind(); return Some(( fn_decl_span, - generator_kind, + coroutine_kind, captured_place.get_capture_kind_span(self.infcx.tcx), captured_place.get_path_span(self.infcx.tcx), )); @@ -1123,7 +1122,8 @@ &self.infcx.tcx.sess.parse_sess.span_diagnostic, CaptureReasonSuggest::FreshReborrow { span: move_span.shrink_to_hi(), - }); + }, + ); } if let Some(clone_trait) = tcx.lang_items().clone_trait() && let trait_ref = ty::TraitRef::new(tcx, clone_trait, [ty]) @@ -1191,7 +1191,7 @@ // another message for the same span if !is_loop_message { move_spans.var_subdiag(None, err, None, |kind, var_span| match kind { - Some(_) => CaptureVarCause::PartialMoveUseInGenerator { var_span, is_partial }, + Some(_) => CaptureVarCause::PartialMoveUseInCoroutine { var_span, is_partial }, None => CaptureVarCause::PartialMoveUseInClosure { var_span, is_partial }, }) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/move_errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/move_errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/move_errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/move_errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,16 +1,50 @@ use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed}; use rustc_middle::mir::*; -use rustc_middle::ty; -use rustc_mir_dataflow::move_paths::{ - IllegalMoveOrigin, IllegalMoveOriginKind, LookupResult, MoveError, MovePathIndex, -}; -use rustc_span::{BytePos, Span}; +use rustc_middle::ty::{self, Ty}; +use rustc_mir_dataflow::move_paths::{LookupResult, MovePathIndex}; +use rustc_span::{BytePos, ExpnKind, MacroKind, Span}; use crate::diagnostics::CapturedMessageOpt; use crate::diagnostics::{DescribePlaceOpt, UseSpans}; use crate::prefixes::PrefixSet; use crate::MirBorrowckCtxt; +#[derive(Debug)] +pub enum IllegalMoveOriginKind<'tcx> { + /// Illegal move due to attempt to move from behind a reference. + BorrowedContent { + /// The place the reference refers to: if erroneous code was trying to + /// move from `(*x).f` this will be `*x`. + target_place: Place<'tcx>, + }, + + /// Illegal move due to attempt to move from field of an ADT that + /// implements `Drop`. Rust maintains invariant that all `Drop` + /// ADT's remain fully-initialized so that user-defined destructor + /// can safely read from all of the ADT's fields. + InteriorOfTypeWithDestructor { container_ty: Ty<'tcx> }, + + /// Illegal move due to attempt to move out of a slice or array. 
+ InteriorOfSliceOrArray { ty: Ty<'tcx>, is_index: bool }, +} + +#[derive(Debug)] +pub(crate) struct MoveError<'tcx> { + place: Place<'tcx>, + location: Location, + kind: IllegalMoveOriginKind<'tcx>, +} + +impl<'tcx> MoveError<'tcx> { + pub(crate) fn new( + place: Place<'tcx>, + location: Location, + kind: IllegalMoveOriginKind<'tcx>, + ) -> Self { + MoveError { place, location, kind } + } +} + // Often when desugaring a pattern match we may have many individual moves in // MIR that are all part of one operation from the user's point-of-view. For // example: @@ -53,20 +87,18 @@ } impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { - pub(crate) fn report_move_errors(&mut self, move_errors: Vec<(Place<'tcx>, MoveError<'tcx>)>) { - let grouped_errors = self.group_move_errors(move_errors); + pub(crate) fn report_move_errors(&mut self) { + let grouped_errors = self.group_move_errors(); for error in grouped_errors { self.report(error); } } - fn group_move_errors( - &self, - errors: Vec<(Place<'tcx>, MoveError<'tcx>)>, - ) -> Vec> { + fn group_move_errors(&mut self) -> Vec> { let mut grouped_errors = Vec::new(); - for (original_path, error) in errors { - self.append_to_grouped_errors(&mut grouped_errors, original_path, error); + let errors = std::mem::take(&mut self.move_errors); + for error in errors { + self.append_to_grouped_errors(&mut grouped_errors, error); } grouped_errors } @@ -74,66 +106,58 @@ fn append_to_grouped_errors( &self, grouped_errors: &mut Vec>, - original_path: Place<'tcx>, error: MoveError<'tcx>, ) { - match error { - MoveError::UnionMove { .. } => { - unimplemented!("don't know how to report union move errors yet.") - } - MoveError::IllegalMove { cannot_move_out_of: IllegalMoveOrigin { location, kind } } => { - // Note: that the only time we assign a place isn't a temporary - // to a user variable is when initializing it. - // If that ever stops being the case, then the ever initialized - // flow could be used. - if let Some(StatementKind::Assign(box ( - place, - Rvalue::Use(Operand::Move(move_from)), - ))) = self.body.basic_blocks[location.block] - .statements - .get(location.statement_index) - .map(|stmt| &stmt.kind) + let MoveError { place: original_path, location, kind } = error; + + // Note: that the only time we assign a place isn't a temporary + // to a user variable is when initializing it. + // If that ever stops being the case, then the ever initialized + // flow could be used. + if let Some(StatementKind::Assign(box (place, Rvalue::Use(Operand::Move(move_from))))) = + self.body.basic_blocks[location.block] + .statements + .get(location.statement_index) + .map(|stmt| &stmt.kind) + { + if let Some(local) = place.as_local() { + let local_decl = &self.body.local_decls[local]; + // opt_match_place is the + // match_span is the span of the expression being matched on + // match *x.y { ... } match_place is Some(*x.y) + // ^^^^ match_span is the span of *x.y + // + // opt_match_place is None for let [mut] x = ... statements, + // whether or not the right-hand side is a place expression + if let LocalInfo::User(BindingForm::Var(VarBindingForm { + opt_match_place: Some((opt_match_place, match_span)), + binding_mode: _, + opt_ty_info: _, + pat_span: _, + })) = *local_decl.local_info() { - if let Some(local) = place.as_local() { - let local_decl = &self.body.local_decls[local]; - // opt_match_place is the - // match_span is the span of the expression being matched on - // match *x.y { ... 
} match_place is Some(*x.y) - // ^^^^ match_span is the span of *x.y - // - // opt_match_place is None for let [mut] x = ... statements, - // whether or not the right-hand side is a place expression - if let LocalInfo::User(BindingForm::Var(VarBindingForm { - opt_match_place: Some((opt_match_place, match_span)), - binding_mode: _, - opt_ty_info: _, - pat_span: _, - })) = *local_decl.local_info() - { - let stmt_source_info = self.body.source_info(location); - self.append_binding_error( - grouped_errors, - kind, - original_path, - *move_from, - local, - opt_match_place, - match_span, - stmt_source_info.span, - ); - return; - } - } + let stmt_source_info = self.body.source_info(location); + self.append_binding_error( + grouped_errors, + kind, + original_path, + *move_from, + local, + opt_match_place, + match_span, + stmt_source_info.span, + ); + return; } - - let move_spans = self.move_spans(original_path.as_ref(), location); - grouped_errors.push(GroupedMoveError::OtherIllegalMove { - use_spans: move_spans, - original_path, - kind, - }); } } + + let move_spans = self.move_spans(original_path.as_ref(), location); + grouped_errors.push(GroupedMoveError::OtherIllegalMove { + use_spans: move_spans, + original_path, + kind, + }); } fn append_binding_error( @@ -464,6 +488,8 @@ args_span, } }); + + self.add_note_for_packed_struct_derive(err, original_path.local); } } } @@ -570,4 +596,20 @@ ); } } + + /// Adds an explanatory note if the move error occurs in a derive macro + /// expansion of a packed struct. + /// Such errors happen because derive macro expansions shy away from taking + /// references to the struct's fields since doing so would be undefined behaviour + fn add_note_for_packed_struct_derive(&self, err: &mut Diagnostic, local: Local) { + let local_place: PlaceRef<'tcx> = local.into(); + let local_ty = local_place.ty(self.body.local_decls(), self.infcx.tcx).ty.peel_refs(); + + if let Some(adt) = local_ty.ty_adt_def() + && adt.repr().packed() + && let ExpnKind::Macro(MacroKind::Derive, name) = self.body.span.ctxt().outer_expn_data().kind + { + err.note(format!("`#[derive({name})]` triggers a move because taking references to the fields of a packed struct is undefined behaviour")); + } + } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,9 +9,8 @@ hir::place::PlaceBase, mir::{self, BindingForm, Local, LocalDecl, LocalInfo, LocalKind, Location}, }; -use rustc_span::source_map::DesugaringKind; use rustc_span::symbol::{kw, Symbol}; -use rustc_span::{sym, BytePos, Span}; +use rustc_span::{sym, BytePos, DesugaringKind, Span}; use rustc_target::abi::FieldIdx; use crate::diagnostics::BorrowedContentSource; @@ -62,7 +61,7 @@ local, projection: [proj_base @ .., ProjectionElem::Field(upvar_index, _)], } => { - debug_assert!(is_closure_or_generator( + debug_assert!(is_closure_or_coroutine( Place::ty_from(local, proj_base, self.body, self.infcx.tcx).ty )); @@ -122,7 +121,7 @@ { item_msg = access_place_desc; debug_assert!(self.body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty.is_ref()); - debug_assert!(is_closure_or_generator( + debug_assert!(is_closure_or_coroutine( 
the_place_err.ty(self.body, self.infcx.tcx).ty )); @@ -385,7 +384,7 @@ local, projection: [proj_base @ .., ProjectionElem::Field(upvar_index, _)], } => { - debug_assert!(is_closure_or_generator( + debug_assert!(is_closure_or_coroutine( Place::ty_from(local, proj_base, self.body, self.infcx.tcx).ty )); @@ -396,17 +395,16 @@ let upvar_hir_id = captured_place.get_root_variable(); if let Some(Node::Pat(pat)) = self.infcx.tcx.hir().find(upvar_hir_id) - && let hir::PatKind::Binding( - hir::BindingAnnotation::NONE, - _, - upvar_ident, - _, - ) = pat.kind + && let hir::PatKind::Binding(hir::BindingAnnotation::NONE, _, upvar_ident, _) = + pat.kind { if upvar_ident.name == kw::SelfLower { for (_, node) in self.infcx.tcx.hir().parent_iter(upvar_hir_id) { if let Some(fn_decl) = node.fn_decl() { - if !matches!(fn_decl.implicit_self, hir::ImplicitSelfKind::ImmRef | hir::ImplicitSelfKind::MutRef) { + if !matches!( + fn_decl.implicit_self, + hir::ImplicitSelfKind::ImmRef | hir::ImplicitSelfKind::MutRef + ) { err.span_suggestion( upvar_ident.span, "consider changing this to be mutable", @@ -573,7 +571,8 @@ self.ty, ), vec![ - vec![ // val.insert(index, rv); + vec![ + // val.insert(index, rv); ( val.span.shrink_to_hi().with_hi(index.span.lo()), ".insert(".to_string(), @@ -584,7 +583,8 @@ ), (rv.span.shrink_to_hi(), ")".to_string()), ], - vec![ // val.get_mut(index).map(|v| { *v = rv; }); + vec![ + // val.get_mut(index).map(|v| { *v = rv; }); ( val.span.shrink_to_hi().with_hi(index.span.lo()), ".get_mut(".to_string(), @@ -593,12 +593,10 @@ index.span.shrink_to_hi().with_hi(place.span.hi()), ").map(|val| { *val".to_string(), ), - ( - rv.span.shrink_to_hi(), - "; })".to_string(), - ), + (rv.span.shrink_to_hi(), "; })".to_string()), ], - vec![ // let x = val.entry(index).or_insert(rv); + vec![ + // let x = val.entry(index).or_insert(rv); (val.span.shrink_to_lo(), "let val = ".to_string()), ( val.span.shrink_to_hi().with_hi(index.span.lo()), @@ -747,10 +745,7 @@ && let Some(body_id) = hir_map.maybe_body_owned_by(local_def_id) { let body = hir_map.body(body_id); - let mut v = BindingFinder { - span: pat_span, - hir_id: None, - }; + let mut v = BindingFinder { span: pat_span, hir_id: None }; v.visit_body(body); v.hir_id } else { @@ -766,7 +761,8 @@ pat: hir::Pat { kind: hir::PatKind::Ref(_, _), .. }, .. })) = hir_map.find(hir_id) - && let Ok(name) = self.infcx.tcx.sess.source_map().span_to_snippet(local_decl.source_info.span) + && let Ok(name) = + self.infcx.tcx.sess.source_map().span_to_snippet(local_decl.source_info.span) { err.span_suggestion( pat_span, @@ -879,12 +875,11 @@ // `span` corresponds to the expression being iterated, find the `for`-loop desugared // expression with that span in order to identify potential fixes when encountering a // read-only iterator that should be mutable. 
- let mut v = Finder { - span, - expr: None, - }; + let mut v = Finder { span, expr: None }; v.visit_block(block); - if let Some(expr) = v.expr && let Call(_, [expr]) = expr.kind { + if let Some(expr) = v.expr + && let Call(_, [expr]) = expr.kind + { match expr.kind { MethodCall(path_segment, _, _, span) => { // We have `for _ in iter.read_only_iter()`, try to @@ -1032,38 +1027,42 @@ let source = self.body.source; let hir = self.infcx.tcx.hir(); if let InstanceDef::Item(def_id) = source.instance - && let Some(Node::Expr(hir::Expr { hir_id, kind, ..})) = hir.get_if_local(def_id) - && let ExprKind::Closure(closure) = kind && closure.movability == None - && let Some(Node::Expr(expr)) = hir.find_parent(*hir_id) { - let mut cur_expr = expr; - while let ExprKind::MethodCall(path_segment, recv, _, _) = cur_expr.kind { - if path_segment.ident.name == sym::iter { - // check `_ty` has `iter_mut` method - let res = self - .infcx - .tcx - .typeck(path_segment.hir_id.owner.def_id) - .type_dependent_def_id(cur_expr.hir_id) - .and_then(|def_id| self.infcx.tcx.impl_of_method(def_id)) - .map(|def_id| self.infcx.tcx.associated_items(def_id)) - .map(|assoc_items| { - assoc_items.filter_by_name_unhygienic(sym::iter_mut).peekable() - }); + && let Some(Node::Expr(hir::Expr { hir_id, kind, .. })) = hir.get_if_local(def_id) + && let ExprKind::Closure(closure) = kind + && closure.movability == None + && let Some(Node::Expr(expr)) = hir.find_parent(*hir_id) + { + let mut cur_expr = expr; + while let ExprKind::MethodCall(path_segment, recv, _, _) = cur_expr.kind { + if path_segment.ident.name == sym::iter { + // check `_ty` has `iter_mut` method + let res = self + .infcx + .tcx + .typeck(path_segment.hir_id.owner.def_id) + .type_dependent_def_id(cur_expr.hir_id) + .and_then(|def_id| self.infcx.tcx.impl_of_method(def_id)) + .map(|def_id| self.infcx.tcx.associated_items(def_id)) + .map(|assoc_items| { + assoc_items.filter_by_name_unhygienic(sym::iter_mut).peekable() + }); - if let Some(mut res) = res && res.peek().is_some() { - err.span_suggestion_verbose( - path_segment.ident.span, - "you may want to use `iter_mut` here", - "iter_mut", - Applicability::MaybeIncorrect, - ); - } - break; - } else { - cur_expr = recv; + if let Some(mut res) = res + && res.peek().is_some() + { + err.span_suggestion_verbose( + path_segment.ident.span, + "you may want to use `iter_mut` here", + "iter_mut", + Applicability::MaybeIncorrect, + ); } + break; + } else { + cur_expr = recv; } } + } } fn suggest_make_local_mut( @@ -1200,14 +1199,11 @@ } let hir_map = self.infcx.tcx.hir(); let def_id = self.body.source.def_id(); - let hir_id = if let Some(local_def_id) = def_id.as_local() && - let Some(body_id) = hir_map.maybe_body_owned_by(local_def_id) + let hir_id = if let Some(local_def_id) = def_id.as_local() + && let Some(body_id) = hir_map.maybe_body_owned_by(local_def_id) { let body = hir_map.body(body_id); - let mut v = BindingFinder { - span: err_label_span, - hir_id: None, - }; + let mut v = BindingFinder { span: err_label_span, hir_id: None }; v.visit_body(body); v.hir_id } else { @@ -1215,15 +1211,13 @@ }; if let Some(hir_id) = hir_id - && let Some(hir::Node::Local(local)) = hir_map.find(hir_id) + && let Some(hir::Node::Local(local)) = hir_map.find(hir_id) { let (changing, span, sugg) = match local.ty { Some(ty) => ("changing", ty.span, message), - None => ( - "specifying", - local.pat.span.shrink_to_hi(), - format!(": {message}"), - ), + None => { + ("specifying", local.pat.span.shrink_to_hi(), format!(": {message}")) + } }; 
err.span_suggestion_verbose( span, @@ -1234,9 +1228,7 @@ } else { err.span_label( err_label_span, - format!( - "consider changing this binding's type to be: `{message}`" - ), + format!("consider changing this binding's type to be: `{message}`"), ); } } @@ -1359,9 +1351,9 @@ None => (false, decl_span), }; - // if the binding already exists and is a reference with a explicit + // if the binding already exists and is a reference with an explicit // lifetime, then we can suggest adding ` mut`. this is special-cased from - // the path without a explicit lifetime. + // the path without an explicit lifetime. if let Ok(src) = tcx.sess.source_map().span_to_snippet(span) && src.starts_with("&'") // note that `& 'a T` is invalid so this is correct. @@ -1380,16 +1372,12 @@ let ty_mut = decl_ty.builtin_deref(true).unwrap(); assert_eq!(ty_mut.mutbl, hir::Mutability::Not); - ( - false, - span, - format!("{}mut {}", if decl_ty.is_ref() {"&"} else {"*"}, ty_mut.ty) - ) + (false, span, format!("{}mut {}", if decl_ty.is_ref() { "&" } else { "*" }, ty_mut.ty)) } } -fn is_closure_or_generator(ty: Ty<'_>) -> bool { - ty.is_closure() || ty.is_generator() +fn is_closure_or_coroutine(ty: Ty<'_>) -> bool { + ty.is_closure() || ty.is_coroutine() } /// Given a field that needs to be mutable, returns a span where the " mut " could go. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/region_errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/region_errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/region_errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/region_errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -580,7 +580,7 @@ let err = FnMutError { span: *span, ty_err: match output_ty.kind() { - ty::Generator(def, ..) if self.infcx.tcx.generator_is_async(*def) => { + ty::Coroutine(def, ..) if self.infcx.tcx.coroutine_is_async(*def) => { FnMutReturnTypeErr::ReturnAsyncBlock { span: *span } } _ if output_ty.contains_closure() => { @@ -1036,7 +1036,7 @@ .. }) => { let body = map.body(*body); - if !matches!(body.generator_kind, Some(hir::GeneratorKind::Async(..))) { + if !matches!(body.coroutine_kind, Some(hir::CoroutineKind::Async(..))) { closure_span = Some(expr.span.shrink_to_lo()); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/region_name.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/region_name.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/region_name.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/region_name.rs 2023-12-21 16:55:28.000000000 +0000 @@ -41,7 +41,7 @@ AnonRegionFromUpvar(Span, Symbol), /// The region corresponding to the return type of a closure. AnonRegionFromOutput(RegionNameHighlight, &'static str), - /// The region from a type yielded by a generator. + /// The region from a type yielded by a coroutine. AnonRegionFromYieldTy(Span, String), /// An anonymous region from an async fn. AnonRegionFromAsyncFn(Span), @@ -322,7 +322,7 @@ let def_ty = self.regioncx.universal_regions().defining_ty; let DefiningTy::Closure(_, args) = def_ty else { - // Can't have BrEnv in functions, constants or generators. + // Can't have BrEnv in functions, constants or coroutines. bug!("BrEnv outside of closure."); }; let hir::ExprKind::Closure(&hir::Closure { fn_decl_span, .. 
}) = @@ -680,16 +680,16 @@ } hir::FnRetTy::Return(hir_ty) => (fn_decl.output.span(), Some(hir_ty)), }; - let mir_description = match hir.body(body).generator_kind { - Some(hir::GeneratorKind::Async(gen)) => match gen { - hir::AsyncGeneratorKind::Block => " of async block", - hir::AsyncGeneratorKind::Closure => " of async closure", - hir::AsyncGeneratorKind::Fn => { + let mir_description = match hir.body(body).coroutine_kind { + Some(hir::CoroutineKind::Async(gen)) => match gen { + hir::CoroutineSource::Block => " of async block", + hir::CoroutineSource::Closure => " of async closure", + hir::CoroutineSource::Fn => { let parent_item = hir.get_by_def_id(hir.get_parent_item(mir_hir_id).def_id); let output = &parent_item .fn_decl() - .expect("generator lowered from async fn should be in fn") + .expect("coroutine lowered from async fn should be in fn") .output; span = output.span(); if let hir::FnRetTy::Return(ret) = output { @@ -698,7 +698,21 @@ " of async function" } }, - Some(hir::GeneratorKind::Gen) => " of generator", + Some(hir::CoroutineKind::Gen(gen)) => match gen { + hir::CoroutineSource::Block => " of gen block", + hir::CoroutineSource::Closure => " of gen closure", + hir::CoroutineSource::Fn => { + let parent_item = + hir.get_by_def_id(hir.get_parent_item(mir_hir_id).def_id); + let output = &parent_item + .fn_decl() + .expect("coroutine lowered from gen fn should be in fn") + .output; + span = output.span(); + " of gen function" + } + }, + Some(hir::CoroutineKind::Coroutine) => " of coroutine", None => " of closure", }; (span, mir_description, hir_ty) @@ -793,7 +807,7 @@ &self, fr: RegionVid, ) -> Option { - // Note: generators from `async fn` yield `()`, so we don't have to + // Note: coroutines from `async fn` yield `()`, so we don't have to // worry about them here. 
let yield_ty = self.regioncx.universal_regions().yield_ty?; debug!("give_name_if_anonymous_region_appears_in_yield_ty: yield_ty = {:?}", yield_ty); @@ -942,9 +956,7 @@ ty::ClauseKind::Projection(data) if data.projection_ty.self_ty() == ty => {} _ => return false, } - tcx.any_free_region_meets(pred, |r| { - *r == ty::ReEarlyBound(region) - }) + tcx.any_free_region_meets(pred, |r| *r == ty::ReEarlyBound(region)) }) } else { false diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/var_name.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/var_name.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/var_name.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/diagnostics/var_name.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,8 +6,8 @@ use rustc_index::IndexSlice; use rustc_middle::mir::{Body, Local}; use rustc_middle::ty::{RegionVid, TyCtxt}; -use rustc_span::source_map::Span; use rustc_span::symbol::Symbol; +use rustc_span::Span; impl<'tcx> RegionInferenceContext<'tcx> { pub(crate) fn get_var_name_and_span_for_region( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/facts.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/facts.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/facts.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/facts.rs 2023-12-21 16:55:28.000000000 +0000 @@ -41,7 +41,8 @@ impl AllFactsExt for AllFacts { /// Return fn enabled(tcx: TyCtxt<'_>) -> bool { - tcx.sess.opts.unstable_opts.nll_facts || tcx.sess.opts.unstable_opts.polonius + tcx.sess.opts.unstable_opts.nll_facts + || tcx.sess.opts.unstable_opts.polonius.is_legacy_enabled() } fn write_to_dir( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/invalidation.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/invalidation.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/invalidation.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/invalidation.rs 2023-12-21 16:55:28.000000000 +0000 @@ -161,7 +161,7 @@ } TerminatorKind::UnwindResume | TerminatorKind::Return - | TerminatorKind::GeneratorDrop => { + | TerminatorKind::CoroutineDrop => { // Invalidate all borrows of local places let borrow_set = self.borrow_set; let start = self.location_table.start_index(location); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,5 +1,8 @@ //! This query borrow-checks the MIR to (further) ensure it is not broken. 
+#![allow(internal_features)] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] #![feature(associated_type_bounds)] #![feature(box_patterns)] #![feature(let_chains)] @@ -11,7 +14,6 @@ #![feature(trusted_step)] #![feature(try_blocks)] #![recursion_limit = "256"] -#![allow(internal_features)] #[macro_use] extern crate rustc_middle; @@ -29,13 +31,8 @@ use rustc_infer::infer::{ InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin, TyCtxtInferExt, }; -use rustc_middle::mir::{ - traversal, Body, ClearCrossCrate, Local, Location, MutBorrowKind, Mutability, - NonDivergingIntrinsic, Operand, Place, PlaceElem, PlaceRef, VarDebugInfoContents, -}; -use rustc_middle::mir::{AggregateKind, BasicBlock, BorrowCheckResult, BorrowKind}; -use rustc_middle::mir::{InlineAsmOperand, Terminator, TerminatorKind}; -use rustc_middle::mir::{ProjectionElem, Promoted, Rvalue, Statement, StatementKind}; +use rustc_middle::mir::tcx::PlaceTy; +use rustc_middle::mir::*; use rustc_middle::query::Providers; use rustc_middle::traits::DefiningAnchor; use rustc_middle::ty::{self, CapturedPlace, ParamEnv, RegionVid, TyCtxt}; @@ -53,13 +50,13 @@ EverInitializedPlaces, MaybeInitializedPlaces, MaybeUninitializedPlaces, }; use rustc_mir_dataflow::move_paths::{InitIndex, MoveOutIndex, MovePathIndex}; -use rustc_mir_dataflow::move_paths::{InitLocation, LookupResult, MoveData, MoveError}; +use rustc_mir_dataflow::move_paths::{InitLocation, LookupResult, MoveData}; use rustc_mir_dataflow::Analysis; use rustc_mir_dataflow::MoveDataParamEnv; use crate::session_diagnostics::VarNeedNotMut; -use self::diagnostics::{AccessKind, RegionName}; +use self::diagnostics::{AccessKind, IllegalMoveOriginKind, MoveError, RegionName}; use self::location::LocationTable; use self::prefixes::PrefixSet; use consumers::{BodyWithBorrowckFacts, ConsumerOptions}; @@ -173,7 +170,9 @@ for var_debug_info in &input_body.var_debug_info { if let VarDebugInfoContents::Place(place) = var_debug_info.value { if let Some(local) = place.as_local() { - if let Some(prev_name) = local_names[local] && var_debug_info.name != prev_name { + if let Some(prev_name) = local_names[local] + && var_debug_info.name != prev_name + { span_bug!( var_debug_info.source_info.span, "local {:?} has many names (`{}` vs `{}`)", @@ -220,14 +219,10 @@ let location_table_owned = LocationTable::new(body); let location_table = &location_table_owned; - let (move_data, move_errors): (MoveData<'tcx>, Vec<(Place<'tcx>, MoveError<'tcx>)>) = - match MoveData::gather_moves(&body, tcx, param_env) { - Ok(move_data) => (move_data, Vec::new()), - Err((move_data, move_errors)) => (move_data, move_errors), - }; - let promoted_errors = promoted + let move_data = MoveData::gather_moves(&body, tcx, param_env, |_| true); + let promoted_move_data = promoted .iter_enumerated() - .map(|(idx, body)| (idx, MoveData::gather_moves(&body, tcx, param_env))); + .map(|(idx, body)| (idx, MoveData::gather_moves(&body, tcx, param_env, |_| true))); let mdpe = MoveDataParamEnv { move_data, param_env }; @@ -298,47 +293,60 @@ .pass_name("borrowck") .iterate_to_fixpoint(); - let movable_generator = - // The first argument is the generator type passed by value + let movable_coroutine = + // The first argument is the coroutine type passed by value if let Some(local) = body.local_decls.raw.get(1) // Get the interior types and args which typeck computed - && let ty::Generator(_, _, hir::Movability::Static) = local.ty.kind() + && let ty::Coroutine(_, _, hir::Movability::Static) = 
local.ty.kind() { false } else { true }; - for (idx, move_data_results) in promoted_errors { - let promoted_body = &promoted[idx]; + for (idx, move_data) in promoted_move_data { + use rustc_middle::mir::visit::Visitor; - if let Err((move_data, move_errors)) = move_data_results { - let mut promoted_mbcx = MirBorrowckCtxt { - infcx: &infcx, - param_env, - body: promoted_body, - move_data: &move_data, - location_table, // no need to create a real one for the promoted, it is not used - movable_generator, - fn_self_span_reported: Default::default(), - locals_are_invalidated_at_exit, - access_place_error_reported: Default::default(), - reservation_error_reported: Default::default(), - uninitialized_error_reported: Default::default(), - regioncx: regioncx.clone(), - used_mut: Default::default(), - used_mut_upvars: SmallVec::new(), - borrow_set: Rc::clone(&borrow_set), - upvars: Vec::new(), - local_names: IndexVec::from_elem(None, &promoted_body.local_decls), - region_names: RefCell::default(), - next_region_name: RefCell::new(1), - polonius_output: None, - errors, - }; - promoted_mbcx.report_move_errors(move_errors); - errors = promoted_mbcx.errors; + let promoted_body = &promoted[idx]; + let mut promoted_mbcx = MirBorrowckCtxt { + infcx: &infcx, + param_env, + body: promoted_body, + move_data: &move_data, + location_table, // no need to create a real one for the promoted, it is not used + movable_coroutine, + fn_self_span_reported: Default::default(), + locals_are_invalidated_at_exit, + access_place_error_reported: Default::default(), + reservation_error_reported: Default::default(), + uninitialized_error_reported: Default::default(), + regioncx: regioncx.clone(), + used_mut: Default::default(), + used_mut_upvars: SmallVec::new(), + borrow_set: Rc::clone(&borrow_set), + upvars: Vec::new(), + local_names: IndexVec::from_elem(None, &promoted_body.local_decls), + region_names: RefCell::default(), + next_region_name: RefCell::new(1), + polonius_output: None, + move_errors: Vec::new(), + errors, }; + MoveVisitor { ctxt: &mut promoted_mbcx }.visit_body(promoted_body); + promoted_mbcx.report_move_errors(); + errors = promoted_mbcx.errors; + + struct MoveVisitor<'a, 'cx, 'tcx> { + ctxt: &'a mut MirBorrowckCtxt<'cx, 'tcx>, + } + + impl<'tcx> Visitor<'tcx> for MoveVisitor<'_, '_, 'tcx> { + fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) { + if let Operand::Move(place) = operand { + self.ctxt.check_movable_place(location, *place); + } + } + } } let mut mbcx = MirBorrowckCtxt { @@ -347,7 +355,7 @@ body, move_data: &mdpe.move_data, location_table, - movable_generator, + movable_coroutine, locals_are_invalidated_at_exit, fn_self_span_reported: Default::default(), access_place_error_reported: Default::default(), @@ -362,6 +370,7 @@ region_names: RefCell::default(), next_region_name: RefCell::new(1), polonius_output, + move_errors: Vec::new(), errors, }; @@ -374,8 +383,6 @@ borrows: flow_borrows, }; - mbcx.report_move_errors(move_errors); - rustc_mir_dataflow::visit_results( body, traversal::reverse_postorder(body).map(|(bb, _)| bb), @@ -383,6 +390,8 @@ &mut mbcx, ); + mbcx.report_move_errors(); + // For each non-user used mutable variable, check if it's been assigned from // a user-declared local. If so, then put that local into the used_mut set. // Note that this set is expected to be small - only upvars from closures @@ -532,7 +541,7 @@ /// when MIR borrowck begins. 
location_table: &'cx LocationTable, - movable_generator: bool, + movable_coroutine: bool, /// This keeps track of whether local variables are free-ed when the function /// exits even without a `StorageDead`, which appears to be the case for /// constants. @@ -591,6 +600,7 @@ polonius_output: Option>, errors: error::BorrowckErrors<'tcx>, + move_errors: Vec>, } // Check that: @@ -721,7 +731,6 @@ } TerminatorKind::Assert { cond, expected: _, msg, target: _, unwind: _ } => { self.consume_operand(loc, (cond, span), flow_state); - use rustc_middle::mir::AssertKind; if let AssertKind::BoundsCheck { len, index } = &**msg { self.consume_operand(loc, (len, span), flow_state); self.consume_operand(loc, (index, span), flow_state); @@ -774,7 +783,7 @@ | TerminatorKind::Unreachable | TerminatorKind::UnwindResume | TerminatorKind::Return - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::FalseEdge { real_target: _, imaginary_target: _ } | TerminatorKind::FalseUnwind { real_target: _, unwind: _ } => { // no data used, thus irrelevant to borrowck @@ -793,7 +802,7 @@ match term.kind { TerminatorKind::Yield { value: _, resume: _, resume_arg: _, drop: _ } => { - if self.movable_generator { + if self.movable_coroutine { // Look for any active borrows to locals let borrow_set = self.borrow_set.clone(); for i in flow_state.borrows.iter() { @@ -805,7 +814,7 @@ TerminatorKind::UnwindResume | TerminatorKind::Return - | TerminatorKind::GeneratorDrop => { + | TerminatorKind::CoroutineDrop => { // Returning from the function implicitly kills storage for all locals and statics. // Often, the storage will already have been killed by an explicit // StorageDead, but we don't always emit those (notably on unwind paths), @@ -1322,7 +1331,7 @@ // moved into the closure and subsequently used by the closure, // in order to populate our used_mut set. match **aggregate_kind { - AggregateKind::Closure(def_id, _) | AggregateKind::Generator(def_id, _, _) => { + AggregateKind::Closure(def_id, _) | AggregateKind::Coroutine(def_id, _, _) => { let def_id = def_id.expect_local(); let BorrowCheckResult { used_mut_upvars, .. } = self.infcx.tcx.mir_borrowck(def_id); @@ -1405,7 +1414,9 @@ // As such we have to search for the local that this // capture comes from and mark it as being used as mut. - let temp_mpi = self.move_data.rev_lookup.find_local(local); + let Some(temp_mpi) = self.move_data.rev_lookup.find_local(local) else { + bug!("temporary should be tracked"); + }; let init = if let [init_index] = *self.move_data.init_path_map[temp_mpi] { &self.move_data.inits[init_index] } else { @@ -1465,6 +1476,9 @@ ); } Operand::Move(place) => { + // Check if moving from this place makes sense. + self.check_movable_place(location, place); + // move of place: check if this is move of already borrowed path self.access_place( location, @@ -1545,12 +1559,12 @@ } /// Reports an error if this is a borrow of local data. 
- /// This is called for all Yield expressions on movable generators + /// This is called for all Yield expressions on movable coroutines fn check_for_local_borrow(&mut self, borrow: &BorrowData<'tcx>, yield_span: Span) { debug!("check_for_local_borrow({:?})", borrow); if borrow_of_local_data(borrow.borrowed_place) { - let err = self.cannot_borrow_across_generator_yield( + let err = self.cannot_borrow_across_coroutine_yield( self.retrieve_borrow_spans(borrow).var_or_use(), yield_span, ); @@ -1586,6 +1600,131 @@ } } + fn check_movable_place(&mut self, location: Location, place: Place<'tcx>) { + use IllegalMoveOriginKind::*; + + let body = self.body; + let tcx = self.infcx.tcx; + let mut place_ty = PlaceTy::from_ty(body.local_decls[place.local].ty); + for (place_ref, elem) in place.iter_projections() { + match elem { + ProjectionElem::Deref => match place_ty.ty.kind() { + ty::Ref(..) | ty::RawPtr(..) => { + self.move_errors.push(MoveError::new( + place, + location, + BorrowedContent { + target_place: place_ref.project_deeper(&[elem], tcx), + }, + )); + return; + } + ty::Adt(adt, _) => { + if !adt.is_box() { + bug!("Adt should be a box type when Place is deref"); + } + } + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Foreign(_) + | ty::Str + | ty::Array(_, _) + | ty::Slice(_) + | ty::FnDef(_, _) + | ty::FnPtr(_) + | ty::Dynamic(_, _, _) + | ty::Closure(_, _) + | ty::Coroutine(_, _, _) + | ty::CoroutineWitness(..) + | ty::Never + | ty::Tuple(_) + | ty::Alias(_, _) + | ty::Param(_) + | ty::Bound(_, _) + | ty::Infer(_) + | ty::Error(_) + | ty::Placeholder(_) => { + bug!("When Place is Deref it's type shouldn't be {place_ty:#?}") + } + }, + ProjectionElem::Field(_, _) => match place_ty.ty.kind() { + ty::Adt(adt, _) => { + if adt.has_dtor(tcx) { + self.move_errors.push(MoveError::new( + place, + location, + InteriorOfTypeWithDestructor { container_ty: place_ty.ty }, + )); + return; + } + } + ty::Closure(_, _) | ty::Coroutine(_, _, _) | ty::Tuple(_) => (), + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Foreign(_) + | ty::Str + | ty::Array(_, _) + | ty::Slice(_) + | ty::RawPtr(_) + | ty::Ref(_, _, _) + | ty::FnDef(_, _) + | ty::FnPtr(_) + | ty::Dynamic(_, _, _) + | ty::CoroutineWitness(..) + | ty::Never + | ty::Alias(_, _) + | ty::Param(_) + | ty::Bound(_, _) + | ty::Infer(_) + | ty::Error(_) + | ty::Placeholder(_) => bug!( + "When Place contains ProjectionElem::Field it's type shouldn't be {place_ty:#?}" + ), + }, + ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. } => { + match place_ty.ty.kind() { + ty::Slice(_) => { + self.move_errors.push(MoveError::new( + place, + location, + InteriorOfSliceOrArray { ty: place_ty.ty, is_index: false }, + )); + return; + } + ty::Array(_, _) => (), + _ => bug!("Unexpected type {:#?}", place_ty.ty), + } + } + ProjectionElem::Index(_) => match place_ty.ty.kind() { + ty::Array(..) | ty::Slice(..) => { + self.move_errors.push(MoveError::new( + place, + location, + InteriorOfSliceOrArray { ty: place_ty.ty, is_index: true }, + )); + return; + } + _ => bug!("Unexpected type {place_ty:#?}"), + }, + // `OpaqueCast`: only transmutes the type, so no moves there. + // `Downcast` : only changes information about a `Place` without moving. + // `Subtype` : only transmutes the type, so no moves. + // So it's safe to skip these. 
+ ProjectionElem::OpaqueCast(_) + | ProjectionElem::Subtype(_) + | ProjectionElem::Downcast(_, _) => (), + } + + place_ty = place_ty.projection_ty(tcx, elem); + } + } + fn check_if_full_path_is_moved( &mut self, location: Location, @@ -1967,7 +2106,7 @@ Reservation(WriteKind::MutableBorrow(BorrowKind::Mut { kind: mut_borrow_kind })) | Write(WriteKind::MutableBorrow(BorrowKind::Mut { kind: mut_borrow_kind })) => { let is_local_mutation_allowed = match mut_borrow_kind { - // `ClosureCapture` is used for mutable variable with a immutable binding. + // `ClosureCapture` is used for mutable variable with an immutable binding. // This is only behaviour difference between `ClosureCapture` and mutable borrows. MutBorrowKind::ClosureCapture => LocalMutationIsAllowed::Yes, MutBorrowKind::Default | MutBorrowKind::TwoPhaseBorrow => { @@ -2070,7 +2209,7 @@ local: Local, flow_state: &Flows<'cx, 'tcx>, ) -> Option { - let mpi = self.move_data.rev_lookup.find_local(local); + let mpi = self.move_data.rev_lookup.find_local(local)?; let ii = &self.move_data.init_path_map[mpi]; ii.into_iter().find(|&&index| flow_state.ever_inits.contains(index)).copied() } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/nll.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/nll.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/nll.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/nll.rs 2023-12-21 16:55:28.000000000 +0000 @@ -169,10 +169,11 @@ upvars: &[Upvar<'tcx>], consumer_options: Option, ) -> NllOutput<'tcx> { + let is_polonius_legacy_enabled = infcx.tcx.sess.opts.unstable_opts.polonius.is_legacy_enabled(); let polonius_input = consumer_options.map(|c| c.polonius_input()).unwrap_or_default() - || infcx.tcx.sess.opts.unstable_opts.polonius; + || is_polonius_legacy_enabled; let polonius_output = consumer_options.map(|c| c.polonius_output()).unwrap_or_default() - || infcx.tcx.sess.opts.unstable_opts.polonius; + || is_polonius_legacy_enabled; let mut all_facts = (polonius_input || AllFacts::enabled(infcx.tcx)).then_some(AllFacts::default()); @@ -181,22 +182,26 @@ let elements = &Rc::new(RegionValueElements::new(&body)); // Run the MIR type-checker. - let MirTypeckResults { constraints, universal_region_relations, opaque_type_values } = - type_check::type_check( - infcx, - param_env, - body, - promoted, - &universal_regions, - location_table, - borrow_set, - &mut all_facts, - flow_inits, - move_data, - elements, - upvars, - polonius_input, - ); + let MirTypeckResults { + constraints, + universal_region_relations, + opaque_type_values, + live_loans, + } = type_check::type_check( + infcx, + param_env, + body, + promoted, + &universal_regions, + location_table, + borrow_set, + &mut all_facts, + flow_inits, + move_data, + elements, + upvars, + polonius_input, + ); if let Some(all_facts) = &mut all_facts { let _prof_timer = infcx.tcx.prof.generic_activity("polonius_fact_generation"); @@ -274,6 +279,7 @@ type_tests, liveness_constraints, elements, + live_loans, ); // Generate various additional constraints. 
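The nll.rs hunks above stop reading `-Zpolonius` as a plain boolean and instead ask the option which implementation is on, via `is_legacy_enabled()` (and `is_next_enabled()` elsewhere in this patch). A minimal standalone sketch of that accessor pattern, assuming a hypothetical `Polonius` mode enum; the real option type in rustc_session may be defined differently:

    // Hedged illustration only: a mode-style flag with explicit accessors,
    // mirroring the is_legacy_enabled()/is_next_enabled() calls in the hunks above.
    #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
    enum Polonius {
        #[default]
        Off,
        Legacy,
        Next,
    }

    impl Polonius {
        fn is_legacy_enabled(self) -> bool {
            matches!(self, Polonius::Legacy)
        }
        fn is_next_enabled(self) -> bool {
            matches!(self, Polonius::Next)
        }
    }

    fn main() {
        let mode = Polonius::Legacy;
        // With the legacy variant selected, only the legacy accessor reports true.
        assert!(mode.is_legacy_enabled());
        assert!(!mode.is_next_enabled());
    }

Call sites such as `AllFacts::enabled` and the fact-generation gates in nll.rs then read as a query against the selected mode rather than a bare boolean test.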
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/path_utils.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/path_utils.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/path_utils.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/path_utils.rs 2023-12-21 16:55:28.000000000 +0000 @@ -137,7 +137,7 @@ } /// Determines if a given borrow is borrowing local data -/// This is called for all Yield expressions on movable generators +/// This is called for all Yield expressions on movable coroutines pub(super) fn borrow_of_local_data(place: Place<'_>) -> bool { // Reborrow of already borrowed data is ignored // Any errors will be caught on the initial borrow @@ -165,7 +165,7 @@ match place_ref.last_projection() { Some((place_base, ProjectionElem::Field(field, _ty))) => { let base_ty = place_base.ty(body, tcx).ty; - if (base_ty.is_closure() || base_ty.is_generator()) + if (base_ty.is_closure() || base_ty.is_coroutine()) && (!by_ref || upvars[field.index()].by_ref) { Some(field) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -7,6 +7,7 @@ use rustc_data_structures::graph::scc::Sccs; use rustc_errors::Diagnostic; use rustc_hir::def_id::CRATE_DEF_ID; +use rustc_index::bit_set::SparseBitMatrix; use rustc_index::{IndexSlice, IndexVec}; use rustc_infer::infer::outlives::test_type_match; use rustc_infer::infer::region_constraints::{GenericKind, VarInfos, VerifyBound, VerifyIfEq}; @@ -21,17 +22,17 @@ use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable, TypeVisitableExt}; use rustc_span::Span; +use crate::constraints::graph::{self, NormalConstraintGraph, RegionGraph}; +use crate::dataflow::BorrowIndex; use crate::{ - constraints::{ - graph::NormalConstraintGraph, ConstraintSccIndex, OutlivesConstraint, OutlivesConstraintSet, - }, + constraints::{ConstraintSccIndex, OutlivesConstraint, OutlivesConstraintSet}, diagnostics::{RegionErrorKind, RegionErrors, UniverseInfo}, member_constraints::{MemberConstraintSet, NllMemberConstraintIndex}, nll::PoloniusOutput, region_infer::reverse_sccs::ReverseSccGraph, region_infer::values::{ - LivenessValues, PlaceholderIndices, RegionElement, RegionValueElements, RegionValues, - ToElementIndex, + LivenessValues, PlaceholderIndices, PointIndex, RegionElement, RegionValueElements, + RegionValues, ToElementIndex, }, type_check::{free_region_relations::UniversalRegionRelations, Locations}, universal_regions::UniversalRegions, @@ -119,6 +120,9 @@ /// Information about how the universally quantified regions in /// scope on this function relate to one another. universal_region_relations: Frozen>, + + /// The set of loans that are live at a given point in the CFG, when using `-Zpolonius=next`. 
+ live_loans: SparseBitMatrix, } /// Each time that `apply_member_constraint` is successful, it appends @@ -330,6 +334,7 @@ type_tests: Vec>, liveness_constraints: LivenessValues, elements: &Rc, + live_loans: SparseBitMatrix, ) -> Self { debug!("universal_regions: {:#?}", universal_regions); debug!("outlives constraints: {:#?}", outlives_constraints); @@ -383,6 +388,7 @@ type_tests, universal_regions, universal_region_relations, + live_loans, }; result.init_free_and_bound_regions(); @@ -637,11 +643,12 @@ self.scc_universes[scc] } - /// Once region solving has completed, this function will return - /// the member constraints that were applied to the value of a given - /// region `r`. See `AppliedMemberConstraint`. - pub(crate) fn applied_member_constraints(&self, r: RegionVid) -> &[AppliedMemberConstraint] { - let scc = self.constraint_sccs.scc(r); + /// Once region solving has completed, this function will return the member constraints that + /// were applied to the value of a given SCC `scc`. See `AppliedMemberConstraint`. + pub(crate) fn applied_member_constraints( + &self, + scc: ConstraintSccIndex, + ) -> &[AppliedMemberConstraint] { binary_search_util::binary_search_slice( &self.member_constraints_applied, |applied| applied.member_region_scc, @@ -683,7 +690,7 @@ // In Polonius mode, the errors about missing universal region relations are in the output // and need to be emitted or propagated. Otherwise, we need to check whether the // constraints were too strong, and if so, emit or propagate those errors. - if infcx.tcx.sess.opts.unstable_opts.polonius { + if infcx.tcx.sess.opts.unstable_opts.polonius.is_legacy_enabled() { self.check_polonius_subset_errors( outlives_requirements.as_mut(), &mut errors_buffer, @@ -1938,7 +1945,7 @@ // Member constraints can also give rise to `'r: 'x` edges that // were not part of the graph initially, so watch out for those. // (But they are extremely rare; this loop is very cold.) - for constraint in self.applied_member_constraints(r) { + for constraint in self.applied_member_constraints(self.constraint_sccs.scc(r)) { let p_c = &self.member_constraints[constraint.member_constraint_index]; let constraint = OutlivesConstraint { sup: r, @@ -2279,6 +2286,38 @@ } None } + + /// Access to the SCC constraint graph. + pub(crate) fn constraint_sccs(&self) -> &Sccs { + self.constraint_sccs.as_ref() + } + + /// Access to the region graph, built from the outlives constraints. + pub(crate) fn region_graph(&self) -> RegionGraph<'_, 'tcx, graph::Normal> { + self.constraint_graph.region_graph(&self.constraints, self.universal_regions.fr_static) + } + + /// Returns whether the given region is considered live at all points: whether it is a + /// placeholder or a free region. + pub(crate) fn is_region_live_at_all_points(&self, region: RegionVid) -> bool { + // FIXME: there must be a cleaner way to find this information. At least, when + // higher-ranked subtyping is abstracted away from the borrowck main path, we'll only + // need to check whether this is a universal region. + let origin = self.region_definition(region).origin; + let live_at_all_points = matches!( + origin, + NllRegionVariableOrigin::Placeholder(_) | NllRegionVariableOrigin::FreeRegion + ); + live_at_all_points + } + + /// Returns whether the `loan_idx` is live at the given `location`: whether its issuing + /// region is contained within the type of a variable that is live at this point. + /// Note: for now, the sets of live loans is only available when using `-Zpolonius=next`. 
+ pub(crate) fn is_loan_live_at(&self, loan_idx: BorrowIndex, location: Location) -> bool { + let point = self.liveness_constraints.point_from_location(location); + self.live_loans.contains(point, loan_idx) + } } impl<'tcx> RegionDefinition<'tcx> { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/opaque_types.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/opaque_types.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/opaque_types.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/opaque_types.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,5 +1,6 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_errors::ErrorGuaranteed; +use rustc_hir::def::DefKind; use rustc_hir::def_id::LocalDefId; use rustc_hir::OpaqueTyOrigin; use rustc_infer::infer::InferCtxt; @@ -308,20 +309,19 @@ return Ok(definition_ty); }; let param_env = tcx.param_env(def_id); - // HACK This bubble is required for this tests to pass: - // nested-return-type2-tait2.rs - // nested-return-type2-tait3.rs + + let mut parent_def_id = def_id; + while tcx.def_kind(parent_def_id) == DefKind::OpaqueTy { + parent_def_id = tcx.local_parent(parent_def_id); + } + // FIXME(-Ztrait-solver=next): We probably should use `DefiningAnchor::Error` // and prepopulate this `InferCtxt` with known opaque values, rather than // using the `Bind` anchor here. For now it's fine. let infcx = tcx .infer_ctxt() .with_next_trait_solver(next_trait_solver) - .with_opaque_type_inference(if next_trait_solver { - DefiningAnchor::Bind(def_id) - } else { - DefiningAnchor::Bubble - }) + .with_opaque_type_inference(DefiningAnchor::Bind(parent_def_id)) .build(); let ocx = ObligationCtxt::new(&infcx); let identity_args = GenericArgs::identity_for_item(tcx, def_id); @@ -361,7 +361,7 @@ if errors.is_empty() { Ok(definition_ty) } else { - Err(infcx.err_ctxt().report_fulfillment_errors(&errors)) + Err(infcx.err_ctxt().report_fulfillment_errors(errors)) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/values.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/values.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/values.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/region_infer/values.rs 2023-12-21 16:55:28.000000000 +0000 @@ -176,6 +176,11 @@ pub(crate) fn region_value_str(&self, r: N) -> String { region_value_str(self.get_elements(r).map(RegionElement::Location)) } + + #[inline] + pub(crate) fn point_from_location(&self, location: Location) -> PointIndex { + self.elements.point_from_location(location) + } } /// Maps from `ty::PlaceholderRegion` values that are used in the rest of diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/renumber.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/renumber.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/renumber.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/renumber.rs 2023-12-21 16:55:28.000000000 +0000 @@ -81,6 +81,10 @@ #[instrument(skip(self), level = "debug")] fn visit_ty(&mut self, ty: &mut Ty<'tcx>, ty_context: TyContext) { + if matches!(ty_context, TyContext::ReturnTy(_)) { + // We will renumber the return ty when called again with `TyContext::LocalDecl` + return; + } *ty = self.renumber_regions(*ty, || 
RegionCtxt::TyContext(ty_context)); debug!(?ty); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/session_diagnostics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/session_diagnostics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/session_diagnostics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/session_diagnostics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -139,23 +139,23 @@ #[derive(Subdiagnostic)] pub(crate) enum CaptureVarPathUseCause { - #[label(borrowck_borrow_due_to_use_generator)] - BorrowInGenerator { + #[label(borrowck_borrow_due_to_use_coroutine)] + BorrowInCoroutine { #[primary_span] path_span: Span, }, - #[label(borrowck_use_due_to_use_generator)] - UseInGenerator { + #[label(borrowck_use_due_to_use_coroutine)] + UseInCoroutine { #[primary_span] path_span: Span, }, - #[label(borrowck_assign_due_to_use_generator)] - AssignInGenerator { + #[label(borrowck_assign_due_to_use_coroutine)] + AssignInCoroutine { #[primary_span] path_span: Span, }, - #[label(borrowck_assign_part_due_to_use_generator)] - AssignPartInGenerator { + #[label(borrowck_assign_part_due_to_use_coroutine)] + AssignPartInCoroutine { #[primary_span] path_span: Span, }, @@ -202,8 +202,8 @@ #[derive(Subdiagnostic)] pub(crate) enum CaptureVarCause { - #[label(borrowck_var_borrow_by_use_place_in_generator)] - BorrowUsePlaceGenerator { + #[label(borrowck_var_borrow_by_use_place_in_coroutine)] + BorrowUsePlaceCoroutine { is_single_var: bool, place: String, #[primary_span] @@ -216,8 +216,8 @@ #[primary_span] var_span: Span, }, - #[label(borrowck_var_borrow_by_use_in_generator)] - BorrowUseInGenerator { + #[label(borrowck_var_borrow_by_use_in_coroutine)] + BorrowUseInCoroutine { #[primary_span] var_span: Span, }, @@ -226,8 +226,8 @@ #[primary_span] var_span: Span, }, - #[label(borrowck_var_move_by_use_in_generator)] - MoveUseInGenerator { + #[label(borrowck_var_move_by_use_in_coroutine)] + MoveUseInCoroutine { #[primary_span] var_span: Span, }, @@ -236,8 +236,8 @@ #[primary_span] var_span: Span, }, - #[label(borrowck_var_first_borrow_by_use_place_in_generator)] - FirstBorrowUsePlaceGenerator { + #[label(borrowck_var_first_borrow_by_use_place_in_coroutine)] + FirstBorrowUsePlaceCoroutine { place: String, #[primary_span] var_span: Span, @@ -248,8 +248,8 @@ #[primary_span] var_span: Span, }, - #[label(borrowck_var_second_borrow_by_use_place_in_generator)] - SecondBorrowUsePlaceGenerator { + #[label(borrowck_var_second_borrow_by_use_place_in_coroutine)] + SecondBorrowUsePlaceCoroutine { place: String, #[primary_span] var_span: Span, @@ -266,8 +266,8 @@ #[primary_span] var_span: Span, }, - #[label(borrowck_partial_var_move_by_use_in_generator)] - PartialMoveUseInGenerator { + #[label(borrowck_partial_var_move_by_use_in_coroutine)] + PartialMoveUseInCoroutine { #[primary_span] var_span: Span, is_partial: bool, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/canonical.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/canonical.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/canonical.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/canonical.rs 2023-12-21 16:55:28.000000000 +0000 @@ -49,7 +49,9 @@ // If the query has created new universes and errors are going to be emitted, register the // cause of these new universes for improved diagnostics. 
let universe = self.infcx.universe(); - if old_universe != universe && let Some(error_info) = error_info { + if old_universe != universe + && let Some(error_info) = error_info + { let universe_info = error_info.to_universe_info(old_universe); for u in (old_universe + 1)..=universe { self.borrowck_context.constraints.universe_causes.insert(u, universe_info.clone()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/free_region_relations.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/free_region_relations.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/free_region_relations.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/free_region_relations.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,7 +8,7 @@ use rustc_middle::mir::ConstraintCategory; use rustc_middle::traits::query::OutlivesBound; use rustc_middle::ty::{self, RegionVid, Ty}; -use rustc_span::{Span, DUMMY_SP}; +use rustc_span::{ErrorGuaranteed, Span, DUMMY_SP}; use rustc_trait_selection::traits::query::type_op::{self, TypeOp}; use std::rc::Rc; use type_op::TypeOpOutput; @@ -318,7 +318,8 @@ .param_env .and(type_op::implied_outlives_bounds::ImpliedOutlivesBounds { ty }) .fully_perform(self.infcx, DUMMY_SP) - .unwrap_or_else(|_| bug!("failed to compute implied bounds {:?}", ty)); + .map_err(|_: ErrorGuaranteed| debug!("failed to compute implied bounds {:?}", ty)) + .ok()?; debug!(?bounds, ?constraints); self.add_outlives_bounds(bounds); constraints diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/input_output.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/input_output.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/input_output.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/input_output.rs 2023-12-21 16:55:28.000000000 +0000 @@ -101,7 +101,7 @@ ); // We will not have a universal_regions.yield_ty if we yield (by accident) - // outside of a generator and return an `impl Trait`, so emit a delay_span_bug + // outside of a coroutine and return an `impl Trait`, so emit a delay_span_bug // because we don't want to panic in an assert here if we've already got errors. 
if body.yield_ty().is_some() != universal_regions.yield_ty.is_some() { self.tcx().sess.delay_span_bug( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/liveness/trace.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/liveness/trace.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/liveness/trace.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/liveness/trace.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,10 +1,12 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; -use rustc_index::bit_set::HybridBitSet; +use rustc_data_structures::graph::WithSuccessors; +use rustc_index::bit_set::{HybridBitSet, SparseBitMatrix}; use rustc_index::interval::IntervalSet; use rustc_infer::infer::canonical::QueryRegionConstraints; +use rustc_infer::infer::outlives::for_liveness; use rustc_middle::mir::{BasicBlock, Body, ConstraintCategory, Local, Location}; use rustc_middle::traits::query::DropckOutlivesResult; -use rustc_middle::ty::{Ty, TyCtxt, TypeVisitable, TypeVisitableExt}; +use rustc_middle::ty::{RegionVid, Ty, TyCtxt, TypeVisitable, TypeVisitableExt}; use rustc_span::DUMMY_SP; use rustc_trait_selection::traits::query::type_op::outlives::DropckOutlives; use rustc_trait_selection::traits::query::type_op::{TypeOp, TypeOpOutput}; @@ -14,6 +16,7 @@ use rustc_mir_dataflow::move_paths::{HasMoveData, MoveData, MovePathIndex}; use rustc_mir_dataflow::ResultsCursor; +use crate::dataflow::BorrowIndex; use crate::{ region_infer::values::{self, PointIndex, RegionValueElements}, type_check::liveness::local_use_map::LocalUseMap, @@ -50,6 +53,33 @@ let local_use_map = &LocalUseMap::build(&relevant_live_locals, elements, body); + // When using `-Zpolonius=next`, compute the set of loans that can reach a given region. + let num_loans = typeck.borrowck_context.borrow_set.len(); + let mut inflowing_loans = SparseBitMatrix::new(num_loans); + if typeck.tcx().sess.opts.unstable_opts.polonius.is_next_enabled() { + let borrowck_context = &typeck.borrowck_context; + let borrow_set = &borrowck_context.borrow_set; + let constraint_set = &borrowck_context.constraints.outlives_constraints; + + let num_region_vars = typeck.infcx.num_region_vars(); + let graph = constraint_set.graph(num_region_vars); + let region_graph = + graph.region_graph(&constraint_set, borrowck_context.universal_regions.fr_static); + + // Traverse each issuing region's constraints, and record the loan as flowing into the + // outlived region. + for (loan, issuing_region_data) in borrow_set.iter_enumerated() { + for succ in region_graph.depth_first_search(issuing_region_data.region) { + // We don't need to mention that a loan flows into its issuing region. + if succ == issuing_region_data.region { + continue; + } + + inflowing_loans.insert(succ, loan); + } + } + }; + let cx = LivenessContext { typeck, body, @@ -58,6 +88,7 @@ local_use_map, move_data, drop_data: FxIndexMap::default(), + inflowing_loans, }; let mut results = LivenessResults::new(cx); @@ -71,7 +102,7 @@ results.dropck_boring_locals(boring_locals); } -/// Contextual state for the type-liveness generator. +/// Contextual state for the type-liveness coroutine. struct LivenessContext<'me, 'typeck, 'flow, 'tcx> { /// Current type-checker, giving us our inference context etc. typeck: &'me mut TypeChecker<'typeck, 'tcx>, @@ -95,6 +126,9 @@ /// Index indicating where each variable is assigned, used, or /// dropped. 
local_use_map: &'me LocalUseMap, + + /// Set of loans that flow into a given region, when using `-Zpolonius=next`. + inflowing_loans: SparseBitMatrix, } struct DropData<'tcx> { @@ -284,7 +318,7 @@ fn compute_drop_live_points_for(&mut self, local: Local) { debug!("compute_drop_live_points_for(local={:?})", local); - let mpi = self.cx.move_data.rev_lookup.find_local(local); + let Some(mpi) = self.cx.move_data.rev_lookup.find_local(local) else { return }; debug!("compute_drop_live_points_for: mpi = {:?}", mpi); // Find the drops where `local` is initialized. @@ -486,7 +520,13 @@ ) { debug!("add_use_live_facts_for(value={:?})", value); - Self::make_all_regions_live(self.elements, &mut self.typeck, value, live_at) + Self::make_all_regions_live( + self.elements, + &mut self.typeck, + value, + live_at, + &self.inflowing_loans, + ); } /// Some variable with type `live_ty` is "drop live" at `location` @@ -537,7 +577,13 @@ // All things in the `outlives` array may be touched by // the destructor and must be live at this point. for &kind in &drop_data.dropck_result.kinds { - Self::make_all_regions_live(self.elements, &mut self.typeck, kind, live_at); + Self::make_all_regions_live( + self.elements, + &mut self.typeck, + kind, + live_at, + &self.inflowing_loans, + ); polonius::add_drop_of_var_derefs_origin(&mut self.typeck, dropped_local, &kind); } @@ -548,6 +594,7 @@ typeck: &mut TypeChecker<'_, 'tcx>, value: impl TypeVisitable>, live_at: &IntervalSet, + inflowing_loans: &SparseBitMatrix, ) { debug!("make_all_regions_live(value={:?})", value); debug!( @@ -555,16 +602,38 @@ values::location_set_str(elements, live_at.iter()), ); - let tcx = typeck.tcx(); - tcx.for_each_free_region(&value, |live_region| { - let live_region_vid = - typeck.borrowck_context.universal_regions.to_region_vid(live_region); - typeck - .borrowck_context - .constraints - .liveness_constraints - .add_elements(live_region_vid, live_at); + // When using `-Zpolonius=next`, we want to record the loans that flow into this value's + // regions as being live at the given `live_at` points: this will be used to compute the + // location where a loan goes out of scope. + let num_loans = typeck.borrowck_context.borrow_set.len(); + let value_loans = &mut HybridBitSet::new_empty(num_loans); + + value.visit_with(&mut for_liveness::FreeRegionsVisitor { + tcx: typeck.tcx(), + param_env: typeck.param_env, + op: |r| { + let live_region_vid = typeck.borrowck_context.universal_regions.to_region_vid(r); + + typeck + .borrowck_context + .constraints + .liveness_constraints + .add_elements(live_region_vid, live_at); + + // There can only be inflowing loans for this region when we are using + // `-Zpolonius=next`. + if let Some(inflowing) = inflowing_loans.row(live_region_vid) { + value_loans.union(inflowing); + } + }, }); + + // Record the loans reaching the value. 
+ if !value_loans.is_empty() { + for point in live_at.iter() { + typeck.borrowck_context.live_loans.union_row(point, value_loans); + } + } } fn compute_drop_data( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/type_check/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -14,6 +14,7 @@ use rustc_hir::def::DefKind; use rustc_hir::def_id::LocalDefId; use rustc_hir::lang_items::LangItem; +use rustc_index::bit_set::SparseBitMatrix; use rustc_index::{IndexSlice, IndexVec}; use rustc_infer::infer::canonical::QueryRegionConstraints; use rustc_infer::infer::outlives::env::RegionBoundPairs; @@ -50,6 +51,8 @@ use rustc_mir_dataflow::move_paths::MoveData; use rustc_mir_dataflow::ResultsCursor; +use crate::dataflow::BorrowIndex; +use crate::region_infer::values::PointIndex; use crate::session_diagnostics::{MoveUnsized, SimdShuffleLastConst}; use crate::{ borrow_set::BorrowSet, @@ -163,6 +166,9 @@ debug!(?normalized_inputs_and_output); + // When using `-Zpolonius=next`, liveness will record the set of live loans per point. + let mut live_loans = SparseBitMatrix::new(borrow_set.len()); + let mut borrowck_context = BorrowCheckContext { universal_regions, location_table, @@ -170,6 +176,7 @@ all_facts, constraints: &mut constraints, upvars, + live_loans: &mut live_loans, }; let mut checker = TypeChecker::new( @@ -181,11 +188,7 @@ &mut borrowck_context, ); - // FIXME(-Ztrait-solver=next): A bit dubious that we're only registering - // predefined opaques in the typeck root. - if infcx.next_trait_solver() && !infcx.tcx.is_typeck_child(body.source.def_id()) { - checker.register_predefined_opaques_in_new_solver(); - } + checker.check_user_type_annotations(); let mut verifier = TypeVerifier::new(&mut checker, promoted); verifier.visit_body(&body); @@ -240,7 +243,7 @@ }) .collect(); - MirTypeckResults { constraints, universal_region_relations, opaque_type_values } + MirTypeckResults { constraints, universal_region_relations, opaque_type_values, live_loans } } fn translate_outlives_facts(typeck: &mut TypeChecker<'_, '_>) { @@ -664,8 +667,8 @@ PlaceTy { ty: base_ty, variant_index: Some(index) } } } - // We do not need to handle generators here, because this runs - // before the generator transform stage. + // We do not need to handle coroutines here, because this runs + // before the coroutine transform stage. 
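The liveness/trace.rs hunks above describe, for `-Zpolonius=next`, computing which loans "flow into" each region by walking the outlives-constraint graph from each loan's issuing region, and then marking those loans live wherever a value with that region is live. The sketch below shows only the reachability step, with plain `usize` indices and `HashSet`s standing in for the compiler's interned regions, `SparseBitMatrix`, and constraint graph; it is a simplified illustration, not the borrowck implementation.

    use std::collections::{HashMap, HashSet};

    type Region = usize;
    type Loan = usize;

    // For each loan, walk the outlives edges from its issuing region and record
    // the loan as flowing into every region reachable from it.
    fn inflowing_loans(
        outlives: &HashMap<Region, Vec<Region>>,
        issuing_region_of: &[Region], // indexed by loan
    ) -> HashMap<Region, HashSet<Loan>> {
        let mut inflowing: HashMap<Region, HashSet<Loan>> = HashMap::new();
        for (loan, &issuing) in issuing_region_of.iter().enumerate() {
            let mut stack = vec![issuing];
            let mut seen = HashSet::new();
            while let Some(r) = stack.pop() {
                if !seen.insert(r) {
                    continue;
                }
                // A loan trivially flows into its own issuing region; skip it.
                if r != issuing {
                    inflowing.entry(r).or_default().insert(loan);
                }
                stack.extend(outlives.get(&r).into_iter().flatten().copied());
            }
        }
        inflowing
    }

    fn main() {
        // Loan 0 is issued in region 1; region 1 outlives region 2.
        let outlives = HashMap::from([(1, vec![2])]);
        let inflowing = inflowing_loans(&outlives, &[1]);
        assert!(inflowing[&2].contains(&0));
    }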
_ => { let ty = if let Some(name) = maybe_name { span_mirbug_and_err!( @@ -767,13 +770,13 @@ let (variant, args) = match base_ty { PlaceTy { ty, variant_index: Some(variant_index) } => match *ty.kind() { ty::Adt(adt_def, args) => (adt_def.variant(variant_index), args), - ty::Generator(def_id, args, _) => { - let mut variants = args.as_generator().state_tys(def_id, tcx); + ty::Coroutine(def_id, args, _) => { + let mut variants = args.as_coroutine().state_tys(def_id, tcx); let Some(mut variant) = variants.nth(variant_index.into()) else { bug!( - "variant_index of generator out of range: {:?}/{:?}", + "variant_index of coroutine out of range: {:?}/{:?}", variant_index, - args.as_generator().state_tys(def_id, tcx).count() + args.as_coroutine().state_tys(def_id, tcx).count() ); }; return match variant.nth(field.index()) { @@ -781,7 +784,7 @@ None => Err(FieldAccessError::OutOfRange { field_count: variant.count() }), }; } - _ => bug!("can't have downcast of non-adt non-generator type"), + _ => bug!("can't have downcast of non-adt non-coroutine type"), }, PlaceTy { ty, variant_index: None } => match *ty.kind() { ty::Adt(adt_def, args) if !adt_def.is_enum() => { @@ -795,13 +798,13 @@ }), }; } - ty::Generator(_, args, _) => { + ty::Coroutine(_, args, _) => { // Only prefix fields (upvars and current state) are // accessible without a variant index. - return match args.as_generator().prefix_tys().get(field.index()) { + return match args.as_coroutine().prefix_tys().get(field.index()) { Some(ty) => Ok(*ty), None => Err(FieldAccessError::OutOfRange { - field_count: args.as_generator().prefix_tys().len(), + field_count: args.as_coroutine().prefix_tys().len(), }), }; } @@ -855,12 +858,21 @@ borrow_set: &'a BorrowSet<'tcx>, pub(crate) constraints: &'a mut MirTypeckRegionConstraints<'tcx>, upvars: &'a [Upvar<'tcx>], + + /// The set of loans that are live at a given point in the CFG, filled in by `liveness::trace`, + /// when using `-Zpolonius=next`. + pub(crate) live_loans: &'a mut SparseBitMatrix, } +/// Holder struct for passing results from MIR typeck to the rest of the non-lexical regions +/// inference computation. pub(crate) struct MirTypeckResults<'tcx> { pub(crate) constraints: MirTypeckRegionConstraints<'tcx>, pub(crate) universal_region_relations: Frozen>, pub(crate) opaque_type_values: FxIndexMap, OpaqueHiddenType<'tcx>>, + + /// The set of loans that are live at a given point in the CFG, when using `-Zpolonius=next`. + pub(crate) live_loans: SparseBitMatrix, } /// A collection of region constraints that must be satisfied for the @@ -1005,7 +1017,13 @@ borrowck_context, reported_errors: Default::default(), }; - checker.check_user_type_annotations(); + + // FIXME(-Ztrait-solver=next): A bit dubious that we're only registering + // predefined opaques in the typeck root. + if infcx.next_trait_solver() && !infcx.tcx.is_typeck_child(body.source.def_id()) { + checker.register_predefined_opaques_in_new_solver(); + } + checker } @@ -1335,7 +1353,7 @@ | TerminatorKind::UnwindResume | TerminatorKind::UnwindTerminate(_) | TerminatorKind::Return - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::Unreachable | TerminatorKind::Drop { .. } | TerminatorKind::FalseEdge { .. 
} @@ -1452,7 +1470,7 @@ let value_ty = value.ty(body, tcx); match body.yield_ty() { - None => span_mirbug!(self, term, "yield in non-generator"), + None => span_mirbug!(self, term, "yield in non-coroutine"), Some(ty) => { if let Err(terr) = self.sub_types( value_ty, @@ -1624,7 +1642,7 @@ } TerminatorKind::UnwindTerminate(_) => { if !is_cleanup { - span_mirbug!(self, block_data, "abort on non-cleanup block!") + span_mirbug!(self, block_data, "terminate on non-cleanup block!") } } TerminatorKind::Return => { @@ -1632,9 +1650,9 @@ span_mirbug!(self, block_data, "return on cleanup block") } } - TerminatorKind::GeneratorDrop { .. } => { + TerminatorKind::CoroutineDrop { .. } => { if is_cleanup { - span_mirbug!(self, block_data, "generator_drop in cleanup block") + span_mirbug!(self, block_data, "coroutine_drop in cleanup block") } } TerminatorKind::Yield { resume, drop, .. } => { @@ -1781,14 +1799,14 @@ }), } } - AggregateKind::Generator(_, args, _) => { + AggregateKind::Coroutine(_, args, _) => { // It doesn't make sense to look at a field beyond the prefix; // these require a variant index, and are not initialized in // aggregate rvalues. - match args.as_generator().prefix_tys().get(field_index.as_usize()) { + match args.as_coroutine().prefix_tys().get(field_index.as_usize()) { Some(ty) => Ok(*ty), None => Err(FieldAccessError::OutOfRange { - field_count: args.as_generator().prefix_tys().len(), + field_count: args.as_coroutine().prefix_tys().len(), }), } } @@ -2381,7 +2399,7 @@ AggregateKind::Array(_) => None, AggregateKind::Tuple => None, AggregateKind::Closure(_, _) => None, - AggregateKind::Generator(_, _, _) => None, + AggregateKind::Coroutine(_, _, _) => None, }, } } @@ -2609,7 +2627,7 @@ // desugaring. A closure gets desugared to a struct, and // these extra requirements are basically like where // clauses on the struct. - AggregateKind::Closure(def_id, args) | AggregateKind::Generator(def_id, args, _) => { + AggregateKind::Closure(def_id, args) | AggregateKind::Coroutine(def_id, args, _) => { (def_id, self.prove_closure_bounds(tcx, def_id.expect_local(), args, location)) } @@ -2657,7 +2675,7 @@ let parent_args = match tcx.def_kind(def_id) { DefKind::Closure => args.as_closure().parent_args(), - DefKind::Generator => args.as_generator().parent_args(), + DefKind::Coroutine => args.as_coroutine().parent_args(), DefKind::InlineConst => args.as_inline_const().parent_args(), other => bug!("unexpected item {:?}", other), }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/universal_regions.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/universal_regions.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/universal_regions.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_borrowck/src/universal_regions.rs 2023-12-21 16:55:28.000000000 +0000 @@ -58,7 +58,7 @@ num_universals: usize, /// The "defining" type for this function, with all universal - /// regions instantiated. For a closure or generator, this is the + /// regions instantiated. For a closure or coroutine, this is the /// closure type, but for a top-level function it's the `FnDef`. pub defining_ty: DefiningTy<'tcx>, @@ -91,10 +91,10 @@ /// `ClosureArgs::closure_sig_ty`. Closure(DefId, GenericArgsRef<'tcx>), - /// The MIR is a generator. The signature is that generators take + /// The MIR is a coroutine. The signature is that coroutines take /// no parameters and return the result of - /// `ClosureArgs::generator_return_ty`. 
- Generator(DefId, GenericArgsRef<'tcx>, hir::Movability), + /// `ClosureArgs::coroutine_return_ty`. + Coroutine(DefId, GenericArgsRef<'tcx>, hir::Movability), /// The MIR is a fn item with the given `DefId` and args. The signature /// of the function can be bound then with the `fn_sig` query. @@ -112,13 +112,13 @@ impl<'tcx> DefiningTy<'tcx> { /// Returns a list of all the upvar types for this MIR. If this is - /// not a closure or generator, there are no upvars, and hence it + /// not a closure or coroutine, there are no upvars, and hence it /// will be an empty list. The order of types in this list will /// match up with the upvar order in the HIR, typesystem, and MIR. pub fn upvar_tys(self) -> &'tcx ty::List> { match self { DefiningTy::Closure(_, args) => args.as_closure().upvar_tys(), - DefiningTy::Generator(_, args, _) => args.as_generator().upvar_tys(), + DefiningTy::Coroutine(_, args, _) => args.as_coroutine().upvar_tys(), DefiningTy::FnDef(..) | DefiningTy::Const(..) | DefiningTy::InlineConst(..) => { ty::List::empty() } @@ -130,7 +130,7 @@ /// user's code. pub fn implicit_inputs(self) -> usize { match self { - DefiningTy::Closure(..) | DefiningTy::Generator(..) => 1, + DefiningTy::Closure(..) | DefiningTy::Coroutine(..) => 1, DefiningTy::FnDef(..) | DefiningTy::Const(..) | DefiningTy::InlineConst(..) => 0, } } @@ -146,7 +146,7 @@ pub fn def_id(&self) -> DefId { match *self { DefiningTy::Closure(def_id, ..) - | DefiningTy::Generator(def_id, ..) + | DefiningTy::Coroutine(def_id, ..) | DefiningTy::FnDef(def_id, ..) | DefiningTy::Const(def_id, ..) | DefiningTy::InlineConst(def_id, ..) => def_id, @@ -164,7 +164,7 @@ /// be able to map them to our internal `RegionVid`. This is /// basically equivalent to an `GenericArgs`, except that it also /// contains an entry for `ReStatic` -- it might be nice to just - /// use a args, and then handle `ReStatic` another way. + /// use an args, and then handle `ReStatic` another way. indices: FxHashMap, RegionVid>, /// The vid assigned to `'static`. Used only for diagnostics. @@ -178,7 +178,7 @@ Global, /// An **external** region is only relevant for - /// closures, generators, and inline consts. In that + /// closures, coroutines, and inline consts. In that /// case, it refers to regions that are free in the type /// -- basically, something bound in the surrounding context. /// @@ -196,7 +196,7 @@ /// Here, the lifetimes `'a` and `'b` would be **external** to the /// closure. /// - /// If we are not analyzing a closure/generator/inline-const, + /// If we are not analyzing a closure/coroutine/inline-const, /// there are no external lifetimes. External, @@ -290,7 +290,7 @@ (FIRST_GLOBAL_INDEX..self.num_universals).map(RegionVid::from_usize) } - /// Returns `true` if `r` is classified as an local region. + /// Returns `true` if `r` is classified as a local region. pub fn is_local_free_region(&self, r: RegionVid) -> bool { self.region_classification(r) == Some(RegionClassification::Local) } @@ -354,7 +354,7 @@ err.note(format!("late-bound region is {:?}", self.to_region_vid(r))); }); } - DefiningTy::Generator(def_id, args, _) => { + DefiningTy::Coroutine(def_id, args, _) => { let v = with_no_trimmed_paths!( args[tcx.generics_of(def_id).parent_count..] 
.iter() @@ -362,7 +362,7 @@ .collect::>() ); err.note(format!( - "defining type: {} with generator args [\n {},\n]", + "defining type: {} with coroutine args [\n {},\n]", tcx.def_path_str_with_args(def_id, args), v.join(",\n "), )); @@ -426,13 +426,13 @@ let typeck_root_def_id = self.infcx.tcx.typeck_root_def_id(self.mir_def.to_def_id()); - // If this is a 'root' body (not a closure/generator/inline const), then + // If this is a 'root' body (not a closure/coroutine/inline const), then // there are no extern regions, so the local regions start at the same // position as the (empty) sub-list of extern regions let first_local_index = if self.mir_def.to_def_id() == typeck_root_def_id { first_extern_index } else { - // If this is a closure, generator, or inline-const, then the late-bound regions from the enclosing + // If this is a closure, coroutine, or inline-const, then the late-bound regions from the enclosing // function/closures are actually external regions to us. For example, here, 'a is not local // to the closure c (although it is local to the fn foo): // fn foo<'a>() { @@ -528,7 +528,7 @@ debug!("build: local regions = {}..{}", first_local_index, num_universals); let yield_ty = match defining_ty { - DefiningTy::Generator(_, args, _) => Some(args.as_generator().yield_ty()), + DefiningTy::Coroutine(_, args, _) => Some(args.as_coroutine().yield_ty()), _ => None, }; @@ -563,8 +563,8 @@ match *defining_ty.kind() { ty::Closure(def_id, args) => DefiningTy::Closure(def_id, args), - ty::Generator(def_id, args, movability) => { - DefiningTy::Generator(def_id, args, movability) + ty::Coroutine(def_id, args, movability) => { + DefiningTy::Coroutine(def_id, args, movability) } ty::FnDef(def_id, args) => DefiningTy::FnDef(def_id, args), _ => span_bug!( @@ -621,7 +621,7 @@ let identity_args = GenericArgs::identity_for_item(tcx, typeck_root_def_id); let fr_args = match defining_ty { DefiningTy::Closure(_, args) - | DefiningTy::Generator(_, args, _) + | DefiningTy::Coroutine(_, args, _) | DefiningTy::InlineConst(_, args) => { // In the case of closures, we rely on the fact that // the first N elements in the ClosureArgs are @@ -686,13 +686,13 @@ ) } - DefiningTy::Generator(def_id, args, movability) => { + DefiningTy::Coroutine(def_id, args, movability) => { assert_eq!(self.mir_def.to_def_id(), def_id); - let resume_ty = args.as_generator().resume_ty(); - let output = args.as_generator().return_ty(); - let generator_ty = Ty::new_generator(tcx, def_id, args, movability); + let resume_ty = args.as_coroutine().resume_ty(); + let output = args.as_coroutine().return_ty(); + let coroutine_ty = Ty::new_coroutine(tcx, def_id, args, movability); let inputs_and_output = - self.infcx.tcx.mk_type_list(&[generator_ty, resume_ty, output]); + self.infcx.tcx.mk_type_list(&[coroutine_ty, resume_ty, output]); ty::Binder::dummy(inputs_and_output) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -7,6 +7,7 @@ doctest = false [dependencies] +# tidy-alphabetical-start rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_attr = { path = "../rustc_attr" } @@ -14,16 +15,17 @@ rustc_errors = { path = "../rustc_errors" } rustc_expand = { path = 
"../rustc_expand" } rustc_feature = { path = "../rustc_feature" } +rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_index = { path = "../rustc_index" } rustc_lexer = { path = "../rustc_lexer" } rustc_lint_defs = { path = "../rustc_lint_defs" } rustc_macros = { path = "../rustc_macros" } -rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_parse_format = { path = "../rustc_parse_format" } rustc_parse = { path = "../rustc_parse" } +rustc_parse_format = { path = "../rustc_parse_format" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } thin-vec = "0.2.12" tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -137,6 +137,20 @@ .label = positional arguments must be before named arguments .named_args = named argument +builtin_macros_format_redundant_args = redundant {$n -> + [one] argument + *[more] arguments + } + .help = {$n -> + [one] the formatting string already captures the binding directly, it doesn't need to be included in the argument list + *[more] the formatting strings already captures the bindings directly, they don't need to be included in the argument list + } + .note = {$n -> + [one] the formatting specifier is referencing the binding already + *[more] the formatting specifiers are referencing the bindings already + } + .suggestion = this can be removed + builtin_macros_format_remove_raw_ident = remove the `r#` builtin_macros_format_requires_string = requires at least a format string argument diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/alloc_error_handler.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/alloc_error_handler.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/alloc_error_handler.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/alloc_error_handler.rs 2023-12-21 16:55:28.000000000 +0000 @@ -21,20 +21,22 @@ // Allow using `#[alloc_error_handler]` on an item statement // FIXME - if we get deref patterns, use them to reduce duplication here - let (item, is_stmt, sig_span) = - if let Annotatable::Item(item) = &item - && let ItemKind::Fn(fn_kind) = &item.kind - { - (item, false, ecx.with_def_site_ctxt(fn_kind.sig.span)) - } else if let Annotatable::Stmt(stmt) = &item - && let StmtKind::Item(item) = &stmt.kind - && let ItemKind::Fn(fn_kind) = &item.kind - { - (item, true, ecx.with_def_site_ctxt(fn_kind.sig.span)) - } else { - ecx.sess.parse_sess.span_diagnostic.emit_err(errors::AllocErrorMustBeFn {span: item.span() }); - return vec![orig_item]; - }; + let (item, is_stmt, sig_span) = if let Annotatable::Item(item) = &item + && let ItemKind::Fn(fn_kind) = &item.kind + { + (item, false, ecx.with_def_site_ctxt(fn_kind.sig.span)) + } else if let Annotatable::Stmt(stmt) = &item + && let StmtKind::Item(item) = &stmt.kind + && let ItemKind::Fn(fn_kind) = &item.kind + { + (item, true, ecx.with_def_site_ctxt(fn_kind.sig.span)) + } else { + ecx.sess + .parse_sess + .span_diagnostic + 
.emit_err(errors::AllocErrorMustBeFn { span: item.span() }); + return vec![orig_item]; + }; // Generate a bunch of new items using the AllocFnFactory let span = ecx.with_def_site_ctxt(item.span); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/assert/context.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/assert/context.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/assert/context.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/assert/context.rs 2023-12-21 16:55:28.000000000 +0000 @@ -193,10 +193,9 @@ fn manage_cond_expr(&mut self, expr: &mut P) { match &mut expr.kind { ExprKind::AddrOf(_, mutability, local_expr) => { - self.with_is_consumed_management( - matches!(mutability, Mutability::Mut), - |this| this.manage_cond_expr(local_expr) - ); + self.with_is_consumed_management(matches!(mutability, Mutability::Mut), |this| { + this.manage_cond_expr(local_expr) + }); } ExprKind::Array(local_exprs) => { for local_expr in local_exprs { @@ -223,7 +222,7 @@ |this| { this.manage_cond_expr(lhs); this.manage_cond_expr(rhs); - } + }, ); } ExprKind::Call(_, local_exprs) => { @@ -285,10 +284,9 @@ } } ExprKind::Unary(un_op, local_expr) => { - self.with_is_consumed_management( - matches!(un_op, UnOp::Neg | UnOp::Not), - |this| this.manage_cond_expr(local_expr) - ); + self.with_is_consumed_management(matches!(un_op, UnOp::Neg | UnOp::Not), |this| { + this.manage_cond_expr(local_expr) + }); } // Expressions that are not worth or can not be captured. // @@ -296,7 +294,7 @@ // sync with the `rfc-2011-nicer-assert-messages/all-expr-kinds.rs` test. ExprKind::Assign(_, _, _) | ExprKind::AssignOp(_, _, _) - | ExprKind::Async(_, _) + | ExprKind::Gen(_, _, _) | ExprKind::Await(_, _) | ExprKind::Block(_, _) | ExprKind::Break(_, _) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/assert.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/assert.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/assert.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/assert.rs 2023-12-21 16:55:28.000000000 +0000 @@ -85,7 +85,7 @@ DUMMY_SP, Symbol::intern(&format!( "assertion failed: {}", - pprust::expr_to_string(&cond_expr).escape_debug() + pprust::expr_to_string(&cond_expr) )), )], ); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/concat.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/concat.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/concat.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/concat.rs 2023-12-21 16:55:28.000000000 +0000 @@ -33,7 +33,7 @@ accumulator.push_str(&b.to_string()); } Ok(ast::LitKind::CStr(..)) => { - cx.emit_err(errors::ConcatCStrLit{ span: e.span}); + cx.emit_err(errors::ConcatCStrLit { span: e.span }); has_errors = true; } Ok(ast::LitKind::Byte(..) | ast::LitKind::ByteStr(..)) => { @@ -49,7 +49,9 @@ } }, // We also want to allow negative numeric literals. 
- ast::ExprKind::Unary(ast::UnOp::Neg, ref expr) if let ast::ExprKind::Lit(token_lit) = expr.kind => { + ast::ExprKind::Unary(ast::UnOp::Neg, ref expr) + if let ast::ExprKind::Lit(token_lit) = expr.kind => + { match ast::LitKind::from_token_lit(token_lit) { Ok(ast::LitKind::Int(i, _)) => accumulator.push_str(&format!("-{i}")), Ok(ast::LitKind::Float(f, _)) => accumulator.push_str(&format!("-{f}")), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/concat_bytes.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/concat_bytes.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/concat_bytes.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/concat_bytes.rs 2023-12-21 16:55:28.000000000 +0000 @@ -140,8 +140,8 @@ } ast::ExprKind::Repeat(expr, count) => { if let ast::ExprKind::Lit(token_lit) = count.value.kind - && let Ok(ast::LitKind::Int(count_val, _)) = - ast::LitKind::from_token_lit(token_lit) + && let Ok(ast::LitKind::Int(count_val, _)) = + ast::LitKind::from_token_lit(token_lit) { if let Some(elem) = handle_array_element(cx, &mut has_errors, &mut missing_literals, expr) @@ -151,7 +151,7 @@ } } } else { - cx.emit_err(errors::ConcatBytesBadRepeat {span: count.value.span }); + cx.emit_err(errors::ConcatBytesBadRepeat { span: count.value.span }); } } &ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/clone.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/clone.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/clone.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/clone.rs 2023-12-21 16:55:28.000000000 +0000 @@ -106,7 +106,9 @@ // This basic redundancy checking only prevents duplication of // assertions like `AssertParamIsClone` where the type is a // simple name. That's enough to get a lot of cases, though. - if let Some(name) = field.ty.kind.is_simple_path() && !seen_type_names.insert(name) { + if let Some(name) = field.ty.kind.is_simple_path() + && !seen_type_names.insert(name) + { // Already produced an assertion for this type. } else { // let _: AssertParamIsClone; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs 2023-12-21 16:55:28.000000000 +0000 @@ -73,7 +73,9 @@ // This basic redundancy checking only prevents duplication of // assertions like `AssertParamIsEq` where the type is a // simple name. That's enough to get a lot of cases, though. - if let Some(name) = field.ty.kind.is_simple_path() && !seen_type_names.insert(name) { + if let Some(name) = field.ty.kind.is_simple_path() + && !seen_type_names.insert(name) + { // Already produced an assertion for this type. 
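The derive(Clone) and derive(Eq) hunks above both rely on the same idiom: `HashSet::insert` returns `false` when the value is already present, so a second `AssertParamIsClone`/`AssertParamIsEq` assertion for the same simple type name is skipped. A small self-contained sketch of that idiom, with a plain string standing in for the generated assertion:

    use std::collections::HashSet;

    fn emit_assertions(field_type_names: &[&str]) -> Vec<String> {
        let mut seen_type_names = HashSet::new();
        let mut assertions = Vec::new();
        for name in field_type_names {
            // `insert` returns false if the name was already seen.
            if !seen_type_names.insert(*name) {
                // Already produced an assertion for this type.
                continue;
            }
            assertions.push(format!("let _: AssertParamIsClone<{name}>;"));
        }
        assertions
    }

    fn main() {
        let out = emit_assertions(&["u32", "String", "u32"]);
        assert_eq!(out.len(), 2); // the second `u32` is skipped
    }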
} else { // let _: AssertParamIsEq; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs 2023-12-21 16:55:28.000000000 +0000 @@ -21,25 +21,26 @@ // Order in which to perform matching let tag_then_data = if let Annotatable::Item(item) = item - && let ItemKind::Enum(def, _) = &item.kind { - let dataful: Vec = def.variants.iter().map(|v| !v.data.fields().is_empty()).collect(); - match dataful.iter().filter(|&&b| b).count() { - // No data, placing the tag check first makes codegen simpler - 0 => true, - 1..=2 => false, - _ => { - (0..dataful.len()-1).any(|i| { - if dataful[i] && let Some(idx) = dataful[i+1..].iter().position(|v| *v) { - idx >= 2 - } else { - false - } - }) + && let ItemKind::Enum(def, _) = &item.kind + { + let dataful: Vec = def.variants.iter().map(|v| !v.data.fields().is_empty()).collect(); + match dataful.iter().filter(|&&b| b).count() { + // No data, placing the tag check first makes codegen simpler + 0 => true, + 1..=2 => false, + _ => (0..dataful.len() - 1).any(|i| { + if dataful[i] + && let Some(idx) = dataful[i + 1..].iter().position(|v| *v) + { + idx >= 2 + } else { + false } - } - } else { - true - }; + }), + } + } else { + true + }; let partial_cmp_def = MethodDef { name: sym::partial_cmp, generics: Bounds::empty(), @@ -133,12 +134,16 @@ if !tag_then_data && let ExprKind::Match(_, arms) = &mut expr1.kind && let Some(last) = arms.last_mut() - && let PatKind::Wild = last.pat.kind { - last.body = expr2; - expr1 + && let PatKind::Wild = last.pat.kind + { + last.body = expr2; + expr1 } else { - let eq_arm = - cx.arm(span, cx.pat_some(span, cx.pat_path(span, equal_path.clone())), expr1); + let eq_arm = cx.arm( + span, + cx.pat_some(span, cx.pat_path(span, equal_path.clone())), + expr1, + ); let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); cx.expr_match(span, expr2, thin_vec![eq_arm, neq_arm]) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/debug.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/debug.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/debug.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/debug.rs 2023-12-21 16:55:28.000000000 +0000 @@ -33,7 +33,7 @@ explicit_self: true, nonself_args: vec![(fmtr, sym::f)], ret_ty: Path(path_std!(fmt::Result)), - attributes: ast::AttrVec::new(), + attributes: thin_vec![cx.attr_word(sym::inline, span)], fieldless_variants_strategy: FieldlessVariantsStrategy::SpecializeIfAllVariantsFieldless, combine_substructure: combine_substructure(Box::new(|a, b, c| { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -37,8 +37,9 @@ //! following snippet //! //! ```rust -//! 
# #![allow(dead_code)] -//! struct A { x : i32 } +//! struct A { +//! x: i32, +//! } //! //! struct B(i32); //! @@ -74,6 +75,7 @@ //! trait PartialEq { //! fn eq(&self, other: &Self) -> bool; //! } +//! //! impl PartialEq for i32 { //! fn eq(&self, other: &i32) -> bool { //! *self == *other @@ -90,22 +92,22 @@ //! //! ```text //! Struct(vec![FieldInfo { -//! span: -//! name: Some(), -//! self_: , -//! other: vec![, +//! name: Some(), +//! self_: , +//! other: vec![], +//! }]) //! ``` //! //! For the `B` impl, called with `B(a)` and `B(b)`, //! //! ```text //! Struct(vec![FieldInfo { -//! span: , -//! name: None, -//! self_: -//! other: vec![] -//! }]) +//! span: , +//! name: None, +//! self_: , +//! other: vec![], +//! }]) //! ``` //! //! ## Enums @@ -114,33 +116,42 @@ //! == C0(b)`, the SubstructureFields is //! //! ```text -//! EnumMatching(0, , -//! vec![FieldInfo { -//! span: -//! name: None, -//! self_: , -//! other: vec![] -//! }]) +//! EnumMatching( +//! 0, +//! , +//! vec![FieldInfo { +//! span: , +//! name: None, +//! self_: , +//! other: vec![], +//! }], +//! ) //! ``` //! //! For `C1 {x}` and `C1 {x}`, //! //! ```text -//! EnumMatching(1, , -//! vec![FieldInfo { -//! span: -//! name: Some(), -//! self_: , -//! other: vec![] -//! }]) +//! EnumMatching( +//! 1, +//! , +//! vec![FieldInfo { +//! span: , +//! name: Some(), +//! self_: , +//! other: vec![], +//! }], +//! ) //! ``` //! //! For the tags, //! //! ```text //! EnumTag( -//! &[, ], ) +//! &[, ], +//! , +//! ) //! ``` +//! //! Note that this setup doesn't allow for the brute-force "match every variant //! against every other variant" approach, which is bad because it produces a //! quadratic amount of code (see #15375). @@ -154,9 +165,13 @@ //! //! StaticStruct(, Unnamed(vec![])) //! -//! StaticEnum(, -//! vec![(, , Unnamed(vec![])), -//! (, , Named(vec![(, )]))]) +//! StaticEnum( +//! , +//! vec![ +//! (, , Unnamed(vec![])), +//! (, , Named(vec![(, )])), +//! ], +//! ) //! ``` pub use StaticFields::*; @@ -522,7 +537,10 @@ /// Given that we are deriving a trait `DerivedTrait` for a type like: /// /// ```ignore (only-for-syntax-highlight) - /// struct Struct<'a, ..., 'z, A, B: DeclaredTrait, C, ..., Z> where C: WhereTrait { + /// struct Struct<'a, ..., 'z, A, B: DeclaredTrait, C, ..., Z> + /// where + /// C: WhereTrait, + /// { /// a: A, /// b: B::Item, /// b1: ::Item, @@ -535,12 +553,13 @@ /// create an impl like: /// /// ```ignore (only-for-syntax-highlight) - /// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ... Z> where - /// C: WhereTrait, + /// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ..., Z> + /// where + /// C: WhereTrait, /// A: DerivedTrait + B1 + ... + BN, /// B: DerivedTrait + B1 + ... + BN, /// C: DerivedTrait + B1 + ... + BN, - /// B::Item: DerivedTrait + B1 + ... + BN, + /// B::Item: DerivedTrait + B1 + ... + BN, /// ::Item: DerivedTrait + B1 + ... + BN, /// ... /// { @@ -676,65 +695,59 @@ } })); - { - // Extra scope required here so ty_params goes out of scope before params is moved - - let mut ty_params = params - .iter() - .filter(|param| matches!(param.kind, ast::GenericParamKind::Type { .. 
})) - .peekable(); - - if ty_params.peek().is_some() { - let ty_param_names: Vec = - ty_params.map(|ty_param| ty_param.ident.name).collect(); - - for field_ty in field_tys { - let field_ty_params = find_type_parameters(&field_ty, &ty_param_names, cx); - - for field_ty_param in field_ty_params { - // if we have already handled this type, skip it - if let ast::TyKind::Path(_, p) = &field_ty_param.ty.kind - && let [sole_segment] = &*p.segments - && ty_param_names.contains(&sole_segment.ident.name) - { - continue; - } - let mut bounds: Vec<_> = self - .additional_bounds - .iter() - .map(|p| { - cx.trait_bound( - p.to_path(cx, self.span, type_ident, generics), - self.is_const, - ) - }) - .collect(); - - // Require the current trait. - if !self.skip_path_as_bound { - bounds.push(cx.trait_bound(trait_path.clone(), self.is_const)); - } + let ty_param_names: Vec = params + .iter() + .filter(|param| matches!(param.kind, ast::GenericParamKind::Type { .. })) + .map(|ty_param| ty_param.ident.name) + .collect(); - // Add a `Copy` bound if required. - if is_packed && self.needs_copy_as_bound_if_packed { - let p = deriving::path_std!(marker::Copy); - bounds.push(cx.trait_bound( + if !ty_param_names.is_empty() { + for field_ty in field_tys { + let field_ty_params = find_type_parameters(&field_ty, &ty_param_names, cx); + + for field_ty_param in field_ty_params { + // if we have already handled this type, skip it + if let ast::TyKind::Path(_, p) = &field_ty_param.ty.kind + && let [sole_segment] = &*p.segments + && ty_param_names.contains(&sole_segment.ident.name) + { + continue; + } + let mut bounds: Vec<_> = self + .additional_bounds + .iter() + .map(|p| { + cx.trait_bound( p.to_path(cx, self.span, type_ident, generics), self.is_const, - )); - } + ) + }) + .collect(); - if !bounds.is_empty() { - let predicate = ast::WhereBoundPredicate { - span: self.span, - bound_generic_params: field_ty_param.bound_generic_params, - bounded_ty: field_ty_param.ty, - bounds, - }; + // Require the current trait. + if !self.skip_path_as_bound { + bounds.push(cx.trait_bound(trait_path.clone(), self.is_const)); + } - let predicate = ast::WherePredicate::BoundPredicate(predicate); - where_clause.predicates.push(predicate); - } + // Add a `Copy` bound if required. + if is_packed && self.needs_copy_as_bound_if_packed { + let p = deriving::path_std!(marker::Copy); + bounds.push(cx.trait_bound( + p.to_path(cx, self.span, type_ident, generics), + self.is_const, + )); + } + + if !bounds.is_empty() { + let predicate = ast::WhereBoundPredicate { + span: self.span, + bound_generic_params: field_ty_param.bound_generic_params, + bounded_ty: field_ty_param.ty, + bounds, + }; + + let predicate = ast::WherePredicate::BoundPredicate(predicate); + where_clause.predicates.push(predicate); } } } @@ -1026,6 +1039,7 @@ } /// The normal case uses field access. + /// /// ``` /// #[derive(PartialEq)] /// # struct Dummy; @@ -1038,10 +1052,12 @@ /// } /// } /// ``` + /// /// But if the struct is `repr(packed)`, we can't use something like /// `&self.x` because that might cause an unaligned ref. So for any trait /// method that takes a reference, we use a local block to force a copy. /// This requires that the field impl `Copy`. 
+ /// /// ```rust,ignore (example) /// # struct A { x: u8, y: u8 } /// impl PartialEq for A { @@ -1053,7 +1069,7 @@ /// impl Hash for A { /// fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { /// ::core::hash::Hash::hash(&{ self.x }, state); - /// ::core::hash::Hash::hash(&{ self.y }, state) + /// ::core::hash::Hash::hash(&{ self.y }, state); /// } /// } /// ``` @@ -1107,7 +1123,9 @@ /// A2(i32) /// } /// ``` + /// /// is equivalent to: + /// /// ``` /// #![feature(core_intrinsics)] /// enum A { @@ -1119,15 +1137,15 @@ /// fn eq(&self, other: &A) -> bool { /// let __self_tag = ::core::intrinsics::discriminant_value(self); /// let __arg1_tag = ::core::intrinsics::discriminant_value(other); - /// __self_tag == __arg1_tag && - /// match (self, other) { - /// (A::A2(__self_0), A::A2(__arg1_0)) => - /// *__self_0 == *__arg1_0, + /// __self_tag == __arg1_tag + /// && match (self, other) { + /// (A::A2(__self_0), A::A2(__arg1_0)) => *__self_0 == *__arg1_0, /// _ => true, /// } /// } /// } /// ``` + /// /// Creates a tag check combined with a match for a tuple of all /// `selflike_args`, with an arm for each variant with fields, possibly an /// arm for each fieldless variant (if `unify_fieldless_variants` is not @@ -1349,7 +1367,7 @@ // (Variant1, Variant1, ...) => Body1 // (Variant2, Variant2, ...) => Body2, // ... - // _ => ::core::intrinsics::unreachable() + // _ => ::core::intrinsics::unreachable(), // } let get_match_expr = |mut selflike_args: ThinVec>| { let match_arg = if selflike_args.len() == 1 { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,9 +6,10 @@ use rustc_ast::ptr::P; use rustc_ast::{self as ast, Expr, GenericArg, GenericParamKind, Generics, SelfKind}; use rustc_expand::base::ExtCtxt; -use rustc_span::source_map::{respan, DUMMY_SP}; +use rustc_span::source_map::respan; use rustc_span::symbol::{kw, Ident, Symbol}; use rustc_span::Span; +use rustc_span::DUMMY_SP; use thin_vec::ThinVec; /// A path, e.g., `::std::option::Option::` (global). 
Has support diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/env.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/env.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/env.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/env.rs 2023-12-21 16:55:28.000000000 +0000 @@ -108,7 +108,7 @@ return DummyResult::any(sp); } - Some(value) => cx.expr_str(sp, value), + Some(value) => cx.expr_str(span, value), }; MacEager::expr(e) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -647,6 +647,27 @@ } #[derive(Diagnostic)] +#[diag(builtin_macros_format_redundant_args)] +pub(crate) struct FormatRedundantArgs { + #[primary_span] + pub(crate) span: MultiSpan, + pub(crate) n: usize, + + #[note] + pub(crate) note: MultiSpan, + + #[subdiagnostic] + pub(crate) sugg: Option, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion(builtin_macros_suggestion, applicability = "machine-applicable")] +pub(crate) struct FormatRedundantArgsSugg { + #[suggestion_part(code = "")] + pub(crate) spans: Vec, +} + +#[derive(Diagnostic)] #[diag(builtin_macros_test_case_non_item)] pub(crate) struct TestCaseNonItem { #[primary_span] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/format.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/format.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/format.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/format.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,4 @@ +use parse::Position::ArgumentNamed; use rustc_ast::ptr::P; use rustc_ast::tokenstream::TokenStream; use rustc_ast::{token, StmtKind}; @@ -7,7 +8,9 @@ FormatDebugHex, FormatOptions, FormatPlaceholder, FormatSign, FormatTrait, }; use rustc_data_structures::fx::FxHashSet; -use rustc_errors::{Applicability, MultiSpan, PResult, SingleLabelManySpans}; +use rustc_errors::{ + Applicability, DiagnosticBuilder, ErrorGuaranteed, MultiSpan, PResult, SingleLabelManySpans, +}; use rustc_expand::base::{self, *}; use rustc_parse_format as parse; use rustc_span::symbol::{Ident, Symbol}; @@ -73,7 +76,9 @@ let first_token = &p.token; - let fmtstr = if let token::Literal(lit) = first_token.kind && matches!(lit.kind, token::Str | token::StrRaw(_)) { + let fmtstr = if let token::Literal(lit) = first_token.kind + && matches!(lit.kind, token::Str | token::StrRaw(_)) + { // This allows us to properly handle cases when the first comma // after the format string is mistakenly replaced with any operator, // which cause the expression parser to eat too much tokens. 
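The errors.rs and messages.ftl additions above introduce a "redundant argument" diagnostic for `format!` arguments that duplicate an implicit capture. A user-level illustration of the pattern it targets (the exact diagnostic text is not reproduced here):

    fn main() {
        let x = 42;

        // Rejected: `{x}` already captures the binding directly, so the trailing
        // positional `x` is redundant; rustc 1.75 reports it with the new
        // diagnostic and suggests removing it.
        // let _ = format!("{x}", x);

        // Accepted: rely on the implicit capture.
        let _ = format!("{x}");
    }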
@@ -176,7 +181,7 @@ && block.stmts.len() == 1 && let StmtKind::Expr(expr) = &block.stmts[0].kind && let ExprKind::Path(None, path) = &expr.kind - && path.is_potential_trivial_const_arg() + && path.is_potential_trivial_const_arg() { err.multipart_suggestion( "quote your inlined format argument to use as string literal", @@ -184,7 +189,7 @@ (unexpanded_fmt_span.shrink_to_hi(), "\"".to_string()), (unexpanded_fmt_span.shrink_to_lo(), "\"".to_string()), ], - Applicability::MaybeIncorrect, + Applicability::MaybeIncorrect, ); } else { let sugg_fmt = match args.explicit_args().len() { @@ -257,8 +262,13 @@ if let Some(note) = err.note { e.note_ = Some(errors::InvalidFormatStringNote { note }); } - if let Some((label, span)) = err.secondary_label && is_source_literal { - e.label_ = Some(errors::InvalidFormatStringLabel { span: fmt_span.from_inner(InnerSpan::new(span.start, span.end)), label } ); + if let Some((label, span)) = err.secondary_label + && is_source_literal + { + e.label_ = Some(errors::InvalidFormatStringLabel { + span: fmt_span.from_inner(InnerSpan::new(span.start, span.end)), + label, + }); } match err.suggestion { parse::Suggestion::None => {} @@ -357,8 +367,8 @@ let mut unfinished_literal = String::new(); let mut placeholder_index = 0; - for piece in pieces { - match piece { + for piece in &pieces { + match *piece { parse::Piece::String(s) => { unfinished_literal.push_str(s); } @@ -506,7 +516,17 @@ // If there's a lot of unused arguments, // let's check if this format arguments looks like another syntax (printf / shell). let detect_foreign_fmt = unused.len() > args.explicit_args().len() / 2; - report_missing_placeholders(ecx, unused, detect_foreign_fmt, str_style, fmt_str, fmt_span); + report_missing_placeholders( + ecx, + unused, + &used, + &args, + &pieces, + detect_foreign_fmt, + str_style, + fmt_str, + fmt_span, + ); } // Only check for unused named argument names if there are no other errors to avoid causing @@ -573,6 +593,9 @@ fn report_missing_placeholders( ecx: &mut ExtCtxt<'_>, unused: Vec<(Span, bool)>, + used: &[bool], + args: &FormatArguments, + pieces: &[parse::Piece<'_>], detect_foreign_fmt: bool, str_style: Option, fmt_str: &str, @@ -591,6 +614,26 @@ }) }; + let placeholders = pieces + .iter() + .filter_map(|piece| { + if let parse::Piece::NextArgument(argument) = piece && let ArgumentNamed(binding) = argument.position { + let span = fmt_span.from_inner(InnerSpan::new(argument.position_span.start, argument.position_span.end)); + Some((span, binding)) + } else { None } + }) + .collect::>(); + + if !placeholders.is_empty() { + if let Some(mut new_diag) = + report_redundant_format_arguments(ecx, &args, used, placeholders) + { + diag.cancel(); + new_diag.emit(); + return; + } + } + // Used to ensure we only report translations for *one* kind of foreign format. let mut found_foreign = false; @@ -678,6 +721,76 @@ diag.emit(); } +/// This function detects and reports unused format!() arguments that are +/// redundant due to implicit captures (e.g. `format!("{x}", x)`). 
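The doc comment above summarizes the detection: an unused positional argument that is a bare identifier naming the same binding as a named placeholder is redundant. Below is a deliberately simplified, span-free sketch of that matching step, using plain strings in place of the AST and `FormatArguments`; the real function also collects spans and builds removal suggestions, which are omitted here.

    fn redundant_arg_indices(
        placeholders: &[&str], // named placeholders, e.g. ["x"] for "{x}"
        unnamed_args: &[&str], // positional argument expressions, as written
        used: &[bool],         // whether each positional argument was consumed
    ) -> Vec<usize> {
        unnamed_args
            .iter()
            .enumerate()
            .filter(|&(i, arg)| {
                !used[i]
                    // crude stand-in for "is a simple path": a bare identifier
                    && arg.chars().all(|c| c.is_alphanumeric() || c == '_')
                    && placeholders.contains(arg)
            })
            .map(|(i, _)| i)
            .collect()
    }

    fn main() {
        // format!("{x}", x): the placeholder "x" already captures the binding.
        assert_eq!(redundant_arg_indices(&["x"], &["x"], &[false]), vec![0]);
        // format!("{}", x): the positional argument is actually used.
        assert_eq!(redundant_arg_indices(&[], &["x"], &[true]), Vec::<usize>::new());
    }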
+fn report_redundant_format_arguments<'a>( + ecx: &mut ExtCtxt<'a>, + args: &FormatArguments, + used: &[bool], + placeholders: Vec<(Span, &str)>, +) -> Option> { + let mut fmt_arg_indices = vec![]; + let mut args_spans = vec![]; + let mut fmt_spans = vec![]; + + for (i, unnamed_arg) in args.unnamed_args().iter().enumerate().rev() { + let Some(ty) = unnamed_arg.expr.to_ty() else { continue }; + let Some(argument_binding) = ty.kind.is_simple_path() else { continue }; + let argument_binding = argument_binding.as_str(); + + if used[i] { + continue; + } + + let matching_placeholders = placeholders + .iter() + .filter(|(_, inline_binding)| argument_binding == *inline_binding) + .map(|(span, _)| span) + .collect::>(); + + if !matching_placeholders.is_empty() { + fmt_arg_indices.push(i); + args_spans.push(unnamed_arg.expr.span); + for span in &matching_placeholders { + if fmt_spans.contains(*span) { + continue; + } + fmt_spans.push(**span); + } + } + } + + if !args_spans.is_empty() { + let multispan = MultiSpan::from(fmt_spans); + let mut suggestion_spans = vec![]; + + for (arg_span, fmt_arg_idx) in args_spans.iter().zip(fmt_arg_indices.iter()) { + let span = if fmt_arg_idx + 1 == args.explicit_args().len() { + *arg_span + } else { + arg_span.until(args.explicit_args()[*fmt_arg_idx + 1].expr.span) + }; + + suggestion_spans.push(span); + } + + let sugg = if args.named_args().len() == 0 { + Some(errors::FormatRedundantArgsSugg { spans: suggestion_spans }) + } else { + None + }; + + return Some(ecx.create_err(errors::FormatRedundantArgs { + n: args_spans.len(), + span: MultiSpan::from(args_spans), + note: multispan, + sugg, + })); + } + + None +} + /// Handle invalid references to positional arguments. Output different /// errors for the case where all arguments are positional and for when /// there are named arguments or numbered positional arguments in the diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/global_allocator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/global_allocator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/global_allocator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/global_allocator.rs 2023-12-21 16:55:28.000000000 +0000 @@ -24,20 +24,22 @@ // Allow using `#[global_allocator]` on an item statement // FIXME - if we get deref patterns, use them to reduce duplication here - let (item, is_stmt, ty_span) = - if let Annotatable::Item(item) = &item - && let ItemKind::Static(box ast::StaticItem { ty, ..}) = &item.kind - { - (item, false, ecx.with_def_site_ctxt(ty.span)) - } else if let Annotatable::Stmt(stmt) = &item - && let StmtKind::Item(item) = &stmt.kind - && let ItemKind::Static(box ast::StaticItem { ty, ..}) = &item.kind - { - (item, true, ecx.with_def_site_ctxt(ty.span)) - } else { - ecx.sess.parse_sess.span_diagnostic.emit_err(errors::AllocMustStatics{span: item.span()}); - return vec![orig_item]; - }; + let (item, is_stmt, ty_span) = if let Annotatable::Item(item) = &item + && let ItemKind::Static(box ast::StaticItem { ty, .. }) = &item.kind + { + (item, false, ecx.with_def_site_ctxt(ty.span)) + } else if let Annotatable::Stmt(stmt) = &item + && let StmtKind::Item(item) = &stmt.kind + && let ItemKind::Static(box ast::StaticItem { ty, .. 
}) = &item.kind + { + (item, true, ecx.with_def_site_ctxt(ty.span)) + } else { + ecx.sess + .parse_sess + .span_diagnostic + .emit_err(errors::AllocMustStatics { span: item.span() }); + return vec![orig_item]; + }; // Generate a bunch of new items using the AllocFnFactory let span = ecx.with_def_site_ctxt(item.span); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,9 @@ //! This crate contains implementations of built-in macros and other code generating facilities //! injecting code into the crate before it is lowered to HIR. +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![feature(array_windows)] #![feature(box_patterns)] @@ -71,33 +74,35 @@ } register_bang! { + // tidy-alphabetical-start asm: asm::expand_asm, assert: assert::expand_assert, cfg: cfg::expand_cfg, column: source_util::expand_column, compile_error: compile_error::expand_compile_error, + concat: concat::expand_concat, concat_bytes: concat_bytes::expand_concat_bytes, concat_idents: concat_idents::expand_concat_idents, - concat: concat::expand_concat, + const_format_args: format::expand_format_args, + core_panic: edition_panic::expand_panic, env: env::expand_env, file: source_util::expand_file, - format_args_nl: format::expand_format_args_nl, format_args: format::expand_format_args, - const_format_args: format::expand_format_args, + format_args_nl: format::expand_format_args_nl, global_asm: asm::expand_global_asm, + include: source_util::expand_include, include_bytes: source_util::expand_include_bytes, include_str: source_util::expand_include_str, - include: source_util::expand_include, line: source_util::expand_line, log_syntax: log_syntax::expand_log_syntax, module_path: source_util::expand_mod, option_env: env::expand_option_env, - core_panic: edition_panic::expand_panic, std_panic: edition_panic::expand_panic, - unreachable: edition_panic::expand_unreachable, stringify: source_util::expand_stringify, trace_macros: trace_macros::expand_trace_macros, type_ascribe: type_ascribe::expand_type_ascribe, + unreachable: edition_panic::expand_unreachable, + // tidy-alphabetical-end } register_attr! 
{ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/source_util.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/source_util.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/source_util.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/source_util.rs 2023-12-21 16:55:28.000000000 +0000 @@ -61,9 +61,14 @@ let topmost = cx.expansion_cause().unwrap_or(sp); let loc = cx.source_map().lookup_char_pos(topmost.lo()); - base::MacEager::expr( - cx.expr_str(topmost, Symbol::intern(&loc.file.name.prefer_remapped().to_string_lossy())), - ) + + use rustc_session::{config::RemapPathScopeComponents, RemapFileNameExt}; + base::MacEager::expr(cx.expr_str( + topmost, + Symbol::intern( + &loc.file.name.for_scope(cx.sess, RemapPathScopeComponents::MACRO).to_string_lossy(), + ), + )) } pub fn expand_stringify( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/test.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/test.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/test.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/test.rs 2023-12-21 16:55:28.000000000 +0000 @@ -35,11 +35,13 @@ let sp = ecx.with_def_site_ctxt(attr_sp); let (mut item, is_stmt) = match anno_item { Annotatable::Item(item) => (item, false), - Annotatable::Stmt(stmt) if let ast::StmtKind::Item(_) = stmt.kind => if let ast::StmtKind::Item(i) = stmt.into_inner().kind { - (i, true) - } else { - unreachable!() - }, + Annotatable::Stmt(stmt) if let ast::StmtKind::Item(_) = stmt.kind => { + if let ast::StmtKind::Item(i) = stmt.into_inner().kind { + (i, true) + } else { + unreachable!() + } + } _ => { ecx.emit_err(errors::TestCaseNonItem { span: anno_item.span() }); return vec![]; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/test_harness.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/test_harness.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/test_harness.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_builtin_macros/src/test_harness.rs 2023-12-21 16:55:28.000000000 +0000 @@ -169,31 +169,17 @@ } } -// Beware, this is duplicated in librustc_passes/entry.rs (with -// `rustc_hir::Item`), so make sure to keep them in sync. -fn entry_point_type(item: &ast::Item, depth: usize) -> EntryPointType { +fn entry_point_type(item: &ast::Item, at_root: bool) -> EntryPointType { match item.kind { ast::ItemKind::Fn(..) 
=> { - if attr::contains_name(&item.attrs, sym::start) { - EntryPointType::Start - } else if attr::contains_name(&item.attrs, sym::rustc_main) { - EntryPointType::RustcMainAttr - } else if item.ident.name == sym::main { - if depth == 0 { - // This is a top-level function so can be 'main' - EntryPointType::MainNamed - } else { - EntryPointType::OtherMain - } - } else { - EntryPointType::None - } + rustc_ast::entry::entry_point_type(&item.attrs, at_root, Some(item.ident.name)) } _ => EntryPointType::None, } } + /// A folder used to remove any entry points (like fn main) because the harness -/// generator will provide its own +/// coroutine will provide its own struct EntryPointCleaner<'a> { // Current depth in the ast sess: &'a Session, @@ -210,7 +196,7 @@ // Remove any #[rustc_main] or #[start] from the AST so it doesn't // clash with the one we're going to add, but mark it as // #[allow(dead_code)] to avoid printing warnings. - let item = match entry_point_type(&item, self.depth) { + let item = match entry_point_type(&item, self.depth == 0) { EntryPointType::MainNamed | EntryPointType::RustcMainAttr | EntryPointType::Start => { item.map(|ast::Item { id, ident, attrs, kind, vis, span, tokens }| { let allow_dead_code = attr::mk_attr_nested_word( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.cirrus.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.cirrus.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.cirrus.yml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.cirrus.yml 2023-12-21 16:55:28.000000000 +0000 @@ -3,7 +3,7 @@ freebsd_instance: image: freebsd-13-2-release-amd64 setup_rust_script: - - pkg install -y git bash + - pkg install -y git bash binutils - curl https://sh.rustup.rs -sSf --output rustup.sh - sh rustup.sh --default-toolchain none -y --profile=minimal target_cache: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.github/workflows/main.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.github/workflows/main.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.github/workflows/main.yml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.github/workflows/main.yml 2023-12-21 16:55:28.000000000 +0000 @@ -50,10 +50,12 @@ - os: ubuntu-latest env: TARGET_TRIPLE: aarch64-unknown-linux-gnu - # s390x requires QEMU 6.1 or greater, we could build it from source, but ubuntu 22.04 comes with 6.2 by default - os: ubuntu-latest env: TARGET_TRIPLE: s390x-unknown-linux-gnu + - os: ubuntu-latest + env: + TARGET_TRIPLE: riscv64gc-unknown-linux-gnu - os: windows-latest env: TARGET_TRIPLE: x86_64-pc-windows-msvc @@ -92,6 +94,12 @@ sudo apt-get update sudo apt-get install -y gcc-s390x-linux-gnu qemu-user + - name: Install riscv64gc toolchain and qemu + if: matrix.env.TARGET_TRIPLE == 'riscv64gc-unknown-linux-gnu' + run: | + sudo apt-get update + sudo apt-get install -y gcc-riscv64-linux-gnu qemu-user + - name: Prepare dependencies run: ./y.sh prepare diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.vscode/settings.json rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.vscode/settings.json --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.vscode/settings.json 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/.vscode/settings.json 
2023-12-21 16:55:28.000000000 +0000 @@ -33,7 +33,7 @@ ] }, { - "sysroot_src": "./download/sysroot/sysroot_src/library", + "sysroot_src": "./build/stdlib/library", "crates": [ { "root_module": "./example/std_example.rs", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Cargo.lock rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Cargo.lock --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Cargo.lock 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Cargo.lock 2023-12-21 16:55:28.000000000 +0000 @@ -15,9 +15,9 @@ [[package]] name = "anyhow" -version = "1.0.66" +version = "1.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6" +checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" [[package]] name = "arbitrary" @@ -26,12 +26,6 @@ checksum = "e2d098ff73c1ca148721f37baad5ea6a465a13f9573aba8641fbbbae8164a54e" [[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -39,9 +33,9 @@ [[package]] name = "bumpalo" -version = "3.11.1" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" [[package]] name = "cfg-if" @@ -51,18 +45,18 @@ [[package]] name = "cranelift-bforest" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec27af72e56235eb326b5bf2de4e70ab7c5ac1fb683a1829595badaf821607fd" +checksum = "f773437307980ac0f424bf9b9a5d0cd21a0f17248c6860c9a65bec8b5975f3fe" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-codegen" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2231e12925e6c5f4bc9c95b62a798eea6ed669a95bc3e00f8b2adb3b7b9b7a80" +checksum = "443c2ac50e97fb7de1a0f862753fce3f27215558811a6fcee508eb0c3747fa79" dependencies = [ "bumpalo", "cranelift-bforest", @@ -72,7 +66,7 @@ "cranelift-entity", "cranelift-isle", "gimli", - "hashbrown 0.13.2", + "hashbrown 0.14.0", "log", "regalloc2", "smallvec", @@ -81,39 +75,39 @@ [[package]] name = "cranelift-codegen-meta" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413b00b8dfb3aab85674a534677e7ca08854b503f164a70ec0634fce80996e2c" +checksum = "c5b174c411480c79ce0793c55042fa51bec27e486381d103a53cab3b480cb2db" dependencies = [ "cranelift-codegen-shared", ] [[package]] name = "cranelift-codegen-shared" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd0feb9ecc8193ef5cb04f494c5bd835e5bfec4bde726e7ac0444fc9dd76229e" +checksum = "73fa0151a528066a369de6debeea4d4b23a32aba68b5add8c46d3dc8091ff434" [[package]] name = "cranelift-control" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72eedd2afcf5fee1e042eaaf18d3750e48ad0eca364a9f5971ecfdd5ef85bf71" +checksum = "b8adf1e6398493c9bea1190e37d28a0eb0eca5fddbc80e01e506cda34db92b1f" dependencies 
= [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7af19157be42671073cf8c2a52d6a4ae1e7b11f1dcb4131fede356d9f91c29dd" +checksum = "4917e2ed3bb5fe87d0ed88395ca6d644018d119a034faedd1f3e1f2c33cd52b2" [[package]] name = "cranelift-frontend" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2dc7636c5fad156be7d9ae691cd1aaecd97326caf2ab534ba168056d56aa76c" +checksum = "9aaadf1e7cf28886bbf046eaf7ef538997bc8a7e020e578ea4957b39da87d5a1" dependencies = [ "cranelift-codegen", "log", @@ -123,15 +117,15 @@ [[package]] name = "cranelift-isle" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1111aea4fb6fade5779903f184249a3fc685a799fe4ec59126f9af59c7c2a74" +checksum = "a67fda31b9d69eaa1c49a2081939454c45857596a9d45af6744680541c628b4c" [[package]] name = "cranelift-jit" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dadf88076317f6286ec77ebbe65978734fb43b6befdc96f52ff4c4c511841644" +checksum = "d6bf32710628e7ff298739f1ed80a0bfdafc0c6a3e284c4540b23f18e8889d4b" dependencies = [ "anyhow", "cranelift-codegen", @@ -149,9 +143,9 @@ [[package]] name = "cranelift-module" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6bae8a82dbf82241b1083e57e06870d2c2bdc9852727be99d58477513816953" +checksum = "4d693e93a0fbf56b4bc93cffe6b107c2e52f070e1111950505fc8c83ac440b9d" dependencies = [ "anyhow", "cranelift-codegen", @@ -160,9 +154,9 @@ [[package]] name = "cranelift-native" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ecfc01a634448468a698beac433d98040033046678a0eed3ca39a3a9f63ae86" +checksum = "76fb52ba71be98312f35e798d9e98e45ab2586f27584231bf7c644fa9501e8af" dependencies = [ "cranelift-codegen", "libc", @@ -171,9 +165,9 @@ [[package]] name = "cranelift-object" -version = "0.98.0" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ee14a7276999f0dcaae2de84043e2c2de50820fb89b3db56fab586a4ad26734" +checksum = "2551b2e185022b89e9efa5e04c0f17f679b86ef73d9f7feabc48b608ff23120d" dependencies = [ "anyhow", "cranelift-codegen", @@ -195,35 +189,29 @@ [[package]] name = "equivalent" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "fallible-iterator" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" +checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" [[package]] name = "gimli" -version = "0.27.2" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" dependencies = [ "fallible-iterator", - "indexmap 1.9.3", + "indexmap", "stable_deref_trait", ] [[package]] name = "hashbrown" -version = "0.12.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" @@ -236,15 +224,8 @@ version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ - "autocfg", - "hashbrown 0.12.3", + "ahash", ] [[package]] @@ -259,9 +240,9 @@ [[package]] name = "libc" -version = "0.2.138" +version = "0.2.148" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db6d7e329c562c5dfab7a46a2afabc8b987ab9a4834c9d1ca04dc54c1546cef8" +checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" [[package]] name = "libloading" @@ -275,12 +256,9 @@ [[package]] name = "log" -version = "0.4.17" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] name = "mach" @@ -293,27 +271,27 @@ [[package]] name = "memchr" -version = "2.5.0" +version = "2.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" [[package]] name = "object" -version = "0.30.4" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" +checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe" dependencies = [ "crc32fast", - "hashbrown 0.13.2", - "indexmap 1.9.3", + "hashbrown 0.14.0", + "indexmap", "memchr", ] [[package]] name = "once_cell" -version = "1.16.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "regalloc2" @@ -357,7 +335,7 @@ "cranelift-native", "cranelift-object", "gimli", - "indexmap 2.0.0", + "indexmap", "libloading", "object", "smallvec", @@ -366,15 +344,15 @@ [[package]] name = "slice-group-by" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b634d87b960ab1a38c4fe143b508576f075e7c978bfad18217645ebfdfa2ec" +checksum = "826167069c09b99d56f31e9ae5c99049e932a98c9dc2dac47645b08dbbf76ba7" [[package]] name = "smallvec" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" [[package]] name = "stable_deref_trait" @@ -384,9 +362,9 @@ [[package]] name = "target-lexicon" -version = "0.12.5" +version = "0.12.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9410d0f6853b1d94f0e519fb95df60f29d2c1eff2d921ffdf01a4c8a3b54f12d" +checksum = "14c39fd04924ca3a864207c66fc2cd7d22d7c016007f9ce846cbb9326331930a" [[package]] name = "version_check" @@ -396,9 +374,9 @@ [[package]] name = "wasmtime-jit-icache-coherence" -version = "11.0.0" +version = "14.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e34eb67f0829a5614ec54716c8e0c9fe68fab7b9df3686c85f719c9d247f7169" +checksum = "0980a96b16abbdaf829858d2389697b1d6cfc6a903873fd74b7e47a6b1045584" dependencies = [ "cfg-if", "libc", @@ -438,9 +416,9 @@ [[package]] name = "windows-targets" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", @@ -453,42 +431,42 @@ [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -8,15 +8,15 @@ [dependencies] # These have to be in sync with each 
other -cranelift-codegen = { version = "0.98", features = ["unwind", "all-arch"] } -cranelift-frontend = { version = "0.98" } -cranelift-module = { version = "0.98" } -cranelift-native = { version = "0.98" } -cranelift-jit = { version = "0.98", optional = true } -cranelift-object = { version = "0.98" } +cranelift-codegen = { version = "0.101.2", default-features = false, features = ["std", "unwind", "all-arch"] } +cranelift-frontend = { version = "0.101.2" } +cranelift-module = { version = "0.101.2" } +cranelift-native = { version = "0.101.2" } +cranelift-jit = { version = "0.101.2", optional = true } +cranelift-object = { version = "0.101.2" } target-lexicon = "0.12.0" -gimli = { version = "0.27.2", default-features = false, features = ["write"]} -object = { version = "0.30.3", default-features = false, features = ["std", "read_core", "write", "archive", "coff", "elf", "macho", "pe"] } +gimli = { version = "0.28", default-features = false, features = ["write"]} +object = { version = "0.32", default-features = false, features = ["std", "read_core", "write", "archive", "coff", "elf", "macho", "pe"] } indexmap = "2.0.0" libloading = { version = "0.7.3", optional = true } @@ -35,9 +35,9 @@ [features] # Enable features not ready to be enabled when compiling as part of rustc -unstable-features = ["jit", "inline_asm"] +unstable-features = ["jit", "inline_asm_sym"] jit = ["cranelift-jit", "libloading"] -inline_asm = [] +inline_asm_sym = [] [package.metadata.rust-analyzer] rustc_private = true diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Readme.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Readme.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Readme.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/Readme.md 2023-12-21 16:55:28.000000000 +0000 @@ -8,7 +8,7 @@ ## Building and testing ```bash -$ git clone https://github.com/bjorn3/rustc_codegen_cranelift +$ git clone https://github.com/rust-lang/rustc_codegen_cranelift $ cd rustc_codegen_cranelift $ ./y.sh prepare $ ./y.sh build @@ -29,7 +29,7 @@ If you want to use `cargo clif build` instead of having to specify the full path to the `cargo-clif` executable, you can add the `bin` subdirectory of the extracted `dist` directory to your `PATH`. (tutorial [for Windows](https://stackoverflow.com/a/44272417), and [for Linux/MacOS](https://unix.stackexchange.com/questions/26047/how-to-correctly-add-a-path-to-path/26059#26059)). -[releases]: https://github.com/bjorn3/rustc_codegen_cranelift/releases/tag/dev +[releases]: https://github.com/rust-lang/rustc_codegen_cranelift/releases/tag/dev ## Usage @@ -60,18 +60,14 @@ 2. Run `python x.py setup` and choose option for compiler (`b`). 3. Build compiler and necessary tools: `python x.py build --stage=2 compiler library/std src/tools/rustdoc src/tools/rustfmt` * (Optional) You can also build cargo by adding `src/tools/cargo` to previous command. -4. Copy exectutable files from `./build/host/stage2-tools//release` -to `./build/host/stage2/bin/`. Note that you would need to do this every time you rebuilt `rust` repository. -5. Copy cargo from another toolchain: `cp $(rustup which cargo) .build//stage2/bin/cargo` - * Another option is to build it at step 3 and copy with other executables at step 4. -6. Link your new `rustc` to toolchain: `rustup toolchain link stage2 ./build/host/stage2/`. -7. (Windows only) compile the build system: `rustc +stage2 -O build_system/main.rs -o y.exe`. 
-8. You need to prefix every `./y.sh` (or `y` if you built `build_system/main.rs` as `y`) command by `rustup run stage2` to make cg_clif use your local changes in rustc. - +4. Copy cargo from a nightly toolchain: `cp $(rustup +nightly which cargo) ./build/host/stage2/bin/cargo`. Note that you would need to do this every time you rebuilt `rust` repository. +5. Link your new `rustc` to toolchain: `rustup toolchain link stage2 ./build/host/stage2/`. +6. (Windows only) compile the build system: `rustc +stage2 -O build_system/main.rs -o y.exe`. +7. You need to prefix every `./y.sh` (or `y` if you built `build_system/main.rs` as `y`) command by `rustup run stage2` to make cg_clif use your local changes in rustc. * `rustup run stage2 ./y.sh prepare` * `rustup run stage2 ./y.sh build` * (Optional) run tests: `rustup run stage2 ./y.sh test` -9. Now you can use your cg_clif build to compile other Rust programs, e.g. you can open any Rust crate and run commands like `$RustCheckoutDir/compiler/rustc_codegen_cranelift/dist/cargo-clif build --release`. +8. Now you can use your cg_clif build to compile other Rust programs, e.g. you can open any Rust crate and run commands like `$RustCheckoutDir/compiler/rustc_codegen_cranelift/dist/cargo-clif build --release`. ## Configuration @@ -80,9 +76,7 @@ ## Not yet supported -* Inline assembly ([no cranelift support](https://github.com/bytecodealliance/wasmtime/issues/1041)) - * On UNIX there is support for invoking an external assembler for `global_asm!` and `asm!`. -* SIMD ([tracked here](https://github.com/bjorn3/rustc_codegen_cranelift/issues/171), `std::simd` fully works, `std::arch` is partially supported) +* SIMD ([tracked here](https://github.com/rust-lang/rustc_codegen_cranelift/issues/171), `std::simd` fully works, `std::arch` is partially supported) * Unwinding on panics ([no cranelift support](https://github.com/bytecodealliance/wasmtime/issues/1677), `-Cpanic=abort` is enabled by default) ## License diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/build_backend.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/build_backend.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/build_backend.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/build_backend.rs 2023-12-21 16:55:28.000000000 +0000 @@ -20,6 +20,8 @@ let mut rustflags = rustflags_from_env("RUSTFLAGS"); + rustflags.push("-Zallow-features=rustc_private".to_owned()); + if is_ci() { // Deny warnings on CI rustflags.push("-Dwarnings".to_owned()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/build_sysroot.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/build_sysroot.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/build_sysroot.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/build_sysroot.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,4 @@ +use std::env; use std::fs; use std::path::{Path, PathBuf}; use std::process::Command; @@ -259,6 +260,14 @@ // inlining. 
rustflags.push("-Zinline-mir".to_owned()); } + if let Some(prefix) = env::var_os("CG_CLIF_STDLIB_REMAP_PATH_PREFIX") { + rustflags.push("--remap-path-prefix".to_owned()); + rustflags.push(format!( + "{}={}", + STDLIB_SRC.to_path(dirs).to_str().unwrap(), + prefix.to_str().unwrap() + )); + } compiler.rustflags.extend(rustflags); let mut build_cmd = STANDARD_LIBRARY.build(&compiler, dirs); maybe_incremental(&mut build_cmd); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/main.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/main.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/main.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/main.rs 2023-12-21 16:55:28.000000000 +0000 @@ -55,7 +55,7 @@ } fn main() { - if env::var("RUST_BACKTRACE").is_err() { + if env::var_os("RUST_BACKTRACE").is_none() { env::set_var("RUST_BACKTRACE", "1"); } env::set_var("CG_CLIF_DISABLE_INCR_CACHE", "1"); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/prepare.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/prepare.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/prepare.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/prepare.rs 2023-12-21 16:55:28.000000000 +0000 @@ -122,10 +122,10 @@ if download_dir.exists() { let actual_hash = format!("{:016x}", hash_dir(&download_dir)); if actual_hash == self.content_hash { - println!("[FRESH] {}", download_dir.display()); + eprintln!("[FRESH] {}", download_dir.display()); return; } else { - println!( + eprintln!( "Mismatched content hash for {download_dir}: {actual_hash} != {content_hash}. 
Downloading again.", download_dir = download_dir.display(), content_hash = self.content_hash, @@ -143,6 +143,7 @@ RelPath::PATCHES.to_path(dirs).join(format!("{}-lock.toml", self.patch_name)); let target_lockfile = download_dir.join("Cargo.lock"); if source_lockfile.exists() { + assert!(!target_lockfile.exists()); fs::copy(source_lockfile, target_lockfile).unwrap(); } else { assert!(target_lockfile.exists()); @@ -150,7 +151,7 @@ let actual_hash = format!("{:016x}", hash_dir(&download_dir)); if actual_hash != self.content_hash { - println!( + eprintln!( "Download of {download_dir} failed with mismatched content hash: {actual_hash} != {content_hash}", download_dir = download_dir.display(), content_hash = self.content_hash, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,7 +9,7 @@ use crate::prepare::{apply_patches, GitRepo}; use crate::rustc_info::get_default_sysroot; use crate::shared_utils::rustflags_from_env; -use crate::utils::{spawn_and_wait, spawn_and_wait_with_input, CargoProject, Compiler, LogGroup}; +use crate::utils::{spawn_and_wait, CargoProject, Compiler, LogGroup}; use crate::{CodegenBackend, SysrootKind}; static BUILD_EXAMPLE_OUT_DIR: RelPath = RelPath::BUILD.join("example"); @@ -99,15 +99,14 @@ TestCase::build_bin_and_run("aot.mod_bench", "example/mod_bench.rs", &[]), TestCase::build_bin_and_run("aot.issue-72793", "example/issue-72793.rs", &[]), TestCase::build_bin("aot.issue-59326", "example/issue-59326.rs"), + TestCase::build_bin_and_run("aot.neon", "example/neon.rs", &[]), ]; -// FIXME(rust-random/rand#1293): Newer rand versions fail to test on Windows. Update once this is -// fixed. 
pub(crate) static RAND_REPO: GitRepo = GitRepo::github( "rust-random", "rand", - "50b9a447410860af8d6db9a208c3576886955874", - "446203b96054891e", + "9a02c819cc1e4ec6959ae25eafbb5cf6acb68234", + "4934f0afb1d1c2ca", "rand", ); @@ -116,8 +115,8 @@ pub(crate) static REGEX_REPO: GitRepo = GitRepo::github( "rust-lang", "regex", - "32fed9429eafba0ae92a64b01796a0c5a75b88c8", - "fcc4df7c5b902633", + "061ee815ef2c44101dba7b0b124600fcb03c1912", + "dc26aefbeeac03ca", "regex", ); @@ -126,8 +125,8 @@ pub(crate) static PORTABLE_SIMD_REPO: GitRepo = GitRepo::github( "rust-lang", "portable-simd", - "7c7dbe0c505ccbc02ff30c1e37381ab1d47bf46f", - "5bcc9c544f6fa7bd", + "4825b2a64d765317066948867e8714674419359b", + "9e67d07c00f5fb0b", "portable-simd", ); @@ -180,40 +179,6 @@ spawn_and_wait(build_cmd); } }), - TestCase::custom("test.regex-shootout-regex-dna", &|runner| { - REGEX_REPO.patch(&runner.dirs); - - REGEX.clean(&runner.dirs); - - let mut build_cmd = REGEX.build(&runner.target_compiler, &runner.dirs); - build_cmd.arg("--example").arg("shootout-regex-dna"); - spawn_and_wait(build_cmd); - - if runner.is_native { - let mut run_cmd = REGEX.run(&runner.target_compiler, &runner.dirs); - run_cmd.arg("--example").arg("shootout-regex-dna"); - - let input = fs::read_to_string( - REGEX.source_dir(&runner.dirs).join("examples").join("regexdna-input.txt"), - ) - .unwrap(); - let expected = fs::read_to_string( - REGEX.source_dir(&runner.dirs).join("examples").join("regexdna-output.txt"), - ) - .unwrap(); - - let output = spawn_and_wait_with_input(run_cmd, input); - - let output_matches = expected.lines().eq(output.lines()); - if !output_matches { - println!("Output files don't match!"); - println!("Expected Output:\n{}", expected); - println!("Actual Output:\n{}", output); - - std::process::exit(1); - } - } - }), TestCase::custom("test.regex", &|runner| { REGEX_REPO.patch(&runner.dirs); @@ -223,7 +188,22 @@ let mut run_cmd = REGEX.test(&runner.target_compiler, &runner.dirs); // regex-capi and regex-debug don't have any tests. Nor do they contain any code // that is useful to test with cg_clif. Skip building them to reduce test time. - run_cmd.args(["-p", "regex", "-p", "regex-syntax", "--", "-q"]); + run_cmd.args([ + "-p", + "regex", + "-p", + "regex-syntax", + "--release", + "--all-targets", + "--", + "-q", + ]); + spawn_and_wait(run_cmd); + + let mut run_cmd = REGEX.test(&runner.target_compiler, &runner.dirs); + // don't run integration tests for regex-autonata. they take like 2min each without + // much extra coverage of simd usage. 
+ run_cmd.args(["-p", "regex-automata", "--release", "--lib", "--", "-q"]); spawn_and_wait(run_cmd); } else { eprintln!("Cross-Compiling: Not running tests"); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/utils.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/utils.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/utils.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/build_system/utils.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,8 +1,8 @@ use std::env; use std::fs; -use std::io::{self, Write}; +use std::io; use std::path::{Path, PathBuf}; -use std::process::{self, Command, Stdio}; +use std::process::{self, Command}; use std::sync::atomic::{AtomicBool, Ordering}; use crate::path::{Dirs, RelPath}; @@ -42,12 +42,22 @@ "/usr/s390x-linux-gnu".to_owned(), ]; } + "riscv64gc-unknown-linux-gnu" => { + // We are cross-compiling for riscv64. Use the correct linker and run tests in qemu. + self.rustflags.push("-Clinker=riscv64-linux-gnu-gcc".to_owned()); + self.rustdocflags.push("-Clinker=riscv64-linux-gnu-gcc".to_owned()); + self.runner = vec![ + "qemu-riscv64".to_owned(), + "-L".to_owned(), + "/usr/riscv64-linux-gnu".to_owned(), + ]; + } "x86_64-pc-windows-gnu" => { // We are cross-compiling for Windows. Run tests in wine. self.runner = vec!["wine".to_owned()]; } _ => { - println!("Unknown non-native platform"); + eprintln!("Unknown non-native platform"); } } } @@ -197,7 +207,9 @@ #[track_caller] pub(crate) fn spawn_and_wait(mut cmd: Command) { - if !cmd.spawn().unwrap().wait().unwrap().success() { + let status = cmd.spawn().unwrap().wait().unwrap(); + if !status.success() { + eprintln!("{cmd:?} exited with status {:?}", status); process::exit(1); } } @@ -207,38 +219,17 @@ pub(crate) fn retry_spawn_and_wait(tries: u64, mut cmd: Command) { for i in 1..tries + 1 { if i != 1 { - println!("Command failed. Attempt {i}/{tries}:"); + eprintln!("Command failed. 
Attempt {i}/{tries}:"); } if cmd.spawn().unwrap().wait().unwrap().success() { return; } std::thread::sleep(std::time::Duration::from_secs(i * 5)); } - println!("The command has failed after {tries} attempts."); + eprintln!("The command has failed after {tries} attempts."); process::exit(1); } -#[track_caller] -pub(crate) fn spawn_and_wait_with_input(mut cmd: Command, input: String) -> String { - let mut child = cmd - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .spawn() - .expect("Failed to spawn child process"); - - let mut stdin = child.stdin.take().expect("Failed to open stdin"); - std::thread::spawn(move || { - stdin.write_all(input.as_bytes()).expect("Failed to write to stdin"); - }); - - let output = child.wait_with_output().expect("Failed to read stdout"); - if !output.status.success() { - process::exit(1); - } - - String::from_utf8(output.stdout).unwrap() -} - pub(crate) fn remove_dir_if_exists(path: &Path) { match fs::remove_dir_all(&path) { Ok(()) => {} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/config.txt rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/config.txt --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/config.txt 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/config.txt 2023-12-21 16:55:28.000000000 +0000 @@ -42,10 +42,10 @@ aot.mod_bench aot.issue-72793 aot.issue-59326 +aot.neon testsuite.extended_sysroot test.rust-random/rand test.libcore -test.regex-shootout-regex-dna test.regex test.portable-simd diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/mini_core.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/mini_core.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/mini_core.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/mini_core.rs 2023-12-21 16:55:28.000000000 +0000 @@ -685,6 +685,12 @@ #[rustc_builtin_macro] #[rustc_macro_transparency = "semitransparent"] +pub macro asm() { + /* compiler built-in */ +} + +#[rustc_builtin_macro] +#[rustc_macro_transparency = "semitransparent"] pub macro global_asm() { /* compiler built-in */ } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/mini_core_hello_world.rs 2023-12-21 16:55:28.000000000 +0000 @@ -353,6 +353,17 @@ let f = V([0.0, 1.0]); let _a = f.0[0]; + + stack_val_align(); +} + +#[inline(never)] +fn stack_val_align() { + #[repr(align(8192))] + struct Foo(u8); + + let a = Foo(0); + assert_eq!(&a as *const Foo as usize % 8192, 0); } #[cfg(all( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/neon.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/neon.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/neon.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/neon.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,234 @@ +// Most of these tests are copied from 
https://github.com/japaric/stdsimd/blob/0f4413d01c4f0c3ffbc5a69e9a37fbc7235b31a9/coresimd/arm/neon.rs + +#![feature(portable_simd)] + +#[cfg(target_arch = "aarch64")] +use std::arch::aarch64::*; +use std::mem::transmute; +use std::simd::*; + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmin_s8() { + let a = i8x8::from([1, -2, 3, -4, 5, 6, 7, 8]); + let b = i8x8::from([0, 3, 2, 5, 4, 7, 6, 9]); + let e = i8x8::from([-2, -4, 5, 7, 0, 2, 4, 6]); + let r: i8x8 = transmute(vpmin_s8(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmin_s16() { + let a = i16x4::from([1, 2, 3, -4]); + let b = i16x4::from([0, 3, 2, 5]); + let e = i16x4::from([1, -4, 0, 2]); + let r: i16x4 = transmute(vpmin_s16(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmin_s32() { + let a = i32x2::from([1, -2]); + let b = i32x2::from([0, 3]); + let e = i32x2::from([-2, 0]); + let r: i32x2 = transmute(vpmin_s32(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmin_u8() { + let a = u8x8::from([1, 2, 3, 4, 5, 6, 7, 8]); + let b = u8x8::from([0, 3, 2, 5, 4, 7, 6, 9]); + let e = u8x8::from([1, 3, 5, 7, 0, 2, 4, 6]); + let r: u8x8 = transmute(vpmin_u8(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmin_u16() { + let a = u16x4::from([1, 2, 3, 4]); + let b = u16x4::from([0, 3, 2, 5]); + let e = u16x4::from([1, 3, 0, 2]); + let r: u16x4 = transmute(vpmin_u16(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmin_u32() { + let a = u32x2::from([1, 2]); + let b = u32x2::from([0, 3]); + let e = u32x2::from([1, 0]); + let r: u32x2 = transmute(vpmin_u32(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmin_f32() { + let a = f32x2::from([1., -2.]); + let b = f32x2::from([0., 3.]); + let e = f32x2::from([-2., 0.]); + let r: f32x2 = transmute(vpmin_f32(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmax_s8() { + let a = i8x8::from([1, -2, 3, -4, 5, 6, 7, 8]); + let b = i8x8::from([0, 3, 2, 5, 4, 7, 6, 9]); + let e = i8x8::from([1, 3, 6, 8, 3, 5, 7, 9]); + let r: i8x8 = transmute(vpmax_s8(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmax_s16() { + let a = i16x4::from([1, 2, 3, -4]); + let b = i16x4::from([0, 3, 2, 5]); + let e = i16x4::from([2, 3, 3, 5]); + let r: i16x4 = transmute(vpmax_s16(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmax_s32() { + let a = i32x2::from([1, -2]); + let b = i32x2::from([0, 3]); + let e = i32x2::from([1, 3]); + let r: i32x2 = transmute(vpmax_s32(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmax_u8() { + let a = u8x8::from([1, 2, 3, 4, 5, 6, 7, 8]); + let b = u8x8::from([0, 3, 2, 5, 4, 7, 6, 9]); + let e = u8x8::from([2, 4, 6, 8, 3, 5, 7, 9]); + let r: u8x8 = transmute(vpmax_u8(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmax_u16() { + let a = u16x4::from([1, 2, 3, 4]); + let b = u16x4::from([0, 3, 2, 5]); + let e = u16x4::from([2, 4, 3, 5]); + let r: u16x4 = transmute(vpmax_u16(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn 
test_vpmax_u32() { + let a = u32x2::from([1, 2]); + let b = u32x2::from([0, 3]); + let e = u32x2::from([2, 3]); + let r: u32x2 = transmute(vpmax_u32(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpmax_f32() { + let a = f32x2::from([1., -2.]); + let b = f32x2::from([0., 3.]); + let e = f32x2::from([1., 3.]); + let r: f32x2 = transmute(vpmax_f32(transmute(a), transmute(b))); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpadd_s16() { + let a = i16x4::from([1, 2, 3, 4]); + let b = i16x4::from([0, -1, -2, -3]); + let r: i16x4 = transmute(vpadd_s16(transmute(a), transmute(b))); + let e = i16x4::from([3, 7, -1, -5]); + assert_eq!(r, e); +} +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpadd_s32() { + let a = i32x2::from([1, 2]); + let b = i32x2::from([0, -1]); + let r: i32x2 = transmute(vpadd_s32(transmute(a), transmute(b))); + let e = i32x2::from([3, -1]); + assert_eq!(r, e); +} +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpadd_s8() { + let a = i8x8::from([1, 2, 3, 4, 5, 6, 7, 8]); + let b = i8x8::from([0, -1, -2, -3, -4, -5, -6, -7]); + let r: i8x8 = transmute(vpadd_s8(transmute(a), transmute(b))); + let e = i8x8::from([3, 7, 11, 15, -1, -5, -9, -13]); + assert_eq!(r, e); +} +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpadd_u16() { + let a = u16x4::from([1, 2, 3, 4]); + let b = u16x4::from([30, 31, 32, 33]); + let r: u16x4 = transmute(vpadd_u16(transmute(a), transmute(b))); + let e = u16x4::from([3, 7, 61, 65]); + assert_eq!(r, e); +} +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpadd_u32() { + let a = u32x2::from([1, 2]); + let b = u32x2::from([30, 31]); + let r: u32x2 = transmute(vpadd_u32(transmute(a), transmute(b))); + let e = u32x2::from([3, 61]); + assert_eq!(r, e); +} +#[cfg(target_arch = "aarch64")] +unsafe fn test_vpadd_u8() { + let a = u8x8::from([1, 2, 3, 4, 5, 6, 7, 8]); + let b = u8x8::from([30, 31, 32, 33, 34, 35, 36, 37]); + let r: u8x8 = transmute(vpadd_u8(transmute(a), transmute(b))); + let e = u8x8::from([3, 7, 11, 15, 61, 65, 69, 73]); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vqsub_u8() { + let a = u8x8::from([1, 2, 3, 4, 5, 6, 7, 0xff]); + let b = u8x8::from([30, 1, 1, 1, 34, 0xff, 36, 37]); + let r: u8x8 = transmute(vqsub_u8(transmute(a), transmute(b))); + let e = u8x8::from([0, 1, 2, 3, 0, 0, 0, 218]); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +unsafe fn test_vqadd_u8() { + let a = u8x8::from([1, 2, 3, 4, 5, 6, 7, 0xff]); + let b = u8x8::from([30, 1, 1, 1, 34, 0xff, 36, 37]); + let r: u8x8 = transmute(vqadd_u8(transmute(a), transmute(b))); + let e = u8x8::from([31, 3, 4, 5, 39, 0xff, 43, 0xff]); + assert_eq!(r, e); +} + +#[cfg(target_arch = "aarch64")] +fn main() { + unsafe { + test_vpmin_s8(); + test_vpmin_s16(); + test_vpmin_s32(); + test_vpmin_u8(); + test_vpmin_u16(); + test_vpmin_u32(); + test_vpmin_f32(); + test_vpmax_s8(); + test_vpmax_s16(); + test_vpmax_s32(); + test_vpmax_u8(); + test_vpmax_u16(); + test_vpmax_u32(); + test_vpmax_f32(); + + test_vpadd_s16(); + test_vpadd_s32(); + test_vpadd_s8(); + test_vpadd_u16(); + test_vpadd_u32(); + test_vpadd_u8(); + + test_vqsub_u8(); + test_vqadd_u8(); + } +} + +#[cfg(not(target_arch = "aarch64"))] +fn main() {} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/std_example.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/std_example.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/std_example.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/example/std_example.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,7 @@ #![feature( core_intrinsics, - generators, - generator_trait, + coroutines, + coroutine_trait, is_sorted, repr_simd, tuple_trait, @@ -12,7 +12,7 @@ use std::arch::x86_64::*; use std::hint::black_box; use std::io::Write; -use std::ops::Generator; +use std::ops::Coroutine; fn main() { println!("{:?}", std::env::args().collect::>()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0001-portable-simd-Allow-internal-features.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0001-portable-simd-Allow-internal-features.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0001-portable-simd-Allow-internal-features.patch 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0001-portable-simd-Allow-internal-features.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -From fcf75306d88e533b83eaff3f8d0ab9f307e8a84d Mon Sep 17 00:00:00 2001 -From: bjorn3 <17426603+bjorn3@users.noreply.github.com> -Date: Wed, 9 Aug 2023 10:01:17 +0000 -Subject: [PATCH] Allow internal features - ---- - crates/core_simd/src/lib.rs | 1 + - 1 file changed, 1 insertion(+) - -diff --git a/crates/core_simd/src/lib.rs b/crates/core_simd/src/lib.rs -index fde406b..b386116 100644 ---- a/crates/core_simd/src/lib.rs -+++ b/crates/core_simd/src/lib.rs -@@ -19,6 +19,7 @@ - #![warn(missing_docs, clippy::missing_inline_in_public_items)] // basically all items, really - #![deny(unsafe_op_in_unsafe_fn, clippy::undocumented_unsafe_blocks)] - #![unstable(feature = "portable_simd", issue = "86656")] -+#![allow(internal_features)] - //! Portable SIMD module. 
- - #[path = "mod.rs"] --- -2.34.1 - diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0002-rand-Disable-failing-test.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0002-rand-Disable-failing-test.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0002-rand-Disable-failing-test.patch 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0002-rand-Disable-failing-test.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -From a8fb97120d71252538b6b026695df40d02696bdb Mon Sep 17 00:00:00 2001 -From: bjorn3 -Date: Sat, 15 Aug 2020 20:04:38 +0200 -Subject: [PATCH] [rand] Disable failing test - ---- - src/distributions/uniform.rs | 1 + - 1 file changed, 1 insertion(+), 0 deletions(-) - -diff --git a/src/distributions/uniform.rs b/src/distributions/uniform.rs -index 480b859..c80bb6f 100644 ---- a/src/distributions/uniform.rs -+++ b/src/distributions/uniform.rs -@@ -1314,6 +1314,7 @@ mod tests { - not(target_arch = "wasm32"), - not(target_arch = "asmjs") - ))] -+ #[ignore] // Requires unwinding - fn test_float_assertions() { - use super::SampleUniform; - use std::panic::catch_unwind; --- -2.20.1 - diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0003-rand-Disable-rand-tests-on-mingw.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0003-rand-Disable-rand-tests-on-mingw.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0003-rand-Disable-rand-tests-on-mingw.patch 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0003-rand-Disable-rand-tests-on-mingw.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,47 +0,0 @@ -From eec874c889b8d24e5ad50faded24288150f057b1 Mon Sep 17 00:00:00 2001 -From: Afonso Bordado -Date: Tue, 27 Sep 2022 08:13:58 +0100 -Subject: [PATCH] Disable rand tests on mingw - ---- - rand_distr/src/pareto.rs | 2 ++ - rand_distr/tests/value_stability.rs | 4 ++++ - 2 files changed, 6 insertions(+) - -diff --git a/rand_distr/src/pareto.rs b/rand_distr/src/pareto.rs -index 217899e..9cedeb7 100644 ---- a/rand_distr/src/pareto.rs -+++ b/rand_distr/src/pareto.rs -@@ -107,6 +107,8 @@ mod tests { - } - - #[test] -+ // This is broken on x86_64-pc-windows-gnu presumably due to a broken powf implementation -+ #[cfg_attr(all(target_os = "windows", target_env = "gnu"), ignore)] - fn value_stability() { - fn test_samples>( - distr: D, thresh: F, expected: &[F], -diff --git a/rand_distr/tests/value_stability.rs b/rand_distr/tests/value_stability.rs -index 192ba74..0101ace 100644 ---- a/rand_distr/tests/value_stability.rs -+++ b/rand_distr/tests/value_stability.rs -@@ -72,6 +72,8 @@ fn unit_disc_stability() { - } - - #[test] -+// This is broken on x86_64-pc-windows-gnu -+#[cfg_attr(all(target_os = "windows", target_env = "gnu"), ignore)] - fn pareto_stability() { - test_samples(213, Pareto::new(1.0, 1.0).unwrap(), &[ - 1.0423688f32, 2.1235929, 4.132709, 1.4679428, -@@ -143,6 +145,8 @@ fn inverse_gaussian_stability() { - } - - #[test] -+// This is broken on x86_64-pc-windows-gnu -+#[cfg_attr(all(target_os = "windows", target_env = "gnu"), ignore)] - fn gamma_stability() { - // Gamma has 3 cases: shape == 1, shape < 1, shape > 1 - test_samples(223, Gamma::new(1.0, 5.0).unwrap(), &[ --- -2.25.1 diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0027-coretests-128bit-atomic-operations.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0027-coretests-128bit-atomic-operations.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0027-coretests-128bit-atomic-operations.patch 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/0027-coretests-128bit-atomic-operations.patch 2023-12-21 16:55:28.000000000 +0000 @@ -19,9 +19,9 @@ #![feature(const_option_ext)] #![feature(const_result)] -#![cfg_attr(target_has_atomic = "128", feature(integer_atomics))] + #![cfg_attr(test, feature(cfg_match))] #![feature(int_roundings)] #![feature(slice_group_by)] - #![feature(split_array)] diff --git a/atomic.rs b/atomic.rs index b735957..ea728b6 100644 --- a/atomic.rs diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/portable-simd-lock.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/portable-simd-lock.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/portable-simd-lock.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/portable-simd-lock.toml 1970-01-01 00:00:00.000000000 +0000 @@ -1,304 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bumpalo" -version = "3.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" - -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "console_error_panic_hook" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" -dependencies = [ - "cfg-if", - "wasm-bindgen", -] - -[[package]] -name = "core_simd" -version = "0.1.0" -dependencies = [ - "proptest", - "std_float", - "test_helpers", - "wasm-bindgen", - "wasm-bindgen-test", -] - -[[package]] -name = "js-sys" -version = "0.3.63" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f37a4a5928311ac501dee68b3c7613a1037d0edb30c8e5427bd832d55d1b790" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "log" -version = "0.4.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de" - -[[package]] -name = "num-traits" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" 
-dependencies = [ - "autocfg", -] - -[[package]] -name = "once_cell" -version = "1.17.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9670a07f94779e00908f3e686eab508878ebb390ba6e604d3a284c00e8d0487b" - -[[package]] -name = "ppv-lite86" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" - -[[package]] -name = "proc-macro2" -version = "1.0.59" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "proptest" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12e6c80c1139113c28ee4670dc50cc42915228b51f56a9e407f0ec60f966646f" -dependencies = [ - "bitflags", - "byteorder", - "num-traits", - "rand", - "rand_chacha", - "rand_xorshift", -] - -[[package]] -name = "quote" -version = "1.0.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "rand_chacha", - "rand_core", - "rand_hc", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core", -] - -[[package]] -name = "rand_xorshift" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77d416b86801d23dde1aa643023b775c3a462efc0ed96443add11546cdf1dca8" -dependencies = [ - "rand_core", -] - -[[package]] -name = "scoped-tls" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" - -[[package]] -name = "std_float" -version = "0.1.0" -dependencies = [ - "core_simd", -] - -[[package]] -name = "syn" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "test_helpers" -version = "0.1.0" -dependencies = [ - "proptest", -] - -[[package]] -name = "unicode-ident" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0" - -[[package]] -name = "wasm-bindgen" -version = "0.2.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" -dependencies = [ - "cfg-if", - "wasm-bindgen-macro", -] - -[[package]] -name = 
"wasm-bindgen-backend" -version = "0.2.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b04bc93f9d6bdee709f6bd2118f57dd6679cf1176a1af464fca3ab0d66d8fb" -dependencies = [ - "bumpalo", - "log", - "once_cell", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e" -dependencies = [ - "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14d6b024f1a526bb0234f52840389927257beb670610081360e5a03c5df9c258" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93" - -[[package]] -name = "wasm-bindgen-test" -version = "0.3.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9e636f3a428ff62b3742ebc3c70e254dfe12b8c2b469d688ea59cdd4abcf502" -dependencies = [ - "console_error_panic_hook", - "js-sys", - "scoped-tls", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm-bindgen-test-macro", -] - -[[package]] -name = "wasm-bindgen-test-macro" -version = "0.3.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f18c1fad2f7c4958e7bcce014fa212f59a65d5e3721d0f77e6c0b27ede936ba3" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "web-sys" -version = "0.3.63" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bdd9ef4e984da1187bf8110c5cf5b845fbc87a23602cdf912386a76fcd3a7c2" -dependencies = [ - "js-sys", - "wasm-bindgen", -] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/rand-lock.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/rand-lock.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/rand-lock.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/rand-lock.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,6 +3,32 @@ version = 3 [[package]] +name = "aho-corasick" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f2135563fb5c609d2b2b87c1e8ce7bc41b0b45430fa9661f457981503dd5bf0" +dependencies = [ + "memchr", +] + +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + +[[package]] name = "autocfg" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ 
-29,12 +55,114 @@ ] [[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bumpalo" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] +name = "ciborium" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" + +[[package]] +name = "ciborium-ll" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] +name = "clap" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" +dependencies = [ + "bitflags", + "clap_lex", + "indexmap", + "textwrap", +] + +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + +[[package]] +name = "criterion" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb" +dependencies = [ + "anes", + "atty", + "cast", + "ciborium", + "clap", + "criterion-plot", + "itertools", + "lazy_static", + "num-traits", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools", +] + +[[package]] name = "crossbeam-channel" version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -57,9 +185,9 @@ [[package]] name = "crossbeam-epoch" -version = "0.9.14" +version = "0.9.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", "cfg-if", @@ -70,14 +198,49 @@ [[package]] name = "crossbeam-utils" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = 
[ "cfg-if", ] [[package]] +name = "darling" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 1.0.109", +] + +[[package]] +name = "darling_macro" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core", + "quote", + "syn 1.0.109", +] + +[[package]] name = "easy-cast" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -88,9 +251,9 @@ [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "float-ord" @@ -99,10 +262,16 @@ checksum = "8ce81f49ae8a0482e4c55ea62ebbd7e5a686af544c00b9d090bba3ff9be97b3d" [[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] name = "getrandom" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", "libc", @@ -110,25 +279,83 @@ ] [[package]] +name = "half" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] name = "hermit-abi" -version = "0.2.6" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" dependencies = [ "libc", ] [[package]] +name = "hermit-abi" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] 
name = "itoa" -version = "1.0.6" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "js-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.144" +version = "0.2.148" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1" +checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" [[package]] name = "libm" @@ -138,24 +365,30 @@ [[package]] name = "log" -version = "0.4.18" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + +[[package]] +name = "memchr" +version = "2.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de" +checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" [[package]] name = "memoffset" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" dependencies = [ "autocfg", ] [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", "libm", @@ -163,15 +396,61 @@ [[package]] name = "num_cpus" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.2", "libc", ] [[package]] +name = "once_cell" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "oorandom" +version = "11.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" + +[[package]] +name = "os_str_bytes" +version = "6.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d5d9eb14b174ee9aa2ef96dc2b94637a2d4b6e7cb873c7e171f0c20c6cf3eac" + +[[package]] +name = "plotters" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" + +[[package]] +name = "plotters-svg" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" +dependencies = [ + "plotters-backend", +] + +[[package]] name = "ppv-lite86" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -179,18 +458,18 @@ [[package]] name = "proc-macro2" -version = "1.0.59" +version = "1.0.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b" +checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.28" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" dependencies = [ "proc-macro2", ] @@ -200,6 +479,7 @@ version = "0.9.0" dependencies = [ "bincode", + "criterion", "libc", "log", "rand_chacha", @@ -236,6 +516,7 @@ "rand", "rand_pcg", "serde", + "serde_with", "special", ] @@ -271,42 +552,80 @@ ] [[package]] +name = "regex" +version = "1.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "697061221ea1b4a94a624f67d0ae2bfe4e22b8a17b6a192afb11046542cc8c47" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2f401f4955220693b56f8ec66ee9c78abffd8d1c4f23dc41a23839eb88f0795" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" + +[[package]] name = "ryu" -version = "1.0.13" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "same-file" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" -version = "1.0.163" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.163" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e" +checksum = 
"4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.37", ] [[package]] name = "serde_json" -version = "1.0.96" +version = "1.0.107" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" dependencies = [ "itoa", "ryu", @@ -314,6 +633,28 @@ ] [[package]] +name = "serde_with" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +dependencies = [ + "serde", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] name = "special" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -323,10 +664,27 @@ ] [[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] name = "syn" -version = "2.0.18" +version = "2.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" +checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8" dependencies = [ "proc-macro2", "quote", @@ -334,13 +692,134 @@ ] [[package]] +name = "textwrap" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" + +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] name = "unicode-ident" -version = "1.0.9" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "walkdir" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" +dependencies = [ + "same-file", + "winapi-util", +] [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.87" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.37", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.37", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" + +[[package]] +name = "web-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/regex-lock.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/regex-lock.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/regex-lock.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/regex-lock.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,51 +4,49 @@ [[package]] name = "aho-corasick" -version = "0.7.20" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" +checksum = "0f2135563fb5c609d2b2b87c1e8ce7bc41b0b45430fa9661f457981503dd5bf0" dependencies = [ + "log", "memchr", ] [[package]] -name = "bitflags" -version = "1.3.2" +name = "anyhow" +version = "1.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" [[package]] -name = "bzip2" -version = "0.3.3" +name = "arbitrary" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"42b7c3cbf0fa9c1b82308d57191728ca0256cb821220f4e2fd410a72ade26e3b" +checksum = "e2d098ff73c1ca148721f37baad5ea6a465a13f9573aba8641fbbbae8164a54e" dependencies = [ - "bzip2-sys", - "libc", + "derive_arbitrary", ] [[package]] -name = "bzip2-sys" -version = "0.1.11+1.0.8" +name = "atty" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ - "cc", + "hermit-abi", "libc", - "pkg-config", + "winapi", ] [[package]] -name = "cc" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" - -[[package]] -name = "cfg-if" -version = "0.1.10" +name = "bstr" +version = "1.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" +checksum = "4c2f7349907b712260e64b0afe2f84692af14a454be26187d9df565c7f69266a" +dependencies = [ + "memchr", + "serde", +] [[package]] name = "cfg-if" @@ -57,114 +55,129 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] -name = "docopt" -version = "1.1.1" +name = "derive_arbitrary" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f3f119846c823f9eafcf953a8f6ffb6ed69bf6240883261a7f13b634579a51f" +checksum = "53e0efad4403bfc52dc201159c4b842a246a14b98c64b55dfd0f2d89729dfeb8" dependencies = [ - "lazy_static", - "regex 1.8.3", - "serde", - "strsim", + "proc-macro2", + "quote", + "syn", ] [[package]] -name = "filetime" -version = "0.2.21" +name = "doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + +[[package]] +name = "env_logger" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cbc844cecaee9d4443931972e1289c8ff485cb4cc2767cb03ca139ed6885153" +checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" dependencies = [ - "cfg-if 1.0.0", - "libc", - "redox_syscall", - "windows-sys", + "atty", + "humantime", + "log", + "termcolor", ] [[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] name = "getrandom" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "wasi", ] [[package]] -name = "lazy_static" -version = "1.4.0" +name = "hashbrown" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" [[package]] -name = "libc" -version = "0.2.144" +name = "hermit-abi" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1" - -[[package]] -name = "libpcre-sys" -version = "0.2.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ff3dd28ba96d6fe6752882f2f1b25ba8e1646448e79042442347cf3a92a6666" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" dependencies = [ - "bzip2", "libc", - "pkg-config", - "tar", ] [[package]] -name = "memchr" -version = "2.5.0" +name = "humantime" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] -name = "memmap" -version = "0.6.2" +name = "indexmap" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2ffa2c986de11a9df78620c01eeaaf27d94d3ff02bf81bfcca953102dd0c6ff" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" dependencies = [ - "libc", - "winapi", + "equivalent", + "hashbrown", ] [[package]] -name = "onig" -version = "3.2.2" +name = "lexopt" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baff4b617f7df3d896f97fe922b64817f6cd9a756bb81d40f8883f2f66dcb401" + +[[package]] +name = "libc" +version = "0.2.148" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5eeb268a4620c74ea5768c6d2ccd492d60a47a8754666b91a46bfc35cd4d1ba" +checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" + +[[package]] +name = "log" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + +[[package]] +name = "memchr" +version = "2.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" dependencies = [ - "bitflags", - "lazy_static", - "libc", - "onig_sys", + "log", ] [[package]] -name = "onig_sys" -version = "68.2.1" +name = "memmap2" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "195ebddbb56740be48042ca117b8fb6e0d99fe392191a9362d82f5f69e510379" +checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" dependencies = [ - "cc", "libc", - "pkg-config", ] [[package]] -name = "pkg-config" -version = "0.3.27" +name = "once_cell" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "proc-macro2" -version = "1.0.59" +version = "1.0.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b" +checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328" dependencies = [ "unicode-ident", ] @@ -180,9 +193,9 @@ [[package]] name = "quote" -version = "1.0.28" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" dependencies = [ "proc-macro2", ] @@ -206,95 +219,101 @@ ] [[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags", 
-] - -[[package]] name = "regex" -version = "1.7.2" +version = "1.9.5" dependencies = [ "aho-corasick", - "lazy_static", + "anyhow", + "doc-comment", + "env_logger", "memchr", + "once_cell", "quickcheck", - "rand", - "regex-syntax 0.6.29", + "regex-automata", + "regex-syntax", + "regex-test", ] [[package]] -name = "regex" -version = "1.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81ca098a9821bd52d6b24fd8b10bd081f47d39c22778cafaa75a2857a62c6390" +name = "regex-automata" +version = "0.3.8" dependencies = [ - "regex-syntax 0.7.2", + "aho-corasick", + "anyhow", + "bstr", + "doc-comment", + "env_logger", + "log", + "memchr", + "quickcheck", + "regex-syntax", + "regex-test", ] [[package]] -name = "regex-benchmark" +name = "regex-cli" version = "0.1.0" dependencies = [ - "cc", - "cfg-if 0.1.10", - "docopt", - "lazy_static", - "libc", - "libpcre-sys", - "memmap", - "onig", - "pkg-config", - "regex 1.7.2", - "regex-syntax 0.6.29", - "serde", + "anyhow", + "bstr", + "lexopt", + "log", + "memmap2", + "regex", + "regex-automata", + "regex-lite", + "regex-syntax", + "tabwriter", + "textwrap", ] [[package]] -name = "regex-debug" +name = "regex-lite" version = "0.1.0" dependencies = [ - "docopt", - "regex 1.7.2", - "regex-syntax 0.6.29", - "serde", + "anyhow", + "regex-test", ] [[package]] name = "regex-syntax" -version = "0.6.29" +version = "0.7.5" +dependencies = [ + "arbitrary", +] [[package]] -name = "regex-syntax" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" +name = "regex-test" +version = "0.1.0" +dependencies = [ + "anyhow", + "bstr", + "serde", + "toml", +] [[package]] name = "rure" version = "0.2.2" dependencies = [ "libc", - "regex 1.7.2", + "regex", ] [[package]] name = "serde" -version = "1.0.163" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.163" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", @@ -302,16 +321,19 @@ ] [[package]] -name = "strsim" -version = "0.10.0" +name = "serde_spanned" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" +dependencies = [ + "serde", +] [[package]] name = "syn" -version = "2.0.18" +version = "2.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" +checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8" dependencies = [ "proc-macro2", "quote", @@ -319,121 +341,117 @@ ] [[package]] -name = "tar" -version = "0.4.38" +name = "tabwriter" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b55807c0344e1e6c04d7c965f5289c39a8d94ae23ed5c0b57aabac549f871c6" +checksum = "08e1173ee641651a3095fe95d86ae314cd1f959888097debce3e0f9ca532eef1" dependencies = [ - 
"filetime", - "libc", - "xattr", + "unicode-width", ] [[package]] -name = "unicode-ident" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0" - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "winapi" -version = "0.3.9" +name = "termcolor" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +checksum = "6093bad37da69aab9d123a8091e4be0aa4a03e4d601ec641c327398315f62b64" dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", + "winapi-util", ] [[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" +name = "textwrap" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" +name = "toml" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] [[package]] -name = "windows-sys" -version = "0.48.0" +name = "toml_datetime" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" dependencies = [ - "windows-targets", + "serde", ] [[package]] -name = "windows-targets" -version = "0.48.0" +name = "toml_edit" +version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.0" +name = "unicode-ident" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] -name = "windows_aarch64_msvc" -version = "0.48.0" +name = "unicode-width" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] -name = "windows_i686_gnu" -version = "0.48.0" +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = 
"9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] -name = "windows_i686_msvc" -version = "0.48.0" +name = "winapi" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] [[package]] -name = "windows_x86_64_gnu" -version = "0.48.0" +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.0" +name = "winapi-util" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] [[package]] -name = "windows_x86_64_msvc" -version = "0.48.0" +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] -name = "xattr" -version = "0.2.3" +name = "winnow" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d1526bbe5aaeb5eb06885f4d987bcdfa5e23187055de9b83fe00156a821fabc" +checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc" dependencies = [ - "libc", + "memchr", ] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/stdlib-lock.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/stdlib-lock.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/stdlib-lock.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/patches/stdlib-lock.toml 2023-12-21 16:55:28.000000000 +0000 @@ -41,22 +41,6 @@ checksum = "56fc6cf8dc8c4158eed8649f9b8b0ea1518eb62b544fe9490d66fa0b349eafe9" [[package]] -name = "auxv" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e50430f9beb8effb02399fa81c76eeaa26b05e4f03b09285cad8d079c1af5a3d" -dependencies = [ - "byteorder", - "gcc", -] - -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - -[[package]] name = "cc" version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -74,9 +58,9 @@ [[package]] name = "compiler_builtins" -version = "0.1.100" +version = "0.1.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6c0f24437059853f0fa64afc51f338f93647a3de4cf3358ba1bb4171a199775" +checksum = "a3b73c3443a5fd2438d7ba4853c64e4c8efc2404a9e28a9234cc2d5eebc6c242" dependencies = [ "cc", "rustc-std-workspace-core", @@ -174,9 +158,9 @@ [[package]] name = "libc" -version = "0.2.146" +version = "0.2.150" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b" +checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" dependencies = [ "rustc-std-workspace-core", ] @@ -256,6 +240,27 @@ ] [[package]] +name = "r-efi" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "575fc2d9b3da54adbdfaddf6eca48fec256d977c8630a1750b8991347d1ac911" +dependencies = [ + "compiler_builtins", + "rustc-std-workspace-core", +] + +[[package]] +name = "r-efi-alloc" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31d6f09fe2b6ad044bc3d2c34ce4979796581afd2f1ebc185837e02421e02fd7" +dependencies = [ + "compiler_builtins", + "r-efi", + "rustc-std-workspace-core", +] + +[[package]] name = "rand" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -353,6 +358,8 @@ "panic_abort", "panic_unwind", "profiler_builtins", + "r-efi", + "r-efi-alloc", "rand", "rand_xorshift", "rustc-demangle", @@ -365,7 +372,6 @@ name = "std_detect" version = "0.1.5" dependencies = [ - "auxv", "cfg-if", "compiler_builtins", "cupid", @@ -409,7 +415,6 @@ name = "unwind" version = "0.0.0" dependencies = [ - "cc", "cfg-if", "compiler_builtins", "core", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/rust-toolchain rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/rust-toolchain --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/rust-toolchain 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/rust-toolchain 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2023-09-06" +channel = "nightly-2023-11-10" components = ["rust-src", "rustc-dev", "llvm-tools"] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/scripts/setup_rust_fork.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/scripts/setup_rust_fork.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/scripts/setup_rust_fork.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/scripts/setup_rust_fork.sh 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,12 @@ #!/usr/bin/env bash set -e -./y.sh build --no-unstable-features +# Compiletest expects all standard library paths to start with /rustc/FAKE_PREFIX. +# CG_CLIF_STDLIB_REMAP_PATH_PREFIX will cause cg_clif's build system to pass +# --remap-path-prefix to handle this. +# CG_CLIF_FORCE_GNU_AS will force usage of as instead of the LLVM backend of rustc as we +# the LLVM backend isn't compiled in here. 
+CG_CLIF_FORCE_GNU_AS=1 CG_CLIF_STDLIB_REMAP_PATH_PREFIX=/rustc/FAKE_PREFIX ./y.sh build echo "[SETUP] Rust fork" git clone https://github.com/rust-lang/rust.git || true @@ -13,25 +18,8 @@ git -c user.name=Dummy -c user.email=dummy@example.com -c commit.gpgSign=false \ am ../patches/*-stdlib-*.patch -git apply - < config.toml </dev/null 2>&1 || cargo install ripgrep rm -r tests/ui/{unsized-locals/,lto/,linkage*} || true -for test in $(rg --files-with-matches "lto|// needs-asm-support" tests/{codegen-units,ui,incremental}); do +for test in $(rg --files-with-matches "lto" tests/{codegen-units,ui,incremental}); do rm $test done -for test in tests/run-make/**/Makefile; do - if rg "# needs-asm-support" $test >/dev/null; then - rm -r $(dirname $test) - fi -done - for test in $(rg -i --files-with-matches "//(\[\w+\])?~[^\|]*\s*ERR|// error-pattern:|// build-fail|// run-fail|-Cllvm-args" tests/ui); do rm $test done git checkout -- tests/ui/issues/auxiliary/issue-3136-a.rs # contains //~ERROR, but shouldn't be removed git checkout -- tests/ui/proc-macro/pretty-print-hack/ +git checkout -- tests/ui/entry-point/auxiliary/bad_main_functions.rs rm tests/ui/parser/unclosed-delimiter-in-dep.rs # submodule contains //~ERROR # missing features # ================ -rm -r tests/run-make/comment-section # cg_clif doesn't yet write the .comment section - # requires stack unwinding -# FIXME add needs-unwind to this test +# FIXME add needs-unwind to these tests rm -r tests/run-make/libtest-junit +rm tests/ui/asm/may_unwind.rs # extra warning about -Cpanic=abort for proc macros rm tests/ui/proc-macro/crt-static.rs @@ -48,10 +42,8 @@ rm tests/ui/proc-macro/no-mangle-in-proc-macro-issue-111888.rs # vendor intrinsics -rm tests/ui/sse2.rs # cpuid not supported, so sse2 not detected +rm tests/ui/sse2.rs # CodegenBackend::target_features not yet implemented rm tests/ui/simd/array-type.rs # "Index argument for `simd_insert` is not a constant" -rm tests/ui/simd/intrinsic/generic-bswap-byte.rs # simd_bswap not yet implemented -rm tests/ui/simd/intrinsic/generic-arithmetic-pass.rs # many missing simd intrinsics # exotic linkages rm tests/ui/issues/issue-33992.rs # unsupported linkages @@ -76,7 +68,8 @@ rm -r tests/run-make/symbols-include-type-name # --emit=asm not supported rm -r tests/run-make/target-specs # i686 not supported by Cranelift rm -r tests/run-make/mismatching-target-triples # same -rm -r tests/run-make/use-extern-for-plugins # same +rm tests/ui/asm/x86_64/issue-82869.rs # vector regs in inline asm not yet supported +rm tests/ui/asm/x86_64/issue-96797.rs # const and sym inline asm operands don't work entirely correctly # requires LTO rm -r tests/run-make/cdylib @@ -117,20 +110,6 @@ rm tests/ui/layout/valid_range_oob.rs # different ICE message rm tests/ui/const-generics/generic_const_exprs/issue-80742.rs # gives error instead of ICE with cg_clif -rm tests/ui/consts/issue-miri-1910.rs # different error message -rm tests/ui/consts/offset_ub.rs # same -rm tests/ui/consts/const-eval/ub-slice-get-unchecked.rs # same -rm tests/ui/intrinsics/panic-uninitialized-zeroed.rs # same -rm tests/ui/lint/lint-const-item-mutation.rs # same -rm tests/ui/pattern/usefulness/doc-hidden-non-exhaustive.rs # same -rm tests/ui/suggestions/derive-trait-for-method-call.rs # same -rm tests/ui/typeck/issue-46112.rs # same -rm tests/ui/consts/const_cmp_type_id.rs # same -rm tests/ui/consts/issue-73976-monomorphic.rs # same -rm tests/ui/rfcs/rfc-3348-c-string-literals/non-ascii.rs # same -rm tests/ui/consts/const-eval/nonnull_as_ref_ub.rs # 
same -rm tests/ui/consts/issue-94675.rs # same - # rustdoc-clif passes extra args, suppressing the help message when no args are passed rm -r tests/run-make/issue-88756-default-output @@ -154,9 +133,12 @@ rm -r tests/run-make/used # same rm -r tests/run-make/no-alloc-shim rm -r tests/run-make/emit-to-stdout +rm -r tests/run-make/compressed-debuginfo rm -r tests/run-make/extern-fn-explicit-align # argument alignment not yet supported +rm tests/ui/codegen/subtyping-enforces-type-equality.rs # assert_assignable bug with Coroutine's + # bugs in the test suite # ====================== rm tests/ui/backtrace.rs # TODO warning @@ -164,6 +146,11 @@ rm tests/ui/stdio-is-blocking.rs # really slow with unoptimized libstd +# rustc bugs +# ========== +# https://github.com/rust-lang/rust/pull/116447#issuecomment-1790451463 +rm tests/ui/coroutine/gen_block_*.rs + cp ../dist/bin/rustdoc-clif ../dist/bin/rustdoc # some tests expect bin/rustdoc to exist # prevent $(RUSTDOC) from picking up the sysroot built by x.py. It conflicts with the one used by diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,6 +6,7 @@ use std::borrow::Cow; +use cranelift_codegen::ir::{AbiParam, SigRef}; use cranelift_module::ModuleError; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::ty::layout::FnAbiOf; @@ -13,12 +14,9 @@ use rustc_target::abi::call::{Conv, FnAbi}; use rustc_target::spec::abi::Abi; -use cranelift_codegen::ir::{AbiParam, SigRef}; - use self::pass_mode::*; -use crate::prelude::*; - pub(crate) use self::returning::codegen_return; +use crate::prelude::*; fn clif_sig_from_fn_abi<'tcx>( tcx: TyCtxt<'tcx>, @@ -30,7 +28,7 @@ let inputs = fn_abi.args.iter().flat_map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter()); let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx); - // Sometimes the first param is an pointer to the place where the return value needs to be stored. + // Sometimes the first param is a pointer to the place where the return value needs to be stored. 
let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect(); Signature { params, returns, call_conv } @@ -122,32 +120,25 @@ args: &[Value], ) -> Cow<'_, [Value]> { if self.tcx.sess.target.is_like_windows { - let (mut params, mut args): (Vec<_>, Vec<_>) = - params - .into_iter() - .zip(args) - .map(|(param, &arg)| { - if param.value_type == types::I128 { - let arg_ptr = Pointer::stack_slot(self.bcx.create_sized_stack_slot( - StackSlotData { kind: StackSlotKind::ExplicitSlot, size: 16 }, - )); - arg_ptr.store(self, arg, MemFlags::trusted()); - (AbiParam::new(self.pointer_type), arg_ptr.get_addr(self)) - } else { - (param, arg) - } - }) - .unzip(); + let (mut params, mut args): (Vec<_>, Vec<_>) = params + .into_iter() + .zip(args) + .map(|(param, &arg)| { + if param.value_type == types::I128 { + let arg_ptr = self.create_stack_slot(16, 16); + arg_ptr.store(self, arg, MemFlags::trusted()); + (AbiParam::new(self.pointer_type), arg_ptr.get_addr(self)) + } else { + (param, arg) + } + }) + .unzip(); let indirect_ret_val = returns.len() == 1 && returns[0].value_type == types::I128; if indirect_ret_val { params.insert(0, AbiParam::new(self.pointer_type)); - let ret_ptr = - Pointer::stack_slot(self.bcx.create_sized_stack_slot(StackSlotData { - kind: StackSlotKind::ExplicitSlot, - size: 16, - })); + let ret_ptr = self.create_stack_slot(16, 16); args.insert(0, ret_ptr.get_addr(self)); self.lib_call_unadjusted(name, params, vec![], &args); return Cow::Owned(vec![ret_ptr.load(self, types::I128, MemFlags::trusted())]); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/pass_mode.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,14 +1,14 @@ //! Argument passing -use crate::prelude::*; -use crate::value_and_place::assert_assignable; - use cranelift_codegen::ir::{ArgumentExtension, ArgumentPurpose}; use rustc_target::abi::call::{ ArgAbi, ArgAttributes, ArgExtension as RustcArgExtension, CastTarget, PassMode, Reg, RegKind, }; use smallvec::{smallvec, SmallVec}; +use crate::prelude::*; +use crate::value_and_place::assert_assignable; + pub(super) trait ArgAbiExt<'tcx> { fn get_abi_param(&self, tcx: TyCtxt<'tcx>) -> SmallVec<[AbiParam; 2]>; fn get_abi_return(&self, tcx: TyCtxt<'tcx>) -> (Option, Vec); @@ -189,16 +189,13 @@ let abi_params = cast_target_to_abi_params(cast); let abi_param_size: u32 = abi_params.iter().map(|param| param.value_type.bytes()).sum(); let layout_size = u32::try_from(layout.size.bytes()).unwrap(); - let stack_slot = fx.bcx.create_sized_stack_slot(StackSlotData { - kind: StackSlotKind::ExplicitSlot, - // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to - // specify stack slot alignment. + let ptr = fx.create_stack_slot( // Stack slot size may be bigger for example `[u8; 3]` which is packed into an `i32`. // It may also be smaller for example when the type is a wrapper around an integer with a // larger alignment than the integer. 
- size: (std::cmp::max(abi_param_size, layout_size) + 15) / 16 * 16, - }); - let ptr = Pointer::stack_slot(stack_slot); + std::cmp::max(abi_param_size, layout_size), + u32::try_from(layout.align.pref.bytes()).unwrap(), + ); let mut offset = 0; let mut block_params_iter = block_params.iter().copied(); for param in abi_params { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/returning.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/returning.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/returning.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/abi/returning.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,10 +1,10 @@ //! Return value handling -use crate::prelude::*; - use rustc_target::abi::call::{ArgAbi, PassMode}; use smallvec::{smallvec, SmallVec}; +use crate::prelude::*; + /// Return a place where the return value of the current function can be written to. If necessary /// this adds an extra parameter pointing to where the return value needs to be stored. pub(super) fn codegen_return_param<'tcx>( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/allocator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/allocator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/allocator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/allocator.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,8 +1,6 @@ //! Allocator shim // Adapted from rustc -use crate::prelude::*; - use rustc_ast::expand::allocator::{ alloc_error_handler_name, default_fn_name, global_fn_name, AllocatorKind, AllocatorTy, ALLOCATOR_METHODS, NO_ALLOC_SHIM_IS_UNSTABLE, @@ -10,6 +8,8 @@ use rustc_codegen_ssa::base::allocator_kind_for_codegen; use rustc_session::config::OomStrategy; +use crate::prelude::*; + /// Returns whether an allocator shim was created pub(crate) fn codegen( tcx: TyCtxt<'_>, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/analyze.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/analyze.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/analyze.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/analyze.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,11 +1,11 @@ //! SSA analysis -use crate::prelude::*; - use rustc_index::IndexVec; use rustc_middle::mir::StatementKind::*; use rustc_middle::ty::Ty; +use crate::prelude::*; + #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub(crate) enum SsaKind { NotSsa, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/base.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,15 +1,14 @@ //! 
Codegen of a single function +use cranelift_codegen::ir::UserFuncName; +use cranelift_codegen::CodegenError; +use cranelift_module::ModuleError; use rustc_ast::InlineAsmOptions; use rustc_index::IndexVec; use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::layout::FnAbiOf; use rustc_middle::ty::print::with_no_trimmed_paths; -use cranelift_codegen::ir::UserFuncName; -use cranelift_codegen::CodegenError; -use cranelift_module::ModuleError; - use crate::constant::ConstantCx; use crate::debuginfo::FunctionDebugContext; use crate::prelude::*; @@ -250,17 +249,6 @@ } fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) { - if let Err(err) = - fx.mir.post_mono_checks(fx.tcx, ty::ParamEnv::reveal_all(), |c| Ok(fx.monomorphize(c))) - { - err.emit_err(fx.tcx); - fx.bcx.append_block_params_for_function_params(fx.block_map[START_BLOCK]); - fx.bcx.switch_to_block(fx.block_map[START_BLOCK]); - // compilation should have been aborted - fx.bcx.ins().trap(TrapCode::UnreachableCodeReached); - return; - } - let arg_uninhabited = fx .mir .args_iter() @@ -490,7 +478,7 @@ TerminatorKind::Yield { .. } | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } - | TerminatorKind::GeneratorDrop => { + | TerminatorKind::CoroutineDrop => { bug!("shouldn't exist at codegen {:?}", bb_data.terminator()); } TerminatorKind::Drop { place, target, unwind: _, replace: _ } => { @@ -778,7 +766,7 @@ NullOp::SizeOf => layout.size.bytes(), NullOp::AlignOf => layout.align.abi.bytes(), NullOp::OffsetOf(fields) => { - layout.offset_of_subfield(fx, fields.iter().map(|f| f.index())).bytes() + layout.offset_of_subfield(fx, fields.iter()).bytes() } }; let val = CValue::by_val( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/cast.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/cast.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/cast.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/cast.rs 2023-12-21 16:55:28.000000000 +0000 @@ -104,11 +104,7 @@ &[from], )[0]; // FIXME(bytecodealliance/wasmtime#6104) use bitcast instead of store to get from i64x2 to i128 - let stack_slot = fx.bcx.create_sized_stack_slot(StackSlotData { - kind: StackSlotKind::ExplicitSlot, - size: 16, - }); - let ret_ptr = Pointer::stack_slot(stack_slot); + let ret_ptr = fx.create_stack_slot(16, 16); ret_ptr.store(fx, ret, MemFlags::trusted()); ret_ptr.load(fx, types::I128, MemFlags::trusted()) } else { @@ -129,8 +125,8 @@ let (min, max) = match (to_ty, to_signed) { (types::I8, false) => (0, i64::from(u8::MAX)), (types::I16, false) => (0, i64::from(u16::MAX)), - (types::I8, true) => (i64::from(i8::MIN), i64::from(i8::MAX)), - (types::I16, true) => (i64::from(i16::MIN), i64::from(i16::MAX)), + (types::I8, true) => (i64::from(i8::MIN as u32), i64::from(i8::MAX as u32)), + (types::I16, true) => (i64::from(i16::MIN as u32), i64::from(i16::MAX as u32)), _ => unreachable!(), }; let min_val = fx.bcx.ins().iconst(types::I32, min); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/common.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/common.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/common.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/common.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,5 @@ use 
cranelift_codegen::isa::TargetFrontendConfig; use gimli::write::FileId; - use rustc_data_structures::sync::Lrc; use rustc_index::IndexVec; use rustc_middle::ty::layout::{ @@ -204,9 +203,9 @@ (types::I8, false) | (types::I16, false) | (types::I32, false) | (types::I64, false) => { 0i64 } - (types::I8, true) => i64::from(i8::MIN), - (types::I16, true) => i64::from(i16::MIN), - (types::I32, true) => i64::from(i32::MIN), + (types::I8, true) => i64::from(i8::MIN as u8), + (types::I16, true) => i64::from(i16::MIN as u16), + (types::I32, true) => i64::from(i32::MIN as u32), (types::I64, true) => i64::MIN, _ => unreachable!(), }; @@ -216,9 +215,9 @@ (types::I16, false) => i64::from(u16::MAX), (types::I32, false) => i64::from(u32::MAX), (types::I64, false) => u64::MAX as i64, - (types::I8, true) => i64::from(i8::MAX), - (types::I16, true) => i64::from(i16::MAX), - (types::I32, true) => i64::from(i32::MAX), + (types::I8, true) => i64::from(i8::MAX as u8), + (types::I16, true) => i64::from(i16::MAX as u16), + (types::I32, true) => i64::from(i32::MAX as u32), (types::I64, true) => i64::MAX, _ => unreachable!(), }; @@ -384,6 +383,25 @@ }) } + pub(crate) fn create_stack_slot(&mut self, size: u32, align: u32) -> Pointer { + if align <= 16 { + let stack_slot = self.bcx.create_sized_stack_slot(StackSlotData { + kind: StackSlotKind::ExplicitSlot, + // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to + // specify stack slot alignment. + size: (size + 15) / 16 * 16, + }); + Pointer::stack_slot(stack_slot) + } else { + // Alignment is too big to handle using the above hack. Dynamically realign a stack slot + // instead. This wastes some space for the realignment. + let base_ptr = self.create_stack_slot(size + align, 16).get_addr(self); + let misalign_offset = self.bcx.ins().urem_imm(base_ptr, i64::from(align)); + let realign_offset = self.bcx.ins().irsub_imm(misalign_offset, i64::from(align)); + Pointer::new(self.bcx.ins().iadd(base_ptr, realign_offset)) + } + } + pub(crate) fn set_debug_loc(&mut self, source_info: mir::SourceInfo) { if let Some(debug_context) = &mut self.cx.debug_context { let (file, line, column) = @@ -412,46 +430,11 @@ } } - // Note: must be kept in sync with get_caller_location from cg_ssa - pub(crate) fn get_caller_location(&mut self, mut source_info: mir::SourceInfo) -> CValue<'tcx> { - let span_to_caller_location = |fx: &mut FunctionCx<'_, '_, 'tcx>, span: Span| { - let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span); - let caller = fx.tcx.sess.source_map().lookup_char_pos(topmost.lo()); - let const_loc = fx.tcx.const_caller_location(( - rustc_span::symbol::Symbol::intern( - &caller.file.name.prefer_remapped().to_string_lossy(), - ), - caller.line as u32, - caller.col_display as u32 + 1, - )); - crate::constant::codegen_const_value(fx, const_loc, fx.tcx.caller_location_ty()) - }; - - // Walk up the `SourceScope`s, in case some of them are from MIR inlining. - // If so, the starting `source_info.span` is in the innermost inlined - // function, and will be replaced with outer callsite spans as long - // as the inlined functions were `#[track_caller]`. - loop { - let scope_data = &self.mir.source_scopes[source_info.scope]; - - if let Some((callee, callsite_span)) = scope_data.inlined { - // Stop inside the most nested non-`#[track_caller]` function, - // before ever reaching its caller (which is irrelevant). 
- if !callee.def.requires_caller_location(self.tcx) { - return span_to_caller_location(self, source_info.span); - } - source_info.span = callsite_span; - } - - // Skip past all of the parents with `inlined: None`. - match scope_data.inlined_parent_scope { - Some(parent) => source_info.scope = parent, - None => break, - } - } - - // No inlined `SourceScope`s, or all of them were `#[track_caller]`. - self.caller_location.unwrap_or_else(|| span_to_caller_location(self, source_info.span)) + pub(crate) fn get_caller_location(&mut self, source_info: mir::SourceInfo) -> CValue<'tcx> { + self.mir.caller_location_span(source_info, self.caller_location, self.tcx, |span| { + let const_loc = self.tcx.span_as_caller_location(span); + crate::constant::codegen_const_value(self, const_loc, self.tcx.caller_location_ty()) + }) } pub(crate) fn anonymous_str(&mut self, msg: &str) -> Value { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/concurrency_limiter.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/concurrency_limiter.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/concurrency_limiter.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/concurrency_limiter.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,8 +1,7 @@ use std::sync::{Arc, Condvar, Mutex}; -use rustc_session::Session; - use jobserver::HelperThread; +use rustc_session::Session; // FIXME don't panic when a worker thread panics diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/constant.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/constant.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/constant.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/constant.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,12 +1,11 @@ //! Handling of `static`s, `const`s and promoted allocations +use cranelift_module::*; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::mir::interpret::{read_target_uint, AllocId, GlobalAlloc, Scalar}; use rustc_middle::mir::ConstValue; -use cranelift_module::*; - use crate::prelude::*; pub(crate) struct ConstantCx { @@ -101,7 +100,7 @@ if fx.clif_type(layout.ty).is_some() { return CValue::const_val(fx, layout, int); } else { - let raw_val = int.to_bits(int.size()).unwrap(); + let raw_val = int.size().truncate(int.to_bits(int.size()).unwrap()); let val = match int.size().bytes() { 1 => fx.bcx.ins().iconst(types::I8, raw_val as i64), 2 => fx.bcx.ins().iconst(types::I16, raw_val as i64), @@ -187,8 +186,7 @@ ConstValue::Slice { data, meta } => { let alloc_id = fx.tcx.reserve_and_set_memory_alloc(data); let ptr = pointer_for_allocation(fx, alloc_id).get_addr(fx); - // FIXME: the `try_from` here can actually fail, e.g. for very long ZST slices. - let len = fx.bcx.ins().iconst(fx.pointer_type, i64::try_from(meta).unwrap()); + let len = fx.bcx.ins().iconst(fx.pointer_type, meta as i64); CValue::by_val_pair(ptr, len, layout) } } @@ -512,7 +510,7 @@ | TerminatorKind::Drop { .. } | TerminatorKind::Assert { .. } => {} TerminatorKind::Yield { .. } - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } => unreachable!(), TerminatorKind::InlineAsm { .. 
} => return None, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/emit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/emit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/emit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/emit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,10 +1,9 @@ //! Write the debuginfo into an object file. use cranelift_object::ObjectProduct; -use rustc_data_structures::fx::FxHashMap; - use gimli::write::{Address, AttributeValue, EndianVec, Result, Sections, Writer}; use gimli::{RunTimeEndian, SectionId}; +use rustc_data_structures::fx::FxHashMap; use super::object::WriteDebugInfo; use super::DebugContext; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/line_info.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/line_info.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/line_info.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/line_info.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,20 +3,18 @@ use std::ffi::OsStr; use std::path::{Component, Path}; -use crate::debuginfo::FunctionDebugContext; -use crate::prelude::*; - -use rustc_data_structures::sync::Lrc; -use rustc_span::{ - FileName, Pos, SourceFile, SourceFileAndLine, SourceFileHash, SourceFileHashAlgorithm, -}; - use cranelift_codegen::binemit::CodeOffset; use cranelift_codegen::MachSrcLoc; - use gimli::write::{ Address, AttributeValue, FileId, FileInfo, LineProgram, LineString, LineStringTable, }; +use rustc_data_structures::sync::Lrc; +use rustc_span::{ + FileName, Pos, SourceFile, SourceFileAndLine, SourceFileHash, SourceFileHashAlgorithm, +}; + +use crate::debuginfo::FunctionDebugContext; +use crate::prelude::*; // OPTIMIZATION: It is cheaper to do this in one pass than using `.parent()` and `.file_name()`. 
fn split_path_dir_and_file(path: &Path) -> (&Path, &OsStr) { @@ -97,7 +95,11 @@ match &source_file.name { FileName::Real(path) => { let (dir_path, file_name) = - split_path_dir_and_file(path.remapped_path_if_available()); + split_path_dir_and_file(if self.should_remap_filepaths { + path.remapped_path_if_available() + } else { + path.local_path_if_available() + }); let dir_name = osstr_as_utf8_bytes(dir_path.as_os_str()); let file_name = osstr_as_utf8_bytes(file_name); @@ -118,7 +120,14 @@ filename => { let dir_id = line_program.default_directory(); let dummy_file_name = LineString::new( - filename.prefer_remapped().to_string().into_bytes(), + filename + .display(if self.should_remap_filepaths { + FileNameDisplayPreference::Remapped + } else { + FileNameDisplayPreference::Local + }) + .to_string() + .into_bytes(), line_program.encoding(), line_strings, ); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,11 +5,8 @@ mod object; mod unwind; -use crate::prelude::*; - use cranelift_codegen::ir::Endianness; use cranelift_codegen::isa::TargetIsa; - use gimli::write::{ Address, AttributeValue, DwarfUnit, FileId, LineProgram, LineString, Range, RangeList, UnitEntryId, @@ -17,12 +14,13 @@ use gimli::{Encoding, Format, LineEncoding, RunTimeEndian}; use indexmap::IndexSet; -pub(crate) use emit::{DebugReloc, DebugRelocName}; -pub(crate) use unwind::UnwindContext; +pub(crate) use self::emit::{DebugReloc, DebugRelocName}; +pub(crate) use self::unwind::UnwindContext; +use crate::prelude::*; pub(crate) fn producer() -> String { format!( - "cg_clif (rustc {}, cranelift {})", + "rustc version {} with cranelift {}", rustc_interface::util::rustc_version_str().unwrap_or("unknown version"), cranelift_codegen::VERSION, ) @@ -33,6 +31,8 @@ dwarf: DwarfUnit, unit_range_list: RangeList, + + should_remap_filepaths: bool, } pub(crate) struct FunctionDebugContext { @@ -65,12 +65,18 @@ let mut dwarf = DwarfUnit::new(encoding); + let should_remap_filepaths = tcx.sess.should_prefer_remapped_for_codegen(); + let producer = producer(); let comp_dir = tcx .sess .opts .working_dir - .to_string_lossy(FileNameDisplayPreference::Remapped) + .to_string_lossy(if should_remap_filepaths { + FileNameDisplayPreference::Remapped + } else { + FileNameDisplayPreference::Local + }) .into_owned(); let (name, file_info) = match tcx.sess.local_crate_source_file() { Some(path) => { @@ -104,7 +110,12 @@ root.set(gimli::DW_AT_low_pc, AttributeValue::Address(Address::Constant(0))); } - DebugContext { endian, dwarf, unit_range_list: RangeList(Vec::new()) } + DebugContext { + endian, + dwarf, + unit_range_list: RangeList(Vec::new()), + should_remap_filepaths, + } } pub(crate) fn define_function( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/object.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/object.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/object.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/object.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,12 +1,9 @@ -use 
rustc_data_structures::fx::FxHashMap; - use cranelift_module::FuncId; use cranelift_object::ObjectProduct; - +use gimli::SectionId; use object::write::{Relocation, StandardSegment}; use object::{RelocationEncoding, SectionKind}; - -use gimli::SectionId; +use rustc_data_structures::fx::FxHashMap; use crate::debuginfo::{DebugReloc, DebugRelocName}; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/unwind.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/unwind.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/unwind.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/debuginfo/unwind.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,15 +1,13 @@ //! Unwind info generation (`.eh_frame`) -use crate::prelude::*; - use cranelift_codegen::ir::Endianness; use cranelift_codegen::isa::{unwind::UnwindInfo, TargetIsa}; - use cranelift_object::ObjectProduct; use gimli::write::{Address, CieId, EhFrame, FrameTable, Section}; use gimli::RunTimeEndian; use super::object::WriteDebugInfo; +use crate::prelude::*; pub(crate) struct UnwindContext { endian: RunTimeEndian, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/driver/aot.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/driver/aot.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/driver/aot.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/driver/aot.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,19 +6,19 @@ use std::sync::Arc; use std::thread::JoinHandle; +use cranelift_object::{ObjectBuilder, ObjectModule}; +use rustc_codegen_ssa::assert_module_sources::CguReuse; use rustc_codegen_ssa::back::metadata::create_compressed_metadata_file; +use rustc_codegen_ssa::base::determine_cgu_reuse; use rustc_codegen_ssa::{CodegenResults, CompiledModule, CrateInfo, ModuleKind}; use rustc_data_structures::profiling::SelfProfilerRef; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_metadata::EncodedMetadata; use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; use rustc_middle::mir::mono::{CodegenUnit, MonoItem}; -use rustc_session::cgu_reuse_tracker::CguReuse; use rustc_session::config::{DebugInfo, OutputFilenames, OutputType}; use rustc_session::Session; -use cranelift_object::{ObjectBuilder, ObjectModule}; - use crate::concurrency_limiter::{ConcurrencyLimiter, ConcurrencyLimiterToken}; use crate::global_asm::GlobalAsmConfig; use crate::{prelude::*, BackendConfig}; @@ -361,12 +361,26 @@ metadata: EncodedMetadata, need_metadata_module: bool, ) -> Box { + // FIXME handle `-Ctarget-cpu=native` + let target_cpu = match tcx.sess.opts.cg.target_cpu { + Some(ref name) => name, + None => tcx.sess.target.cpu.as_ref(), + } + .to_owned(); + let cgus = if tcx.sess.opts.output_types.should_codegen() { tcx.collect_and_partition_mono_items(()).1 } else { // If only `--emit metadata` is used, we shouldn't perform any codegen. // Also `tcx.collect_and_partition_mono_items` may panic in that case. 
- &[] + return Box::new(OngoingCodegen { + modules: vec![], + allocator_module: None, + metadata_module: None, + metadata, + crate_info: CrateInfo::new(tcx, target_cpu), + concurrency_limiter: ConcurrencyLimiter::new(tcx.sess, 0), + }); }; if tcx.dep_graph.is_fully_enabled() { @@ -375,20 +389,28 @@ } } + // Calculate the CGU reuse + let cgu_reuse = tcx.sess.time("find_cgu_reuse", || { + cgus.iter().map(|cgu| determine_cgu_reuse(tcx, &cgu)).collect::>() + }); + + rustc_codegen_ssa::assert_module_sources::assert_module_sources(tcx, &|cgu_reuse_tracker| { + for (i, cgu) in cgus.iter().enumerate() { + let cgu_reuse = cgu_reuse[i]; + cgu_reuse_tracker.set_actual_reuse(cgu.name().as_str(), cgu_reuse); + } + }); + let global_asm_config = Arc::new(crate::global_asm::GlobalAsmConfig::new(tcx)); let mut concurrency_limiter = ConcurrencyLimiter::new(tcx.sess, cgus.len()); let modules = tcx.sess.time("codegen mono items", || { cgus.iter() - .map(|cgu| { - let cgu_reuse = if backend_config.disable_incr_cache { - CguReuse::No - } else { - determine_cgu_reuse(tcx, cgu) - }; - tcx.sess.cgu_reuse_tracker.set_actual_reuse(cgu.name().as_str(), cgu_reuse); - + .enumerate() + .map(|(i, cgu)| { + let cgu_reuse = + if backend_config.disable_incr_cache { CguReuse::No } else { cgu_reuse[i] }; match cgu_reuse { CguReuse::No => { let dep_node = cgu.codegen_dep_node(tcx); @@ -407,8 +429,7 @@ ) .0 } - CguReuse::PreLto => unreachable!(), - CguReuse::PostLto => { + CguReuse::PreLto | CguReuse::PostLto => { concurrency_limiter.job_already_done(); OngoingModuleCodegen::Sync(reuse_workproduct_for_cgu(tcx, cgu)) } @@ -474,13 +495,6 @@ None }; - // FIXME handle `-Ctarget-cpu=native` - let target_cpu = match tcx.sess.opts.cg.target_cpu { - Some(ref name) => name, - None => tcx.sess.target.cpu.as_ref(), - } - .to_owned(); - Box::new(OngoingCodegen { modules, allocator_module, @@ -490,32 +504,3 @@ concurrency_limiter, }) } - -// Adapted from https://github.com/rust-lang/rust/blob/303d8aff6092709edd4dbd35b1c88e9aa40bf6d8/src/librustc_codegen_ssa/base.rs#L922-L953 -fn determine_cgu_reuse<'tcx>(tcx: TyCtxt<'tcx>, cgu: &CodegenUnit<'tcx>) -> CguReuse { - if !tcx.dep_graph.is_fully_enabled() { - return CguReuse::No; - } - - let work_product_id = &cgu.work_product_id(); - if tcx.dep_graph.previous_work_product(work_product_id).is_none() { - // We don't have anything cached for this CGU. This can happen - // if the CGU did not exist in the previous session. - return CguReuse::No; - } - - // Try to mark the CGU as green. If it we can do so, it means that nothing - // affecting the LLVM module has changed and we can re-use a cached version. - // If we compile with any kind of LTO, this means we can re-use the bitcode - // of the Pre-LTO stage (possibly also the Post-LTO version but we'll only - // know that later). If we are not doing LTO, there is only one optimized - // version of each module, so we re-use that. 
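A condensed sketch of the reuse decision spelled out in the deleted local helper above, which the backend now takes from rustc_codegen_ssa::base::determine_cgu_reuse; the enum and function below are illustrative stand-ins, not the rustc_codegen_ssa types:

#[derive(Debug, PartialEq)]
enum CguReuse { No, PostLto }

fn cgu_reuse_sketch(incr_enabled: bool, has_prev_work_product: bool, marked_green: bool) -> CguReuse {
    if !incr_enabled || !has_prev_work_product {
        // Nothing cached from a previous session: codegen the CGU from scratch.
        return CguReuse::No;
    }
    // "Green" means no input of the CompileCodegenUnit dep-node changed, so the
    // cached object file can be copied over unchanged.
    if marked_green { CguReuse::PostLto } else { CguReuse::No }
}

fn main() {
    assert_eq!(cgu_reuse_sketch(true, true, true), CguReuse::PostLto);
    assert_eq!(cgu_reuse_sketch(false, true, true), CguReuse::No);
    assert_eq!(cgu_reuse_sketch(true, false, true), CguReuse::No);
}
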
- let dep_node = cgu.codegen_dep_node(tcx); - assert!( - !tcx.dep_graph.dep_node_exists(&dep_node), - "CompileCodegenUnit dep-node for CGU `{}` already exists before marking.", - cgu.name() - ); - - if tcx.try_mark_green(&dep_node) { CguReuse::PostLto } else { CguReuse::No } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/driver/jit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/driver/jit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/driver/jit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/driver/jit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,13 +6,12 @@ use std::os::raw::{c_char, c_int}; use std::sync::{mpsc, Mutex, OnceLock}; +use cranelift_jit::{JITBuilder, JITModule}; use rustc_codegen_ssa::CrateInfo; use rustc_middle::mir::mono::MonoItem; use rustc_session::Session; use rustc_span::Symbol; -use cranelift_jit::{JITBuilder, JITModule}; - use crate::{prelude::*, BackendConfig}; use crate::{CodegenCx, CodegenMode}; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/global_asm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/global_asm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/global_asm.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/global_asm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,16 +9,22 @@ use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_hir::{InlineAsmOperand, ItemId}; use rustc_session::config::{OutputFilenames, OutputType}; +use rustc_target::asm::InlineAsmArch; use crate::prelude::*; pub(crate) fn codegen_global_asm_item(tcx: TyCtxt<'_>, global_asm: &mut String, item_id: ItemId) { let item = tcx.hir().item(item_id); if let rustc_hir::ItemKind::GlobalAsm(asm) = item.kind { - if !asm.options.contains(InlineAsmOptions::ATT_SYNTAX) { - global_asm.push_str("\n.intel_syntax noprefix\n"); - } else { - global_asm.push_str("\n.att_syntax\n"); + let is_x86 = + matches!(tcx.sess.asm_arch.unwrap(), InlineAsmArch::X86 | InlineAsmArch::X86_64); + + if is_x86 { + if !asm.options.contains(InlineAsmOptions::ATT_SYNTAX) { + global_asm.push_str("\n.intel_syntax noprefix\n"); + } else { + global_asm.push_str("\n.att_syntax\n"); + } } for piece in asm.template { match *piece { @@ -40,6 +46,13 @@ global_asm.push_str(&string); } InlineAsmOperand::SymFn { anon_const } => { + if cfg!(not(feature = "inline_asm_sym")) { + tcx.sess.span_err( + item.span, + "asm! and global_asm! sym operands are not yet supported", + ); + } + let ty = tcx.typeck_body(anon_const.body).node_type(anon_const.hir_id); let instance = match ty.kind() { &ty::FnDef(def_id, args) => Instance::new(def_id, args), @@ -51,6 +64,13 @@ global_asm.push_str(symbol.name); } InlineAsmOperand::SymStatic { path: _, def_id } => { + if cfg!(not(feature = "inline_asm_sym")) { + tcx.sess.span_err( + item.span, + "asm! and global_asm! 
sym operands are not yet supported", + ); + } + let instance = Instance::mono(tcx, def_id).polymorphize(tcx); let symbol = tcx.symbol_name(instance); global_asm.push_str(symbol.name); @@ -65,7 +85,11 @@ } } } - global_asm.push_str("\n.att_syntax\n\n"); + + global_asm.push('\n'); + if is_x86 { + global_asm.push_str(".att_syntax\n\n"); + } } else { bug!("Expected GlobalAsm found {:?}", item); } @@ -73,18 +97,21 @@ #[derive(Debug)] pub(crate) struct GlobalAsmConfig { - asm_enabled: bool, assembler: PathBuf, + target: String, pub(crate) output_filenames: Arc, } impl GlobalAsmConfig { pub(crate) fn new(tcx: TyCtxt<'_>) -> Self { - let asm_enabled = cfg!(feature = "inline_asm") && !tcx.sess.target.is_like_windows; - GlobalAsmConfig { - asm_enabled, assembler: crate::toolchain::get_toolchain_binary(tcx.sess, "as"), + target: match &tcx.sess.opts.target_triple { + rustc_target::spec::TargetTriple::TargetTriple(triple) => triple.clone(), + rustc_target::spec::TargetTriple::TargetJson { path_for_rustdoc, .. } => { + path_for_rustdoc.to_str().unwrap().to_owned() + } + }, output_filenames: tcx.output_filenames(()).clone(), } } @@ -99,42 +126,75 @@ return Ok(None); } - if !config.asm_enabled { - if global_asm.contains("__rust_probestack") { - return Ok(None); - } - - if cfg!(not(feature = "inline_asm")) { - return Err( - "asm! and global_asm! support is disabled while compiling rustc_codegen_cranelift" - .to_owned(), - ); - } else { - return Err("asm! and global_asm! are not yet supported on Windows".to_owned()); - } - } - // Remove all LLVM style comments - let global_asm = global_asm + let mut global_asm = global_asm .lines() .map(|line| if let Some(index) = line.find("//") { &line[0..index] } else { line }) .collect::>() .join("\n"); + global_asm.push('\n'); - let output_object_file = config.output_filenames.temp_path(OutputType::Object, Some(cgu_name)); + let global_asm_object_file = add_file_stem_postfix( + config.output_filenames.temp_path(OutputType::Object, Some(cgu_name)), + ".asm", + ); // Assemble `global_asm` - let global_asm_object_file = add_file_stem_postfix(output_object_file, ".asm"); - let mut child = Command::new(&config.assembler) - .arg("-o") - .arg(&global_asm_object_file) - .stdin(Stdio::piped()) - .spawn() - .expect("Failed to spawn `as`."); - child.stdin.take().unwrap().write_all(global_asm.as_bytes()).unwrap(); - let status = child.wait().expect("Failed to wait for `as`."); - if !status.success() { - return Err(format!("Failed to assemble `{}`", global_asm)); + if option_env!("CG_CLIF_FORCE_GNU_AS").is_some() { + let mut child = Command::new(&config.assembler) + .arg("-o") + .arg(&global_asm_object_file) + .stdin(Stdio::piped()) + .spawn() + .expect("Failed to spawn `as`."); + child.stdin.take().unwrap().write_all(global_asm.as_bytes()).unwrap(); + let status = child.wait().expect("Failed to wait for `as`."); + if !status.success() { + return Err(format!("Failed to assemble `{}`", global_asm)); + } + } else { + let mut child = Command::new(std::env::current_exe().unwrap()) + .arg("--target") + .arg(&config.target) + .arg("--crate-type") + .arg("staticlib") + .arg("--emit") + .arg("obj") + .arg("-o") + .arg(&global_asm_object_file) + .arg("-") + .arg("-Abad_asm_style") + .arg("-Zcodegen-backend=llvm") + .stdin(Stdio::piped()) + .spawn() + .expect("Failed to spawn `as`."); + let mut stdin = child.stdin.take().unwrap(); + stdin + .write_all( + br####" + #![feature(decl_macro, no_core, rustc_attrs)] + #![allow(internal_features)] + #![no_core] + #[rustc_builtin_macro] + 
#[rustc_macro_transparency = "semitransparent"] + macro global_asm() { /* compiler built-in */ } + global_asm!(r###" + "####, + ) + .unwrap(); + stdin.write_all(global_asm.as_bytes()).unwrap(); + stdin + .write_all( + br####" + "###); + "####, + ) + .unwrap(); + std::mem::drop(stdin); + let status = child.wait().expect("Failed to wait for `as`."); + if !status.success() { + return Err(format!("Failed to assemble `{}`", global_asm)); + } } Ok(Some(global_asm_object_file)) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/inline_asm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/inline_asm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/inline_asm.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/inline_asm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,18 +1,19 @@ //! Codegen of `asm!` invocations. -use crate::prelude::*; - use std::fmt::Write; use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_middle::mir::InlineAsmOperand; use rustc_span::sym; use rustc_target::asm::*; +use target_lexicon::BinaryFormat; + +use crate::prelude::*; enum CInlineAsmOperand<'tcx> { In { reg: InlineAsmRegOrRegClass, - value: CValue<'tcx>, + value: Value, }, Out { reg: InlineAsmRegOrRegClass, @@ -22,7 +23,7 @@ InOut { reg: InlineAsmRegOrRegClass, _late: bool, - in_value: CValue<'tcx>, + in_value: Value, out_place: Option>, }, Const { @@ -43,191 +44,23 @@ ) { // FIXME add .eh_frame unwind info directives - if !template.is_empty() { - // Used by panic_abort - if template[0] == InlineAsmTemplatePiece::String("int $$0x29".to_string()) { - fx.bcx.ins().trap(TrapCode::User(1)); - return; - } - - // Used by stdarch - if template[0] == InlineAsmTemplatePiece::String("mov ".to_string()) - && matches!( - template[1], - InlineAsmTemplatePiece::Placeholder { - operand_idx: 0, - modifier: Some('r'), - span: _ - } - ) - && template[2] == InlineAsmTemplatePiece::String(", rbx".to_string()) - && template[3] == InlineAsmTemplatePiece::String("\n".to_string()) - && template[4] == InlineAsmTemplatePiece::String("cpuid".to_string()) - && template[5] == InlineAsmTemplatePiece::String("\n".to_string()) - && template[6] == InlineAsmTemplatePiece::String("xchg ".to_string()) - && matches!( - template[7], - InlineAsmTemplatePiece::Placeholder { - operand_idx: 0, - modifier: Some('r'), - span: _ - } - ) - && template[8] == InlineAsmTemplatePiece::String(", rbx".to_string()) - { - assert_eq!(operands.len(), 4); - let (leaf, eax_place) = match operands[1] { - InlineAsmOperand::InOut { - reg: InlineAsmRegOrRegClass::Reg(InlineAsmReg::X86(X86InlineAsmReg::ax)), - late: _, - ref in_value, - out_place: Some(out_place), - } => ( - crate::base::codegen_operand(fx, in_value).load_scalar(fx), - crate::base::codegen_place(fx, out_place), - ), - _ => unreachable!(), - }; - let ebx_place = match operands[0] { - InlineAsmOperand::Out { - reg: - InlineAsmRegOrRegClass::RegClass(InlineAsmRegClass::X86( - X86InlineAsmRegClass::reg, - )), - late: _, - place: Some(place), - } => crate::base::codegen_place(fx, place), - _ => unreachable!(), - }; - let (sub_leaf, ecx_place) = match operands[2] { - InlineAsmOperand::InOut { - reg: InlineAsmRegOrRegClass::Reg(InlineAsmReg::X86(X86InlineAsmReg::cx)), - late: _, - ref in_value, - out_place: Some(out_place), - } => ( - crate::base::codegen_operand(fx, in_value).load_scalar(fx), - crate::base::codegen_place(fx, out_place), - ), - _ => unreachable!(), - 
}; - let edx_place = match operands[3] { - InlineAsmOperand::Out { - reg: InlineAsmRegOrRegClass::Reg(InlineAsmReg::X86(X86InlineAsmReg::dx)), - late: _, - place: Some(place), - } => crate::base::codegen_place(fx, place), - _ => unreachable!(), - }; - - let (eax, ebx, ecx, edx) = crate::intrinsics::codegen_cpuid_call(fx, leaf, sub_leaf); - - eax_place.write_cvalue(fx, CValue::by_val(eax, fx.layout_of(fx.tcx.types.u32))); - ebx_place.write_cvalue(fx, CValue::by_val(ebx, fx.layout_of(fx.tcx.types.u32))); - ecx_place.write_cvalue(fx, CValue::by_val(ecx, fx.layout_of(fx.tcx.types.u32))); - edx_place.write_cvalue(fx, CValue::by_val(edx, fx.layout_of(fx.tcx.types.u32))); - let destination_block = fx.get_block(destination.unwrap()); - fx.bcx.ins().jump(destination_block, &[]); - return; - } - - // Used by compiler-builtins - if fx.tcx.symbol_name(fx.instance).name.starts_with("___chkstk") { - // ___chkstk, ___chkstk_ms and __alloca are only used on Windows - crate::trap::trap_unimplemented(fx, "Stack probes are not supported"); - return; - } else if fx.tcx.symbol_name(fx.instance).name == "__alloca" { - crate::trap::trap_unimplemented(fx, "Alloca is not supported"); - return; - } - - // Used by measureme - if template[0] == InlineAsmTemplatePiece::String("xor %eax, %eax".to_string()) - && template[1] == InlineAsmTemplatePiece::String("\n".to_string()) - && template[2] == InlineAsmTemplatePiece::String("mov %rbx, ".to_string()) - && matches!( - template[3], - InlineAsmTemplatePiece::Placeholder { - operand_idx: 0, - modifier: Some('r'), - span: _ - } - ) - && template[4] == InlineAsmTemplatePiece::String("\n".to_string()) - && template[5] == InlineAsmTemplatePiece::String("cpuid".to_string()) - && template[6] == InlineAsmTemplatePiece::String("\n".to_string()) - && template[7] == InlineAsmTemplatePiece::String("mov ".to_string()) - && matches!( - template[8], - InlineAsmTemplatePiece::Placeholder { - operand_idx: 0, - modifier: Some('r'), - span: _ - } - ) - && template[9] == InlineAsmTemplatePiece::String(", %rbx".to_string()) - { - let destination_block = fx.get_block(destination.unwrap()); - fx.bcx.ins().jump(destination_block, &[]); - return; - } else if template[0] == InlineAsmTemplatePiece::String("rdpmc".to_string()) { - // Return zero dummy values for all performance counters - match operands[0] { - InlineAsmOperand::In { - reg: InlineAsmRegOrRegClass::Reg(InlineAsmReg::X86(X86InlineAsmReg::cx)), - value: _, - } => {} - _ => unreachable!(), - }; - let lo = match operands[1] { - InlineAsmOperand::Out { - reg: InlineAsmRegOrRegClass::Reg(InlineAsmReg::X86(X86InlineAsmReg::ax)), - late: true, - place: Some(place), - } => crate::base::codegen_place(fx, place), - _ => unreachable!(), - }; - let hi = match operands[2] { - InlineAsmOperand::Out { - reg: InlineAsmRegOrRegClass::Reg(InlineAsmReg::X86(X86InlineAsmReg::dx)), - late: true, - place: Some(place), - } => crate::base::codegen_place(fx, place), - _ => unreachable!(), - }; - - let u32_layout = fx.layout_of(fx.tcx.types.u32); - let zero = fx.bcx.ins().iconst(types::I32, 0); - lo.write_cvalue(fx, CValue::by_val(zero, u32_layout)); - hi.write_cvalue(fx, CValue::by_val(zero, u32_layout)); - - let destination_block = fx.get_block(destination.unwrap()); - fx.bcx.ins().jump(destination_block, &[]); - return; - } else if template[0] == InlineAsmTemplatePiece::String("lock xadd ".to_string()) - && matches!( - template[1], - InlineAsmTemplatePiece::Placeholder { operand_idx: 1, modifier: None, span: _ } - ) - && template[2] == 
InlineAsmTemplatePiece::String(", (".to_string()) - && matches!( - template[3], - InlineAsmTemplatePiece::Placeholder { operand_idx: 0, modifier: None, span: _ } - ) - && template[4] == InlineAsmTemplatePiece::String(")".to_string()) - { - let destination_block = fx.get_block(destination.unwrap()); - fx.bcx.ins().jump(destination_block, &[]); - return; - } + // Used by panic_abort on Windows, but uses a syntax which only happens to work with + // asm!() by accident and breaks with the GNU assembler as well as global_asm!() for + // the LLVM backend. + if template.len() == 1 + && template[0] == InlineAsmTemplatePiece::String("int $$0x29".to_string()) + { + fx.bcx.ins().trap(TrapCode::User(1)); + return; } let operands = operands .into_iter() .map(|operand| match *operand { - InlineAsmOperand::In { reg, ref value } => { - CInlineAsmOperand::In { reg, value: crate::base::codegen_operand(fx, value) } - } + InlineAsmOperand::In { reg, ref value } => CInlineAsmOperand::In { + reg, + value: crate::base::codegen_operand(fx, value).load_scalar(fx), + }, InlineAsmOperand::Out { reg, late, ref place } => CInlineAsmOperand::Out { reg, late, @@ -237,7 +70,7 @@ CInlineAsmOperand::InOut { reg, _late: late, - in_value: crate::base::codegen_operand(fx, in_value), + in_value: crate::base::codegen_operand(fx, in_value).load_scalar(fx), out_place: out_place.map(|place| crate::base::codegen_place(fx, place)), } } @@ -252,6 +85,12 @@ CInlineAsmOperand::Const { value } } InlineAsmOperand::SymFn { ref value } => { + if cfg!(not(feature = "inline_asm_sym")) { + fx.tcx + .sess + .span_err(span, "asm! and global_asm! sym operands are not yet supported"); + } + let const_ = fx.monomorphize(value.const_); if let ty::FnDef(def_id, args) = *const_.ty().kind() { let instance = ty::Instance::resolve_for_fn_ptr( @@ -329,7 +168,7 @@ for (i, operand) in operands.iter().enumerate() { match operand { CInlineAsmOperand::In { reg: _, value } => { - inputs.push((asm_gen.stack_slots_input[i].unwrap(), value.load_scalar(fx))); + inputs.push((asm_gen.stack_slots_input[i].unwrap(), *value)); } CInlineAsmOperand::Out { reg: _, late: _, place } => { if let Some(place) = place { @@ -337,7 +176,7 @@ } } CInlineAsmOperand::InOut { reg: _, _late: _, in_value, out_place } => { - inputs.push((asm_gen.stack_slots_input[i].unwrap(), in_value.load_scalar(fx))); + inputs.push((asm_gen.stack_slots_input[i].unwrap(), *in_value)); if let Some(out_place) = out_place { outputs.push((asm_gen.stack_slots_output[i].unwrap(), *out_place)); } @@ -589,11 +428,29 @@ } fn generate_asm_wrapper(&self, asm_name: &str) -> String { + let binary_format = crate::target_triple(self.tcx.sess).binary_format; + let mut generated_asm = String::new(); - writeln!(generated_asm, ".globl {}", asm_name).unwrap(); - writeln!(generated_asm, ".type {},@function", asm_name).unwrap(); - writeln!(generated_asm, ".section .text.{},\"ax\",@progbits", asm_name).unwrap(); - writeln!(generated_asm, "{}:", asm_name).unwrap(); + match binary_format { + BinaryFormat::Elf => { + writeln!(generated_asm, ".globl {}", asm_name).unwrap(); + writeln!(generated_asm, ".type {},@function", asm_name).unwrap(); + writeln!(generated_asm, ".section .text.{},\"ax\",@progbits", asm_name).unwrap(); + writeln!(generated_asm, "{}:", asm_name).unwrap(); + } + BinaryFormat::Macho => { + writeln!(generated_asm, ".globl _{}", asm_name).unwrap(); + writeln!(generated_asm, "_{}:", asm_name).unwrap(); + } + BinaryFormat::Coff => { + writeln!(generated_asm, ".globl {}", asm_name).unwrap(); + 
writeln!(generated_asm, "{}:", asm_name).unwrap(); + } + _ => self + .tcx + .sess + .fatal(format!("Unsupported binary format for inline asm: {binary_format:?}")), + } let is_x86 = matches!(self.arch, InlineAsmArch::X86 | InlineAsmArch::X86_64); @@ -690,8 +547,19 @@ if is_x86 { generated_asm.push_str(".att_syntax\n"); } - writeln!(generated_asm, ".size {name}, .-{name}", name = asm_name).unwrap(); - generated_asm.push_str(".text\n"); + + match binary_format { + BinaryFormat::Elf => { + writeln!(generated_asm, ".size {name}, .-{name}", name = asm_name).unwrap(); + generated_asm.push_str(".text\n"); + } + BinaryFormat::Macho | BinaryFormat::Coff => {} + _ => self + .tcx + .sess + .fatal(format!("Unsupported binary format for inline asm: {binary_format:?}")), + } + generated_asm.push_str("\n\n"); generated_asm @@ -699,25 +567,26 @@ fn prologue(generated_asm: &mut String, arch: InlineAsmArch) { match arch { - InlineAsmArch::X86 => { - generated_asm.push_str(" push ebp\n"); - generated_asm.push_str(" mov ebp,[esp+8]\n"); - } InlineAsmArch::X86_64 => { generated_asm.push_str(" push rbp\n"); - generated_asm.push_str(" mov rbp,rdi\n"); - } - InlineAsmArch::RiscV32 => { - generated_asm.push_str(" addi sp, sp, -8\n"); - generated_asm.push_str(" sw ra, 4(sp)\n"); - generated_asm.push_str(" sw s0, 0(sp)\n"); - generated_asm.push_str(" mv s0, a0\n"); + generated_asm.push_str(" mov rbp,rsp\n"); + generated_asm.push_str(" push rbx\n"); // rbx is callee saved + // rbx is reserved by LLVM for the "base pointer", so rustc doesn't allow using it + generated_asm.push_str(" mov rbx,rdi\n"); + } + InlineAsmArch::AArch64 => { + generated_asm.push_str(" stp fp, lr, [sp, #-32]!\n"); + generated_asm.push_str(" mov fp, sp\n"); + generated_asm.push_str(" str x19, [sp, #24]\n"); // x19 is callee saved + // x19 is reserved by LLVM for the "base pointer", so rustc doesn't allow using it + generated_asm.push_str(" mov x19, x0\n"); } InlineAsmArch::RiscV64 => { generated_asm.push_str(" addi sp, sp, -16\n"); generated_asm.push_str(" sd ra, 8(sp)\n"); - generated_asm.push_str(" sd s0, 0(sp)\n"); - generated_asm.push_str(" mv s0, a0\n"); + generated_asm.push_str(" sd s1, 0(sp)\n"); // s1 is callee saved + // s1/x9 is reserved by LLVM for the "base pointer", so rustc doesn't allow using it + generated_asm.push_str(" mv s1, a0\n"); } _ => unimplemented!("prologue for {:?}", arch), } @@ -725,22 +594,18 @@ fn epilogue(generated_asm: &mut String, arch: InlineAsmArch) { match arch { - InlineAsmArch::X86 => { - generated_asm.push_str(" pop ebp\n"); - generated_asm.push_str(" ret\n"); - } InlineAsmArch::X86_64 => { + generated_asm.push_str(" pop rbx\n"); generated_asm.push_str(" pop rbp\n"); generated_asm.push_str(" ret\n"); } - InlineAsmArch::RiscV32 => { - generated_asm.push_str(" lw s0, 0(sp)\n"); - generated_asm.push_str(" lw ra, 4(sp)\n"); - generated_asm.push_str(" addi sp, sp, 8\n"); + InlineAsmArch::AArch64 => { + generated_asm.push_str(" ldr x19, [sp, #24]\n"); + generated_asm.push_str(" ldp fp, lr, [sp], #32\n"); generated_asm.push_str(" ret\n"); } InlineAsmArch::RiscV64 => { - generated_asm.push_str(" ld s0, 0(sp)\n"); + generated_asm.push_str(" ld s1, 0(sp)\n"); generated_asm.push_str(" ld ra, 8(sp)\n"); generated_asm.push_str(" addi sp, sp, 16\n"); generated_asm.push_str(" ret\n"); @@ -751,10 +616,13 @@ fn epilogue_noreturn(generated_asm: &mut String, arch: InlineAsmArch) { match arch { - InlineAsmArch::X86 | InlineAsmArch::X86_64 => { + InlineAsmArch::X86_64 => { generated_asm.push_str(" ud2\n"); } - 
InlineAsmArch::RiscV32 | InlineAsmArch::RiscV64 => { + InlineAsmArch::AArch64 => { + generated_asm.push_str(" brk #0x1\n"); + } + InlineAsmArch::RiscV64 => { generated_asm.push_str(" ebreak\n"); } _ => unimplemented!("epilogue_noreturn for {:?}", arch), @@ -768,25 +636,20 @@ offset: Size, ) { match arch { - InlineAsmArch::X86 => { - write!(generated_asm, " mov [ebp+0x{:x}], ", offset.bytes()).unwrap(); - reg.emit(generated_asm, InlineAsmArch::X86, None).unwrap(); - generated_asm.push('\n'); - } InlineAsmArch::X86_64 => { - write!(generated_asm, " mov [rbp+0x{:x}], ", offset.bytes()).unwrap(); + write!(generated_asm, " mov [rbx+0x{:x}], ", offset.bytes()).unwrap(); reg.emit(generated_asm, InlineAsmArch::X86_64, None).unwrap(); generated_asm.push('\n'); } - InlineAsmArch::RiscV32 => { - generated_asm.push_str(" sw "); - reg.emit(generated_asm, InlineAsmArch::RiscV32, None).unwrap(); - writeln!(generated_asm, ", 0x{:x}(s0)", offset.bytes()).unwrap(); + InlineAsmArch::AArch64 => { + generated_asm.push_str(" str "); + reg.emit(generated_asm, InlineAsmArch::AArch64, None).unwrap(); + writeln!(generated_asm, ", [x19, 0x{:x}]", offset.bytes()).unwrap(); } InlineAsmArch::RiscV64 => { generated_asm.push_str(" sd "); reg.emit(generated_asm, InlineAsmArch::RiscV64, None).unwrap(); - writeln!(generated_asm, ", 0x{:x}(s0)", offset.bytes()).unwrap(); + writeln!(generated_asm, ", 0x{:x}(s1)", offset.bytes()).unwrap(); } _ => unimplemented!("save_register for {:?}", arch), } @@ -799,25 +662,20 @@ offset: Size, ) { match arch { - InlineAsmArch::X86 => { - generated_asm.push_str(" mov "); - reg.emit(generated_asm, InlineAsmArch::X86, None).unwrap(); - writeln!(generated_asm, ", [ebp+0x{:x}]", offset.bytes()).unwrap(); - } InlineAsmArch::X86_64 => { generated_asm.push_str(" mov "); reg.emit(generated_asm, InlineAsmArch::X86_64, None).unwrap(); - writeln!(generated_asm, ", [rbp+0x{:x}]", offset.bytes()).unwrap(); + writeln!(generated_asm, ", [rbx+0x{:x}]", offset.bytes()).unwrap(); } - InlineAsmArch::RiscV32 => { - generated_asm.push_str(" lw "); - reg.emit(generated_asm, InlineAsmArch::RiscV32, None).unwrap(); - writeln!(generated_asm, ", 0x{:x}(s0)", offset.bytes()).unwrap(); + InlineAsmArch::AArch64 => { + generated_asm.push_str(" ldr "); + reg.emit(generated_asm, InlineAsmArch::AArch64, None).unwrap(); + writeln!(generated_asm, ", [x19, 0x{:x}]", offset.bytes()).unwrap(); } InlineAsmArch::RiscV64 => { generated_asm.push_str(" ld "); reg.emit(generated_asm, InlineAsmArch::RiscV64, None).unwrap(); - writeln!(generated_asm, ", 0x{:x}(s0)", offset.bytes()).unwrap(); + writeln!(generated_asm, ", 0x{:x}(s1)", offset.bytes()).unwrap(); } _ => unimplemented!("restore_register for {:?}", arch), } @@ -831,13 +689,7 @@ inputs: Vec<(Size, Value)>, outputs: Vec<(Size, CPlace<'tcx>)>, ) { - let stack_slot = fx.bcx.func.create_sized_stack_slot(StackSlotData { - kind: StackSlotKind::ExplicitSlot, - size: u32::try_from(slot_size.bytes()).unwrap(), - }); - if fx.clif_comments.enabled() { - fx.add_comment(stack_slot, "inline asm scratch slot"); - } + let stack_slot = fx.create_stack_slot(u32::try_from(slot_size.bytes()).unwrap(), 16); let inline_asm_func = fx .module @@ -857,15 +709,103 @@ } for (offset, value) in inputs { - fx.bcx.ins().stack_store(value, stack_slot, i32::try_from(offset.bytes()).unwrap()); + stack_slot.offset(fx, i32::try_from(offset.bytes()).unwrap().into()).store( + fx, + value, + MemFlags::trusted(), + ); } - let stack_slot_addr = fx.bcx.ins().stack_addr(fx.pointer_type, stack_slot, 0); + let 
stack_slot_addr = stack_slot.get_addr(fx); fx.bcx.ins().call(inline_asm_func, &[stack_slot_addr]); for (offset, place) in outputs { let ty = fx.clif_type(place.layout().ty).unwrap(); - let value = fx.bcx.ins().stack_load(ty, stack_slot, i32::try_from(offset.bytes()).unwrap()); + let value = stack_slot.offset(fx, i32::try_from(offset.bytes()).unwrap().into()).load( + fx, + ty, + MemFlags::trusted(), + ); place.write_cvalue(fx, CValue::by_val(value, place.layout())); } } + +pub(crate) fn codegen_xgetbv<'tcx>( + fx: &mut FunctionCx<'_, '_, 'tcx>, + xcr_no: Value, + ret: CPlace<'tcx>, +) { + // FIXME add .eh_frame unwind info directives + + let operands = vec![ + CInlineAsmOperand::In { + reg: InlineAsmRegOrRegClass::Reg(InlineAsmReg::X86(X86InlineAsmReg::cx)), + value: xcr_no, + }, + CInlineAsmOperand::Out { + reg: InlineAsmRegOrRegClass::Reg(InlineAsmReg::X86(X86InlineAsmReg::ax)), + late: true, + place: Some(ret), + }, + CInlineAsmOperand::Out { + reg: InlineAsmRegOrRegClass::Reg(InlineAsmReg::X86(X86InlineAsmReg::dx)), + late: true, + place: None, + }, + ]; + let options = InlineAsmOptions::NOSTACK | InlineAsmOptions::PURE | InlineAsmOptions::NOMEM; + + let mut inputs = Vec::new(); + let mut outputs = Vec::new(); + + let mut asm_gen = InlineAssemblyGenerator { + tcx: fx.tcx, + arch: fx.tcx.sess.asm_arch.unwrap(), + enclosing_def_id: fx.instance.def_id(), + template: &[InlineAsmTemplatePiece::String( + " + xgetbv + // out = rdx << 32 | rax + shl rdx, 32 + or rax, rdx + " + .to_string(), + )], + operands: &operands, + options, + registers: Vec::new(), + stack_slots_clobber: Vec::new(), + stack_slots_input: Vec::new(), + stack_slots_output: Vec::new(), + stack_slot_size: Size::from_bytes(0), + }; + asm_gen.allocate_registers(); + asm_gen.allocate_stack_slots(); + + let inline_asm_index = fx.cx.inline_asm_index.get(); + fx.cx.inline_asm_index.set(inline_asm_index + 1); + let asm_name = format!( + "__inline_asm_{}_n{}", + fx.cx.cgu_name.as_str().replace('.', "__").replace('-', "_"), + inline_asm_index + ); + + let generated_asm = asm_gen.generate_asm_wrapper(&asm_name); + fx.cx.global_asm.push_str(&generated_asm); + + for (i, operand) in operands.iter().enumerate() { + match operand { + CInlineAsmOperand::In { reg: _, value } => { + inputs.push((asm_gen.stack_slots_input[i].unwrap(), *value)); + } + CInlineAsmOperand::Out { reg: _, late: _, place } => { + if let Some(place) = place { + outputs.push((asm_gen.stack_slots_output[i].unwrap(), *place)); + } + } + _ => unreachable!(), + } + } + + call_inline_asm(fx, &asm_name, asm_gen.stack_slot_size, inputs, outputs); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/cpuid.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/cpuid.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/cpuid.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/cpuid.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,74 +0,0 @@ -//! Emulation of a subset of the cpuid x86 instruction. - -use crate::prelude::*; - -/// Emulates a subset of the cpuid x86 instruction. -/// -/// This emulates an intel cpu with sse and sse2 support, but which doesn't support anything else. 
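The codegen_xgetbv wrapper above ends with the instructions shl rdx, 32 and or rax, rdx; a minimal sketch of that EDX:EAX packing in plain Rust (illustrative only; the actual halves come from the XGETBV instruction):

fn pack_edx_eax(eax: u32, edx: u32) -> u64 {
    // shl rdx, 32 ; or rax, rdx
    (u64::from(edx) << 32) | u64::from(eax)
}

fn main() {
    // e.g. an XCR0 value with x87, SSE and AVX state enabled lives entirely in EAX.
    assert_eq!(pack_edx_eax(0b111, 0), 0b111);
    assert_eq!(pack_edx_eax(0x0000_0007, 0x0000_0001), 0x0000_0001_0000_0007);
}
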
-pub(crate) fn codegen_cpuid_call<'tcx>( - fx: &mut FunctionCx<'_, '_, 'tcx>, - leaf: Value, - _sub_leaf: Value, -) -> (Value, Value, Value, Value) { - let leaf_0 = fx.bcx.create_block(); - let leaf_1 = fx.bcx.create_block(); - let leaf_7 = fx.bcx.create_block(); - let leaf_8000_0000 = fx.bcx.create_block(); - let leaf_8000_0001 = fx.bcx.create_block(); - let unsupported_leaf = fx.bcx.create_block(); - - let dest = fx.bcx.create_block(); - let eax = fx.bcx.append_block_param(dest, types::I32); - let ebx = fx.bcx.append_block_param(dest, types::I32); - let ecx = fx.bcx.append_block_param(dest, types::I32); - let edx = fx.bcx.append_block_param(dest, types::I32); - - let mut switch = cranelift_frontend::Switch::new(); - switch.set_entry(0, leaf_0); - switch.set_entry(1, leaf_1); - switch.set_entry(7, leaf_7); - switch.set_entry(0x8000_0000, leaf_8000_0000); - switch.set_entry(0x8000_0001, leaf_8000_0001); - switch.emit(&mut fx.bcx, leaf, unsupported_leaf); - - fx.bcx.switch_to_block(leaf_0); - let max_basic_leaf = fx.bcx.ins().iconst(types::I32, 1); - let vend0 = fx.bcx.ins().iconst(types::I32, i64::from(u32::from_le_bytes(*b"Genu"))); - let vend2 = fx.bcx.ins().iconst(types::I32, i64::from(u32::from_le_bytes(*b"ineI"))); - let vend1 = fx.bcx.ins().iconst(types::I32, i64::from(u32::from_le_bytes(*b"ntel"))); - fx.bcx.ins().jump(dest, &[max_basic_leaf, vend0, vend1, vend2]); - - fx.bcx.switch_to_block(leaf_1); - let cpu_signature = fx.bcx.ins().iconst(types::I32, 0); - let additional_information = fx.bcx.ins().iconst(types::I32, 0); - let ecx_features = fx.bcx.ins().iconst(types::I32, 0); - let edx_features = fx.bcx.ins().iconst(types::I32, 1 << 25 /* sse */ | 1 << 26 /* sse2 */); - fx.bcx.ins().jump(dest, &[cpu_signature, additional_information, ecx_features, edx_features]); - - fx.bcx.switch_to_block(leaf_7); - // This leaf technically has subleaves, but we just return zero for all subleaves. - let zero = fx.bcx.ins().iconst(types::I32, 0); - fx.bcx.ins().jump(dest, &[zero, zero, zero, zero]); - - fx.bcx.switch_to_block(leaf_8000_0000); - let extended_max_basic_leaf = fx.bcx.ins().iconst(types::I32, 0); - let zero = fx.bcx.ins().iconst(types::I32, 0); - fx.bcx.ins().jump(dest, &[extended_max_basic_leaf, zero, zero, zero]); - - fx.bcx.switch_to_block(leaf_8000_0001); - let zero = fx.bcx.ins().iconst(types::I32, 0); - let proc_info_ecx = fx.bcx.ins().iconst(types::I32, 0); - let proc_info_edx = fx.bcx.ins().iconst(types::I32, 0); - fx.bcx.ins().jump(dest, &[zero, zero, proc_info_ecx, proc_info_edx]); - - fx.bcx.switch_to_block(unsupported_leaf); - crate::trap::trap_unimplemented( - fx, - "__cpuid_count arch intrinsic doesn't yet support specified leaf", - ); - - fx.bcx.switch_to_block(dest); - fx.bcx.ins().nop(); - - (eax, ebx, ecx, edx) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,10 +1,10 @@ //! 
Emulate LLVM intrinsics +use rustc_middle::ty::GenericArgsRef; + use crate::intrinsics::*; use crate::prelude::*; -use rustc_middle::ty::GenericArgsRef; - pub(crate) fn codegen_llvm_intrinsic_call<'tcx>( fx: &mut FunctionCx<'_, '_, 'tcx>, intrinsic: &str, @@ -51,6 +51,21 @@ }); } + _ if intrinsic.starts_with("llvm.fma.v") => { + intrinsic_args!(fx, args => (x,y,z); intrinsic); + + simd_trio_for_each_lane( + fx, + x, + y, + z, + ret, + &|fx, _lane_ty, _res_lane_ty, lane_x, lane_y, lane_z| { + fx.bcx.ins().fma(lane_x, lane_y, lane_z) + }, + ); + } + _ => { fx.tcx .sess diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm_aarch64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm_aarch64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm_aarch64.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm_aarch64.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,10 +1,10 @@ //! Emulate AArch64 LLVM intrinsics +use rustc_middle::ty::GenericArgsRef; + use crate::intrinsics::*; use crate::prelude::*; -use rustc_middle::ty::GenericArgsRef; - pub(crate) fn codegen_aarch64_llvm_intrinsic_call<'tcx>( fx: &mut FunctionCx<'_, '_, 'tcx>, intrinsic: &str, @@ -44,7 +44,9 @@ }); } - _ if intrinsic.starts_with("llvm.aarch64.neon.sqadd.v") => { + _ if intrinsic.starts_with("llvm.aarch64.neon.sqadd.v") + || intrinsic.starts_with("llvm.aarch64.neon.uqadd.v") => + { intrinsic_args!(fx, args => (x, y); intrinsic); simd_pair_for_each_lane_typed(fx, x, y, ret, &|fx, x_lane, y_lane| { @@ -52,7 +54,9 @@ }); } - _ if intrinsic.starts_with("llvm.aarch64.neon.sqsub.v") => { + _ if intrinsic.starts_with("llvm.aarch64.neon.sqsub.v") + || intrinsic.starts_with("llvm.aarch64.neon.uqsub.v") => + { intrinsic_args!(fx, args => (x, y); intrinsic); simd_pair_for_each_lane_typed(fx, x, y, ret, &|fx, x_lane, y_lane| { @@ -156,6 +160,106 @@ }); } + _ if intrinsic.starts_with("llvm.aarch64.neon.umaxp.v") => { + intrinsic_args!(fx, args => (x, y); intrinsic); + + simd_horizontal_pair_for_each_lane( + fx, + x, + y, + ret, + &|fx, _lane_ty, _res_lane_ty, x_lane, y_lane| fx.bcx.ins().umax(x_lane, y_lane), + ); + } + + _ if intrinsic.starts_with("llvm.aarch64.neon.smaxp.v") => { + intrinsic_args!(fx, args => (x, y); intrinsic); + + simd_horizontal_pair_for_each_lane( + fx, + x, + y, + ret, + &|fx, _lane_ty, _res_lane_ty, x_lane, y_lane| fx.bcx.ins().smax(x_lane, y_lane), + ); + } + + _ if intrinsic.starts_with("llvm.aarch64.neon.uminp.v") => { + intrinsic_args!(fx, args => (x, y); intrinsic); + + simd_horizontal_pair_for_each_lane( + fx, + x, + y, + ret, + &|fx, _lane_ty, _res_lane_ty, x_lane, y_lane| fx.bcx.ins().umin(x_lane, y_lane), + ); + } + + _ if intrinsic.starts_with("llvm.aarch64.neon.sminp.v") => { + intrinsic_args!(fx, args => (x, y); intrinsic); + + simd_horizontal_pair_for_each_lane( + fx, + x, + y, + ret, + &|fx, _lane_ty, _res_lane_ty, x_lane, y_lane| fx.bcx.ins().smin(x_lane, y_lane), + ); + } + + _ if intrinsic.starts_with("llvm.aarch64.neon.fminp.v") => { + intrinsic_args!(fx, args => (x, y); intrinsic); + + simd_horizontal_pair_for_each_lane( + fx, + x, + y, + ret, + &|fx, _lane_ty, _res_lane_ty, x_lane, y_lane| fx.bcx.ins().fmin(x_lane, y_lane), + ); + } + + _ if intrinsic.starts_with("llvm.aarch64.neon.fmaxp.v") => { + intrinsic_args!(fx, args => (x, y); intrinsic); + + simd_horizontal_pair_for_each_lane( + fx, + x, + y, + ret, + &|fx, 
_lane_ty, _res_lane_ty, x_lane, y_lane| fx.bcx.ins().fmax(x_lane, y_lane), + ); + } + + _ if intrinsic.starts_with("llvm.aarch64.neon.addp.v") => { + intrinsic_args!(fx, args => (x, y); intrinsic); + + simd_horizontal_pair_for_each_lane( + fx, + x, + y, + ret, + &|fx, _lane_ty, _res_lane_ty, x_lane, y_lane| fx.bcx.ins().iadd(x_lane, y_lane), + ); + } + + // FIXME generalize vector types + "llvm.aarch64.neon.tbl1.v16i8" => { + intrinsic_args!(fx, args => (t, idx); intrinsic); + + let zero = fx.bcx.ins().iconst(types::I8, 0); + for i in 0..16 { + let idx_lane = idx.value_lane(fx, i).load_scalar(fx); + let is_zero = + fx.bcx.ins().icmp_imm(IntCC::UnsignedGreaterThanOrEqual, idx_lane, 16); + let t_idx = fx.bcx.ins().uextend(fx.pointer_type, idx_lane); + let t_lane = t.value_lane_dyn(fx, t_idx).load_scalar(fx); + let res = fx.bcx.ins().select(is_zero, zero, t_lane); + ret.place_lane(fx, i).to_ptr().store(fx, res, MemFlags::trusted()); + } + } + /* _ if intrinsic.starts_with("llvm.aarch64.neon.sshl.v") || intrinsic.starts_with("llvm.aarch64.neon.sqshl.v") diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm_x86.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm_x86.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm_x86.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/llvm_x86.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,10 +1,10 @@ //! Emulate x86 LLVM intrinsics +use rustc_middle::ty::GenericArgsRef; + use crate::intrinsics::*; use crate::prelude::*; -use rustc_middle::ty::GenericArgsRef; - pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>( fx: &mut FunctionCx<'_, '_, 'tcx>, intrinsic: &str, @@ -20,53 +20,23 @@ // Used by is_x86_feature_detected!(); "llvm.x86.xgetbv" => { - // FIXME use the actual xgetbv instruction - intrinsic_args!(fx, args => (v); intrinsic); + intrinsic_args!(fx, args => (xcr_no); intrinsic); - let v = v.load_scalar(fx); + let xcr_no = xcr_no.load_scalar(fx); - // As of writing on XCR0 exists - fx.bcx.ins().trapnz(v, TrapCode::UnreachableCodeReached); - - let res = fx.bcx.ins().iconst(types::I64, 1 /* bit 0 must be set */); - ret.write_cvalue(fx, CValue::by_val(res, fx.layout_of(fx.tcx.types.i64))); + crate::inline_asm::codegen_xgetbv(fx, xcr_no, ret); } - // Used by `_mm_movemask_epi8` and `_mm256_movemask_epi8` - "llvm.x86.sse2.pmovmskb.128" - | "llvm.x86.avx2.pmovmskb" - | "llvm.x86.sse.movmsk.ps" - | "llvm.x86.sse2.movmsk.pd" => { - intrinsic_args!(fx, args => (a); intrinsic); - - let (lane_count, lane_ty) = a.layout().ty.simd_size_and_type(fx.tcx); - let lane_ty = fx.clif_type(lane_ty).unwrap(); - assert!(lane_count <= 32); - - let mut res = fx.bcx.ins().iconst(types::I32, 0); - - for lane in (0..lane_count).rev() { - let a_lane = a.value_lane(fx, lane).load_scalar(fx); - - // cast float to int - let a_lane = match lane_ty { - types::F32 => codegen_bitcast(fx, types::I32, a_lane), - types::F64 => codegen_bitcast(fx, types::I64, a_lane), - _ => a_lane, - }; - - // extract sign bit of an int - let a_lane_sign = fx.bcx.ins().ushr_imm(a_lane, i64::from(lane_ty.bits() - 1)); - - // shift sign bit into result - let a_lane_sign = clif_intcast(fx, a_lane_sign, types::I32, false); - res = fx.bcx.ins().ishl_imm(res, 1); - res = fx.bcx.ins().bor(res, a_lane_sign); - } + "llvm.x86.sse3.ldu.dq" | "llvm.x86.avx.ldu.dq.256" => { + // 
https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_lddqu_si128&ig_expand=4009 + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_lddqu_si256&ig_expand=4010 + intrinsic_args!(fx, args => (ptr); intrinsic); - let res = CValue::by_val(res, fx.layout_of(fx.tcx.types.i32)); - ret.write_cvalue(fx, res); + // FIXME correctly handle unalignedness + let val = CValue::by_ref(Pointer::new(ptr.load_scalar(fx)), ret.layout()); + ret.write_cvalue(fx, val); } + "llvm.x86.sse.cmp.ps" | "llvm.x86.sse2.cmp.pd" => { let (x, y, kind) = match args { [x, y, kind] => (x, y, kind), @@ -74,8 +44,10 @@ }; let x = codegen_operand(fx, x); let y = codegen_operand(fx, y); - let kind = crate::constant::mir_operand_get_const_val(fx, kind) - .expect("llvm.x86.sse2.cmp.* kind not const"); + let kind = match kind { + Operand::Constant(const_) => crate::constant::eval_mir_constant(fx, const_).0, + Operand::Copy(_) | Operand::Move(_) => unreachable!("{kind:?}"), + }; let flt_cc = match kind .try_to_bits(Size::from_bytes(1)) @@ -210,8 +182,12 @@ } } } - "llvm.x86.avx2.vperm2i128" => { + "llvm.x86.avx2.vperm2i128" + | "llvm.x86.avx.vperm2f128.ps.256" + | "llvm.x86.avx.vperm2f128.pd.256" => { // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_permute2x128_si256 + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_permute2f128_ps + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_permute2f128_pd let (a, b, imm8) = match args { [a, b, imm8] => (a, b, imm8), _ => bug!("wrong number of args for intrinsic {intrinsic}"), @@ -220,19 +196,11 @@ let b = codegen_operand(fx, b); let imm8 = codegen_operand(fx, imm8).load_scalar(fx); - let a_0 = a.value_lane(fx, 0).load_scalar(fx); - let a_1 = a.value_lane(fx, 1).load_scalar(fx); - let a_low = fx.bcx.ins().iconcat(a_0, a_1); - let a_2 = a.value_lane(fx, 2).load_scalar(fx); - let a_3 = a.value_lane(fx, 3).load_scalar(fx); - let a_high = fx.bcx.ins().iconcat(a_2, a_3); - - let b_0 = b.value_lane(fx, 0).load_scalar(fx); - let b_1 = b.value_lane(fx, 1).load_scalar(fx); - let b_low = fx.bcx.ins().iconcat(b_0, b_1); - let b_2 = b.value_lane(fx, 2).load_scalar(fx); - let b_3 = b.value_lane(fx, 3).load_scalar(fx); - let b_high = fx.bcx.ins().iconcat(b_2, b_3); + let a_low = a.value_typed_lane(fx, fx.tcx.types.u128, 0).load_scalar(fx); + let a_high = a.value_typed_lane(fx, fx.tcx.types.u128, 1).load_scalar(fx); + + let b_low = b.value_typed_lane(fx, fx.tcx.types.u128, 0).load_scalar(fx); + let b_high = b.value_typed_lane(fx, fx.tcx.types.u128, 1).load_scalar(fx); fn select4( fx: &mut FunctionCx<'_, '_, '_>, @@ -257,16 +225,20 @@ let control0 = imm8; let res_low = select4(fx, a_high, a_low, b_high, b_low, control0); - let (res_0, res_1) = fx.bcx.ins().isplit(res_low); let control1 = fx.bcx.ins().ushr_imm(imm8, 4); let res_high = select4(fx, a_high, a_low, b_high, b_low, control1); - let (res_2, res_3) = fx.bcx.ins().isplit(res_high); - ret.place_lane(fx, 0).to_ptr().store(fx, res_0, MemFlags::trusted()); - ret.place_lane(fx, 1).to_ptr().store(fx, res_1, MemFlags::trusted()); - ret.place_lane(fx, 2).to_ptr().store(fx, res_2, MemFlags::trusted()); - ret.place_lane(fx, 3).to_ptr().store(fx, res_3, MemFlags::trusted()); + ret.place_typed_lane(fx, fx.tcx.types.u128, 0).to_ptr().store( + fx, + res_low, + MemFlags::trusted(), + ); + ret.place_typed_lane(fx, fx.tcx.types.u128, 1).to_ptr().store( + fx, + res_high, + 
MemFlags::trusted(), + ); } "llvm.x86.ssse3.pabs.b.128" | "llvm.x86.ssse3.pabs.w.128" | "llvm.x86.ssse3.pabs.d.128" => { let a = match args { @@ -308,6 +280,512 @@ let val = CValue::by_val_pair(cb_out, c, layout); ret.write_cvalue(fx, val); } + "llvm.x86.sse2.pavg.b" | "llvm.x86.sse2.pavg.w" => { + intrinsic_args!(fx, args => (a, b); intrinsic); + + // FIXME use vector instructions when possible + simd_pair_for_each_lane( + fx, + a, + b, + ret, + &|fx, _lane_ty, _res_lane_ty, a_lane, b_lane| { + // (a + b + 1) >> 1 + let lane_ty = fx.bcx.func.dfg.value_type(a_lane); + let a_lane = fx.bcx.ins().uextend(lane_ty.double_width().unwrap(), a_lane); + let b_lane = fx.bcx.ins().uextend(lane_ty.double_width().unwrap(), b_lane); + let sum = fx.bcx.ins().iadd(a_lane, b_lane); + let num_plus_one = fx.bcx.ins().iadd_imm(sum, 1); + let res = fx.bcx.ins().ushr_imm(num_plus_one, 1); + fx.bcx.ins().ireduce(lane_ty, res) + }, + ); + } + "llvm.x86.sse2.psra.w" => { + intrinsic_args!(fx, args => (a, count); intrinsic); + + let count_lane = count.force_stack(fx).0.load(fx, types::I64, MemFlags::trusted()); + let lane_ty = fx.clif_type(a.layout().ty.simd_size_and_type(fx.tcx).1).unwrap(); + let max_count = fx.bcx.ins().iconst(types::I64, i64::from(lane_ty.bits() - 1)); + let saturated_count = fx.bcx.ins().umin(count_lane, max_count); + + // FIXME use vector instructions when possible + simd_for_each_lane(fx, a, ret, &|fx, _lane_ty, _res_lane_ty, a_lane| { + fx.bcx.ins().sshr(a_lane, saturated_count) + }); + } + "llvm.x86.sse2.psad.bw" | "llvm.x86.avx2.psad.bw" => { + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_sad_epu8&ig_expand=5770 + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_sad_epu8&ig_expand=5771 + intrinsic_args!(fx, args => (a, b); intrinsic); + + assert_eq!(a.layout(), b.layout()); + let layout = a.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + assert_eq!(lane_ty, fx.tcx.types.u8); + assert_eq!(ret_lane_ty, fx.tcx.types.u64); + assert_eq!(lane_count, ret_lane_count * 8); + + let ret_lane_layout = fx.layout_of(fx.tcx.types.u64); + for out_lane_idx in 0..lane_count / 8 { + let mut lane_diff_acc = fx.bcx.ins().iconst(types::I64, 0); + + for lane_idx in out_lane_idx * 8..out_lane_idx * 8 + 1 { + let a_lane = a.value_lane(fx, lane_idx).load_scalar(fx); + let b_lane = b.value_lane(fx, lane_idx).load_scalar(fx); + + let lane_diff = fx.bcx.ins().isub(a_lane, b_lane); + let abs_lane_diff = fx.bcx.ins().iabs(lane_diff); + let abs_lane_diff = fx.bcx.ins().uextend(types::I64, abs_lane_diff); + lane_diff_acc = fx.bcx.ins().iadd(lane_diff_acc, abs_lane_diff); + } + + let res_lane = CValue::by_val(lane_diff_acc, ret_lane_layout); + + ret.place_lane(fx, out_lane_idx).write_cvalue(fx, res_lane); + } + } + "llvm.x86.ssse3.pmadd.ub.sw.128" | "llvm.x86.avx2.pmadd.ub.sw" => { + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_maddubs_epi16&ig_expand=4267 + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_maddubs_epi16&ig_expand=4270 + intrinsic_args!(fx, args => (a, b); intrinsic); + + let (lane_count, lane_ty) = a.layout().ty.simd_size_and_type(fx.tcx); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + assert_eq!(lane_ty, fx.tcx.types.u8); + assert_eq!(ret_lane_ty, fx.tcx.types.i16); + assert_eq!(lane_count, 
ret_lane_count * 2); + + let ret_lane_layout = fx.layout_of(fx.tcx.types.i16); + for out_lane_idx in 0..lane_count / 2 { + let a_lane0 = a.value_lane(fx, out_lane_idx * 2).load_scalar(fx); + let a_lane0 = fx.bcx.ins().uextend(types::I16, a_lane0); + let b_lane0 = b.value_lane(fx, out_lane_idx * 2).load_scalar(fx); + let b_lane0 = fx.bcx.ins().sextend(types::I16, b_lane0); + + let a_lane1 = a.value_lane(fx, out_lane_idx * 2 + 1).load_scalar(fx); + let a_lane1 = fx.bcx.ins().uextend(types::I16, a_lane1); + let b_lane1 = b.value_lane(fx, out_lane_idx * 2 + 1).load_scalar(fx); + let b_lane1 = fx.bcx.ins().sextend(types::I16, b_lane1); + + let mul0: Value = fx.bcx.ins().imul(a_lane0, b_lane0); + let mul1 = fx.bcx.ins().imul(a_lane1, b_lane1); + + let (val, has_overflow) = fx.bcx.ins().sadd_overflow(mul0, mul1); + + let rhs_ge_zero = fx.bcx.ins().icmp_imm(IntCC::SignedGreaterThanOrEqual, mul1, 0); + + let min = fx.bcx.ins().iconst(types::I16, i64::from(i16::MIN as u16)); + let max = fx.bcx.ins().iconst(types::I16, i64::from(i16::MAX as u16)); + + let sat_val = fx.bcx.ins().select(rhs_ge_zero, max, min); + let res_lane = fx.bcx.ins().select(has_overflow, sat_val, val); + + let res_lane = CValue::by_val(res_lane, ret_lane_layout); + + ret.place_lane(fx, out_lane_idx).write_cvalue(fx, res_lane); + } + } + "llvm.x86.sse2.pmadd.wd" | "llvm.x86.avx2.pmadd.wd" => { + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_madd_epi16&ig_expand=4231 + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_madd_epi16&ig_expand=4234 + intrinsic_args!(fx, args => (a, b); intrinsic); + + assert_eq!(a.layout(), b.layout()); + let layout = a.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + assert_eq!(lane_ty, fx.tcx.types.i16); + assert_eq!(ret_lane_ty, fx.tcx.types.i32); + assert_eq!(lane_count, ret_lane_count * 2); + + let ret_lane_layout = fx.layout_of(fx.tcx.types.i32); + for out_lane_idx in 0..lane_count / 2 { + let a_lane0 = a.value_lane(fx, out_lane_idx * 2).load_scalar(fx); + let a_lane0 = fx.bcx.ins().uextend(types::I32, a_lane0); + let b_lane0 = b.value_lane(fx, out_lane_idx * 2).load_scalar(fx); + let b_lane0 = fx.bcx.ins().sextend(types::I32, b_lane0); + + let a_lane1 = a.value_lane(fx, out_lane_idx * 2 + 1).load_scalar(fx); + let a_lane1 = fx.bcx.ins().uextend(types::I32, a_lane1); + let b_lane1 = b.value_lane(fx, out_lane_idx * 2 + 1).load_scalar(fx); + let b_lane1 = fx.bcx.ins().sextend(types::I32, b_lane1); + + let mul0: Value = fx.bcx.ins().imul(a_lane0, b_lane0); + let mul1 = fx.bcx.ins().imul(a_lane1, b_lane1); + + let res_lane = fx.bcx.ins().iadd(mul0, mul1); + let res_lane = CValue::by_val(res_lane, ret_lane_layout); + + ret.place_lane(fx, out_lane_idx).write_cvalue(fx, res_lane); + } + } + + "llvm.x86.ssse3.pmul.hr.sw.128" => { + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_mulhrs_epi16&ig_expand=4782 + intrinsic_args!(fx, args => (a, b); intrinsic); + + assert_eq!(a.layout(), b.layout()); + let layout = a.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + assert_eq!(lane_ty, fx.tcx.types.i16); + assert_eq!(ret_lane_ty, fx.tcx.types.i16); + assert_eq!(lane_count, ret_lane_count); + + let ret_lane_layout = fx.layout_of(fx.tcx.types.i16); + for out_lane_idx in 
0..lane_count { + let a_lane = a.value_lane(fx, out_lane_idx).load_scalar(fx); + let a_lane = fx.bcx.ins().sextend(types::I32, a_lane); + let b_lane = b.value_lane(fx, out_lane_idx).load_scalar(fx); + let b_lane = fx.bcx.ins().sextend(types::I32, b_lane); + + let mul: Value = fx.bcx.ins().imul(a_lane, b_lane); + let shifted = fx.bcx.ins().ushr_imm(mul, 14); + let incremented = fx.bcx.ins().iadd_imm(shifted, 1); + let shifted_again = fx.bcx.ins().ushr_imm(incremented, 1); + + let res_lane = fx.bcx.ins().ireduce(types::I16, shifted_again); + let res_lane = CValue::by_val(res_lane, ret_lane_layout); + + ret.place_lane(fx, out_lane_idx).write_cvalue(fx, res_lane); + } + } + + "llvm.x86.sse2.packuswb.128" => { + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_packus_epi16&ig_expand=4903 + intrinsic_args!(fx, args => (a, b); intrinsic); + + assert_eq!(a.layout(), b.layout()); + let layout = a.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + assert_eq!(lane_ty, fx.tcx.types.i16); + assert_eq!(ret_lane_ty, fx.tcx.types.u8); + assert_eq!(lane_count * 2, ret_lane_count); + + let zero = fx.bcx.ins().iconst(types::I16, 0); + let max_u8 = fx.bcx.ins().iconst(types::I16, 255); + let ret_lane_layout = fx.layout_of(fx.tcx.types.u8); + + for idx in 0..lane_count { + let lane = a.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, zero); + let sat = fx.bcx.ins().umin(sat, max_u8); + let res = fx.bcx.ins().ireduce(types::I8, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, idx).write_cvalue(fx, res_lane); + } + + for idx in 0..lane_count { + let lane = b.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, zero); + let sat = fx.bcx.ins().umin(sat, max_u8); + let res = fx.bcx.ins().ireduce(types::I8, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, lane_count + idx).write_cvalue(fx, res_lane); + } + } + + "llvm.x86.avx2.packuswb" => { + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_packus_epi16&ig_expand=4906 + intrinsic_args!(fx, args => (a, b); intrinsic); + + assert_eq!(a.layout(), b.layout()); + let layout = a.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + assert_eq!(lane_ty, fx.tcx.types.i16); + assert_eq!(ret_lane_ty, fx.tcx.types.u8); + assert_eq!(lane_count * 2, ret_lane_count); + + let zero = fx.bcx.ins().iconst(types::I16, 0); + let max_u8 = fx.bcx.ins().iconst(types::I16, 255); + let ret_lane_layout = fx.layout_of(fx.tcx.types.u8); + + for idx in 0..lane_count / 2 { + let lane = a.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, zero); + let sat = fx.bcx.ins().umin(sat, max_u8); + let res = fx.bcx.ins().ireduce(types::I8, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, idx).write_cvalue(fx, res_lane); + } + + for idx in 0..lane_count / 2 { + let lane = b.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, zero); + let sat = fx.bcx.ins().umin(sat, max_u8); + let res = fx.bcx.ins().ireduce(types::I8, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, lane_count / 2 + idx).write_cvalue(fx, res_lane); + } + + for idx in 0..lane_count / 2 { + let lane = a.value_lane(fx, 
idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, zero); + let sat = fx.bcx.ins().umin(sat, max_u8); + let res = fx.bcx.ins().ireduce(types::I8, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, lane_count / 2 * 2 + idx).write_cvalue(fx, res_lane); + } + + for idx in 0..lane_count / 2 { + let lane = b.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, zero); + let sat = fx.bcx.ins().umin(sat, max_u8); + let res = fx.bcx.ins().ireduce(types::I8, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, lane_count / 2 * 3 + idx).write_cvalue(fx, res_lane); + } + } + + "llvm.x86.sse2.packssdw.128" => { + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_packs_epi32&ig_expand=4889 + intrinsic_args!(fx, args => (a, b); intrinsic); + + assert_eq!(a.layout(), b.layout()); + let layout = a.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + assert_eq!(lane_ty, fx.tcx.types.i32); + assert_eq!(ret_lane_ty, fx.tcx.types.i16); + assert_eq!(lane_count * 2, ret_lane_count); + + let min_i16 = fx.bcx.ins().iconst(types::I32, i64::from(i16::MIN as u16)); + let max_i16 = fx.bcx.ins().iconst(types::I32, i64::from(i16::MAX as u16)); + let ret_lane_layout = fx.layout_of(fx.tcx.types.i16); + + for idx in 0..lane_count { + let lane = a.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, min_i16); + let sat = fx.bcx.ins().umin(sat, max_i16); + let res = fx.bcx.ins().ireduce(types::I16, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, idx).write_cvalue(fx, res_lane); + } + + for idx in 0..lane_count { + let lane = b.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, min_i16); + let sat = fx.bcx.ins().umin(sat, max_i16); + let res = fx.bcx.ins().ireduce(types::I16, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, lane_count + idx).write_cvalue(fx, res_lane); + } + } + + "llvm.x86.sse41.packusdw" => { + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_packus_epi32&ig_expand=4912 + intrinsic_args!(fx, args => (a, b); intrinsic); + + assert_eq!(a.layout(), b.layout()); + let layout = a.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + assert_eq!(lane_ty, fx.tcx.types.i32); + assert_eq!(ret_lane_ty, fx.tcx.types.u16); + assert_eq!(lane_count * 2, ret_lane_count); + + let min_u16 = fx.bcx.ins().iconst(types::I32, i64::from(u16::MIN)); + let max_u16 = fx.bcx.ins().iconst(types::I32, i64::from(u16::MAX)); + let ret_lane_layout = fx.layout_of(fx.tcx.types.u16); + + for idx in 0..lane_count { + let lane = a.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().umax(lane, min_u16); + let sat = fx.bcx.ins().umin(sat, max_u16); + let res = fx.bcx.ins().ireduce(types::I16, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, idx).write_cvalue(fx, res_lane); + } + + for idx in 0..lane_count { + let lane = b.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().umax(lane, min_u16); + let sat = fx.bcx.ins().umin(sat, max_u16); + let res = fx.bcx.ins().ireduce(types::I16, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, lane_count + idx).write_cvalue(fx, 
res_lane); + } + } + + "llvm.x86.avx2.packssdw" => { + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_packs_epi32&ig_expand=4892 + intrinsic_args!(fx, args => (a, b); intrinsic); + + assert_eq!(a.layout(), b.layout()); + let layout = a.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + assert_eq!(lane_ty, fx.tcx.types.i32); + assert_eq!(ret_lane_ty, fx.tcx.types.i16); + assert_eq!(lane_count * 2, ret_lane_count); + + let min_i16 = fx.bcx.ins().iconst(types::I32, i64::from(i16::MIN as u16)); + let max_i16 = fx.bcx.ins().iconst(types::I32, i64::from(i16::MAX as u16)); + let ret_lane_layout = fx.layout_of(fx.tcx.types.i16); + + for idx in 0..lane_count / 2 { + let lane = a.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, min_i16); + let sat = fx.bcx.ins().umin(sat, max_i16); + let res = fx.bcx.ins().ireduce(types::I16, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, idx).write_cvalue(fx, res_lane); + } + + for idx in 0..lane_count / 2 { + let lane = b.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, min_i16); + let sat = fx.bcx.ins().umin(sat, max_i16); + let res = fx.bcx.ins().ireduce(types::I16, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, lane_count / 2 + idx).write_cvalue(fx, res_lane); + } + + for idx in 0..lane_count / 2 { + let lane = a.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, min_i16); + let sat = fx.bcx.ins().umin(sat, max_i16); + let res = fx.bcx.ins().ireduce(types::I16, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, lane_count / 2 * 2 + idx).write_cvalue(fx, res_lane); + } + + for idx in 0..lane_count / 2 { + let lane = b.value_lane(fx, idx).load_scalar(fx); + let sat = fx.bcx.ins().smax(lane, min_i16); + let sat = fx.bcx.ins().umin(sat, max_i16); + let res = fx.bcx.ins().ireduce(types::I16, sat); + + let res_lane = CValue::by_val(res, ret_lane_layout); + ret.place_lane(fx, lane_count / 2 * 3 + idx).write_cvalue(fx, res_lane); + } + } + + "llvm.x86.pclmulqdq" => { + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_clmulepi64_si128&ig_expand=772 + intrinsic_args!(fx, args => (a, b, imm8); intrinsic); + + assert_eq!(a.layout(), b.layout()); + let layout = a.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + assert_eq!(lane_ty, fx.tcx.types.i64); + assert_eq!(ret_lane_ty, fx.tcx.types.i64); + assert_eq!(lane_count, 2); + assert_eq!(ret_lane_count, 2); + + let imm8 = imm8.load_scalar(fx); + + let control0 = fx.bcx.ins().band_imm(imm8, 0b0000_0001); + let a_lane0 = a.value_lane(fx, 0).load_scalar(fx); + let a_lane1 = a.value_lane(fx, 1).load_scalar(fx); + let temp1 = fx.bcx.ins().select(control0, a_lane1, a_lane0); + + let control4 = fx.bcx.ins().band_imm(imm8, 0b0001_0000); + let b_lane0 = b.value_lane(fx, 0).load_scalar(fx); + let b_lane1 = b.value_lane(fx, 1).load_scalar(fx); + let temp2 = fx.bcx.ins().select(control4, b_lane1, b_lane0); + + fn extract_bit(fx: &mut FunctionCx<'_, '_, '_>, val: Value, bit: i64) -> Value { + let tmp = fx.bcx.ins().ushr_imm(val, bit); + fx.bcx.ins().band_imm(tmp, 1) + } + + let mut res1 = fx.bcx.ins().iconst(types::I64, 0); + for i in 0..=63 { + let x = 
extract_bit(fx, temp1, 0); + let y = extract_bit(fx, temp2, i); + let mut temp = fx.bcx.ins().band(x, y); + for j in 1..=i { + let x = extract_bit(fx, temp1, j); + let y = extract_bit(fx, temp2, i - j); + let z = fx.bcx.ins().band(x, y); + temp = fx.bcx.ins().bxor(temp, z); + } + let temp = fx.bcx.ins().ishl_imm(temp, i); + res1 = fx.bcx.ins().bor(res1, temp); + } + ret.place_lane(fx, 0).to_ptr().store(fx, res1, MemFlags::trusted()); + + let mut res2 = fx.bcx.ins().iconst(types::I64, 0); + for i in 64..=127 { + let mut temp = fx.bcx.ins().iconst(types::I64, 0); + for j in i - 63..=63 { + let x = extract_bit(fx, temp1, j); + let y = extract_bit(fx, temp2, i - j); + let z = fx.bcx.ins().band(x, y); + temp = fx.bcx.ins().bxor(temp, z); + } + let temp = fx.bcx.ins().ishl_imm(temp, i); + res2 = fx.bcx.ins().bor(res2, temp); + } + ret.place_lane(fx, 1).to_ptr().store(fx, res2, MemFlags::trusted()); + } + + "llvm.x86.avx.ptestz.256" => { + // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_testz_si256&ig_expand=6945 + intrinsic_args!(fx, args => (a, b); intrinsic); + + assert_eq!(a.layout(), b.layout()); + let layout = a.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + assert_eq!(lane_ty, fx.tcx.types.i64); + assert_eq!(ret.layout().ty, fx.tcx.types.i32); + assert_eq!(lane_count, 4); + + let a_lane0 = a.value_lane(fx, 0).load_scalar(fx); + let a_lane1 = a.value_lane(fx, 1).load_scalar(fx); + let a_lane2 = a.value_lane(fx, 2).load_scalar(fx); + let a_lane3 = a.value_lane(fx, 3).load_scalar(fx); + let b_lane0 = b.value_lane(fx, 0).load_scalar(fx); + let b_lane1 = b.value_lane(fx, 1).load_scalar(fx); + let b_lane2 = b.value_lane(fx, 2).load_scalar(fx); + let b_lane3 = b.value_lane(fx, 3).load_scalar(fx); + + let zero0 = fx.bcx.ins().band(a_lane0, b_lane0); + let zero1 = fx.bcx.ins().band(a_lane1, b_lane1); + let zero2 = fx.bcx.ins().band(a_lane2, b_lane2); + let zero3 = fx.bcx.ins().band(a_lane3, b_lane3); + + let all_zero0 = fx.bcx.ins().bor(zero0, zero1); + let all_zero1 = fx.bcx.ins().bor(zero2, zero3); + let all_zero = fx.bcx.ins().bor(all_zero0, all_zero1); + + let res = fx.bcx.ins().icmp_imm(IntCC::Equal, all_zero, 0); + let res = CValue::by_val( + fx.bcx.ins().uextend(types::I32, res), + fx.layout_of(fx.tcx.types.i32), + ); + ret.write_cvalue(fx, res); + } + _ => { fx.tcx .sess diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -12,23 +12,20 @@ } } -mod cpuid; mod llvm; mod llvm_aarch64; mod llvm_x86; mod simd; -pub(crate) use cpuid::codegen_cpuid_call; -pub(crate) use llvm::codegen_llvm_intrinsic_call; - +use cranelift_codegen::ir::AtomicRmwOp; use rustc_middle::ty; use rustc_middle::ty::layout::{HasParamEnv, ValidityRequirement}; use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths}; use rustc_middle::ty::GenericArgsRef; use rustc_span::symbol::{kw, sym, Symbol}; +pub(crate) use self::llvm::codegen_llvm_intrinsic_call; use crate::prelude::*; -use cranelift_codegen::ir::AtomicRmwOp; fn bug_on_incorrect_arg_count(intrinsic: impl std::fmt::Display) -> ! 
{ bug!("wrong number of args for intrinsic {}", intrinsic); @@ -132,6 +129,65 @@ let res_lane = CValue::by_val(res_lane, ret_lane_layout); ret.place_lane(fx, lane_idx).write_cvalue(fx, res_lane); + } +} + +fn simd_horizontal_pair_for_each_lane<'tcx>( + fx: &mut FunctionCx<'_, '_, 'tcx>, + x: CValue<'tcx>, + y: CValue<'tcx>, + ret: CPlace<'tcx>, + f: &dyn Fn(&mut FunctionCx<'_, '_, 'tcx>, Ty<'tcx>, Ty<'tcx>, Value, Value) -> Value, +) { + assert_eq!(x.layout(), y.layout()); + let layout = x.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let lane_layout = fx.layout_of(lane_ty); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + let ret_lane_layout = fx.layout_of(ret_lane_ty); + assert_eq!(lane_count, ret_lane_count); + + for lane_idx in 0..lane_count { + let src = if lane_idx < (lane_count / 2) { x } else { y }; + let src_idx = lane_idx % (lane_count / 2); + + let lhs_lane = src.value_lane(fx, src_idx * 2).load_scalar(fx); + let rhs_lane = src.value_lane(fx, src_idx * 2 + 1).load_scalar(fx); + + let res_lane = f(fx, lane_layout.ty, ret_lane_layout.ty, lhs_lane, rhs_lane); + let res_lane = CValue::by_val(res_lane, ret_lane_layout); + + ret.place_lane(fx, lane_idx).write_cvalue(fx, res_lane); + } +} + +fn simd_trio_for_each_lane<'tcx>( + fx: &mut FunctionCx<'_, '_, 'tcx>, + x: CValue<'tcx>, + y: CValue<'tcx>, + z: CValue<'tcx>, + ret: CPlace<'tcx>, + f: &dyn Fn(&mut FunctionCx<'_, '_, 'tcx>, Ty<'tcx>, Ty<'tcx>, Value, Value, Value) -> Value, +) { + assert_eq!(x.layout(), y.layout()); + let layout = x.layout(); + + let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let lane_layout = fx.layout_of(lane_ty); + let (ret_lane_count, ret_lane_ty) = ret.layout().ty.simd_size_and_type(fx.tcx); + let ret_lane_layout = fx.layout_of(ret_lane_ty); + assert_eq!(lane_count, ret_lane_count); + + for lane_idx in 0..lane_count { + let x_lane = x.value_lane(fx, lane_idx).load_scalar(fx); + let y_lane = y.value_lane(fx, lane_idx).load_scalar(fx); + let z_lane = z.value_lane(fx, lane_idx).load_scalar(fx); + + let res_lane = f(fx, lane_layout.ty, ret_lane_layout.ty, x_lane, y_lane, z_lane); + let res_lane = CValue::by_val(res_lane, ret_lane_layout); + + ret.place_lane(fx, lane_idx).write_cvalue(fx, res_lane); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -148,7 +148,7 @@ let total_len = lane_count * 2; let indexes = - idx.iter().map(|idx| idx.unwrap_leaf().try_to_u16().unwrap()).collect::>(); + idx.iter().map(|idx| idx.unwrap_leaf().try_to_u32().unwrap()).collect::>(); for &idx in &indexes { assert!(u64::from(idx) < total_len, "idx {} out of range 0..{}", idx, total_len); @@ -216,8 +216,10 @@ let indexes = { use rustc_middle::mir::interpret::*; - let idx_const = crate::constant::mir_operand_get_const_val(fx, idx) - .expect("simd_shuffle idx not const"); + let idx_const = match idx { + Operand::Constant(const_) => crate::constant::eval_mir_constant(fx, const_).0, + Operand::Copy(_) | Operand::Move(_) => unreachable!("{idx:?}"), + }; let idx_bytes = match idx_const { ConstValue::Indirect { alloc_id, offset } => { @@ -343,7 +345,11 @@ 
ret.write_cvalue(fx, ret_lane); } - sym::simd_neg => { + sym::simd_neg + | sym::simd_bswap + | sym::simd_bitreverse + | sym::simd_ctlz + | sym::simd_cttz => { intrinsic_args!(fx, args => (a); intrinsic); if !a.layout().ty.is_simd() { @@ -351,16 +357,21 @@ return; } - simd_for_each_lane( - fx, - a, - ret, - &|fx, lane_ty, _ret_lane_ty, lane| match lane_ty.kind() { - ty::Int(_) => fx.bcx.ins().ineg(lane), - ty::Float(_) => fx.bcx.ins().fneg(lane), - _ => unreachable!(), - }, - ); + simd_for_each_lane(fx, a, ret, &|fx, lane_ty, _ret_lane_ty, lane| match ( + lane_ty.kind(), + intrinsic, + ) { + (ty::Int(_), sym::simd_neg) => fx.bcx.ins().ineg(lane), + (ty::Float(_), sym::simd_neg) => fx.bcx.ins().fneg(lane), + + (ty::Uint(ty::UintTy::U8) | ty::Int(ty::IntTy::I8), sym::simd_bswap) => lane, + (ty::Uint(_) | ty::Int(_), sym::simd_bswap) => fx.bcx.ins().bswap(lane), + (ty::Uint(_) | ty::Int(_), sym::simd_bitreverse) => fx.bcx.ins().bitrev(lane), + (ty::Uint(_) | ty::Int(_), sym::simd_ctlz) => fx.bcx.ins().clz(lane), + (ty::Uint(_) | ty::Int(_), sym::simd_cttz) => fx.bcx.ins().ctz(lane), + + _ => unreachable!(), + }); } sym::simd_add diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,6 @@ +#![cfg_attr(all(doc, not(bootstrap)), allow(internal_features))] +#![cfg_attr(all(doc, not(bootstrap)), feature(rustdoc_internals))] +#![cfg_attr(all(doc, not(bootstrap)), doc(rust_logo))] #![feature(rustc_private)] // Note: please avoid adding other feature gates where possible #![warn(rust_2018_idioms)] @@ -29,6 +32,8 @@ use std::cell::{Cell, RefCell}; use std::sync::Arc; +use cranelift_codegen::isa::TargetIsa; +use cranelift_codegen::settings::{self, Configurable}; use rustc_codegen_ssa::traits::CodegenBackend; use rustc_codegen_ssa::CodegenResults; use rustc_data_structures::profiling::SelfProfilerRef; @@ -39,9 +44,6 @@ use rustc_session::Session; use rustc_span::Symbol; -use cranelift_codegen::isa::TargetIsa; -use cranelift_codegen::settings::{self, Configurable}; - pub use crate::config::*; use crate::prelude::*; @@ -76,22 +78,6 @@ mod vtable; mod prelude { - pub(crate) use rustc_span::{FileNameDisplayPreference, Span}; - - pub(crate) use rustc_hir::def_id::{DefId, LOCAL_CRATE}; - pub(crate) use rustc_middle::bug; - pub(crate) use rustc_middle::mir::{self, *}; - pub(crate) use rustc_middle::ty::layout::{self, LayoutOf, TyAndLayout}; - pub(crate) use rustc_middle::ty::{ - self, FloatTy, Instance, InstanceDef, IntTy, ParamEnv, Ty, TyCtxt, TypeAndMut, - TypeFoldable, TypeVisitableExt, UintTy, - }; - pub(crate) use rustc_target::abi::{Abi, FieldIdx, Scalar, Size, VariantIdx, FIRST_VARIANT}; - - pub(crate) use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; - - pub(crate) use rustc_index::Idx; - pub(crate) use cranelift_codegen::ir::condcodes::{FloatCC, IntCC}; pub(crate) use cranelift_codegen::ir::function::Function; pub(crate) use cranelift_codegen::ir::types; @@ -103,6 +89,18 @@ pub(crate) use cranelift_codegen::Context; pub(crate) use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext, Variable}; pub(crate) use cranelift_module::{self, DataDescription, FuncId, Linkage, Module}; + pub(crate) use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; + 
pub(crate) use rustc_hir::def_id::{DefId, LOCAL_CRATE}; + pub(crate) use rustc_index::Idx; + pub(crate) use rustc_middle::bug; + pub(crate) use rustc_middle::mir::{self, *}; + pub(crate) use rustc_middle::ty::layout::{self, LayoutOf, TyAndLayout}; + pub(crate) use rustc_middle::ty::{ + self, FloatTy, Instance, InstanceDef, IntTy, ParamEnv, Ty, TyCtxt, TypeAndMut, + TypeFoldable, TypeVisitableExt, UintTy, + }; + pub(crate) use rustc_span::{FileNameDisplayPreference, Span}; + pub(crate) use rustc_target::abi::{Abi, FieldIdx, Scalar, Size, VariantIdx, FIRST_VARIANT}; pub(crate) use crate::abi::*; pub(crate) use crate::base::{codegen_operand, codegen_place}; @@ -191,7 +189,7 @@ } fn target_features(&self, _sess: &Session, _allow_unstable: bool) -> Vec { - vec![] + vec![] // FIXME necessary for #[cfg(target_feature] } fn print_version(&self) { @@ -263,9 +261,9 @@ let preserve_frame_pointer = sess.target.options.frame_pointer != rustc_target::spec::FramePointer::MayOmit || matches!(sess.opts.cg.force_frame_pointers, Some(true)); - if preserve_frame_pointer { - flags_builder.set("preserve_frame_pointers", "true").unwrap(); - } + flags_builder + .set("preserve_frame_pointers", if preserve_frame_pointer { "true" } else { "false" }) + .unwrap(); let tls_model = match target_triple.binary_format { BinaryFormat::Elf => "elf_gd", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/pointer.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/pointer.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/pointer.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/pointer.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,11 +1,10 @@ //! Defines [`Pointer`] which is used to improve the quality of the generated clif ir for pointer //! operations. -use crate::prelude::*; - +use cranelift_codegen::ir::immediates::Offset32; use rustc_target::abi::Align; -use cranelift_codegen::ir::immediates::Offset32; +use crate::prelude::*; /// A pointer pointing either to a certain address, a certain stack slot or nothing. 
#[derive(Copy, Clone, Debug)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/pretty_clif.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/pretty_clif.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/pretty_clif.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/pretty_clif.rs 2023-12-21 16:55:28.000000000 +0000 @@ -63,8 +63,8 @@ ir::entities::AnyEntity, write::{FuncWriter, PlainWriter}, }; - use rustc_middle::ty::layout::FnAbiOf; +use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_session::config::{OutputFilenames, OutputType}; use crate::prelude::*; @@ -80,15 +80,17 @@ pub(crate) fn new<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self { let enabled = should_write_ir(tcx); let global_comments = if enabled { - vec![ - format!("symbol {}", tcx.symbol_name(instance).name), - format!("instance {:?}", instance), - format!( - "abi {:?}", - RevealAllLayoutCx(tcx).fn_abi_of_instance(instance, ty::List::empty()) - ), - String::new(), - ] + with_no_trimmed_paths!({ + vec![ + format!("symbol {}", tcx.symbol_name(instance).name), + format!("instance {:?}", instance), + format!( + "abi {:?}", + RevealAllLayoutCx(tcx).fn_abi_of_instance(instance, ty::List::empty()) + ), + String::new(), + ] + }) } else { vec![] }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/value_and_place.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/value_and_place.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/value_and_place.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_cranelift/src/value_and_place.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,11 +1,10 @@ //! Definition of [`CValue`] and [`CPlace`] -use crate::prelude::*; - -use rustc_middle::ty::FnSig; - use cranelift_codegen::entity::EntityRef; use cranelift_codegen::ir::immediates::Offset32; +use rustc_middle::ty::FnSig; + +use crate::prelude::*; fn codegen_field<'tcx>( fx: &mut FunctionCx<'_, '_, 'tcx>, @@ -133,18 +132,11 @@ (ptr.get_addr(fx), vtable) } CValueInner::ByValPair(data, vtable) => { - let stack_slot = fx.bcx.create_sized_stack_slot(StackSlotData { - kind: StackSlotKind::ExplicitSlot, - // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to - // specify stack slot alignment. 
- size: (u32::try_from(fx.target_config.pointer_type().bytes()).unwrap() + 15) - / 16 - * 16, - }); - let data_ptr = Pointer::stack_slot(stack_slot); - let mut flags = MemFlags::new(); - flags.set_notrap(); - data_ptr.store(fx, data, flags); + let data_ptr = fx.create_stack_slot( + u32::try_from(fx.target_config.pointer_type().bytes()).unwrap(), + u32::try_from(fx.target_config.pointer_type().bytes()).unwrap(), + ); + data_ptr.store(fx, data, MemFlags::trusted()); (data_ptr.get_addr(fx), vtable) } @@ -251,6 +243,34 @@ let (lane_count, lane_ty) = layout.ty.simd_size_and_type(fx.tcx); let lane_layout = fx.layout_of(lane_ty); assert!(lane_idx < lane_count); + + match self.0 { + CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => unreachable!(), + CValueInner::ByRef(ptr, None) => { + let field_offset = lane_layout.size * lane_idx; + let field_ptr = ptr.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap()); + CValue::by_ref(field_ptr, lane_layout) + } + CValueInner::ByRef(_, Some(_)) => unreachable!(), + } + } + + /// Like [`CValue::value_field`] except using the passed type as lane type instead of the one + /// specified by the vector type. + pub(crate) fn value_typed_lane( + self, + fx: &mut FunctionCx<'_, '_, 'tcx>, + lane_ty: Ty<'tcx>, + lane_idx: u64, + ) -> CValue<'tcx> { + let layout = self.1; + assert!(layout.ty.is_simd()); + let (orig_lane_count, orig_lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let lane_layout = fx.layout_of(lane_ty); + assert!( + (lane_idx + 1) * lane_layout.size <= orig_lane_count * fx.layout_of(orig_lane_ty).size + ); + match self.0 { CValueInner::ByVal(_) | CValueInner::ByValPair(_, _) => unreachable!(), CValueInner::ByRef(ptr, None) => { @@ -310,7 +330,8 @@ fx.bcx.ins().iconcat(lsb, msb) } ty::Bool | ty::Char | ty::Uint(_) | ty::Int(_) | ty::Ref(..) | ty::RawPtr(..) => { - fx.bcx.ins().iconst(clif_ty, const_val.to_bits(layout.size).unwrap() as i64) + let raw_val = const_val.size().truncate(const_val.to_bits(layout.size).unwrap()); + fx.bcx.ins().iconst(clif_ty, raw_val as i64) } ty::Float(FloatTy::F32) => { fx.bcx.ins().f32const(Ieee32::with_bits(u32::try_from(const_val).unwrap())) @@ -372,13 +393,11 @@ .fatal(format!("values of type {} are too big to store on the stack", layout.ty)); } - let stack_slot = fx.bcx.create_sized_stack_slot(StackSlotData { - kind: StackSlotKind::ExplicitSlot, - // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to - // specify stack slot alignment. - size: (u32::try_from(layout.size.bytes()).unwrap() + 15) / 16 * 16, - }); - CPlace { inner: CPlaceInner::Addr(Pointer::stack_slot(stack_slot), None), layout } + let stack_slot = fx.create_stack_slot( + u32::try_from(layout.size.bytes()).unwrap(), + u32::try_from(layout.align.pref.bytes()).unwrap(), + ); + CPlace { inner: CPlaceInner::Addr(stack_slot, None), layout } } pub(crate) fn new_var( @@ -543,13 +562,7 @@ _ if src_ty.is_vector() && dst_ty.is_vector() => codegen_bitcast(fx, dst_ty, data), _ if src_ty.is_vector() || dst_ty.is_vector() => { // FIXME(bytecodealliance/wasmtime#6104) do something more efficient for transmutes between vectors and integers. - let stack_slot = fx.bcx.create_sized_stack_slot(StackSlotData { - kind: StackSlotKind::ExplicitSlot, - // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to - // specify stack slot alignment. 
- size: (src_ty.bytes() + 15) / 16 * 16, - }); - let ptr = Pointer::stack_slot(stack_slot); + let ptr = fx.create_stack_slot(src_ty.bytes(), src_ty.bytes()); ptr.store(fx, data, MemFlags::trusted()); ptr.load(fx, dst_ty, MemFlags::trusted()) } @@ -739,6 +752,34 @@ match self.inner { CPlaceInner::Var(_, _) => unreachable!(), + CPlaceInner::VarPair(_, _, _) => unreachable!(), + CPlaceInner::Addr(ptr, None) => { + let field_offset = lane_layout.size * lane_idx; + let field_ptr = ptr.offset_i64(fx, i64::try_from(field_offset.bytes()).unwrap()); + CPlace::for_ptr(field_ptr, lane_layout) + } + CPlaceInner::Addr(_, Some(_)) => unreachable!(), + } + } + + /// Like [`CPlace::place_field`] except using the passed type as lane type instead of the one + /// specified by the vector type. + pub(crate) fn place_typed_lane( + self, + fx: &mut FunctionCx<'_, '_, 'tcx>, + lane_ty: Ty<'tcx>, + lane_idx: u64, + ) -> CPlace<'tcx> { + let layout = self.layout(); + assert!(layout.ty.is_simd()); + let (orig_lane_count, orig_lane_ty) = layout.ty.simd_size_and_type(fx.tcx); + let lane_layout = fx.layout_of(lane_ty); + assert!( + (lane_idx + 1) * lane_layout.size <= orig_lane_count * fx.layout_of(orig_lane_ty).size + ); + + match self.inner { + CPlaceInner::Var(_, _) => unreachable!(), CPlaceInner::VarPair(_, _, _) => unreachable!(), CPlaceInner::Addr(ptr, None) => { let field_offset = lane_layout.size * lane_idx; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/ci.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/ci.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/ci.yml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/ci.yml 2023-12-21 16:55:28.000000000 +0000 @@ -19,9 +19,8 @@ fail-fast: false matrix: libgccjit_version: - - { gcc: "libgccjit.so", extra: "", env_extra: "", artifacts_branch: "master" } - - { gcc: "libgccjit_without_int128.so", extra: "", env_extra: "", artifacts_branch: "master-without-128bit-integers" } - - { gcc: "libgccjit12.so", extra: "--no-default-features", env_extra: "TEST_FLAGS='-Cpanic=abort -Zpanic-abort-tests'", artifacts_branch: "gcc12" } + - { gcc: "libgccjit.so", artifacts_branch: "master" } + - { gcc: "libgccjit_without_int128.so", artifacts_branch: "master-without-128bit-integers" } commands: [ "--mini-tests", "--std-tests", @@ -33,47 +32,30 @@ "--extended-regex-tests", "--test-successful-rustc --nb-parts 2 --current-part 0", "--test-successful-rustc --nb-parts 2 --current-part 1", - "--test-failing-rustc", ] steps: - uses: actions/checkout@v3 - - uses: actions/checkout@v3 - with: - repository: llvm/llvm-project - path: llvm - - name: Install packages # `llvm-14-tools` is needed to install the `FileCheck` binary which is used for asm tests. run: sudo apt-get install ninja-build ripgrep llvm-14-tools - - name: Install libgccjit12 - if: matrix.libgccjit_version.gcc == 'libgccjit12.so' - run: sudo apt-get install libgccjit-12-dev - - name: Download artifact - if: matrix.libgccjit_version.gcc != 'libgccjit12.so' uses: dawidd6/action-download-artifact@v2 with: workflow: main.yml - name: ${{ matrix.libgccjit_version.gcc }} - path: gcc-build + name: gcc-13 + path: gcc-13 repo: antoyo/gcc branch: ${{ matrix.libgccjit_version.artifacts_branch }} event: push search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts. 
- name: Setup path to libgccjit - if: matrix.libgccjit_version.gcc == 'libgccjit12.so' - run: echo /usr/lib/gcc/x86_64-linux-gnu/12 > gcc_path - - - name: Setup path to libgccjit - if: matrix.libgccjit_version.gcc != 'libgccjit12.so' run: | - echo $(readlink -f gcc-build) > gcc_path - # NOTE: the filename is still libgccjit.so even when the artifact name is different. - ln gcc-build/libgccjit.so gcc-build/libgccjit.so.0 + sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb + echo /usr/lib/ > gcc_path - name: Set env run: | @@ -81,9 +63,6 @@ echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV - - name: Set RUST_COMPILER_RT_ROOT - run: echo "RUST_COMPILER_RT_ROOT="${{ env.workspace }}/llvm/compiler-rt >> $GITHUB_ENV - - name: Cache cargo installed crates uses: actions/cache@v3 with: @@ -119,16 +98,16 @@ - name: Build run: | - ./prepare_build.sh - ${{ matrix.libgccjit_version.env_extra }} ./build.sh ${{ matrix.libgccjit_version.extra }} - ${{ matrix.libgccjit_version.env_extra }} cargo test ${{ matrix.libgccjit_version.extra }} + ./y.sh prepare --only-libcore + ./y.sh build + cargo test ./clean_all.sh - name: Prepare dependencies run: | git config --global user.email "user@example.com" git config --global user.name "User" - ./prepare.sh + ./y.sh prepare # Compile is a separate step, as the actions-rs/cargo action supports error annotations - name: Compile @@ -137,13 +116,12 @@ command: build args: --release - - name: Add more failing tests for GCC 12 - if: ${{ matrix.libgccjit_version.gcc == 'libgccjit12.so' }} - run: cat failing-ui-tests12.txt >> failing-ui-tests.txt + - name: Add more failing tests because the sysroot is not compiled with LTO + run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt - name: Run tests run: | - ${{ matrix.libgccjit_version.env_extra }} ./test.sh --release --clean --build-sysroot ${{ matrix.commands }} ${{ matrix.libgccjit_version.extra }} + ./test.sh --release --clean --build-sysroot ${{ matrix.commands }} duplicates: runs-on: ubuntu-latest diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/failures.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/failures.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/failures.yml 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/failures.yml 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,129 @@ +# TODO: refactor to avoid duplication with the ci.yml file. +name: Failures + +on: + - pull_request + +permissions: + contents: read + +env: + # Enable backtraces for easier debugging + RUST_BACKTRACE: 1 + +jobs: + build: + runs-on: ubuntu-22.04 + + strategy: + fail-fast: false + matrix: + libgccjit_version: + - gcc: "libgccjit.so" + artifacts_branch: "master" + - gcc: "libgccjit_without_int128.so" + artifacts_branch: "master-without-128bit-integers" + - gcc: "libgccjit12.so" + artifacts_branch: "gcc12" + extra: "--no-default-features" + # FIXME(antoyo): we need to set GCC_EXEC_PREFIX so that the linker can find the linker plugin. + # Not sure why it's not found otherwise. 
+ env_extra: "TEST_FLAGS='-Cpanic=abort -Zpanic-abort-tests' GCC_EXEC_PREFIX=/usr/lib/gcc/" + + steps: + - uses: actions/checkout@v3 + + - name: Install packages + run: sudo apt-get install ninja-build ripgrep + + - name: Install libgccjit12 + if: matrix.libgccjit_version.gcc == 'libgccjit12.so' + run: sudo apt-get install libgccjit-12-dev + + - name: Setup path to libgccjit + if: matrix.libgccjit_version.gcc == 'libgccjit12.so' + run: echo /usr/lib/gcc/x86_64-linux-gnu/12 > gcc_path + + - name: Download artifact + if: matrix.libgccjit_version.gcc != 'libgccjit12.so' + uses: dawidd6/action-download-artifact@v2 + with: + workflow: main.yml + name: gcc-13 + path: gcc-13 + repo: antoyo/gcc + branch: ${{ matrix.libgccjit_version.artifacts_branch }} + event: push + search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts. + + - name: Setup path to libgccjit + if: matrix.libgccjit_version.gcc != 'libgccjit12.so' + run: | + sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb + echo /usr/lib/ > gcc_path + + - name: Set env + run: | + echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV + echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV + echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV + + - name: Cache cargo installed crates + uses: actions/cache@v3 + with: + path: ~/.cargo/bin + key: cargo-installed-crates2-ubuntu-latest + + - name: Cache cargo registry + uses: actions/cache@v3 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo index + uses: actions/cache@v3 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo target dir + uses: actions/cache@v3 + with: + path: target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }} + + #- name: Cache rust repository + #uses: actions/cache@v3 + #id: cache-rust-repository + #with: + #path: rust + #key: ${{ runner.os }}-packages-${{ hashFiles('rust/.git/HEAD') }} + + - name: Git config + run: | + git config --global user.email "user@example.com" + git config --global user.name "User" + + - name: Prepare dependencies + if: matrix.libgccjit_version.gcc == 'libgccjit12.so' + run: ./y.sh prepare --libgccjit12-patches + + - name: Prepare dependencies + if: matrix.libgccjit_version.gcc != 'libgccjit12.so' + run: ./y.sh prepare + + # Compile is a separate step, as the actions-rs/cargo action supports error annotations + - name: Compile + uses: actions-rs/cargo@v1.0.3 + with: + command: build + args: --release + + - name: Add more failing tests because the sysroot is not compiled with LTO + run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt + + - name: Run tests + id: tests + run: | + ${{ matrix.libgccjit_version.env_extra }} ./test.sh --release --clean --build-sysroot --test-failing-rustc ${{ matrix.libgccjit_version.extra }} | tee output_log + rg --text "test result" output_log >> $GITHUB_STEP_SUMMARY diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/gcc12.yml 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,115 @@ +name: CI libgccjit 12 + +on: + - push + - pull_request + 
+permissions: + contents: read + +env: + # Enable backtraces for easier debugging + RUST_BACKTRACE: 1 + TEST_FLAGS: "-Cpanic=abort -Zpanic-abort-tests" + # FIXME(antoyo): we need to set GCC_EXEC_PREFIX so that the linker can find the linker plugin. + # Not sure why it's not found otherwise. + GCC_EXEC_PREFIX: /usr/lib/gcc/ + +jobs: + build: + runs-on: ubuntu-22.04 + + strategy: + fail-fast: false + matrix: + commands: [ + "--mini-tests", + "--std-tests", + # FIXME: re-enable asm tests when GCC can emit in the right syntax. + # "--asm-tests", + "--test-libcore", + "--extended-rand-tests", + "--extended-regex-example-tests", + "--extended-regex-tests", + "--test-successful-rustc --nb-parts 2 --current-part 0", + "--test-successful-rustc --nb-parts 2 --current-part 1", + ] + + steps: + - uses: actions/checkout@v3 + + - name: Install packages + # `llvm-14-tools` is needed to install the `FileCheck` binary which is used for asm tests. + run: sudo apt-get install ninja-build ripgrep llvm-14-tools libgccjit-12-dev + + - name: Setup path to libgccjit + run: echo /usr/lib/gcc/x86_64-linux-gnu/12 > gcc_path + + - name: Set env + run: | + echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV + echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV + echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV + + - name: Cache cargo installed crates + uses: actions/cache@v3 + with: + path: ~/.cargo/bin + key: cargo-installed-crates2-ubuntu-latest + + - name: Cache cargo registry + uses: actions/cache@v3 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo index + uses: actions/cache@v3 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo target dir + uses: actions/cache@v3 + with: + path: target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }} + + #- name: Cache rust repository + ## We only clone the rust repository for rustc tests + #if: ${{ contains(matrix.commands, 'rustc') }} + #uses: actions/cache@v3 + #id: cache-rust-repository + #with: + #path: rust + #key: ${{ runner.os }}-packages-${{ hashFiles('rust/.git/HEAD') }} + + - name: Build + run: | + ./y.sh prepare --only-libcore --libgccjit12-patches + ./y.sh build --no-default-features --sysroot-panic-abort + cargo test --no-default-features + ./clean_all.sh + + - name: Prepare dependencies + run: | + git config --global user.email "user@example.com" + git config --global user.name "User" + ./y.sh prepare --libgccjit12-patches + + # Compile is a separate step, as the actions-rs/cargo action supports error annotations + - name: Compile + uses: actions-rs/cargo@v1.0.3 + with: + command: build + args: --release + + - name: Add more failing tests for GCC 12 + run: cat failing-ui-tests12.txt >> failing-ui-tests.txt + + - name: Add more failing tests because the sysroot is not compiled with LTO + run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt + + - name: Run tests + run: | + ./test.sh --release --clean --build-sysroot ${{ matrix.commands }} --no-default-features diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml 2023-12-21 
16:55:28.000000000 +0000 @@ -0,0 +1,139 @@ +# TODO: check if qemu-user-static-binfmt is needed (perhaps to run some tests since it probably calls exec). + +name: m68k CI + +on: + - push + - pull_request + +permissions: + contents: read + +env: + # Enable backtraces for easier debugging + RUST_BACKTRACE: 1 + # TODO: remove when confish.sh is removed. + OVERWRITE_TARGET_TRIPLE: m68k-unknown-linux-gnu + +jobs: + build: + runs-on: ubuntu-22.04 + + strategy: + fail-fast: false + matrix: + commands: [ + "--mini-tests", + "--std-tests", + # TODO(antoyo): fix those on m68k. + #"--test-libcore", + #"--extended-rand-tests", + #"--extended-regex-example-tests", + #"--extended-regex-tests", + #"--test-successful-rustc --nb-parts 2 --current-part 0", + #"--test-successful-rustc --nb-parts 2 --current-part 1", + #"--test-failing-rustc", + ] + + steps: + - name: Install packages + run: | + sudo apt-get update + sudo apt-get install qemu qemu-user-static + + - uses: actions/checkout@v3 + + - name: Download GCC artifact + uses: dawidd6/action-download-artifact@v2 + with: + workflow: m68k.yml + name: gcc-m68k-13 + repo: cross-cg-gcc-tools/cross-gcc + branch: master + event: push + + - name: Download VM artifact + uses: dawidd6/action-download-artifact@v2 + with: + workflow: m68k.yml + name: debian-m68k + repo: cross-cg-gcc-tools/vms + branch: master + event: push + + - name: Setup path to libgccjit + run: | + sudo dpkg -i gcc-m68k-13.deb + echo /usr/lib/ > gcc_path + + - name: Set env + run: | + echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV + echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV + echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV + + - name: Cache cargo installed crates + uses: actions/cache@v3 + with: + path: ~/.cargo/bin + key: cargo-installed-crates2-ubuntu-latest + + #- name: Cache cargo registry + #uses: actions/cache@v3 + #with: + #path: ~/.cargo/registry + #key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }} + + #- name: Cache cargo index + #uses: actions/cache@v3 + #with: + #path: ~/.cargo/git + #key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo target dir + uses: actions/cache@v3 + with: + path: target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }} + + #- name: Cache rust repository + ## We only clone the rust repository for rustc tests + #if: ${{ contains(matrix.commands, 'rustc') }} + #uses: actions/cache@v3 + #id: cache-rust-repository + #with: + #path: rust + #key: ${{ runner.os }}-packages-${{ hashFiles('rust/.git/HEAD') }} + + - name: Prepare VM + run: | + mkdir vm + sudo mount debian-m68k.img vm + sudo cp $(which qemu-m68k-static) vm/usr/bin/ + + - name: Build + run: | + ./y.sh prepare --only-libcore --cross + ./y.sh build --target-triple m68k-unknown-linux-gnu + CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu cargo test + ./clean_all.sh + + - name: Prepare dependencies + run: | + git config --global user.email "user@example.com" + git config --global user.name "User" + ./y.sh prepare --cross + + # Compile is a separate step, as the actions-rs/cargo action supports error annotations + - name: Compile + uses: actions-rs/cargo@v1.0.3 + with: + command: build + args: --release + + - name: Add more failing tests because the sysroot is not compiled with LTO + run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt + + - name: Run tests + run: | + ./test.sh --release --clean --build-sysroot ${{ matrix.commands }} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/release.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/release.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/release.yml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/release.yml 2023-12-21 16:55:28.000000000 +0000 @@ -18,8 +18,6 @@ strategy: fail-fast: false matrix: - libgccjit_version: - - { gcc: "libgccjit.so", artifacts_branch: "master" } commands: [ "--test-successful-rustc --nb-parts 2 --current-part 0", "--test-successful-rustc --nb-parts 2 --current-part 1", @@ -28,11 +26,6 @@ steps: - uses: actions/checkout@v3 - - uses: actions/checkout@v3 - with: - repository: llvm/llvm-project - path: llvm - - name: Install packages run: sudo apt-get install ninja-build ripgrep @@ -40,18 +33,17 @@ uses: dawidd6/action-download-artifact@v2 with: workflow: main.yml - name: ${{ matrix.libgccjit_version.gcc }} - path: gcc-build + name: gcc-13 + path: gcc-13 repo: antoyo/gcc - branch: ${{ matrix.libgccjit_version.artifacts_branch }} + branch: "master" event: push search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts. - name: Setup path to libgccjit run: | - echo $(readlink -f gcc-build) > gcc_path - # NOTE: the filename is still libgccjit.so even when the artifact name is different. - ln gcc-build/libgccjit.so gcc-build/libgccjit.so.0 + sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb + echo /usr/lib/ > gcc_path - name: Set env run: | @@ -59,9 +51,6 @@ echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV - - name: Set RUST_COMPILER_RT_ROOT - run: echo "RUST_COMPILER_RT_ROOT="${{ env.workspace }}/llvm/compiler-rt >> $GITHUB_ENV - - name: Cache cargo installed crates uses: actions/cache@v3 with: @@ -88,8 +77,8 @@ - name: Build run: | - ./prepare_build.sh - ./build.sh --release --release-sysroot + ./y.sh prepare --only-libcore + EMBED_LTO_BITCODE=1 ./y.sh build --release --release-sysroot cargo test ./clean_all.sh @@ -97,7 +86,9 @@ run: | git config --global user.email "user@example.com" git config --global user.name "User" - ./prepare.sh + ./y.sh prepare + # FIXME(antoyo): we cannot enable LTO for stdarch tests currently because of some failing LTO tests using proc-macros. 
+ echo -n 'lto = "fat"' >> build_sysroot/Cargo.toml # Compile is a separate step, as the actions-rs/cargo action supports error annotations - name: Compile @@ -106,6 +97,9 @@ command: build args: --release + - name: Add more failing tests because of undefined symbol errors (FIXME) + run: cat failing-lto-tests.txt >> failing-ui-tests.txt + - name: Run tests run: | - ./test.sh --release --clean --release-sysroot --build-sysroot ${{ matrix.commands }} + EMBED_LTO_BITCODE=1 ./test.sh --release --clean --release-sysroot --build-sysroot ${{ matrix.commands }} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.github/workflows/stdarch.yml 2023-12-21 16:55:28.000000000 +0000 @@ -18,8 +18,6 @@ strategy: fail-fast: false matrix: - libgccjit_version: - - { gcc: "libgccjit.so", artifacts_branch: "master" } cargo_runner: [ "sde -future -rtm_mode full --", "", @@ -28,11 +26,6 @@ steps: - uses: actions/checkout@v3 - - uses: actions/checkout@v3 - with: - repository: llvm/llvm-project - path: llvm - - name: Install packages run: sudo apt-get install ninja-build ripgrep @@ -54,18 +47,17 @@ uses: dawidd6/action-download-artifact@v2 with: workflow: main.yml - name: ${{ matrix.libgccjit_version.gcc }} - path: gcc-build + name: gcc-13 + path: gcc-13 repo: antoyo/gcc - branch: ${{ matrix.libgccjit_version.artifacts_branch }} + branch: "master" event: push search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts. - name: Setup path to libgccjit run: | - echo $(readlink -f gcc-build) > gcc_path - # NOTE: the filename is still libgccjit.so even when the artifact name is different. 
- ln gcc-build/libgccjit.so gcc-build/libgccjit.so.0 + sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb + echo /usr/lib/ > gcc_path - name: Set env run: | @@ -73,9 +65,6 @@ echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV - - name: Set RUST_COMPILER_RT_ROOT - run: echo "RUST_COMPILER_RT_ROOT="${{ env.workspace }}/llvm/compiler-rt >> $GITHUB_ENV - - name: Cache cargo installed crates uses: actions/cache@v3 with: @@ -102,8 +91,8 @@ - name: Build run: | - ./prepare_build.sh - ./build.sh --release --release-sysroot + ./y.sh prepare --only-libcore + ./y.sh build --release --release-sysroot cargo test - name: Clean @@ -115,7 +104,7 @@ run: | git config --global user.email "user@example.com" git config --global user.name "User" - ./prepare.sh + ./y.sh prepare # Compile is a separate step, as the actions-rs/cargo action supports error annotations - name: Compile @@ -133,10 +122,11 @@ if: ${{ !matrix.cargo_runner }} run: | cd build_sysroot/sysroot_src/library/stdarch/ - CHANNEL=release TARGET=x86_64-unknown-linux-gnu ../../../../cargo.sh test + CHANNEL=release TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../cargo.sh test - name: Run stdarch tests if: ${{ matrix.cargo_runner }} run: | cd build_sysroot/sysroot_src/library/stdarch/ - STDARCH_TEST_EVERYTHING=1 CHANNEL=release CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${{ matrix.cargo_runner }}" TARGET=x86_64-unknown-linux-gnu ../../../../cargo.sh test -- --skip rtm --skip tbm --skip sse4a + # FIXME: these tests fail when the sysroot is compiled with LTO because of a missing symbol in proc-macro. + STDARCH_TEST_EVERYTHING=1 CHANNEL=release CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${{ matrix.cargo_runner }}" TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../cargo.sh test -- --skip rtm --skip tbm --skip sse4a diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.ignore rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.ignore --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.ignore 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/.ignore 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,10 @@ +!/build_sysroot/sysroot_src +!/simple-raytracer +!/regex +!/rand +!/test-backend +!/gcc_path +!/benchmarks +!*gimple* +!*asm* +!.github diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Cargo.lock rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Cargo.lock --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Cargo.lock 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Cargo.lock 2023-12-21 16:55:28.000000000 +0000 @@ -4,9 +4,9 @@ [[package]] name = "aho-corasick" -version = "0.7.18" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" dependencies = [ "memchr", ] @@ -18,12 +18,51 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] +name = "bitflags" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" + +[[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] +name = "errno" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "fastrand" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" + +[[package]] name = "fm" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -35,7 +74,7 @@ [[package]] name = "gccjit" version = "1.0.0" -source = "git+https://github.com/antoyo/gccjit.rs#d6e52626cfc6f487094a5d5ac66302baf3439984" +source = "git+https://github.com/antoyo/gccjit.rs#c52a218f5529321285b4489e5562a00e5428e033" dependencies = [ "gccjit_sys", ] @@ -43,7 +82,7 @@ [[package]] name = "gccjit_sys" version = "0.0.1" -source = "git+https://github.com/antoyo/gccjit.rs#d6e52626cfc6f487094a5d5ac66302baf3439984" +source = "git+https://github.com/antoyo/gccjit.rs#c52a218f5529321285b4489e5562a00e5428e033" dependencies = [ "libc", ] @@ -58,24 +97,10 @@ ] [[package]] -name = "getrandom" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] name = "hermit-abi" -version = "0.1.19" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] +checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" [[package]] name = "lang_tester" @@ -95,86 +120,55 @@ [[package]] name = "libc" -version = "0.2.112" +version = "0.2.147" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" [[package]] -name = "memchr" -version = "2.4.1" +name = "linux-raw-sys" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" +checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" [[package]] -name = "num_cpus" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "ppv-lite86" -version = "0.2.15" +name = "memchr" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] -name = "rand" -version = "0.8.4" +name = "num_cpus" +version = "1.16.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ + "hermit-abi", "libc", - "rand_chacha", - "rand_core", - "rand_hc", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" -dependencies = [ - "getrandom", ] [[package]] -name = "rand_hc" -version = "0.3.1" +name = "object" +version = "0.30.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" +checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" dependencies = [ - "rand_core", + "memchr", ] [[package]] name = "redox_syscall" -version = "0.2.10" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] name = "regex" -version = "1.5.4" +version = "1.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f" dependencies = [ "aho-corasick", "memchr", @@ -183,18 +177,9 @@ [[package]] name = "regex-syntax" -version = "0.6.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" - -[[package]] -name = "remove_dir_all" -version = "0.5.3" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] +checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" [[package]] name = "rustc_codegen_gcc" @@ -202,11 +187,25 @@ dependencies = [ "gccjit", "lang_tester", + "object", "smallvec", "tempfile", ] [[package]] +name = "rustix" +version = "0.38.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f" +dependencies = [ + "bitflags 2.4.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -223,23 +222,22 @@ [[package]] name = "tempfile" -version = "3.2.0" +version = "3.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" +checksum = "dc02fddf48964c42031a0b3fe0428320ecf3a73c401040fc0096f97794310651" dependencies = [ "cfg-if", - "libc", - "rand", + "fastrand", "redox_syscall", - "remove_dir_all", - "winapi", + "rustix", + "windows-sys", ] [[package]] name = "termcolor" -version = "1.1.2" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" dependencies = [ "winapi-util", ] @@ -255,9 +253,9 @@ [[package]] name = "unicode-width" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "wait-timeout" @@ -270,22 +268,15 @@ [[package]] name = "walkdir" -version = "2.3.2" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" dependencies = [ "same-file", - "winapi", "winapi-util", ] [[package]] -name = "wasi" -version = "0.10.2+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" - -[[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -315,3 +306,69 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.48.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -18,7 +18,6 @@ harness = false [features] -default = ["master"] master = ["gccjit/master"] [dependencies] @@ -27,7 +26,13 @@ # Local copy. #gccjit = { path = "../gccjit.rs" } +object = { version = "0.30.1", default-features = false, features = [ + "std", + "read", +] } smallvec = { version = "1.6.1", features = ["union", "may_dangle"] } +# TODO(antoyo): make tempfile optional. +tempfile = "3.7.1" [dev-dependencies] lang_tester = "0.3.9" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Readme.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Readme.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Readme.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/Readme.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,7 @@ # WIP libgccjit codegen backend for rust [![Chat on IRC](https://img.shields.io/badge/irc.libera.chat-%23rustc__codegen__gcc-blue.svg)](https://web.libera.chat/#rustc_codegen_gcc) +[![Chat on Matrix](https://img.shields.io/badge/matrix.org-%23rustc__codegen__gcc-blue.svg)](https://matrix.to/#/#rustc_codegen_gcc:matrix.org) This is a GCC codegen for rustc, which means it can be loaded by the existing rustc frontend, but benefits from GCC: more architectures are supported and GCC's optimizations are used. @@ -14,9 +15,7 @@ ## Building **This requires a patched libgccjit in order to work. -The patches in [this repository](https://github.com/antoyo/libgccjit-patches) need to be applied. -(Those patches should work when applied on master, but in case it doesn't work, they are known to work when applied on 079c23cfe079f203d5df83fea8e92a60c7d7e878.) -You can also use my [fork of gcc](https://github.com/antoyo/gcc) which already includes these patches.** +You need to use my [fork of gcc](https://github.com/antoyo/gcc) which already includes these patches.** To build it (most of these instructions come from [here](https://gcc.gnu.org/onlinedocs/jit/internals/index.html), so don't hesitate to take a look there if you encounter an issue): @@ -56,18 +55,11 @@ $ dirname $(readlink -f `find . -name libgccjit.so`) > gcc_path ``` -You also need to set RUST_COMPILER_RT_ROOT: - -```bash -$ git clone https://github.com/llvm/llvm-project llvm --depth 1 --single-branch -$ export RUST_COMPILER_RT_ROOT="$PWD/llvm/compiler-rt" -``` - Then you can run commands like this: ```bash -$ ./prepare.sh # download and patch sysroot src and install hyperfine for benchmarking -$ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) ./build.sh --release +$ ./y.sh prepare # download and patch sysroot src and install hyperfine for benchmarking +$ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) ./y.sh build --release ``` To run the tests: @@ -78,22 +70,37 @@ ## Usage -`$cg_gccjit_dir` is the directory you cloned this repo into in the following instructions. 
+`$CG_GCCJIT_DIR` is the directory you cloned this repo into in the following instructions: + +```bash +export CG_GCCJIT_DIR=[the full path to rustc_codegen_gcc] +``` ### Cargo ```bash -$ CHANNEL="release" $cg_gccjit_dir/cargo.sh run +$ CHANNEL="release" $CG_GCCJIT_DIR/cargo.sh run ``` If you compiled cg_gccjit in debug mode (aka you didn't pass `--release` to `./test.sh`) you should use `CHANNEL="debug"` instead or omit `CHANNEL="release"` completely. +### LTO + +To use LTO, you need to set the variable `FAT_LTO=1` and `EMBED_LTO_BITCODE=1` in addition to setting `lto = "fat"` in the `Cargo.toml`. +Don't set `FAT_LTO` when compiling the sysroot, though: only set `EMBED_LTO_BITCODE=1`. + +Failing to set `EMBED_LTO_BITCODE` will give you the following error: + +``` +error: failed to copy bitcode to object file: No such file or directory (os error 2) +``` + ### Rustc > You should prefer using the Cargo method. ```bash -$ rustc +$(cat $cg_gccjit_dir/rust-toolchain) -Cpanic=abort -Zcodegen-backend=$cg_gccjit_dir/target/release/librustc_codegen_gcc.so --sysroot $cg_gccjit_dir/build_sysroot/sysroot my_crate.rs +$ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) rustc +$(cat $CG_GCCJIT_DIR/rust-toolchain | grep 'channel' | cut -d '=' -f 2 | sed 's/"//g' | sed 's/ //g') -Cpanic=abort -Zcodegen-backend=$CG_GCCJIT_DIR/target/release/librustc_codegen_gcc.so --sysroot $CG_GCCJIT_DIR/build_sysroot/sysroot my_crate.rs ``` ## Env vars @@ -105,8 +112,18 @@ object files when their content should have been changed by a change to cg_gccjit.
CG_GCCJIT_DISPLAY_CG_TIME
Display the time it took to perform codegen for a crate
+
CG_RUSTFLAGS
+
Send additional flags to rustc. Can be used to build the sysroot without unwinding by setting `CG_RUSTFLAGS=-Cpanic=abort`.
+
CG_GCCJIT_DUMP_TO_FILE
+
Dump a C-like representation to /tmp/gccjit_dumps and enable debug info in order to debug this C-like representation.
+## Licensing + +While this crate is licensed under a dual Apache/MIT license, it links to `libgccjit` which is under the GPLv3+ and thus, the resulting toolchain (rustc + GCC codegen) will need to be released under the GPL license. + +However, programs compiled with `rustc_codegen_gcc` do not need to be released under a GPL license. + ## Debugging Sometimes, libgccjit will crash and output an error like this: @@ -182,6 +199,61 @@ TODO(antoyo): but that's not what I remember I was doing. +### `failed to build archive` error + +When you get this error: + +``` +error: failed to build archive: failed to open object file: No such file or directory (os error 2) +``` + +That can be caused by the fact that you try to compile with `lto = "fat"`, but you didn't compile the sysroot with LTO. +(Not sure if that's the reason since I cannot reproduce anymore. Maybe it happened when forgetting setting `FAT_LTO`.) + +### ld: cannot find crtbegin.o + +When compiling an executable with libgccijt, if setting the `*LIBRARY_PATH` variables to the install directory, you will get the following errors: + +``` +ld: cannot find crtbegin.o: No such file or directory +ld: cannot find -lgcc: No such file or directory +ld: cannot find -lgcc: No such file or directory +libgccjit.so: error: error invoking gcc driver +``` + +To fix this, set the variables to `gcc-build/build/gcc`. + +### How to debug GCC LTO + +Run do the command with `-v -save-temps` and then extract the `lto1` line from the output and run that under the debugger. + +### How to send arguments to the GCC linker + +``` +CG_RUSTFLAGS="-Clink-args=-save-temps -v" ../cargo.sh build +``` + +### How to see the personality functions in the asm dump + +``` +CG_RUSTFLAGS="-Clink-arg=-save-temps -v -Clink-arg=-dA" ../cargo.sh build +``` + +### How to see the LLVM IR for a sysroot crate + +``` +cargo build -v --target x86_64-unknown-linux-gnu -Zbuild-std +# Take the command from the output and add --emit=llvm-ir +``` + +### To prevent the linker from unmangling symbols + +Run with: + +``` +COLLECT_NO_DEMANGLE=1 +``` + ### How to use a custom-build rustc * Build the stage2 compiler (`rustup toolchain link debug-current build/x86_64-unknown-linux-gnu/stage2`). @@ -215,6 +287,16 @@ git merge master ``` +To send the changes to the rust repo: + +```bash +cd ../rust +git pull origin master +git checkout -b subtree-update_cg_gcc_YYYY-MM-DD +PATH="$HOME/bin:$PATH" ~/bin/git-subtree pull --prefix=compiler/rustc_codegen_gcc/ https://github.com/rust-lang/rustc_codegen_gcc.git master +git push +``` + TODO: write a script that does the above. https://rust-lang.zulipchat.com/#narrow/stream/301329-t-devtools/topic/subtree.20madness/near/258877725 @@ -223,20 +305,34 @@ `rustc` needs to be built without `jemalloc` so that `mem-trace` can overload `malloc` since `jemalloc` is linked statically, so a `LD_PRELOAD`-ed library won't a chance to intercept the calls to `malloc`. +### How to generate GIMPLE + +If you need to check what gccjit is generating (GIMPLE), then take a look at how to +generate it in [gimple.md](./doc/gimple.md). + ### How to build a cross-compiling libgccjit #### Building libgccjit - * Follow these instructions: https://preshing.com/20141119/how-to-build-a-gcc-cross-compiler/ with the following changes: - * Configure gcc with `../gcc/configure --enable-host-shared --disable-multilib --enable-languages=c,jit,c++ --disable-bootstrap --enable-checking=release --prefix=/opt/m68k-gcc/ --target=m68k-linux --without-headers`. 
- * Some shells, like fish, don't define the environment variable `$MACHTYPE`. - * Add `CFLAGS="-Wno-error=attributes -g -O2"` at the end of the configure command for building glibc (`CFLAGS="-Wno-error=attributes -Wno-error=array-parameter -Wno-error=stringop-overflow -Wno-error=array-bounds -g -O2"` for glibc 2.31, which is useful for Debian). + * Follow the instructions on [this repo](https://github.com/cross-cg-gcc-tools/cross-gcc). #### Configuring rustc_codegen_gcc - * Set `TARGET_TRIPLE="m68k-unknown-linux-gnu"` in config.sh. - * Since rustc doesn't support this architecture yet, set it back to `TARGET_TRIPLE="mips-unknown-linux-gnu"` (or another target having the same attributes). Alternatively, create a [target specification file](https://book.avr-rust.com/005.1-the-target-specification-json-file.html) (note that the `arch` specified in this file must be supported by the rust compiler). - * Set `linker='-Clinker=m68k-linux-gcc'`. + * Run `./y.sh prepare --cross` so that the sysroot is patched for the cross-compiling case. * Set the path to the cross-compiling libgccjit in `gcc_path`. - * Comment the line: `context.add_command_line_option("-masm=intel");` in src/base.rs. - * (might not be necessary) Disable the compilation of libstd.so (and possibly libcore.so?). + * Make sure you have the linker for your target (for instance `m68k-unknown-linux-gnu-gcc`) in your `$PATH`. Currently, the linker name is hardcoded as being `$TARGET-gcc`. Specify the target when building the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu`. + * Build your project by specifying the target: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../cargo.sh build --target m68k-unknown-linux-gnu`. + +If the target is not yet supported by the Rust compiler, create a [target specification file](https://docs.rust-embedded.org/embedonomicon/custom-target.html) (note that the `arch` specified in this file must be supported by the rust compiler). +Then, you can use it the following way: + + * Add the target specification file using `--target` as an **absolute** path to build the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu --target $(pwd)/m68k-unknown-linux-gnu.json` + * Build your project by specifying the target specification file: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../cargo.sh build --target path/to/m68k-unknown-linux-gnu.json`. + +If you get the following error: + +``` +/usr/bin/ld: unrecognised emulation mode: m68kelf +``` + +Make sure you set `gcc_path` to the install directory. 
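The cross-compilation steps above can be summarised as one command sequence. This is a sketch assembled from the commands quoted in the Readme changes (the m68k triple is only the example used there, and `gcc_path` is assumed to already point at a cross-built libgccjit):

```bash
# Sketch of the cross-compilation flow described in the Readme above.
# Assumes `gcc_path` contains the path to a cross-built libgccjit and that
# the target linker (m68k-unknown-linux-gnu-gcc) is available in $PATH.
./y.sh prepare --cross                                # patch the sysroot sources for cross-compiling
./y.sh build --target-triple m68k-unknown-linux-gnu   # build the sysroot for the target
OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../cargo.sh build --target m68k-unknown-linux-gnu
```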
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ -#!/usr/bin/env bash - -#set -x -set -e - -codegen_channel=debug -sysroot_channel=debug - -flags= - -while [[ $# -gt 0 ]]; do - case $1 in - --release) - codegen_channel=release - shift - ;; - --release-sysroot) - sysroot_channel=release - shift - ;; - --no-default-features) - flags="$flags --no-default-features" - shift - ;; - --features) - shift - flags="$flags --features $1" - shift - ;; - *) - echo "Unknown option $1" - exit 1 - ;; - esac -done - -if [ -f ./gcc_path ]; then - export GCC_PATH=$(cat gcc_path) -else - echo 'Please put the path to your custom build of libgccjit in the file `gcc_path`, see Readme.md for details' - exit 1 -fi - -export LD_LIBRARY_PATH="$GCC_PATH" -export LIBRARY_PATH="$GCC_PATH" - -if [[ "$codegen_channel" == "release" ]]; then - export CHANNEL='release' - CARGO_INCREMENTAL=1 cargo rustc --release $flags -else - echo $LD_LIBRARY_PATH - export CHANNEL='debug' - cargo rustc $flags -fi - -source config.sh - -rm -r target/out || true -mkdir -p target/out/gccjit - -echo "[BUILD] sysroot" -if [[ "$sysroot_channel" == "release" ]]; then - time ./build_sysroot/build_sysroot.sh --release -else - time ./build_sysroot/build_sysroot.sh -fi - diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -2,6 +2,7 @@ authors = ["bjorn3 "] name = "sysroot" version = "0.0.0" +resolver = "2" [dependencies] core = { path = "./sysroot_src/library/core" } @@ -18,3 +19,4 @@ [profile.release] debug = true +#lto = "fat" # TODO(antoyo): re-enable when the failing LTO tests regarding proc-macros are fixed. 
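The deleted `build.sh` above is superseded by the Rust build system invoked through `y.sh`. As a rough sketch, the old and new invocations map as follows; the mapping is inferred from the workflow and Readme changes elsewhere in this diff, not stated in one place:

```bash
# Approximate old -> new command mapping (from the workflow/Readme changes in this diff):
#   ./prepare.sh                            ->  ./y.sh prepare
#   ./prepare_build.sh                      ->  ./y.sh prepare --only-libcore
#   ./build.sh --release --release-sysroot  ->  ./y.sh build --release --release-sysroot
./y.sh prepare
./y.sh build --release --release-sysroot
```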
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/build_sysroot.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/build_sysroot.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/build_sysroot.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/build_sysroot.sh 2023-12-21 16:55:28.000000000 +0000 @@ -5,9 +5,9 @@ set -e cd $(dirname "$0") -pushd ../ >/dev/null +pushd ../ source ./config.sh -popd >/dev/null +popd # Cleanup for previous run # v Clean target dir except for build scripts and incremental cache @@ -22,7 +22,7 @@ RUSTFLAGS="$RUSTFLAGS -Zmir-opt-level=3" cargo build --target $TARGET_TRIPLE --release else sysroot_channel='debug' - cargo build --target $TARGET_TRIPLE --features compiler_builtins/c + cargo build --target $TARGET_TRIPLE fi # Copy files to sysroot diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/prepare_sysroot_src.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/prepare_sysroot_src.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/prepare_sysroot_src.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_sysroot/prepare_sysroot_src.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,39 +0,0 @@ -#!/usr/bin/env bash -set -e -cd $(dirname "$0") - -SRC_DIR=$(dirname $(rustup which rustc))"/../lib/rustlib/src/rust/" -DST_DIR="sysroot_src" - -if [ ! -e $SRC_DIR ]; then - echo "Please install rust-src component" - exit 1 -fi - -rm -rf $DST_DIR -mkdir -p $DST_DIR/library -cp -r $SRC_DIR/library $DST_DIR/ - -pushd $DST_DIR -echo "[GIT] init" -git init -echo "[GIT] add" -git add . -echo "[GIT] commit" - -# This is needed on systems where nothing is configured. -# git really needs something here, or it will fail. -# Even using --author is not enough. -git config user.email || git config user.email "none@example.com" -git config user.name || git config user.name "None" - -git commit -m "Initial commit" -q -for file in $(ls ../../patches/ | grep -v patcha); do - echo "[GIT] apply" $file - git apply ../../patches/$file - git add -A - git commit --no-gpg-sign -m "Patch $file" -done -popd - -echo "Successfully prepared libcore for building" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/Cargo.lock rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/Cargo.lock --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/Cargo.lock 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/Cargo.lock 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,7 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "y" +version = "0.1.0" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,8 @@ +[package] +name = "y" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "y" +path = "src/main.rs" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/build.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/build.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/build.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/build.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,248 @@ +use crate::config::{set_config, ConfigInfo}; +use crate::utils::{ + get_gcc_path, run_command, run_command_with_output_and_env, walk_dir, +}; +use std::collections::HashMap; +use std::ffi::OsStr; +use std::fs; +use std::path::Path; + +#[derive(Default)] +struct BuildArg { + codegen_release_channel: bool, + sysroot_release_channel: bool, + sysroot_panic_abort: bool, + flags: Vec, + gcc_path: String, +} + +impl BuildArg { + fn new() -> Result, String> { + let gcc_path = get_gcc_path()?; + let mut build_arg = Self { + gcc_path, + ..Default::default() + }; + // We skip binary name and the `build` command. + let mut args = std::env::args().skip(2); + + while let Some(arg) = args.next() { + match arg.as_str() { + "--release" => build_arg.codegen_release_channel = true, + "--release-sysroot" => build_arg.sysroot_release_channel = true, + "--no-default-features" => { + build_arg.flags.push("--no-default-features".to_string()); + } + "--sysroot-panic-abort" => { + build_arg.sysroot_panic_abort = true; + }, + "--features" => { + if let Some(arg) = args.next() { + build_arg.flags.push("--features".to_string()); + build_arg.flags.push(arg.as_str().into()); + } else { + return Err( + "Expected a value after `--features`, found nothing".to_string() + ); + } + } + "--help" => { + Self::usage(); + return Ok(None); + } + "--target-triple" => { + if args.next().is_some() { + // Handled in config.rs. + } else { + return Err( + "Expected a value after `--target-triple`, found nothing".to_string() + ); + } + } + "--target" => { + if args.next().is_some() { + // Handled in config.rs. + } else { + return Err( + "Expected a value after `--target`, found nothing".to_string() + ); + } + } + arg => return Err(format!("Unknown argument `{}`", arg)), + } + } + Ok(Some(build_arg)) + } + + fn usage() { + println!( + r#" +`build` command help: + + --release : Build codegen in release mode + --release-sysroot : Build sysroot in release mode + --sysroot-panic-abort : Build the sysroot without unwinding support. 
+ --no-default-features : Add `--no-default-features` flag + --features [arg] : Add a new feature [arg] + --target-triple [arg] : Set the target triple to [arg] + --help : Show this help +"# + ) + } +} + +fn build_sysroot( + env: &mut HashMap, + args: &BuildArg, + config: &ConfigInfo, +) -> Result<(), String> { + std::env::set_current_dir("build_sysroot") + .map_err(|error| format!("Failed to go to `build_sysroot` directory: {:?}", error))?; + // Cleanup for previous run + // Clean target dir except for build scripts and incremental cache + let _ = walk_dir( + "target", + |dir: &Path| { + for top in &["debug", "release"] { + let _ = fs::remove_dir_all(dir.join(top).join("build")); + let _ = fs::remove_dir_all(dir.join(top).join("deps")); + let _ = fs::remove_dir_all(dir.join(top).join("examples")); + let _ = fs::remove_dir_all(dir.join(top).join("native")); + + let _ = walk_dir( + dir.join(top), + |sub_dir: &Path| { + if sub_dir + .file_name() + .map(|filename| filename.to_str().unwrap().starts_with("libsysroot")) + .unwrap_or(false) + { + let _ = fs::remove_dir_all(sub_dir); + } + Ok(()) + }, + |file: &Path| { + if file + .file_name() + .map(|filename| filename.to_str().unwrap().starts_with("libsysroot")) + .unwrap_or(false) + { + let _ = fs::remove_file(file); + } + Ok(()) + }, + ); + } + Ok(()) + }, + |_| Ok(()), + ); + + let _ = fs::remove_file("Cargo.lock"); + let _ = fs::remove_file("test_target/Cargo.lock"); + let _ = fs::remove_dir_all("sysroot"); + + // Builds libs + let mut rustflags = env + .get("RUSTFLAGS") + .cloned() + .unwrap_or_default(); + if args.sysroot_panic_abort { + rustflags.push_str(" -Cpanic=abort -Zpanic-abort-tests"); + } + env.insert( + "RUSTFLAGS".to_string(), + format!("{} -Zmir-opt-level=3", rustflags), + ); + let channel = if args.sysroot_release_channel { + run_command_with_output_and_env( + &[ + &"cargo", + &"build", + &"--target", + &config.target, + &"--release", + ], + None, + Some(&env), + )?; + "release" + } else { + run_command_with_output_and_env( + &[ + &"cargo", + &"build", + &"--target", + &config.target, + ], + None, + Some(env), + )?; + "debug" + }; + + // Copy files to sysroot + let sysroot_path = format!("sysroot/lib/rustlib/{}/lib/", config.target_triple); + fs::create_dir_all(&sysroot_path) + .map_err(|error| format!("Failed to create directory `{}`: {:?}", sysroot_path, error))?; + let copier = |dir_to_copy: &Path| { + run_command(&[&"cp", &"-r", &dir_to_copy, &sysroot_path], None).map(|_| ()) + }; + walk_dir( + &format!("target/{}/{}/deps", config.target_triple, channel), + copier, + copier, + )?; + + Ok(()) +} + +fn build_codegen(args: &BuildArg) -> Result<(), String> { + let mut env = HashMap::new(); + + env.insert("LD_LIBRARY_PATH".to_string(), args.gcc_path.clone()); + env.insert("LIBRARY_PATH".to_string(), args.gcc_path.clone()); + + let mut command: Vec<&dyn AsRef> = vec![&"cargo", &"rustc"]; + if args.codegen_release_channel { + command.push(&"--release"); + env.insert("CHANNEL".to_string(), "release".to_string()); + env.insert("CARGO_INCREMENTAL".to_string(), "1".to_string()); + } else { + env.insert("CHANNEL".to_string(), "debug".to_string()); + } + let flags = args.flags.iter().map(|s| s.as_str()).collect::>(); + for flag in &flags { + command.push(flag); + } + run_command_with_output_and_env(&command, None, Some(&env))?; + + let config = set_config(&mut env, &[], Some(&args.gcc_path))?; + + // We voluntarily ignore the error. 
+ let _ = fs::remove_dir_all("target/out"); + let gccjit_target = "target/out/gccjit"; + fs::create_dir_all(gccjit_target).map_err(|error| { + format!( + "Failed to create directory `{}`: {:?}", + gccjit_target, error + ) + })?; + + println!("[BUILD] sysroot"); + build_sysroot( + &mut env, + args, + &config, + )?; + Ok(()) +} + +pub fn run() -> Result<(), String> { + let args = match BuildArg::new()? { + Some(args) => args, + None => return Ok(()), + }; + build_codegen(&args)?; + Ok(()) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/config.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/config.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/config.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/config.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,149 @@ +use crate::utils::{get_gcc_path, get_os_name, get_rustc_host_triple}; +use std::collections::HashMap; +use std::env as std_env; + +pub struct ConfigInfo { + pub target: String, + pub target_triple: String, + pub rustc_command: Vec, +} + +// Returns the beginning for the command line of rustc. +pub fn set_config( + env: &mut HashMap, + test_flags: &[String], + gcc_path: Option<&str>, +) -> Result { + env.insert("CARGO_INCREMENTAL".to_string(), "0".to_string()); + + let gcc_path = match gcc_path { + Some(path) => path.to_string(), + None => get_gcc_path()?, + }; + env.insert("GCC_PATH".to_string(), gcc_path.clone()); + + let os_name = get_os_name()?; + let dylib_ext = match os_name.as_str() { + "Linux" => "so", + "Darwin" => "dylib", + os => return Err(format!("unsupported OS `{}`", os)), + }; + let host_triple = get_rustc_host_triple()?; + let mut linker = None; + let mut target_triple = host_triple.clone(); + let mut target = target_triple.clone(); + + // We skip binary name and the command. 
+ let mut args = std::env::args().skip(2); + + let mut set_target_triple = false; + let mut set_target = false; + while let Some(arg) = args.next() { + match arg.as_str() { + "--target-triple" => { + if let Some(arg) = args.next() { + target_triple = arg; + set_target_triple = true; + } else { + return Err( + "Expected a value after `--target-triple`, found nothing".to_string() + ); + } + }, + "--target" => { + if let Some(arg) = args.next() { + target = arg; + set_target = true; + } else { + return Err( + "Expected a value after `--target`, found nothing".to_string() + ); + } + }, + _ => (), + } + } + + if set_target_triple && !set_target { + target = target_triple.clone(); + } + + if host_triple != target_triple { + linker = Some(format!("-Clinker={}-gcc", target_triple)); + } + let current_dir = + std_env::current_dir().map_err(|error| format!("`current_dir` failed: {:?}", error))?; + let channel = if let Some(channel) = env.get("CHANNEL") { + channel.as_str() + } else { + "debug" + }; + let cg_backend_path = current_dir + .join("target") + .join(channel) + .join(&format!("librustc_codegen_gcc.{}", dylib_ext)); + let sysroot_path = current_dir.join("build_sysroot/sysroot"); + let mut rustflags = Vec::new(); + if let Some(cg_rustflags) = env.get("CG_RUSTFLAGS") { + rustflags.push(cg_rustflags.clone()); + } + if let Some(linker) = linker { + rustflags.push(linker.to_string()); + } + rustflags.extend_from_slice(&[ + "-Csymbol-mangling-version=v0".to_string(), + "-Cdebuginfo=2".to_string(), + format!("-Zcodegen-backend={}", cg_backend_path.display()), + "--sysroot".to_string(), + sysroot_path.display().to_string(), + ]); + + // Since we don't support ThinLTO, disable LTO completely when not trying to do LTO. + // TODO(antoyo): remove when we can handle ThinLTO. + if !env.contains_key(&"FAT_LTO".to_string()) { + rustflags.push("-Clto=off".to_string()); + } + rustflags.extend_from_slice(test_flags); + // FIXME(antoyo): remove once the atomic shim is gone + if os_name == "Darwin" { + rustflags.extend_from_slice(&[ + "-Clink-arg=-undefined".to_string(), + "-Clink-arg=dynamic_lookup".to_string(), + ]); + } + env.insert("RUSTFLAGS".to_string(), rustflags.join(" ")); + // display metadata load errors + env.insert("RUSTC_LOG".to_string(), "warn".to_string()); + + let sysroot = current_dir.join(&format!( + "build_sysroot/sysroot/lib/rustlib/{}/lib", + target_triple + )); + let ld_library_path = format!( + "{target}:{sysroot}:{gcc_path}", + target = current_dir.join("target/out").display(), + sysroot = sysroot.display(), + ); + env.insert("LD_LIBRARY_PATH".to_string(), ld_library_path.clone()); + env.insert("DYLD_LIBRARY_PATH".to_string(), ld_library_path); + + // NOTE: To avoid the -fno-inline errors, use /opt/gcc/bin/gcc instead of cc. + // To do so, add a symlink for cc to /opt/gcc/bin/gcc in our PATH. 
+ // Another option would be to add the following Rust flag: -Clinker=/opt/gcc/bin/gcc + let path = std::env::var("PATH").unwrap_or_default(); + env.insert("PATH".to_string(), format!("/opt/gcc/bin:{}", path)); + + let mut rustc_command = vec!["rustc".to_string()]; + rustc_command.extend_from_slice(&rustflags); + rustc_command.extend_from_slice(&[ + "-L".to_string(), + "crate=target/out".to_string(), + "--out-dir".to_string(), + "target/out".to_string(), + ]); + Ok(ConfigInfo { + target, + target_triple, + rustc_command, + }) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/main.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/main.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/main.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/main.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,67 @@ +use std::env; +use std::process; + +mod build; +mod config; +mod prepare; +mod rustc_info; +mod test; +mod utils; + +macro_rules! arg_error { + ($($err:tt)*) => {{ + eprintln!($($err)*); + eprintln!(); + usage(); + std::process::exit(1); + }}; +} + +fn usage() { + println!( + "\ +Available commands for build_system: + + prepare : Run prepare command + build : Run build command + test : Run test command + --help : Show this message" + ); +} + +pub enum Command { + Prepare, + Build, + Test, +} + +fn main() { + if env::var("RUST_BACKTRACE").is_err() { + env::set_var("RUST_BACKTRACE", "1"); + } + + let command = match env::args().nth(1).as_deref() { + Some("prepare") => Command::Prepare, + Some("build") => Command::Build, + Some("test") => Command::Test, + Some("--help") => { + usage(); + process::exit(0); + } + Some(flag) if flag.starts_with('-') => arg_error!("Expected command found flag {}", flag), + Some(command) => arg_error!("Unknown command {}", command), + None => { + usage(); + process::exit(0); + } + }; + + if let Err(e) = match command { + Command::Prepare => prepare::run(), + Command::Build => build::run(), + Command::Test => test::run(), + } { + eprintln!("Command failed to run: {e:?}"); + process::exit(1); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/prepare.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/prepare.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/prepare.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/prepare.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,255 @@ +use crate::rustc_info::get_rustc_path; +use crate::utils::{cargo_install, git_clone, run_command, run_command_with_output, walk_dir}; + +use std::fs; +use std::path::Path; + +fn prepare_libcore(sysroot_path: &Path, libgccjit12_patches: bool, cross_compile: bool) -> Result<(), String> { + let rustc_path = match get_rustc_path() { + Some(path) => path, + None => return Err("`rustc` path not found".to_string()), + }; + + let parent = match rustc_path.parent() { + Some(path) => path, + None => return Err(format!("No parent for `{}`", rustc_path.display())), + }; + + let rustlib_dir = parent + .join("../lib/rustlib/src/rust") + .canonicalize() + .map_err(|error| format!("Failed to canonicalize path: {:?}", error))?; + if !rustlib_dir.is_dir() { + return Err("Please install `rust-src` component".to_string()); + } + + let sysroot_dir = sysroot_path.join("sysroot_src"); 
+ if sysroot_dir.is_dir() { + if let Err(error) = fs::remove_dir_all(&sysroot_dir) { + return Err(format!( + "Failed to remove `{}`: {:?}", + sysroot_dir.display(), + error, + )); + } + } + + let sysroot_library_dir = sysroot_dir.join("library"); + fs::create_dir_all(&sysroot_library_dir).map_err(|error| { + format!( + "Failed to create folder `{}`: {:?}", + sysroot_library_dir.display(), + error, + ) + })?; + + run_command( + &[&"cp", &"-r", &rustlib_dir.join("library"), &sysroot_dir], + None, + )?; + + println!("[GIT] init (cwd): `{}`", sysroot_dir.display()); + run_command(&[&"git", &"init"], Some(&sysroot_dir))?; + println!("[GIT] add (cwd): `{}`", sysroot_dir.display()); + run_command(&[&"git", &"add", &"."], Some(&sysroot_dir))?; + println!("[GIT] commit (cwd): `{}`", sysroot_dir.display()); + + // This is needed on systems where nothing is configured. + // git really needs something here, or it will fail. + // Even using --author is not enough. + run_command( + &[&"git", &"config", &"user.email", &"none@example.com"], + Some(&sysroot_dir), + )?; + run_command( + &[&"git", &"config", &"user.name", &"None"], + Some(&sysroot_dir), + )?; + run_command( + &[&"git", &"config", &"core.autocrlf", &"false"], + Some(&sysroot_dir), + )?; + run_command( + &[&"git", &"config", &"commit.gpgSign", &"false"], + Some(&sysroot_dir), + )?; + run_command( + &[&"git", &"commit", &"-m", &"Initial commit", &"-q"], + Some(&sysroot_dir), + )?; + + let mut patches = Vec::new(); + walk_dir( + "patches", + |_| Ok(()), + |file_path: &Path| { + patches.push(file_path.to_path_buf()); + Ok(()) + }, + )?; + if cross_compile { + walk_dir("cross_patches", |_| Ok(()), |file_path: &Path| { + patches.push(file_path.to_path_buf()); + Ok(()) + })?; + } + if libgccjit12_patches { + walk_dir( + "patches/libgccjit12", + |_| Ok(()), + |file_path: &Path| { + patches.push(file_path.to_path_buf()); + Ok(()) + }, + )?; + } + patches.sort(); + for file_path in patches { + println!("[GIT] apply `{}`", file_path.display()); + let path = Path::new("../..").join(file_path); + run_command_with_output(&[&"git", &"apply", &path], Some(&sysroot_dir))?; + run_command_with_output(&[&"git", &"add", &"-A"], Some(&sysroot_dir))?; + run_command_with_output( + &[ + &"git", + &"commit", + &"--no-gpg-sign", + &"-m", + &format!("Patch {}", path.display()), + ], + Some(&sysroot_dir), + )?; + } + println!("Successfully prepared libcore for building"); + Ok(()) +} + +// build with cg_llvm for perf comparison +fn build_raytracer(repo_dir: &Path) -> Result<(), String> { + run_command(&[&"cargo", &"build"], Some(repo_dir))?; + let mv_target = repo_dir.join("raytracer_cg_llvm"); + if mv_target.is_file() { + std::fs::remove_file(&mv_target) + .map_err(|e| format!("Failed to remove file `{}`: {e:?}", mv_target.display()))?; + } + run_command( + &[&"mv", &"target/debug/main", &"raytracer_cg_llvm"], + Some(repo_dir), + )?; + Ok(()) +} + +fn clone_and_setup(repo_url: &str, checkout_commit: &str, extra: Option) -> Result<(), String> +where + F: Fn(&Path) -> Result<(), String>, +{ + let clone_result = git_clone(repo_url, None)?; + if !clone_result.ran_clone { + println!("`{}` has already been cloned", clone_result.repo_name); + } + let repo_path = Path::new(&clone_result.repo_name); + run_command(&[&"git", &"checkout", &"--", &"."], Some(&repo_path))?; + run_command(&[&"git", &"checkout", &checkout_commit], Some(&repo_path))?; + let filter = format!("-{}-", clone_result.repo_name); + walk_dir( + "crate_patches", + |_| Ok(()), + |file_path| { + let patch = 
file_path.as_os_str().to_str().unwrap(); + if patch.contains(&filter) && patch.ends_with(".patch") { + run_command_with_output( + &[&"git", &"am", &file_path.canonicalize().unwrap()], + Some(&repo_path), + )?; + } + Ok(()) + }, + )?; + if let Some(extra) = extra { + extra(&repo_path)?; + } + Ok(()) +} + +struct PrepareArg { + cross_compile: bool, + only_libcore: bool, + libgccjit12_patches: bool, +} + +impl PrepareArg { + fn new() -> Result, String> { + let mut only_libcore = false; + let mut cross_compile = false; + let mut libgccjit12_patches = false; + + for arg in std::env::args().skip(2) { + match arg.as_str() { + "--only-libcore" => only_libcore = true, + "--cross" => cross_compile = true, + "--libgccjit12-patches" => libgccjit12_patches = true, + "--help" => { + Self::usage(); + return Ok(None); + } + a => return Err(format!("Unknown argument `{a}`")), + } + } + Ok(Some(Self { + cross_compile, + only_libcore, + libgccjit12_patches, + })) + } + + fn usage() { + println!( + r#" +`prepare` command help: + + --only-libcore : Only setup libcore and don't clone other repositories + --cross : Apply the patches needed to do cross-compilation + --libgccjit12-patches : Apply patches needed for libgccjit12 + --help : Show this help +"# + ) + } +} + +pub fn run() -> Result<(), String> { + let args = match PrepareArg::new()? { + Some(a) => a, + None => return Ok(()), + }; + let sysroot_path = Path::new("build_sysroot"); + prepare_libcore(sysroot_path, args.libgccjit12_patches, args.cross_compile)?; + + if !args.only_libcore { + cargo_install("hyperfine")?; + + let to_clone = &[ + ( + "https://github.com/rust-random/rand.git", + "0f933f9c7176e53b2a3c7952ded484e1783f0bf1", + None, + ), + ( + "https://github.com/rust-lang/regex.git", + "341f207c1071f7290e3f228c710817c280c8dca1", + None, + ), + ( + "https://github.com/ebobby/simple-raytracer", + "804a7a21b9e673a482797aa289a18ed480e4d813", + Some(build_raytracer), + ), + ]; + + for (repo_url, checkout_commit, cb) in to_clone { + clone_and_setup(repo_url, checkout_commit, *cb)?; + } + } + + println!("Successfully ran `prepare`"); + Ok(()) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/rustc_info.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/rustc_info.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/rustc_info.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/rustc_info.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,12 @@ +use std::path::{Path, PathBuf}; + +use crate::utils::run_command; + +pub fn get_rustc_path() -> Option { + if let Ok(rustc) = std::env::var("RUSTC") { + return Some(PathBuf::from(rustc)); + } + run_command(&[&"rustup", &"which", &"rustc"], None) + .ok() + .map(|out| Path::new(String::from_utf8(out.stdout).unwrap().trim()).to_path_buf()) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/test.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/test.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/test.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/test.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +use crate::utils::run_command_with_output; + +fn get_args<'a>(args: &mut Vec<&'a dyn AsRef>, extra_args: &'a Vec) { + for extra_arg in extra_args { + args.push(extra_arg); + } +} + 
+pub fn run() -> Result<(), String> { + let mut args: Vec<&dyn AsRef> = vec![&"bash", &"test.sh"]; + let extra_args = std::env::args().skip(2).collect::>(); + get_args(&mut args, &extra_args); + let current_dir = std::env::current_dir().map_err(|error| format!("`current_dir` failed: {:?}", error))?; + run_command_with_output(args.as_slice(), Some(¤t_dir)) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/utils.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/utils.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/utils.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/build_system/src/utils.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,240 @@ +use std::collections::HashMap; +use std::ffi::OsStr; +use std::fmt::Debug; +use std::fs; +use std::path::Path; +use std::process::{Command, ExitStatus, Output}; + +fn get_command_inner( + input: &[&dyn AsRef], + cwd: Option<&Path>, + env: Option<&HashMap>, +) -> Command { + let (cmd, args) = match input { + [] => panic!("empty command"), + [cmd, args @ ..] => (cmd, args), + }; + let mut command = Command::new(cmd); + command.args(args); + if let Some(cwd) = cwd { + command.current_dir(cwd); + } + if let Some(env) = env { + command.envs(env.iter().map(|(k, v)| (k.as_str(), v.as_str()))); + } + command +} + +fn check_exit_status( + input: &[&dyn AsRef], + cwd: Option<&Path>, + exit_status: ExitStatus, +) -> Result<(), String> { + if exit_status.success() { + Ok(()) + } else { + Err(format!( + "Command `{}`{} exited with status {:?}", + input + .iter() + .map(|s| s.as_ref().to_str().unwrap()) + .collect::>() + .join(" "), + cwd.map(|cwd| format!(" (running in folder `{}`)", cwd.display())) + .unwrap_or_default(), + exit_status.code(), + )) + } +} + +fn command_error(input: &[&dyn AsRef], cwd: &Option<&Path>, error: D) -> String { + format!( + "Command `{}`{} failed to run: {error:?}", + input + .iter() + .map(|s| s.as_ref().to_str().unwrap()) + .collect::>() + .join(" "), + cwd.as_ref() + .map(|cwd| format!(" (running in folder `{}`)", cwd.display(),)) + .unwrap_or_default(), + ) +} + +pub fn run_command(input: &[&dyn AsRef], cwd: Option<&Path>) -> Result { + run_command_with_env(input, cwd, None) +} + +pub fn run_command_with_env( + input: &[&dyn AsRef], + cwd: Option<&Path>, + env: Option<&HashMap>, +) -> Result { + let output = get_command_inner(input, cwd, env) + .output() + .map_err(|e| command_error(input, &cwd, e))?; + check_exit_status(input, cwd, output.status)?; + Ok(output) +} + +pub fn run_command_with_output( + input: &[&dyn AsRef], + cwd: Option<&Path>, +) -> Result<(), String> { + let exit_status = get_command_inner(input, cwd, None) + .spawn() + .map_err(|e| command_error(input, &cwd, e))? + .wait() + .map_err(|e| command_error(input, &cwd, e))?; + check_exit_status(input, cwd, exit_status)?; + Ok(()) +} + +pub fn run_command_with_output_and_env( + input: &[&dyn AsRef], + cwd: Option<&Path>, + env: Option<&HashMap>, +) -> Result<(), String> { + let exit_status = get_command_inner(input, cwd, env) + .spawn() + .map_err(|e| command_error(input, &cwd, e))? 
+ .wait() + .map_err(|e| command_error(input, &cwd, e))?; + check_exit_status(input, cwd, exit_status)?; + Ok(()) +} + +pub fn cargo_install(to_install: &str) -> Result<(), String> { + let output = run_command(&[&"cargo", &"install", &"--list"], None)?; + + let to_install_needle = format!("{to_install} "); + // cargo install --list returns something like this: + // + // mdbook-toc v0.8.0: + // mdbook-toc + // rust-reduce v0.1.0: + // rust-reduce + // + // We are only interested into the command name so we only look for lines ending with `:`. + if String::from_utf8(output.stdout) + .unwrap() + .lines() + .any(|line| line.ends_with(':') && line.starts_with(&to_install_needle)) + { + return Ok(()); + } + // We voluntarily ignore this error. + if run_command_with_output(&[&"cargo", &"install", &to_install], None).is_err() { + println!("Skipping installation of `{to_install}`"); + } + Ok(()) +} + +pub fn get_os_name() -> Result { + let output = run_command(&[&"uname"], None)?; + let name = std::str::from_utf8(&output.stdout) + .unwrap_or("") + .trim() + .to_string(); + if !name.is_empty() { + Ok(name) + } else { + Err("Failed to retrieve the OS name".to_string()) + } +} + +pub fn get_rustc_host_triple() -> Result { + let output = run_command(&[&"rustc", &"-vV"], None)?; + let content = std::str::from_utf8(&output.stdout).unwrap_or(""); + + for line in content.split('\n').map(|line| line.trim()) { + if !line.starts_with("host:") { + continue; + } + return Ok(line.split(':').nth(1).unwrap().trim().to_string()); + } + Err("Cannot find host triple".to_string()) +} + +pub fn get_gcc_path() -> Result { + let content = match fs::read_to_string("gcc_path") { + Ok(content) => content, + Err(_) => { + return Err( + "Please put the path to your custom build of libgccjit in the file \ + `gcc_path`, see Readme.md for details" + .into(), + ) + } + }; + match content + .split('\n') + .map(|line| line.trim()) + .filter(|line| !line.is_empty()) + .next() + { + Some(gcc_path) => { + let path = Path::new(gcc_path); + if !path.exists() { + Err(format!( + "Path `{}` contained in the `gcc_path` file doesn't exist", + gcc_path, + )) + } else { + Ok(gcc_path.into()) + } + } + None => Err("No path found in `gcc_path` file".into()), + } +} + +pub struct CloneResult { + pub ran_clone: bool, + pub repo_name: String, +} + +pub fn git_clone(to_clone: &str, dest: Option<&Path>) -> Result { + let repo_name = to_clone.split('/').last().unwrap(); + let repo_name = match repo_name.strip_suffix(".git") { + Some(n) => n.to_string(), + None => repo_name.to_string(), + }; + + let dest = dest + .map(|dest| dest.join(&repo_name)) + .unwrap_or_else(|| Path::new(&repo_name).into()); + if dest.is_dir() { + return Ok(CloneResult { + ran_clone: false, + repo_name, + }); + } + + run_command_with_output(&[&"git", &"clone", &to_clone, &dest], None)?; + Ok(CloneResult { + ran_clone: true, + repo_name, + }) +} + +pub fn walk_dir(dir: P, mut dir_cb: D, mut file_cb: F) -> Result<(), String> +where + P: AsRef, + D: FnMut(&Path) -> Result<(), String>, + F: FnMut(&Path) -> Result<(), String>, +{ + let dir = dir.as_ref(); + for entry in fs::read_dir(dir) + .map_err(|error| format!("Failed to read dir `{}`: {:?}", dir.display(), error))? 
+ { + let entry = entry + .map_err(|error| format!("Failed to read entry in `{}`: {:?}", dir.display(), error))?; + let entry_path = entry.path(); + if entry_path.is_dir() { + dir_cb(&entry_path)?; + } else { + file_cb(&entry_path)?; + } + } + Ok(()) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/cargo.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/cargo.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/cargo.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/cargo.sh 2023-12-21 16:55:28.000000000 +0000 @@ -12,7 +12,7 @@ popd >/dev/null -if [[ $(rustc -V) != $(rustc +${TOOLCHAIN} -V) ]]; then +if [[ $(${RUSTC} -V) != $(${RUSTC} +${TOOLCHAIN} -V) ]]; then echo "rustc_codegen_gcc is build for $(rustc +${TOOLCHAIN} -V) but the default rustc version is $(rustc -V)." echo "Using $(rustc +${TOOLCHAIN} -V)." fi diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/config.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/config.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/config.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/config.sh 2023-12-21 16:55:28.000000000 +0000 @@ -4,49 +4,82 @@ if [ -f ./gcc_path ]; then export GCC_PATH=$(cat gcc_path) +elif (( $use_system_gcc == 1 )); then + echo 'Using system GCC' else echo 'Please put the path to your custom build of libgccjit in the file `gcc_path`, see Readme.md for details' exit 1 fi +if [[ -z "$RUSTC" ]]; then + export RUSTC="rustc" +fi + unamestr=`uname` if [[ "$unamestr" == 'Linux' ]]; then - dylib_ext='so' + dylib_ext='so' elif [[ "$unamestr" == 'Darwin' ]]; then - dylib_ext='dylib' + dylib_ext='dylib' else - echo "Unsupported os" - exit 1 + echo "Unsupported os" + exit 1 fi -HOST_TRIPLE=$(rustc -vV | grep host | cut -d: -f2 | tr -d " ") -TARGET_TRIPLE=$HOST_TRIPLE -#TARGET_TRIPLE="m68k-unknown-linux-gnu" +HOST_TRIPLE=$($RUSTC -vV | grep host | cut -d: -f2 | tr -d " ") +# TODO: remove $OVERWRITE_TARGET_TRIPLE when config.sh is removed. +TARGET_TRIPLE="${OVERWRITE_TARGET_TRIPLE:-$HOST_TRIPLE}" linker='' RUN_WRAPPER='' if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then - if [[ "$TARGET_TRIPLE" == "m68k-unknown-linux-gnu" ]]; then - TARGET_TRIPLE="mips-unknown-linux-gnu" - linker='-Clinker=m68k-linux-gcc' - elif [[ "$TARGET_TRIPLE" == "aarch64-unknown-linux-gnu" ]]; then - # We are cross-compiling for aarch64. Use the correct linker and run tests in qemu. - linker='-Clinker=aarch64-linux-gnu-gcc' - RUN_WRAPPER='qemu-aarch64 -L /usr/aarch64-linux-gnu' - else - echo "Unknown non-native platform" - fi + RUN_WRAPPER=run_in_vm + if [[ "$TARGET_TRIPLE" == "m68k-unknown-linux-gnu" ]]; then + linker='-Clinker=m68k-unknown-linux-gnu-gcc' + elif [[ "$TARGET_TRIPLE" == "aarch64-unknown-linux-gnu" ]]; then + # We are cross-compiling for aarch64. Use the correct linker and run tests in qemu. + linker='-Clinker=aarch64-linux-gnu-gcc' + else + echo "Unknown non-native platform" + fi fi -export RUSTFLAGS="$CG_RUSTFLAGS $linker -Csymbol-mangling-version=v0 -Cdebuginfo=2 -Clto=off -Zcodegen-backend=$(pwd)/target/${CHANNEL:-debug}/librustc_codegen_gcc.$dylib_ext --sysroot $(pwd)/build_sysroot/sysroot $TEST_FLAGS" +# Since we don't support ThinLTO, disable LTO completely when not trying to do LTO. +# TODO(antoyo): remove when we can handle ThinLTO. +disable_lto_flags='' +if [[ ! 
-v FAT_LTO ]]; then + disable_lto_flags='-Clto=off' +fi + +if [[ -z "$BUILTIN_BACKEND" ]]; then + export RUSTFLAGS="$CG_RUSTFLAGS $linker -Csymbol-mangling-version=v0 -Cdebuginfo=2 $disable_lto_flags -Zcodegen-backend=$(pwd)/target/${CHANNEL:-debug}/librustc_codegen_gcc.$dylib_ext --sysroot $(pwd)/build_sysroot/sysroot $TEST_FLAGS" +else + export RUSTFLAGS="$CG_RUSTFLAGS $linker -Csymbol-mangling-version=v0 -Cdebuginfo=2 $disable_lto_flags -Zcodegen-backend=gcc $TEST_FLAGS -Cpanic=abort" + + if [[ ! -z "$RUSTC_SYSROOT" ]]; then + export RUSTFLAGS="$RUSTFLAGS --sysroot $RUSTC_SYSROOT" + fi +fi # FIXME(antoyo): remove once the atomic shim is gone -if [[ `uname` == 'Darwin' ]]; then - export RUSTFLAGS="$RUSTFLAGS -Clink-arg=-undefined -Clink-arg=dynamic_lookup" +if [[ unamestr == 'Darwin' ]]; then + export RUSTFLAGS="$RUSTFLAGS -Clink-arg=-undefined -Clink-arg=dynamic_lookup" fi -RUSTC="rustc $RUSTFLAGS -L crate=target/out --out-dir target/out" +if [[ -z "$cargo_target_dir" ]]; then + RUST_CMD="$RUSTC $RUSTFLAGS -L crate=target/out --out-dir target/out" + cargo_target_dir="target/out" +else + RUST_CMD="$RUSTC $RUSTFLAGS -L crate=$cargo_target_dir --out-dir $cargo_target_dir" +fi export RUSTC_LOG=warn # display metadata load errors -export LD_LIBRARY_PATH="$(pwd)/target/out:$(pwd)/build_sysroot/sysroot/lib/rustlib/$TARGET_TRIPLE/lib:$GCC_PATH" +export LD_LIBRARY_PATH="$(pwd)/target/out:$(pwd)/build_sysroot/sysroot/lib/rustlib/$TARGET_TRIPLE/lib" +if [[ ! -z "$:$GCC_PATH" ]]; then + export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GCC_PATH" +fi + export DYLD_LIBRARY_PATH=$LD_LIBRARY_PATH +# NOTE: To avoid the -fno-inline errors, use /opt/gcc/bin/gcc instead of cc. +# To do so, add a symlink for cc to /opt/gcc/bin/gcc in our PATH. +# Another option would be to add the following Rust flag: -Clinker=/opt/gcc/bin/gcc +export PATH="/opt/gcc/bin:/opt/m68k-unknown-linux-gnu/bin:$PATH" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/cross_patches/0001-Disable-libstd-and-libtest-dylib.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/cross_patches/0001-Disable-libstd-and-libtest-dylib.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/cross_patches/0001-Disable-libstd-and-libtest-dylib.patch 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/cross_patches/0001-Disable-libstd-and-libtest-dylib.patch 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,39 @@ +From 966beefe08be6045bfcca26079b76a7a80413080 Mon Sep 17 00:00:00 2001 +From: None +Date: Thu, 28 Sep 2023 17:37:38 -0400 +Subject: [PATCH] Disable libstd and libtest dylib + +--- + library/std/Cargo.toml | 2 +- + library/test/Cargo.toml | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml +index 5b21355..cb0c49b 100644 +--- a/library/std/Cargo.toml ++++ b/library/std/Cargo.toml +@@ -9,7 +9,7 @@ description = "The Rust Standard Library" + edition = "2021" + + [lib] +-crate-type = ["dylib", "rlib"] ++crate-type = ["rlib"] + + [dependencies] + alloc = { path = "../alloc", public = true } +diff --git a/library/test/Cargo.toml b/library/test/Cargo.toml +index 91a1abd..a58c160 100644 +--- a/library/test/Cargo.toml ++++ b/library/test/Cargo.toml +@@ -4,7 +4,7 @@ version = "0.0.0" + edition = "2021" + + [lib] +-crate-type = ["dylib", "rlib"] ++crate-type = ["rlib"] + + [dependencies] + getopts = { version = "0.2.21", features = ['rustc-dep-of-std'] } +-- +2.42.0 + diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/add-attribute.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/add-attribute.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/add-attribute.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/add-attribute.md 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +# Add support for a new function attribute + +To add support for a new function attribute in libgccjit, you need to do the following steps: + + 1. Copy the corresponding function from `c-family/c-attribs.cc` into `jit/dummy-frontend.cc`. For example if you add the `target` attribute, the function name will be `handle_target_attribute`. + 2. Copy the corresponding entry from the `c_common_attribute_table` variable in the `c-family/c-attribs.cc` file into the `jit_attribute_table` variable in `jit/dummy-frontend.cc`. + 3. Add a new variant in the `gcc_jit_fn_attribute` enum in the `jit/libgccjit.h` file. + 4. Add a test to ensure the attribute is correctly applied in `gcc/testsuite/jit.dg/`. Take a look at `gcc/testsuite/jit.dg/test-nonnull.c` if you want an example. + 5. Run the example like this (in your `gcc-build` folder): `make check-jit RUNTESTFLAGS="-v -v -v jit.exp=jit.dg/test-nonnull.c"` + +Once done, you need to update the [gccjit.rs] crate to add the new enum variant in the corresponding enum (`FnAttribute`). + +Finally, you need to update this repository by calling the relevant API you added in [gccjit.rs]. + +To test it, build `gcc`, run `cargo update -p gccjit` and then you can test the generated output for a given Rust crate. + +[gccjit.rs]: https://github.com/antoyo/gccjit.rs diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/gimple.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/gimple.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/gimple.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/gimple.md 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,111 @@ +# GIMPLE + +You can see the full documentation about what GIMPLE is [here](https://gcc.gnu.org/onlinedocs/gccint/GIMPLE.html). In this document we will explain how to generate it. + +First, we'll copy the content from `gcc/gcc/testsuite/jit.dg/test-const-attribute.c` into a +file named `local.c` and remove the content we're not interested into: + +```diff +- /* { dg-do compile { target x86_64-*-* } } */ +... +- /* We don't want set_options() in harness.h to set -O3 to see that the const +- attribute affects the optimizations. */ +- #define TEST_ESCHEWS_SET_OPTIONS +- static void set_options (gcc_jit_context *ctxt, const char *argv0) +- { +- // Set "-O3". +- gcc_jit_context_set_int_option(ctxt, GCC_JIT_INT_OPTION_OPTIMIZATION_LEVEL, 3); +- } +- +- #define TEST_COMPILING_TO_FILE +- #define OUTPUT_KIND GCC_JIT_OUTPUT_KIND_ASSEMBLER +- #define OUTPUT_FILENAME "output-of-test-const-attribute.c.s" +- #include "harness.h" +... +- /* { dg-final { jit-verify-output-file-was-created "" } } */ +- /* Check that the loop was optimized away */ +- /* { dg-final { jit-verify-assembler-output-not "jne" } } */ +``` + +Then we'll add a `main` function which will call the `create_code` function but +also add the calls we need to generate the GIMPLE: + +```C +int main() { + gcc_jit_context *ctxt = gcc_jit_context_acquire(); + // To set `-O3`, update it depending on your needs. 
+    gcc_jit_context_set_int_option(ctxt, GCC_JIT_INT_OPTION_OPTIMIZATION_LEVEL, 3);
+    // Very important option to generate the gimple format.
+    gcc_jit_context_set_bool_option(ctxt, GCC_JIT_BOOL_OPTION_DUMP_INITIAL_GIMPLE, 1);
+    create_code(ctxt, NULL);
+
+    gcc_jit_context_compile(ctxt);
+    // If you want to compile to assembly (or any other format) directly, you can
+    // use the following call instead:
+    // gcc_jit_context_compile_to_file(ctxt, GCC_JIT_OUTPUT_KIND_ASSEMBLER, "out.s");
+
+    return 0;
+}
+```
+
+Then we can compile it by using:
+
+```console
+gcc local.c -I `pwd`/gcc/gcc/jit/ -L `pwd`/gcc-build/gcc -lgccjit -o out
+```
+
+And finally, when you run it:
+
+```console
+LD_LIBRARY_PATH=`pwd`/gcc-build/gcc LIBRARY_PATH=`pwd`/gcc-build/gcc ./out
+```
+
+It should display:
+
+```c
+__attribute__((const))
+int xxx ()
+{
+  int D.3394;
+  int sum;
+  int x;
+
+  :
+  x = 45;
+  sum = 0;
+  goto loop_cond;
+  loop_cond:
+  x = x >> 1;
+  if (x != 0) goto after_loop; else goto loop_body;
+  loop_body:
+  _1 = foo (x);
+  _2 = _1 * 2;
+  x = x + _2;
+  goto loop_cond;
+  after_loop:
+  D.3394 = sum;
+  return D.3394;
+}
+```
+
+An alternative way to generate the GIMPLE is to replace:
+
+```c
+    gcc_jit_context_set_bool_option(ctxt, GCC_JIT_BOOL_OPTION_DUMP_INITIAL_GIMPLE, 1);
+```
+
+with:
+
+```c
+    gcc_jit_context_add_command_line_option(ctxt, "-fdump-tree-gimple");
+```
+
+(although you can have both at the same time). Then you can compile it as before. Only one difference: before executing it, I recommend running:
+
+```console
+rm -rf /tmp/libgccjit-*
+```
+
+to make it easier for you to know which folder to look into.
+
+Once the execution is done, you should have a file with a path looking like `/tmp/libgccjit-9OFqkD/fake.c.006t.gimple` which contains the GIMPLE format.
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/sending-gcc-patch.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/sending-gcc-patch.md
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/sending-gcc-patch.md 1970-01-01 00:00:00.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/sending-gcc-patch.md 2023-12-21 16:55:28.000000000 +0000
@@ -0,0 +1,44 @@
+This guide explains what to do to send a GCC patch for review.
+
+All the commands are supposed to be run in the folder where you cloned GCC.
+
+```bash
+./contrib/gcc-changelog/git_check_commit.py
+```
+
+You can provide a specific commit hash:
+
+```bash
+./contrib/gcc-changelog/git_check_commit.py abdef78989
+```
+
+a range:
+
+```bash
+./contrib/gcc-changelog/git_check_commit.py HEAD~2
+```
+
+or even a comparison with a remote branch:
+
+```bash
+./contrib/gcc-changelog/git_check_commit.py upstream/master..HEAD
+```
+
+When there are no more errors, generate the git patch:
+
+```bash
+git format-patch -1 `git rev-parse --short HEAD`
+```
+
+Then you can run the remaining checks using:
+
+```bash
+contrib/check_GNU_style.sh 0001-your-patch.patch
+```
+
+When you have no more errors, you can send the `.patch` file to GCC by emailing it to
+`gcc-patches@gcc.gnu.org` and to the relevant GCC mailing lists
+depending on what your patch changes. You can find the list of the mailing lists
+[here](https://gcc.gnu.org/lists.html).
+
+You can find more information about "contributing to GCC" [here](https://gcc.gnu.org/contribute.html).
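For instance, once `0001-your-patch.patch` passes both checks, one possible way to mail it from the command line is sketched below; this assumes `git send-email` is installed and already configured for your mail setup:

```bash
# Send the patch to the main GCC patches list; add further --to/--cc flags
# for the other relevant mailing lists from https://gcc.gnu.org/lists.html.
git send-email --to=gcc-patches@gcc.gnu.org 0001-your-patch.patch
```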
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/tests.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/tests.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/tests.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/doc/tests.md 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,5 @@ +# Tests + +## Show the rustc command for UI tests + +Add ` --test-args "--verbose"` to `./x.py test`. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/alloc_example.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/alloc_example.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/alloc_example.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/alloc_example.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,5 +1,6 @@ #![feature(start, core_intrinsics, alloc_error_handler, lang_items)] #![no_std] +#![allow(internal_features)] extern crate alloc; extern crate alloc_system; @@ -17,7 +18,7 @@ } #[panic_handler] -fn panic_handler(_: &core::panic::PanicInfo) -> ! { +fn panic_handler(_: &core::panic::PanicInfo<'_>) -> ! { core::intrinsics::abort(); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/alloc_system.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/alloc_system.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/alloc_system.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/alloc_system.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,16 +3,16 @@ #![no_std] #![feature(allocator_api, rustc_private)] -#![cfg_attr(any(unix, target_os = "redox"), feature(libc))] // The minimum alignment guaranteed by the architecture. This value is used to // add fast paths for low alignment values. 
#[cfg(any(target_arch = "x86", target_arch = "arm", + target_arch = "m68k", target_arch = "mips", target_arch = "mips32r6", target_arch = "powerpc", - target_arch = "csky" + target_arch = "csky", target_arch = "powerpc64"))] const MIN_ALIGN: usize = 8; #[cfg(any(target_arch = "x86_64", @@ -47,7 +47,18 @@ } #[cfg(any(unix, target_os = "redox"))] mod platform { - extern crate libc; + mod libc { + use core::ffi::{c_void, c_int}; + + #[link(name = "c")] + extern "C" { + pub fn malloc(size: usize) -> *mut c_void; + pub fn realloc(ptr: *mut c_void, size: usize) -> *mut c_void; + pub fn calloc(nmemb: usize, size: usize) -> *mut c_void; + pub fn free(ptr: *mut u8); + pub fn posix_memalign(memptr: *mut *mut c_void, alignment: usize, size: usize) -> c_int; + } + } use core::ptr; use MIN_ALIGN; use System; @@ -81,12 +92,12 @@ } #[inline] unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) { - libc::free(ptr as *mut libc::c_void) + libc::free(ptr as *mut _) } #[inline] unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { if layout.align() <= MIN_ALIGN && layout.align() <= new_size { - libc::realloc(ptr as *mut libc::c_void, new_size) as *mut u8 + libc::realloc(ptr as *mut _, new_size) as *mut u8 } else { self.realloc_fallback(ptr, layout, new_size) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/arbitrary_self_types_pointers_and_wrappers.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/arbitrary_self_types_pointers_and_wrappers.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/arbitrary_self_types_pointers_and_wrappers.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/arbitrary_self_types_pointers_and_wrappers.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,6 +2,7 @@ #![feature(arbitrary_self_types, unsize, coerce_unsized, dispatch_from_dyn)] #![feature(rustc_attrs)] +#![allow(internal_features)] use std::{ ops::{Deref, CoerceUnsized, DispatchFromDyn}, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mini_core.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mini_core.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mini_core.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mini_core.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,7 +4,7 @@ thread_local )] #![no_core] -#![allow(dead_code)] +#![allow(dead_code, internal_features)] #[no_mangle] unsafe extern "C" fn _Unwind_Resume() { @@ -428,6 +428,15 @@ intrinsics::abort(); } } + +#[lang = "panic_in_cleanup"] +#[rustc_nounwind] +fn panic_in_cleanup() -> ! 
{ + unsafe { + libc::printf("panic in a destructor during cleanup\n\0" as *const str as *const i8); + intrinsics::abort(); + } +} #[lang = "panic_bounds_check"] #[track_caller] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mini_core_hello_world.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,7 +5,7 @@ extern_types, thread_local )] #![no_core] -#![allow(dead_code, non_camel_case_types)] +#![allow(dead_code, internal_features, non_camel_case_types)] extern crate mini_core; @@ -152,7 +152,8 @@ let slice = &[0, 1] as &[i32]; let slice_ptr = slice as *const [i32] as *const i32; - assert_eq!(slice_ptr as usize % 4, 0); + let align = intrinsics::min_align_of::<*const i32>(); + assert_eq!(slice_ptr as usize % align, 0); //return; @@ -186,7 +187,10 @@ let a: &dyn SomeTrait = &"abc\0"; a.object_safe(); + #[cfg(target_arch="x86_64")] assert_eq!(intrinsics::size_of_val(a) as u8, 16); + #[cfg(target_arch="m68k")] + assert_eq!(intrinsics::size_of_val(a) as u8, 8); assert_eq!(intrinsics::size_of_val(&0u32) as u8, 4); assert_eq!(intrinsics::min_align_of::() as u8, 2); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mod_bench.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mod_bench.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mod_bench.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/mod_bench.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,11 +1,12 @@ #![feature(start, core_intrinsics, lang_items)] #![no_std] +#![allow(internal_features)] #[link(name = "c")] extern {} #[panic_handler] -fn panic_handler(_: &core::panic::PanicInfo) -> ! { +fn panic_handler(_: &core::panic::PanicInfo<'_>) -> ! { core::intrinsics::abort(); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/std_example.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/std_example.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/std_example.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/example/std_example.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,9 +1,10 @@ -#![feature(core_intrinsics, generators, generator_trait, is_sorted)] +#![feature(core_intrinsics, coroutines, coroutine_trait, is_sorted)] #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] use std::arch::x86_64::*; use std::io::Write; -use std::ops::Generator; +use std::ops::Coroutine; extern { pub fn printf(format: *const i8, ...) 
-> i32; @@ -95,6 +96,7 @@ println!("{:?}", std::intrinsics::caller_location()); + #[cfg(target_arch="x86_64")] #[cfg(feature="master")] unsafe { test_simd(); @@ -108,6 +110,7 @@ } #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] #[target_feature(enable = "sse2")] unsafe fn test_simd() { let x = _mm_setzero_si128(); @@ -136,6 +139,7 @@ } #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] #[target_feature(enable = "sse2")] unsafe fn test_mm_slli_si128() { #[rustfmt::skip] @@ -164,6 +168,7 @@ #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] #[target_feature(enable = "sse2")] unsafe fn test_mm_movemask_epi8() { #[rustfmt::skip] @@ -178,6 +183,7 @@ } #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] #[target_feature(enable = "avx2")] unsafe fn test_mm256_movemask_epi8() { let a = _mm256_set1_epi8(-1); @@ -187,6 +193,7 @@ } #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] #[target_feature(enable = "sse2")] unsafe fn test_mm_add_epi8() { let a = _mm_setr_epi8(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15); @@ -203,6 +210,7 @@ } #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] #[target_feature(enable = "sse2")] unsafe fn test_mm_add_pd() { let a = _mm_setr_pd(1.0, 2.0); @@ -212,6 +220,7 @@ } #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] fn assert_eq_m128i(x: std::arch::x86_64::__m128i, y: std::arch::x86_64::__m128i) { unsafe { assert_eq!(std::mem::transmute::<_, [u8; 16]>(x), std::mem::transmute::<_, [u8; 16]>(y)); @@ -219,6 +228,7 @@ } #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] #[target_feature(enable = "sse2")] pub unsafe fn assert_eq_m128d(a: __m128d, b: __m128d) { if _mm_movemask_pd(_mm_cmpeq_pd(a, b)) != 0b11 { @@ -227,6 +237,7 @@ } #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] #[target_feature(enable = "sse2")] unsafe fn test_mm_cvtsi128_si64() { let r = _mm_cvtsi128_si64(std::mem::transmute::<[i64; 2], _>([5, 0])); @@ -234,6 +245,7 @@ } #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] #[target_feature(enable = "sse4.1")] unsafe fn test_mm_cvtepi8_epi16() { let a = _mm_set1_epi8(10); @@ -247,6 +259,7 @@ } #[cfg(feature="master")] +#[cfg(target_arch="x86_64")] #[target_feature(enable = "sse4.1")] unsafe fn test_mm_extract_epi8() { #[rustfmt::skip] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-lto-tests.txt rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-lto-tests.txt --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-lto-tests.txt 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-lto-tests.txt 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,23 @@ +tests/ui/lint/unsafe_code/forge_unsafe_block.rs +tests/ui/lint/unused-qualification-in-derive-expansion.rs +tests/ui/macro-quote-test.rs +tests/ui/macros/proc_macro.rs +tests/ui/panic-runtime/lto-unwind.rs +tests/ui/resolve/derive-macro-1.rs +tests/ui/resolve/derive-macro-2.rs +tests/ui/rfcs/rfc-2565-param-attrs/param-attrs-pretty.rs +tests/ui/rfcs/rfc-2565-param-attrs/issue-64682-dropping-first-attrs-in-impl-fns.rs +tests/ui/rfcs/rfc-3348-c-string-literals/edition-spans.rs +tests/ui/rust-2018/suggestions-not-always-applicable.rs +tests/ui/rust-2021/reserved-prefixes-via-macro.rs +tests/ui/underscore-imports/duplicate.rs +tests/ui/async-await/issues/issue-60674.rs +tests/ui/attributes/main-removed-2/main.rs +tests/ui/cfg/assume-incomplete-release/assume-incomplete.rs +tests/ui/crate-loading/cross-compiled-proc-macro.rs 
+tests/ui/derives/derive-marker-tricky.rs +tests/ui/diagnostic_namespace/existing_proc_macros.rs +tests/ui/fmt/format-args-capture-issue-106408.rs +tests/ui/fmt/indoc-issue-106408.rs +tests/ui/hygiene/issue-77523-def-site-async-await.rs +tests/ui/inherent-impls-overlap-check/no-overlap.rs diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-non-lto-tests.txt rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-non-lto-tests.txt --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-non-lto-tests.txt 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-non-lto-tests.txt 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,11 @@ +tests/ui/issues/issue-44056.rs +tests/ui/lto/fat-lto.rs +tests/ui/lto/debuginfo-lto.rs +tests/ui/lto/lto-many-codegen-units.rs +tests/ui/lto/issue-100772.rs +tests/ui/lto/lto-rustc-loads-linker-plugin.rs +tests/ui/panic-runtime/lto-unwind.rs +tests/ui/sanitize/issue-111184-coroutine-witness.rs +tests/ui/sepcomp/sepcomp-lib-lto.rs +tests/ui/lto/lto-opt-level-s.rs +tests/ui/lto/lto-opt-level-z.rs diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-ui-tests.txt rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-ui-tests.txt --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-ui-tests.txt 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-ui-tests.txt 2023-12-21 16:55:28.000000000 +0000 @@ -1,11 +1,5 @@ -tests/ui/allocator/custom-in-block.rs -tests/ui/allocator/custom-in-submodule.rs -tests/ui/allocator/custom.rs -tests/ui/allocator/hygiene.rs tests/ui/allocator/no_std-alloc-error-handler-custom.rs tests/ui/allocator/no_std-alloc-error-handler-default.rs -tests/ui/allocator/xcrate-use.rs -tests/ui/allocator/xcrate-use2.rs tests/ui/asm/may_unwind.rs tests/ui/asm/x86_64/multiple-clobber-abi.rs tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs @@ -14,15 +8,12 @@ tests/ui/lto/dylib-works.rs tests/ui/numbers-arithmetic/saturating-float-casts.rs tests/ui/polymorphization/promoted-function.rs -tests/ui/process/nofile-limit.rs tests/ui/sepcomp/sepcomp-cci.rs tests/ui/sepcomp/sepcomp-extern.rs tests/ui/sepcomp/sepcomp-fns-backwards.rs tests/ui/sepcomp/sepcomp-fns.rs tests/ui/sepcomp/sepcomp-statics.rs tests/ui/simd/intrinsic/generic-arithmetic-pass.rs -tests/ui/sse2.rs -tests/ui/target-feature/missing-plusminus.rs tests/ui/asm/x86_64/may_unwind.rs tests/ui/backtrace.rs tests/ui/catch-unwind-bang.rs @@ -30,8 +21,8 @@ tests/ui/drop/dynamic-drop-async.rs tests/ui/drop/repeat-drop.rs tests/ui/fmt/format-args-capture.rs -tests/ui/generator/panic-drops-resume.rs -tests/ui/generator/panic-drops.rs +tests/ui/coroutine/panic-drops-resume.rs +tests/ui/coroutine/panic-drops.rs tests/ui/intrinsics/panic-uninitialized-zeroed.rs tests/ui/iterators/iter-sum-overflow-debug.rs tests/ui/iterators/iter-sum-overflow-overflow-checks.rs @@ -54,17 +45,30 @@ tests/ui/issues/issue-43853.rs tests/ui/issues/issue-47364.rs tests/ui/macros/rfc-2011-nicer-assert-messages/assert-without-captures-does-not-create-unnecessary-code.rs -tests/ui/rfcs/rfc-2091-track-caller/std-panic-locations.rs tests/ui/rfcs/rfc-1857-stabilize-drop-order/drop-order.rs +tests/ui/rfcs/rfc-2091-track-caller/std-panic-locations.rs tests/ui/simd/issue-17170.rs tests/ui/simd/issue-39720.rs tests/ui/simd/issue-89193.rs tests/ui/statics/issue-91050-1.rs tests/ui/statics/issue-91050-2.rs 
tests/ui/alloc-error/default-alloc-error-hook.rs -tests/ui/generator/panic-safe.rs +tests/ui/coroutine/panic-safe.rs tests/ui/issues/issue-14875.rs tests/ui/issues/issue-29948.rs -tests/ui/panic-while-printing.rs -tests/ui/enum-discriminant/get_discr.rs tests/ui/panics/nested_panic_caught.rs +tests/ui/simd/intrinsic/generic-bswap-byte.rs +tests/ui/const_prop/ice-issue-111353.rs +tests/ui/process/println-with-broken-pipe.rs +tests/ui/panic-runtime/lto-abort.rs +tests/ui/lto/thin-lto-inlines2.rs +tests/ui/lto/weak-works.rs +tests/ui/lto/thin-lto-inlines.rs +tests/ui/lto/thin-lto-global-allocator.rs +tests/ui/lto/msvc-imp-present.rs +tests/ui/lto/lto-thin-rustc-loads-linker-plugin.rs +tests/ui/lto/all-crates.rs +tests/ui/async-await/deep-futures-are-freeze.rs +tests/ui/closures/capture-unsized-by-ref.rs +tests/ui/coroutine/resume-after-return.rs +tests/ui/macros/rfc-2011-nicer-assert-messages/all-expr-kinds.rs diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-ui-tests12.txt rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-ui-tests12.txt --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-ui-tests12.txt 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/failing-ui-tests12.txt 2023-12-21 16:55:28.000000000 +0000 @@ -19,21 +19,22 @@ tests/ui/simd/intrinsic/generic-select-pass.rs tests/ui/simd/intrinsic/inlining-issue67557-ice.rs tests/ui/simd/intrinsic/inlining-issue67557.rs -tests/ui/simd/monomorphize-shuffle-index.rs tests/ui/simd/shuffle.rs tests/ui/simd/simd-bitmask.rs -tests/ui/generator/resume-after-return.rs tests/ui/iterators/iter-step-overflow-debug.rs -tests/ui/macros/rfc-2011-nicer-assert-messages/all-expr-kinds.rs tests/ui/numbers-arithmetic/next-power-of-two-overflow-debug.rs tests/ui/privacy/reachable-unnameable-items.rs -tests/ui/rfc-1937-termination-trait/termination-trait-in-test.rs +tests/ui/rfcs/rfc-1937-termination-trait/termination-trait-in-test.rs tests/ui/async-await/async-fn-size-moved-locals.rs tests/ui/async-await/async-fn-size-uninit-locals.rs tests/ui/cfg/cfg-panic.rs -tests/ui/generator/size-moved-locals.rs +tests/ui/coroutine/size-moved-locals.rs tests/ui/macros/rfc-2011-nicer-assert-messages/all-not-available-cases.rs tests/ui/simd/intrinsic/generic-gather-pass.rs tests/ui/simd/issue-85915-simd-ptrs.rs tests/ui/issues/issue-68010-large-zst-consts.rs tests/ui/rust-2018/proc-macro-crate-in-paths.rs +tests/ui/target-feature/missing-plusminus.rs +tests/ui/sse2.rs +tests/ui/codegen/issue-79865-llvm-miscompile.rs +tests/ui/intrinsics/intrinsics-integer.rs diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,7 @@ +codegen_gcc_unknown_ctarget_feature_prefix = + unknown feature specified for `-Ctarget-feature`: `{$feature}` + .note = features must begin with a `+` to enable or `-` to disable it + codegen_gcc_invalid_minimum_alignment = invalid minimum global alignment: {$err} @@ -9,3 +13,29 @@ codegen_gcc_unwinding_inline_asm = GCC backend does not support unwinding from inline asm + +codegen_gcc_copy_bitcode = failed to copy bitcode to object file: {$err} + +codegen_gcc_dynamic_linking_with_lto = + cannot prefer dynamic linking when 
performing LTO + .note = only 'staticlib', 'bin', and 'cdylib' outputs are supported with LTO + +codegen_gcc_load_bitcode = failed to load bitcode of module "{$name}" + +codegen_gcc_lto_disallowed = lto can only be run for executables, cdylibs and static library outputs + +codegen_gcc_lto_dylib = lto cannot be used for `dylib` crate type without `-Zdylib-lto` + +codegen_gcc_lto_bitcode_from_rlib = failed to get bitcode from object file for LTO ({$gcc_err}) + +codegen_gcc_unknown_ctarget_feature = + unknown feature specified for `-Ctarget-feature`: `{$feature}` + .note = it is still passed through to the codegen backend + .possible_feature = you might have meant: `{$rust_feature}` + .consider_filing_feature_request = consider filing a feature request + +codegen_gcc_missing_features = + add the missing features in a `target_feature` attribute + +codegen_gcc_target_feature_disable_or_enable = + the target features {$features} must all be either enabled or disabled together diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/0001-Add-stdarch-Cargo.toml-for-testing.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/0001-Add-stdarch-Cargo.toml-for-testing.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/0001-Add-stdarch-Cargo.toml-for-testing.patch 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/0001-Add-stdarch-Cargo.toml-for-testing.patch 2023-12-21 16:55:28.000000000 +0000 @@ -1,25 +1,26 @@ -From c3821e02fbd6cb5ad6e06d759fccdc9073712375 Mon Sep 17 00:00:00 2001 +From b8f3eed3053c9333b5dfbeaeb2a6a65a4b3156df Mon Sep 17 00:00:00 2001 From: Antoni Boucher -Date: Tue, 7 Jun 2022 21:40:13 -0400 -Subject: [PATCH] Add stdarch Cargo.toml for testing +Date: Tue, 29 Aug 2023 13:06:34 -0400 +Subject: [PATCH] Patch 0001-Add-stdarch-Cargo.toml-for-testing.patch --- - library/stdarch/Cargo.toml | 20 ++++++++++++++++++++ - 1 file changed, 20 insertions(+) + library/stdarch/Cargo.toml | 23 +++++++++++++++++++++++ + 1 file changed, 23 insertions(+) create mode 100644 library/stdarch/Cargo.toml diff --git a/library/stdarch/Cargo.toml b/library/stdarch/Cargo.toml new file mode 100644 -index 0000000..fbe0a95 +index 0000000..4c63700 --- /dev/null +++ b/library/stdarch/Cargo.toml -@@ -0,0 +1,20 @@ +@@ -0,0 +1,21 @@ +[workspace] ++resolver = "1" +members = [ + "crates/core_arch", + "crates/std_detect", + "crates/stdarch-gen", -+ "examples/" ++ #"examples/" +] +exclude = [ + "crates/wasm-assert-instr-tests" @@ -35,5 +36,5 @@ +opt-level = 3 +incremental = true -- -2.26.2.7.g19db9cfb68.dirty +2.42.0 diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/0001-Disable-examples.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/0001-Disable-examples.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/0001-Disable-examples.patch 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/0001-Disable-examples.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -From a2d53a324a02c04b76c0e9d39dc15cd443a3b8b2 Mon Sep 17 00:00:00 2001 -From: Antoni Boucher -Date: Fri, 25 Nov 2022 11:18:11 -0500 -Subject: [PATCH] Disable examples - ---- - library/stdarch/Cargo.toml | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/library/stdarch/Cargo.toml b/library/stdarch/Cargo.toml -index fbe0a95..748d72d 100644 ---- a/library/stdarch/Cargo.toml -+++ 
b/library/stdarch/Cargo.toml -@@ -3,7 +3,7 @@ members = [ - "crates/core_arch", - "crates/std_detect", - "crates/stdarch-gen", -- "examples/" -+ #"examples/" - ] - exclude = [ - "crates/wasm-assert-instr-tests" --- -2.26.2.7.g19db9cfb68.dirty - diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/patches/libgccjit12/0001-core-Disable-portable-simd-test.patch 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,32 @@ +From 7bcd24ec6d4a96121874cb1ae5a23ea274aeff34 Mon Sep 17 00:00:00 2001 +From: None +Date: Thu, 19 Oct 2023 13:12:51 -0400 +Subject: [PATCH] [core] Disable portable-simd test + +--- + library/core/tests/lib.rs | 2 -- + 1 file changed, 2 deletions(-) + +diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs +index 5814ed4..194ad4c 100644 +--- a/library/core/tests/lib.rs ++++ b/library/core/tests/lib.rs +@@ -90,7 +90,6 @@ + #![feature(unwrap_infallible)] + #![feature(pointer_byte_offsets)] + #![feature(pointer_is_aligned)] +-#![feature(portable_simd)] + #![feature(ptr_metadata)] + #![feature(lazy_cell)] + #![feature(unsized_tuple_coercion)] +@@ -157,7 +156,6 @@ mod pin; + mod pin_macro; + mod ptr; + mod result; +-mod simd; + mod slice; + mod str; + mod str_lossy; +-- +2.42.0 + diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/prepare.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/prepare.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/prepare.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/prepare.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -#!/usr/bin/env bash -set -e -set -v - -source prepare_build.sh - -cargo install hyperfine || echo "Skipping hyperfine install" - -git clone https://github.com/rust-random/rand.git || echo "rust-random/rand has already been cloned" -pushd rand -git checkout -- . -git checkout 0f933f9c7176e53b2a3c7952ded484e1783f0bf1 -git am ../crate_patches/*-rand-*.patch -popd - -git clone https://github.com/rust-lang/regex.git || echo "rust-lang/regex has already been cloned" -pushd regex -git checkout -- . -git checkout 341f207c1071f7290e3f228c710817c280c8dca1 -popd - -git clone https://github.com/ebobby/simple-raytracer || echo "ebobby/simple-raytracer has already been cloned" -pushd simple-raytracer -git checkout -- . 
-git checkout 804a7a21b9e673a482797aa289a18ed480e4d813 - -# build with cg_llvm for perf comparison -cargo build -mv target/debug/main raytracer_cg_llvm -popd diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/prepare_build.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/prepare_build.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/prepare_build.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/prepare_build.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -#!/usr/bin/env bash -set -e -set -v - -./build_sysroot/prepare_sysroot_src.sh diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/rust-toolchain rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/rust-toolchain --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/rust-toolchain 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/rust-toolchain 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2023-06-19" +channel = "nightly-2023-10-21" components = ["rust-src", "rustc-dev", "llvm-tools-preview"] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/rustup.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/rustup.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/rustup.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/rustup.sh 2023-12-21 16:55:28.000000000 +0000 @@ -16,7 +16,7 @@ done ./clean_all.sh - ./prepare.sh + ./y.sh prepare ;; "commit") git add rust-toolchain diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/abi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/abi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/abi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/abi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,9 +1,13 @@ +#[cfg(feature = "master")] +use gccjit::FnAttribute; use gccjit::{ToLValue, ToRValue, Type}; use rustc_codegen_ssa::traits::{AbiBuilderMethods, BaseTypeMethods}; use rustc_data_structures::fx::FxHashSet; use rustc_middle::bug; use rustc_middle::ty::Ty; -use rustc_target::abi::call::{CastTarget, FnAbi, PassMode, Reg, RegKind}; +#[cfg(feature = "master")] +use rustc_session::config; +use rustc_target::abi::call::{ArgAttributes, CastTarget, FnAbi, PassMode, Reg, RegKind}; use crate::builder::Builder; use crate::context::CodegenCx; @@ -94,14 +98,23 @@ } } +pub struct FnAbiGcc<'gcc> { + pub return_type: Type<'gcc>, + pub arguments_type: Vec<Type<'gcc>>, + pub is_c_variadic: bool, + pub on_stack_param_indices: FxHashSet<usize>, + #[cfg(feature = "master")] + pub fn_attributes: Vec<FnAttribute<'gcc>>, +} + pub trait FnAbiGccExt<'gcc, 'tcx> { // TODO(antoyo): return a function pointer type instead? - fn gcc_type(&self, cx: &CodegenCx<'gcc, 'tcx>) -> (Type<'gcc>, Vec<Type<'gcc>>, bool, FxHashSet<usize>); + fn gcc_type(&self, cx: &CodegenCx<'gcc, 'tcx>) -> FnAbiGcc<'gcc>; fn ptr_to_gcc_type(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc>; } impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { - fn gcc_type(&self, cx: &CodegenCx<'gcc, 'tcx>) -> (Type<'gcc>, Vec<Type<'gcc>>, bool, FxHashSet<usize>) { + fn gcc_type(&self, cx: &CodegenCx<'gcc, 'tcx>) -> FnAbiGcc<'gcc> { let mut on_stack_param_indices = FxHashSet::default(); // This capacity calculation is approximate. @@ -109,7 +122,7 @@ self.args.len() + if let PassMode::Indirect { ..
} = self.ret.mode { 1 } else { 0 } ); - let return_ty = + let return_type = match self.ret.mode { PassMode::Ignore => cx.type_void(), PassMode::Direct(_) | PassMode::Pair(..) => self.ret.layout.immediate_gcc_type(cx), @@ -119,41 +132,89 @@ cx.type_void() } }; + #[cfg(feature = "master")] + let mut non_null_args = Vec::new(); + + #[cfg(feature = "master")] + let mut apply_attrs = |mut ty: Type<'gcc>, attrs: &ArgAttributes, arg_index: usize| { + if cx.sess().opts.optimize == config::OptLevel::No { + return ty; + } + if attrs.regular.contains(rustc_target::abi::call::ArgAttribute::NoAlias) { + ty = ty.make_restrict() + } + if attrs.regular.contains(rustc_target::abi::call::ArgAttribute::NonNull) { + non_null_args.push(arg_index as i32 + 1); + } + ty + }; + #[cfg(not(feature = "master"))] + let apply_attrs = |ty: Type<'gcc>, _attrs: &ArgAttributes, _arg_index: usize| { + ty + }; for arg in self.args.iter() { let arg_ty = match arg.mode { PassMode::Ignore => continue, - PassMode::Direct(_) => arg.layout.immediate_gcc_type(cx), - PassMode::Pair(..) => { - argument_tys.push(arg.layout.scalar_pair_element_gcc_type(cx, 0)); - argument_tys.push(arg.layout.scalar_pair_element_gcc_type(cx, 1)); + PassMode::Pair(a, b) => { + let arg_pos = argument_tys.len(); + argument_tys.push(apply_attrs(arg.layout.scalar_pair_element_gcc_type(cx, 0), &a, arg_pos)); + argument_tys.push(apply_attrs(arg.layout.scalar_pair_element_gcc_type(cx, 1), &b, arg_pos + 1)); continue; } - PassMode::Indirect { meta_attrs: Some(_), .. } => { - unimplemented!(); - } PassMode::Cast { ref cast, pad_i32 } => { // add padding if pad_i32 { argument_tys.push(Reg::i32().gcc_type(cx)); } - cast.gcc_type(cx) + let ty = cast.gcc_type(cx); + apply_attrs(ty, &cast.attrs, argument_tys.len()) } - PassMode::Indirect { meta_attrs: None, on_stack: true, .. } => { + PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: true } => { + // This is a "byval" argument, so we don't apply the `restrict` attribute on it. on_stack_param_indices.insert(argument_tys.len()); arg.memory_ty(cx) }, - PassMode::Indirect { meta_attrs: None, on_stack: false, .. } => cx.type_ptr_to(arg.memory_ty(cx)), + PassMode::Direct(attrs) => apply_attrs(arg.layout.immediate_gcc_type(cx), &attrs, argument_tys.len()), + PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => { + apply_attrs(cx.type_ptr_to(arg.memory_ty(cx)), &attrs, argument_tys.len()) + } + PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => { + assert!(!on_stack); + let ty = apply_attrs(cx.type_ptr_to(arg.memory_ty(cx)), &attrs, argument_tys.len()); + apply_attrs(ty, &meta_attrs, argument_tys.len()) + } }; argument_tys.push(arg_ty); } - (return_ty, argument_tys, self.c_variadic, on_stack_param_indices) + #[cfg(feature = "master")] + let fn_attrs = if non_null_args.is_empty() { + Vec::new() + } else { + vec![FnAttribute::NonNull(non_null_args)] + }; + + FnAbiGcc { + return_type, + arguments_type: argument_tys, + is_c_variadic: self.c_variadic, + on_stack_param_indices, + #[cfg(feature = "master")] + fn_attributes: fn_attrs, + } } fn ptr_to_gcc_type(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> { - let (return_type, params, variadic, on_stack_param_indices) = self.gcc_type(cx); - let pointer_type = cx.context.new_function_pointer_type(None, return_type, &params, variadic); + // FIXME(antoyo): Should we do something with `FnAbiGcc::fn_attributes`? + let FnAbiGcc { + return_type, + arguments_type, + is_c_variadic, + on_stack_param_indices, + ..
+ } = self.gcc_type(cx); + let pointer_type = cx.context.new_function_pointer_type(None, return_type, &arguments_type, is_c_variadic); cx.on_stack_params.borrow_mut().insert(pointer_type.dyncast_function_ptr_type().expect("function ptr type"), on_stack_param_indices); pointer_type } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/allocator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/allocator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/allocator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/allocator.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,6 @@ #[cfg(feature="master")] use gccjit::FnAttribute; -use gccjit::{FunctionType, GlobalKind, ToRValue}; +use gccjit::{Context, FunctionType, GlobalKind, ToRValue, Type}; use rustc_ast::expand::allocator::{ alloc_error_handler_name, default_fn_name, global_fn_name, AllocatorKind, AllocatorTy, ALLOCATOR_METHODS, NO_ALLOC_SHIM_IS_UNSTABLE, @@ -22,7 +22,6 @@ }; let i8 = context.new_type::(); let i8p = i8.make_pointer(); - let void = context.new_type::<()>(); if kind == AllocatorKind::Default { for method in ALLOCATOR_METHODS { @@ -47,67 +46,62 @@ panic!("invalid allocator output") } }; - let name = global_fn_name(method.name); + let from_name = global_fn_name(method.name); + let to_name = default_fn_name(method.name); - let args: Vec<_> = types.iter().enumerate() - .map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index))) - .collect(); - let func = context.new_function(None, FunctionType::Exported, output.unwrap_or(void), &args, name, false); - - if tcx.sess.target.options.default_hidden_visibility { - #[cfg(feature="master")] - func.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden)); - } - if tcx.sess.must_emit_unwind_tables() { - // TODO(antoyo): emit unwind tables. 
- } - - let callee = default_fn_name(method.name); - let args: Vec<_> = types.iter().enumerate() - .map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index))) - .collect(); - let callee = context.new_function(None, FunctionType::Extern, output.unwrap_or(void), &args, callee, false); - #[cfg(feature="master")] - callee.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden)); - - let block = func.new_block("entry"); - - let args = args - .iter() - .enumerate() - .map(|(i, _)| func.get_param(i as i32).to_rvalue()) - .collect::>(); - let ret = context.new_call(None, callee, &args); - //llvm::LLVMSetTailCall(ret, True); - if output.is_some() { - block.end_with_return(None, ret); - } - else { - block.end_with_void_return(None); - } - - // TODO(@Commeownist): Check if we need to emit some extra debugging info in certain circumstances - // as described in https://github.com/rust-lang/rust/commit/77a96ed5646f7c3ee8897693decc4626fe380643 + create_wrapper_function(tcx, context, &from_name, &to_name, &types, output); } } - let types = [usize, usize]; - let name = "__rust_alloc_error_handler".to_string(); + // FIXME(bjorn3): Add noreturn attribute + create_wrapper_function( + tcx, + context, + "__rust_alloc_error_handler", + &alloc_error_handler_name(alloc_error_handler_kind), + &[usize, usize], + None, + ); + + let name = OomStrategy::SYMBOL.to_string(); + let global = context.new_global(None, GlobalKind::Exported, i8, name); + let value = tcx.sess.opts.unstable_opts.oom.should_panic(); + let value = context.new_rvalue_from_int(i8, value as i32); + global.global_set_initializer_rvalue(value); + + let name = NO_ALLOC_SHIM_IS_UNSTABLE.to_string(); + let global = context.new_global(None, GlobalKind::Exported, i8, name); + let value = context.new_rvalue_from_int(i8, 0); + global.global_set_initializer_rvalue(value); +} + +fn create_wrapper_function( + tcx: TyCtxt<'_>, + context: &Context<'_>, + from_name: &str, + to_name: &str, + types: &[Type<'_>], + output: Option>, +) { + let void = context.new_type::<()>(); + let args: Vec<_> = types.iter().enumerate() .map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index))) .collect(); - let func = context.new_function(None, FunctionType::Exported, void, &args, name, false); + let func = context.new_function(None, FunctionType::Exported, output.unwrap_or(void), &args, from_name, false); - if tcx.sess.target.default_hidden_visibility { + if tcx.sess.target.options.default_hidden_visibility { #[cfg(feature="master")] func.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden)); } + if tcx.sess.must_emit_unwind_tables() { + // TODO(antoyo): emit unwind tables. 
+ } - let callee = alloc_error_handler_name(alloc_error_handler_kind); let args: Vec<_> = types.iter().enumerate() .map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index))) .collect(); - let callee = context.new_function(None, FunctionType::Extern, void, &args, callee, false); + let callee = context.new_function(None, FunctionType::Extern, output.unwrap_or(void), &args, to_name, false); #[cfg(feature="master")] callee.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden)); @@ -118,18 +112,15 @@ .enumerate() .map(|(i, _)| func.get_param(i as i32).to_rvalue()) .collect::>(); - let _ret = context.new_call(None, callee, &args); + let ret = context.new_call(None, callee, &args); //llvm::LLVMSetTailCall(ret, True); - block.end_with_void_return(None); - - let name = OomStrategy::SYMBOL.to_string(); - let global = context.new_global(None, GlobalKind::Exported, i8, name); - let value = tcx.sess.opts.unstable_opts.oom.should_panic(); - let value = context.new_rvalue_from_int(i8, value as i32); - global.global_set_initializer_rvalue(value); + if output.is_some() { + block.end_with_return(None, ret); + } + else { + block.end_with_void_return(None); + } - let name = NO_ALLOC_SHIM_IS_UNSTABLE.to_string(); - let global = context.new_global(None, GlobalKind::Exported, i8, name); - let value = context.new_rvalue_from_int(i8, 0); - global.global_set_initializer_rvalue(value); + // TODO(@Commeownist): Check if we need to emit some extra debugging info in certain circumstances + // as described in https://github.com/rust-lang/rust/commit/77a96ed5646f7c3ee8897693decc4626fe380643 } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/asm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/asm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/asm.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/asm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -452,10 +452,6 @@ } InlineAsmOperandRef::Const { ref string } => { - // Const operands get injected directly into the template - if att_dialect { - template_str.push('$'); - } template_str.push_str(string); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/attributes.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/attributes.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/attributes.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/attributes.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,72 +4,13 @@ use rustc_attr::InstructionSetAttr; #[cfg(feature="master")] use rustc_attr::InlineAttr; -use rustc_codegen_ssa::target_features::tied_target_features; -use rustc_data_structures::fx::FxHashMap; use rustc_middle::ty; #[cfg(feature="master")] use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; -use rustc_session::Session; use rustc_span::symbol::sym; -use smallvec::{smallvec, SmallVec}; use crate::{context::CodegenCx, errors::TiedTargetFeatures}; - -// Given a map from target_features to whether they are enabled or disabled, -// ensure only valid combinations are allowed. 
-pub fn check_tied_features(sess: &Session, features: &FxHashMap<&str, bool>) -> Option<&'static [&'static str]> { - for tied in tied_target_features(sess) { - // Tied features must be set to the same value, or not set at all - let mut tied_iter = tied.iter(); - let enabled = features.get(tied_iter.next().unwrap()); - if tied_iter.any(|feature| enabled != features.get(feature)) { - return Some(tied); - } - } - None -} - -// TODO(antoyo): maybe move to a new module gcc_util. -// To find a list of GCC's names, check https://gcc.gnu.org/onlinedocs/gcc/Function-Attributes.html -fn to_gcc_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]> { - let arch = if sess.target.arch == "x86_64" { "x86" } else { &*sess.target.arch }; - match (arch, s) { - ("x86", "sse4.2") => smallvec!["sse4.2", "crc32"], - ("x86", "pclmulqdq") => smallvec!["pclmul"], - ("x86", "rdrand") => smallvec!["rdrnd"], - ("x86", "bmi1") => smallvec!["bmi"], - ("x86", "cmpxchg16b") => smallvec!["cx16"], - ("x86", "avx512vaes") => smallvec!["vaes"], - ("x86", "avx512gfni") => smallvec!["gfni"], - ("x86", "avx512vpclmulqdq") => smallvec!["vpclmulqdq"], - // NOTE: seems like GCC requires 'avx512bw' for 'avx512vbmi2'. - ("x86", "avx512vbmi2") => smallvec!["avx512vbmi2", "avx512bw"], - // NOTE: seems like GCC requires 'avx512bw' for 'avx512bitalg'. - ("x86", "avx512bitalg") => smallvec!["avx512bitalg", "avx512bw"], - ("aarch64", "rcpc2") => smallvec!["rcpc-immo"], - ("aarch64", "dpb") => smallvec!["ccpp"], - ("aarch64", "dpb2") => smallvec!["ccdp"], - ("aarch64", "frintts") => smallvec!["fptoint"], - ("aarch64", "fcma") => smallvec!["complxnum"], - ("aarch64", "pmuv3") => smallvec!["perfmon"], - ("aarch64", "paca") => smallvec!["pauth"], - ("aarch64", "pacg") => smallvec!["pauth"], - // Rust ties fp and neon together. In LLVM neon implicitly enables fp, - // but we manually enable neon when a feature only implicitly enables fp - ("aarch64", "f32mm") => smallvec!["f32mm", "neon"], - ("aarch64", "f64mm") => smallvec!["f64mm", "neon"], - ("aarch64", "fhm") => smallvec!["fp16fml", "neon"], - ("aarch64", "fp16") => smallvec!["fullfp16", "neon"], - ("aarch64", "jsconv") => smallvec!["jsconv", "neon"], - ("aarch64", "sve") => smallvec!["sve", "neon"], - ("aarch64", "sve2") => smallvec!["sve2", "neon"], - ("aarch64", "sve2-aes") => smallvec!["sve2-aes", "neon"], - ("aarch64", "sve2-sm4") => smallvec!["sve2-sm4", "neon"], - ("aarch64", "sve2-sha3") => smallvec!["sve2-sha3", "neon"], - ("aarch64", "sve2-bitperm") => smallvec!["sve2-bitperm", "neon"], - (_, s) => smallvec![s], - } -} +use crate::gcc_util::{check_tied_features, to_gcc_features}; /// Get GCC attribute for the provided inline heuristic. #[cfg(feature="master")] @@ -112,8 +53,24 @@ codegen_fn_attrs.inline }; if let Some(attr) = inline_attr(cx, inline) { + if let FnAttribute::AlwaysInline = attr { + func.add_attribute(FnAttribute::Inline); + } func.add_attribute(attr); } + + if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::COLD) { + func.add_attribute(FnAttribute::Cold); + } + if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::FFI_RETURNS_TWICE) { + func.add_attribute(FnAttribute::ReturnsTwice); + } + if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::FFI_PURE) { + func.add_attribute(FnAttribute::Pure); + } + if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::FFI_CONST) { + func.add_attribute(FnAttribute::Const); + } } let function_features = @@ -140,11 +97,33 @@ })) .collect::>(); - // TODO(antoyo): check if we really need global backend features. 
(Maybe they could be applied - // globally?) + // TODO(antoyo): cg_llvm adds global features to each function so that LTO keep them. + // Check if GCC requires the same. let mut global_features = cx.tcx.global_backend_features(()).iter().map(|s| s.as_str()); function_features.extend(&mut global_features); - let target_features = function_features.join(","); + let target_features = function_features + .iter() + .filter_map(|feature| { + // FIXME(antoyo): for some reasons, disabling SSE results in the following error when + // compiling Rust for Linux: + // SSE register return with SSE disabled + // TODO(antoyo): support soft-float and retpoline-external-thunk. + if feature.contains("soft-float") || feature.contains("retpoline-external-thunk") || *feature == "-sse" { + return None; + } + + if feature.starts_with('-') { + Some(format!("no{}", feature)) + } + else if feature.starts_with('+') { + Some(feature[1..].to_string()) + } + else { + Some(feature.to_string()) + } + }) + .collect::>() + .join(","); if !target_features.is_empty() { #[cfg(feature="master")] func.add_attribute(FnAttribute::Target(&target_features)); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/lto.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/lto.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/lto.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/lto.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,341 @@ +/// GCC requires to use the same toolchain for the whole compilation when doing LTO. +/// So, we need the same version/commit of the linker (gcc) and lto front-end binaries (lto1, +/// lto-wrapper, liblto_plugin.so). + +// FIXME(antoyo): the executables compiled with LTO are bigger than those compiled without LTO. +// Since it is the opposite for cg_llvm, check if this is normal. +// +// Maybe we embed the bitcode in the final binary? +// It doesn't look like we try to generate fat objects for the final binary. +// Check if the way we combine the object files make it keep the LTO sections on the final link. +// Maybe that's because the combined object files contain the IR (true) and the final link +// does not remove it? +// +// TODO(antoyo): for performance, check which optimizations the C++ frontend enables. 
+// +// Fix these warnings: +// /usr/bin/ld: warning: type of symbol `_RNvNvNvNtCs5JWOrf9uCus_5rayon11thread_pool19WORKER_THREAD_STATE7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o +// /usr/bin/ld: warning: type of symbol `_RNvNvNvNvNtNtNtCsAj5i4SGTR7_3std4sync4mpmc5waker17current_thread_id5DUMMY7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o +// /usr/bin/ld: warning: incremental linking of LTO and non-LTO objects; using -flinker-output=nolto-rel which will bypass whole program optimization + +use std::ffi::CString; +use std::fs::{self, File}; +use std::path::{Path, PathBuf}; + +use gccjit::OutputKind; +use object::read::archive::ArchiveFile; +use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule}; +use rustc_codegen_ssa::back::symbol_export; +use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput}; +use rustc_codegen_ssa::traits::*; +use rustc_codegen_ssa::{looks_like_rust_object_file, ModuleCodegen, ModuleKind}; +use rustc_data_structures::memmap::Mmap; +use rustc_errors::{FatalError, Handler}; +use rustc_hir::def_id::LOCAL_CRATE; +use rustc_middle::dep_graph::WorkProduct; +use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel}; +use rustc_session::config::{CrateType, Lto}; +use tempfile::{TempDir, tempdir}; + +use crate::back::write::save_temp_bitcode; +use crate::errors::{ + DynamicLinkingWithLTO, LtoBitcodeFromRlib, LtoDisallowed, LtoDylib, +}; +use crate::{GccCodegenBackend, GccContext, to_gcc_opt_level}; + +/// We keep track of the computed LTO cache keys from the previous +/// session to determine which CGUs we can reuse. +//pub const THIN_LTO_KEYS_INCR_COMP_FILE_NAME: &str = "thin-lto-past-keys.bin"; + +pub fn crate_type_allows_lto(crate_type: CrateType) -> bool { + match crate_type { + CrateType::Executable | CrateType::Dylib | CrateType::Staticlib | CrateType::Cdylib => true, + CrateType::Rlib | CrateType::ProcMacro => false, + } +} + +struct LtoData { + // TODO(antoyo): use symbols_below_threshold. + //symbols_below_threshold: Vec, + upstream_modules: Vec<(SerializedModule, CString)>, + tmp_path: TempDir, +} + +fn prepare_lto(cgcx: &CodegenContext, diag_handler: &Handler) -> Result { + let export_threshold = match cgcx.lto { + // We're just doing LTO for our one crate + Lto::ThinLocal => SymbolExportLevel::Rust, + + // We're doing LTO for the entire crate graph + Lto::Fat | Lto::Thin => symbol_export::crates_export_threshold(&cgcx.crate_types), + + Lto::No => panic!("didn't request LTO but we're doing LTO"), + }; + + let tmp_path = + match tempdir() { + Ok(tmp_path) => tmp_path, + Err(error) => { + eprintln!("Cannot create temporary directory: {}", error); + return Err(FatalError); + }, + }; + + let symbol_filter = &|&(ref name, info): &(String, SymbolExportInfo)| { + if info.level.is_below_threshold(export_threshold) || info.used { + Some(CString::new(name.as_str()).unwrap()) + } else { + None + } + }; + let exported_symbols = cgcx.exported_symbols.as_ref().expect("needs exported symbols for LTO"); + let mut symbols_below_threshold = { + let _timer = cgcx.prof.generic_activity("GCC_lto_generate_symbols_below_threshold"); + exported_symbols[&LOCAL_CRATE].iter().filter_map(symbol_filter).collect::>() + }; + info!("{} symbols to preserve in this crate", symbols_below_threshold.len()); + + // If we're performing LTO for the entire crate graph, then for each of our + // upstream dependencies, find the corresponding rlib and load the bitcode + // from the archive. 
+ // + // We save off all the bytecode and GCC module file path for later processing + // with either fat or thin LTO + let mut upstream_modules = Vec::new(); + if cgcx.lto != Lto::ThinLocal { + // Make sure we actually can run LTO + for crate_type in cgcx.crate_types.iter() { + if !crate_type_allows_lto(*crate_type) { + diag_handler.emit_err(LtoDisallowed); + return Err(FatalError); + } else if *crate_type == CrateType::Dylib { + if !cgcx.opts.unstable_opts.dylib_lto { + diag_handler.emit_err(LtoDylib); + return Err(FatalError); + } + } + } + + if cgcx.opts.cg.prefer_dynamic && !cgcx.opts.unstable_opts.dylib_lto { + diag_handler.emit_err(DynamicLinkingWithLTO); + return Err(FatalError); + } + + for &(cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() { + let exported_symbols = + cgcx.exported_symbols.as_ref().expect("needs exported symbols for LTO"); + { + let _timer = + cgcx.prof.generic_activity("GCC_lto_generate_symbols_below_threshold"); + symbols_below_threshold + .extend(exported_symbols[&cnum].iter().filter_map(symbol_filter)); + } + + let archive_data = unsafe { + Mmap::map(File::open(&path).expect("couldn't open rlib")) + .expect("couldn't map rlib") + }; + let archive = ArchiveFile::parse(&*archive_data).expect("wanted an rlib"); + let obj_files = archive + .members() + .filter_map(|child| { + child.ok().and_then(|c| { + std::str::from_utf8(c.name()).ok().map(|name| (name.trim(), c)) + }) + }) + .filter(|&(name, _)| looks_like_rust_object_file(name)); + for (name, child) in obj_files { + info!("adding bitcode from {}", name); + let path = tmp_path.path().join(name); + match save_as_file(child.data(&*archive_data).expect("corrupt rlib"), &path) { + Ok(()) => { + let buffer = ModuleBuffer::new(path); + let module = SerializedModule::Local(buffer); + upstream_modules.push((module, CString::new(name).unwrap())); + } + Err(e) => { + diag_handler.emit_err(e); + return Err(FatalError); + } + } + } + } + } + + Ok(LtoData { + //symbols_below_threshold, + upstream_modules, + tmp_path, + }) +} + +fn save_as_file(obj: &[u8], path: &Path) -> Result<(), LtoBitcodeFromRlib> { + fs::write(path, obj) + .map_err(|error| LtoBitcodeFromRlib { + gcc_err: format!("write object file to temp dir: {}", error) + }) +} + +/// Performs fat LTO by merging all modules into a single one and returning it +/// for further optimization. +pub(crate) fn run_fat( + cgcx: &CodegenContext, + modules: Vec>, + cached_modules: Vec<(SerializedModule, WorkProduct)>, +) -> Result, FatalError> { + let diag_handler = cgcx.create_diag_handler(); + let lto_data = prepare_lto(cgcx, &diag_handler)?; + /*let symbols_below_threshold = + lto_data.symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::>();*/ + fat_lto(cgcx, &diag_handler, modules, cached_modules, lto_data.upstream_modules, lto_data.tmp_path, + //&symbols_below_threshold, + ) +} + +fn fat_lto(cgcx: &CodegenContext, _diag_handler: &Handler, modules: Vec>, cached_modules: Vec<(SerializedModule, WorkProduct)>, mut serialized_modules: Vec<(SerializedModule, CString)>, tmp_path: TempDir, + //symbols_below_threshold: &[*const libc::c_char], +) -> Result, FatalError> { + let _timer = cgcx.prof.generic_activity("GCC_fat_lto_build_monolithic_module"); + info!("going for a fat lto"); + + // Sort out all our lists of incoming modules into two lists. + // + // * `serialized_modules` (also and argument to this function) contains all + // modules that are serialized in-memory. 
+ // * `in_memory` contains modules which are already parsed and in-memory, + // such as from multi-CGU builds. + // + // All of `cached_modules` (cached from previous incremental builds) can + // immediately go onto the `serialized_modules` modules list and then we can + // split the `modules` array into these two lists. + let mut in_memory = Vec::new(); + serialized_modules.extend(cached_modules.into_iter().map(|(buffer, wp)| { + info!("pushing cached module {:?}", wp.cgu_name); + (buffer, CString::new(wp.cgu_name).unwrap()) + })); + for module in modules { + match module { + FatLtoInput::InMemory(m) => in_memory.push(m), + FatLtoInput::Serialized { name, buffer } => { + info!("pushing serialized module {:?}", name); + let buffer = SerializedModule::Local(buffer); + serialized_modules.push((buffer, CString::new(name).unwrap())); + } + } + } + + // Find the "costliest" module and merge everything into that codegen unit. + // All the other modules will be serialized and reparsed into the new + // context, so this hopefully avoids serializing and parsing the largest + // codegen unit. + // + // Additionally use a regular module as the base here to ensure that various + // file copy operations in the backend work correctly. The only other kind + // of module here should be an allocator one, and if your crate is smaller + // than the allocator module then the size doesn't really matter anyway. + let costliest_module = in_memory + .iter() + .enumerate() + .filter(|&(_, module)| module.kind == ModuleKind::Regular) + .map(|(i, _module)| { + //let cost = unsafe { llvm::LLVMRustModuleCost(module.module_llvm.llmod()) }; + // TODO(antoyo): compute the cost of a module if GCC allows this. + (0, i) + }) + .max(); + + // If we found a costliest module, we're good to go. Otherwise all our + // inputs were serialized which could happen in the case, for example, that + // all our inputs were incrementally reread from the cache and we're just + // re-executing the LTO passes. If that's the case deserialize the first + // module and create a linker with it. + let mut module: ModuleCodegen = match costliest_module { + Some((_cost, i)) => in_memory.remove(i), + None => { + unimplemented!("Incremental"); + /*assert!(!serialized_modules.is_empty(), "must have at least one serialized module"); + let (buffer, name) = serialized_modules.remove(0); + info!("no in-memory regular modules to choose from, parsing {:?}", name); + ModuleCodegen { + module_llvm: GccContext::parse(cgcx, &name, buffer.data(), diag_handler)?, + name: name.into_string().unwrap(), + kind: ModuleKind::Regular, + }*/ + } + }; + let mut serialized_bitcode = Vec::new(); + { + info!("using {:?} as a base module", module.name); + + // We cannot load and merge GCC contexts in memory like cg_llvm is doing. + // Instead, we combine the object files into a single object file. + for module in in_memory { + let path = tmp_path.path().to_path_buf().join(&module.name); + let path = path.to_str().expect("path"); + let context = &module.module_llvm.context; + let config = cgcx.config(module.kind); + // NOTE: we need to set the optimization level here in order for LTO to do its job. 
+ context.set_optimization_level(to_gcc_opt_level(config.opt_level)); + context.add_command_line_option("-flto=auto"); + context.add_command_line_option("-flto-partition=one"); + context.compile_to_file(OutputKind::ObjectFile, path); + let buffer = ModuleBuffer::new(PathBuf::from(path)); + let llmod_id = CString::new(&module.name[..]).unwrap(); + serialized_modules.push((SerializedModule::Local(buffer), llmod_id)); + } + // Sort the modules to ensure we produce deterministic results. + serialized_modules.sort_by(|module1, module2| module1.1.cmp(&module2.1)); + + // We add the object files and save in should_combine_object_files that we should combine + // them into a single object file when compiling later. + for (bc_decoded, name) in serialized_modules { + let _timer = cgcx + .prof + .generic_activity_with_arg_recorder("GCC_fat_lto_link_module", |recorder| { + recorder.record_arg(format!("{:?}", name)) + }); + info!("linking {:?}", name); + match bc_decoded { + SerializedModule::Local(ref module_buffer) => { + module.module_llvm.should_combine_object_files = true; + module.module_llvm.context.add_driver_option(module_buffer.0.to_str().expect("path")); + }, + SerializedModule::FromRlib(_) => unimplemented!("from rlib"), + SerializedModule::FromUncompressedFile(_) => unimplemented!("from uncompressed file"), + } + serialized_bitcode.push(bc_decoded); + } + save_temp_bitcode(cgcx, &module, "lto.input"); + + // Internalize everything below threshold to help strip out more modules and such. + /*unsafe { + let ptr = symbols_below_threshold.as_ptr(); + llvm::LLVMRustRunRestrictionPass( + llmod, + ptr as *const *const libc::c_char, + symbols_below_threshold.len() as libc::size_t, + );*/ + save_temp_bitcode(cgcx, &module, "lto.after-restriction"); + //} + } + + // NOTE: save the temporary directory used by LTO so that it gets deleted after linking instead + // of now. 
+ module.module_llvm.temp_dir = Some(tmp_path); + + Ok(LtoModuleCodegen::Fat { module, _serialized_bitcode: serialized_bitcode }) +} + +pub struct ModuleBuffer(PathBuf); + +impl ModuleBuffer { + pub fn new(path: PathBuf) -> ModuleBuffer { + ModuleBuffer(path) + } +} + +impl ModuleBufferMethods for ModuleBuffer { + fn data(&self) -> &[u8] { + unimplemented!("data not needed for GCC codegen"); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1 +1,2 @@ +pub mod lto; pub mod write; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/write.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/write.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/write.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/back/write.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,27 +2,71 @@ use gccjit::OutputKind; use rustc_codegen_ssa::{CompiledModule, ModuleCodegen}; -use rustc_codegen_ssa::back::write::{CodegenContext, EmitObj, ModuleConfig}; +use rustc_codegen_ssa::back::link::ensure_removed; +use rustc_codegen_ssa::back::write::{BitcodeSection, CodegenContext, EmitObj, ModuleConfig}; use rustc_errors::Handler; +use rustc_fs_util::link_or_copy; use rustc_session::config::OutputType; use rustc_span::fatal_error::FatalError; use rustc_target::spec::SplitDebuginfo; use crate::{GccCodegenBackend, GccContext}; +use crate::errors::CopyBitcode; -pub(crate) unsafe fn codegen(cgcx: &CodegenContext, _diag_handler: &Handler, module: ModuleCodegen, config: &ModuleConfig) -> Result { - let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_codegen", &*module.name); +pub(crate) unsafe fn codegen(cgcx: &CodegenContext, diag_handler: &Handler, module: ModuleCodegen, config: &ModuleConfig) -> Result { + let _timer = cgcx.prof.generic_activity_with_arg("GCC_module_codegen", &*module.name); { let context = &module.module_llvm.context; let module_name = module.name.clone(); + + let should_combine_object_files = module.module_llvm.should_combine_object_files; + let module_name = Some(&module_name[..]); - let _bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name); + // NOTE: Only generate object files with GIMPLE when this environment variable is set for + // now because this requires a particular setup (same gcc/lto1/lto-wrapper commit as libgccjit). 
+ let fat_lto = env::var("EMBED_LTO_BITCODE").as_deref() == Ok("1"); + + let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name); let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name); - if config.bitcode_needed() { + if config.bitcode_needed() && fat_lto { + let _timer = cgcx + .prof + .generic_activity_with_arg("GCC_module_codegen_make_bitcode", &*module.name); + // TODO(antoyo) + /*if let Some(bitcode_filename) = bc_out.file_name() { + cgcx.prof.artifact_size( + "llvm_bitcode", + bitcode_filename.to_string_lossy(), + data.len() as u64, + ); + }*/ + + if config.emit_bc || config.emit_obj == EmitObj::Bitcode { + let _timer = cgcx + .prof + .generic_activity_with_arg("GCC_module_codegen_emit_bitcode", &*module.name); + context.add_command_line_option("-flto=auto"); + context.add_command_line_option("-flto-partition=one"); + context.compile_to_file(OutputKind::ObjectFile, bc_out.to_str().expect("path to str")); + } + + if config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full) { + let _timer = cgcx + .prof + .generic_activity_with_arg("GCC_module_codegen_embed_bitcode", &*module.name); + // TODO(antoyo): maybe we should call embed_bitcode to have the proper iOS fixes? + //embed_bitcode(cgcx, llcx, llmod, &config.bc_cmdline, data); + + context.add_command_line_option("-flto=auto"); + context.add_command_line_option("-flto-partition=one"); + context.add_command_line_option("-ffat-lto-objects"); + // TODO(antoyo): Send -plugin/usr/lib/gcc/x86_64-pc-linux-gnu/11.1.0/liblto_plugin.so to linker (this should be done when specifying the appropriate rustc cli argument). + context.compile_to_file(OutputKind::ObjectFile, bc_out.to_str().expect("path to str")); + } } if config.emit_ir { @@ -32,7 +76,7 @@ if config.emit_asm { let _timer = cgcx .prof - .generic_activity_with_arg("LLVM_module_codegen_emit_asm", &*module.name); + .generic_activity_with_arg("GCC_module_codegen_emit_asm", &*module.name); let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); context.compile_to_file(OutputKind::Assembler, path.to_str().expect("path to str")); } @@ -41,7 +85,7 @@ EmitObj::ObjectCode(_) => { let _timer = cgcx .prof - .generic_activity_with_arg("LLVM_module_codegen_emit_obj", &*module.name); + .generic_activity_with_arg("GCC_module_codegen_emit_obj", &*module.name); if env::var("CG_GCCJIT_DUMP_MODULE_NAMES").as_deref() == Ok("1") { println!("Module {}", module.name); } @@ -60,11 +104,36 @@ context.set_debug_info(true); context.dump_to_file(path, true); } - context.compile_to_file(OutputKind::ObjectFile, obj_out.to_str().expect("path to str")); + if should_combine_object_files && fat_lto { + context.add_command_line_option("-flto=auto"); + context.add_command_line_option("-flto-partition=one"); + + context.add_driver_option("-Wl,-r"); + // NOTE: we need -nostdlib, otherwise, we get the following error: + // /usr/bin/ld: cannot find -lgcc_s: No such file or directory + context.add_driver_option("-nostdlib"); + // NOTE: without -fuse-linker-plugin, we get the following error: + // lto1: internal compiler error: decompressed stream: Destination buffer is too small + context.add_driver_option("-fuse-linker-plugin"); + + // NOTE: this doesn't actually generate an executable. With the above flags, it combines the .o files together in another .o. 
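// Taken together, the options added above make this compile_to_file call behave roughly
// like a relocatable GCC driver link (a sketch of the equivalent invocation, object file
// names illustrative):
//     gcc -flto=auto -flto-partition=one -fuse-linker-plugin -nostdlib \
//         -Wl,-r module1.o module2.o -o <obj_out>
// i.e. the GIMPLE-carrying object files are merged into a single object file rather than
// linked into an executable.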
+ context.compile_to_file(OutputKind::Executable, obj_out.to_str().expect("path to str")); + } + else { + context.compile_to_file(OutputKind::ObjectFile, obj_out.to_str().expect("path to str")); + } } EmitObj::Bitcode => { - // TODO(antoyo) + debug!("copying bitcode {:?} to obj {:?}", bc_out, obj_out); + if let Err(err) = link_or_copy(&bc_out, &obj_out) { + diag_handler.emit_err(CopyBitcode { err }); + } + + if !config.emit_bc { + debug!("removing_bitcode {:?}", bc_out); + ensure_removed(diag_handler, &bc_out); + } } EmitObj::None => {} @@ -82,3 +151,18 @@ pub(crate) fn link(_cgcx: &CodegenContext, _diag_handler: &Handler, mut _modules: Vec>) -> Result, FatalError> { unimplemented!(); } + +pub(crate) fn save_temp_bitcode(cgcx: &CodegenContext, _module: &ModuleCodegen, _name: &str) { + if !cgcx.save_temps { + return; + } + unimplemented!(); + /*unsafe { + let ext = format!("{}.bc", name); + let cgu = Some(&module.name[..]); + let path = cgcx.output_filenames.temp_path_ext(&ext, cgu); + let cstr = path_to_c_string(&path); + let llmod = module.module_llvm.llmod(); + llvm::LLVMWriteBitcodeToFile(llmod, cstr.as_ptr()); + }*/ +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/base.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,4 @@ +use std::collections::HashSet; use std::env; use std::time::Instant; @@ -18,6 +19,7 @@ use rustc_session::config::DebugInfo; use rustc_span::Symbol; +use crate::{LockedTargetInfo, gcc_util}; use crate::GccContext; use crate::builder::Builder; use crate::context::CodegenCx; @@ -50,6 +52,7 @@ pub fn linkage_to_gcc(linkage: Linkage) -> FunctionType { match linkage { Linkage::External => FunctionType::Exported, + // TODO(antoyo): set the attribute externally_visible. Linkage::AvailableExternally => FunctionType::Extern, Linkage::LinkOnceAny => unimplemented!(), Linkage::LinkOnceODR => unimplemented!(), @@ -63,7 +66,7 @@ } } -pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, supports_128bit_integers: bool) -> (ModuleCodegen, u64) { +pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, target_info: LockedTargetInfo) -> (ModuleCodegen, u64) { let prof_timer = tcx.prof.generic_activity("codegen_module"); let start_time = Instant::now(); @@ -71,7 +74,7 @@ let (module, _) = tcx.dep_graph.with_task( dep_node, tcx, - (cgu_name, supports_128bit_integers), + (cgu_name, target_info), module_codegen, Some(dep_graph::hash_result), ); @@ -82,38 +85,29 @@ // the time we needed for codegenning it. let cost = time_to_codegen.as_secs() * 1_000_000_000 + time_to_codegen.subsec_nanos() as u64; - fn module_codegen(tcx: TyCtxt<'_>, (cgu_name, supports_128bit_integers): (Symbol, bool)) -> ModuleCodegen { + fn module_codegen(tcx: TyCtxt<'_>, (cgu_name, target_info): (Symbol, LockedTargetInfo)) -> ModuleCodegen { let cgu = tcx.codegen_unit(cgu_name); // Instantiate monomorphizations without filling out definitions yet... - //let llvm_module = ModuleLlvm::new(tcx, &cgu_name.as_str()); let context = Context::default(); context.add_command_line_option("-fexceptions"); context.add_driver_option("-fexceptions"); - // TODO(antoyo): only set on x86 platforms. - context.add_command_line_option("-masm=intel"); - // TODO(antoyo): only add the following cli argument if the feature is supported. 
- context.add_command_line_option("-msse2"); - context.add_command_line_option("-mavx2"); - // FIXME(antoyo): the following causes an illegal instruction on vmovdqu64 in std_example on my CPU. - // Only add if the CPU supports it. - context.add_command_line_option("-msha"); - context.add_command_line_option("-mpclmul"); - context.add_command_line_option("-mfma"); - context.add_command_line_option("-mfma4"); - context.add_command_line_option("-m64"); - context.add_command_line_option("-mbmi"); - context.add_command_line_option("-mgfni"); - //context.add_command_line_option("-mavxvnni"); // The CI doesn't support this option. - context.add_command_line_option("-mf16c"); - context.add_command_line_option("-maes"); - context.add_command_line_option("-mxsavec"); - context.add_command_line_option("-mbmi2"); - context.add_command_line_option("-mrtm"); - context.add_command_line_option("-mvaes"); - context.add_command_line_option("-mvpclmulqdq"); - context.add_command_line_option("-mavx"); + let disabled_features: HashSet<_> = tcx.sess.opts.cg.target_feature.split(',') + .filter(|feature| feature.starts_with('-')) + .map(|string| &string[1..]) + .collect(); + + if tcx.sess.target.arch == "x86" || tcx.sess.target.arch == "x86_64" { + context.add_command_line_option("-masm=intel"); + } + + if !disabled_features.contains("avx") && tcx.sess.target.arch == "x86_64" { + // NOTE: we always enable AVX because the equivalent of llvm.x86.sse2.cmp.pd in GCC for + // SSE2 is multiple builtins, so we use the AVX __builtin_ia32_cmppd instead. + // FIXME(antoyo): use the proper builtins for llvm.x86.sse2.cmp.pd and similar. + context.add_command_line_option("-mavx"); + } for arg in &tcx.sess.opts.cg.llvm_args { context.add_command_line_option(arg); @@ -127,6 +121,16 @@ // NOTE: Rust relies on LLVM doing wrapping on overflow. context.add_command_line_option("-fwrapv"); + if tcx.sess.relocation_model() == rustc_target::spec::RelocModel::Static { + context.add_command_line_option("-mcmodel=kernel"); + context.add_command_line_option("-fno-pie"); + } + + let target_cpu = gcc_util::target_cpu(tcx.sess); + if target_cpu != "generic" { + context.add_command_line_option(&format!("-march={}", target_cpu)); + } + if tcx.sess.opts.unstable_opts.function_sections.unwrap_or(tcx.sess.target.function_sections) { context.add_command_line_option("-ffunction-sections"); context.add_command_line_option("-fdata-sections"); @@ -135,8 +139,14 @@ if env::var("CG_GCCJIT_DUMP_RTL").as_deref() == Ok("1") { context.add_command_line_option("-fdump-rtl-vregs"); } + if env::var("CG_GCCJIT_DUMP_RTL_ALL").as_deref() == Ok("1") { + context.add_command_line_option("-fdump-rtl-all"); + } if env::var("CG_GCCJIT_DUMP_TREE_ALL").as_deref() == Ok("1") { - context.add_command_line_option("-fdump-tree-all"); + context.add_command_line_option("-fdump-tree-all-eh"); + } + if env::var("CG_GCCJIT_DUMP_IPA_ALL").as_deref() == Ok("1") { + context.add_command_line_option("-fdump-ipa-all-eh"); } if env::var("CG_GCCJIT_DUMP_CODE").as_deref() == Ok("1") { context.set_dump_code_on_compile(true); @@ -152,11 +162,15 @@ context.set_keep_intermediates(true); } + if env::var("CG_GCCJIT_VERBOSE").as_deref() == Ok("1") { + context.add_driver_option("-v"); + } + // NOTE: The codegen generates unrechable blocks. 
context.set_allow_unreachable_blocks(true); { - let cx = CodegenCx::new(&context, cgu, tcx, supports_128bit_integers); + let cx = CodegenCx::new(&context, cgu, tcx, target_info.supports_128bit_int()); let mono_items = cgu.items_in_deterministic_order(tcx); for &(mono_item, data) in &mono_items { @@ -181,7 +195,9 @@ ModuleCodegen { name: cgu_name.to_string(), module_llvm: GccContext { - context + context, + should_combine_object_files: false, + temp_dir: None, }, kind: ModuleKind::Regular, } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/builder.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/builder.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/builder.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/builder.rs 2023-12-21 16:55:28.000000000 +0000 @@ -247,16 +247,9 @@ } fn check_store(&mut self, val: RValue<'gcc>, ptr: RValue<'gcc>) -> RValue<'gcc> { - let dest_ptr_ty = self.cx.val_ty(ptr).make_pointer(); // TODO(antoyo): make sure make_pointer() is okay here. let stored_ty = self.cx.val_ty(val); let stored_ptr_ty = self.cx.type_ptr_to(stored_ty); - - if dest_ptr_ty == stored_ptr_ty { - ptr - } - else { - self.bitcast(ptr, stored_ptr_ty) - } + self.bitcast(ptr, stored_ptr_ty) } pub fn current_func(&self) -> Function<'gcc> { @@ -500,7 +493,7 @@ } #[cfg(not(feature="master"))] - fn invoke(&mut self, typ: Type<'gcc>, fn_attrs: &CodegenFnAttrs, fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>, func: RValue<'gcc>, args: &[RValue<'gcc>], then: Block<'gcc>, catch: Block<'gcc>, _funclet: Option<&Funclet>) -> RValue<'gcc> { + fn invoke(&mut self, typ: Type<'gcc>, fn_attrs: Option<&CodegenFnAttrs>, fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>, func: RValue<'gcc>, args: &[RValue<'gcc>], then: Block<'gcc>, catch: Block<'gcc>, _funclet: Option<&Funclet>) -> RValue<'gcc> { let call_site = self.call(typ, fn_attrs, None, func, args, None); let condition = self.context.new_rvalue_from_int(self.bool_type, 1); self.llbb().end_with_conditional(None, condition, then, catch); @@ -663,7 +656,7 @@ } fn unchecked_sadd(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> { - a + b + self.gcc_add(a, b) } fn unchecked_uadd(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> { @@ -671,7 +664,7 @@ } fn unchecked_ssub(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> { - a - b + self.gcc_sub(a, b) } fn unchecked_usub(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> { @@ -680,11 +673,11 @@ } fn unchecked_smul(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> { - a * b + self.gcc_mul(a, b) } fn unchecked_umul(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> { - a * b + self.gcc_mul(a, b) } fn fadd_fast(&mut self, lhs: RValue<'gcc>, rhs: RValue<'gcc>) -> RValue<'gcc> { @@ -758,9 +751,8 @@ loaded_value.to_rvalue() } - fn volatile_load(&mut self, _ty: Type<'gcc>, ptr: RValue<'gcc>) -> RValue<'gcc> { - // TODO(antoyo): use ty. 
- let ptr = self.context.new_cast(None, ptr, ptr.get_type().make_volatile()); + fn volatile_load(&mut self, ty: Type<'gcc>, ptr: RValue<'gcc>) -> RValue<'gcc> { + let ptr = self.context.new_cast(None, ptr, ty.make_volatile().make_pointer()); ptr.dereference(None).to_rvalue() } @@ -916,7 +908,9 @@ .add_eval(None, self.context.new_call(None, atomic_store, &[ptr, value, ordering])); } - fn gep(&mut self, _typ: Type<'gcc>, ptr: RValue<'gcc>, indices: &[RValue<'gcc>]) -> RValue<'gcc> { + fn gep(&mut self, typ: Type<'gcc>, ptr: RValue<'gcc>, indices: &[RValue<'gcc>]) -> RValue<'gcc> { + // NOTE: due to opaque pointers now being used, we need to cast here. + let ptr = self.context.new_cast(None, ptr, typ.make_pointer()); let ptr_type = ptr.get_type(); let mut pointee_type = ptr.get_type(); // NOTE: we cannot use array indexing here like in inbounds_gep because array indexing is @@ -927,6 +921,12 @@ // require dereferencing the pointer. for index in indices { pointee_type = pointee_type.get_pointee().expect("pointee type"); + #[cfg(feature="master")] + let pointee_size = { + let size = self.cx.context.new_sizeof(pointee_type); + self.context.new_cast(None, size, index.get_type()) + }; + #[cfg(not(feature="master"))] let pointee_size = self.context.new_rvalue_from_int(index.get_type(), pointee_type.get_size() as i32); result = result + self.gcc_int_cast(*index * pointee_size, self.sizet_type); } @@ -1420,7 +1420,7 @@ self.cx } - fn do_not_inline(&mut self, _llret: RValue<'gcc>) { + fn apply_attrs_to_cleanup_callsite(&mut self, _llret: RValue<'gcc>) { // FIXME(bjorn3): implement } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/common.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/common.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/common.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/common.rs 2023-12-21 16:55:28.000000000 +0000 @@ -424,35 +424,35 @@ } fn is_i8(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool { - self.unqualified() == cx.i8_type + self.is_compatible_with(cx.i8_type) } fn is_u8(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool { - self.unqualified() == cx.u8_type + self.is_compatible_with(cx.u8_type) } fn is_i16(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool { - self.unqualified() == cx.i16_type + self.is_compatible_with(cx.i16_type) } fn is_u16(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool { - self.unqualified() == cx.u16_type + self.is_compatible_with(cx.u16_type) } fn is_i32(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool { - self.unqualified() == cx.i32_type + self.is_compatible_with(cx.i32_type) } fn is_u32(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool { - self.unqualified() == cx.u32_type + self.is_compatible_with(cx.u32_type) } fn is_i64(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool { - self.unqualified() == cx.i64_type + self.is_compatible_with(cx.i64_type) } fn is_u64(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool { - self.unqualified() == cx.u64_type + self.is_compatible_with(cx.u64_type) } fn is_i128(&self, cx: &CodegenCx<'gcc, 'tcx>) -> bool { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/context.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/context.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/context.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/context.rs 2023-12-21 16:55:28.000000000 +0000 @@ -20,6 +20,7 @@ use rustc_target::spec::{HasTargetSpec, 
Target, TlsModel}; use crate::callee::get_fn; +use crate::common::SignType; #[derive(Clone)] pub struct FuncSig<'gcc> { @@ -129,29 +130,57 @@ pub fn new(context: &'gcc Context<'gcc>, codegen_unit: &'tcx CodegenUnit<'tcx>, tcx: TyCtxt<'tcx>, supports_128bit_integers: bool) -> Self { let check_overflow = tcx.sess.overflow_checks(); - let i8_type = context.new_c_type(CType::Int8t); - let i16_type = context.new_c_type(CType::Int16t); - let i32_type = context.new_c_type(CType::Int32t); - let i64_type = context.new_c_type(CType::Int64t); - let u8_type = context.new_c_type(CType::UInt8t); - let u16_type = context.new_c_type(CType::UInt16t); - let u32_type = context.new_c_type(CType::UInt32t); - let u64_type = context.new_c_type(CType::UInt64t); + let create_type = |ctype, rust_type| { + let layout = tcx.layout_of(ParamEnv::reveal_all().and(rust_type)).unwrap(); + let align = layout.align.abi.bytes(); + #[cfg(feature="master")] + { + context.new_c_type(ctype).get_aligned(align) + } + #[cfg(not(feature="master"))] + { + // Since libgccjit 12 doesn't contain the fix to compare aligned integer types, + // only align u128 and i128. + if layout.ty.int_size_and_signed(tcx).0.bytes() == 16 { + context.new_c_type(ctype).get_aligned(align) + } + else { + context.new_c_type(ctype) + } + } + }; + + let i8_type = create_type(CType::Int8t, tcx.types.i8); + let i16_type = create_type(CType::Int16t, tcx.types.i16); + let i32_type = create_type(CType::Int32t, tcx.types.i32); + let i64_type = create_type(CType::Int64t, tcx.types.i64); + let u8_type = create_type(CType::UInt8t, tcx.types.u8); + let u16_type = create_type(CType::UInt16t, tcx.types.u16); + let u32_type = create_type(CType::UInt32t, tcx.types.u32); + let u64_type = create_type(CType::UInt64t, tcx.types.u64); let (i128_type, u128_type) = if supports_128bit_integers { - let i128_type = context.new_c_type(CType::Int128t).get_aligned(8); // TODO(antoyo): should the alignment be hard-coded?; - let u128_type = context.new_c_type(CType::UInt128t).get_aligned(8); // TODO(antoyo): should the alignment be hard-coded?; + let i128_type = create_type(CType::Int128t, tcx.types.i128); + let u128_type = create_type(CType::UInt128t, tcx.types.u128); (i128_type, u128_type) } else { - let i128_type = context.new_array_type(None, i64_type, 2); - let u128_type = context.new_array_type(None, u64_type, 2); + /*let layout = tcx.layout_of(ParamEnv::reveal_all().and(tcx.types.i128)).unwrap(); + let i128_align = layout.align.abi.bytes(); + let layout = tcx.layout_of(ParamEnv::reveal_all().and(tcx.types.u128)).unwrap(); + let u128_align = layout.align.abi.bytes();*/ + + // TODO(antoyo): re-enable the alignment when libgccjit fixed the issue in + // gcc_jit_context_new_array_constructor (it should not use reinterpret_cast). + let i128_type = context.new_array_type(None, i64_type, 2)/*.get_aligned(i128_align)*/; + let u128_type = context.new_array_type(None, u64_type, 2)/*.get_aligned(u128_align)*/; (i128_type, u128_type) }; let tls_model = to_gcc_tls_mode(tcx.sess.tls_model()); + // TODO(antoyo): set alignment on those types as well. 
let float_type = context.new_type::(); let double_type = context.new_type::(); @@ -167,14 +196,10 @@ let ulonglong_type = context.new_c_type(CType::ULongLong); let sizet_type = context.new_c_type(CType::SizeT); - let isize_type = context.new_c_type(CType::LongLong); - let usize_type = context.new_c_type(CType::ULongLong); + let usize_type = sizet_type; + let isize_type = usize_type; let bool_type = context.new_type::(); - // TODO(antoyo): only have those assertions on x86_64. - assert_eq!(isize_type.get_size(), i64_type.get_size()); - assert_eq!(usize_type.get_size(), u64_type.get_size()); - let mut functions = FxHashMap::default(); let builtins = [ "__builtin_unreachable", "abort", "__builtin_expect", "__builtin_add_overflow", "__builtin_mul_overflow", @@ -192,7 +217,7 @@ functions.insert(builtin.to_string(), context.get_builtin_function(builtin)); } - Self { + let mut cx = Self { check_overflow, codegen_unit, context, @@ -254,7 +279,10 @@ pointee_infos: Default::default(), structs_as_pointer: Default::default(), cleanup_blocks: Default::default(), - } + }; + // TODO(antoyo): instead of doing this, add SsizeT to libgccjit. + cx.isize_type = usize_type.to_signed(&cx); + cx } pub fn rvalue_as_function(&self, value: RValue<'gcc>) -> Function<'gcc> { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/declare.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/declare.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/declare.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/declare.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,10 +1,12 @@ use gccjit::{Function, FunctionType, GlobalKind, LValue, RValue, Type}; +#[cfg(feature="master")] +use gccjit::{FnAttribute, ToRValue}; use rustc_codegen_ssa::traits::BaseTypeMethods; use rustc_middle::ty::Ty; use rustc_span::Symbol; use rustc_target::abi::call::FnAbi; -use crate::abi::FnAbiGccExt; +use crate::abi::{FnAbiGcc, FnAbiGccExt}; use crate::context::CodegenCx; use crate::intrinsic::llvm; @@ -78,9 +80,20 @@ } pub fn declare_fn(&self, name: &str, fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> Function<'gcc> { - let (return_type, params, variadic, on_stack_param_indices) = fn_abi.gcc_type(self); - let func = declare_raw_fn(self, name, () /*fn_abi.llvm_cconv()*/, return_type, ¶ms, variadic); + let FnAbiGcc { + return_type, + arguments_type, + is_c_variadic, + on_stack_param_indices, + #[cfg(feature="master")] + fn_attributes, + } = fn_abi.gcc_type(self); + let func = declare_raw_fn(self, name, () /*fn_abi.llvm_cconv()*/, return_type, &arguments_type, is_c_variadic); self.on_stack_function_params.borrow_mut().insert(func, on_stack_param_indices); + #[cfg(feature="master")] + for fn_attr in fn_attributes { + func.add_attribute(fn_attr); + } func } @@ -114,6 +127,44 @@ .collect(); let func = cx.context.new_function(None, cx.linkage.get(), return_type, ¶ms, mangle_name(name), variadic); cx.functions.borrow_mut().insert(name.to_string(), func); + + #[cfg(feature="master")] + if name == "rust_eh_personality" { + // NOTE: GCC will sometimes change the personality function set on a function from + // rust_eh_personality to __gcc_personality_v0 as an optimization. + // As such, we need to create a weak alias from __gcc_personality_v0 to + // rust_eh_personality in order to avoid a linker error. + // This needs to be weak in order to still allow using the standard + // __gcc_personality_v0 when the linking to it. 
+ // Since aliases don't work (maybe because of a bug in LTO partitioning?), we + // create a wrapper function that calls rust_eh_personality. + + let params: Vec<_> = param_types.into_iter().enumerate() + .map(|(index, param)| cx.context.new_parameter(None, *param, &format!("param{}", index))) // TODO(antoyo): set name. + .collect(); + let gcc_func = cx.context.new_function(None, FunctionType::Exported, return_type, ¶ms, "__gcc_personality_v0", variadic); + + // We need a normal extern function for the crates that access rust_eh_personality + // without defining it, otherwise we'll get a compiler error. + // + // For the crate defining it, that needs to be a weak alias instead. + gcc_func.add_attribute(FnAttribute::Weak); + + let block = gcc_func.new_block("start"); + let mut args = vec![]; + for param in ¶ms { + args.push(param.to_rvalue()); + } + let call = cx.context.new_call(None, func, &args); + if return_type == cx.type_void() { + block.add_eval(None, call); + block.end_with_void_return(None); + } + else { + block.end_with_return(None, call); + } + } + func }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,8 +1,36 @@ -use rustc_errors::{DiagnosticArgValue, IntoDiagnosticArg}; -use rustc_macros::Diagnostic; +use rustc_errors::{ + DiagnosticArgValue, DiagnosticBuilder, ErrorGuaranteed, Handler, IntoDiagnostic, IntoDiagnosticArg, +}; +use rustc_macros::{Diagnostic, Subdiagnostic}; use rustc_span::Span; use std::borrow::Cow; +use crate::fluent_generated as fluent; + +#[derive(Diagnostic)] +#[diag(codegen_gcc_unknown_ctarget_feature_prefix)] +#[note] +pub(crate) struct UnknownCTargetFeaturePrefix<'a> { + pub feature: &'a str, +} + +#[derive(Diagnostic)] +#[diag(codegen_gcc_unknown_ctarget_feature)] +#[note] +pub(crate) struct UnknownCTargetFeature<'a> { + pub feature: &'a str, + #[subdiagnostic] + pub rust_feature: PossibleFeature<'a>, +} + +#[derive(Subdiagnostic)] +pub(crate) enum PossibleFeature<'a> { + #[help(codegen_gcc_possible_feature)] + Some { rust_feature: &'a str }, + #[help(codegen_gcc_consider_filing_feature_request)] + None, +} + struct ExitCode(Option); impl IntoDiagnosticArg for ExitCode { @@ -40,3 +68,58 @@ pub span: Span, pub features: String, } + +#[derive(Diagnostic)] +#[diag(codegen_gcc_copy_bitcode)] +pub(crate) struct CopyBitcode { + pub err: std::io::Error, +} + +#[derive(Diagnostic)] +#[diag(codegen_gcc_dynamic_linking_with_lto)] +#[note] +pub(crate) struct DynamicLinkingWithLTO; + +#[derive(Diagnostic)] +#[diag(codegen_gcc_load_bitcode)] +pub(crate) struct LoadBitcode { + name: String, +} + +#[derive(Diagnostic)] +#[diag(codegen_gcc_lto_disallowed)] +pub(crate) struct LtoDisallowed; + +#[derive(Diagnostic)] +#[diag(codegen_gcc_lto_dylib)] +pub(crate) struct LtoDylib; + +#[derive(Diagnostic)] +#[diag(codegen_gcc_lto_bitcode_from_rlib)] +pub(crate) struct LtoBitcodeFromRlib { + pub gcc_err: String, +} + +pub(crate) struct TargetFeatureDisableOrEnable<'a> { + pub features: &'a [&'a str], + pub span: Option, + pub missing_features: Option, +} + +#[derive(Subdiagnostic)] +#[help(codegen_gcc_missing_features)] +pub(crate) struct MissingFeatures; + +impl IntoDiagnostic<'_, ErrorGuaranteed> for TargetFeatureDisableOrEnable<'_> { + fn 
into_diagnostic(self, sess: &'_ Handler) -> DiagnosticBuilder<'_, ErrorGuaranteed> { + let mut diag = sess.struct_err(fluent::codegen_gcc_target_feature_disable_or_enable); + if let Some(span) = self.span { + diag.set_span(span); + }; + if let Some(missing_features) = self.missing_features { + diag.subdiagnostic(missing_features); + } + diag.set_arg("features", self.features.join(", ")); + diag + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/gcc_util.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/gcc_util.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/gcc_util.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/gcc_util.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,230 @@ +#[cfg(feature="master")] +use gccjit::Context; +use smallvec::{smallvec, SmallVec}; + +use rustc_codegen_ssa::target_features::{ + supported_target_features, tied_target_features, RUSTC_SPECIFIC_FEATURES, +}; +use rustc_data_structures::fx::FxHashMap; +use rustc_middle::bug; +use rustc_session::Session; + +use crate::errors::{PossibleFeature, TargetFeatureDisableOrEnable, UnknownCTargetFeature, UnknownCTargetFeaturePrefix}; + +/// The list of GCC features computed from CLI flags (`-Ctarget-cpu`, `-Ctarget-feature`, +/// `--target` and similar). +pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec { + // Features that come earlier are overridden by conflicting features later in the string. + // Typically we'll want more explicit settings to override the implicit ones, so: + // + // * Features from -Ctarget-cpu=*; are overridden by [^1] + // * Features implied by --target; are overridden by + // * Features from -Ctarget-feature; are overridden by + // * function specific features. + // + // [^1]: target-cpu=native is handled here, other target-cpu values are handled implicitly + // through GCC march implementation. + // + // FIXME(nagisa): it isn't clear what's the best interaction between features implied by + // `-Ctarget-cpu` and `--target` are. On one hand, you'd expect CLI arguments to always + // override anything that's implicit, so e.g. when there's no `--target` flag, features implied + // the host target are overridden by `-Ctarget-cpu=*`. On the other hand, what about when both + // `--target` and `-Ctarget-cpu=*` are specified? Both then imply some target features and both + // flags are specified by the user on the CLI. It isn't as clear-cut which order of precedence + // should be taken in cases like these. + let mut features = vec![]; + + // Features implied by an implicit or explicit `--target`. + features.extend( + sess.target + .features + .split(',') + .filter(|v| !v.is_empty() && backend_feature_name(v).is_some()) + .map(String::from), + ); + + // -Ctarget-features + let supported_features = supported_target_features(sess); + let mut featsmap = FxHashMap::default(); + let feats = sess.opts.cg.target_feature + .split(',') + .filter_map(|s| { + let enable_disable = match s.chars().next() { + None => return None, + Some(c @ ('+' | '-')) => c, + Some(_) => { + if diagnostics { + sess.emit_warning(UnknownCTargetFeaturePrefix { feature: s }); + } + return None; + } + }; + + let feature = backend_feature_name(s)?; + // Warn against use of GCC specific feature names on the CLI. 
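// A small standalone illustration (names hypothetical) of the expansion performed below by
// `to_gcc_features`: one Rust feature may map to several GCC feature names, and the `+`/`-`
// prefix from `-Ctarget-feature` is then reapplied to every expanded name.
fn sketch_expand_feature(rust_feature: &str, enable: bool) -> Vec<String> {
    // A few of the x86 mappings from `to_gcc_features`; anything not listed keeps its name.
    let gcc_names: Vec<&str> = match rust_feature {
        "sse4.2" => vec!["sse4.2", "crc32"],
        "bmi1" => vec!["bmi"],
        "pclmulqdq" => vec!["pclmul"],
        other => vec![other],
    };
    gcc_names
        .into_iter()
        .map(|name| if enable { name.to_string() } else { format!("-{}", name) })
        .collect()
}
// For example, `-Ctarget-feature=+sse4.2,-avx` contributes the GCC features
// ["sse4.2", "crc32", "-avx"].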
+ if diagnostics && !supported_features.iter().any(|&(v, _)| v == feature) { + let rust_feature = supported_features.iter().find_map(|&(rust_feature, _)| { + let gcc_features = to_gcc_features(sess, rust_feature); + if gcc_features.contains(&feature) && !gcc_features.contains(&rust_feature) { + Some(rust_feature) + } else { + None + } + }); + let unknown_feature = + if let Some(rust_feature) = rust_feature { + UnknownCTargetFeature { + feature, + rust_feature: PossibleFeature::Some { rust_feature }, + } + } + else { + UnknownCTargetFeature { feature, rust_feature: PossibleFeature::None } + }; + sess.emit_warning(unknown_feature); + } + + if diagnostics { + // FIXME(nagisa): figure out how to not allocate a full hashset here. + featsmap.insert(feature, enable_disable == '+'); + } + + // rustc-specific features do not get passed down to GCC… + if RUSTC_SPECIFIC_FEATURES.contains(&feature) { + return None; + } + // ... otherwise though we run through `to_gcc_features` when + // passing requests down to GCC. This means that all in-language + // features also work on the command line instead of having two + // different names when the GCC name and the Rust name differ. + Some(to_gcc_features(sess, feature) + .iter() + .flat_map(|feat| to_gcc_features(sess, feat).into_iter()) + .map(|feature| { + if enable_disable == '-' { + format!("-{}", feature) + } + else { + feature.to_string() + } + }) + .collect::>(), + ) + }) + .flatten(); + features.extend(feats); + + if diagnostics { + if let Some(f) = check_tied_features(sess, &featsmap) { + sess.emit_err(TargetFeatureDisableOrEnable { + features: f, + span: None, + missing_features: None, + }); + } + } + + features +} + +/// Returns a feature name for the given `+feature` or `-feature` string. +/// +/// Only allows features that are backend specific (i.e. not [`RUSTC_SPECIFIC_FEATURES`].) +fn backend_feature_name(s: &str) -> Option<&str> { + // features must start with a `+` or `-`. + let feature = s.strip_prefix(&['+', '-'][..]).unwrap_or_else(|| { + bug!("target feature `{}` must begin with a `+` or `-`", s); + }); + // Rustc-specific feature requests like `+crt-static` or `-crt-static` + // are not passed down to GCC. + if RUSTC_SPECIFIC_FEATURES.contains(&feature) { + return None; + } + Some(feature) +} + +// To find a list of GCC's names, check https://gcc.gnu.org/onlinedocs/gcc/Function-Attributes.html +pub fn to_gcc_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]> { + let arch = if sess.target.arch == "x86_64" { "x86" } else { &*sess.target.arch }; + match (arch, s) { + ("x86", "sse4.2") => smallvec!["sse4.2", "crc32"], + ("x86", "pclmulqdq") => smallvec!["pclmul"], + ("x86", "rdrand") => smallvec!["rdrnd"], + ("x86", "bmi1") => smallvec!["bmi"], + ("x86", "cmpxchg16b") => smallvec!["cx16"], + ("x86", "avx512vaes") => smallvec!["vaes"], + ("x86", "avx512gfni") => smallvec!["gfni"], + ("x86", "avx512vpclmulqdq") => smallvec!["vpclmulqdq"], + // NOTE: seems like GCC requires 'avx512bw' for 'avx512vbmi2'. + ("x86", "avx512vbmi2") => smallvec!["avx512vbmi2", "avx512bw"], + // NOTE: seems like GCC requires 'avx512bw' for 'avx512bitalg'. 
+ ("x86", "avx512bitalg") => smallvec!["avx512bitalg", "avx512bw"], + ("aarch64", "rcpc2") => smallvec!["rcpc-immo"], + ("aarch64", "dpb") => smallvec!["ccpp"], + ("aarch64", "dpb2") => smallvec!["ccdp"], + ("aarch64", "frintts") => smallvec!["fptoint"], + ("aarch64", "fcma") => smallvec!["complxnum"], + ("aarch64", "pmuv3") => smallvec!["perfmon"], + ("aarch64", "paca") => smallvec!["pauth"], + ("aarch64", "pacg") => smallvec!["pauth"], + // Rust ties fp and neon together. In GCC neon implicitly enables fp, + // but we manually enable neon when a feature only implicitly enables fp + ("aarch64", "f32mm") => smallvec!["f32mm", "neon"], + ("aarch64", "f64mm") => smallvec!["f64mm", "neon"], + ("aarch64", "fhm") => smallvec!["fp16fml", "neon"], + ("aarch64", "fp16") => smallvec!["fullfp16", "neon"], + ("aarch64", "jsconv") => smallvec!["jsconv", "neon"], + ("aarch64", "sve") => smallvec!["sve", "neon"], + ("aarch64", "sve2") => smallvec!["sve2", "neon"], + ("aarch64", "sve2-aes") => smallvec!["sve2-aes", "neon"], + ("aarch64", "sve2-sm4") => smallvec!["sve2-sm4", "neon"], + ("aarch64", "sve2-sha3") => smallvec!["sve2-sha3", "neon"], + ("aarch64", "sve2-bitperm") => smallvec!["sve2-bitperm", "neon"], + (_, s) => smallvec![s], + } +} + +// Given a map from target_features to whether they are enabled or disabled, +// ensure only valid combinations are allowed. +pub fn check_tied_features(sess: &Session, features: &FxHashMap<&str, bool>) -> Option<&'static [&'static str]> { + for tied in tied_target_features(sess) { + // Tied features must be set to the same value, or not set at all + let mut tied_iter = tied.iter(); + let enabled = features.get(tied_iter.next().unwrap()); + if tied_iter.any(|feature| enabled != features.get(feature)) { + return Some(tied); + } + } + None +} + +fn arch_to_gcc(name: &str) -> &str { + match name { + "M68020" => "68020", + _ => name, + } +} + +fn handle_native(name: &str) -> &str { + if name != "native" { + return arch_to_gcc(name); + } + + #[cfg(feature="master")] + { + // Get the native arch. + let context = Context::default(); + context.get_target_info().arch().unwrap() + .to_str() + .unwrap() + } + #[cfg(not(feature="master"))] + unimplemented!(); +} + +pub fn target_cpu(sess: &Session) -> &str { + match sess.opts.cg.target_cpu { + Some(ref name) => handle_native(name), + None => handle_native(sess.target.cpu.as_ref()), + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/int.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/int.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/int.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/int.rs 2023-12-21 16:55:28.000000000 +0000 @@ -7,7 +7,9 @@ use gccjit::{ComparisonOp, FunctionType, RValue, ToRValue, Type, UnaryOp, BinaryOp}; use rustc_codegen_ssa::common::{IntPredicate, TypeKind}; use rustc_codegen_ssa::traits::{BackendTypes, BaseTypeMethods, BuilderMethods, OverflowOp}; -use rustc_middle::ty::Ty; +use rustc_middle::ty::{ParamEnv, Ty}; +use rustc_target::abi::{Endian, call::{ArgAbi, ArgAttributes, Conv, FnAbi, PassMode}}; +use rustc_target::spec; use crate::builder::ToGccComp; use crate::{builder::Builder, common::{SignType, TypeReflection}, context::CodegenCx}; @@ -36,13 +38,11 @@ self.cx.context.new_unary_op(None, operation, typ, a) } else { - // TODO(antoyo): use __negdi2 and __negti2 instead? 
let element_type = typ.dyncast_array().expect("element type"); - let values = [ + self.from_low_high_rvalues(typ, self.cx.context.new_unary_op(None, UnaryOp::BitwiseNegate, element_type, self.low(a)), self.cx.context.new_unary_op(None, UnaryOp::BitwiseNegate, element_type, self.high(a)), - ]; - self.cx.context.new_array_constructor(None, typ, &values) + ) } } @@ -52,9 +52,7 @@ self.cx.context.new_unary_op(None, UnaryOp::Minus, a.get_type(), a) } else { - let param_a = self.context.new_parameter(None, a_type, "a"); - let func = self.context.new_function(None, FunctionType::Extern, a_type, &[param_a], "__negti2", false); - self.context.new_call(None, func, &[a]) + self.gcc_add(self.gcc_not(a), self.gcc_int(a_type, 1)) } } @@ -103,7 +101,6 @@ let condition = self.gcc_icmp(IntPredicate::IntNE, self.gcc_and(b, sixty_four), zero); self.llbb().end_with_conditional(None, condition, then_block, else_block); - // TODO(antoyo): take endianness into account. let shift_value = self.gcc_sub(b, sixty_four); let high = self.high(a); let sign = @@ -113,11 +110,7 @@ else { zero }; - let values = [ - high >> shift_value, - sign, - ]; - let array_value = self.context.new_array_constructor(None, a_type, &values); + let array_value = self.from_low_high_rvalues(a_type, high >> shift_value, sign); then_block.add_assignment(None, result, array_value); then_block.end_with_jump(None, after_block); @@ -133,11 +126,10 @@ let casted_low = self.context.new_cast(None, self.low(a), unsigned_type); let shifted_low = casted_low >> self.context.new_cast(None, b, unsigned_type); let shifted_low = self.context.new_cast(None, shifted_low, native_int_type); - let values = [ + let array_value = self.from_low_high_rvalues(a_type, (high << shift_value) | shifted_low, high >> b, - ]; - let array_value = self.context.new_array_constructor(None, a_type, &values); + ); actual_else_block.add_assignment(None, result, array_value); actual_else_block.end_with_jump(None, after_block); @@ -317,18 +309,7 @@ _ => unreachable!(), }, }; - let a_type = lhs.get_type(); - let b_type = rhs.get_type(); - let param_a = self.context.new_parameter(None, a_type, "a"); - let param_b = self.context.new_parameter(None, b_type, "b"); - let result_field = self.context.new_field(None, a_type, "result"); - let overflow_field = self.context.new_field(None, self.bool_type, "overflow"); - let return_type = self.context.new_struct_type(None, "result_overflow", &[result_field, overflow_field]); - let func = self.context.new_function(None, FunctionType::Extern, return_type.as_type(), &[param_a, param_b], func_name, false); - let result = self.context.new_call(None, func, &[lhs, rhs]); - let overflow = result.access_field(None, overflow_field); - let int_result = result.access_field(None, result_field); - return (int_result, overflow); + return self.operation_with_overflow(func_name, lhs, rhs); }, _ => { match oop { @@ -353,23 +334,111 @@ (res.dereference(None).to_rvalue(), overflow) } - pub fn gcc_icmp(&self, op: IntPredicate, mut lhs: RValue<'gcc>, mut rhs: RValue<'gcc>) -> RValue<'gcc> { + pub fn operation_with_overflow(&self, func_name: &str, lhs: RValue<'gcc>, rhs: RValue<'gcc>) -> (RValue<'gcc>, RValue<'gcc>) { + let a_type = lhs.get_type(); + let b_type = rhs.get_type(); + let param_a = self.context.new_parameter(None, a_type, "a"); + let param_b = self.context.new_parameter(None, b_type, "b"); + let result_field = self.context.new_field(None, a_type, "result"); + let overflow_field = self.context.new_field(None, self.bool_type, "overflow"); + + let ret_ty = 
Ty::new_tup(self.tcx, &[self.tcx.types.i128, self.tcx.types.bool]); + let layout = self.tcx.layout_of(ParamEnv::reveal_all().and(ret_ty)).unwrap(); + + let arg_abi = ArgAbi { + layout, + mode: PassMode::Direct(ArgAttributes::new()), + }; + let mut fn_abi = FnAbi { + args: vec![arg_abi.clone(), arg_abi.clone()].into_boxed_slice(), + ret: arg_abi, + c_variadic: false, + fixed_count: 2, + conv: Conv::C, + can_unwind: false, + }; + fn_abi.adjust_for_foreign_abi(self.cx, spec::abi::Abi::C { + unwind: false, + }).unwrap(); + + let indirect = matches!(fn_abi.ret.mode, PassMode::Indirect { .. }); + + let return_type = self.context.new_struct_type(None, "result_overflow", &[result_field, overflow_field]); + let result = + if indirect { + let return_value = self.current_func().new_local(None, return_type.as_type(), "return_value"); + let return_param_type = return_type.as_type().make_pointer(); + let return_param = self.context.new_parameter(None, return_param_type, "return_value"); + let func = self.context.new_function(None, FunctionType::Extern, self.type_void(), &[return_param, param_a, param_b], func_name, false); + self.llbb().add_eval(None, self.context.new_call(None, func, &[return_value.get_address(None), lhs, rhs])); + return_value.to_rvalue() + } + else { + let func = self.context.new_function(None, FunctionType::Extern, return_type.as_type(), &[param_a, param_b], func_name, false); + self.context.new_call(None, func, &[lhs, rhs]) + }; + let overflow = result.access_field(None, overflow_field); + let int_result = result.access_field(None, result_field); + return (int_result, overflow); + } + + pub fn gcc_icmp(&mut self, op: IntPredicate, mut lhs: RValue<'gcc>, mut rhs: RValue<'gcc>) -> RValue<'gcc> { let a_type = lhs.get_type(); let b_type = rhs.get_type(); if self.is_non_native_int_type(a_type) || self.is_non_native_int_type(b_type) { - let signed = a_type.is_compatible_with(self.i128_type); - let sign = - if signed { - "" - } - else { - "u" - }; - let func_name = format!("__{}cmpti2", sign); - let param_a = self.context.new_parameter(None, a_type, "a"); - let param_b = self.context.new_parameter(None, b_type, "b"); - let func = self.context.new_function(None, FunctionType::Extern, self.int_type, &[param_a, param_b], func_name, false); - let cmp = self.context.new_call(None, func, &[lhs, rhs]); + // This algorithm is based on compiler-rt's __cmpti2: + // https://github.com/llvm-mirror/compiler-rt/blob/f0745e8476f069296a7c71accedd061dce4cdf79/lib/builtins/cmpti2.c#L21 + let result = self.current_func().new_local(None, self.int_type, "icmp_result"); + let block1 = self.current_func().new_block("block1"); + let block2 = self.current_func().new_block("block2"); + let block3 = self.current_func().new_block("block3"); + let block4 = self.current_func().new_block("block4"); + let block5 = self.current_func().new_block("block5"); + let block6 = self.current_func().new_block("block6"); + let block7 = self.current_func().new_block("block7"); + let block8 = self.current_func().new_block("block8"); + let after = self.current_func().new_block("after"); + + let native_int_type = a_type.dyncast_array().expect("get element type"); + // NOTE: cast low to its unsigned type in order to perform a comparison correctly (e.g. + // the sign is only on high). 
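// As a plain-Rust sketch (helper name illustrative) of the same __cmpti2-style comparison
// that the basic blocks below implement, with the result encoding less/equal/greater as
// 0/1/2:
fn sketch_cmp_i128(lhs: i128, rhs: i128) -> i32 {
    // The sign lives in the high half, so the high words compare as signed...
    let (lhs_high, rhs_high) = ((lhs >> 64) as i64, (rhs >> 64) as i64);
    // ...while the low words must compare as unsigned.
    let (lhs_low, rhs_low) = (lhs as u64, rhs as u64);
    if lhs_high < rhs_high { return 0; }
    if lhs_high > rhs_high { return 2; }
    if lhs_low < rhs_low { return 0; }
    if lhs_low > rhs_low { return 2; }
    1
}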
+ let unsigned_type = native_int_type.to_unsigned(&self.cx); + + let lhs_low = self.context.new_cast(None, self.low(lhs), unsigned_type); + let rhs_low = self.context.new_cast(None, self.low(rhs), unsigned_type); + + let condition = self.context.new_comparison(None, ComparisonOp::LessThan, self.high(lhs), self.high(rhs)); + self.llbb().end_with_conditional(None, condition, block1, block2); + + block1.add_assignment(None, result, self.context.new_rvalue_zero(self.int_type)); + block1.end_with_jump(None, after); + + let condition = self.context.new_comparison(None, ComparisonOp::GreaterThan, self.high(lhs), self.high(rhs)); + block2.end_with_conditional(None, condition, block3, block4); + + block3.add_assignment(None, result, self.context.new_rvalue_from_int(self.int_type, 2)); + block3.end_with_jump(None, after); + + let condition = self.context.new_comparison(None, ComparisonOp::LessThan, lhs_low, rhs_low); + block4.end_with_conditional(None, condition, block5, block6); + + block5.add_assignment(None, result, self.context.new_rvalue_zero(self.int_type)); + block5.end_with_jump(None, after); + + let condition = self.context.new_comparison(None, ComparisonOp::GreaterThan, lhs_low, rhs_low); + block6.end_with_conditional(None, condition, block7, block8); + + block7.add_assignment(None, result, self.context.new_rvalue_from_int(self.int_type, 2)); + block7.end_with_jump(None, after); + + block8.add_assignment(None, result, self.context.new_rvalue_one(self.int_type)); + block8.end_with_jump(None, after); + + // NOTE: since jumps were added in a place rustc does not expect, the current block in the + // state need to be updated. + self.switch_to_block(after); + + let cmp = result.to_rvalue(); let (op, limit) = match op { IntPredicate::IntEQ => { @@ -378,6 +447,7 @@ IntPredicate::IntNE => { return self.context.new_comparison(None, ComparisonOp::NotEquals, cmp, self.context.new_rvalue_one(self.int_type)); }, + // TODO(antoyo): cast to u128 for unsigned comparison. See below. IntPredicate::IntUGT => (ComparisonOp::Equals, 2), IntPredicate::IntUGE => (ComparisonOp::GreaterThanEquals, 1), IntPredicate::IntULT => (ComparisonOp::Equals, 0), @@ -407,6 +477,18 @@ rhs = self.context.new_cast(None, rhs, a_type); } } + match op { + IntPredicate::IntUGT | IntPredicate::IntUGE | IntPredicate::IntULT | IntPredicate::IntULE => { + if !a_type.is_vector() { + let unsigned_type = a_type.to_unsigned(&self.cx); + lhs = self.context.new_cast(None, lhs, unsigned_type); + rhs = self.context.new_cast(None, rhs, unsigned_type); + } + }, + // TODO(antoyo): we probably need to handle signed comparison for unsigned + // integers. + _ => (), + } self.context.new_comparison(None, op.to_gcc_comparison(), lhs, rhs) } } @@ -418,11 +500,10 @@ a ^ b } else { - let values = [ + self.from_low_high_rvalues(a_type, self.low(a) ^ self.low(b), self.high(a) ^ self.high(b), - ]; - self.context.new_array_constructor(None, a_type, &values) + ) } } @@ -468,12 +549,10 @@ let condition = self.gcc_icmp(IntPredicate::IntNE, self.gcc_and(b, sixty_four), zero); self.llbb().end_with_conditional(None, condition, then_block, else_block); - // TODO(antoyo): take endianness into account. 
-            let values = [
+            let array_value = self.from_low_high_rvalues(a_type,
                 zero,
                 self.low(a) << (b - sixty_four),
-            ];
-            let array_value = self.context.new_array_constructor(None, a_type, &values);
+            );
             then_block.add_assignment(None, result, array_value);
             then_block.end_with_jump(None, after_block);
@@ -484,16 +563,16 @@
             b0_block.end_with_jump(None, after_block);

             // NOTE: cast low to its unsigned type in order to perform a logical right shift.
+            // TODO(antoyo): adjust this ^ comment.
             let unsigned_type = native_int_type.to_unsigned(&self.cx);
             let casted_low = self.context.new_cast(None, self.low(a), unsigned_type);
             let shift_value = self.context.new_cast(None, sixty_four - b, unsigned_type);
             let high_low = self.context.new_cast(None, casted_low >> shift_value, native_int_type);
-            let values = [
+
+            let array_value = self.from_low_high_rvalues(a_type,
                 self.low(a) << b,
                 (self.high(a) << b) | high_low,
-            ];
-
-            let array_value = self.context.new_array_constructor(None, a_type, &values);
+            );
             actual_else_block.add_assignment(None, result, array_value);
             actual_else_block.end_with_jump(None, after_block);
@@ -509,16 +588,16 @@
         let arg_type = arg.get_type();
         if !self.is_native_int_type(arg_type) {
             let native_int_type = arg_type.dyncast_array().expect("get element type");
-            let lsb = self.context.new_array_access(None, arg, self.context.new_rvalue_from_int(self.int_type, 0)).to_rvalue();
+            let lsb = self.low(arg);
             let swapped_lsb = self.gcc_bswap(lsb, width / 2);
             let swapped_lsb = self.context.new_cast(None, swapped_lsb, native_int_type);

-            let msb = self.context.new_array_access(None, arg, self.context.new_rvalue_from_int(self.int_type, 1)).to_rvalue();
+            let msb = self.high(arg);
             let swapped_msb = self.gcc_bswap(msb, width / 2);
             let swapped_msb = self.context.new_cast(None, swapped_msb, native_int_type);
             // NOTE: we also need to swap the two elements here, in addition to swapping inside
             // the elements themselves like done above.
-            return self.context.new_array_constructor(None, arg_type, &[swapped_msb, swapped_lsb]);
+            return self.from_low_high_rvalues(arg_type, swapped_msb, swapped_lsb);
         }

         // TODO(antoyo): check if it's faster to use string literals and a
@@ -546,7 +625,12 @@
     }

     pub fn gcc_uint(&self, typ: Type<'gcc>, int: u64) -> RValue<'gcc> {
-        if self.is_native_int_type_or_bool(typ) {
+        if typ.is_u128(self) {
+            // FIXME(antoyo): libgccjit cannot create 128-bit values yet.
+            let num = self.context.new_rvalue_from_long(self.u64_type, int as i64);
+            self.gcc_int_cast(num, typ)
+        }
+        else if self.is_native_int_type_or_bool(typ) {
             self.context.new_rvalue_from_long(typ, u64::try_from(int).expect("u64::try_from") as i64)
         }
         else {
@@ -572,6 +656,7 @@
             }
         }
         else if typ.is_i128(self) {
+            // FIXME(antoyo): libgccjit cannot create 128-bit values yet.
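The shift hunks above split a 128-bit left shift into three cases on the shift amount: zero (value unchanged, the b0_block), 64 or more (the low half, shifted by b - 64, becomes the new high half and the low half becomes zero), and 1..=63 (the high half takes the bits shifted out of the low half via a logical right shift of the unsigned low). A standalone sketch of that case split, checked against Rust's native u128 shift:

    /// Shift a 128-bit value, given as (low, high) 64-bit halves, left by `b` bits.
    /// Mirrors the b == 0 / b >= 64 / otherwise blocks in the hunk above.
    fn shl_halves(low: u64, high: u64, b: u32) -> (u64, u64) {
        assert!(b < 128, "shift amount must be below the bit width");
        match b {
            0 => (low, high),
            64..=127 => (0, low << (b - 64)),
            _ => (low << b, (high << b) | (low >> (64 - b))),
        }
    }

    fn main() {
        let value: u128 = 0x1234_5678_9abc_def0_0fed_cba9_8765_4321;
        for b in [0u32, 1, 7, 63, 64, 100, 127] {
            let (low, high) = shl_halves(value as u64, (value >> 64) as u64, b);
            assert_eq!(((high as u128) << 64) | low as u128, value << b);
        }
    }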
let num = self.context.new_rvalue_from_long(self.u64_type, num as u64 as i64);
             self.gcc_int_cast(num, typ)
         }
@@ -616,11 +701,10 @@
         else {
             assert!(!a_native && !b_native, "both types should either be native or non-native for or operation");
             let native_int_type = a_type.dyncast_array().expect("get element type");
-            let values = [
+            self.from_low_high_rvalues(a_type,
                 self.context.new_binary_op(None, operation, native_int_type, self.low(a), self.low(b)),
                 self.context.new_binary_op(None, operation, native_int_type, self.high(a), self.high(b)),
-            ];
-            self.context.new_array_constructor(None, a_type, &values)
+            )
         }
     }

@@ -644,11 +728,10 @@
             let zero = self.context.new_rvalue_zero(value_type);
             let is_negative = self.context.new_comparison(None, ComparisonOp::LessThan, value, zero);
             let is_negative = self.gcc_int_cast(is_negative, dest_element_type);
-            let values = [
+            self.from_low_high_rvalues(dest_typ,
                 self.context.new_cast(None, value, dest_element_type),
                 self.context.new_unary_op(None, UnaryOp::Minus, dest_element_type, is_negative),
-            ];
-            self.context.new_array_constructor(None, dest_typ, &values)
+            )
         }
         else {
             // Since u128 and i128 are the only types that can be unsupported, we know the type of
@@ -726,20 +809,47 @@
     }

     fn high(&self, value: RValue<'gcc>) -> RValue<'gcc> {
-        self.context.new_array_access(None, value, self.context.new_rvalue_from_int(self.int_type, 1))
+        let index =
+            match self.sess().target.options.endian {
+                Endian::Little => 1,
+                Endian::Big => 0,
+            };
+        self.context.new_array_access(None, value, self.context.new_rvalue_from_int(self.int_type, index))
             .to_rvalue()
     }

     fn low(&self, value: RValue<'gcc>) -> RValue<'gcc> {
-        self.context.new_array_access(None, value, self.context.new_rvalue_from_int(self.int_type, 0))
+        let index =
+            match self.sess().target.options.endian {
+                Endian::Little => 0,
+                Endian::Big => 1,
+            };
+        self.context.new_array_access(None, value, self.context.new_rvalue_from_int(self.int_type, index))
             .to_rvalue()
     }

+    fn from_low_high_rvalues(&self, typ: Type<'gcc>, low: RValue<'gcc>, high: RValue<'gcc>) -> RValue<'gcc> {
+        let (first, last) =
+            match self.sess().target.options.endian {
+                Endian::Little => (low, high),
+                Endian::Big => (high, low),
+            };
+
+        let values = [first, last];
+        self.context.new_array_constructor(None, typ, &values)
+    }
+
     fn from_low_high(&self, typ: Type<'gcc>, low: i64, high: i64) -> RValue<'gcc> {
+        let (first, last) =
+            match self.sess().target.options.endian {
+                Endian::Little => (low, high),
+                Endian::Big => (high, low),
+            };
+
         let native_int_type = typ.dyncast_array().expect("get element type");
         let values = [
-            self.context.new_rvalue_from_long(native_int_type, low),
-            self.context.new_rvalue_from_long(native_int_type, high),
+            self.context.new_rvalue_from_long(native_int_type, first),
+            self.context.new_rvalue_from_long(native_int_type, last),
         ];
         self.context.new_array_constructor(None, typ, &values)
     }
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/archs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/archs.rs
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/archs.rs	2023-12-04 19:48:34.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/archs.rs	2023-12-21 16:55:28.000000000 +0000
@@ -2254,6 +2254,1494 @@
     "llvm.hexagon.prefetch" => "__builtin_HEXAGON_prefetch",
     "llvm.hexagon.vmemcpy" => "__builtin_hexagon_vmemcpy",
     "llvm.hexagon.vmemset" => "__builtin_hexagon_vmemset",
+    // loongarch
+
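The high, low, from_low_high_rvalues and from_low_high helpers changed in the int.rs hunk above make the two-element array backing a non-native 128-bit integer endian-aware: element 0 holds the low half on little-endian targets and the high half on big-endian ones. A small sketch of that packing and the matching accessors, round-tripped against u128; the local Endian enum is a stand-in for the rustc target-endianness type used in the diff:

    #[derive(Clone, Copy)]
    enum Endian {
        Little,
        Big,
    }

    /// Order the (low, high) halves the way the target lays them out,
    /// like from_low_high_rvalues above.
    fn from_low_high(endian: Endian, low: u64, high: u64) -> [u64; 2] {
        match endian {
            Endian::Little => [low, high],
            Endian::Big => [high, low],
        }
    }

    /// Read the halves back with the endian-dependent indices used by low()/high().
    fn to_low_high(endian: Endian, halves: [u64; 2]) -> (u64, u64) {
        match endian {
            Endian::Little => (halves[0], halves[1]),
            Endian::Big => (halves[1], halves[0]),
        }
    }

    fn main() {
        let value: u128 = 0x0123_4567_89ab_cdef_fedc_ba98_7654_3210;
        let (low, high) = (value as u64, (value >> 64) as u64);
        for endian in [Endian::Little, Endian::Big] {
            let packed = from_low_high(endian, low, high);
            let (l, h) = to_low_high(endian, packed);
            assert_eq!(((h as u128) << 64) | l as u128, value);
        }
    }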
"llvm.loongarch.asrtgt.d" => "__builtin_loongarch_asrtgt_d", + "llvm.loongarch.asrtle.d" => "__builtin_loongarch_asrtle_d", + "llvm.loongarch.break" => "__builtin_loongarch_break", + "llvm.loongarch.cacop.d" => "__builtin_loongarch_cacop_d", + "llvm.loongarch.cacop.w" => "__builtin_loongarch_cacop_w", + "llvm.loongarch.cpucfg" => "__builtin_loongarch_cpucfg", + "llvm.loongarch.crc.w.b.w" => "__builtin_loongarch_crc_w_b_w", + "llvm.loongarch.crc.w.d.w" => "__builtin_loongarch_crc_w_d_w", + "llvm.loongarch.crc.w.h.w" => "__builtin_loongarch_crc_w_h_w", + "llvm.loongarch.crc.w.w.w" => "__builtin_loongarch_crc_w_w_w", + "llvm.loongarch.crcc.w.b.w" => "__builtin_loongarch_crcc_w_b_w", + "llvm.loongarch.crcc.w.d.w" => "__builtin_loongarch_crcc_w_d_w", + "llvm.loongarch.crcc.w.h.w" => "__builtin_loongarch_crcc_w_h_w", + "llvm.loongarch.crcc.w.w.w" => "__builtin_loongarch_crcc_w_w_w", + "llvm.loongarch.csrrd.d" => "__builtin_loongarch_csrrd_d", + "llvm.loongarch.csrrd.w" => "__builtin_loongarch_csrrd_w", + "llvm.loongarch.csrwr.d" => "__builtin_loongarch_csrwr_d", + "llvm.loongarch.csrwr.w" => "__builtin_loongarch_csrwr_w", + "llvm.loongarch.csrxchg.d" => "__builtin_loongarch_csrxchg_d", + "llvm.loongarch.csrxchg.w" => "__builtin_loongarch_csrxchg_w", + "llvm.loongarch.dbar" => "__builtin_loongarch_dbar", + "llvm.loongarch.ibar" => "__builtin_loongarch_ibar", + "llvm.loongarch.iocsrrd.b" => "__builtin_loongarch_iocsrrd_b", + "llvm.loongarch.iocsrrd.d" => "__builtin_loongarch_iocsrrd_d", + "llvm.loongarch.iocsrrd.h" => "__builtin_loongarch_iocsrrd_h", + "llvm.loongarch.iocsrrd.w" => "__builtin_loongarch_iocsrrd_w", + "llvm.loongarch.iocsrwr.b" => "__builtin_loongarch_iocsrwr_b", + "llvm.loongarch.iocsrwr.d" => "__builtin_loongarch_iocsrwr_d", + "llvm.loongarch.iocsrwr.h" => "__builtin_loongarch_iocsrwr_h", + "llvm.loongarch.iocsrwr.w" => "__builtin_loongarch_iocsrwr_w", + "llvm.loongarch.lasx.vext2xv.d.b" => "__builtin_lasx_vext2xv_d_b", + "llvm.loongarch.lasx.vext2xv.d.h" => "__builtin_lasx_vext2xv_d_h", + "llvm.loongarch.lasx.vext2xv.d.w" => "__builtin_lasx_vext2xv_d_w", + "llvm.loongarch.lasx.vext2xv.du.bu" => "__builtin_lasx_vext2xv_du_bu", + "llvm.loongarch.lasx.vext2xv.du.hu" => "__builtin_lasx_vext2xv_du_hu", + "llvm.loongarch.lasx.vext2xv.du.wu" => "__builtin_lasx_vext2xv_du_wu", + "llvm.loongarch.lasx.vext2xv.h.b" => "__builtin_lasx_vext2xv_h_b", + "llvm.loongarch.lasx.vext2xv.hu.bu" => "__builtin_lasx_vext2xv_hu_bu", + "llvm.loongarch.lasx.vext2xv.w.b" => "__builtin_lasx_vext2xv_w_b", + "llvm.loongarch.lasx.vext2xv.w.h" => "__builtin_lasx_vext2xv_w_h", + "llvm.loongarch.lasx.vext2xv.wu.bu" => "__builtin_lasx_vext2xv_wu_bu", + "llvm.loongarch.lasx.vext2xv.wu.hu" => "__builtin_lasx_vext2xv_wu_hu", + "llvm.loongarch.lasx.xbnz.b" => "__builtin_lasx_xbnz_b", + "llvm.loongarch.lasx.xbnz.d" => "__builtin_lasx_xbnz_d", + "llvm.loongarch.lasx.xbnz.h" => "__builtin_lasx_xbnz_h", + "llvm.loongarch.lasx.xbnz.v" => "__builtin_lasx_xbnz_v", + "llvm.loongarch.lasx.xbnz.w" => "__builtin_lasx_xbnz_w", + "llvm.loongarch.lasx.xbz.b" => "__builtin_lasx_xbz_b", + "llvm.loongarch.lasx.xbz.d" => "__builtin_lasx_xbz_d", + "llvm.loongarch.lasx.xbz.h" => "__builtin_lasx_xbz_h", + "llvm.loongarch.lasx.xbz.v" => "__builtin_lasx_xbz_v", + "llvm.loongarch.lasx.xbz.w" => "__builtin_lasx_xbz_w", + "llvm.loongarch.lasx.xvabsd.b" => "__builtin_lasx_xvabsd_b", + "llvm.loongarch.lasx.xvabsd.bu" => "__builtin_lasx_xvabsd_bu", + "llvm.loongarch.lasx.xvabsd.d" => "__builtin_lasx_xvabsd_d", + "llvm.loongarch.lasx.xvabsd.du" => 
"__builtin_lasx_xvabsd_du", + "llvm.loongarch.lasx.xvabsd.h" => "__builtin_lasx_xvabsd_h", + "llvm.loongarch.lasx.xvabsd.hu" => "__builtin_lasx_xvabsd_hu", + "llvm.loongarch.lasx.xvabsd.w" => "__builtin_lasx_xvabsd_w", + "llvm.loongarch.lasx.xvabsd.wu" => "__builtin_lasx_xvabsd_wu", + "llvm.loongarch.lasx.xvadd.b" => "__builtin_lasx_xvadd_b", + "llvm.loongarch.lasx.xvadd.d" => "__builtin_lasx_xvadd_d", + "llvm.loongarch.lasx.xvadd.h" => "__builtin_lasx_xvadd_h", + "llvm.loongarch.lasx.xvadd.q" => "__builtin_lasx_xvadd_q", + "llvm.loongarch.lasx.xvadd.w" => "__builtin_lasx_xvadd_w", + "llvm.loongarch.lasx.xvadda.b" => "__builtin_lasx_xvadda_b", + "llvm.loongarch.lasx.xvadda.d" => "__builtin_lasx_xvadda_d", + "llvm.loongarch.lasx.xvadda.h" => "__builtin_lasx_xvadda_h", + "llvm.loongarch.lasx.xvadda.w" => "__builtin_lasx_xvadda_w", + "llvm.loongarch.lasx.xvaddi.bu" => "__builtin_lasx_xvaddi_bu", + "llvm.loongarch.lasx.xvaddi.du" => "__builtin_lasx_xvaddi_du", + "llvm.loongarch.lasx.xvaddi.hu" => "__builtin_lasx_xvaddi_hu", + "llvm.loongarch.lasx.xvaddi.wu" => "__builtin_lasx_xvaddi_wu", + "llvm.loongarch.lasx.xvaddwev.d.w" => "__builtin_lasx_xvaddwev_d_w", + "llvm.loongarch.lasx.xvaddwev.d.wu" => "__builtin_lasx_xvaddwev_d_wu", + "llvm.loongarch.lasx.xvaddwev.d.wu.w" => "__builtin_lasx_xvaddwev_d_wu_w", + "llvm.loongarch.lasx.xvaddwev.h.b" => "__builtin_lasx_xvaddwev_h_b", + "llvm.loongarch.lasx.xvaddwev.h.bu" => "__builtin_lasx_xvaddwev_h_bu", + "llvm.loongarch.lasx.xvaddwev.h.bu.b" => "__builtin_lasx_xvaddwev_h_bu_b", + "llvm.loongarch.lasx.xvaddwev.q.d" => "__builtin_lasx_xvaddwev_q_d", + "llvm.loongarch.lasx.xvaddwev.q.du" => "__builtin_lasx_xvaddwev_q_du", + "llvm.loongarch.lasx.xvaddwev.q.du.d" => "__builtin_lasx_xvaddwev_q_du_d", + "llvm.loongarch.lasx.xvaddwev.w.h" => "__builtin_lasx_xvaddwev_w_h", + "llvm.loongarch.lasx.xvaddwev.w.hu" => "__builtin_lasx_xvaddwev_w_hu", + "llvm.loongarch.lasx.xvaddwev.w.hu.h" => "__builtin_lasx_xvaddwev_w_hu_h", + "llvm.loongarch.lasx.xvaddwod.d.w" => "__builtin_lasx_xvaddwod_d_w", + "llvm.loongarch.lasx.xvaddwod.d.wu" => "__builtin_lasx_xvaddwod_d_wu", + "llvm.loongarch.lasx.xvaddwod.d.wu.w" => "__builtin_lasx_xvaddwod_d_wu_w", + "llvm.loongarch.lasx.xvaddwod.h.b" => "__builtin_lasx_xvaddwod_h_b", + "llvm.loongarch.lasx.xvaddwod.h.bu" => "__builtin_lasx_xvaddwod_h_bu", + "llvm.loongarch.lasx.xvaddwod.h.bu.b" => "__builtin_lasx_xvaddwod_h_bu_b", + "llvm.loongarch.lasx.xvaddwod.q.d" => "__builtin_lasx_xvaddwod_q_d", + "llvm.loongarch.lasx.xvaddwod.q.du" => "__builtin_lasx_xvaddwod_q_du", + "llvm.loongarch.lasx.xvaddwod.q.du.d" => "__builtin_lasx_xvaddwod_q_du_d", + "llvm.loongarch.lasx.xvaddwod.w.h" => "__builtin_lasx_xvaddwod_w_h", + "llvm.loongarch.lasx.xvaddwod.w.hu" => "__builtin_lasx_xvaddwod_w_hu", + "llvm.loongarch.lasx.xvaddwod.w.hu.h" => "__builtin_lasx_xvaddwod_w_hu_h", + "llvm.loongarch.lasx.xvand.v" => "__builtin_lasx_xvand_v", + "llvm.loongarch.lasx.xvandi.b" => "__builtin_lasx_xvandi_b", + "llvm.loongarch.lasx.xvandn.v" => "__builtin_lasx_xvandn_v", + "llvm.loongarch.lasx.xvavg.b" => "__builtin_lasx_xvavg_b", + "llvm.loongarch.lasx.xvavg.bu" => "__builtin_lasx_xvavg_bu", + "llvm.loongarch.lasx.xvavg.d" => "__builtin_lasx_xvavg_d", + "llvm.loongarch.lasx.xvavg.du" => "__builtin_lasx_xvavg_du", + "llvm.loongarch.lasx.xvavg.h" => "__builtin_lasx_xvavg_h", + "llvm.loongarch.lasx.xvavg.hu" => "__builtin_lasx_xvavg_hu", + "llvm.loongarch.lasx.xvavg.w" => "__builtin_lasx_xvavg_w", + "llvm.loongarch.lasx.xvavg.wu" => "__builtin_lasx_xvavg_wu", + 
"llvm.loongarch.lasx.xvavgr.b" => "__builtin_lasx_xvavgr_b", + "llvm.loongarch.lasx.xvavgr.bu" => "__builtin_lasx_xvavgr_bu", + "llvm.loongarch.lasx.xvavgr.d" => "__builtin_lasx_xvavgr_d", + "llvm.loongarch.lasx.xvavgr.du" => "__builtin_lasx_xvavgr_du", + "llvm.loongarch.lasx.xvavgr.h" => "__builtin_lasx_xvavgr_h", + "llvm.loongarch.lasx.xvavgr.hu" => "__builtin_lasx_xvavgr_hu", + "llvm.loongarch.lasx.xvavgr.w" => "__builtin_lasx_xvavgr_w", + "llvm.loongarch.lasx.xvavgr.wu" => "__builtin_lasx_xvavgr_wu", + "llvm.loongarch.lasx.xvbitclr.b" => "__builtin_lasx_xvbitclr_b", + "llvm.loongarch.lasx.xvbitclr.d" => "__builtin_lasx_xvbitclr_d", + "llvm.loongarch.lasx.xvbitclr.h" => "__builtin_lasx_xvbitclr_h", + "llvm.loongarch.lasx.xvbitclr.w" => "__builtin_lasx_xvbitclr_w", + "llvm.loongarch.lasx.xvbitclri.b" => "__builtin_lasx_xvbitclri_b", + "llvm.loongarch.lasx.xvbitclri.d" => "__builtin_lasx_xvbitclri_d", + "llvm.loongarch.lasx.xvbitclri.h" => "__builtin_lasx_xvbitclri_h", + "llvm.loongarch.lasx.xvbitclri.w" => "__builtin_lasx_xvbitclri_w", + "llvm.loongarch.lasx.xvbitrev.b" => "__builtin_lasx_xvbitrev_b", + "llvm.loongarch.lasx.xvbitrev.d" => "__builtin_lasx_xvbitrev_d", + "llvm.loongarch.lasx.xvbitrev.h" => "__builtin_lasx_xvbitrev_h", + "llvm.loongarch.lasx.xvbitrev.w" => "__builtin_lasx_xvbitrev_w", + "llvm.loongarch.lasx.xvbitrevi.b" => "__builtin_lasx_xvbitrevi_b", + "llvm.loongarch.lasx.xvbitrevi.d" => "__builtin_lasx_xvbitrevi_d", + "llvm.loongarch.lasx.xvbitrevi.h" => "__builtin_lasx_xvbitrevi_h", + "llvm.loongarch.lasx.xvbitrevi.w" => "__builtin_lasx_xvbitrevi_w", + "llvm.loongarch.lasx.xvbitsel.v" => "__builtin_lasx_xvbitsel_v", + "llvm.loongarch.lasx.xvbitseli.b" => "__builtin_lasx_xvbitseli_b", + "llvm.loongarch.lasx.xvbitset.b" => "__builtin_lasx_xvbitset_b", + "llvm.loongarch.lasx.xvbitset.d" => "__builtin_lasx_xvbitset_d", + "llvm.loongarch.lasx.xvbitset.h" => "__builtin_lasx_xvbitset_h", + "llvm.loongarch.lasx.xvbitset.w" => "__builtin_lasx_xvbitset_w", + "llvm.loongarch.lasx.xvbitseti.b" => "__builtin_lasx_xvbitseti_b", + "llvm.loongarch.lasx.xvbitseti.d" => "__builtin_lasx_xvbitseti_d", + "llvm.loongarch.lasx.xvbitseti.h" => "__builtin_lasx_xvbitseti_h", + "llvm.loongarch.lasx.xvbitseti.w" => "__builtin_lasx_xvbitseti_w", + "llvm.loongarch.lasx.xvbsll.v" => "__builtin_lasx_xvbsll_v", + "llvm.loongarch.lasx.xvbsrl.v" => "__builtin_lasx_xvbsrl_v", + "llvm.loongarch.lasx.xvclo.b" => "__builtin_lasx_xvclo_b", + "llvm.loongarch.lasx.xvclo.d" => "__builtin_lasx_xvclo_d", + "llvm.loongarch.lasx.xvclo.h" => "__builtin_lasx_xvclo_h", + "llvm.loongarch.lasx.xvclo.w" => "__builtin_lasx_xvclo_w", + "llvm.loongarch.lasx.xvclz.b" => "__builtin_lasx_xvclz_b", + "llvm.loongarch.lasx.xvclz.d" => "__builtin_lasx_xvclz_d", + "llvm.loongarch.lasx.xvclz.h" => "__builtin_lasx_xvclz_h", + "llvm.loongarch.lasx.xvclz.w" => "__builtin_lasx_xvclz_w", + "llvm.loongarch.lasx.xvdiv.b" => "__builtin_lasx_xvdiv_b", + "llvm.loongarch.lasx.xvdiv.bu" => "__builtin_lasx_xvdiv_bu", + "llvm.loongarch.lasx.xvdiv.d" => "__builtin_lasx_xvdiv_d", + "llvm.loongarch.lasx.xvdiv.du" => "__builtin_lasx_xvdiv_du", + "llvm.loongarch.lasx.xvdiv.h" => "__builtin_lasx_xvdiv_h", + "llvm.loongarch.lasx.xvdiv.hu" => "__builtin_lasx_xvdiv_hu", + "llvm.loongarch.lasx.xvdiv.w" => "__builtin_lasx_xvdiv_w", + "llvm.loongarch.lasx.xvdiv.wu" => "__builtin_lasx_xvdiv_wu", + "llvm.loongarch.lasx.xvexth.d.w" => "__builtin_lasx_xvexth_d_w", + "llvm.loongarch.lasx.xvexth.du.wu" => "__builtin_lasx_xvexth_du_wu", + 
"llvm.loongarch.lasx.xvexth.h.b" => "__builtin_lasx_xvexth_h_b", + "llvm.loongarch.lasx.xvexth.hu.bu" => "__builtin_lasx_xvexth_hu_bu", + "llvm.loongarch.lasx.xvexth.q.d" => "__builtin_lasx_xvexth_q_d", + "llvm.loongarch.lasx.xvexth.qu.du" => "__builtin_lasx_xvexth_qu_du", + "llvm.loongarch.lasx.xvexth.w.h" => "__builtin_lasx_xvexth_w_h", + "llvm.loongarch.lasx.xvexth.wu.hu" => "__builtin_lasx_xvexth_wu_hu", + "llvm.loongarch.lasx.xvextl.q.d" => "__builtin_lasx_xvextl_q_d", + "llvm.loongarch.lasx.xvextl.qu.du" => "__builtin_lasx_xvextl_qu_du", + "llvm.loongarch.lasx.xvextrins.b" => "__builtin_lasx_xvextrins_b", + "llvm.loongarch.lasx.xvextrins.d" => "__builtin_lasx_xvextrins_d", + "llvm.loongarch.lasx.xvextrins.h" => "__builtin_lasx_xvextrins_h", + "llvm.loongarch.lasx.xvextrins.w" => "__builtin_lasx_xvextrins_w", + "llvm.loongarch.lasx.xvfadd.d" => "__builtin_lasx_xvfadd_d", + "llvm.loongarch.lasx.xvfadd.s" => "__builtin_lasx_xvfadd_s", + "llvm.loongarch.lasx.xvfclass.d" => "__builtin_lasx_xvfclass_d", + "llvm.loongarch.lasx.xvfclass.s" => "__builtin_lasx_xvfclass_s", + "llvm.loongarch.lasx.xvfcmp.caf.d" => "__builtin_lasx_xvfcmp_caf_d", + "llvm.loongarch.lasx.xvfcmp.caf.s" => "__builtin_lasx_xvfcmp_caf_s", + "llvm.loongarch.lasx.xvfcmp.ceq.d" => "__builtin_lasx_xvfcmp_ceq_d", + "llvm.loongarch.lasx.xvfcmp.ceq.s" => "__builtin_lasx_xvfcmp_ceq_s", + "llvm.loongarch.lasx.xvfcmp.cle.d" => "__builtin_lasx_xvfcmp_cle_d", + "llvm.loongarch.lasx.xvfcmp.cle.s" => "__builtin_lasx_xvfcmp_cle_s", + "llvm.loongarch.lasx.xvfcmp.clt.d" => "__builtin_lasx_xvfcmp_clt_d", + "llvm.loongarch.lasx.xvfcmp.clt.s" => "__builtin_lasx_xvfcmp_clt_s", + "llvm.loongarch.lasx.xvfcmp.cne.d" => "__builtin_lasx_xvfcmp_cne_d", + "llvm.loongarch.lasx.xvfcmp.cne.s" => "__builtin_lasx_xvfcmp_cne_s", + "llvm.loongarch.lasx.xvfcmp.cor.d" => "__builtin_lasx_xvfcmp_cor_d", + "llvm.loongarch.lasx.xvfcmp.cor.s" => "__builtin_lasx_xvfcmp_cor_s", + "llvm.loongarch.lasx.xvfcmp.cueq.d" => "__builtin_lasx_xvfcmp_cueq_d", + "llvm.loongarch.lasx.xvfcmp.cueq.s" => "__builtin_lasx_xvfcmp_cueq_s", + "llvm.loongarch.lasx.xvfcmp.cule.d" => "__builtin_lasx_xvfcmp_cule_d", + "llvm.loongarch.lasx.xvfcmp.cule.s" => "__builtin_lasx_xvfcmp_cule_s", + "llvm.loongarch.lasx.xvfcmp.cult.d" => "__builtin_lasx_xvfcmp_cult_d", + "llvm.loongarch.lasx.xvfcmp.cult.s" => "__builtin_lasx_xvfcmp_cult_s", + "llvm.loongarch.lasx.xvfcmp.cun.d" => "__builtin_lasx_xvfcmp_cun_d", + "llvm.loongarch.lasx.xvfcmp.cun.s" => "__builtin_lasx_xvfcmp_cun_s", + "llvm.loongarch.lasx.xvfcmp.cune.d" => "__builtin_lasx_xvfcmp_cune_d", + "llvm.loongarch.lasx.xvfcmp.cune.s" => "__builtin_lasx_xvfcmp_cune_s", + "llvm.loongarch.lasx.xvfcmp.saf.d" => "__builtin_lasx_xvfcmp_saf_d", + "llvm.loongarch.lasx.xvfcmp.saf.s" => "__builtin_lasx_xvfcmp_saf_s", + "llvm.loongarch.lasx.xvfcmp.seq.d" => "__builtin_lasx_xvfcmp_seq_d", + "llvm.loongarch.lasx.xvfcmp.seq.s" => "__builtin_lasx_xvfcmp_seq_s", + "llvm.loongarch.lasx.xvfcmp.sle.d" => "__builtin_lasx_xvfcmp_sle_d", + "llvm.loongarch.lasx.xvfcmp.sle.s" => "__builtin_lasx_xvfcmp_sle_s", + "llvm.loongarch.lasx.xvfcmp.slt.d" => "__builtin_lasx_xvfcmp_slt_d", + "llvm.loongarch.lasx.xvfcmp.slt.s" => "__builtin_lasx_xvfcmp_slt_s", + "llvm.loongarch.lasx.xvfcmp.sne.d" => "__builtin_lasx_xvfcmp_sne_d", + "llvm.loongarch.lasx.xvfcmp.sne.s" => "__builtin_lasx_xvfcmp_sne_s", + "llvm.loongarch.lasx.xvfcmp.sor.d" => "__builtin_lasx_xvfcmp_sor_d", + "llvm.loongarch.lasx.xvfcmp.sor.s" => "__builtin_lasx_xvfcmp_sor_s", + "llvm.loongarch.lasx.xvfcmp.sueq.d" 
=> "__builtin_lasx_xvfcmp_sueq_d", + "llvm.loongarch.lasx.xvfcmp.sueq.s" => "__builtin_lasx_xvfcmp_sueq_s", + "llvm.loongarch.lasx.xvfcmp.sule.d" => "__builtin_lasx_xvfcmp_sule_d", + "llvm.loongarch.lasx.xvfcmp.sule.s" => "__builtin_lasx_xvfcmp_sule_s", + "llvm.loongarch.lasx.xvfcmp.sult.d" => "__builtin_lasx_xvfcmp_sult_d", + "llvm.loongarch.lasx.xvfcmp.sult.s" => "__builtin_lasx_xvfcmp_sult_s", + "llvm.loongarch.lasx.xvfcmp.sun.d" => "__builtin_lasx_xvfcmp_sun_d", + "llvm.loongarch.lasx.xvfcmp.sun.s" => "__builtin_lasx_xvfcmp_sun_s", + "llvm.loongarch.lasx.xvfcmp.sune.d" => "__builtin_lasx_xvfcmp_sune_d", + "llvm.loongarch.lasx.xvfcmp.sune.s" => "__builtin_lasx_xvfcmp_sune_s", + "llvm.loongarch.lasx.xvfcvt.h.s" => "__builtin_lasx_xvfcvt_h_s", + "llvm.loongarch.lasx.xvfcvt.s.d" => "__builtin_lasx_xvfcvt_s_d", + "llvm.loongarch.lasx.xvfcvth.d.s" => "__builtin_lasx_xvfcvth_d_s", + "llvm.loongarch.lasx.xvfcvth.s.h" => "__builtin_lasx_xvfcvth_s_h", + "llvm.loongarch.lasx.xvfcvtl.d.s" => "__builtin_lasx_xvfcvtl_d_s", + "llvm.loongarch.lasx.xvfcvtl.s.h" => "__builtin_lasx_xvfcvtl_s_h", + "llvm.loongarch.lasx.xvfdiv.d" => "__builtin_lasx_xvfdiv_d", + "llvm.loongarch.lasx.xvfdiv.s" => "__builtin_lasx_xvfdiv_s", + "llvm.loongarch.lasx.xvffint.d.l" => "__builtin_lasx_xvffint_d_l", + "llvm.loongarch.lasx.xvffint.d.lu" => "__builtin_lasx_xvffint_d_lu", + "llvm.loongarch.lasx.xvffint.s.l" => "__builtin_lasx_xvffint_s_l", + "llvm.loongarch.lasx.xvffint.s.w" => "__builtin_lasx_xvffint_s_w", + "llvm.loongarch.lasx.xvffint.s.wu" => "__builtin_lasx_xvffint_s_wu", + "llvm.loongarch.lasx.xvffinth.d.w" => "__builtin_lasx_xvffinth_d_w", + "llvm.loongarch.lasx.xvffintl.d.w" => "__builtin_lasx_xvffintl_d_w", + "llvm.loongarch.lasx.xvflogb.d" => "__builtin_lasx_xvflogb_d", + "llvm.loongarch.lasx.xvflogb.s" => "__builtin_lasx_xvflogb_s", + "llvm.loongarch.lasx.xvfmadd.d" => "__builtin_lasx_xvfmadd_d", + "llvm.loongarch.lasx.xvfmadd.s" => "__builtin_lasx_xvfmadd_s", + "llvm.loongarch.lasx.xvfmax.d" => "__builtin_lasx_xvfmax_d", + "llvm.loongarch.lasx.xvfmax.s" => "__builtin_lasx_xvfmax_s", + "llvm.loongarch.lasx.xvfmaxa.d" => "__builtin_lasx_xvfmaxa_d", + "llvm.loongarch.lasx.xvfmaxa.s" => "__builtin_lasx_xvfmaxa_s", + "llvm.loongarch.lasx.xvfmin.d" => "__builtin_lasx_xvfmin_d", + "llvm.loongarch.lasx.xvfmin.s" => "__builtin_lasx_xvfmin_s", + "llvm.loongarch.lasx.xvfmina.d" => "__builtin_lasx_xvfmina_d", + "llvm.loongarch.lasx.xvfmina.s" => "__builtin_lasx_xvfmina_s", + "llvm.loongarch.lasx.xvfmsub.d" => "__builtin_lasx_xvfmsub_d", + "llvm.loongarch.lasx.xvfmsub.s" => "__builtin_lasx_xvfmsub_s", + "llvm.loongarch.lasx.xvfmul.d" => "__builtin_lasx_xvfmul_d", + "llvm.loongarch.lasx.xvfmul.s" => "__builtin_lasx_xvfmul_s", + "llvm.loongarch.lasx.xvfnmadd.d" => "__builtin_lasx_xvfnmadd_d", + "llvm.loongarch.lasx.xvfnmadd.s" => "__builtin_lasx_xvfnmadd_s", + "llvm.loongarch.lasx.xvfnmsub.d" => "__builtin_lasx_xvfnmsub_d", + "llvm.loongarch.lasx.xvfnmsub.s" => "__builtin_lasx_xvfnmsub_s", + "llvm.loongarch.lasx.xvfrecip.d" => "__builtin_lasx_xvfrecip_d", + "llvm.loongarch.lasx.xvfrecip.s" => "__builtin_lasx_xvfrecip_s", + "llvm.loongarch.lasx.xvfrint.d" => "__builtin_lasx_xvfrint_d", + "llvm.loongarch.lasx.xvfrint.s" => "__builtin_lasx_xvfrint_s", + "llvm.loongarch.lasx.xvfrintrm.d" => "__builtin_lasx_xvfrintrm_d", + "llvm.loongarch.lasx.xvfrintrm.s" => "__builtin_lasx_xvfrintrm_s", + "llvm.loongarch.lasx.xvfrintrne.d" => "__builtin_lasx_xvfrintrne_d", + "llvm.loongarch.lasx.xvfrintrne.s" => "__builtin_lasx_xvfrintrne_s", 
+ "llvm.loongarch.lasx.xvfrintrp.d" => "__builtin_lasx_xvfrintrp_d", + "llvm.loongarch.lasx.xvfrintrp.s" => "__builtin_lasx_xvfrintrp_s", + "llvm.loongarch.lasx.xvfrintrz.d" => "__builtin_lasx_xvfrintrz_d", + "llvm.loongarch.lasx.xvfrintrz.s" => "__builtin_lasx_xvfrintrz_s", + "llvm.loongarch.lasx.xvfrsqrt.d" => "__builtin_lasx_xvfrsqrt_d", + "llvm.loongarch.lasx.xvfrsqrt.s" => "__builtin_lasx_xvfrsqrt_s", + "llvm.loongarch.lasx.xvfrstp.b" => "__builtin_lasx_xvfrstp_b", + "llvm.loongarch.lasx.xvfrstp.h" => "__builtin_lasx_xvfrstp_h", + "llvm.loongarch.lasx.xvfrstpi.b" => "__builtin_lasx_xvfrstpi_b", + "llvm.loongarch.lasx.xvfrstpi.h" => "__builtin_lasx_xvfrstpi_h", + "llvm.loongarch.lasx.xvfsqrt.d" => "__builtin_lasx_xvfsqrt_d", + "llvm.loongarch.lasx.xvfsqrt.s" => "__builtin_lasx_xvfsqrt_s", + "llvm.loongarch.lasx.xvfsub.d" => "__builtin_lasx_xvfsub_d", + "llvm.loongarch.lasx.xvfsub.s" => "__builtin_lasx_xvfsub_s", + "llvm.loongarch.lasx.xvftint.l.d" => "__builtin_lasx_xvftint_l_d", + "llvm.loongarch.lasx.xvftint.lu.d" => "__builtin_lasx_xvftint_lu_d", + "llvm.loongarch.lasx.xvftint.w.d" => "__builtin_lasx_xvftint_w_d", + "llvm.loongarch.lasx.xvftint.w.s" => "__builtin_lasx_xvftint_w_s", + "llvm.loongarch.lasx.xvftint.wu.s" => "__builtin_lasx_xvftint_wu_s", + "llvm.loongarch.lasx.xvftinth.l.s" => "__builtin_lasx_xvftinth_l_s", + "llvm.loongarch.lasx.xvftintl.l.s" => "__builtin_lasx_xvftintl_l_s", + "llvm.loongarch.lasx.xvftintrm.l.d" => "__builtin_lasx_xvftintrm_l_d", + "llvm.loongarch.lasx.xvftintrm.w.d" => "__builtin_lasx_xvftintrm_w_d", + "llvm.loongarch.lasx.xvftintrm.w.s" => "__builtin_lasx_xvftintrm_w_s", + "llvm.loongarch.lasx.xvftintrmh.l.s" => "__builtin_lasx_xvftintrmh_l_s", + "llvm.loongarch.lasx.xvftintrml.l.s" => "__builtin_lasx_xvftintrml_l_s", + "llvm.loongarch.lasx.xvftintrne.l.d" => "__builtin_lasx_xvftintrne_l_d", + "llvm.loongarch.lasx.xvftintrne.w.d" => "__builtin_lasx_xvftintrne_w_d", + "llvm.loongarch.lasx.xvftintrne.w.s" => "__builtin_lasx_xvftintrne_w_s", + "llvm.loongarch.lasx.xvftintrneh.l.s" => "__builtin_lasx_xvftintrneh_l_s", + "llvm.loongarch.lasx.xvftintrnel.l.s" => "__builtin_lasx_xvftintrnel_l_s", + "llvm.loongarch.lasx.xvftintrp.l.d" => "__builtin_lasx_xvftintrp_l_d", + "llvm.loongarch.lasx.xvftintrp.w.d" => "__builtin_lasx_xvftintrp_w_d", + "llvm.loongarch.lasx.xvftintrp.w.s" => "__builtin_lasx_xvftintrp_w_s", + "llvm.loongarch.lasx.xvftintrph.l.s" => "__builtin_lasx_xvftintrph_l_s", + "llvm.loongarch.lasx.xvftintrpl.l.s" => "__builtin_lasx_xvftintrpl_l_s", + "llvm.loongarch.lasx.xvftintrz.l.d" => "__builtin_lasx_xvftintrz_l_d", + "llvm.loongarch.lasx.xvftintrz.lu.d" => "__builtin_lasx_xvftintrz_lu_d", + "llvm.loongarch.lasx.xvftintrz.w.d" => "__builtin_lasx_xvftintrz_w_d", + "llvm.loongarch.lasx.xvftintrz.w.s" => "__builtin_lasx_xvftintrz_w_s", + "llvm.loongarch.lasx.xvftintrz.wu.s" => "__builtin_lasx_xvftintrz_wu_s", + "llvm.loongarch.lasx.xvftintrzh.l.s" => "__builtin_lasx_xvftintrzh_l_s", + "llvm.loongarch.lasx.xvftintrzl.l.s" => "__builtin_lasx_xvftintrzl_l_s", + "llvm.loongarch.lasx.xvhaddw.d.w" => "__builtin_lasx_xvhaddw_d_w", + "llvm.loongarch.lasx.xvhaddw.du.wu" => "__builtin_lasx_xvhaddw_du_wu", + "llvm.loongarch.lasx.xvhaddw.h.b" => "__builtin_lasx_xvhaddw_h_b", + "llvm.loongarch.lasx.xvhaddw.hu.bu" => "__builtin_lasx_xvhaddw_hu_bu", + "llvm.loongarch.lasx.xvhaddw.q.d" => "__builtin_lasx_xvhaddw_q_d", + "llvm.loongarch.lasx.xvhaddw.qu.du" => "__builtin_lasx_xvhaddw_qu_du", + "llvm.loongarch.lasx.xvhaddw.w.h" => "__builtin_lasx_xvhaddw_w_h", + 
"llvm.loongarch.lasx.xvhaddw.wu.hu" => "__builtin_lasx_xvhaddw_wu_hu", + "llvm.loongarch.lasx.xvhsubw.d.w" => "__builtin_lasx_xvhsubw_d_w", + "llvm.loongarch.lasx.xvhsubw.du.wu" => "__builtin_lasx_xvhsubw_du_wu", + "llvm.loongarch.lasx.xvhsubw.h.b" => "__builtin_lasx_xvhsubw_h_b", + "llvm.loongarch.lasx.xvhsubw.hu.bu" => "__builtin_lasx_xvhsubw_hu_bu", + "llvm.loongarch.lasx.xvhsubw.q.d" => "__builtin_lasx_xvhsubw_q_d", + "llvm.loongarch.lasx.xvhsubw.qu.du" => "__builtin_lasx_xvhsubw_qu_du", + "llvm.loongarch.lasx.xvhsubw.w.h" => "__builtin_lasx_xvhsubw_w_h", + "llvm.loongarch.lasx.xvhsubw.wu.hu" => "__builtin_lasx_xvhsubw_wu_hu", + "llvm.loongarch.lasx.xvilvh.b" => "__builtin_lasx_xvilvh_b", + "llvm.loongarch.lasx.xvilvh.d" => "__builtin_lasx_xvilvh_d", + "llvm.loongarch.lasx.xvilvh.h" => "__builtin_lasx_xvilvh_h", + "llvm.loongarch.lasx.xvilvh.w" => "__builtin_lasx_xvilvh_w", + "llvm.loongarch.lasx.xvilvl.b" => "__builtin_lasx_xvilvl_b", + "llvm.loongarch.lasx.xvilvl.d" => "__builtin_lasx_xvilvl_d", + "llvm.loongarch.lasx.xvilvl.h" => "__builtin_lasx_xvilvl_h", + "llvm.loongarch.lasx.xvilvl.w" => "__builtin_lasx_xvilvl_w", + "llvm.loongarch.lasx.xvinsgr2vr.d" => "__builtin_lasx_xvinsgr2vr_d", + "llvm.loongarch.lasx.xvinsgr2vr.w" => "__builtin_lasx_xvinsgr2vr_w", + "llvm.loongarch.lasx.xvinsve0.d" => "__builtin_lasx_xvinsve0_d", + "llvm.loongarch.lasx.xvinsve0.w" => "__builtin_lasx_xvinsve0_w", + "llvm.loongarch.lasx.xvld" => "__builtin_lasx_xvld", + "llvm.loongarch.lasx.xvldi" => "__builtin_lasx_xvldi", + "llvm.loongarch.lasx.xvldrepl.b" => "__builtin_lasx_xvldrepl_b", + "llvm.loongarch.lasx.xvldrepl.d" => "__builtin_lasx_xvldrepl_d", + "llvm.loongarch.lasx.xvldrepl.h" => "__builtin_lasx_xvldrepl_h", + "llvm.loongarch.lasx.xvldrepl.w" => "__builtin_lasx_xvldrepl_w", + "llvm.loongarch.lasx.xvldx" => "__builtin_lasx_xvldx", + "llvm.loongarch.lasx.xvmadd.b" => "__builtin_lasx_xvmadd_b", + "llvm.loongarch.lasx.xvmadd.d" => "__builtin_lasx_xvmadd_d", + "llvm.loongarch.lasx.xvmadd.h" => "__builtin_lasx_xvmadd_h", + "llvm.loongarch.lasx.xvmadd.w" => "__builtin_lasx_xvmadd_w", + "llvm.loongarch.lasx.xvmaddwev.d.w" => "__builtin_lasx_xvmaddwev_d_w", + "llvm.loongarch.lasx.xvmaddwev.d.wu" => "__builtin_lasx_xvmaddwev_d_wu", + "llvm.loongarch.lasx.xvmaddwev.d.wu.w" => "__builtin_lasx_xvmaddwev_d_wu_w", + "llvm.loongarch.lasx.xvmaddwev.h.b" => "__builtin_lasx_xvmaddwev_h_b", + "llvm.loongarch.lasx.xvmaddwev.h.bu" => "__builtin_lasx_xvmaddwev_h_bu", + "llvm.loongarch.lasx.xvmaddwev.h.bu.b" => "__builtin_lasx_xvmaddwev_h_bu_b", + "llvm.loongarch.lasx.xvmaddwev.q.d" => "__builtin_lasx_xvmaddwev_q_d", + "llvm.loongarch.lasx.xvmaddwev.q.du" => "__builtin_lasx_xvmaddwev_q_du", + "llvm.loongarch.lasx.xvmaddwev.q.du.d" => "__builtin_lasx_xvmaddwev_q_du_d", + "llvm.loongarch.lasx.xvmaddwev.w.h" => "__builtin_lasx_xvmaddwev_w_h", + "llvm.loongarch.lasx.xvmaddwev.w.hu" => "__builtin_lasx_xvmaddwev_w_hu", + "llvm.loongarch.lasx.xvmaddwev.w.hu.h" => "__builtin_lasx_xvmaddwev_w_hu_h", + "llvm.loongarch.lasx.xvmaddwod.d.w" => "__builtin_lasx_xvmaddwod_d_w", + "llvm.loongarch.lasx.xvmaddwod.d.wu" => "__builtin_lasx_xvmaddwod_d_wu", + "llvm.loongarch.lasx.xvmaddwod.d.wu.w" => "__builtin_lasx_xvmaddwod_d_wu_w", + "llvm.loongarch.lasx.xvmaddwod.h.b" => "__builtin_lasx_xvmaddwod_h_b", + "llvm.loongarch.lasx.xvmaddwod.h.bu" => "__builtin_lasx_xvmaddwod_h_bu", + "llvm.loongarch.lasx.xvmaddwod.h.bu.b" => "__builtin_lasx_xvmaddwod_h_bu_b", + "llvm.loongarch.lasx.xvmaddwod.q.d" => "__builtin_lasx_xvmaddwod_q_d", + 
"llvm.loongarch.lasx.xvmaddwod.q.du" => "__builtin_lasx_xvmaddwod_q_du", + "llvm.loongarch.lasx.xvmaddwod.q.du.d" => "__builtin_lasx_xvmaddwod_q_du_d", + "llvm.loongarch.lasx.xvmaddwod.w.h" => "__builtin_lasx_xvmaddwod_w_h", + "llvm.loongarch.lasx.xvmaddwod.w.hu" => "__builtin_lasx_xvmaddwod_w_hu", + "llvm.loongarch.lasx.xvmaddwod.w.hu.h" => "__builtin_lasx_xvmaddwod_w_hu_h", + "llvm.loongarch.lasx.xvmax.b" => "__builtin_lasx_xvmax_b", + "llvm.loongarch.lasx.xvmax.bu" => "__builtin_lasx_xvmax_bu", + "llvm.loongarch.lasx.xvmax.d" => "__builtin_lasx_xvmax_d", + "llvm.loongarch.lasx.xvmax.du" => "__builtin_lasx_xvmax_du", + "llvm.loongarch.lasx.xvmax.h" => "__builtin_lasx_xvmax_h", + "llvm.loongarch.lasx.xvmax.hu" => "__builtin_lasx_xvmax_hu", + "llvm.loongarch.lasx.xvmax.w" => "__builtin_lasx_xvmax_w", + "llvm.loongarch.lasx.xvmax.wu" => "__builtin_lasx_xvmax_wu", + "llvm.loongarch.lasx.xvmaxi.b" => "__builtin_lasx_xvmaxi_b", + "llvm.loongarch.lasx.xvmaxi.bu" => "__builtin_lasx_xvmaxi_bu", + "llvm.loongarch.lasx.xvmaxi.d" => "__builtin_lasx_xvmaxi_d", + "llvm.loongarch.lasx.xvmaxi.du" => "__builtin_lasx_xvmaxi_du", + "llvm.loongarch.lasx.xvmaxi.h" => "__builtin_lasx_xvmaxi_h", + "llvm.loongarch.lasx.xvmaxi.hu" => "__builtin_lasx_xvmaxi_hu", + "llvm.loongarch.lasx.xvmaxi.w" => "__builtin_lasx_xvmaxi_w", + "llvm.loongarch.lasx.xvmaxi.wu" => "__builtin_lasx_xvmaxi_wu", + "llvm.loongarch.lasx.xvmin.b" => "__builtin_lasx_xvmin_b", + "llvm.loongarch.lasx.xvmin.bu" => "__builtin_lasx_xvmin_bu", + "llvm.loongarch.lasx.xvmin.d" => "__builtin_lasx_xvmin_d", + "llvm.loongarch.lasx.xvmin.du" => "__builtin_lasx_xvmin_du", + "llvm.loongarch.lasx.xvmin.h" => "__builtin_lasx_xvmin_h", + "llvm.loongarch.lasx.xvmin.hu" => "__builtin_lasx_xvmin_hu", + "llvm.loongarch.lasx.xvmin.w" => "__builtin_lasx_xvmin_w", + "llvm.loongarch.lasx.xvmin.wu" => "__builtin_lasx_xvmin_wu", + "llvm.loongarch.lasx.xvmini.b" => "__builtin_lasx_xvmini_b", + "llvm.loongarch.lasx.xvmini.bu" => "__builtin_lasx_xvmini_bu", + "llvm.loongarch.lasx.xvmini.d" => "__builtin_lasx_xvmini_d", + "llvm.loongarch.lasx.xvmini.du" => "__builtin_lasx_xvmini_du", + "llvm.loongarch.lasx.xvmini.h" => "__builtin_lasx_xvmini_h", + "llvm.loongarch.lasx.xvmini.hu" => "__builtin_lasx_xvmini_hu", + "llvm.loongarch.lasx.xvmini.w" => "__builtin_lasx_xvmini_w", + "llvm.loongarch.lasx.xvmini.wu" => "__builtin_lasx_xvmini_wu", + "llvm.loongarch.lasx.xvmod.b" => "__builtin_lasx_xvmod_b", + "llvm.loongarch.lasx.xvmod.bu" => "__builtin_lasx_xvmod_bu", + "llvm.loongarch.lasx.xvmod.d" => "__builtin_lasx_xvmod_d", + "llvm.loongarch.lasx.xvmod.du" => "__builtin_lasx_xvmod_du", + "llvm.loongarch.lasx.xvmod.h" => "__builtin_lasx_xvmod_h", + "llvm.loongarch.lasx.xvmod.hu" => "__builtin_lasx_xvmod_hu", + "llvm.loongarch.lasx.xvmod.w" => "__builtin_lasx_xvmod_w", + "llvm.loongarch.lasx.xvmod.wu" => "__builtin_lasx_xvmod_wu", + "llvm.loongarch.lasx.xvmskgez.b" => "__builtin_lasx_xvmskgez_b", + "llvm.loongarch.lasx.xvmskltz.b" => "__builtin_lasx_xvmskltz_b", + "llvm.loongarch.lasx.xvmskltz.d" => "__builtin_lasx_xvmskltz_d", + "llvm.loongarch.lasx.xvmskltz.h" => "__builtin_lasx_xvmskltz_h", + "llvm.loongarch.lasx.xvmskltz.w" => "__builtin_lasx_xvmskltz_w", + "llvm.loongarch.lasx.xvmsknz.b" => "__builtin_lasx_xvmsknz_b", + "llvm.loongarch.lasx.xvmsub.b" => "__builtin_lasx_xvmsub_b", + "llvm.loongarch.lasx.xvmsub.d" => "__builtin_lasx_xvmsub_d", + "llvm.loongarch.lasx.xvmsub.h" => "__builtin_lasx_xvmsub_h", + "llvm.loongarch.lasx.xvmsub.w" => "__builtin_lasx_xvmsub_w", + 
"llvm.loongarch.lasx.xvmuh.b" => "__builtin_lasx_xvmuh_b", + "llvm.loongarch.lasx.xvmuh.bu" => "__builtin_lasx_xvmuh_bu", + "llvm.loongarch.lasx.xvmuh.d" => "__builtin_lasx_xvmuh_d", + "llvm.loongarch.lasx.xvmuh.du" => "__builtin_lasx_xvmuh_du", + "llvm.loongarch.lasx.xvmuh.h" => "__builtin_lasx_xvmuh_h", + "llvm.loongarch.lasx.xvmuh.hu" => "__builtin_lasx_xvmuh_hu", + "llvm.loongarch.lasx.xvmuh.w" => "__builtin_lasx_xvmuh_w", + "llvm.loongarch.lasx.xvmuh.wu" => "__builtin_lasx_xvmuh_wu", + "llvm.loongarch.lasx.xvmul.b" => "__builtin_lasx_xvmul_b", + "llvm.loongarch.lasx.xvmul.d" => "__builtin_lasx_xvmul_d", + "llvm.loongarch.lasx.xvmul.h" => "__builtin_lasx_xvmul_h", + "llvm.loongarch.lasx.xvmul.w" => "__builtin_lasx_xvmul_w", + "llvm.loongarch.lasx.xvmulwev.d.w" => "__builtin_lasx_xvmulwev_d_w", + "llvm.loongarch.lasx.xvmulwev.d.wu" => "__builtin_lasx_xvmulwev_d_wu", + "llvm.loongarch.lasx.xvmulwev.d.wu.w" => "__builtin_lasx_xvmulwev_d_wu_w", + "llvm.loongarch.lasx.xvmulwev.h.b" => "__builtin_lasx_xvmulwev_h_b", + "llvm.loongarch.lasx.xvmulwev.h.bu" => "__builtin_lasx_xvmulwev_h_bu", + "llvm.loongarch.lasx.xvmulwev.h.bu.b" => "__builtin_lasx_xvmulwev_h_bu_b", + "llvm.loongarch.lasx.xvmulwev.q.d" => "__builtin_lasx_xvmulwev_q_d", + "llvm.loongarch.lasx.xvmulwev.q.du" => "__builtin_lasx_xvmulwev_q_du", + "llvm.loongarch.lasx.xvmulwev.q.du.d" => "__builtin_lasx_xvmulwev_q_du_d", + "llvm.loongarch.lasx.xvmulwev.w.h" => "__builtin_lasx_xvmulwev_w_h", + "llvm.loongarch.lasx.xvmulwev.w.hu" => "__builtin_lasx_xvmulwev_w_hu", + "llvm.loongarch.lasx.xvmulwev.w.hu.h" => "__builtin_lasx_xvmulwev_w_hu_h", + "llvm.loongarch.lasx.xvmulwod.d.w" => "__builtin_lasx_xvmulwod_d_w", + "llvm.loongarch.lasx.xvmulwod.d.wu" => "__builtin_lasx_xvmulwod_d_wu", + "llvm.loongarch.lasx.xvmulwod.d.wu.w" => "__builtin_lasx_xvmulwod_d_wu_w", + "llvm.loongarch.lasx.xvmulwod.h.b" => "__builtin_lasx_xvmulwod_h_b", + "llvm.loongarch.lasx.xvmulwod.h.bu" => "__builtin_lasx_xvmulwod_h_bu", + "llvm.loongarch.lasx.xvmulwod.h.bu.b" => "__builtin_lasx_xvmulwod_h_bu_b", + "llvm.loongarch.lasx.xvmulwod.q.d" => "__builtin_lasx_xvmulwod_q_d", + "llvm.loongarch.lasx.xvmulwod.q.du" => "__builtin_lasx_xvmulwod_q_du", + "llvm.loongarch.lasx.xvmulwod.q.du.d" => "__builtin_lasx_xvmulwod_q_du_d", + "llvm.loongarch.lasx.xvmulwod.w.h" => "__builtin_lasx_xvmulwod_w_h", + "llvm.loongarch.lasx.xvmulwod.w.hu" => "__builtin_lasx_xvmulwod_w_hu", + "llvm.loongarch.lasx.xvmulwod.w.hu.h" => "__builtin_lasx_xvmulwod_w_hu_h", + "llvm.loongarch.lasx.xvneg.b" => "__builtin_lasx_xvneg_b", + "llvm.loongarch.lasx.xvneg.d" => "__builtin_lasx_xvneg_d", + "llvm.loongarch.lasx.xvneg.h" => "__builtin_lasx_xvneg_h", + "llvm.loongarch.lasx.xvneg.w" => "__builtin_lasx_xvneg_w", + "llvm.loongarch.lasx.xvnor.v" => "__builtin_lasx_xvnor_v", + "llvm.loongarch.lasx.xvnori.b" => "__builtin_lasx_xvnori_b", + "llvm.loongarch.lasx.xvor.v" => "__builtin_lasx_xvor_v", + "llvm.loongarch.lasx.xvori.b" => "__builtin_lasx_xvori_b", + "llvm.loongarch.lasx.xvorn.v" => "__builtin_lasx_xvorn_v", + "llvm.loongarch.lasx.xvpackev.b" => "__builtin_lasx_xvpackev_b", + "llvm.loongarch.lasx.xvpackev.d" => "__builtin_lasx_xvpackev_d", + "llvm.loongarch.lasx.xvpackev.h" => "__builtin_lasx_xvpackev_h", + "llvm.loongarch.lasx.xvpackev.w" => "__builtin_lasx_xvpackev_w", + "llvm.loongarch.lasx.xvpackod.b" => "__builtin_lasx_xvpackod_b", + "llvm.loongarch.lasx.xvpackod.d" => "__builtin_lasx_xvpackod_d", + "llvm.loongarch.lasx.xvpackod.h" => "__builtin_lasx_xvpackod_h", + 
"llvm.loongarch.lasx.xvpackod.w" => "__builtin_lasx_xvpackod_w", + "llvm.loongarch.lasx.xvpcnt.b" => "__builtin_lasx_xvpcnt_b", + "llvm.loongarch.lasx.xvpcnt.d" => "__builtin_lasx_xvpcnt_d", + "llvm.loongarch.lasx.xvpcnt.h" => "__builtin_lasx_xvpcnt_h", + "llvm.loongarch.lasx.xvpcnt.w" => "__builtin_lasx_xvpcnt_w", + "llvm.loongarch.lasx.xvperm.w" => "__builtin_lasx_xvperm_w", + "llvm.loongarch.lasx.xvpermi.d" => "__builtin_lasx_xvpermi_d", + "llvm.loongarch.lasx.xvpermi.q" => "__builtin_lasx_xvpermi_q", + "llvm.loongarch.lasx.xvpermi.w" => "__builtin_lasx_xvpermi_w", + "llvm.loongarch.lasx.xvpickev.b" => "__builtin_lasx_xvpickev_b", + "llvm.loongarch.lasx.xvpickev.d" => "__builtin_lasx_xvpickev_d", + "llvm.loongarch.lasx.xvpickev.h" => "__builtin_lasx_xvpickev_h", + "llvm.loongarch.lasx.xvpickev.w" => "__builtin_lasx_xvpickev_w", + "llvm.loongarch.lasx.xvpickod.b" => "__builtin_lasx_xvpickod_b", + "llvm.loongarch.lasx.xvpickod.d" => "__builtin_lasx_xvpickod_d", + "llvm.loongarch.lasx.xvpickod.h" => "__builtin_lasx_xvpickod_h", + "llvm.loongarch.lasx.xvpickod.w" => "__builtin_lasx_xvpickod_w", + "llvm.loongarch.lasx.xvpickve.d" => "__builtin_lasx_xvpickve_d", + "llvm.loongarch.lasx.xvpickve.d.f" => "__builtin_lasx_xvpickve_d_f", + "llvm.loongarch.lasx.xvpickve.w" => "__builtin_lasx_xvpickve_w", + "llvm.loongarch.lasx.xvpickve.w.f" => "__builtin_lasx_xvpickve_w_f", + "llvm.loongarch.lasx.xvpickve2gr.d" => "__builtin_lasx_xvpickve2gr_d", + "llvm.loongarch.lasx.xvpickve2gr.du" => "__builtin_lasx_xvpickve2gr_du", + "llvm.loongarch.lasx.xvpickve2gr.w" => "__builtin_lasx_xvpickve2gr_w", + "llvm.loongarch.lasx.xvpickve2gr.wu" => "__builtin_lasx_xvpickve2gr_wu", + "llvm.loongarch.lasx.xvrepl128vei.b" => "__builtin_lasx_xvrepl128vei_b", + "llvm.loongarch.lasx.xvrepl128vei.d" => "__builtin_lasx_xvrepl128vei_d", + "llvm.loongarch.lasx.xvrepl128vei.h" => "__builtin_lasx_xvrepl128vei_h", + "llvm.loongarch.lasx.xvrepl128vei.w" => "__builtin_lasx_xvrepl128vei_w", + "llvm.loongarch.lasx.xvreplgr2vr.b" => "__builtin_lasx_xvreplgr2vr_b", + "llvm.loongarch.lasx.xvreplgr2vr.d" => "__builtin_lasx_xvreplgr2vr_d", + "llvm.loongarch.lasx.xvreplgr2vr.h" => "__builtin_lasx_xvreplgr2vr_h", + "llvm.loongarch.lasx.xvreplgr2vr.w" => "__builtin_lasx_xvreplgr2vr_w", + "llvm.loongarch.lasx.xvrepli.b" => "__builtin_lasx_xvrepli_b", + "llvm.loongarch.lasx.xvrepli.d" => "__builtin_lasx_xvrepli_d", + "llvm.loongarch.lasx.xvrepli.h" => "__builtin_lasx_xvrepli_h", + "llvm.loongarch.lasx.xvrepli.w" => "__builtin_lasx_xvrepli_w", + "llvm.loongarch.lasx.xvreplve.b" => "__builtin_lasx_xvreplve_b", + "llvm.loongarch.lasx.xvreplve.d" => "__builtin_lasx_xvreplve_d", + "llvm.loongarch.lasx.xvreplve.h" => "__builtin_lasx_xvreplve_h", + "llvm.loongarch.lasx.xvreplve.w" => "__builtin_lasx_xvreplve_w", + "llvm.loongarch.lasx.xvreplve0.b" => "__builtin_lasx_xvreplve0_b", + "llvm.loongarch.lasx.xvreplve0.d" => "__builtin_lasx_xvreplve0_d", + "llvm.loongarch.lasx.xvreplve0.h" => "__builtin_lasx_xvreplve0_h", + "llvm.loongarch.lasx.xvreplve0.q" => "__builtin_lasx_xvreplve0_q", + "llvm.loongarch.lasx.xvreplve0.w" => "__builtin_lasx_xvreplve0_w", + "llvm.loongarch.lasx.xvrotr.b" => "__builtin_lasx_xvrotr_b", + "llvm.loongarch.lasx.xvrotr.d" => "__builtin_lasx_xvrotr_d", + "llvm.loongarch.lasx.xvrotr.h" => "__builtin_lasx_xvrotr_h", + "llvm.loongarch.lasx.xvrotr.w" => "__builtin_lasx_xvrotr_w", + "llvm.loongarch.lasx.xvrotri.b" => "__builtin_lasx_xvrotri_b", + "llvm.loongarch.lasx.xvrotri.d" => "__builtin_lasx_xvrotri_d", + 
"llvm.loongarch.lasx.xvrotri.h" => "__builtin_lasx_xvrotri_h", + "llvm.loongarch.lasx.xvrotri.w" => "__builtin_lasx_xvrotri_w", + "llvm.loongarch.lasx.xvsadd.b" => "__builtin_lasx_xvsadd_b", + "llvm.loongarch.lasx.xvsadd.bu" => "__builtin_lasx_xvsadd_bu", + "llvm.loongarch.lasx.xvsadd.d" => "__builtin_lasx_xvsadd_d", + "llvm.loongarch.lasx.xvsadd.du" => "__builtin_lasx_xvsadd_du", + "llvm.loongarch.lasx.xvsadd.h" => "__builtin_lasx_xvsadd_h", + "llvm.loongarch.lasx.xvsadd.hu" => "__builtin_lasx_xvsadd_hu", + "llvm.loongarch.lasx.xvsadd.w" => "__builtin_lasx_xvsadd_w", + "llvm.loongarch.lasx.xvsadd.wu" => "__builtin_lasx_xvsadd_wu", + "llvm.loongarch.lasx.xvsat.b" => "__builtin_lasx_xvsat_b", + "llvm.loongarch.lasx.xvsat.bu" => "__builtin_lasx_xvsat_bu", + "llvm.loongarch.lasx.xvsat.d" => "__builtin_lasx_xvsat_d", + "llvm.loongarch.lasx.xvsat.du" => "__builtin_lasx_xvsat_du", + "llvm.loongarch.lasx.xvsat.h" => "__builtin_lasx_xvsat_h", + "llvm.loongarch.lasx.xvsat.hu" => "__builtin_lasx_xvsat_hu", + "llvm.loongarch.lasx.xvsat.w" => "__builtin_lasx_xvsat_w", + "llvm.loongarch.lasx.xvsat.wu" => "__builtin_lasx_xvsat_wu", + "llvm.loongarch.lasx.xvseq.b" => "__builtin_lasx_xvseq_b", + "llvm.loongarch.lasx.xvseq.d" => "__builtin_lasx_xvseq_d", + "llvm.loongarch.lasx.xvseq.h" => "__builtin_lasx_xvseq_h", + "llvm.loongarch.lasx.xvseq.w" => "__builtin_lasx_xvseq_w", + "llvm.loongarch.lasx.xvseqi.b" => "__builtin_lasx_xvseqi_b", + "llvm.loongarch.lasx.xvseqi.d" => "__builtin_lasx_xvseqi_d", + "llvm.loongarch.lasx.xvseqi.h" => "__builtin_lasx_xvseqi_h", + "llvm.loongarch.lasx.xvseqi.w" => "__builtin_lasx_xvseqi_w", + "llvm.loongarch.lasx.xvshuf.b" => "__builtin_lasx_xvshuf_b", + "llvm.loongarch.lasx.xvshuf.d" => "__builtin_lasx_xvshuf_d", + "llvm.loongarch.lasx.xvshuf.h" => "__builtin_lasx_xvshuf_h", + "llvm.loongarch.lasx.xvshuf.w" => "__builtin_lasx_xvshuf_w", + "llvm.loongarch.lasx.xvshuf4i.b" => "__builtin_lasx_xvshuf4i_b", + "llvm.loongarch.lasx.xvshuf4i.d" => "__builtin_lasx_xvshuf4i_d", + "llvm.loongarch.lasx.xvshuf4i.h" => "__builtin_lasx_xvshuf4i_h", + "llvm.loongarch.lasx.xvshuf4i.w" => "__builtin_lasx_xvshuf4i_w", + "llvm.loongarch.lasx.xvsigncov.b" => "__builtin_lasx_xvsigncov_b", + "llvm.loongarch.lasx.xvsigncov.d" => "__builtin_lasx_xvsigncov_d", + "llvm.loongarch.lasx.xvsigncov.h" => "__builtin_lasx_xvsigncov_h", + "llvm.loongarch.lasx.xvsigncov.w" => "__builtin_lasx_xvsigncov_w", + "llvm.loongarch.lasx.xvsle.b" => "__builtin_lasx_xvsle_b", + "llvm.loongarch.lasx.xvsle.bu" => "__builtin_lasx_xvsle_bu", + "llvm.loongarch.lasx.xvsle.d" => "__builtin_lasx_xvsle_d", + "llvm.loongarch.lasx.xvsle.du" => "__builtin_lasx_xvsle_du", + "llvm.loongarch.lasx.xvsle.h" => "__builtin_lasx_xvsle_h", + "llvm.loongarch.lasx.xvsle.hu" => "__builtin_lasx_xvsle_hu", + "llvm.loongarch.lasx.xvsle.w" => "__builtin_lasx_xvsle_w", + "llvm.loongarch.lasx.xvsle.wu" => "__builtin_lasx_xvsle_wu", + "llvm.loongarch.lasx.xvslei.b" => "__builtin_lasx_xvslei_b", + "llvm.loongarch.lasx.xvslei.bu" => "__builtin_lasx_xvslei_bu", + "llvm.loongarch.lasx.xvslei.d" => "__builtin_lasx_xvslei_d", + "llvm.loongarch.lasx.xvslei.du" => "__builtin_lasx_xvslei_du", + "llvm.loongarch.lasx.xvslei.h" => "__builtin_lasx_xvslei_h", + "llvm.loongarch.lasx.xvslei.hu" => "__builtin_lasx_xvslei_hu", + "llvm.loongarch.lasx.xvslei.w" => "__builtin_lasx_xvslei_w", + "llvm.loongarch.lasx.xvslei.wu" => "__builtin_lasx_xvslei_wu", + "llvm.loongarch.lasx.xvsll.b" => "__builtin_lasx_xvsll_b", + "llvm.loongarch.lasx.xvsll.d" => 
"__builtin_lasx_xvsll_d", + "llvm.loongarch.lasx.xvsll.h" => "__builtin_lasx_xvsll_h", + "llvm.loongarch.lasx.xvsll.w" => "__builtin_lasx_xvsll_w", + "llvm.loongarch.lasx.xvslli.b" => "__builtin_lasx_xvslli_b", + "llvm.loongarch.lasx.xvslli.d" => "__builtin_lasx_xvslli_d", + "llvm.loongarch.lasx.xvslli.h" => "__builtin_lasx_xvslli_h", + "llvm.loongarch.lasx.xvslli.w" => "__builtin_lasx_xvslli_w", + "llvm.loongarch.lasx.xvsllwil.d.w" => "__builtin_lasx_xvsllwil_d_w", + "llvm.loongarch.lasx.xvsllwil.du.wu" => "__builtin_lasx_xvsllwil_du_wu", + "llvm.loongarch.lasx.xvsllwil.h.b" => "__builtin_lasx_xvsllwil_h_b", + "llvm.loongarch.lasx.xvsllwil.hu.bu" => "__builtin_lasx_xvsllwil_hu_bu", + "llvm.loongarch.lasx.xvsllwil.w.h" => "__builtin_lasx_xvsllwil_w_h", + "llvm.loongarch.lasx.xvsllwil.wu.hu" => "__builtin_lasx_xvsllwil_wu_hu", + "llvm.loongarch.lasx.xvslt.b" => "__builtin_lasx_xvslt_b", + "llvm.loongarch.lasx.xvslt.bu" => "__builtin_lasx_xvslt_bu", + "llvm.loongarch.lasx.xvslt.d" => "__builtin_lasx_xvslt_d", + "llvm.loongarch.lasx.xvslt.du" => "__builtin_lasx_xvslt_du", + "llvm.loongarch.lasx.xvslt.h" => "__builtin_lasx_xvslt_h", + "llvm.loongarch.lasx.xvslt.hu" => "__builtin_lasx_xvslt_hu", + "llvm.loongarch.lasx.xvslt.w" => "__builtin_lasx_xvslt_w", + "llvm.loongarch.lasx.xvslt.wu" => "__builtin_lasx_xvslt_wu", + "llvm.loongarch.lasx.xvslti.b" => "__builtin_lasx_xvslti_b", + "llvm.loongarch.lasx.xvslti.bu" => "__builtin_lasx_xvslti_bu", + "llvm.loongarch.lasx.xvslti.d" => "__builtin_lasx_xvslti_d", + "llvm.loongarch.lasx.xvslti.du" => "__builtin_lasx_xvslti_du", + "llvm.loongarch.lasx.xvslti.h" => "__builtin_lasx_xvslti_h", + "llvm.loongarch.lasx.xvslti.hu" => "__builtin_lasx_xvslti_hu", + "llvm.loongarch.lasx.xvslti.w" => "__builtin_lasx_xvslti_w", + "llvm.loongarch.lasx.xvslti.wu" => "__builtin_lasx_xvslti_wu", + "llvm.loongarch.lasx.xvsra.b" => "__builtin_lasx_xvsra_b", + "llvm.loongarch.lasx.xvsra.d" => "__builtin_lasx_xvsra_d", + "llvm.loongarch.lasx.xvsra.h" => "__builtin_lasx_xvsra_h", + "llvm.loongarch.lasx.xvsra.w" => "__builtin_lasx_xvsra_w", + "llvm.loongarch.lasx.xvsrai.b" => "__builtin_lasx_xvsrai_b", + "llvm.loongarch.lasx.xvsrai.d" => "__builtin_lasx_xvsrai_d", + "llvm.loongarch.lasx.xvsrai.h" => "__builtin_lasx_xvsrai_h", + "llvm.loongarch.lasx.xvsrai.w" => "__builtin_lasx_xvsrai_w", + "llvm.loongarch.lasx.xvsran.b.h" => "__builtin_lasx_xvsran_b_h", + "llvm.loongarch.lasx.xvsran.h.w" => "__builtin_lasx_xvsran_h_w", + "llvm.loongarch.lasx.xvsran.w.d" => "__builtin_lasx_xvsran_w_d", + "llvm.loongarch.lasx.xvsrani.b.h" => "__builtin_lasx_xvsrani_b_h", + "llvm.loongarch.lasx.xvsrani.d.q" => "__builtin_lasx_xvsrani_d_q", + "llvm.loongarch.lasx.xvsrani.h.w" => "__builtin_lasx_xvsrani_h_w", + "llvm.loongarch.lasx.xvsrani.w.d" => "__builtin_lasx_xvsrani_w_d", + "llvm.loongarch.lasx.xvsrar.b" => "__builtin_lasx_xvsrar_b", + "llvm.loongarch.lasx.xvsrar.d" => "__builtin_lasx_xvsrar_d", + "llvm.loongarch.lasx.xvsrar.h" => "__builtin_lasx_xvsrar_h", + "llvm.loongarch.lasx.xvsrar.w" => "__builtin_lasx_xvsrar_w", + "llvm.loongarch.lasx.xvsrari.b" => "__builtin_lasx_xvsrari_b", + "llvm.loongarch.lasx.xvsrari.d" => "__builtin_lasx_xvsrari_d", + "llvm.loongarch.lasx.xvsrari.h" => "__builtin_lasx_xvsrari_h", + "llvm.loongarch.lasx.xvsrari.w" => "__builtin_lasx_xvsrari_w", + "llvm.loongarch.lasx.xvsrarn.b.h" => "__builtin_lasx_xvsrarn_b_h", + "llvm.loongarch.lasx.xvsrarn.h.w" => "__builtin_lasx_xvsrarn_h_w", + "llvm.loongarch.lasx.xvsrarn.w.d" => "__builtin_lasx_xvsrarn_w_d", + 
"llvm.loongarch.lasx.xvsrarni.b.h" => "__builtin_lasx_xvsrarni_b_h", + "llvm.loongarch.lasx.xvsrarni.d.q" => "__builtin_lasx_xvsrarni_d_q", + "llvm.loongarch.lasx.xvsrarni.h.w" => "__builtin_lasx_xvsrarni_h_w", + "llvm.loongarch.lasx.xvsrarni.w.d" => "__builtin_lasx_xvsrarni_w_d", + "llvm.loongarch.lasx.xvsrl.b" => "__builtin_lasx_xvsrl_b", + "llvm.loongarch.lasx.xvsrl.d" => "__builtin_lasx_xvsrl_d", + "llvm.loongarch.lasx.xvsrl.h" => "__builtin_lasx_xvsrl_h", + "llvm.loongarch.lasx.xvsrl.w" => "__builtin_lasx_xvsrl_w", + "llvm.loongarch.lasx.xvsrli.b" => "__builtin_lasx_xvsrli_b", + "llvm.loongarch.lasx.xvsrli.d" => "__builtin_lasx_xvsrli_d", + "llvm.loongarch.lasx.xvsrli.h" => "__builtin_lasx_xvsrli_h", + "llvm.loongarch.lasx.xvsrli.w" => "__builtin_lasx_xvsrli_w", + "llvm.loongarch.lasx.xvsrln.b.h" => "__builtin_lasx_xvsrln_b_h", + "llvm.loongarch.lasx.xvsrln.h.w" => "__builtin_lasx_xvsrln_h_w", + "llvm.loongarch.lasx.xvsrln.w.d" => "__builtin_lasx_xvsrln_w_d", + "llvm.loongarch.lasx.xvsrlni.b.h" => "__builtin_lasx_xvsrlni_b_h", + "llvm.loongarch.lasx.xvsrlni.d.q" => "__builtin_lasx_xvsrlni_d_q", + "llvm.loongarch.lasx.xvsrlni.h.w" => "__builtin_lasx_xvsrlni_h_w", + "llvm.loongarch.lasx.xvsrlni.w.d" => "__builtin_lasx_xvsrlni_w_d", + "llvm.loongarch.lasx.xvsrlr.b" => "__builtin_lasx_xvsrlr_b", + "llvm.loongarch.lasx.xvsrlr.d" => "__builtin_lasx_xvsrlr_d", + "llvm.loongarch.lasx.xvsrlr.h" => "__builtin_lasx_xvsrlr_h", + "llvm.loongarch.lasx.xvsrlr.w" => "__builtin_lasx_xvsrlr_w", + "llvm.loongarch.lasx.xvsrlri.b" => "__builtin_lasx_xvsrlri_b", + "llvm.loongarch.lasx.xvsrlri.d" => "__builtin_lasx_xvsrlri_d", + "llvm.loongarch.lasx.xvsrlri.h" => "__builtin_lasx_xvsrlri_h", + "llvm.loongarch.lasx.xvsrlri.w" => "__builtin_lasx_xvsrlri_w", + "llvm.loongarch.lasx.xvsrlrn.b.h" => "__builtin_lasx_xvsrlrn_b_h", + "llvm.loongarch.lasx.xvsrlrn.h.w" => "__builtin_lasx_xvsrlrn_h_w", + "llvm.loongarch.lasx.xvsrlrn.w.d" => "__builtin_lasx_xvsrlrn_w_d", + "llvm.loongarch.lasx.xvsrlrni.b.h" => "__builtin_lasx_xvsrlrni_b_h", + "llvm.loongarch.lasx.xvsrlrni.d.q" => "__builtin_lasx_xvsrlrni_d_q", + "llvm.loongarch.lasx.xvsrlrni.h.w" => "__builtin_lasx_xvsrlrni_h_w", + "llvm.loongarch.lasx.xvsrlrni.w.d" => "__builtin_lasx_xvsrlrni_w_d", + "llvm.loongarch.lasx.xvssran.b.h" => "__builtin_lasx_xvssran_b_h", + "llvm.loongarch.lasx.xvssran.bu.h" => "__builtin_lasx_xvssran_bu_h", + "llvm.loongarch.lasx.xvssran.h.w" => "__builtin_lasx_xvssran_h_w", + "llvm.loongarch.lasx.xvssran.hu.w" => "__builtin_lasx_xvssran_hu_w", + "llvm.loongarch.lasx.xvssran.w.d" => "__builtin_lasx_xvssran_w_d", + "llvm.loongarch.lasx.xvssran.wu.d" => "__builtin_lasx_xvssran_wu_d", + "llvm.loongarch.lasx.xvssrani.b.h" => "__builtin_lasx_xvssrani_b_h", + "llvm.loongarch.lasx.xvssrani.bu.h" => "__builtin_lasx_xvssrani_bu_h", + "llvm.loongarch.lasx.xvssrani.d.q" => "__builtin_lasx_xvssrani_d_q", + "llvm.loongarch.lasx.xvssrani.du.q" => "__builtin_lasx_xvssrani_du_q", + "llvm.loongarch.lasx.xvssrani.h.w" => "__builtin_lasx_xvssrani_h_w", + "llvm.loongarch.lasx.xvssrani.hu.w" => "__builtin_lasx_xvssrani_hu_w", + "llvm.loongarch.lasx.xvssrani.w.d" => "__builtin_lasx_xvssrani_w_d", + "llvm.loongarch.lasx.xvssrani.wu.d" => "__builtin_lasx_xvssrani_wu_d", + "llvm.loongarch.lasx.xvssrarn.b.h" => "__builtin_lasx_xvssrarn_b_h", + "llvm.loongarch.lasx.xvssrarn.bu.h" => "__builtin_lasx_xvssrarn_bu_h", + "llvm.loongarch.lasx.xvssrarn.h.w" => "__builtin_lasx_xvssrarn_h_w", + "llvm.loongarch.lasx.xvssrarn.hu.w" => "__builtin_lasx_xvssrarn_hu_w", + 
"llvm.loongarch.lasx.xvssrarn.w.d" => "__builtin_lasx_xvssrarn_w_d", + "llvm.loongarch.lasx.xvssrarn.wu.d" => "__builtin_lasx_xvssrarn_wu_d", + "llvm.loongarch.lasx.xvssrarni.b.h" => "__builtin_lasx_xvssrarni_b_h", + "llvm.loongarch.lasx.xvssrarni.bu.h" => "__builtin_lasx_xvssrarni_bu_h", + "llvm.loongarch.lasx.xvssrarni.d.q" => "__builtin_lasx_xvssrarni_d_q", + "llvm.loongarch.lasx.xvssrarni.du.q" => "__builtin_lasx_xvssrarni_du_q", + "llvm.loongarch.lasx.xvssrarni.h.w" => "__builtin_lasx_xvssrarni_h_w", + "llvm.loongarch.lasx.xvssrarni.hu.w" => "__builtin_lasx_xvssrarni_hu_w", + "llvm.loongarch.lasx.xvssrarni.w.d" => "__builtin_lasx_xvssrarni_w_d", + "llvm.loongarch.lasx.xvssrarni.wu.d" => "__builtin_lasx_xvssrarni_wu_d", + "llvm.loongarch.lasx.xvssrln.b.h" => "__builtin_lasx_xvssrln_b_h", + "llvm.loongarch.lasx.xvssrln.bu.h" => "__builtin_lasx_xvssrln_bu_h", + "llvm.loongarch.lasx.xvssrln.h.w" => "__builtin_lasx_xvssrln_h_w", + "llvm.loongarch.lasx.xvssrln.hu.w" => "__builtin_lasx_xvssrln_hu_w", + "llvm.loongarch.lasx.xvssrln.w.d" => "__builtin_lasx_xvssrln_w_d", + "llvm.loongarch.lasx.xvssrln.wu.d" => "__builtin_lasx_xvssrln_wu_d", + "llvm.loongarch.lasx.xvssrlni.b.h" => "__builtin_lasx_xvssrlni_b_h", + "llvm.loongarch.lasx.xvssrlni.bu.h" => "__builtin_lasx_xvssrlni_bu_h", + "llvm.loongarch.lasx.xvssrlni.d.q" => "__builtin_lasx_xvssrlni_d_q", + "llvm.loongarch.lasx.xvssrlni.du.q" => "__builtin_lasx_xvssrlni_du_q", + "llvm.loongarch.lasx.xvssrlni.h.w" => "__builtin_lasx_xvssrlni_h_w", + "llvm.loongarch.lasx.xvssrlni.hu.w" => "__builtin_lasx_xvssrlni_hu_w", + "llvm.loongarch.lasx.xvssrlni.w.d" => "__builtin_lasx_xvssrlni_w_d", + "llvm.loongarch.lasx.xvssrlni.wu.d" => "__builtin_lasx_xvssrlni_wu_d", + "llvm.loongarch.lasx.xvssrlrn.b.h" => "__builtin_lasx_xvssrlrn_b_h", + "llvm.loongarch.lasx.xvssrlrn.bu.h" => "__builtin_lasx_xvssrlrn_bu_h", + "llvm.loongarch.lasx.xvssrlrn.h.w" => "__builtin_lasx_xvssrlrn_h_w", + "llvm.loongarch.lasx.xvssrlrn.hu.w" => "__builtin_lasx_xvssrlrn_hu_w", + "llvm.loongarch.lasx.xvssrlrn.w.d" => "__builtin_lasx_xvssrlrn_w_d", + "llvm.loongarch.lasx.xvssrlrn.wu.d" => "__builtin_lasx_xvssrlrn_wu_d", + "llvm.loongarch.lasx.xvssrlrni.b.h" => "__builtin_lasx_xvssrlrni_b_h", + "llvm.loongarch.lasx.xvssrlrni.bu.h" => "__builtin_lasx_xvssrlrni_bu_h", + "llvm.loongarch.lasx.xvssrlrni.d.q" => "__builtin_lasx_xvssrlrni_d_q", + "llvm.loongarch.lasx.xvssrlrni.du.q" => "__builtin_lasx_xvssrlrni_du_q", + "llvm.loongarch.lasx.xvssrlrni.h.w" => "__builtin_lasx_xvssrlrni_h_w", + "llvm.loongarch.lasx.xvssrlrni.hu.w" => "__builtin_lasx_xvssrlrni_hu_w", + "llvm.loongarch.lasx.xvssrlrni.w.d" => "__builtin_lasx_xvssrlrni_w_d", + "llvm.loongarch.lasx.xvssrlrni.wu.d" => "__builtin_lasx_xvssrlrni_wu_d", + "llvm.loongarch.lasx.xvssub.b" => "__builtin_lasx_xvssub_b", + "llvm.loongarch.lasx.xvssub.bu" => "__builtin_lasx_xvssub_bu", + "llvm.loongarch.lasx.xvssub.d" => "__builtin_lasx_xvssub_d", + "llvm.loongarch.lasx.xvssub.du" => "__builtin_lasx_xvssub_du", + "llvm.loongarch.lasx.xvssub.h" => "__builtin_lasx_xvssub_h", + "llvm.loongarch.lasx.xvssub.hu" => "__builtin_lasx_xvssub_hu", + "llvm.loongarch.lasx.xvssub.w" => "__builtin_lasx_xvssub_w", + "llvm.loongarch.lasx.xvssub.wu" => "__builtin_lasx_xvssub_wu", + "llvm.loongarch.lasx.xvst" => "__builtin_lasx_xvst", + "llvm.loongarch.lasx.xvstelm.b" => "__builtin_lasx_xvstelm_b", + "llvm.loongarch.lasx.xvstelm.d" => "__builtin_lasx_xvstelm_d", + "llvm.loongarch.lasx.xvstelm.h" => "__builtin_lasx_xvstelm_h", + "llvm.loongarch.lasx.xvstelm.w" => 
"__builtin_lasx_xvstelm_w", + "llvm.loongarch.lasx.xvstx" => "__builtin_lasx_xvstx", + "llvm.loongarch.lasx.xvsub.b" => "__builtin_lasx_xvsub_b", + "llvm.loongarch.lasx.xvsub.d" => "__builtin_lasx_xvsub_d", + "llvm.loongarch.lasx.xvsub.h" => "__builtin_lasx_xvsub_h", + "llvm.loongarch.lasx.xvsub.q" => "__builtin_lasx_xvsub_q", + "llvm.loongarch.lasx.xvsub.w" => "__builtin_lasx_xvsub_w", + "llvm.loongarch.lasx.xvsubi.bu" => "__builtin_lasx_xvsubi_bu", + "llvm.loongarch.lasx.xvsubi.du" => "__builtin_lasx_xvsubi_du", + "llvm.loongarch.lasx.xvsubi.hu" => "__builtin_lasx_xvsubi_hu", + "llvm.loongarch.lasx.xvsubi.wu" => "__builtin_lasx_xvsubi_wu", + "llvm.loongarch.lasx.xvsubwev.d.w" => "__builtin_lasx_xvsubwev_d_w", + "llvm.loongarch.lasx.xvsubwev.d.wu" => "__builtin_lasx_xvsubwev_d_wu", + "llvm.loongarch.lasx.xvsubwev.h.b" => "__builtin_lasx_xvsubwev_h_b", + "llvm.loongarch.lasx.xvsubwev.h.bu" => "__builtin_lasx_xvsubwev_h_bu", + "llvm.loongarch.lasx.xvsubwev.q.d" => "__builtin_lasx_xvsubwev_q_d", + "llvm.loongarch.lasx.xvsubwev.q.du" => "__builtin_lasx_xvsubwev_q_du", + "llvm.loongarch.lasx.xvsubwev.w.h" => "__builtin_lasx_xvsubwev_w_h", + "llvm.loongarch.lasx.xvsubwev.w.hu" => "__builtin_lasx_xvsubwev_w_hu", + "llvm.loongarch.lasx.xvsubwod.d.w" => "__builtin_lasx_xvsubwod_d_w", + "llvm.loongarch.lasx.xvsubwod.d.wu" => "__builtin_lasx_xvsubwod_d_wu", + "llvm.loongarch.lasx.xvsubwod.h.b" => "__builtin_lasx_xvsubwod_h_b", + "llvm.loongarch.lasx.xvsubwod.h.bu" => "__builtin_lasx_xvsubwod_h_bu", + "llvm.loongarch.lasx.xvsubwod.q.d" => "__builtin_lasx_xvsubwod_q_d", + "llvm.loongarch.lasx.xvsubwod.q.du" => "__builtin_lasx_xvsubwod_q_du", + "llvm.loongarch.lasx.xvsubwod.w.h" => "__builtin_lasx_xvsubwod_w_h", + "llvm.loongarch.lasx.xvsubwod.w.hu" => "__builtin_lasx_xvsubwod_w_hu", + "llvm.loongarch.lasx.xvxor.v" => "__builtin_lasx_xvxor_v", + "llvm.loongarch.lasx.xvxori.b" => "__builtin_lasx_xvxori_b", + "llvm.loongarch.lddir.d" => "__builtin_loongarch_lddir_d", + "llvm.loongarch.ldpte.d" => "__builtin_loongarch_ldpte_d", + "llvm.loongarch.lsx.bnz.b" => "__builtin_lsx_bnz_b", + "llvm.loongarch.lsx.bnz.d" => "__builtin_lsx_bnz_d", + "llvm.loongarch.lsx.bnz.h" => "__builtin_lsx_bnz_h", + "llvm.loongarch.lsx.bnz.v" => "__builtin_lsx_bnz_v", + "llvm.loongarch.lsx.bnz.w" => "__builtin_lsx_bnz_w", + "llvm.loongarch.lsx.bz.b" => "__builtin_lsx_bz_b", + "llvm.loongarch.lsx.bz.d" => "__builtin_lsx_bz_d", + "llvm.loongarch.lsx.bz.h" => "__builtin_lsx_bz_h", + "llvm.loongarch.lsx.bz.v" => "__builtin_lsx_bz_v", + "llvm.loongarch.lsx.bz.w" => "__builtin_lsx_bz_w", + "llvm.loongarch.lsx.vabsd.b" => "__builtin_lsx_vabsd_b", + "llvm.loongarch.lsx.vabsd.bu" => "__builtin_lsx_vabsd_bu", + "llvm.loongarch.lsx.vabsd.d" => "__builtin_lsx_vabsd_d", + "llvm.loongarch.lsx.vabsd.du" => "__builtin_lsx_vabsd_du", + "llvm.loongarch.lsx.vabsd.h" => "__builtin_lsx_vabsd_h", + "llvm.loongarch.lsx.vabsd.hu" => "__builtin_lsx_vabsd_hu", + "llvm.loongarch.lsx.vabsd.w" => "__builtin_lsx_vabsd_w", + "llvm.loongarch.lsx.vabsd.wu" => "__builtin_lsx_vabsd_wu", + "llvm.loongarch.lsx.vadd.b" => "__builtin_lsx_vadd_b", + "llvm.loongarch.lsx.vadd.d" => "__builtin_lsx_vadd_d", + "llvm.loongarch.lsx.vadd.h" => "__builtin_lsx_vadd_h", + "llvm.loongarch.lsx.vadd.q" => "__builtin_lsx_vadd_q", + "llvm.loongarch.lsx.vadd.w" => "__builtin_lsx_vadd_w", + "llvm.loongarch.lsx.vadda.b" => "__builtin_lsx_vadda_b", + "llvm.loongarch.lsx.vadda.d" => "__builtin_lsx_vadda_d", + "llvm.loongarch.lsx.vadda.h" => "__builtin_lsx_vadda_h", + 
"llvm.loongarch.lsx.vadda.w" => "__builtin_lsx_vadda_w", + "llvm.loongarch.lsx.vaddi.bu" => "__builtin_lsx_vaddi_bu", + "llvm.loongarch.lsx.vaddi.du" => "__builtin_lsx_vaddi_du", + "llvm.loongarch.lsx.vaddi.hu" => "__builtin_lsx_vaddi_hu", + "llvm.loongarch.lsx.vaddi.wu" => "__builtin_lsx_vaddi_wu", + "llvm.loongarch.lsx.vaddwev.d.w" => "__builtin_lsx_vaddwev_d_w", + "llvm.loongarch.lsx.vaddwev.d.wu" => "__builtin_lsx_vaddwev_d_wu", + "llvm.loongarch.lsx.vaddwev.d.wu.w" => "__builtin_lsx_vaddwev_d_wu_w", + "llvm.loongarch.lsx.vaddwev.h.b" => "__builtin_lsx_vaddwev_h_b", + "llvm.loongarch.lsx.vaddwev.h.bu" => "__builtin_lsx_vaddwev_h_bu", + "llvm.loongarch.lsx.vaddwev.h.bu.b" => "__builtin_lsx_vaddwev_h_bu_b", + "llvm.loongarch.lsx.vaddwev.q.d" => "__builtin_lsx_vaddwev_q_d", + "llvm.loongarch.lsx.vaddwev.q.du" => "__builtin_lsx_vaddwev_q_du", + "llvm.loongarch.lsx.vaddwev.q.du.d" => "__builtin_lsx_vaddwev_q_du_d", + "llvm.loongarch.lsx.vaddwev.w.h" => "__builtin_lsx_vaddwev_w_h", + "llvm.loongarch.lsx.vaddwev.w.hu" => "__builtin_lsx_vaddwev_w_hu", + "llvm.loongarch.lsx.vaddwev.w.hu.h" => "__builtin_lsx_vaddwev_w_hu_h", + "llvm.loongarch.lsx.vaddwod.d.w" => "__builtin_lsx_vaddwod_d_w", + "llvm.loongarch.lsx.vaddwod.d.wu" => "__builtin_lsx_vaddwod_d_wu", + "llvm.loongarch.lsx.vaddwod.d.wu.w" => "__builtin_lsx_vaddwod_d_wu_w", + "llvm.loongarch.lsx.vaddwod.h.b" => "__builtin_lsx_vaddwod_h_b", + "llvm.loongarch.lsx.vaddwod.h.bu" => "__builtin_lsx_vaddwod_h_bu", + "llvm.loongarch.lsx.vaddwod.h.bu.b" => "__builtin_lsx_vaddwod_h_bu_b", + "llvm.loongarch.lsx.vaddwod.q.d" => "__builtin_lsx_vaddwod_q_d", + "llvm.loongarch.lsx.vaddwod.q.du" => "__builtin_lsx_vaddwod_q_du", + "llvm.loongarch.lsx.vaddwod.q.du.d" => "__builtin_lsx_vaddwod_q_du_d", + "llvm.loongarch.lsx.vaddwod.w.h" => "__builtin_lsx_vaddwod_w_h", + "llvm.loongarch.lsx.vaddwod.w.hu" => "__builtin_lsx_vaddwod_w_hu", + "llvm.loongarch.lsx.vaddwod.w.hu.h" => "__builtin_lsx_vaddwod_w_hu_h", + "llvm.loongarch.lsx.vand.v" => "__builtin_lsx_vand_v", + "llvm.loongarch.lsx.vandi.b" => "__builtin_lsx_vandi_b", + "llvm.loongarch.lsx.vandn.v" => "__builtin_lsx_vandn_v", + "llvm.loongarch.lsx.vavg.b" => "__builtin_lsx_vavg_b", + "llvm.loongarch.lsx.vavg.bu" => "__builtin_lsx_vavg_bu", + "llvm.loongarch.lsx.vavg.d" => "__builtin_lsx_vavg_d", + "llvm.loongarch.lsx.vavg.du" => "__builtin_lsx_vavg_du", + "llvm.loongarch.lsx.vavg.h" => "__builtin_lsx_vavg_h", + "llvm.loongarch.lsx.vavg.hu" => "__builtin_lsx_vavg_hu", + "llvm.loongarch.lsx.vavg.w" => "__builtin_lsx_vavg_w", + "llvm.loongarch.lsx.vavg.wu" => "__builtin_lsx_vavg_wu", + "llvm.loongarch.lsx.vavgr.b" => "__builtin_lsx_vavgr_b", + "llvm.loongarch.lsx.vavgr.bu" => "__builtin_lsx_vavgr_bu", + "llvm.loongarch.lsx.vavgr.d" => "__builtin_lsx_vavgr_d", + "llvm.loongarch.lsx.vavgr.du" => "__builtin_lsx_vavgr_du", + "llvm.loongarch.lsx.vavgr.h" => "__builtin_lsx_vavgr_h", + "llvm.loongarch.lsx.vavgr.hu" => "__builtin_lsx_vavgr_hu", + "llvm.loongarch.lsx.vavgr.w" => "__builtin_lsx_vavgr_w", + "llvm.loongarch.lsx.vavgr.wu" => "__builtin_lsx_vavgr_wu", + "llvm.loongarch.lsx.vbitclr.b" => "__builtin_lsx_vbitclr_b", + "llvm.loongarch.lsx.vbitclr.d" => "__builtin_lsx_vbitclr_d", + "llvm.loongarch.lsx.vbitclr.h" => "__builtin_lsx_vbitclr_h", + "llvm.loongarch.lsx.vbitclr.w" => "__builtin_lsx_vbitclr_w", + "llvm.loongarch.lsx.vbitclri.b" => "__builtin_lsx_vbitclri_b", + "llvm.loongarch.lsx.vbitclri.d" => "__builtin_lsx_vbitclri_d", + "llvm.loongarch.lsx.vbitclri.h" => "__builtin_lsx_vbitclri_h", + 
"llvm.loongarch.lsx.vbitclri.w" => "__builtin_lsx_vbitclri_w", + "llvm.loongarch.lsx.vbitrev.b" => "__builtin_lsx_vbitrev_b", + "llvm.loongarch.lsx.vbitrev.d" => "__builtin_lsx_vbitrev_d", + "llvm.loongarch.lsx.vbitrev.h" => "__builtin_lsx_vbitrev_h", + "llvm.loongarch.lsx.vbitrev.w" => "__builtin_lsx_vbitrev_w", + "llvm.loongarch.lsx.vbitrevi.b" => "__builtin_lsx_vbitrevi_b", + "llvm.loongarch.lsx.vbitrevi.d" => "__builtin_lsx_vbitrevi_d", + "llvm.loongarch.lsx.vbitrevi.h" => "__builtin_lsx_vbitrevi_h", + "llvm.loongarch.lsx.vbitrevi.w" => "__builtin_lsx_vbitrevi_w", + "llvm.loongarch.lsx.vbitsel.v" => "__builtin_lsx_vbitsel_v", + "llvm.loongarch.lsx.vbitseli.b" => "__builtin_lsx_vbitseli_b", + "llvm.loongarch.lsx.vbitset.b" => "__builtin_lsx_vbitset_b", + "llvm.loongarch.lsx.vbitset.d" => "__builtin_lsx_vbitset_d", + "llvm.loongarch.lsx.vbitset.h" => "__builtin_lsx_vbitset_h", + "llvm.loongarch.lsx.vbitset.w" => "__builtin_lsx_vbitset_w", + "llvm.loongarch.lsx.vbitseti.b" => "__builtin_lsx_vbitseti_b", + "llvm.loongarch.lsx.vbitseti.d" => "__builtin_lsx_vbitseti_d", + "llvm.loongarch.lsx.vbitseti.h" => "__builtin_lsx_vbitseti_h", + "llvm.loongarch.lsx.vbitseti.w" => "__builtin_lsx_vbitseti_w", + "llvm.loongarch.lsx.vbsll.v" => "__builtin_lsx_vbsll_v", + "llvm.loongarch.lsx.vbsrl.v" => "__builtin_lsx_vbsrl_v", + "llvm.loongarch.lsx.vclo.b" => "__builtin_lsx_vclo_b", + "llvm.loongarch.lsx.vclo.d" => "__builtin_lsx_vclo_d", + "llvm.loongarch.lsx.vclo.h" => "__builtin_lsx_vclo_h", + "llvm.loongarch.lsx.vclo.w" => "__builtin_lsx_vclo_w", + "llvm.loongarch.lsx.vclz.b" => "__builtin_lsx_vclz_b", + "llvm.loongarch.lsx.vclz.d" => "__builtin_lsx_vclz_d", + "llvm.loongarch.lsx.vclz.h" => "__builtin_lsx_vclz_h", + "llvm.loongarch.lsx.vclz.w" => "__builtin_lsx_vclz_w", + "llvm.loongarch.lsx.vdiv.b" => "__builtin_lsx_vdiv_b", + "llvm.loongarch.lsx.vdiv.bu" => "__builtin_lsx_vdiv_bu", + "llvm.loongarch.lsx.vdiv.d" => "__builtin_lsx_vdiv_d", + "llvm.loongarch.lsx.vdiv.du" => "__builtin_lsx_vdiv_du", + "llvm.loongarch.lsx.vdiv.h" => "__builtin_lsx_vdiv_h", + "llvm.loongarch.lsx.vdiv.hu" => "__builtin_lsx_vdiv_hu", + "llvm.loongarch.lsx.vdiv.w" => "__builtin_lsx_vdiv_w", + "llvm.loongarch.lsx.vdiv.wu" => "__builtin_lsx_vdiv_wu", + "llvm.loongarch.lsx.vexth.d.w" => "__builtin_lsx_vexth_d_w", + "llvm.loongarch.lsx.vexth.du.wu" => "__builtin_lsx_vexth_du_wu", + "llvm.loongarch.lsx.vexth.h.b" => "__builtin_lsx_vexth_h_b", + "llvm.loongarch.lsx.vexth.hu.bu" => "__builtin_lsx_vexth_hu_bu", + "llvm.loongarch.lsx.vexth.q.d" => "__builtin_lsx_vexth_q_d", + "llvm.loongarch.lsx.vexth.qu.du" => "__builtin_lsx_vexth_qu_du", + "llvm.loongarch.lsx.vexth.w.h" => "__builtin_lsx_vexth_w_h", + "llvm.loongarch.lsx.vexth.wu.hu" => "__builtin_lsx_vexth_wu_hu", + "llvm.loongarch.lsx.vextl.q.d" => "__builtin_lsx_vextl_q_d", + "llvm.loongarch.lsx.vextl.qu.du" => "__builtin_lsx_vextl_qu_du", + "llvm.loongarch.lsx.vextrins.b" => "__builtin_lsx_vextrins_b", + "llvm.loongarch.lsx.vextrins.d" => "__builtin_lsx_vextrins_d", + "llvm.loongarch.lsx.vextrins.h" => "__builtin_lsx_vextrins_h", + "llvm.loongarch.lsx.vextrins.w" => "__builtin_lsx_vextrins_w", + "llvm.loongarch.lsx.vfadd.d" => "__builtin_lsx_vfadd_d", + "llvm.loongarch.lsx.vfadd.s" => "__builtin_lsx_vfadd_s", + "llvm.loongarch.lsx.vfclass.d" => "__builtin_lsx_vfclass_d", + "llvm.loongarch.lsx.vfclass.s" => "__builtin_lsx_vfclass_s", + "llvm.loongarch.lsx.vfcmp.caf.d" => "__builtin_lsx_vfcmp_caf_d", + "llvm.loongarch.lsx.vfcmp.caf.s" => "__builtin_lsx_vfcmp_caf_s", + 
"llvm.loongarch.lsx.vfcmp.ceq.d" => "__builtin_lsx_vfcmp_ceq_d", + "llvm.loongarch.lsx.vfcmp.ceq.s" => "__builtin_lsx_vfcmp_ceq_s", + "llvm.loongarch.lsx.vfcmp.cle.d" => "__builtin_lsx_vfcmp_cle_d", + "llvm.loongarch.lsx.vfcmp.cle.s" => "__builtin_lsx_vfcmp_cle_s", + "llvm.loongarch.lsx.vfcmp.clt.d" => "__builtin_lsx_vfcmp_clt_d", + "llvm.loongarch.lsx.vfcmp.clt.s" => "__builtin_lsx_vfcmp_clt_s", + "llvm.loongarch.lsx.vfcmp.cne.d" => "__builtin_lsx_vfcmp_cne_d", + "llvm.loongarch.lsx.vfcmp.cne.s" => "__builtin_lsx_vfcmp_cne_s", + "llvm.loongarch.lsx.vfcmp.cor.d" => "__builtin_lsx_vfcmp_cor_d", + "llvm.loongarch.lsx.vfcmp.cor.s" => "__builtin_lsx_vfcmp_cor_s", + "llvm.loongarch.lsx.vfcmp.cueq.d" => "__builtin_lsx_vfcmp_cueq_d", + "llvm.loongarch.lsx.vfcmp.cueq.s" => "__builtin_lsx_vfcmp_cueq_s", + "llvm.loongarch.lsx.vfcmp.cule.d" => "__builtin_lsx_vfcmp_cule_d", + "llvm.loongarch.lsx.vfcmp.cule.s" => "__builtin_lsx_vfcmp_cule_s", + "llvm.loongarch.lsx.vfcmp.cult.d" => "__builtin_lsx_vfcmp_cult_d", + "llvm.loongarch.lsx.vfcmp.cult.s" => "__builtin_lsx_vfcmp_cult_s", + "llvm.loongarch.lsx.vfcmp.cun.d" => "__builtin_lsx_vfcmp_cun_d", + "llvm.loongarch.lsx.vfcmp.cun.s" => "__builtin_lsx_vfcmp_cun_s", + "llvm.loongarch.lsx.vfcmp.cune.d" => "__builtin_lsx_vfcmp_cune_d", + "llvm.loongarch.lsx.vfcmp.cune.s" => "__builtin_lsx_vfcmp_cune_s", + "llvm.loongarch.lsx.vfcmp.saf.d" => "__builtin_lsx_vfcmp_saf_d", + "llvm.loongarch.lsx.vfcmp.saf.s" => "__builtin_lsx_vfcmp_saf_s", + "llvm.loongarch.lsx.vfcmp.seq.d" => "__builtin_lsx_vfcmp_seq_d", + "llvm.loongarch.lsx.vfcmp.seq.s" => "__builtin_lsx_vfcmp_seq_s", + "llvm.loongarch.lsx.vfcmp.sle.d" => "__builtin_lsx_vfcmp_sle_d", + "llvm.loongarch.lsx.vfcmp.sle.s" => "__builtin_lsx_vfcmp_sle_s", + "llvm.loongarch.lsx.vfcmp.slt.d" => "__builtin_lsx_vfcmp_slt_d", + "llvm.loongarch.lsx.vfcmp.slt.s" => "__builtin_lsx_vfcmp_slt_s", + "llvm.loongarch.lsx.vfcmp.sne.d" => "__builtin_lsx_vfcmp_sne_d", + "llvm.loongarch.lsx.vfcmp.sne.s" => "__builtin_lsx_vfcmp_sne_s", + "llvm.loongarch.lsx.vfcmp.sor.d" => "__builtin_lsx_vfcmp_sor_d", + "llvm.loongarch.lsx.vfcmp.sor.s" => "__builtin_lsx_vfcmp_sor_s", + "llvm.loongarch.lsx.vfcmp.sueq.d" => "__builtin_lsx_vfcmp_sueq_d", + "llvm.loongarch.lsx.vfcmp.sueq.s" => "__builtin_lsx_vfcmp_sueq_s", + "llvm.loongarch.lsx.vfcmp.sule.d" => "__builtin_lsx_vfcmp_sule_d", + "llvm.loongarch.lsx.vfcmp.sule.s" => "__builtin_lsx_vfcmp_sule_s", + "llvm.loongarch.lsx.vfcmp.sult.d" => "__builtin_lsx_vfcmp_sult_d", + "llvm.loongarch.lsx.vfcmp.sult.s" => "__builtin_lsx_vfcmp_sult_s", + "llvm.loongarch.lsx.vfcmp.sun.d" => "__builtin_lsx_vfcmp_sun_d", + "llvm.loongarch.lsx.vfcmp.sun.s" => "__builtin_lsx_vfcmp_sun_s", + "llvm.loongarch.lsx.vfcmp.sune.d" => "__builtin_lsx_vfcmp_sune_d", + "llvm.loongarch.lsx.vfcmp.sune.s" => "__builtin_lsx_vfcmp_sune_s", + "llvm.loongarch.lsx.vfcvt.h.s" => "__builtin_lsx_vfcvt_h_s", + "llvm.loongarch.lsx.vfcvt.s.d" => "__builtin_lsx_vfcvt_s_d", + "llvm.loongarch.lsx.vfcvth.d.s" => "__builtin_lsx_vfcvth_d_s", + "llvm.loongarch.lsx.vfcvth.s.h" => "__builtin_lsx_vfcvth_s_h", + "llvm.loongarch.lsx.vfcvtl.d.s" => "__builtin_lsx_vfcvtl_d_s", + "llvm.loongarch.lsx.vfcvtl.s.h" => "__builtin_lsx_vfcvtl_s_h", + "llvm.loongarch.lsx.vfdiv.d" => "__builtin_lsx_vfdiv_d", + "llvm.loongarch.lsx.vfdiv.s" => "__builtin_lsx_vfdiv_s", + "llvm.loongarch.lsx.vffint.d.l" => "__builtin_lsx_vffint_d_l", + "llvm.loongarch.lsx.vffint.d.lu" => "__builtin_lsx_vffint_d_lu", + "llvm.loongarch.lsx.vffint.s.l" => "__builtin_lsx_vffint_s_l", + 
"llvm.loongarch.lsx.vffint.s.w" => "__builtin_lsx_vffint_s_w", + "llvm.loongarch.lsx.vffint.s.wu" => "__builtin_lsx_vffint_s_wu", + "llvm.loongarch.lsx.vffinth.d.w" => "__builtin_lsx_vffinth_d_w", + "llvm.loongarch.lsx.vffintl.d.w" => "__builtin_lsx_vffintl_d_w", + "llvm.loongarch.lsx.vflogb.d" => "__builtin_lsx_vflogb_d", + "llvm.loongarch.lsx.vflogb.s" => "__builtin_lsx_vflogb_s", + "llvm.loongarch.lsx.vfmadd.d" => "__builtin_lsx_vfmadd_d", + "llvm.loongarch.lsx.vfmadd.s" => "__builtin_lsx_vfmadd_s", + "llvm.loongarch.lsx.vfmax.d" => "__builtin_lsx_vfmax_d", + "llvm.loongarch.lsx.vfmax.s" => "__builtin_lsx_vfmax_s", + "llvm.loongarch.lsx.vfmaxa.d" => "__builtin_lsx_vfmaxa_d", + "llvm.loongarch.lsx.vfmaxa.s" => "__builtin_lsx_vfmaxa_s", + "llvm.loongarch.lsx.vfmin.d" => "__builtin_lsx_vfmin_d", + "llvm.loongarch.lsx.vfmin.s" => "__builtin_lsx_vfmin_s", + "llvm.loongarch.lsx.vfmina.d" => "__builtin_lsx_vfmina_d", + "llvm.loongarch.lsx.vfmina.s" => "__builtin_lsx_vfmina_s", + "llvm.loongarch.lsx.vfmsub.d" => "__builtin_lsx_vfmsub_d", + "llvm.loongarch.lsx.vfmsub.s" => "__builtin_lsx_vfmsub_s", + "llvm.loongarch.lsx.vfmul.d" => "__builtin_lsx_vfmul_d", + "llvm.loongarch.lsx.vfmul.s" => "__builtin_lsx_vfmul_s", + "llvm.loongarch.lsx.vfnmadd.d" => "__builtin_lsx_vfnmadd_d", + "llvm.loongarch.lsx.vfnmadd.s" => "__builtin_lsx_vfnmadd_s", + "llvm.loongarch.lsx.vfnmsub.d" => "__builtin_lsx_vfnmsub_d", + "llvm.loongarch.lsx.vfnmsub.s" => "__builtin_lsx_vfnmsub_s", + "llvm.loongarch.lsx.vfrecip.d" => "__builtin_lsx_vfrecip_d", + "llvm.loongarch.lsx.vfrecip.s" => "__builtin_lsx_vfrecip_s", + "llvm.loongarch.lsx.vfrint.d" => "__builtin_lsx_vfrint_d", + "llvm.loongarch.lsx.vfrint.s" => "__builtin_lsx_vfrint_s", + "llvm.loongarch.lsx.vfrintrm.d" => "__builtin_lsx_vfrintrm_d", + "llvm.loongarch.lsx.vfrintrm.s" => "__builtin_lsx_vfrintrm_s", + "llvm.loongarch.lsx.vfrintrne.d" => "__builtin_lsx_vfrintrne_d", + "llvm.loongarch.lsx.vfrintrne.s" => "__builtin_lsx_vfrintrne_s", + "llvm.loongarch.lsx.vfrintrp.d" => "__builtin_lsx_vfrintrp_d", + "llvm.loongarch.lsx.vfrintrp.s" => "__builtin_lsx_vfrintrp_s", + "llvm.loongarch.lsx.vfrintrz.d" => "__builtin_lsx_vfrintrz_d", + "llvm.loongarch.lsx.vfrintrz.s" => "__builtin_lsx_vfrintrz_s", + "llvm.loongarch.lsx.vfrsqrt.d" => "__builtin_lsx_vfrsqrt_d", + "llvm.loongarch.lsx.vfrsqrt.s" => "__builtin_lsx_vfrsqrt_s", + "llvm.loongarch.lsx.vfrstp.b" => "__builtin_lsx_vfrstp_b", + "llvm.loongarch.lsx.vfrstp.h" => "__builtin_lsx_vfrstp_h", + "llvm.loongarch.lsx.vfrstpi.b" => "__builtin_lsx_vfrstpi_b", + "llvm.loongarch.lsx.vfrstpi.h" => "__builtin_lsx_vfrstpi_h", + "llvm.loongarch.lsx.vfsqrt.d" => "__builtin_lsx_vfsqrt_d", + "llvm.loongarch.lsx.vfsqrt.s" => "__builtin_lsx_vfsqrt_s", + "llvm.loongarch.lsx.vfsub.d" => "__builtin_lsx_vfsub_d", + "llvm.loongarch.lsx.vfsub.s" => "__builtin_lsx_vfsub_s", + "llvm.loongarch.lsx.vftint.l.d" => "__builtin_lsx_vftint_l_d", + "llvm.loongarch.lsx.vftint.lu.d" => "__builtin_lsx_vftint_lu_d", + "llvm.loongarch.lsx.vftint.w.d" => "__builtin_lsx_vftint_w_d", + "llvm.loongarch.lsx.vftint.w.s" => "__builtin_lsx_vftint_w_s", + "llvm.loongarch.lsx.vftint.wu.s" => "__builtin_lsx_vftint_wu_s", + "llvm.loongarch.lsx.vftinth.l.s" => "__builtin_lsx_vftinth_l_s", + "llvm.loongarch.lsx.vftintl.l.s" => "__builtin_lsx_vftintl_l_s", + "llvm.loongarch.lsx.vftintrm.l.d" => "__builtin_lsx_vftintrm_l_d", + "llvm.loongarch.lsx.vftintrm.w.d" => "__builtin_lsx_vftintrm_w_d", + "llvm.loongarch.lsx.vftintrm.w.s" => "__builtin_lsx_vftintrm_w_s", + 
"llvm.loongarch.lsx.vftintrmh.l.s" => "__builtin_lsx_vftintrmh_l_s", + "llvm.loongarch.lsx.vftintrml.l.s" => "__builtin_lsx_vftintrml_l_s", + "llvm.loongarch.lsx.vftintrne.l.d" => "__builtin_lsx_vftintrne_l_d", + "llvm.loongarch.lsx.vftintrne.w.d" => "__builtin_lsx_vftintrne_w_d", + "llvm.loongarch.lsx.vftintrne.w.s" => "__builtin_lsx_vftintrne_w_s", + "llvm.loongarch.lsx.vftintrneh.l.s" => "__builtin_lsx_vftintrneh_l_s", + "llvm.loongarch.lsx.vftintrnel.l.s" => "__builtin_lsx_vftintrnel_l_s", + "llvm.loongarch.lsx.vftintrp.l.d" => "__builtin_lsx_vftintrp_l_d", + "llvm.loongarch.lsx.vftintrp.w.d" => "__builtin_lsx_vftintrp_w_d", + "llvm.loongarch.lsx.vftintrp.w.s" => "__builtin_lsx_vftintrp_w_s", + "llvm.loongarch.lsx.vftintrph.l.s" => "__builtin_lsx_vftintrph_l_s", + "llvm.loongarch.lsx.vftintrpl.l.s" => "__builtin_lsx_vftintrpl_l_s", + "llvm.loongarch.lsx.vftintrz.l.d" => "__builtin_lsx_vftintrz_l_d", + "llvm.loongarch.lsx.vftintrz.lu.d" => "__builtin_lsx_vftintrz_lu_d", + "llvm.loongarch.lsx.vftintrz.w.d" => "__builtin_lsx_vftintrz_w_d", + "llvm.loongarch.lsx.vftintrz.w.s" => "__builtin_lsx_vftintrz_w_s", + "llvm.loongarch.lsx.vftintrz.wu.s" => "__builtin_lsx_vftintrz_wu_s", + "llvm.loongarch.lsx.vftintrzh.l.s" => "__builtin_lsx_vftintrzh_l_s", + "llvm.loongarch.lsx.vftintrzl.l.s" => "__builtin_lsx_vftintrzl_l_s", + "llvm.loongarch.lsx.vhaddw.d.w" => "__builtin_lsx_vhaddw_d_w", + "llvm.loongarch.lsx.vhaddw.du.wu" => "__builtin_lsx_vhaddw_du_wu", + "llvm.loongarch.lsx.vhaddw.h.b" => "__builtin_lsx_vhaddw_h_b", + "llvm.loongarch.lsx.vhaddw.hu.bu" => "__builtin_lsx_vhaddw_hu_bu", + "llvm.loongarch.lsx.vhaddw.q.d" => "__builtin_lsx_vhaddw_q_d", + "llvm.loongarch.lsx.vhaddw.qu.du" => "__builtin_lsx_vhaddw_qu_du", + "llvm.loongarch.lsx.vhaddw.w.h" => "__builtin_lsx_vhaddw_w_h", + "llvm.loongarch.lsx.vhaddw.wu.hu" => "__builtin_lsx_vhaddw_wu_hu", + "llvm.loongarch.lsx.vhsubw.d.w" => "__builtin_lsx_vhsubw_d_w", + "llvm.loongarch.lsx.vhsubw.du.wu" => "__builtin_lsx_vhsubw_du_wu", + "llvm.loongarch.lsx.vhsubw.h.b" => "__builtin_lsx_vhsubw_h_b", + "llvm.loongarch.lsx.vhsubw.hu.bu" => "__builtin_lsx_vhsubw_hu_bu", + "llvm.loongarch.lsx.vhsubw.q.d" => "__builtin_lsx_vhsubw_q_d", + "llvm.loongarch.lsx.vhsubw.qu.du" => "__builtin_lsx_vhsubw_qu_du", + "llvm.loongarch.lsx.vhsubw.w.h" => "__builtin_lsx_vhsubw_w_h", + "llvm.loongarch.lsx.vhsubw.wu.hu" => "__builtin_lsx_vhsubw_wu_hu", + "llvm.loongarch.lsx.vilvh.b" => "__builtin_lsx_vilvh_b", + "llvm.loongarch.lsx.vilvh.d" => "__builtin_lsx_vilvh_d", + "llvm.loongarch.lsx.vilvh.h" => "__builtin_lsx_vilvh_h", + "llvm.loongarch.lsx.vilvh.w" => "__builtin_lsx_vilvh_w", + "llvm.loongarch.lsx.vilvl.b" => "__builtin_lsx_vilvl_b", + "llvm.loongarch.lsx.vilvl.d" => "__builtin_lsx_vilvl_d", + "llvm.loongarch.lsx.vilvl.h" => "__builtin_lsx_vilvl_h", + "llvm.loongarch.lsx.vilvl.w" => "__builtin_lsx_vilvl_w", + "llvm.loongarch.lsx.vinsgr2vr.b" => "__builtin_lsx_vinsgr2vr_b", + "llvm.loongarch.lsx.vinsgr2vr.d" => "__builtin_lsx_vinsgr2vr_d", + "llvm.loongarch.lsx.vinsgr2vr.h" => "__builtin_lsx_vinsgr2vr_h", + "llvm.loongarch.lsx.vinsgr2vr.w" => "__builtin_lsx_vinsgr2vr_w", + "llvm.loongarch.lsx.vld" => "__builtin_lsx_vld", + "llvm.loongarch.lsx.vldi" => "__builtin_lsx_vldi", + "llvm.loongarch.lsx.vldrepl.b" => "__builtin_lsx_vldrepl_b", + "llvm.loongarch.lsx.vldrepl.d" => "__builtin_lsx_vldrepl_d", + "llvm.loongarch.lsx.vldrepl.h" => "__builtin_lsx_vldrepl_h", + "llvm.loongarch.lsx.vldrepl.w" => "__builtin_lsx_vldrepl_w", + "llvm.loongarch.lsx.vldx" => 
"__builtin_lsx_vldx", + "llvm.loongarch.lsx.vmadd.b" => "__builtin_lsx_vmadd_b", + "llvm.loongarch.lsx.vmadd.d" => "__builtin_lsx_vmadd_d", + "llvm.loongarch.lsx.vmadd.h" => "__builtin_lsx_vmadd_h", + "llvm.loongarch.lsx.vmadd.w" => "__builtin_lsx_vmadd_w", + "llvm.loongarch.lsx.vmaddwev.d.w" => "__builtin_lsx_vmaddwev_d_w", + "llvm.loongarch.lsx.vmaddwev.d.wu" => "__builtin_lsx_vmaddwev_d_wu", + "llvm.loongarch.lsx.vmaddwev.d.wu.w" => "__builtin_lsx_vmaddwev_d_wu_w", + "llvm.loongarch.lsx.vmaddwev.h.b" => "__builtin_lsx_vmaddwev_h_b", + "llvm.loongarch.lsx.vmaddwev.h.bu" => "__builtin_lsx_vmaddwev_h_bu", + "llvm.loongarch.lsx.vmaddwev.h.bu.b" => "__builtin_lsx_vmaddwev_h_bu_b", + "llvm.loongarch.lsx.vmaddwev.q.d" => "__builtin_lsx_vmaddwev_q_d", + "llvm.loongarch.lsx.vmaddwev.q.du" => "__builtin_lsx_vmaddwev_q_du", + "llvm.loongarch.lsx.vmaddwev.q.du.d" => "__builtin_lsx_vmaddwev_q_du_d", + "llvm.loongarch.lsx.vmaddwev.w.h" => "__builtin_lsx_vmaddwev_w_h", + "llvm.loongarch.lsx.vmaddwev.w.hu" => "__builtin_lsx_vmaddwev_w_hu", + "llvm.loongarch.lsx.vmaddwev.w.hu.h" => "__builtin_lsx_vmaddwev_w_hu_h", + "llvm.loongarch.lsx.vmaddwod.d.w" => "__builtin_lsx_vmaddwod_d_w", + "llvm.loongarch.lsx.vmaddwod.d.wu" => "__builtin_lsx_vmaddwod_d_wu", + "llvm.loongarch.lsx.vmaddwod.d.wu.w" => "__builtin_lsx_vmaddwod_d_wu_w", + "llvm.loongarch.lsx.vmaddwod.h.b" => "__builtin_lsx_vmaddwod_h_b", + "llvm.loongarch.lsx.vmaddwod.h.bu" => "__builtin_lsx_vmaddwod_h_bu", + "llvm.loongarch.lsx.vmaddwod.h.bu.b" => "__builtin_lsx_vmaddwod_h_bu_b", + "llvm.loongarch.lsx.vmaddwod.q.d" => "__builtin_lsx_vmaddwod_q_d", + "llvm.loongarch.lsx.vmaddwod.q.du" => "__builtin_lsx_vmaddwod_q_du", + "llvm.loongarch.lsx.vmaddwod.q.du.d" => "__builtin_lsx_vmaddwod_q_du_d", + "llvm.loongarch.lsx.vmaddwod.w.h" => "__builtin_lsx_vmaddwod_w_h", + "llvm.loongarch.lsx.vmaddwod.w.hu" => "__builtin_lsx_vmaddwod_w_hu", + "llvm.loongarch.lsx.vmaddwod.w.hu.h" => "__builtin_lsx_vmaddwod_w_hu_h", + "llvm.loongarch.lsx.vmax.b" => "__builtin_lsx_vmax_b", + "llvm.loongarch.lsx.vmax.bu" => "__builtin_lsx_vmax_bu", + "llvm.loongarch.lsx.vmax.d" => "__builtin_lsx_vmax_d", + "llvm.loongarch.lsx.vmax.du" => "__builtin_lsx_vmax_du", + "llvm.loongarch.lsx.vmax.h" => "__builtin_lsx_vmax_h", + "llvm.loongarch.lsx.vmax.hu" => "__builtin_lsx_vmax_hu", + "llvm.loongarch.lsx.vmax.w" => "__builtin_lsx_vmax_w", + "llvm.loongarch.lsx.vmax.wu" => "__builtin_lsx_vmax_wu", + "llvm.loongarch.lsx.vmaxi.b" => "__builtin_lsx_vmaxi_b", + "llvm.loongarch.lsx.vmaxi.bu" => "__builtin_lsx_vmaxi_bu", + "llvm.loongarch.lsx.vmaxi.d" => "__builtin_lsx_vmaxi_d", + "llvm.loongarch.lsx.vmaxi.du" => "__builtin_lsx_vmaxi_du", + "llvm.loongarch.lsx.vmaxi.h" => "__builtin_lsx_vmaxi_h", + "llvm.loongarch.lsx.vmaxi.hu" => "__builtin_lsx_vmaxi_hu", + "llvm.loongarch.lsx.vmaxi.w" => "__builtin_lsx_vmaxi_w", + "llvm.loongarch.lsx.vmaxi.wu" => "__builtin_lsx_vmaxi_wu", + "llvm.loongarch.lsx.vmin.b" => "__builtin_lsx_vmin_b", + "llvm.loongarch.lsx.vmin.bu" => "__builtin_lsx_vmin_bu", + "llvm.loongarch.lsx.vmin.d" => "__builtin_lsx_vmin_d", + "llvm.loongarch.lsx.vmin.du" => "__builtin_lsx_vmin_du", + "llvm.loongarch.lsx.vmin.h" => "__builtin_lsx_vmin_h", + "llvm.loongarch.lsx.vmin.hu" => "__builtin_lsx_vmin_hu", + "llvm.loongarch.lsx.vmin.w" => "__builtin_lsx_vmin_w", + "llvm.loongarch.lsx.vmin.wu" => "__builtin_lsx_vmin_wu", + "llvm.loongarch.lsx.vmini.b" => "__builtin_lsx_vmini_b", + "llvm.loongarch.lsx.vmini.bu" => "__builtin_lsx_vmini_bu", + "llvm.loongarch.lsx.vmini.d" => 
"__builtin_lsx_vmini_d", + "llvm.loongarch.lsx.vmini.du" => "__builtin_lsx_vmini_du", + "llvm.loongarch.lsx.vmini.h" => "__builtin_lsx_vmini_h", + "llvm.loongarch.lsx.vmini.hu" => "__builtin_lsx_vmini_hu", + "llvm.loongarch.lsx.vmini.w" => "__builtin_lsx_vmini_w", + "llvm.loongarch.lsx.vmini.wu" => "__builtin_lsx_vmini_wu", + "llvm.loongarch.lsx.vmod.b" => "__builtin_lsx_vmod_b", + "llvm.loongarch.lsx.vmod.bu" => "__builtin_lsx_vmod_bu", + "llvm.loongarch.lsx.vmod.d" => "__builtin_lsx_vmod_d", + "llvm.loongarch.lsx.vmod.du" => "__builtin_lsx_vmod_du", + "llvm.loongarch.lsx.vmod.h" => "__builtin_lsx_vmod_h", + "llvm.loongarch.lsx.vmod.hu" => "__builtin_lsx_vmod_hu", + "llvm.loongarch.lsx.vmod.w" => "__builtin_lsx_vmod_w", + "llvm.loongarch.lsx.vmod.wu" => "__builtin_lsx_vmod_wu", + "llvm.loongarch.lsx.vmskgez.b" => "__builtin_lsx_vmskgez_b", + "llvm.loongarch.lsx.vmskltz.b" => "__builtin_lsx_vmskltz_b", + "llvm.loongarch.lsx.vmskltz.d" => "__builtin_lsx_vmskltz_d", + "llvm.loongarch.lsx.vmskltz.h" => "__builtin_lsx_vmskltz_h", + "llvm.loongarch.lsx.vmskltz.w" => "__builtin_lsx_vmskltz_w", + "llvm.loongarch.lsx.vmsknz.b" => "__builtin_lsx_vmsknz_b", + "llvm.loongarch.lsx.vmsub.b" => "__builtin_lsx_vmsub_b", + "llvm.loongarch.lsx.vmsub.d" => "__builtin_lsx_vmsub_d", + "llvm.loongarch.lsx.vmsub.h" => "__builtin_lsx_vmsub_h", + "llvm.loongarch.lsx.vmsub.w" => "__builtin_lsx_vmsub_w", + "llvm.loongarch.lsx.vmuh.b" => "__builtin_lsx_vmuh_b", + "llvm.loongarch.lsx.vmuh.bu" => "__builtin_lsx_vmuh_bu", + "llvm.loongarch.lsx.vmuh.d" => "__builtin_lsx_vmuh_d", + "llvm.loongarch.lsx.vmuh.du" => "__builtin_lsx_vmuh_du", + "llvm.loongarch.lsx.vmuh.h" => "__builtin_lsx_vmuh_h", + "llvm.loongarch.lsx.vmuh.hu" => "__builtin_lsx_vmuh_hu", + "llvm.loongarch.lsx.vmuh.w" => "__builtin_lsx_vmuh_w", + "llvm.loongarch.lsx.vmuh.wu" => "__builtin_lsx_vmuh_wu", + "llvm.loongarch.lsx.vmul.b" => "__builtin_lsx_vmul_b", + "llvm.loongarch.lsx.vmul.d" => "__builtin_lsx_vmul_d", + "llvm.loongarch.lsx.vmul.h" => "__builtin_lsx_vmul_h", + "llvm.loongarch.lsx.vmul.w" => "__builtin_lsx_vmul_w", + "llvm.loongarch.lsx.vmulwev.d.w" => "__builtin_lsx_vmulwev_d_w", + "llvm.loongarch.lsx.vmulwev.d.wu" => "__builtin_lsx_vmulwev_d_wu", + "llvm.loongarch.lsx.vmulwev.d.wu.w" => "__builtin_lsx_vmulwev_d_wu_w", + "llvm.loongarch.lsx.vmulwev.h.b" => "__builtin_lsx_vmulwev_h_b", + "llvm.loongarch.lsx.vmulwev.h.bu" => "__builtin_lsx_vmulwev_h_bu", + "llvm.loongarch.lsx.vmulwev.h.bu.b" => "__builtin_lsx_vmulwev_h_bu_b", + "llvm.loongarch.lsx.vmulwev.q.d" => "__builtin_lsx_vmulwev_q_d", + "llvm.loongarch.lsx.vmulwev.q.du" => "__builtin_lsx_vmulwev_q_du", + "llvm.loongarch.lsx.vmulwev.q.du.d" => "__builtin_lsx_vmulwev_q_du_d", + "llvm.loongarch.lsx.vmulwev.w.h" => "__builtin_lsx_vmulwev_w_h", + "llvm.loongarch.lsx.vmulwev.w.hu" => "__builtin_lsx_vmulwev_w_hu", + "llvm.loongarch.lsx.vmulwev.w.hu.h" => "__builtin_lsx_vmulwev_w_hu_h", + "llvm.loongarch.lsx.vmulwod.d.w" => "__builtin_lsx_vmulwod_d_w", + "llvm.loongarch.lsx.vmulwod.d.wu" => "__builtin_lsx_vmulwod_d_wu", + "llvm.loongarch.lsx.vmulwod.d.wu.w" => "__builtin_lsx_vmulwod_d_wu_w", + "llvm.loongarch.lsx.vmulwod.h.b" => "__builtin_lsx_vmulwod_h_b", + "llvm.loongarch.lsx.vmulwod.h.bu" => "__builtin_lsx_vmulwod_h_bu", + "llvm.loongarch.lsx.vmulwod.h.bu.b" => "__builtin_lsx_vmulwod_h_bu_b", + "llvm.loongarch.lsx.vmulwod.q.d" => "__builtin_lsx_vmulwod_q_d", + "llvm.loongarch.lsx.vmulwod.q.du" => "__builtin_lsx_vmulwod_q_du", + "llvm.loongarch.lsx.vmulwod.q.du.d" => 
"__builtin_lsx_vmulwod_q_du_d", + "llvm.loongarch.lsx.vmulwod.w.h" => "__builtin_lsx_vmulwod_w_h", + "llvm.loongarch.lsx.vmulwod.w.hu" => "__builtin_lsx_vmulwod_w_hu", + "llvm.loongarch.lsx.vmulwod.w.hu.h" => "__builtin_lsx_vmulwod_w_hu_h", + "llvm.loongarch.lsx.vneg.b" => "__builtin_lsx_vneg_b", + "llvm.loongarch.lsx.vneg.d" => "__builtin_lsx_vneg_d", + "llvm.loongarch.lsx.vneg.h" => "__builtin_lsx_vneg_h", + "llvm.loongarch.lsx.vneg.w" => "__builtin_lsx_vneg_w", + "llvm.loongarch.lsx.vnor.v" => "__builtin_lsx_vnor_v", + "llvm.loongarch.lsx.vnori.b" => "__builtin_lsx_vnori_b", + "llvm.loongarch.lsx.vor.v" => "__builtin_lsx_vor_v", + "llvm.loongarch.lsx.vori.b" => "__builtin_lsx_vori_b", + "llvm.loongarch.lsx.vorn.v" => "__builtin_lsx_vorn_v", + "llvm.loongarch.lsx.vpackev.b" => "__builtin_lsx_vpackev_b", + "llvm.loongarch.lsx.vpackev.d" => "__builtin_lsx_vpackev_d", + "llvm.loongarch.lsx.vpackev.h" => "__builtin_lsx_vpackev_h", + "llvm.loongarch.lsx.vpackev.w" => "__builtin_lsx_vpackev_w", + "llvm.loongarch.lsx.vpackod.b" => "__builtin_lsx_vpackod_b", + "llvm.loongarch.lsx.vpackod.d" => "__builtin_lsx_vpackod_d", + "llvm.loongarch.lsx.vpackod.h" => "__builtin_lsx_vpackod_h", + "llvm.loongarch.lsx.vpackod.w" => "__builtin_lsx_vpackod_w", + "llvm.loongarch.lsx.vpcnt.b" => "__builtin_lsx_vpcnt_b", + "llvm.loongarch.lsx.vpcnt.d" => "__builtin_lsx_vpcnt_d", + "llvm.loongarch.lsx.vpcnt.h" => "__builtin_lsx_vpcnt_h", + "llvm.loongarch.lsx.vpcnt.w" => "__builtin_lsx_vpcnt_w", + "llvm.loongarch.lsx.vpermi.w" => "__builtin_lsx_vpermi_w", + "llvm.loongarch.lsx.vpickev.b" => "__builtin_lsx_vpickev_b", + "llvm.loongarch.lsx.vpickev.d" => "__builtin_lsx_vpickev_d", + "llvm.loongarch.lsx.vpickev.h" => "__builtin_lsx_vpickev_h", + "llvm.loongarch.lsx.vpickev.w" => "__builtin_lsx_vpickev_w", + "llvm.loongarch.lsx.vpickod.b" => "__builtin_lsx_vpickod_b", + "llvm.loongarch.lsx.vpickod.d" => "__builtin_lsx_vpickod_d", + "llvm.loongarch.lsx.vpickod.h" => "__builtin_lsx_vpickod_h", + "llvm.loongarch.lsx.vpickod.w" => "__builtin_lsx_vpickod_w", + "llvm.loongarch.lsx.vpickve2gr.b" => "__builtin_lsx_vpickve2gr_b", + "llvm.loongarch.lsx.vpickve2gr.bu" => "__builtin_lsx_vpickve2gr_bu", + "llvm.loongarch.lsx.vpickve2gr.d" => "__builtin_lsx_vpickve2gr_d", + "llvm.loongarch.lsx.vpickve2gr.du" => "__builtin_lsx_vpickve2gr_du", + "llvm.loongarch.lsx.vpickve2gr.h" => "__builtin_lsx_vpickve2gr_h", + "llvm.loongarch.lsx.vpickve2gr.hu" => "__builtin_lsx_vpickve2gr_hu", + "llvm.loongarch.lsx.vpickve2gr.w" => "__builtin_lsx_vpickve2gr_w", + "llvm.loongarch.lsx.vpickve2gr.wu" => "__builtin_lsx_vpickve2gr_wu", + "llvm.loongarch.lsx.vreplgr2vr.b" => "__builtin_lsx_vreplgr2vr_b", + "llvm.loongarch.lsx.vreplgr2vr.d" => "__builtin_lsx_vreplgr2vr_d", + "llvm.loongarch.lsx.vreplgr2vr.h" => "__builtin_lsx_vreplgr2vr_h", + "llvm.loongarch.lsx.vreplgr2vr.w" => "__builtin_lsx_vreplgr2vr_w", + "llvm.loongarch.lsx.vrepli.b" => "__builtin_lsx_vrepli_b", + "llvm.loongarch.lsx.vrepli.d" => "__builtin_lsx_vrepli_d", + "llvm.loongarch.lsx.vrepli.h" => "__builtin_lsx_vrepli_h", + "llvm.loongarch.lsx.vrepli.w" => "__builtin_lsx_vrepli_w", + "llvm.loongarch.lsx.vreplve.b" => "__builtin_lsx_vreplve_b", + "llvm.loongarch.lsx.vreplve.d" => "__builtin_lsx_vreplve_d", + "llvm.loongarch.lsx.vreplve.h" => "__builtin_lsx_vreplve_h", + "llvm.loongarch.lsx.vreplve.w" => "__builtin_lsx_vreplve_w", + "llvm.loongarch.lsx.vreplvei.b" => "__builtin_lsx_vreplvei_b", + "llvm.loongarch.lsx.vreplvei.d" => "__builtin_lsx_vreplvei_d", + "llvm.loongarch.lsx.vreplvei.h" 
=> "__builtin_lsx_vreplvei_h", + "llvm.loongarch.lsx.vreplvei.w" => "__builtin_lsx_vreplvei_w", + "llvm.loongarch.lsx.vrotr.b" => "__builtin_lsx_vrotr_b", + "llvm.loongarch.lsx.vrotr.d" => "__builtin_lsx_vrotr_d", + "llvm.loongarch.lsx.vrotr.h" => "__builtin_lsx_vrotr_h", + "llvm.loongarch.lsx.vrotr.w" => "__builtin_lsx_vrotr_w", + "llvm.loongarch.lsx.vrotri.b" => "__builtin_lsx_vrotri_b", + "llvm.loongarch.lsx.vrotri.d" => "__builtin_lsx_vrotri_d", + "llvm.loongarch.lsx.vrotri.h" => "__builtin_lsx_vrotri_h", + "llvm.loongarch.lsx.vrotri.w" => "__builtin_lsx_vrotri_w", + "llvm.loongarch.lsx.vsadd.b" => "__builtin_lsx_vsadd_b", + "llvm.loongarch.lsx.vsadd.bu" => "__builtin_lsx_vsadd_bu", + "llvm.loongarch.lsx.vsadd.d" => "__builtin_lsx_vsadd_d", + "llvm.loongarch.lsx.vsadd.du" => "__builtin_lsx_vsadd_du", + "llvm.loongarch.lsx.vsadd.h" => "__builtin_lsx_vsadd_h", + "llvm.loongarch.lsx.vsadd.hu" => "__builtin_lsx_vsadd_hu", + "llvm.loongarch.lsx.vsadd.w" => "__builtin_lsx_vsadd_w", + "llvm.loongarch.lsx.vsadd.wu" => "__builtin_lsx_vsadd_wu", + "llvm.loongarch.lsx.vsat.b" => "__builtin_lsx_vsat_b", + "llvm.loongarch.lsx.vsat.bu" => "__builtin_lsx_vsat_bu", + "llvm.loongarch.lsx.vsat.d" => "__builtin_lsx_vsat_d", + "llvm.loongarch.lsx.vsat.du" => "__builtin_lsx_vsat_du", + "llvm.loongarch.lsx.vsat.h" => "__builtin_lsx_vsat_h", + "llvm.loongarch.lsx.vsat.hu" => "__builtin_lsx_vsat_hu", + "llvm.loongarch.lsx.vsat.w" => "__builtin_lsx_vsat_w", + "llvm.loongarch.lsx.vsat.wu" => "__builtin_lsx_vsat_wu", + "llvm.loongarch.lsx.vseq.b" => "__builtin_lsx_vseq_b", + "llvm.loongarch.lsx.vseq.d" => "__builtin_lsx_vseq_d", + "llvm.loongarch.lsx.vseq.h" => "__builtin_lsx_vseq_h", + "llvm.loongarch.lsx.vseq.w" => "__builtin_lsx_vseq_w", + "llvm.loongarch.lsx.vseqi.b" => "__builtin_lsx_vseqi_b", + "llvm.loongarch.lsx.vseqi.d" => "__builtin_lsx_vseqi_d", + "llvm.loongarch.lsx.vseqi.h" => "__builtin_lsx_vseqi_h", + "llvm.loongarch.lsx.vseqi.w" => "__builtin_lsx_vseqi_w", + "llvm.loongarch.lsx.vshuf.b" => "__builtin_lsx_vshuf_b", + "llvm.loongarch.lsx.vshuf.d" => "__builtin_lsx_vshuf_d", + "llvm.loongarch.lsx.vshuf.h" => "__builtin_lsx_vshuf_h", + "llvm.loongarch.lsx.vshuf.w" => "__builtin_lsx_vshuf_w", + "llvm.loongarch.lsx.vshuf4i.b" => "__builtin_lsx_vshuf4i_b", + "llvm.loongarch.lsx.vshuf4i.d" => "__builtin_lsx_vshuf4i_d", + "llvm.loongarch.lsx.vshuf4i.h" => "__builtin_lsx_vshuf4i_h", + "llvm.loongarch.lsx.vshuf4i.w" => "__builtin_lsx_vshuf4i_w", + "llvm.loongarch.lsx.vsigncov.b" => "__builtin_lsx_vsigncov_b", + "llvm.loongarch.lsx.vsigncov.d" => "__builtin_lsx_vsigncov_d", + "llvm.loongarch.lsx.vsigncov.h" => "__builtin_lsx_vsigncov_h", + "llvm.loongarch.lsx.vsigncov.w" => "__builtin_lsx_vsigncov_w", + "llvm.loongarch.lsx.vsle.b" => "__builtin_lsx_vsle_b", + "llvm.loongarch.lsx.vsle.bu" => "__builtin_lsx_vsle_bu", + "llvm.loongarch.lsx.vsle.d" => "__builtin_lsx_vsle_d", + "llvm.loongarch.lsx.vsle.du" => "__builtin_lsx_vsle_du", + "llvm.loongarch.lsx.vsle.h" => "__builtin_lsx_vsle_h", + "llvm.loongarch.lsx.vsle.hu" => "__builtin_lsx_vsle_hu", + "llvm.loongarch.lsx.vsle.w" => "__builtin_lsx_vsle_w", + "llvm.loongarch.lsx.vsle.wu" => "__builtin_lsx_vsle_wu", + "llvm.loongarch.lsx.vslei.b" => "__builtin_lsx_vslei_b", + "llvm.loongarch.lsx.vslei.bu" => "__builtin_lsx_vslei_bu", + "llvm.loongarch.lsx.vslei.d" => "__builtin_lsx_vslei_d", + "llvm.loongarch.lsx.vslei.du" => "__builtin_lsx_vslei_du", + "llvm.loongarch.lsx.vslei.h" => "__builtin_lsx_vslei_h", + "llvm.loongarch.lsx.vslei.hu" => 
"__builtin_lsx_vslei_hu", + "llvm.loongarch.lsx.vslei.w" => "__builtin_lsx_vslei_w", + "llvm.loongarch.lsx.vslei.wu" => "__builtin_lsx_vslei_wu", + "llvm.loongarch.lsx.vsll.b" => "__builtin_lsx_vsll_b", + "llvm.loongarch.lsx.vsll.d" => "__builtin_lsx_vsll_d", + "llvm.loongarch.lsx.vsll.h" => "__builtin_lsx_vsll_h", + "llvm.loongarch.lsx.vsll.w" => "__builtin_lsx_vsll_w", + "llvm.loongarch.lsx.vslli.b" => "__builtin_lsx_vslli_b", + "llvm.loongarch.lsx.vslli.d" => "__builtin_lsx_vslli_d", + "llvm.loongarch.lsx.vslli.h" => "__builtin_lsx_vslli_h", + "llvm.loongarch.lsx.vslli.w" => "__builtin_lsx_vslli_w", + "llvm.loongarch.lsx.vsllwil.d.w" => "__builtin_lsx_vsllwil_d_w", + "llvm.loongarch.lsx.vsllwil.du.wu" => "__builtin_lsx_vsllwil_du_wu", + "llvm.loongarch.lsx.vsllwil.h.b" => "__builtin_lsx_vsllwil_h_b", + "llvm.loongarch.lsx.vsllwil.hu.bu" => "__builtin_lsx_vsllwil_hu_bu", + "llvm.loongarch.lsx.vsllwil.w.h" => "__builtin_lsx_vsllwil_w_h", + "llvm.loongarch.lsx.vsllwil.wu.hu" => "__builtin_lsx_vsllwil_wu_hu", + "llvm.loongarch.lsx.vslt.b" => "__builtin_lsx_vslt_b", + "llvm.loongarch.lsx.vslt.bu" => "__builtin_lsx_vslt_bu", + "llvm.loongarch.lsx.vslt.d" => "__builtin_lsx_vslt_d", + "llvm.loongarch.lsx.vslt.du" => "__builtin_lsx_vslt_du", + "llvm.loongarch.lsx.vslt.h" => "__builtin_lsx_vslt_h", + "llvm.loongarch.lsx.vslt.hu" => "__builtin_lsx_vslt_hu", + "llvm.loongarch.lsx.vslt.w" => "__builtin_lsx_vslt_w", + "llvm.loongarch.lsx.vslt.wu" => "__builtin_lsx_vslt_wu", + "llvm.loongarch.lsx.vslti.b" => "__builtin_lsx_vslti_b", + "llvm.loongarch.lsx.vslti.bu" => "__builtin_lsx_vslti_bu", + "llvm.loongarch.lsx.vslti.d" => "__builtin_lsx_vslti_d", + "llvm.loongarch.lsx.vslti.du" => "__builtin_lsx_vslti_du", + "llvm.loongarch.lsx.vslti.h" => "__builtin_lsx_vslti_h", + "llvm.loongarch.lsx.vslti.hu" => "__builtin_lsx_vslti_hu", + "llvm.loongarch.lsx.vslti.w" => "__builtin_lsx_vslti_w", + "llvm.loongarch.lsx.vslti.wu" => "__builtin_lsx_vslti_wu", + "llvm.loongarch.lsx.vsra.b" => "__builtin_lsx_vsra_b", + "llvm.loongarch.lsx.vsra.d" => "__builtin_lsx_vsra_d", + "llvm.loongarch.lsx.vsra.h" => "__builtin_lsx_vsra_h", + "llvm.loongarch.lsx.vsra.w" => "__builtin_lsx_vsra_w", + "llvm.loongarch.lsx.vsrai.b" => "__builtin_lsx_vsrai_b", + "llvm.loongarch.lsx.vsrai.d" => "__builtin_lsx_vsrai_d", + "llvm.loongarch.lsx.vsrai.h" => "__builtin_lsx_vsrai_h", + "llvm.loongarch.lsx.vsrai.w" => "__builtin_lsx_vsrai_w", + "llvm.loongarch.lsx.vsran.b.h" => "__builtin_lsx_vsran_b_h", + "llvm.loongarch.lsx.vsran.h.w" => "__builtin_lsx_vsran_h_w", + "llvm.loongarch.lsx.vsran.w.d" => "__builtin_lsx_vsran_w_d", + "llvm.loongarch.lsx.vsrani.b.h" => "__builtin_lsx_vsrani_b_h", + "llvm.loongarch.lsx.vsrani.d.q" => "__builtin_lsx_vsrani_d_q", + "llvm.loongarch.lsx.vsrani.h.w" => "__builtin_lsx_vsrani_h_w", + "llvm.loongarch.lsx.vsrani.w.d" => "__builtin_lsx_vsrani_w_d", + "llvm.loongarch.lsx.vsrar.b" => "__builtin_lsx_vsrar_b", + "llvm.loongarch.lsx.vsrar.d" => "__builtin_lsx_vsrar_d", + "llvm.loongarch.lsx.vsrar.h" => "__builtin_lsx_vsrar_h", + "llvm.loongarch.lsx.vsrar.w" => "__builtin_lsx_vsrar_w", + "llvm.loongarch.lsx.vsrari.b" => "__builtin_lsx_vsrari_b", + "llvm.loongarch.lsx.vsrari.d" => "__builtin_lsx_vsrari_d", + "llvm.loongarch.lsx.vsrari.h" => "__builtin_lsx_vsrari_h", + "llvm.loongarch.lsx.vsrari.w" => "__builtin_lsx_vsrari_w", + "llvm.loongarch.lsx.vsrarn.b.h" => "__builtin_lsx_vsrarn_b_h", + "llvm.loongarch.lsx.vsrarn.h.w" => "__builtin_lsx_vsrarn_h_w", + "llvm.loongarch.lsx.vsrarn.w.d" => 
"__builtin_lsx_vsrarn_w_d", + "llvm.loongarch.lsx.vsrarni.b.h" => "__builtin_lsx_vsrarni_b_h", + "llvm.loongarch.lsx.vsrarni.d.q" => "__builtin_lsx_vsrarni_d_q", + "llvm.loongarch.lsx.vsrarni.h.w" => "__builtin_lsx_vsrarni_h_w", + "llvm.loongarch.lsx.vsrarni.w.d" => "__builtin_lsx_vsrarni_w_d", + "llvm.loongarch.lsx.vsrl.b" => "__builtin_lsx_vsrl_b", + "llvm.loongarch.lsx.vsrl.d" => "__builtin_lsx_vsrl_d", + "llvm.loongarch.lsx.vsrl.h" => "__builtin_lsx_vsrl_h", + "llvm.loongarch.lsx.vsrl.w" => "__builtin_lsx_vsrl_w", + "llvm.loongarch.lsx.vsrli.b" => "__builtin_lsx_vsrli_b", + "llvm.loongarch.lsx.vsrli.d" => "__builtin_lsx_vsrli_d", + "llvm.loongarch.lsx.vsrli.h" => "__builtin_lsx_vsrli_h", + "llvm.loongarch.lsx.vsrli.w" => "__builtin_lsx_vsrli_w", + "llvm.loongarch.lsx.vsrln.b.h" => "__builtin_lsx_vsrln_b_h", + "llvm.loongarch.lsx.vsrln.h.w" => "__builtin_lsx_vsrln_h_w", + "llvm.loongarch.lsx.vsrln.w.d" => "__builtin_lsx_vsrln_w_d", + "llvm.loongarch.lsx.vsrlni.b.h" => "__builtin_lsx_vsrlni_b_h", + "llvm.loongarch.lsx.vsrlni.d.q" => "__builtin_lsx_vsrlni_d_q", + "llvm.loongarch.lsx.vsrlni.h.w" => "__builtin_lsx_vsrlni_h_w", + "llvm.loongarch.lsx.vsrlni.w.d" => "__builtin_lsx_vsrlni_w_d", + "llvm.loongarch.lsx.vsrlr.b" => "__builtin_lsx_vsrlr_b", + "llvm.loongarch.lsx.vsrlr.d" => "__builtin_lsx_vsrlr_d", + "llvm.loongarch.lsx.vsrlr.h" => "__builtin_lsx_vsrlr_h", + "llvm.loongarch.lsx.vsrlr.w" => "__builtin_lsx_vsrlr_w", + "llvm.loongarch.lsx.vsrlri.b" => "__builtin_lsx_vsrlri_b", + "llvm.loongarch.lsx.vsrlri.d" => "__builtin_lsx_vsrlri_d", + "llvm.loongarch.lsx.vsrlri.h" => "__builtin_lsx_vsrlri_h", + "llvm.loongarch.lsx.vsrlri.w" => "__builtin_lsx_vsrlri_w", + "llvm.loongarch.lsx.vsrlrn.b.h" => "__builtin_lsx_vsrlrn_b_h", + "llvm.loongarch.lsx.vsrlrn.h.w" => "__builtin_lsx_vsrlrn_h_w", + "llvm.loongarch.lsx.vsrlrn.w.d" => "__builtin_lsx_vsrlrn_w_d", + "llvm.loongarch.lsx.vsrlrni.b.h" => "__builtin_lsx_vsrlrni_b_h", + "llvm.loongarch.lsx.vsrlrni.d.q" => "__builtin_lsx_vsrlrni_d_q", + "llvm.loongarch.lsx.vsrlrni.h.w" => "__builtin_lsx_vsrlrni_h_w", + "llvm.loongarch.lsx.vsrlrni.w.d" => "__builtin_lsx_vsrlrni_w_d", + "llvm.loongarch.lsx.vssran.b.h" => "__builtin_lsx_vssran_b_h", + "llvm.loongarch.lsx.vssran.bu.h" => "__builtin_lsx_vssran_bu_h", + "llvm.loongarch.lsx.vssran.h.w" => "__builtin_lsx_vssran_h_w", + "llvm.loongarch.lsx.vssran.hu.w" => "__builtin_lsx_vssran_hu_w", + "llvm.loongarch.lsx.vssran.w.d" => "__builtin_lsx_vssran_w_d", + "llvm.loongarch.lsx.vssran.wu.d" => "__builtin_lsx_vssran_wu_d", + "llvm.loongarch.lsx.vssrani.b.h" => "__builtin_lsx_vssrani_b_h", + "llvm.loongarch.lsx.vssrani.bu.h" => "__builtin_lsx_vssrani_bu_h", + "llvm.loongarch.lsx.vssrani.d.q" => "__builtin_lsx_vssrani_d_q", + "llvm.loongarch.lsx.vssrani.du.q" => "__builtin_lsx_vssrani_du_q", + "llvm.loongarch.lsx.vssrani.h.w" => "__builtin_lsx_vssrani_h_w", + "llvm.loongarch.lsx.vssrani.hu.w" => "__builtin_lsx_vssrani_hu_w", + "llvm.loongarch.lsx.vssrani.w.d" => "__builtin_lsx_vssrani_w_d", + "llvm.loongarch.lsx.vssrani.wu.d" => "__builtin_lsx_vssrani_wu_d", + "llvm.loongarch.lsx.vssrarn.b.h" => "__builtin_lsx_vssrarn_b_h", + "llvm.loongarch.lsx.vssrarn.bu.h" => "__builtin_lsx_vssrarn_bu_h", + "llvm.loongarch.lsx.vssrarn.h.w" => "__builtin_lsx_vssrarn_h_w", + "llvm.loongarch.lsx.vssrarn.hu.w" => "__builtin_lsx_vssrarn_hu_w", + "llvm.loongarch.lsx.vssrarn.w.d" => "__builtin_lsx_vssrarn_w_d", + "llvm.loongarch.lsx.vssrarn.wu.d" => "__builtin_lsx_vssrarn_wu_d", + "llvm.loongarch.lsx.vssrarni.b.h" => 
"__builtin_lsx_vssrarni_b_h", + "llvm.loongarch.lsx.vssrarni.bu.h" => "__builtin_lsx_vssrarni_bu_h", + "llvm.loongarch.lsx.vssrarni.d.q" => "__builtin_lsx_vssrarni_d_q", + "llvm.loongarch.lsx.vssrarni.du.q" => "__builtin_lsx_vssrarni_du_q", + "llvm.loongarch.lsx.vssrarni.h.w" => "__builtin_lsx_vssrarni_h_w", + "llvm.loongarch.lsx.vssrarni.hu.w" => "__builtin_lsx_vssrarni_hu_w", + "llvm.loongarch.lsx.vssrarni.w.d" => "__builtin_lsx_vssrarni_w_d", + "llvm.loongarch.lsx.vssrarni.wu.d" => "__builtin_lsx_vssrarni_wu_d", + "llvm.loongarch.lsx.vssrln.b.h" => "__builtin_lsx_vssrln_b_h", + "llvm.loongarch.lsx.vssrln.bu.h" => "__builtin_lsx_vssrln_bu_h", + "llvm.loongarch.lsx.vssrln.h.w" => "__builtin_lsx_vssrln_h_w", + "llvm.loongarch.lsx.vssrln.hu.w" => "__builtin_lsx_vssrln_hu_w", + "llvm.loongarch.lsx.vssrln.w.d" => "__builtin_lsx_vssrln_w_d", + "llvm.loongarch.lsx.vssrln.wu.d" => "__builtin_lsx_vssrln_wu_d", + "llvm.loongarch.lsx.vssrlni.b.h" => "__builtin_lsx_vssrlni_b_h", + "llvm.loongarch.lsx.vssrlni.bu.h" => "__builtin_lsx_vssrlni_bu_h", + "llvm.loongarch.lsx.vssrlni.d.q" => "__builtin_lsx_vssrlni_d_q", + "llvm.loongarch.lsx.vssrlni.du.q" => "__builtin_lsx_vssrlni_du_q", + "llvm.loongarch.lsx.vssrlni.h.w" => "__builtin_lsx_vssrlni_h_w", + "llvm.loongarch.lsx.vssrlni.hu.w" => "__builtin_lsx_vssrlni_hu_w", + "llvm.loongarch.lsx.vssrlni.w.d" => "__builtin_lsx_vssrlni_w_d", + "llvm.loongarch.lsx.vssrlni.wu.d" => "__builtin_lsx_vssrlni_wu_d", + "llvm.loongarch.lsx.vssrlrn.b.h" => "__builtin_lsx_vssrlrn_b_h", + "llvm.loongarch.lsx.vssrlrn.bu.h" => "__builtin_lsx_vssrlrn_bu_h", + "llvm.loongarch.lsx.vssrlrn.h.w" => "__builtin_lsx_vssrlrn_h_w", + "llvm.loongarch.lsx.vssrlrn.hu.w" => "__builtin_lsx_vssrlrn_hu_w", + "llvm.loongarch.lsx.vssrlrn.w.d" => "__builtin_lsx_vssrlrn_w_d", + "llvm.loongarch.lsx.vssrlrn.wu.d" => "__builtin_lsx_vssrlrn_wu_d", + "llvm.loongarch.lsx.vssrlrni.b.h" => "__builtin_lsx_vssrlrni_b_h", + "llvm.loongarch.lsx.vssrlrni.bu.h" => "__builtin_lsx_vssrlrni_bu_h", + "llvm.loongarch.lsx.vssrlrni.d.q" => "__builtin_lsx_vssrlrni_d_q", + "llvm.loongarch.lsx.vssrlrni.du.q" => "__builtin_lsx_vssrlrni_du_q", + "llvm.loongarch.lsx.vssrlrni.h.w" => "__builtin_lsx_vssrlrni_h_w", + "llvm.loongarch.lsx.vssrlrni.hu.w" => "__builtin_lsx_vssrlrni_hu_w", + "llvm.loongarch.lsx.vssrlrni.w.d" => "__builtin_lsx_vssrlrni_w_d", + "llvm.loongarch.lsx.vssrlrni.wu.d" => "__builtin_lsx_vssrlrni_wu_d", + "llvm.loongarch.lsx.vssub.b" => "__builtin_lsx_vssub_b", + "llvm.loongarch.lsx.vssub.bu" => "__builtin_lsx_vssub_bu", + "llvm.loongarch.lsx.vssub.d" => "__builtin_lsx_vssub_d", + "llvm.loongarch.lsx.vssub.du" => "__builtin_lsx_vssub_du", + "llvm.loongarch.lsx.vssub.h" => "__builtin_lsx_vssub_h", + "llvm.loongarch.lsx.vssub.hu" => "__builtin_lsx_vssub_hu", + "llvm.loongarch.lsx.vssub.w" => "__builtin_lsx_vssub_w", + "llvm.loongarch.lsx.vssub.wu" => "__builtin_lsx_vssub_wu", + "llvm.loongarch.lsx.vst" => "__builtin_lsx_vst", + "llvm.loongarch.lsx.vstelm.b" => "__builtin_lsx_vstelm_b", + "llvm.loongarch.lsx.vstelm.d" => "__builtin_lsx_vstelm_d", + "llvm.loongarch.lsx.vstelm.h" => "__builtin_lsx_vstelm_h", + "llvm.loongarch.lsx.vstelm.w" => "__builtin_lsx_vstelm_w", + "llvm.loongarch.lsx.vstx" => "__builtin_lsx_vstx", + "llvm.loongarch.lsx.vsub.b" => "__builtin_lsx_vsub_b", + "llvm.loongarch.lsx.vsub.d" => "__builtin_lsx_vsub_d", + "llvm.loongarch.lsx.vsub.h" => "__builtin_lsx_vsub_h", + "llvm.loongarch.lsx.vsub.q" => "__builtin_lsx_vsub_q", + "llvm.loongarch.lsx.vsub.w" => "__builtin_lsx_vsub_w", + 
"llvm.loongarch.lsx.vsubi.bu" => "__builtin_lsx_vsubi_bu", + "llvm.loongarch.lsx.vsubi.du" => "__builtin_lsx_vsubi_du", + "llvm.loongarch.lsx.vsubi.hu" => "__builtin_lsx_vsubi_hu", + "llvm.loongarch.lsx.vsubi.wu" => "__builtin_lsx_vsubi_wu", + "llvm.loongarch.lsx.vsubwev.d.w" => "__builtin_lsx_vsubwev_d_w", + "llvm.loongarch.lsx.vsubwev.d.wu" => "__builtin_lsx_vsubwev_d_wu", + "llvm.loongarch.lsx.vsubwev.h.b" => "__builtin_lsx_vsubwev_h_b", + "llvm.loongarch.lsx.vsubwev.h.bu" => "__builtin_lsx_vsubwev_h_bu", + "llvm.loongarch.lsx.vsubwev.q.d" => "__builtin_lsx_vsubwev_q_d", + "llvm.loongarch.lsx.vsubwev.q.du" => "__builtin_lsx_vsubwev_q_du", + "llvm.loongarch.lsx.vsubwev.w.h" => "__builtin_lsx_vsubwev_w_h", + "llvm.loongarch.lsx.vsubwev.w.hu" => "__builtin_lsx_vsubwev_w_hu", + "llvm.loongarch.lsx.vsubwod.d.w" => "__builtin_lsx_vsubwod_d_w", + "llvm.loongarch.lsx.vsubwod.d.wu" => "__builtin_lsx_vsubwod_d_wu", + "llvm.loongarch.lsx.vsubwod.h.b" => "__builtin_lsx_vsubwod_h_b", + "llvm.loongarch.lsx.vsubwod.h.bu" => "__builtin_lsx_vsubwod_h_bu", + "llvm.loongarch.lsx.vsubwod.q.d" => "__builtin_lsx_vsubwod_q_d", + "llvm.loongarch.lsx.vsubwod.q.du" => "__builtin_lsx_vsubwod_q_du", + "llvm.loongarch.lsx.vsubwod.w.h" => "__builtin_lsx_vsubwod_w_h", + "llvm.loongarch.lsx.vsubwod.w.hu" => "__builtin_lsx_vsubwod_w_hu", + "llvm.loongarch.lsx.vxor.v" => "__builtin_lsx_vxor_v", + "llvm.loongarch.lsx.vxori.b" => "__builtin_lsx_vxori_b", + "llvm.loongarch.movfcsr2gr" => "__builtin_loongarch_movfcsr2gr", + "llvm.loongarch.movgr2fcsr" => "__builtin_loongarch_movgr2fcsr", + "llvm.loongarch.syscall" => "__builtin_loongarch_syscall", // mips "llvm.mips.absq.s.ph" => "__builtin_mips_absq_s_ph", "llvm.mips.absq.s.qb" => "__builtin_mips_absq_s_qb", @@ -2954,6 +4442,8 @@ "llvm.nvvm.barrier0.and" => "__nvvm_bar0_and", "llvm.nvvm.barrier0.or" => "__nvvm_bar0_or", "llvm.nvvm.barrier0.popc" => "__nvvm_bar0_popc", + "llvm.nvvm.bf2h.rn" => "__nvvm_bf2h_rn", + "llvm.nvvm.bf2h.rn.ftz" => "__nvvm_bf2h_rn_ftz", "llvm.nvvm.bitcast.d2ll" => "__nvvm_bitcast_d2ll", "llvm.nvvm.bitcast.f2i" => "__nvvm_bitcast_f2i", "llvm.nvvm.bitcast.i2f" => "__nvvm_bitcast_i2f", @@ -3016,8 +4506,6 @@ "llvm.nvvm.div.rz.ftz.f" => "__nvvm_div_rz_ftz_f", "llvm.nvvm.ex2.approx.d" => "__nvvm_ex2_approx_d", "llvm.nvvm.ex2.approx.f" => "__nvvm_ex2_approx_f", - "llvm.nvvm.ex2.approx.f16" => "__nvvm_ex2_approx_f16", - "llvm.nvvm.ex2.approx.f16x2" => "__nvvm_ex2_approx_f16x2", "llvm.nvvm.ex2.approx.ftz.f" => "__nvvm_ex2_approx_ftz_f", "llvm.nvvm.f2bf16.rn" => "__nvvm_f2bf16_rn", "llvm.nvvm.f2bf16.rn.relu" => "__nvvm_f2bf16_rn_relu", @@ -3079,11 +4567,17 @@ "llvm.nvvm.fma.rn.bf16x2" => "__nvvm_fma_rn_bf16x2", "llvm.nvvm.fma.rn.d" => "__nvvm_fma_rn_d", "llvm.nvvm.fma.rn.f" => "__nvvm_fma_rn_f", - "llvm.nvvm.fma.rn.f16" => "__nvvm_fma_rn_f16", - "llvm.nvvm.fma.rn.f16x2" => "__nvvm_fma_rn_f16x2", + "llvm.nvvm.fma.rn.ftz.bf16" => "__nvvm_fma_rn_ftz_bf16", + "llvm.nvvm.fma.rn.ftz.bf16x2" => "__nvvm_fma_rn_ftz_bf16x2", "llvm.nvvm.fma.rn.ftz.f" => "__nvvm_fma_rn_ftz_f", + "llvm.nvvm.fma.rn.ftz.relu.bf16" => "__nvvm_fma_rn_ftz_relu_bf16", + "llvm.nvvm.fma.rn.ftz.relu.bf16x2" => "__nvvm_fma_rn_ftz_relu_bf16x2", + "llvm.nvvm.fma.rn.ftz.sat.bf16" => "__nvvm_fma_rn_ftz_sat_bf16", + "llvm.nvvm.fma.rn.ftz.sat.bf16x2" => "__nvvm_fma_rn_ftz_sat_bf16x2", "llvm.nvvm.fma.rn.relu.bf16" => "__nvvm_fma_rn_relu_bf16", "llvm.nvvm.fma.rn.relu.bf16x2" => "__nvvm_fma_rn_relu_bf16x2", + "llvm.nvvm.fma.rn.sat.bf16" => "__nvvm_fma_rn_sat_bf16", + "llvm.nvvm.fma.rn.sat.bf16x2" => 
"__nvvm_fma_rn_sat_bf16x2", "llvm.nvvm.fma.rp.d" => "__nvvm_fma_rp_d", "llvm.nvvm.fma.rp.f" => "__nvvm_fma_rp_f", "llvm.nvvm.fma.rp.ftz.f" => "__nvvm_fma_rp_ftz_f", @@ -3094,11 +4588,17 @@ "llvm.nvvm.fmax.bf16x2" => "__nvvm_fmax_bf16x2", "llvm.nvvm.fmax.d" => "__nvvm_fmax_d", "llvm.nvvm.fmax.f" => "__nvvm_fmax_f", - "llvm.nvvm.fmax.f16" => "__nvvm_fmax_f16", - "llvm.nvvm.fmax.f16x2" => "__nvvm_fmax_f16x2", + "llvm.nvvm.fmax.ftz.bf16" => "__nvvm_fmax_ftz_bf16", + "llvm.nvvm.fmax.ftz.bf16x2" => "__nvvm_fmax_ftz_bf16x2", "llvm.nvvm.fmax.ftz.f" => "__nvvm_fmax_ftz_f", + "llvm.nvvm.fmax.ftz.nan.bf16" => "__nvvm_fmax_ftz_nan_bf16", + "llvm.nvvm.fmax.ftz.nan.bf16x2" => "__nvvm_fmax_ftz_nan_bf16x2", "llvm.nvvm.fmax.ftz.nan.f" => "__nvvm_fmax_ftz_nan_f", + "llvm.nvvm.fmax.ftz.nan.xorsign.abs.bf16" => "__nvvm_fmax_ftz_nan_xorsign_abs_bf16", + "llvm.nvvm.fmax.ftz.nan.xorsign.abs.bf16x2" => "__nvvm_fmax_ftz_nan_xorsign_abs_bf16x2", "llvm.nvvm.fmax.ftz.nan.xorsign.abs.f" => "__nvvm_fmax_ftz_nan_xorsign_abs_f", + "llvm.nvvm.fmax.ftz.xorsign.abs.bf16" => "__nvvm_fmax_ftz_xorsign_abs_bf16", + "llvm.nvvm.fmax.ftz.xorsign.abs.bf16x2" => "__nvvm_fmax_ftz_xorsign_abs_bf16x2", "llvm.nvvm.fmax.ftz.xorsign.abs.f" => "__nvvm_fmax_ftz_xorsign_abs_f", "llvm.nvvm.fmax.nan.bf16" => "__nvvm_fmax_nan_bf16", "llvm.nvvm.fmax.nan.bf16x2" => "__nvvm_fmax_nan_bf16x2", @@ -3113,11 +4613,17 @@ "llvm.nvvm.fmin.bf16x2" => "__nvvm_fmin_bf16x2", "llvm.nvvm.fmin.d" => "__nvvm_fmin_d", "llvm.nvvm.fmin.f" => "__nvvm_fmin_f", - "llvm.nvvm.fmin.f16" => "__nvvm_fmin_f16", - "llvm.nvvm.fmin.f16x2" => "__nvvm_fmin_f16x2", + "llvm.nvvm.fmin.ftz.bf16" => "__nvvm_fmin_ftz_bf16", + "llvm.nvvm.fmin.ftz.bf16x2" => "__nvvm_fmin_ftz_bf16x2", "llvm.nvvm.fmin.ftz.f" => "__nvvm_fmin_ftz_f", + "llvm.nvvm.fmin.ftz.nan.bf16" => "__nvvm_fmin_ftz_nan_bf16", + "llvm.nvvm.fmin.ftz.nan.bf16x2" => "__nvvm_fmin_ftz_nan_bf16x2", "llvm.nvvm.fmin.ftz.nan.f" => "__nvvm_fmin_ftz_nan_f", + "llvm.nvvm.fmin.ftz.nan.xorsign.abs.bf16" => "__nvvm_fmin_ftz_nan_xorsign_abs_bf16", + "llvm.nvvm.fmin.ftz.nan.xorsign.abs.bf16x2" => "__nvvm_fmin_ftz_nan_xorsign_abs_bf16x2", "llvm.nvvm.fmin.ftz.nan.xorsign.abs.f" => "__nvvm_fmin_ftz_nan_xorsign_abs_f", + "llvm.nvvm.fmin.ftz.xorsign.abs.bf16" => "__nvvm_fmin_ftz_xorsign_abs_bf16", + "llvm.nvvm.fmin.ftz.xorsign.abs.bf16x2" => "__nvvm_fmin_ftz_xorsign_abs_bf16x2", "llvm.nvvm.fmin.ftz.xorsign.abs.f" => "__nvvm_fmin_ftz_xorsign_abs_f", "llvm.nvvm.fmin.nan.bf16" => "__nvvm_fmin_nan_bf16", "llvm.nvvm.fmin.nan.bf16x2" => "__nvvm_fmin_nan_bf16x2", @@ -3979,6 +5485,7 @@ "llvm.ppc.maddhd" => "__builtin_ppc_maddhd", "llvm.ppc.maddhdu" => "__builtin_ppc_maddhdu", "llvm.ppc.maddld" => "__builtin_ppc_maddld", + "llvm.ppc.mffsl" => "__builtin_ppc_mffsl", "llvm.ppc.mfmsr" => "__builtin_ppc_mfmsr", "llvm.ppc.mftbu" => "__builtin_ppc_mftbu", "llvm.ppc.mtfsb0" => "__builtin_ppc_mtfsb0", @@ -4213,6 +5720,28 @@ "llvm.r600.read.tgid.x" => "__builtin_r600_read_tgid_x", "llvm.r600.read.tgid.y" => "__builtin_r600_read_tgid_y", "llvm.r600.read.tgid.z" => "__builtin_r600_read_tgid_z", + // riscv + "llvm.riscv.aes32dsi" => "__builtin_riscv_aes32dsi", + "llvm.riscv.aes32dsmi" => "__builtin_riscv_aes32dsmi", + "llvm.riscv.aes32esi" => "__builtin_riscv_aes32esi", + "llvm.riscv.aes32esmi" => "__builtin_riscv_aes32esmi", + "llvm.riscv.aes64ds" => "__builtin_riscv_aes64ds", + "llvm.riscv.aes64dsm" => "__builtin_riscv_aes64dsm", + "llvm.riscv.aes64es" => "__builtin_riscv_aes64es", + "llvm.riscv.aes64esm" => "__builtin_riscv_aes64esm", + "llvm.riscv.aes64im" => 
"__builtin_riscv_aes64im", + "llvm.riscv.aes64ks1i" => "__builtin_riscv_aes64ks1i", + "llvm.riscv.aes64ks2" => "__builtin_riscv_aes64ks2", + "llvm.riscv.sha512sig0" => "__builtin_riscv_sha512sig0", + "llvm.riscv.sha512sig0h" => "__builtin_riscv_sha512sig0h", + "llvm.riscv.sha512sig0l" => "__builtin_riscv_sha512sig0l", + "llvm.riscv.sha512sig1" => "__builtin_riscv_sha512sig1", + "llvm.riscv.sha512sig1h" => "__builtin_riscv_sha512sig1h", + "llvm.riscv.sha512sig1l" => "__builtin_riscv_sha512sig1l", + "llvm.riscv.sha512sum0" => "__builtin_riscv_sha512sum0", + "llvm.riscv.sha512sum0r" => "__builtin_riscv_sha512sum0r", + "llvm.riscv.sha512sum1" => "__builtin_riscv_sha512sum1", + "llvm.riscv.sha512sum1r" => "__builtin_riscv_sha512sum1r", // s390 "llvm.s390.efpc" => "__builtin_s390_efpc", "llvm.s390.etnd" => "__builtin_tx_nesting_depth", @@ -5912,6 +7441,18 @@ "llvm.x86.avx2.vpdpbuud.256" => "__builtin_ia32_vpdpbuud256", "llvm.x86.avx2.vpdpbuuds.128" => "__builtin_ia32_vpdpbuuds128", "llvm.x86.avx2.vpdpbuuds.256" => "__builtin_ia32_vpdpbuuds256", + "llvm.x86.avx2.vpdpwsud.128" => "__builtin_ia32_vpdpwsud128", + "llvm.x86.avx2.vpdpwsud.256" => "__builtin_ia32_vpdpwsud256", + "llvm.x86.avx2.vpdpwsuds.128" => "__builtin_ia32_vpdpwsuds128", + "llvm.x86.avx2.vpdpwsuds.256" => "__builtin_ia32_vpdpwsuds256", + "llvm.x86.avx2.vpdpwusd.128" => "__builtin_ia32_vpdpwusd128", + "llvm.x86.avx2.vpdpwusd.256" => "__builtin_ia32_vpdpwusd256", + "llvm.x86.avx2.vpdpwusds.128" => "__builtin_ia32_vpdpwusds128", + "llvm.x86.avx2.vpdpwusds.256" => "__builtin_ia32_vpdpwusds256", + "llvm.x86.avx2.vpdpwuud.128" => "__builtin_ia32_vpdpwuud128", + "llvm.x86.avx2.vpdpwuud.256" => "__builtin_ia32_vpdpwuud256", + "llvm.x86.avx2.vpdpwuuds.128" => "__builtin_ia32_vpdpwuuds128", + "llvm.x86.avx2.vpdpwuuds.256" => "__builtin_ia32_vpdpwuuds256", "llvm.x86.avx2.vperm2i128" => "__builtin_ia32_permti256", "llvm.x86.avx512.add.pd.512" => "__builtin_ia32_addpd512", "llvm.x86.avx512.add.ps.512" => "__builtin_ia32_addps512", @@ -7882,6 +9423,8 @@ "llvm.x86.tpause" => "__builtin_ia32_tpause", "llvm.x86.umonitor" => "__builtin_ia32_umonitor", "llvm.x86.umwait" => "__builtin_ia32_umwait", + "llvm.x86.urdmsr" => "__builtin_ia32_urdmsr", + "llvm.x86.uwrmsr" => "__builtin_ia32_uwrmsr", "llvm.x86.vbcstnebf162ps128" => "__builtin_ia32_vbcstnebf162ps128", "llvm.x86.vbcstnebf162ps256" => "__builtin_ia32_vbcstnebf162ps256", "llvm.x86.vbcstnesh2ps128" => "__builtin_ia32_vbcstnesh2ps128", @@ -7909,6 +9452,16 @@ "llvm.x86.vgf2p8mulb.128" => "__builtin_ia32_vgf2p8mulb_v16qi", "llvm.x86.vgf2p8mulb.256" => "__builtin_ia32_vgf2p8mulb_v32qi", "llvm.x86.vgf2p8mulb.512" => "__builtin_ia32_vgf2p8mulb_v64qi", + "llvm.x86.vsha512msg1" => "__builtin_ia32_vsha512msg1", + "llvm.x86.vsha512msg2" => "__builtin_ia32_vsha512msg2", + "llvm.x86.vsha512rnds2" => "__builtin_ia32_vsha512rnds2", + "llvm.x86.vsm3msg1" => "__builtin_ia32_vsm3msg1", + "llvm.x86.vsm3msg2" => "__builtin_ia32_vsm3msg2", + "llvm.x86.vsm3rnds2" => "__builtin_ia32_vsm3rnds2", + "llvm.x86.vsm4key4128" => "__builtin_ia32_vsm4key4128", + "llvm.x86.vsm4key4256" => "__builtin_ia32_vsm4key4256", + "llvm.x86.vsm4rnds4128" => "__builtin_ia32_vsm4rnds4128", + "llvm.x86.vsm4rnds4256" => "__builtin_ia32_vsm4rnds4256", "llvm.x86.wbinvd" => "__builtin_ia32_wbinvd", "llvm.x86.wbnoinvd" => "__builtin_ia32_wbnoinvd", "llvm.x86.wrfsbase.32" => "__builtin_ia32_wrfsbase32", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/llvm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -236,11 +236,17 @@ let arg2 = builder.context.new_cast(None, arg2, arg2_type); args = vec![new_args[0], arg2].into(); }, + // These builtins are sent one more argument than needed. "__builtin_prefetch" => { let mut new_args = args.to_vec(); new_args.pop(); args = new_args.into(); }, + // The GCC version returns one value of the tuple through a pointer. + "__builtin_ia32_rdrand64_step" => { + let arg = builder.current_func().new_local(None, builder.ulonglong_type, "return_rdrand_arg"); + args = vec![arg.get_address(None)].into(); + }, _ => (), } } @@ -361,6 +367,19 @@ // builtin twice, we overwrite the return value with a dummy value. return_value = builder.context.new_rvalue_zero(builder.int_type); }, + "__builtin_ia32_rdrand64_step" => { + let random_number = args[0].dereference(None).to_rvalue(); + let success_variable = builder.current_func().new_local(None, return_value.get_type(), "success"); + builder.llbb().add_assignment(None, success_variable, return_value); + + let field1 = builder.context.new_field(None, random_number.get_type(), "random_number"); + let field2 = builder.context.new_field(None, return_value.get_type(), "success"); + let struct_type = builder.context.new_struct_type(None, "rdrand_result", &[field1, field2]); + return_value = builder.context.new_struct_constructor(None, struct_type.as_type(), None, &[ + random_number, + success_variable.to_rvalue(), + ]); + }, _ => (), } @@ -413,15 +432,21 @@ #[cfg(not(feature="master"))] pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function<'gcc> { - match name { - "llvm.x86.xgetbv" | "llvm.x86.sse2.pause" => { - let gcc_name = "__builtin_trap"; - let func = cx.context.get_builtin_function(gcc_name); - cx.functions.borrow_mut().insert(gcc_name.to_string(), func); - return func; - }, - _ => unimplemented!("unsupported LLVM intrinsic {}", name), - } + let gcc_name = + match name { + "llvm.x86.sse2.pause" => { + // NOTE: pause is only a hint, so we use a dummy built-in because target built-ins + // are not supported in libgccjit 12. 
+ "__builtin_inff" + }, + "llvm.x86.xgetbv" => { + "__builtin_trap" + }, + _ => unimplemented!("unsupported LLVM intrinsic {}", name), + }; + let func = cx.context.get_builtin_function(gcc_name); + cx.functions.borrow_mut().insert(gcc_name.to_string(), func); + return func; } #[cfg(feature="master")] @@ -613,6 +638,7 @@ "llvm.fshr.v8i16" => "__builtin_ia32_vpshrdv_v8hi", "llvm.x86.fma.vfmadd.sd" => "__builtin_ia32_vfmaddsd3", "llvm.x86.fma.vfmadd.ss" => "__builtin_ia32_vfmaddss3", + "llvm.x86.rdrand.64" => "__builtin_ia32_rdrand64_step", // The above doc points to unknown builtins for the following, so override them: "llvm.x86.avx2.gather.d.d" => "__builtin_ia32_gathersiv4si", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,15 +4,17 @@ #[cfg(feature="master")] use std::iter; -use gccjit::{ComparisonOp, Function, RValue, ToRValue, Type, UnaryOp, FunctionType}; +#[cfg(feature="master")] +use gccjit::FunctionType; +use gccjit::{ComparisonOp, Function, RValue, ToRValue, Type, UnaryOp}; use rustc_codegen_ssa::MemFlags; use rustc_codegen_ssa::base::wants_msvc_seh; use rustc_codegen_ssa::common::IntPredicate; use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue}; use rustc_codegen_ssa::mir::place::PlaceRef; -use rustc_codegen_ssa::traits::{ArgAbiMethods, BaseTypeMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods}; +use rustc_codegen_ssa::traits::{ArgAbiMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods}; #[cfg(feature="master")] -use rustc_codegen_ssa::traits::MiscMethods; +use rustc_codegen_ssa::traits::{BaseTypeMethods, MiscMethods}; use rustc_codegen_ssa::errors::InvalidMonomorphization; use rustc_middle::bug; use rustc_middle::ty::{self, Instance, Ty}; @@ -143,11 +145,15 @@ sym::volatile_load | sym::unaligned_volatile_load => { let tp_ty = fn_args.type_at(0); - let mut ptr = args[0].immediate(); - if let PassMode::Cast { cast: ty, .. } = &fn_abi.ret.mode { - ptr = self.pointercast(ptr, self.type_ptr_to(ty.gcc_type(self))); - } - let load = self.volatile_load(ptr.get_type(), ptr); + let ptr = args[0].immediate(); + let load = + if let PassMode::Cast { cast: ty, pad_i32: _ } = &fn_abi.ret.mode { + let gcc_ty = ty.gcc_type(self); + self.volatile_load(gcc_ty, ptr) + } + else { + self.volatile_load(self.layout_of(tp_ty).gcc_type(self), ptr) + }; // TODO(antoyo): set alignment. self.to_immediate(load, self.layout_of(tp_ty)) } @@ -819,75 +825,58 @@ value }; - if value_type.is_u128(&self.cx) { - // TODO(antoyo): implement in the normal algorithm below to have a more efficient - // implementation (that does not require a call to __popcountdi2). 
- let popcount = self.context.get_builtin_function("__builtin_popcountll"); + // only break apart 128-bit ints if they're not natively supported + // TODO(antoyo): remove this if/when native 128-bit integers land in libgccjit + if value_type.is_u128(&self.cx) && !self.cx.supports_128bit_integers { let sixty_four = self.gcc_int(value_type, 64); let right_shift = self.gcc_lshr(value, sixty_four); let high = self.gcc_int_cast(right_shift, self.cx.ulonglong_type); - let high = self.context.new_call(None, popcount, &[high]); + let high = self.pop_count(high); let low = self.gcc_int_cast(value, self.cx.ulonglong_type); - let low = self.context.new_call(None, popcount, &[low]); + let low = self.pop_count(low); let res = high + low; return self.gcc_int_cast(res, result_type); } - // First step. - let mask = self.context.new_rvalue_from_long(value_type, 0x5555555555555555); - let left = value & mask; - let shifted = value >> self.context.new_rvalue_from_int(value_type, 1); - let right = shifted & mask; - let value = left + right; - - // Second step. - let mask = self.context.new_rvalue_from_long(value_type, 0x3333333333333333); - let left = value & mask; - let shifted = value >> self.context.new_rvalue_from_int(value_type, 2); - let right = shifted & mask; - let value = left + right; - - // Third step. - let mask = self.context.new_rvalue_from_long(value_type, 0x0F0F0F0F0F0F0F0F); - let left = value & mask; - let shifted = value >> self.context.new_rvalue_from_int(value_type, 4); - let right = shifted & mask; - let value = left + right; - - if value_type.is_u8(&self.cx) { - return self.context.new_cast(None, value, result_type); - } - - // Fourth step. - let mask = self.context.new_rvalue_from_long(value_type, 0x00FF00FF00FF00FF); - let left = value & mask; - let shifted = value >> self.context.new_rvalue_from_int(value_type, 8); - let right = shifted & mask; - let value = left + right; - - if value_type.is_u16(&self.cx) { - return self.context.new_cast(None, value, result_type); - } - - // Fifth step. - let mask = self.context.new_rvalue_from_long(value_type, 0x0000FFFF0000FFFF); - let left = value & mask; - let shifted = value >> self.context.new_rvalue_from_int(value_type, 16); - let right = shifted & mask; - let value = left + right; - - if value_type.is_u32(&self.cx) { - return self.context.new_cast(None, value, result_type); - } - - // Sixth step. 
- let mask = self.context.new_rvalue_from_long(value_type, 0x00000000FFFFFFFF); - let left = value & mask; - let shifted = value >> self.context.new_rvalue_from_int(value_type, 32); - let right = shifted & mask; - let value = left + right; - - self.context.new_cast(None, value, result_type) + // Use Wenger's algorithm for population count, gcc's seems to play better with it + // for (int counter = 0; value != 0; counter++) { + // value &= value - 1; + // } + let func = self.current_func.borrow().expect("func"); + let loop_head = func.new_block("head"); + let loop_body = func.new_block("body"); + let loop_tail = func.new_block("tail"); + + let counter_type = self.int_type; + let counter = self.current_func().new_local(None, counter_type, "popcount_counter"); + let val = self.current_func().new_local(None, value_type, "popcount_value"); + let zero = self.gcc_zero(counter_type); + self.llbb().add_assignment(None, counter, zero); + self.llbb().add_assignment(None, val, value); + self.br(loop_head); + + // check if value isn't zero + self.switch_to_block(loop_head); + let zero = self.gcc_zero(value_type); + let cond = self.gcc_icmp(IntPredicate::IntNE, val.to_rvalue(), zero); + self.cond_br(cond, loop_body, loop_tail); + + // val &= val - 1; + self.switch_to_block(loop_body); + let one = self.gcc_int(value_type, 1); + let sub = self.gcc_sub(val.to_rvalue(), one); + let op = self.gcc_and(val.to_rvalue(), sub); + loop_body.add_assignment(None, val, op); + + // counter += 1 + let one = self.gcc_int(counter_type, 1); + let op = self.gcc_add(counter.to_rvalue(), one); + loop_body.add_assignment(None, counter, op); + self.br(loop_head); + + // end of loop + self.switch_to_block(loop_tail); + self.gcc_int_cast(counter.to_rvalue(), result_type) } // Algorithm from: https://blog.regehr.org/archives/1063 @@ -947,15 +936,7 @@ 128 => "__rust_i128_addo", _ => unreachable!(), }; - let param_a = self.context.new_parameter(None, result_type, "a"); - let param_b = self.context.new_parameter(None, result_type, "b"); - let result_field = self.context.new_field(None, result_type, "result"); - let overflow_field = self.context.new_field(None, self.bool_type, "overflow"); - let return_type = self.context.new_struct_type(None, "result_overflow", &[result_field, overflow_field]); - let func = self.context.new_function(None, FunctionType::Extern, return_type.as_type(), &[param_a, param_b], func_name, false); - let result = self.context.new_call(None, func, &[lhs, rhs]); - let overflow = result.access_field(None, overflow_field); - let int_result = result.access_field(None, result_field); + let (int_result, overflow) = self.operation_with_overflow(func_name, lhs, rhs); self.llbb().add_assignment(None, res, int_result); overflow }; @@ -1017,15 +998,7 @@ 128 => "__rust_i128_subo", _ => unreachable!(), }; - let param_a = self.context.new_parameter(None, result_type, "a"); - let param_b = self.context.new_parameter(None, result_type, "b"); - let result_field = self.context.new_field(None, result_type, "result"); - let overflow_field = self.context.new_field(None, self.bool_type, "overflow"); - let return_type = self.context.new_struct_type(None, "result_overflow", &[result_field, overflow_field]); - let func = self.context.new_function(None, FunctionType::Extern, return_type.as_type(), &[param_a, param_b], func_name, false); - let result = self.context.new_call(None, func, &[lhs, rhs]); - let overflow = result.access_field(None, overflow_field); - let int_result = result.access_field(None, result_field); + let (int_result, 
overflow) = self.operation_with_overflow(func_name, lhs, rhs); self.llbb().add_assignment(None, res, int_result); overflow }; @@ -1197,7 +1170,7 @@ #[cfg(feature="master")] fn gen_fn<'a, 'gcc, 'tcx>(cx: &'a CodegenCx<'gcc, 'tcx>, name: &str, rust_fn_sig: ty::PolyFnSig<'tcx>, codegen: &mut dyn FnMut(Builder<'a, 'gcc, 'tcx>)) -> (Type<'gcc>, Function<'gcc>) { let fn_abi = cx.fn_abi_of_fn_ptr(rust_fn_sig, ty::List::empty()); - let (typ, _, _, _) = fn_abi.gcc_type(cx); + let return_type = fn_abi.gcc_type(cx).return_type; // FIXME(eddyb) find a nicer way to do this. cx.linkage.set(FunctionType::Internal); let func = cx.declare_fn(name, fn_abi); @@ -1207,5 +1180,5 @@ let block = Builder::append_block(cx, func_val, "entry-block"); let bx = Builder::build(cx, block); codegen(bx); - (typ, func) + (return_type, func) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,10 +2,19 @@ * TODO(antoyo): implement equality in libgccjit based on https://zpz.github.io/blog/overloading-equality-operator-in-cpp-class-hierarchy/ (for type equality?) * TODO(antoyo): support #[inline] attributes. * TODO(antoyo): support LTO (gcc's equivalent to Full LTO is -flto -flto-partition=one — https://documentation.suse.com/sbp/all/html/SBP-GCC-10/index.html). + * For Thin LTO, this might be helpful: + * In gcc 4.6 -fwhopr was removed and became default with -flto. The non-whopr path can still be executed via -flto-partition=none. + * + * Maybe some missing optizations enabled by rustc's LTO is in there: https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html + * Like -fipa-icf (should be already enabled) and maybe -fdevirtualize-at-ltrans. + * TODO: disable debug info always being emitted. Perhaps this slows down things? * * TODO(antoyo): remove the patches. */ +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature( rustc_private, decl_macro, @@ -28,6 +37,7 @@ extern crate rustc_data_structures; extern crate rustc_errors; extern crate rustc_fluent_macro; +extern crate rustc_fs_util; extern crate rustc_hir; extern crate rustc_macros; extern crate rustc_metadata; @@ -35,7 +45,8 @@ extern crate rustc_session; extern crate rustc_span; extern crate rustc_target; -extern crate tempfile; +#[macro_use] +extern crate tracing; // This prevents duplicating functions and statics that are already part of the host rustc process. 
#[allow(unused_extern_crates)] @@ -57,6 +68,7 @@ mod debuginfo; mod declare; mod errors; +mod gcc_util; mod int; mod intrinsic; mod mono_item; @@ -64,18 +76,29 @@ mod type_of; use std::any::Any; -use std::sync::{Arc, Mutex}; - -use crate::errors::LTONotSupported; -use gccjit::{Context, OptimizationLevel, CType}; +use std::fmt::Debug; +use std::sync::Arc; +use std::sync::Mutex; +#[cfg(not(feature="master"))] +use std::sync::atomic::AtomicBool; +#[cfg(not(feature="master"))] +use std::sync::atomic::Ordering; + +use gccjit::{Context, OptimizationLevel}; +#[cfg(feature="master")] +use gccjit::TargetInfo; +#[cfg(not(feature="master"))] +use gccjit::CType; +use errors::LTONotSupported; use rustc_ast::expand::allocator::AllocatorKind; use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen}; use rustc_codegen_ssa::base::codegen_crate; use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryFn}; use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule}; use rustc_codegen_ssa::target_features::supported_target_features; -use rustc_codegen_ssa::traits::{CodegenBackend, ExtraBackendMethods, ModuleBufferMethods, ThinBufferMethods, WriteBackendMethods}; use rustc_data_structures::fx::FxIndexMap; +use rustc_data_structures::sync::IntoDynSyncSend; +use rustc_codegen_ssa::traits::{CodegenBackend, ExtraBackendMethods, ThinBufferMethods, WriteBackendMethods}; use rustc_errors::{DiagnosticMessage, ErrorGuaranteed, Handler, SubdiagnosticMessage}; use rustc_fluent_macro::fluent_messages; use rustc_metadata::EncodedMetadata; @@ -88,6 +111,9 @@ use rustc_span::fatal_error::FatalError; use tempfile::TempDir; +use crate::back::lto::ModuleBuffer; +use crate::gcc_util::target_cpu; + fluent_messages! { "../messages.ftl" } pub struct PrintOnPanic String>(pub F); @@ -100,9 +126,47 @@ } } +#[cfg(not(feature="master"))] +#[derive(Debug)] +pub struct TargetInfo { + supports_128bit_integers: AtomicBool, +} + +#[cfg(not(feature="master"))] +impl TargetInfo { + fn cpu_supports(&self, _feature: &str) -> bool { + false + } + + fn supports_128bit_int(&self) -> bool { + self.supports_128bit_integers.load(Ordering::SeqCst) + } +} + +#[derive(Clone)] +pub struct LockedTargetInfo { + info: Arc>>, +} + +impl Debug for LockedTargetInfo { + fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.info.lock().expect("lock").fmt(formatter) + } +} + +impl LockedTargetInfo { + fn cpu_supports(&self, feature: &str) -> bool { + self.info.lock().expect("lock").cpu_supports(feature) + } + + fn supports_128bit_int(&self) -> bool { + self.info.lock().expect("lock").supports_128bit_int() + } +} + #[derive(Clone)] pub struct GccCodegenBackend { - supports_128bit_integers: Arc>, + target_info: LockedTargetInfo, } impl CodegenBackend for GccCodegenBackend { @@ -112,24 +176,40 @@ fn init(&self, sess: &Session) { #[cfg(feature="master")] + { + let target_cpu = target_cpu(sess); + + // Get the second TargetInfo with the correct CPU features by setting the arch. 
+ let context = Context::default(); + if target_cpu != "generic" { + context.add_command_line_option(&format!("-march={}", target_cpu)); + } + + **self.target_info.info.lock().expect("lock") = context.get_target_info(); + } + + #[cfg(feature="master")] gccjit::set_global_personality_function_name(b"rust_eh_personality\0"); - if sess.lto() != Lto::No { + if sess.lto() == Lto::Thin { sess.emit_warning(LTONotSupported {}); } - let temp_dir = TempDir::new().expect("cannot create temporary directory"); - let temp_file = temp_dir.into_path().join("result.asm"); - let check_context = Context::default(); - check_context.set_print_errors_to_stderr(false); - let _int128_ty = check_context.new_c_type(CType::UInt128t); - // NOTE: we cannot just call compile() as this would require other files than libgccjit.so. - check_context.compile_to_file(gccjit::OutputKind::Assembler, temp_file.to_str().expect("path to str")); - *self.supports_128bit_integers.lock().expect("lock") = check_context.get_last_error() == Ok(None); + #[cfg(not(feature="master"))] + { + let temp_dir = TempDir::new().expect("cannot create temporary directory"); + let temp_file = temp_dir.into_path().join("result.asm"); + let check_context = Context::default(); + check_context.set_print_errors_to_stderr(false); + let _int128_ty = check_context.new_c_type(CType::UInt128t); + // NOTE: we cannot just call compile() as this would require other files than libgccjit.so. + check_context.compile_to_file(gccjit::OutputKind::Assembler, temp_file.to_str().expect("path to str")); + self.target_info.info.lock().expect("lock").supports_128bit_integers.store(check_context.get_last_error() == Ok(None), Ordering::SeqCst); + } } fn provide(&self, providers: &mut Providers) { - // FIXME(antoyo) compute list of enabled features from cli flags - providers.global_backend_features = |_tcx, ()| vec![]; + providers.global_backend_features = + |tcx, ()| gcc_util::global_gcc_features(tcx.sess, true) } fn codegen_crate<'tcx>(&self, tcx: TyCtxt<'tcx>, metadata: EncodedMetadata, need_metadata_module: bool) -> Box { @@ -160,7 +240,7 @@ } fn target_features(&self, sess: &Session, allow_unstable: bool) -> Vec { - target_features(sess, allow_unstable) + target_features(sess, allow_unstable, &self.target_info) } } @@ -168,13 +248,19 @@ fn codegen_allocator<'tcx>(&self, tcx: TyCtxt<'tcx>, module_name: &str, kind: AllocatorKind, alloc_error_handler_kind: AllocatorKind) -> Self::Module { let mut mods = GccContext { context: Context::default(), + should_combine_object_files: false, + temp_dir: None, }; + + if tcx.sess.target.arch == "x86" || tcx.sess.target.arch == "x86_64" { + mods.context.add_command_line_option("-masm=intel"); + } unsafe { allocator::codegen(tcx, &mut mods, module_name, kind, alloc_error_handler_kind); } mods } fn compile_codegen_unit(&self, tcx: TyCtxt<'_>, cgu_name: Symbol) -> (ModuleCodegen, u64) { - base::compile_codegen_unit(tcx, cgu_name, *self.supports_128bit_integers.lock().expect("lock")) + base::compile_codegen_unit(tcx, cgu_name, self.target_info.clone()) } fn target_machine_factory(&self, _sess: &Session, _opt_level: OptLevel, _features: &[String]) -> TargetMachineFactoryFn { @@ -185,14 +271,6 @@ } } -pub struct ModuleBuffer; - -impl ModuleBufferMethods for ModuleBuffer { - fn data(&self) -> &[u8] { - unimplemented!(); - } -} - pub struct ThinBuffer; impl ThinBufferMethods for ThinBuffer { @@ -203,6 +281,9 @@ pub struct GccContext { context: Context<'static>, + should_combine_object_files: bool, + // Temporary directory used by LTO. 
We keep it here so that it's not removed before linking. + temp_dir: Option, } unsafe impl Send for GccContext {} @@ -217,18 +298,8 @@ type ThinData = (); type ThinBuffer = ThinBuffer; - fn run_fat_lto(_cgcx: &CodegenContext, mut modules: Vec>, _cached_modules: Vec<(SerializedModule, WorkProduct)>) -> Result, FatalError> { - // TODO(antoyo): implement LTO by sending -flto to libgccjit and adding the appropriate gcc linker plugins. - // NOTE: implemented elsewhere. - // TODO(antoyo): what is implemented elsewhere ^ ? - let module = - match modules.remove(0) { - FatLtoInput::InMemory(module) => module, - FatLtoInput::Serialized { .. } => { - unimplemented!(); - } - }; - Ok(LtoModuleCodegen::Fat { module, _serialized_bitcode: vec![] }) + fn run_fat_lto(cgcx: &CodegenContext, modules: Vec>, cached_modules: Vec<(SerializedModule, WorkProduct)>) -> Result, FatalError> { + back::lto::run_fat(cgcx, modules, cached_modules) } fn run_thin_lto(_cgcx: &CodegenContext, _modules: Vec<(String, Self::ThinBuffer)>, _cached_modules: Vec<(SerializedModule, WorkProduct)>) -> Result<(Vec>, Vec), FatalError> { @@ -277,8 +348,19 @@ /// This is the entrypoint for a hot plugged rustc_codegen_gccjit #[no_mangle] pub fn __rustc_codegen_backend() -> Box { + #[cfg(feature="master")] + let info = { + // Check whether the target supports 128-bit integers. + let context = Context::default(); + Arc::new(Mutex::new(IntoDynSyncSend(context.get_target_info()))) + }; + #[cfg(not(feature="master"))] + let info = Arc::new(Mutex::new(IntoDynSyncSend(TargetInfo { + supports_128bit_integers: AtomicBool::new(false), + }))); + Box::new(GccCodegenBackend { - supports_128bit_integers: Arc::new(Mutex::new(false)), + target_info: LockedTargetInfo { info }, }) } @@ -297,22 +379,7 @@ } } -fn handle_native(name: &str) -> &str { - if name != "native" { - return name; - } - - unimplemented!(); -} - -pub fn target_cpu(sess: &Session) -> &str { - match sess.opts.cg.target_cpu { - Some(ref name) => handle_native(name), - None => handle_native(sess.target.cpu.as_ref()), - } -} - -pub fn target_features(sess: &Session, allow_unstable: bool) -> Vec { +pub fn target_features(sess: &Session, allow_unstable: bool, target_info: &LockedTargetInfo) -> Vec { supported_target_features(sess) .iter() .filter_map( @@ -321,26 +388,13 @@ }, ) .filter(|_feature| { - // TODO(antoyo): implement a way to get enabled feature in libgccjit. - // Probably using the equivalent of __builtin_cpu_supports. - // TODO(antoyo): maybe use whatever outputs the following command: - // gcc -march=native -Q --help=target - #[cfg(feature="master")] - { - // NOTE: the CPU in the CI doesn't support sse4a, so disable it to make the stdarch tests pass in the CI. 
- (_feature.contains("sse") || _feature.contains("avx")) && !_feature.contains("avx512") && !_feature.contains("sse4a") - } - #[cfg(not(feature="master"))] - { - false - } + target_info.cpu_supports(_feature) /* adx, aes, avx, avx2, avx512bf16, avx512bitalg, avx512bw, avx512cd, avx512dq, avx512er, avx512f, avx512ifma, avx512pf, avx512vbmi, avx512vbmi2, avx512vl, avx512vnni, avx512vp2intersect, avx512vpopcntdq, bmi1, bmi2, cmpxchg16b, ermsb, f16c, fma, fxsr, gfni, lzcnt, movbe, pclmulqdq, popcnt, rdrand, rdseed, rtm, sha, sse, sse2, sse3, sse4.1, sse4.2, sse4a, ssse3, tbm, vaes, vpclmulqdq, xsave, xsavec, xsaveopt, xsaves */ - //false }) .map(|feature| Symbol::intern(feature)) .collect() diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/type_.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/type_.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/type_.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/type_.rs 2023-12-21 16:55:28.000000000 +0000 @@ -119,11 +119,11 @@ } fn type_f32(&self) -> Type<'gcc> { - self.context.new_type::() + self.float_type } fn type_f64(&self) -> Type<'gcc> { - self.context.new_type::() + self.double_type } fn type_func(&self, params: &[Type<'gcc>], return_type: Type<'gcc>) -> Type<'gcc> { @@ -216,17 +216,17 @@ value.get_type() } - fn type_array(&self, ty: Type<'gcc>, len: u64) -> Type<'gcc> { - // TODO: remove this as well? - /*if let Some(struct_type) = ty.is_struct() { + #[cfg_attr(feature="master", allow(unused_mut))] + fn type_array(&self, ty: Type<'gcc>, mut len: u64) -> Type<'gcc> { + #[cfg(not(feature="master"))] + if let Some(struct_type) = ty.is_struct() { if struct_type.get_field_count() == 0 { // NOTE: since gccjit only supports i32 for the array size and libcore's tests uses a // size of usize::MAX in test_binary_search, we workaround this by setting the size to // zero for ZSTs. - // FIXME(antoyo): fix gccjit API. len = 0; } - }*/ + } self.context.new_array_type(None, ty, len) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/type_of.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/type_of.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/type_of.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/src/type_of.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,7 +9,7 @@ use rustc_target::abi::{self, Abi, Align, F32, F64, FieldsShape, Int, Integer, Pointer, PointeeInfo, Size, TyAbiInterface, Variants}; use rustc_target::abi::call::{CastTarget, FnAbi, Reg}; -use crate::abi::{FnAbiGccExt, GccType}; +use crate::abi::{FnAbiGcc, FnAbiGccExt, GccType}; use crate::context::CodegenCx; use crate::type_::struct_fields; @@ -87,7 +87,7 @@ // FIXME(eddyb) producing readable type names for trait objects can result // in problematically distinct types due to HRTB and subtyping (see #47638). // ty::Dynamic(..) | - ty::Adt(..) | ty::Closure(..) | ty::Foreign(..) | ty::Generator(..) | ty::Str + ty::Adt(..) | ty::Closure(..) | ty::Foreign(..) | ty::Coroutine(..) 
| ty::Str if !cx.sess().fewer_names() => { let mut name = with_no_trimmed_paths!(layout.ty.to_string()); @@ -98,10 +98,10 @@ write!(&mut name, "::{}", def.variant(index).name).unwrap(); } } - if let (&ty::Generator(_, _, _), &Variants::Single { index }) = + if let (&ty::Coroutine(_, _, _), &Variants::Single { index }) = (layout.ty.kind(), &layout.variants) { - write!(&mut name, "::{}", ty::GeneratorArgs::variant_name(index)).unwrap(); + write!(&mut name, "::{}", ty::CoroutineArgs::variant_name(index)).unwrap(); } Some(name) } @@ -182,6 +182,7 @@ /// of that field's type - this is useful for taking the address of /// that field and ensuring the struct has the right alignment. fn gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> { + use crate::rustc_middle::ty::layout::FnAbiOf; // This must produce the same result for `repr(transparent)` wrappers as for the inner type! // In other words, this should generally not look at the type at all, but only at the // layout. @@ -191,7 +192,14 @@ if let Some(&ty) = cx.scalar_types.borrow().get(&self.ty) { return ty; } - let ty = self.scalar_gcc_type_at(cx, scalar, Size::ZERO); + let ty = + match *self.ty.kind() { + // NOTE: we cannot remove this match like in the LLVM codegen because the call + // to fn_ptr_backend_type handle the on-stack attribute. + // TODO(antoyo): find a less hackish way to hande the on-stack attribute. + ty::FnPtr(sig) => cx.fn_ptr_backend_type(&cx.fn_abi_of_fn_ptr(sig, ty::List::empty())), + _ => self.scalar_gcc_type_at(cx, scalar, Size::ZERO), + }; cx.scalar_types.borrow_mut().insert(self.ty, ty); return ty; } @@ -364,7 +372,13 @@ } fn fn_decl_backend_type(&self, fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> Type<'gcc> { - let (return_type, param_types, variadic, _) = fn_abi.gcc_type(self); - self.context.new_function_pointer_type(None, return_type, ¶m_types, variadic) + // FIXME(antoyo): Should we do something with `FnAbiGcc::fn_attributes`? + let FnAbiGcc { + return_type, + arguments_type, + is_c_variadic, + .. + } = fn_abi.gcc_type(self); + self.context.new_function_pointer_type(None, return_type, &arguments_type, is_c_variadic) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/test.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/test.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/test.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/test.sh 2023-12-21 16:55:28.000000000 +0000 @@ -3,16 +3,7 @@ # TODO(antoyo): rewrite to cargo-make (or just) or something like that to only rebuild the sysroot when needed? 
set -e - -if [ -f ./gcc_path ]; then - export GCC_PATH=$(cat gcc_path) -else - echo 'Please put the path to your custom build of libgccjit in the file `gcc_path`, see Readme.md for details' - exit 1 -fi - -export LD_LIBRARY_PATH="$GCC_PATH" -export LIBRARY_PATH="$GCC_PATH" +#set -x flags= gcc_master_branch=1 @@ -21,12 +12,18 @@ build_only=0 nb_parts=0 current_part=0 +use_system_gcc=0 +use_backend=0 +cargo_target_dir="" + +export CHANNEL='debug' while [[ $# -gt 0 ]]; do case $1 in --release) codegen_channel=release channel="release" + export CHANNEL='release' shift ;; --release-sysroot) @@ -110,6 +107,22 @@ build_only=1 shift ;; + "--use-system-gcc") + use_system_gcc=1 + shift + ;; + "--use-backend") + use_backend=1 + shift + export BUILTIN_BACKEND=$1 + shift + ;; + "--out-dir") + shift + export CARGO_TARGET_DIR=$1 + cargo_target_dir=$1 + shift + ;; "--nb-parts") shift nb_parts=$1 @@ -127,13 +140,25 @@ esac done -if [[ $channel == "release" ]]; then - export CHANNEL='release' - CARGO_INCREMENTAL=1 cargo rustc --release $flags +if [ -f ./gcc_path ]; then + export GCC_PATH=$(cat gcc_path) +elif (( $use_system_gcc == 1 )); then + echo 'Using system GCC' else - echo $LD_LIBRARY_PATH - export CHANNEL='debug' - cargo rustc $flags + echo 'Please put the path to your custom build of libgccjit in the file `gcc_path`, see Readme.md for details' + exit 1 +fi + +export LD_LIBRARY_PATH="$GCC_PATH" +export LIBRARY_PATH="$GCC_PATH" + +if [[ $use_backend == 0 ]]; then + if [[ $channel == "release" ]]; then + CARGO_INCREMENTAL=1 cargo rustc --release $flags + else + echo $LD_LIBRARY_PATH + cargo rustc $flags + fi fi if (( $build_only == 1 )); then @@ -144,20 +169,26 @@ source config.sh function clean() { - rm -r target/out || true - mkdir -p target/out/gccjit + rm -r $cargo_target_dir || true + mkdir -p $cargo_target_dir/gccjit } function mini_tests() { echo "[BUILD] mini_core" - $RUSTC example/mini_core.rs --crate-name mini_core --crate-type lib,dylib --target $TARGET_TRIPLE + crate_types="lib,dylib" + + if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then + crate_types="lib" + fi + + $RUST_CMD example/mini_core.rs --crate-name mini_core --crate-type $crate_types --target $TARGET_TRIPLE echo "[BUILD] example" - $RUSTC example/example.rs --crate-type lib --target $TARGET_TRIPLE + $RUST_CMD example/example.rs --crate-type lib --target $TARGET_TRIPLE echo "[AOT] mini_core_hello_world" - $RUSTC example/mini_core_hello_world.rs --crate-name mini_core_hello_world --crate-type bin -g --target $TARGET_TRIPLE - $RUN_WRAPPER ./target/out/mini_core_hello_world abc bcd + $RUST_CMD example/mini_core_hello_world.rs --crate-name mini_core_hello_world --crate-type bin -g --target $TARGET_TRIPLE + $RUN_WRAPPER $cargo_target_dir/mini_core_hello_world abc bcd } function build_sysroot() { @@ -165,41 +196,61 @@ time ./build_sysroot/build_sysroot.sh $sysroot_channel } +# TODO(GuillaumeGomez): when rewriting in Rust, refactor with the code in tests/lang_tests_common.rs if possible. 
+function run_in_vm() { + vm_parent_dir=${CG_GCC_VM_DIR:-$(pwd)} + vm_dir=vm + exe=$1 + exe_filename=$(basename $exe) + vm_home_dir=$vm_parent_dir/$vm_dir/home + vm_exe_path=$vm_home_dir/$exe_filename + inside_vm_exe_path=/home/$exe_filename + sudo cp $exe $vm_exe_path + + shift + pushd $vm_parent_dir + sudo chroot $vm_dir qemu-m68k-static $inside_vm_exe_path $@ + popd +} + function std_tests() { echo "[AOT] arbitrary_self_types_pointers_and_wrappers" - $RUSTC example/arbitrary_self_types_pointers_and_wrappers.rs --crate-name arbitrary_self_types_pointers_and_wrappers --crate-type bin --target $TARGET_TRIPLE - $RUN_WRAPPER ./target/out/arbitrary_self_types_pointers_and_wrappers + $RUST_CMD example/arbitrary_self_types_pointers_and_wrappers.rs --crate-name arbitrary_self_types_pointers_and_wrappers --crate-type bin --target $TARGET_TRIPLE + $RUN_WRAPPER $cargo_target_dir/arbitrary_self_types_pointers_and_wrappers echo "[AOT] alloc_system" - $RUSTC example/alloc_system.rs --crate-type lib --target "$TARGET_TRIPLE" + $RUST_CMD example/alloc_system.rs --crate-type lib --target "$TARGET_TRIPLE" - echo "[AOT] alloc_example" - $RUSTC example/alloc_example.rs --crate-type bin --target $TARGET_TRIPLE - $RUN_WRAPPER ./target/out/alloc_example + # FIXME: doesn't work on m68k. + if [[ "$HOST_TRIPLE" == "$TARGET_TRIPLE" ]]; then + echo "[AOT] alloc_example" + $RUST_CMD example/alloc_example.rs --crate-type bin --target $TARGET_TRIPLE + $RUN_WRAPPER $cargo_target_dir/alloc_example + fi echo "[AOT] dst_field_align" # FIXME(antoyo): Re-add -Zmir-opt-level=2 once rust-lang/rust#67529 is fixed. - $RUSTC example/dst-field-align.rs --crate-name dst_field_align --crate-type bin --target $TARGET_TRIPLE - $RUN_WRAPPER ./target/out/dst_field_align || (echo $?; false) + $RUST_CMD example/dst-field-align.rs --crate-name dst_field_align --crate-type bin --target $TARGET_TRIPLE + $RUN_WRAPPER $cargo_target_dir/dst_field_align || (echo $?; false) echo "[AOT] std_example" std_flags="--cfg feature=\"master\"" if (( $gcc_master_branch == 0 )); then std_flags="" fi - $RUSTC example/std_example.rs --crate-type bin --target $TARGET_TRIPLE $std_flags - $RUN_WRAPPER ./target/out/std_example --target $TARGET_TRIPLE + $RUST_CMD example/std_example.rs --crate-type bin --target $TARGET_TRIPLE $std_flags + $RUN_WRAPPER $cargo_target_dir/std_example --target $TARGET_TRIPLE echo "[AOT] subslice-patterns-const-eval" - $RUSTC example/subslice-patterns-const-eval.rs --crate-type bin $TEST_FLAGS --target $TARGET_TRIPLE - $RUN_WRAPPER ./target/out/subslice-patterns-const-eval + $RUST_CMD example/subslice-patterns-const-eval.rs --crate-type bin $TEST_FLAGS --target $TARGET_TRIPLE + $RUN_WRAPPER $cargo_target_dir/subslice-patterns-const-eval echo "[AOT] track-caller-attribute" - $RUSTC example/track-caller-attribute.rs --crate-type bin $TEST_FLAGS --target $TARGET_TRIPLE - $RUN_WRAPPER ./target/out/track-caller-attribute + $RUST_CMD example/track-caller-attribute.rs --crate-type bin $TEST_FLAGS --target $TARGET_TRIPLE + $RUN_WRAPPER $cargo_target_dir/track-caller-attribute echo "[BUILD] mod_bench" - $RUSTC example/mod_bench.rs --crate-type bin --target $TARGET_TRIPLE + $RUST_CMD example/mod_bench.rs --crate-type bin --target $TARGET_TRIPLE } function setup_rustc() { @@ -208,22 +259,23 @@ git clone https://github.com/rust-lang/rust.git || true cd rust git fetch - git checkout $(rustc -V | cut -d' ' -f3 | tr -d '(') + git checkout $($RUSTC -V | cut -d' ' -f3 | tr -d '(') export RUSTFLAGS= rm config.toml || true cat > config.toml < bool { + 
filename.extension().expect("extension").to_str().expect("to_str") == "rs" + } + + #[cfg(feature="master")] + fn filter(filename: &Path) -> bool { + rust_filter(filename) + } + + #[cfg(not(feature="master"))] + fn filter(filename: &Path) -> bool { + if let Some(filename) = filename.to_str() { + if filename.ends_with("gep.rs") { + return false; + } + } + rust_filter(filename) + } + LangTester::new() .test_dir("tests/run") - .test_file_filter(|path| path.extension().expect("extension").to_str().expect("to_str") == "rs") + .test_file_filter(filter) .test_extract(|source| { let lines = source.lines() @@ -50,6 +70,19 @@ "-o", exe.to_str().expect("to_str"), path.to_str().expect("to_str"), ]); + + // TODO(antoyo): find a way to send this via a cli argument. + let test_target = std::env::var("CG_GCC_TEST_TARGET"); + if let Ok(ref target) = test_target { + compiler.args(&["--target", &target]); + let linker = format!("{}-gcc", target); + compiler.args(&[format!("-Clinker={}", linker)]); + let mut env_path = std::env::var("PATH").unwrap_or_default(); + // TODO(antoyo): find a better way to add the PATH necessary locally. + env_path = format!("/opt/m68k-unknown-linux-gnu/bin:{}", env_path); + compiler.env("PATH", env_path); + } + if let Some(flags) = option_env!("TEST_FLAGS") { for flag in flags.split_whitespace() { compiler.arg(&flag); @@ -65,8 +98,37 @@ } } // Test command 2: run `tempdir/x`. - let runtime = Command::new(exe); - vec![("Compiler", compiler), ("Run-time", runtime)] + if test_target.is_ok() { + let vm_parent_dir = std::env::var("CG_GCC_VM_DIR") + .map(|dir| PathBuf::from(dir)) + .unwrap_or_else(|_| std::env::current_dir().unwrap()); + let vm_dir = "vm"; + let exe_filename = exe.file_name().unwrap(); + let vm_home_dir = vm_parent_dir.join(vm_dir).join("home"); + let vm_exe_path = vm_home_dir.join(exe_filename); + // FIXME(antoyo): panicking here makes the test pass. 
+ let inside_vm_exe_path = PathBuf::from("/home").join(&exe_filename); + let mut copy = Command::new("sudo"); + copy.arg("cp"); + copy.args(&[&exe, &vm_exe_path]); + + let mut runtime = Command::new("sudo"); + runtime.args(&["chroot", vm_dir, "qemu-m68k-static"]); + runtime.arg(inside_vm_exe_path); + runtime.current_dir(vm_parent_dir); + vec![ + ("Compiler", compiler), + ("Copy", copy), + ("Run-time", runtime), + ] + } + else { + let runtime = Command::new(exe); + vec![ + ("Compiler", compiler), + ("Run-time", runtime), + ] + } }) .run(); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/abort1.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/abort1.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/abort1.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/abort1.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,7 +3,8 @@ // Run-time: // status: signal -#![feature(auto_traits, lang_items, no_core, start, intrinsics)] +#![feature(auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/abort2.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/abort2.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/abort2.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/abort2.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,7 +3,8 @@ // Run-time: // status: signal -#![feature(auto_traits, lang_items, no_core, start, intrinsics)] +#![feature(auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/array.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/array.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/array.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/array.rs 2023-12-21 16:55:28.000000000 +0000 @@ -7,7 +7,8 @@ // 5 // 10 -#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics)] +#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/asm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/asm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/asm.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/asm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,8 +5,10 @@ #![feature(asm_const)] +#[cfg(target_arch="x86_64")] use std::arch::{asm, global_asm}; +#[cfg(target_arch="x86_64")] global_asm!( " .global add_asm @@ -20,6 +22,7 @@ fn add_asm(a: i64, b: i64) -> i64; } +#[cfg(target_arch="x86_64")] pub unsafe fn mem_cpy(dst: *mut u8, src: *const u8, len: usize) { asm!( "rep movsb", @@ -30,7 +33,8 @@ ); } -fn main() { +#[cfg(target_arch="x86_64")] +fn asm() { unsafe { asm!("nop"); } @@ -124,7 +128,7 @@ // check const (ATT syntax) let mut x: u64 = 42; unsafe { - asm!("add {}, {}", + asm!("add ${}, {}", const 1, inout(reg) x, options(att_syntax) @@ -173,3 +177,11 @@ } 
assert_eq!(array1, array2); } + +#[cfg(not(target_arch="x86_64"))] +fn asm() { +} + +fn main() { + asm(); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/assign.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/assign.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/assign.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/assign.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,8 +5,8 @@ // 7 8 // 10 -#![allow(unused_attributes)] -#![feature(auto_traits, lang_items, no_core, start, intrinsics, track_caller)] +#![allow(internal_features, unused_attributes)] +#![feature(auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs, track_caller)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/closure.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/closure.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/closure.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/closure.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,7 +9,8 @@ // Both args: 11 #![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, - unboxed_closures)] + unboxed_closures, rustc_attrs)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/condition.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/condition.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/condition.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/condition.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,7 +5,8 @@ // stdout: true // 1 -#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics)] +#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/empty_main.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/empty_main.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/empty_main.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/empty_main.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,6 +4,7 @@ // status: 0 #![feature(auto_traits, lang_items, no_core, start)] +#![allow(internal_features)] #![no_std] #![no_core] @@ -34,6 +35,6 @@ */ #[start] -fn main(mut argc: isize, _argv: *const *const u8) -> isize { +fn main(_argc: isize, _argv: *const *const u8) -> isize { 0 } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/exit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/exit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/exit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/exit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,6 +4,7 @@ // status: 2 #![feature(auto_traits, lang_items, no_core, start, intrinsics)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/exit_code.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/exit_code.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/exit_code.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/exit_code.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,6 +4,7 @@ // status: 1 #![feature(auto_traits, lang_items, no_core, start)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/fun_ptr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,7 +4,8 @@ // status: 0 // stdout: 1 -#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics)] +#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/gep.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/gep.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/gep.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/gep.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,10 @@ +// Compiler: +// +// Run-time: +// status: 0 + +fn main() { + let mut value = (1, 1); + let ptr = &mut value as *mut (i32, i32); + println!("{:?}", ptr.wrapping_offset(10)); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,138 +4,20 @@ // stdout: Success // status: signal -#![allow(unused_attributes)] -#![feature(auto_traits, lang_items, no_core, start, intrinsics)] - -#![no_std] -#![no_core] - -/* - * Core - */ - -// Because we don't have core yet. -#[lang = "sized"] -pub trait Sized {} - -#[lang = "copy"] -trait Copy { -} - -impl Copy for isize {} -impl Copy for *mut i32 {} -impl Copy for usize {} -impl Copy for i32 {} -impl Copy for u8 {} -impl Copy for i8 {} - -#[lang = "receiver"] -trait Receiver { -} - -#[lang = "freeze"] -pub(crate) unsafe auto trait Freeze {} - -#[lang = "panic_location"] -struct PanicLocation { - file: &'static str, - line: u32, - column: u32, -} - -mod libc { - #[link(name = "c")] - extern "C" { - pub fn puts(s: *const u8) -> i32; - pub fn fflush(stream: *mut i32) -> i32; - - pub static stdout: *mut i32; - } -} - -mod intrinsics { - extern "rust-intrinsic" { - #[rustc_safe_intrinsic] - pub fn abort() -> !; - } -} - -#[lang = "panic"] -#[track_caller] -#[no_mangle] -pub fn panic(_msg: &'static str) -> ! { - unsafe { - // Panicking is expected iff overflow checking is enabled. 
- #[cfg(debug_assertions)] - libc::puts("Success\0" as *const str as *const u8); - libc::fflush(libc::stdout); - intrinsics::abort(); - } -} - -#[lang = "add"] -trait Add { - type Output; - - fn add(self, rhs: RHS) -> Self::Output; -} - -impl Add for u8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i8 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for i32 { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for usize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -impl Add for isize { - type Output = Self; - - fn add(self, rhs: Self) -> Self { - self + rhs - } -} - -/* - * Code - */ - -#[start] -fn main(mut argc: isize, _argv: *const *const u8) -> isize { - let int = 9223372036854775807isize; - let int = int + argc; // overflow +fn main() { + std::panic::set_hook(Box::new(|_| { + println!("Success"); + std::process::abort(); + })); + + let arg_count = std::env::args().count(); + let int = isize::MAX; + let _int = int + arg_count as isize; // overflow // If overflow checking is disabled, we should reach here. #[cfg(not(debug_assertions))] unsafe { - libc::puts("Success\0" as *const str as *const u8); - libc::fflush(libc::stdout); - intrinsics::abort(); + println!("Success"); + std::process::abort(); } - - int } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/mut_ref.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/mut_ref.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/mut_ref.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/mut_ref.rs 2023-12-21 16:55:28.000000000 +0000 @@ -7,8 +7,8 @@ // 6 // 11 -#![allow(unused_attributes)] -#![feature(auto_traits, lang_items, no_core, start, intrinsics, track_caller)] +#![allow(internal_features, unused_attributes)] +#![feature(auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs, track_caller)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/operations.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/operations.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/operations.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/operations.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,8 +5,8 @@ // 39 // 10 -#![allow(unused_attributes)] -#![feature(auto_traits, lang_items, no_core, start, intrinsics, arbitrary_self_types)] +#![allow(internal_features, unused_attributes)] +#![feature(auto_traits, lang_items, no_core, start, intrinsics, arbitrary_self_types, rustc_attrs)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/ptr_cast.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,7 +4,8 @@ // status: 0 // stdout: 1 -#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics)] +#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] +#![allow(internal_features)] #![no_std] 
#![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/return-tuple.rs 2023-12-21 16:55:28.000000000 +0000 @@ -7,6 +7,7 @@ // 42 #![feature(auto_traits, lang_items, no_core, start, intrinsics)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/slice.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/slice.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/slice.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/slice.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,7 +4,8 @@ // status: 0 // stdout: 5 -#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics)] +#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/static.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/static.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/static.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/static.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,7 +9,8 @@ // 12 // 1 -#![feature(auto_traits, lang_items, no_core, start, intrinsics)] +#![feature(auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/structs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/structs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/structs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/structs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,6 +6,7 @@ // 2 #![feature(auto_traits, lang_items, no_core, start, intrinsics)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/tuple.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/tuple.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/tuple.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/tuple.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,6 +5,7 @@ // stdout: 3 #![feature(auto_traits, lang_items, no_core, start, intrinsics)] +#![allow(internal_features)] #![no_std] #![no_core] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/volatile.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/volatile.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/volatile.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tests/run/volatile.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +// Compiler: +// +// Run-time: +// status: 0 + +use std::mem::MaybeUninit; + +#[derive(Debug)] +struct Struct { + pointer: 
*const (), + func: unsafe fn(*const ()), +} + +fn func(ptr: *const ()) { +} + +fn main() { + let mut x = MaybeUninit::<&Struct>::uninit(); + x.write(&Struct { + pointer: std::ptr::null(), + func, + }); + let x = unsafe { x.assume_init() }; + let value = unsafe { (x as *const Struct).read_volatile() }; + println!("{:?}", value); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tools/generate_intrinsics.py rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tools/generate_intrinsics.py --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tools/generate_intrinsics.py 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/tools/generate_intrinsics.py 2023-12-21 16:55:28.000000000 +0000 @@ -46,10 +46,10 @@ def extract_instrinsics_from_llvm(llvm_path, intrinsics): - p = subprocess.Popen( - ["llvm-tblgen", "llvm/IR/Intrinsics.td"], - cwd=os.path.join(llvm_path, "llvm/include"), - stdout=subprocess.PIPE) + command = ["llvm-tblgen", "llvm/IR/Intrinsics.td"] + cwd = os.path.join(llvm_path, "llvm/include") + print("=> Running command `{}` from `{}`".format(command, cwd)) + p = subprocess.Popen(command, cwd=cwd, stdout=subprocess.PIPE) output, err = p.communicate() lines = convert_to_string(output).splitlines() pos = 0 diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/y.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/y.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/y.sh 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_gcc/y.sh 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -e +echo "[BUILD] build system" 1>&2 +cd build_system +cargo build --release +cd .. 
+./build_system/target/release/y $@ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -7,17 +7,15 @@ test = false [dependencies] +# tidy-alphabetical-start bitflags = "1.0" cstr = "0.2" +itertools = "0.10.5" libc = "0.2" measureme = "10.0.0" -object = { version = "0.32.0", default-features = false, features = [ - "std", - "read", -] } -tracing = "0.1" -rustc_middle = { path = "../rustc_middle" } +object = { version = "0.32.0", default-features = false, features = ["std", "read"] } rustc-demangle = "0.1.21" +rustc_ast = { path = "../rustc_ast" } rustc_attr = { path = "../rustc_attr" } rustc_codegen_ssa = { path = "../rustc_codegen_ssa" } rustc_data_structures = { path = "../rustc_data_structures" } @@ -29,12 +27,14 @@ rustc_llvm = { path = "../rustc_llvm" } rustc_macros = { path = "../rustc_macros" } rustc_metadata = { path = "../rustc_metadata" } +rustc_middle = { path = "../rustc_middle" } rustc_query_system = { path = "../rustc_query_system" } rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } rustc_symbol_mangling = { path = "../rustc_symbol_mangling" } rustc_target = { path = "../rustc_target" } -smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -rustc_ast = { path = "../rustc_ast" } -rustc_span = { path = "../rustc_span" } serde = { version = "1", features = [ "derive" ]} serde_json = "1" +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -76,8 +76,8 @@ codegen_llvm_target_machine_with_llvm_err = could not create LLVM TargetMachine for triple: {$triple}: {$llvm_err} codegen_llvm_unknown_ctarget_feature = - unknown feature specified for `-Ctarget-feature`: `{$feature}` - .note = it is still passed through to the codegen backend + unknown and unstable feature specified for `-Ctarget-feature`: `{$feature}` + .note = it is still passed through to the codegen backend, but use of this feature might be unsound and the behavior of this feature can change in the future .possible_feature = you might have meant: `{$rust_feature}` .consider_filing_feature_request = consider filing a feature request @@ -87,6 +87,10 @@ codegen_llvm_unknown_debuginfo_compression = unknown debuginfo compression algorithm {$algorithm} - will fall back to uncompressed debuginfo +codegen_llvm_unstable_ctarget_feature = + unstable feature specified for `-Ctarget-feature`: `{$feature}` + .note = this feature is not stably supported; its behavior can change in the future + codegen_llvm_write_bytecode = failed to write bytecode to {$path}: {$err} codegen_llvm_write_ir = failed to write LLVM IR to {$path} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/abi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/abi.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/abi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/abi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -362,9 +362,14 @@ // currently use this mode so we have to allow it -- but we absolutely // shouldn't let any more targets do that. // (Also see .) + // + // The unstable abi `PtxKernel` also uses Direct for now. + // It needs to switch to something else before stabilization can happen. + // (See issue: https://github.com/rust-lang/rust/issues/117271) assert!( - matches!(&*cx.tcx.sess.target.arch, "wasm32" | "wasm64"), - "`PassMode::Direct` for aggregates only allowed on wasm targets\nProblematic type: {:#?}", + matches!(&*cx.tcx.sess.target.arch, "wasm32" | "wasm64") + || self.conv == Conv::PtxKernel, + "`PassMode::Direct` for aggregates only allowed on wasm and `extern \"ptx-kernel\"` fns\nProblematic type: {:#?}", arg.layout, ); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/back/lto.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/back/lto.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/back/lto.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/back/lto.rs 2023-12-21 16:55:28.000000000 +0000 @@ -19,7 +19,6 @@ use rustc_middle::bug; use rustc_middle::dep_graph::WorkProduct; use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel}; -use rustc_session::cgu_reuse_tracker::CguReuse; use rustc_session::config::{self, CrateType, Lto}; use std::ffi::{CStr, CString}; @@ -585,7 +584,6 @@ copy_jobs.push(work_product); info!(" - {}: re-used", module_name); assert!(cgcx.incr_comp_session_dir.is_some()); - cgcx.cgu_reuse_tracker.set_actual_reuse(module_name, CguReuse::PostLto); continue; } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/back/write.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/back/write.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/back/write.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/back/write.rs 2023-12-21 16:55:28.000000000 +0000 @@ -259,9 +259,17 @@ }; let debuginfo_compression = SmallCStr::new(&debuginfo_compression); + let should_prefer_remapped_for_split_debuginfo_paths = + sess.should_prefer_remapped_for_split_debuginfo_paths(); + Arc::new(move |config: TargetMachineFactoryConfig| { let path_to_cstring_helper = |path: Option| -> CString { - let path = path_mapping.map_prefix(path.unwrap_or_default()).0; + let path = path.unwrap_or_default(); + let path = if should_prefer_remapped_for_split_debuginfo_paths { + path_mapping.map_prefix(path).0 + } else { + path.into() + }; CString::new(path.to_str().unwrap()).unwrap() }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/builder.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/builder.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/builder.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/builder.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,6 +3,7 @@ use crate::common::Funclet; use crate::context::CodegenCx; use crate::llvm::{self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, False, True}; +use crate::llvm_util; use crate::type_::Type; use crate::type_of::LayoutLlvmExt; use crate::value::Value; @@ -1225,9 
+1226,16 @@ unsafe { llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, UNNAMED) } } - fn do_not_inline(&mut self, llret: &'ll Value) { - let noinline = llvm::AttributeKind::NoInline.create_attr(self.llcx); - attributes::apply_to_callsite(llret, llvm::AttributePlace::Function, &[noinline]); + fn apply_attrs_to_cleanup_callsite(&mut self, llret: &'ll Value) { + if llvm_util::get_version() < (17, 0, 2) { + // Work around https://github.com/llvm/llvm-project/issues/66984. + let noinline = llvm::AttributeKind::NoInline.create_attr(self.llcx); + attributes::apply_to_callsite(llret, llvm::AttributePlace::Function, &[noinline]); + } else { + // Cleanup is always the cold path. + let cold_inline = llvm::AttributeKind::Cold.create_attr(self.llcx); + attributes::apply_to_callsite(llret, llvm::AttributePlace::Function, &[cold_inline]); + } } } @@ -1513,8 +1521,13 @@ llfn: &'ll Value, ) { let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) }; - if self.tcx.sess.is_sanitizer_cfi_enabled() && let Some(fn_abi) = fn_abi && is_indirect_call { - if let Some(fn_attrs) = fn_attrs && fn_attrs.no_sanitize.contains(SanitizerSet::CFI) { + if self.tcx.sess.is_sanitizer_cfi_enabled() + && let Some(fn_abi) = fn_abi + && is_indirect_call + { + if let Some(fn_attrs) = fn_attrs + && fn_attrs.no_sanitize.contains(SanitizerSet::CFI) + { return; } @@ -1551,25 +1564,29 @@ llfn: &'ll Value, ) -> Option> { let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) }; - let kcfi_bundle = - if self.tcx.sess.is_sanitizer_kcfi_enabled() && let Some(fn_abi) = fn_abi && is_indirect_call { - if let Some(fn_attrs) = fn_attrs && fn_attrs.no_sanitize.contains(SanitizerSet::KCFI) { - return None; - } + let kcfi_bundle = if self.tcx.sess.is_sanitizer_kcfi_enabled() + && let Some(fn_abi) = fn_abi + && is_indirect_call + { + if let Some(fn_attrs) = fn_attrs + && fn_attrs.no_sanitize.contains(SanitizerSet::KCFI) + { + return None; + } - let mut options = TypeIdOptions::empty(); - if self.tcx.sess.is_sanitizer_cfi_generalize_pointers_enabled() { - options.insert(TypeIdOptions::GENERALIZE_POINTERS); - } - if self.tcx.sess.is_sanitizer_cfi_normalize_integers_enabled() { - options.insert(TypeIdOptions::NORMALIZE_INTEGERS); - } + let mut options = TypeIdOptions::empty(); + if self.tcx.sess.is_sanitizer_cfi_generalize_pointers_enabled() { + options.insert(TypeIdOptions::GENERALIZE_POINTERS); + } + if self.tcx.sess.is_sanitizer_cfi_normalize_integers_enabled() { + options.insert(TypeIdOptions::NORMALIZE_INTEGERS); + } - let kcfi_typeid = kcfi_typeid_for_fnabi(self.tcx, fn_abi, options); - Some(llvm::OperandBundleDef::new("kcfi", &[self.const_u32(kcfi_typeid)])) - } else { - None - }; + let kcfi_typeid = kcfi_typeid_for_fnabi(self.tcx, fn_abi, options); + Some(llvm::OperandBundleDef::new("kcfi", &[self.const_u32(kcfi_typeid)])) + } else { + None + }; kcfi_bundle } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/callee.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/callee.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/callee.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/callee.rs 2023-12-21 16:55:28.000000000 +0000 @@ -46,8 +46,8 @@ llfn } else { let instance_def_id = instance.def_id(); - let llfn = if tcx.sess.target.arch == "x86" && - let Some(dllimport) = common::get_dllimport(tcx, instance_def_id, sym) + let llfn = if tcx.sess.target.arch == "x86" + && let Some(dllimport) 
= common::get_dllimport(tcx, instance_def_id, sym) { // Fix for https://github.com/rust-lang/rust/issues/104453 // On x86 Windows, LLVM uses 'L' as the prefix for any private @@ -60,8 +60,18 @@ // LLVM will prefix the name with `__imp_`. Ideally, we'd like the // existing logic below to set the Storage Class, but it has an // exemption for MinGW for backwards compatability. - let llfn = cx.declare_fn(&common::i686_decorated_name(&dllimport, common::is_mingw_gnu_toolchain(&tcx.sess.target), true), fn_abi, Some(instance)); - unsafe { llvm::LLVMSetDLLStorageClass(llfn, llvm::DLLStorageClass::DllImport); } + let llfn = cx.declare_fn( + &common::i686_decorated_name( + &dllimport, + common::is_mingw_gnu_toolchain(&tcx.sess.target), + true, + ), + fn_abi, + Some(instance), + ); + unsafe { + llvm::LLVMSetDLLStorageClass(llfn, llvm::DLLStorageClass::DllImport); + } llfn } else { cx.declare_fn(sym, fn_abi, Some(instance)) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/consts.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/consts.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/consts.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/consts.rs 2023-12-21 16:55:28.000000000 +0000 @@ -182,10 +182,17 @@ llvm::LLVMSetInitializer(g2, g1); g2 } - } else if cx.tcx.sess.target.arch == "x86" && - let Some(dllimport) = common::get_dllimport(cx.tcx, def_id, sym) + } else if cx.tcx.sess.target.arch == "x86" + && let Some(dllimport) = common::get_dllimport(cx.tcx, def_id, sym) { - cx.declare_global(&common::i686_decorated_name(&dllimport, common::is_mingw_gnu_toolchain(&cx.tcx.sess.target), true), llty) + cx.declare_global( + &common::i686_decorated_name( + &dllimport, + common::is_mingw_gnu_toolchain(&cx.tcx.sess.target), + true, + ), + llty, + ) } else { // Generate an external declaration. 
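Illustrative aside (not part of the patch): the builder.rs hunk above gates the cleanup-callsite attribute on the LLVM version, keeping the `noinline` workaround for LLVM older than 17.0.2 and switching to `cold` otherwise. A hypothetical stand-alone sketch of that version gate (Rust tuples compare lexicographically, so `(17, 0, 1) < (17, 0, 2)`):

fn cleanup_callsite_attr(llvm_version: (u32, u32, u32)) -> &'static str {
    if llvm_version < (17, 0, 2) {
        // Work around https://github.com/llvm/llvm-project/issues/66984 on older LLVM.
        "noinline"
    } else {
        // Cleanup is always the cold path, so hint that instead.
        "cold"
    }
}

fn main() {
    assert_eq!(cleanup_callsite_attr((17, 0, 1)), "noinline");
    assert_eq!(cleanup_callsite_attr((17, 0, 6)), "cold");
}
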
// FIXME(nagisa): investigate whether it can be changed into define_global @@ -367,15 +374,7 @@ let g = self.get_static(def_id); - // boolean SSA values are i1, but they have to be stored in i8 slots, - // otherwise some LLVM optimization passes don't work as expected - let mut val_llty = self.val_ty(v); - let v = if val_llty == self.type_i1() { - val_llty = self.type_i8(); - llvm::LLVMConstZExt(v, val_llty) - } else { - v - }; + let val_llty = self.val_ty(v); let instance = Instance::mono(self.tcx, def_id); let ty = instance.ty(self.tcx, ty::ParamEnv::reveal_all()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/context.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/context.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/context.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/context.rs 2023-12-21 16:55:28.000000000 +0000 @@ -26,8 +26,8 @@ use rustc_session::config::{BranchProtection, CFGuard, CFProtection}; use rustc_session::config::{CrateType, DebugInfo, PAuthKey, PacRet}; use rustc_session::Session; -use rustc_span::source_map::Span; use rustc_span::source_map::Spanned; +use rustc_span::Span; use rustc_target::abi::{ call::FnAbi, HasDataLayout, PointeeInfo, Size, TargetDataLayout, VariantIdx, }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,4 @@ -use rustc_middle::mir::coverage::{CounterId, ExpressionId, Operand}; +use rustc_middle::mir::coverage::{CounterId, CovTerm, ExpressionId}; /// Must match the layout of `LLVMRustCounterKind`. #[derive(Copy, Clone, Debug)] @@ -43,11 +43,11 @@ Self { kind: CounterKind::Expression, id: expression_id.as_u32() } } - pub(crate) fn from_operand(operand: Operand) -> Self { - match operand { - Operand::Zero => Self::ZERO, - Operand::Counter(id) => Self::counter_value_reference(id), - Operand::Expression(id) => Self::expression(id), + pub(crate) fn from_term(term: CovTerm) -> Self { + match term { + CovTerm::Zero => Self::ZERO, + CovTerm::Counter(id) => Self::counter_value_reference(id), + CovTerm::Expression(id) => Self::expression(id), } } } @@ -73,17 +73,6 @@ pub rhs: Counter, } -impl CounterExpression { - /// The dummy expression `(0 - 0)` has a representation of all zeroes, - /// making it marginally more efficient to initialize than `(0 + 0)`. - pub(crate) const DUMMY: Self = - Self { lhs: Counter::ZERO, kind: ExprKind::Subtract, rhs: Counter::ZERO }; - - pub fn new(lhs: Counter, kind: ExprKind, rhs: Counter) -> Self { - Self { kind, lhs, rhs } - } -} - /// Corresponds to enum `llvm::coverage::CounterMappingRegion::RegionKind`. /// /// Must match the layout of `LLVMRustCounterMappingRegionKind`. 
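Illustrative aside (not part of the patch): the coverageinfo/ffi.rs hunk above replaces the old `Operand` type with `CovTerm` and lowers it to the FFI-level `Counter` via the new `from_term`. A minimal stand-alone sketch of that mapping, using simplified stand-in types rather than the real `rustc_middle::mir::coverage` ones:

/// Simplified stand-ins for the real types in `rustc_middle::mir::coverage`
/// and `rustc_codegen_llvm::coverageinfo::ffi`.
#[derive(Copy, Clone, Debug, PartialEq)]
enum CovTerm {
    Zero,
    Counter(u32),
    Expression(u32),
}

#[derive(Copy, Clone, Debug, PartialEq)]
enum Counter {
    Zero,
    CounterValueReference(u32),
    Expression(u32),
}

impl Counter {
    /// Mirrors the new `Counter::from_term`: each Rust-side term maps 1:1
    /// onto an FFI counter kind.
    fn from_term(term: CovTerm) -> Self {
        match term {
            CovTerm::Zero => Counter::Zero,
            CovTerm::Counter(id) => Counter::CounterValueReference(id),
            CovTerm::Expression(id) => Counter::Expression(id),
        }
    }
}

fn main() {
    assert_eq!(Counter::from_term(CovTerm::Counter(3)), Counter::CounterValueReference(3));
    assert_eq!(Counter::from_term(CovTerm::Zero), Counter::Zero);
}
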
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,271 +1,270 @@ use crate::coverageinfo::ffi::{Counter, CounterExpression, ExprKind}; +use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::FxIndexSet; -use rustc_index::IndexVec; -use rustc_middle::mir::coverage::{CodeRegion, CounterId, ExpressionId, Op, Operand}; +use rustc_index::bit_set::BitSet; +use rustc_middle::mir::coverage::{ + CodeRegion, CounterId, CovTerm, Expression, ExpressionId, FunctionCoverageInfo, Mapping, Op, +}; use rustc_middle::ty::Instance; -use rustc_middle::ty::TyCtxt; +use rustc_span::Symbol; -#[derive(Clone, Debug, PartialEq)] -pub struct Expression { - lhs: Operand, - op: Op, - rhs: Operand, - region: Option, -} - -/// Collects all of the coverage regions associated with (a) injected counters, (b) counter -/// expressions (additions or subtraction), and (c) unreachable regions (always counted as zero), -/// for a given Function. This struct also stores the `function_source_hash`, -/// computed during instrumentation, and forwarded with counters. -/// -/// Note, it may be important to understand LLVM's definitions of `unreachable` regions versus "gap -/// regions" (or "gap areas"). A gap region is a code region within a counted region (either counter -/// or expression), but the line or lines in the gap region are not executable (such as lines with -/// only whitespace or comments). According to LLVM Code Coverage Mapping documentation, "A count -/// for a gap area is only used as the line execution count if there are no other regions on a -/// line." +/// Holds all of the coverage mapping data associated with a function instance, +/// collected during traversal of `Coverage` statements in the function's MIR. #[derive(Debug)] -pub struct FunctionCoverage<'tcx> { - instance: Instance<'tcx>, - source_hash: u64, +pub struct FunctionCoverageCollector<'tcx> { + /// Coverage info that was attached to this function by the instrumentor. + function_coverage_info: &'tcx FunctionCoverageInfo, is_used: bool, - counters: IndexVec>, - expressions: IndexVec>, - unreachable_regions: Vec, + + /// Tracks which counters have been seen, so that we can identify mappings + /// to counters that were optimized out, and set them to zero. + counters_seen: BitSet, + /// Contains all expression IDs that have been seen in an `ExpressionUsed` + /// coverage statement, plus all expression IDs that aren't directly used + /// by any mappings (and therefore do not have expression-used statements). + /// After MIR traversal is finished, we can conclude that any IDs missing + /// from this set must have had their statements deleted by MIR opts. + expressions_seen: BitSet, } -impl<'tcx> FunctionCoverage<'tcx> { +impl<'tcx> FunctionCoverageCollector<'tcx> { /// Creates a new set of coverage data for a used (called) function. 
- pub fn new(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self { - Self::create(tcx, instance, true) + pub fn new( + instance: Instance<'tcx>, + function_coverage_info: &'tcx FunctionCoverageInfo, + ) -> Self { + Self::create(instance, function_coverage_info, true) } /// Creates a new set of coverage data for an unused (never called) function. - pub fn unused(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self { - Self::create(tcx, instance, false) + pub fn unused( + instance: Instance<'tcx>, + function_coverage_info: &'tcx FunctionCoverageInfo, + ) -> Self { + Self::create(instance, function_coverage_info, false) } - fn create(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>, is_used: bool) -> Self { - let coverageinfo = tcx.coverageinfo(instance.def); + fn create( + instance: Instance<'tcx>, + function_coverage_info: &'tcx FunctionCoverageInfo, + is_used: bool, + ) -> Self { + let num_counters = function_coverage_info.num_counters; + let num_expressions = function_coverage_info.expressions.len(); debug!( - "FunctionCoverage::create(instance={:?}) has coverageinfo={:?}. is_used={}", - instance, coverageinfo, is_used + "FunctionCoverage::create(instance={instance:?}) has \ + num_counters={num_counters}, num_expressions={num_expressions}, is_used={is_used}" ); - Self { - instance, - source_hash: 0, // will be set with the first `add_counter()` - is_used, - counters: IndexVec::from_elem_n(None, coverageinfo.num_counters as usize), - expressions: IndexVec::from_elem_n(None, coverageinfo.num_expressions as usize), - unreachable_regions: Vec::new(), - } - } - /// Returns true for a used (called) function, and false for an unused function. - pub fn is_used(&self) -> bool { - self.is_used - } - - /// Sets the function source hash value. If called multiple times for the same function, all - /// calls should have the same hash value. - pub fn set_function_source_hash(&mut self, source_hash: u64) { - if self.source_hash == 0 { - self.source_hash = source_hash; - } else { - debug_assert_eq!(source_hash, self.source_hash); + // Create a filled set of expression IDs, so that expressions not + // directly used by mappings will be treated as "seen". + // (If they end up being unused, LLVM will delete them for us.) + let mut expressions_seen = BitSet::new_filled(num_expressions); + // For each expression ID that is directly used by one or more mappings, + // mark it as not-yet-seen. This indicates that we expect to see a + // corresponding `ExpressionUsed` statement during MIR traversal. + for Mapping { term, .. } in &function_coverage_info.mappings { + if let &CovTerm::Expression(id) = term { + expressions_seen.remove(id); + } } - } - /// Adds a code region to be counted by an injected counter intrinsic. - pub fn add_counter(&mut self, id: CounterId, region: CodeRegion) { - if let Some(previous_region) = self.counters[id].replace(region.clone()) { - assert_eq!(previous_region, region, "add_counter: code region for id changed"); + Self { + function_coverage_info, + is_used, + counters_seen: BitSet::new_empty(num_counters), + expressions_seen, } } - /// Both counters and "counter expressions" (or simply, "expressions") can be operands in other - /// expressions. These are tracked as separate variants of `Operand`, so there is no ambiguity - /// between operands that are counter IDs and operands that are expression IDs. 
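For illustration only (not part of the patch): in the new collector, `expressions_seen` starts out completely filled and only the expression IDs that mappings reference directly are cleared, so a missing `ExpressionUsed` statement later identifies an optimized-out expression. A simplified sketch of that initialization, with plain indices and `Vec<bool>` standing in for the real ID types and `BitSet`:

/// `mapping_terms` holds, per mapping, `Some(expression_id)` when the mapping
/// refers directly to an expression (standing in for `CovTerm::Expression`),
/// and `None` for counter/zero terms.
fn initial_expressions_seen(num_expressions: usize, mapping_terms: &[Option<usize>]) -> Vec<bool> {
    // Start with every expression treated as "seen"...
    let mut seen = vec![true; num_expressions];
    // ...and require an `ExpressionUsed` statement only for expressions that
    // a mapping refers to directly.
    for &term in mapping_terms {
        if let Some(id) = term {
            seen[id] = false;
        }
    }
    seen
}

fn main() {
    // Three expressions; only expression 1 is referenced directly by a mapping.
    assert_eq!(initial_expressions_seen(3, &[None, Some(1)]), vec![true, false, true]);
}
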
- pub fn add_counter_expression( - &mut self, - expression_id: ExpressionId, - lhs: Operand, - op: Op, - rhs: Operand, - region: Option, - ) { - debug!( - "add_counter_expression({:?}, lhs={:?}, op={:?}, rhs={:?} at {:?}", - expression_id, lhs, op, rhs, region - ); - debug_assert!( - expression_id.as_usize() < self.expressions.len(), - "expression_id {} is out of range for expressions.len() = {} - for {:?}", - expression_id.as_usize(), - self.expressions.len(), - self, - ); - if let Some(previous_expression) = self.expressions[expression_id].replace(Expression { - lhs, - op, - rhs, - region: region.clone(), - }) { - assert_eq!( - previous_expression, - Expression { lhs, op, rhs, region }, - "add_counter_expression: expression for id changed" - ); - } + /// Marks a counter ID as having been seen in a counter-increment statement. + #[instrument(level = "debug", skip(self))] + pub(crate) fn mark_counter_id_seen(&mut self, id: CounterId) { + self.counters_seen.insert(id); } - /// Add a region that will be marked as "unreachable", with a constant "zero counter". - pub fn add_unreachable_region(&mut self, region: CodeRegion) { - self.unreachable_regions.push(region) + /// Marks an expression ID as having been seen in an expression-used statement. + #[instrument(level = "debug", skip(self))] + pub(crate) fn mark_expression_id_seen(&mut self, id: ExpressionId) { + self.expressions_seen.insert(id); } - /// Perform some simplifications to make the final coverage mappings - /// slightly smaller. + /// Identify expressions that will always have a value of zero, and note + /// their IDs in [`ZeroExpressions`]. Mappings that refer to a zero expression + /// can instead become mappings to a constant zero value. /// /// This method mainly exists to preserve the simplifications that were /// already being performed by the Rust-side expression renumbering, so that /// the resulting coverage mappings don't get worse. - pub(crate) fn simplify_expressions(&mut self) { + fn identify_zero_expressions(&self) -> ZeroExpressions { // The set of expressions that either were optimized out entirely, or // have zero as both of their operands, and will therefore always have // a value of zero. Other expressions that refer to these as operands - // can have those operands replaced with `Operand::Zero`. - let mut zero_expressions = FxIndexSet::default(); + // can have those operands replaced with `CovTerm::Zero`. + let mut zero_expressions = ZeroExpressions::default(); - // For each expression, perform simplifications based on lower-numbered - // expressions, and then update the set of always-zero expressions if - // necessary. + // Simplify a copy of each expression based on lower-numbered expressions, + // and then update the set of always-zero expressions if necessary. // (By construction, expressions can only refer to other expressions - // that have lower IDs, so one simplification pass is sufficient.) - for (id, maybe_expression) in self.expressions.iter_enumerated_mut() { - let Some(expression) = maybe_expression else { - // If an expression is missing, it must have been optimized away, + // that have lower IDs, so one pass is sufficient.) + for (id, expression) in self.function_coverage_info.expressions.iter_enumerated() { + if !self.expressions_seen.contains(id) { + // If an expression was not seen, it must have been optimized away, // so any operand that refers to it can be replaced with zero. 
zero_expressions.insert(id); continue; + } + + // We don't need to simplify the actual expression data in the + // expressions list; we can just simplify a temporary copy and then + // use that to update the set of always-zero expressions. + let Expression { mut lhs, op, mut rhs } = *expression; + + // If an expression has an operand that is also an expression, the + // operand's ID must be strictly lower. This is what lets us find + // all zero expressions in one pass. + let assert_operand_expression_is_lower = |operand_id: ExpressionId| { + assert!( + operand_id < id, + "Operand {operand_id:?} should be less than {id:?} in {expression:?}", + ) }; - // If an operand refers to an expression that is always zero, then - // that operand can be replaced with `Operand::Zero`. - let maybe_set_operand_to_zero = |operand: &mut Operand| match &*operand { - Operand::Expression(id) if zero_expressions.contains(id) => { - *operand = Operand::Zero; + // If an operand refers to a counter or expression that is always + // zero, then that operand can be replaced with `CovTerm::Zero`. + let maybe_set_operand_to_zero = |operand: &mut CovTerm| { + if let CovTerm::Expression(id) = *operand { + assert_operand_expression_is_lower(id); + } + + if is_zero_term(&self.counters_seen, &zero_expressions, *operand) { + *operand = CovTerm::Zero; } - _ => (), }; - maybe_set_operand_to_zero(&mut expression.lhs); - maybe_set_operand_to_zero(&mut expression.rhs); + maybe_set_operand_to_zero(&mut lhs); + maybe_set_operand_to_zero(&mut rhs); // Coverage counter values cannot be negative, so if an expression // involves subtraction from zero, assume that its RHS must also be zero. // (Do this after simplifications that could set the LHS to zero.) - if let Expression { lhs: Operand::Zero, op: Op::Subtract, .. } = expression { - expression.rhs = Operand::Zero; + if lhs == CovTerm::Zero && op == Op::Subtract { + rhs = CovTerm::Zero; } // After the above simplifications, if both operands are zero, then // we know that this expression is always zero too. - if let Expression { lhs: Operand::Zero, rhs: Operand::Zero, .. } = expression { + if lhs == CovTerm::Zero && rhs == CovTerm::Zero { zero_expressions.insert(id); } } + + zero_expressions + } + + pub(crate) fn into_finished(self) -> FunctionCoverage<'tcx> { + let zero_expressions = self.identify_zero_expressions(); + let FunctionCoverageCollector { function_coverage_info, is_used, counters_seen, .. } = self; + + FunctionCoverage { function_coverage_info, is_used, counters_seen, zero_expressions } + } +} + +pub(crate) struct FunctionCoverage<'tcx> { + function_coverage_info: &'tcx FunctionCoverageInfo, + is_used: bool, + + counters_seen: BitSet, + zero_expressions: ZeroExpressions, +} + +impl<'tcx> FunctionCoverage<'tcx> { + /// Returns true for a used (called) function, and false for an unused function. + pub(crate) fn is_used(&self) -> bool { + self.is_used } /// Return the source hash, generated from the HIR node structure, and used to indicate whether /// or not the source code structure changed between different compilations. pub fn source_hash(&self) -> u64 { - self.source_hash + if self.is_used { self.function_coverage_info.function_source_hash } else { 0 } } - /// Generate an array of CounterExpressions, and an iterator over all `Counter`s and their - /// associated `Regions` (from which the LLVM-specific `CoverageMapGenerator` will create - /// `CounterMappingRegion`s. 
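Illustrative aside (not part of the patch): `identify_zero_expressions` above works in a single forward pass because an expression may only refer to lower-numbered expressions. A simplified, self-contained sketch of the same propagation, with plain indices and `Vec<bool>` standing in for the real ID types and `BitSet`:

#[derive(Copy, Clone)]
enum Term {
    Zero,
    Counter(usize),
    Expression(usize),
}

#[derive(Copy, Clone)]
enum Op {
    Add,
    Subtract,
}

struct Expr {
    lhs: Term,
    op: Op,
    rhs: Term,
}

/// A term is zero if it is literally zero, refers to a counter that was never
/// seen, or refers to an expression already known to be zero.
fn is_zero(term: Term, counter_seen: &[bool], zero: &[bool]) -> bool {
    match term {
        Term::Zero => true,
        Term::Counter(c) => !counter_seen[c],
        Term::Expression(e) => zero[e],
    }
}

/// Returns, for each expression, whether it is statically known to be zero.
/// One forward pass suffices because an expression may only refer to
/// lower-numbered expressions, exactly as the patched code asserts.
fn zero_expressions(exprs: &[Expr], counter_seen: &[bool], expr_seen: &[bool]) -> Vec<bool> {
    let mut zero = vec![false; exprs.len()];
    for (id, expr) in exprs.iter().enumerate() {
        if !expr_seen[id] {
            // No `ExpressionUsed` statement survived: optimized out, so always zero.
            zero[id] = true;
            continue;
        }
        let lhs_zero = is_zero(expr.lhs, counter_seen, &zero);
        // Counter values can never be negative, so `0 - rhs` forces rhs to zero too.
        let rhs_zero = is_zero(expr.rhs, counter_seen, &zero)
            || (lhs_zero && matches!(expr.op, Op::Subtract));
        if lhs_zero && rhs_zero {
            zero[id] = true;
        }
    }
    zero
}

fn main() {
    // Counter 0 was optimized out, so expression 0 (= counter0 + zero) is zero,
    // and expression 1 (= expr0 - zero) collapses to zero as well.
    let exprs = [
        Expr { lhs: Term::Counter(0), op: Op::Add, rhs: Term::Zero },
        Expr { lhs: Term::Expression(0), op: Op::Subtract, rhs: Term::Zero },
    ];
    assert_eq!(zero_expressions(&exprs, &[false], &[true, true]), vec![true, true]);
}
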
- pub fn get_expressions_and_counter_regions( - &self, - ) -> (Vec, impl Iterator) { - assert!( - self.source_hash != 0 || !self.is_used, - "No counters provided the source_hash for used function: {:?}", - self.instance - ); - - let counter_expressions = self.counter_expressions(); - // Expression IDs are indices into `self.expressions`, and on the LLVM - // side they will be treated as indices into `counter_expressions`, so - // the two vectors should correspond 1:1. - assert_eq!(self.expressions.len(), counter_expressions.len()); - - let counter_regions = self.counter_regions(); - let expression_regions = self.expression_regions(); - let unreachable_regions = self.unreachable_regions(); - - let counter_regions = - counter_regions.chain(expression_regions.into_iter().chain(unreachable_regions)); - (counter_expressions, counter_regions) - } - - fn counter_regions(&self) -> impl Iterator { - self.counters.iter_enumerated().filter_map(|(index, entry)| { - // Option::map() will return None to filter out missing counters. This may happen - // if, for example, a MIR-instrumented counter is removed during an optimization. - entry.as_ref().map(|region| (Counter::counter_value_reference(index), region)) - }) + /// Returns an iterator over all filenames used by this function's mappings. + pub(crate) fn all_file_names(&self) -> impl Iterator + Captures<'_> { + self.function_coverage_info.mappings.iter().map(|mapping| mapping.code_region.file_name) } /// Convert this function's coverage expression data into a form that can be /// passed through FFI to LLVM. - fn counter_expressions(&self) -> Vec { + pub(crate) fn counter_expressions( + &self, + ) -> impl Iterator + ExactSizeIterator + Captures<'_> { // We know that LLVM will optimize out any unused expressions before // producing the final coverage map, so there's no need to do the same // thing on the Rust side unless we're confident we can do much better. // (See `CounterExpressionsMinimizer` in `CoverageMappingWriter.cpp`.) - self.expressions - .iter() - .map(|expression| match expression { - None => { - // This expression ID was allocated, but we never saw the - // actual expression, so it must have been optimized out. - // Replace it with a dummy expression, and let LLVM take - // care of omitting it from the expression list. - CounterExpression::DUMMY - } - &Some(Expression { lhs, op, rhs, .. }) => { - // Convert the operands and operator as normal. - CounterExpression::new( - Counter::from_operand(lhs), - match op { - Op::Add => ExprKind::Add, - Op::Subtract => ExprKind::Subtract, - }, - Counter::from_operand(rhs), - ) - } - }) - .collect::>() + self.function_coverage_info.expressions.iter().map(move |&Expression { lhs, op, rhs }| { + CounterExpression { + lhs: self.counter_for_term(lhs), + kind: match op { + Op::Add => ExprKind::Add, + Op::Subtract => ExprKind::Subtract, + }, + rhs: self.counter_for_term(rhs), + } + }) } - fn expression_regions(&self) -> Vec<(Counter, &CodeRegion)> { - // Find all of the expression IDs that weren't optimized out AND have - // an attached code region, and return the corresponding mapping as a - // counter/region pair. - self.expressions - .iter_enumerated() - .filter_map(|(id, expression)| { - let code_region = expression.as_ref()?.region.as_ref()?; - Some((Counter::expression(id), code_region)) - }) - .collect::>() + /// Converts this function's coverage mappings into an intermediate form + /// that will be used by `mapgen` when preparing for FFI. 
+ pub(crate) fn counter_regions( + &self, + ) -> impl Iterator + ExactSizeIterator { + self.function_coverage_info.mappings.iter().map(move |mapping| { + let &Mapping { term, ref code_region } = mapping; + let counter = self.counter_for_term(term); + (counter, code_region) + }) } - fn unreachable_regions(&self) -> impl Iterator { - self.unreachable_regions.iter().map(|region| (Counter::ZERO, region)) + fn counter_for_term(&self, term: CovTerm) -> Counter { + if is_zero_term(&self.counters_seen, &self.zero_expressions, term) { + Counter::ZERO + } else { + Counter::from_term(term) + } + } +} + +/// Set of expression IDs that are known to always evaluate to zero. +/// Any mapping or expression operand that refers to these expressions can have +/// that reference replaced with a constant zero value. +#[derive(Default)] +struct ZeroExpressions(FxIndexSet); + +impl ZeroExpressions { + fn insert(&mut self, id: ExpressionId) { + self.0.insert(id); + } + + fn contains(&self, id: ExpressionId) -> bool { + self.0.contains(&id) + } +} + +/// Returns `true` if the given term is known to have a value of zero, taking +/// into account knowledge of which counters are unused and which expressions +/// are always zero. +fn is_zero_term( + counters_seen: &BitSet, + zero_expressions: &ZeroExpressions, + term: CovTerm, +) -> bool { + match term { + CovTerm::Zero => true, + CovTerm::Counter(id) => !counters_seen.contains(id), + CovTerm::Expression(id) => zero_expressions.contains(id), } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,18 +1,20 @@ use crate::common::CodegenCx; use crate::coverageinfo; use crate::coverageinfo::ffi::CounterMappingRegion; -use crate::coverageinfo::map_data::FunctionCoverage; +use crate::coverageinfo::map_data::{FunctionCoverage, FunctionCoverageCollector}; use crate::llvm; -use rustc_codegen_ssa::traits::ConstMethods; -use rustc_data_structures::fx::FxIndexSet; +use itertools::Itertools as _; +use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods}; +use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; use rustc_index::IndexVec; use rustc_middle::bug; -use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; +use rustc_middle::mir; use rustc_middle::mir::coverage::CodeRegion; -use rustc_middle::ty::TyCtxt; +use rustc_middle::ty::{self, TyCtxt}; +use rustc_span::def_id::DefIdSet; use rustc_span::Symbol; /// Generates and exports the Coverage Map. @@ -56,21 +58,40 @@ return; } - let mut global_file_table = GlobalFileTable::new(tcx); + let function_coverage_entries = function_coverage_map + .into_iter() + .map(|(instance, function_coverage)| (instance, function_coverage.into_finished())) + .collect::>(); + + let all_file_names = + function_coverage_entries.iter().flat_map(|(_, fn_cov)| fn_cov.all_file_names()); + let global_file_table = GlobalFileTable::new(all_file_names); + + // Encode all filenames referenced by coverage mappings in this CGU. 
+ let filenames_buffer = global_file_table.make_filenames_buffer(tcx); + + let filenames_size = filenames_buffer.len(); + let filenames_val = cx.const_bytes(&filenames_buffer); + let filenames_ref = coverageinfo::hash_bytes(&filenames_buffer); + + // Generate the coverage map header, which contains the filenames used by + // this CGU's coverage mappings, and store it in a well-known global. + let cov_data_val = generate_coverage_map(cx, version, filenames_size, filenames_val); + coverageinfo::save_cov_data_to_mod(cx, cov_data_val); + + let mut unused_function_names = Vec::new(); + let covfun_section_name = coverageinfo::covfun_section_name(cx); // Encode coverage mappings and generate function records - let mut function_data = Vec::new(); - for (instance, mut function_coverage) in function_coverage_map { + for (instance, function_coverage) in function_coverage_entries { debug!("Generate function coverage for {}, {:?}", cx.codegen_unit.name(), instance); - function_coverage.simplify_expressions(); - let function_coverage = function_coverage; let mangled_function_name = tcx.symbol_name(instance).name; let source_hash = function_coverage.source_hash(); let is_used = function_coverage.is_used(); let coverage_mapping_buffer = - encode_mappings_for_function(&mut global_file_table, &function_coverage); + encode_mappings_for_function(&global_file_table, &function_coverage); if coverage_mapping_buffer.is_empty() { if function_coverage.is_used() { @@ -84,21 +105,10 @@ } } - function_data.push((mangled_function_name, source_hash, is_used, coverage_mapping_buffer)); - } - - // Encode all filenames referenced by counters/expressions in this module - let filenames_buffer = global_file_table.into_filenames_buffer(); - - let filenames_size = filenames_buffer.len(); - let filenames_val = cx.const_bytes(&filenames_buffer); - let filenames_ref = coverageinfo::hash_bytes(&filenames_buffer); - - // Generate the LLVM IR representation of the coverage map and store it in a well-known global - let cov_data_val = generate_coverage_map(cx, version, filenames_size, filenames_val); + if !is_used { + unused_function_names.push(mangled_function_name); + } - let covfun_section_name = coverageinfo::covfun_section_name(cx); - for (mangled_function_name, source_hash, is_used, coverage_mapping_buffer) in function_data { save_function_record( cx, &covfun_section_name, @@ -110,90 +120,143 @@ ); } - // Save the coverage data value to LLVM IR - coverageinfo::save_cov_data_to_mod(cx, cov_data_val); + // For unused functions, we need to take their mangled names and store them + // in a specially-named global array. LLVM's `InstrProfiling` pass will + // detect this global and include those names in its `__llvm_prf_names` + // section. (See `llvm/lib/Transforms/Instrumentation/InstrProfiling.cpp`.) + if !unused_function_names.is_empty() { + assert!(cx.codegen_unit.is_code_coverage_dead_code_cgu()); + + let name_globals = unused_function_names + .into_iter() + .map(|mangled_function_name| cx.const_str(mangled_function_name).0) + .collect::>(); + let initializer = cx.const_array(cx.type_ptr(), &name_globals); + + let array = llvm::add_global(cx.llmod, cx.val_ty(initializer), "__llvm_coverage_names"); + llvm::set_global_constant(array, true); + llvm::set_linkage(array, llvm::Linkage::InternalLinkage); + llvm::set_initializer(array, initializer); + } } +/// Maps "global" (per-CGU) file ID numbers to their underlying filenames. 
struct GlobalFileTable { - global_file_table: FxIndexSet, + /// This "raw" table doesn't include the working dir, so a filename's + /// global ID is its index in this set **plus one**. + raw_file_table: FxIndexSet, } impl GlobalFileTable { - fn new(tcx: TyCtxt<'_>) -> Self { - let mut global_file_table = FxIndexSet::default(); + fn new(all_file_names: impl IntoIterator) -> Self { + // Collect all of the filenames into a set. Filenames usually come in + // contiguous runs, so we can dedup adjacent ones to save work. + let mut raw_file_table = all_file_names.into_iter().dedup().collect::>(); + + // Sort the file table by its actual string values, not the arbitrary + // ordering of its symbols. + raw_file_table.sort_unstable_by(|a, b| a.as_str().cmp(b.as_str())); + + Self { raw_file_table } + } + + fn global_file_id_for_file_name(&self, file_name: Symbol) -> u32 { + let raw_id = self.raw_file_table.get_index_of(&file_name).unwrap_or_else(|| { + bug!("file name not found in prepared global file table: {file_name}"); + }); + // The raw file table doesn't include an entry for the working dir + // (which has ID 0), so add 1 to get the correct ID. + (raw_id + 1) as u32 + } + + fn make_filenames_buffer(&self, tcx: TyCtxt<'_>) -> Vec { // LLVM Coverage Mapping Format version 6 (zero-based encoded as 5) // requires setting the first filename to the compilation directory. // Since rustc generates coverage maps with relative paths, the // compilation directory can be combined with the relative paths // to get absolute paths, if needed. - let working_dir = Symbol::intern( - &tcx.sess.opts.working_dir.remapped_path_if_available().to_string_lossy(), - ); - global_file_table.insert(working_dir); - Self { global_file_table } - } - - fn global_file_id_for_file_name(&mut self, file_name: Symbol) -> u32 { - let (global_file_id, _) = self.global_file_table.insert_full(file_name); - global_file_id as u32 - } - - fn into_filenames_buffer(self) -> Vec { - // This method takes `self` so that the caller can't accidentally - // modify the original file table after encoding it into a buffer. + use rustc_session::RemapFileNameExt; + let working_dir: &str = &tcx.sess.opts.working_dir.for_codegen(&tcx.sess).to_string_lossy(); llvm::build_byte_buffer(|buffer| { coverageinfo::write_filenames_section_to_buffer( - self.global_file_table.iter().map(Symbol::as_str), + // Insert the working dir at index 0, before the other filenames. + std::iter::once(working_dir).chain(self.raw_file_table.iter().map(Symbol::as_str)), buffer, ); }) } } +rustc_index::newtype_index! { + // Tell the newtype macro to not generate `Encode`/`Decode` impls. + #[custom_encodable] + struct LocalFileId {} +} + +/// Holds a mapping from "local" (per-function) file IDs to "global" (per-CGU) +/// file IDs. +#[derive(Default)] +struct VirtualFileMapping { + local_to_global: IndexVec, + global_to_local: FxIndexMap, +} + +impl VirtualFileMapping { + fn local_id_for_global(&mut self, global_file_id: u32) -> LocalFileId { + *self + .global_to_local + .entry(global_file_id) + .or_insert_with(|| self.local_to_global.push(global_file_id)) + } + + fn into_vec(self) -> Vec { + self.local_to_global.raw + } +} + /// Using the expressions and counter regions collected for a single function, /// generate the variable-sized payload of its corresponding `__llvm_covfun` /// entry. The payload is returned as a vector of bytes. /// /// Newly-encountered filenames will be added to the global file table. 
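As a stand-alone illustration (not part of the patch): `GlobalFileTable` above assigns global IDs as index-plus-one (ID 0 is reserved for the working directory), and the new `VirtualFileMapping` interns those global IDs into a dense per-function numbering. A simplified sketch of that interning, using a plain `HashMap`/`Vec` instead of rustc's `FxIndexMap`/`IndexVec`:

use std::collections::HashMap;

#[derive(Default)]
struct VirtualFileMapping {
    local_to_global: Vec<u32>,
    global_to_local: HashMap<u32, usize>,
}

impl VirtualFileMapping {
    /// Returns the dense per-function ID for a per-CGU ("global") file ID,
    /// allocating the next local slot the first time a global ID is seen.
    fn local_id_for_global(&mut self, global_file_id: u32) -> usize {
        if let Some(&local) = self.global_to_local.get(&global_file_id) {
            return local;
        }
        let local = self.local_to_global.len();
        self.local_to_global.push(global_file_id);
        self.global_to_local.insert(global_file_id, local);
        local
    }
}

fn main() {
    let mut vfm = VirtualFileMapping::default();
    // Global file IDs 7 and 3 become local IDs 0 and 1, in first-seen order.
    assert_eq!(vfm.local_id_for_global(7), 0);
    assert_eq!(vfm.local_id_for_global(3), 1);
    assert_eq!(vfm.local_id_for_global(7), 0);
    assert_eq!(vfm.local_to_global, vec![7, 3]);
}
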
fn encode_mappings_for_function( - global_file_table: &mut GlobalFileTable, + global_file_table: &GlobalFileTable, function_coverage: &FunctionCoverage<'_>, ) -> Vec { - let (expressions, counter_regions) = function_coverage.get_expressions_and_counter_regions(); - - let mut counter_regions = counter_regions.collect::>(); + let counter_regions = function_coverage.counter_regions(); if counter_regions.is_empty() { return Vec::new(); } - let mut virtual_file_mapping = IndexVec::::new(); + let expressions = function_coverage.counter_expressions().collect::>(); + + let mut virtual_file_mapping = VirtualFileMapping::default(); let mut mapping_regions = Vec::with_capacity(counter_regions.len()); - // Sort the list of (counter, region) mapping pairs by region, so that they - // can be grouped by filename. Prepare file IDs for each filename, and - // prepare the mapping data so that we can pass it through FFI to LLVM. - counter_regions.sort_by_key(|(_counter, region)| *region); - for counter_regions_for_file in - counter_regions.group_by(|(_, a), (_, b)| a.file_name == b.file_name) + // Group mappings into runs with the same filename, preserving the order + // yielded by `FunctionCoverage`. + // Prepare file IDs for each filename, and prepare the mapping data so that + // we can pass it through FFI to LLVM. + for (file_name, counter_regions_for_file) in + &counter_regions.group_by(|(_counter, region)| region.file_name) { - // Look up (or allocate) the global file ID for this filename. - let file_name = counter_regions_for_file[0].1.file_name; + // Look up the global file ID for this filename. let global_file_id = global_file_table.global_file_id_for_file_name(file_name); // Associate that global file ID with a local file ID for this function. - let local_file_id: u32 = virtual_file_mapping.push(global_file_id); - debug!(" file id: local {local_file_id} => global {global_file_id} = '{file_name:?}'"); + let local_file_id = virtual_file_mapping.local_id_for_global(global_file_id); + debug!(" file id: {local_file_id:?} => global {global_file_id} = '{file_name:?}'"); // For each counter/region pair in this function+file, convert it to a // form suitable for FFI. - for &(counter, region) in counter_regions_for_file { + for (counter, region) in counter_regions_for_file { let CodeRegion { file_name: _, start_line, start_col, end_line, end_col } = *region; debug!("Adding counter {counter:?} to map for {region:?}"); mapping_regions.push(CounterMappingRegion::code_region( counter, - local_file_id, + local_file_id.as_u32(), start_line, start_col, end_line, @@ -205,7 +268,7 @@ // Encode the function's coverage mappings into a buffer. llvm::build_byte_buffer(|buffer| { coverageinfo::write_mapping_to_buffer( - virtual_file_mapping.raw, + virtual_file_mapping.into_vec(), expressions, mapping_regions, buffer, @@ -289,13 +352,12 @@ /// `-Clink-dead-code` will not generate code for unused generic functions.) /// /// We can find the unused functions (including generic functions) by the set difference of all MIR -/// `DefId`s (`tcx` query `mir_keys`) minus the codegenned `DefId`s (`tcx` query -/// `codegened_and_inlined_items`). +/// `DefId`s (`tcx` query `mir_keys`) minus the codegenned `DefId`s (`codegenned_and_inlined_items`). /// -/// These unused functions are then codegen'd in one of the CGUs which is marked as the -/// "code coverage dead code cgu" during the partitioning process. 
This prevents us from generating -/// code regions for the same function more than once which can lead to linker errors regarding -/// duplicate symbols. +/// These unused functions don't need to be codegenned, but we do need to add them to the function +/// coverage map (in a single designated CGU) so that we still emit coverage mappings for them. +/// We also end up adding their symbol names to a special global array that LLVM will include in +/// its embedded coverage data. fn add_unused_functions(cx: &CodegenCx<'_, '_>) { assert!(cx.codegen_unit.is_code_coverage_dead_code_cgu()); @@ -315,7 +377,7 @@ // generic functions from consideration as well. if !matches!( kind, - DefKind::Fn | DefKind::AssocFn | DefKind::Closure | DefKind::Generator + DefKind::Fn | DefKind::AssocFn | DefKind::Closure | DefKind::Coroutine ) { return None; } @@ -326,21 +388,80 @@ }) .collect(); - let codegenned_def_ids = tcx.codegened_and_inlined_items(()); + let codegenned_def_ids = codegenned_and_inlined_items(tcx); - for non_codegenned_def_id in - eligible_def_ids.into_iter().filter(|id| !codegenned_def_ids.contains(id)) - { - let codegen_fn_attrs = tcx.codegen_fn_attrs(non_codegenned_def_id); - - // If a function is marked `#[coverage(off)]`, then skip generating a - // dead code stub for it. - if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NO_COVERAGE) { - debug!("skipping unused fn marked #[coverage(off)]: {:?}", non_codegenned_def_id); + // For each `DefId` that should have coverage instrumentation but wasn't + // codegenned, add it to the function coverage map as an unused function. + for def_id in eligible_def_ids.into_iter().filter(|id| !codegenned_def_ids.contains(id)) { + // Skip any function that didn't have coverage data added to it by the + // coverage instrumentor. + let body = tcx.instance_mir(ty::InstanceDef::Item(def_id)); + let Some(function_coverage_info) = body.function_coverage_info.as_deref() else { continue; + }; + + debug!("generating unused fn: {def_id:?}"); + let instance = declare_unused_fn(tcx, def_id); + add_unused_function_coverage(cx, instance, function_coverage_info); + } +} + +/// All items participating in code generation together with (instrumented) +/// items inlined into them. 
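For illustration only (not part of the patch): unused functions are still computed as a set difference, all instrumentable MIR `DefId`s minus the codegenned (or inlined-into-codegenned) ones, as the doc comment above describes. A toy sketch of that difference, with `u32` standing in for `DefId`:

use std::collections::HashSet;

/// "Instrumentable MIR items" minus "items that were codegenned or inlined
/// into codegenned items" gives the unused functions that still need
/// coverage mappings.
fn unused_functions(eligible: &HashSet<u32>, codegenned: &HashSet<u32>) -> Vec<u32> {
    let mut unused: Vec<u32> = eligible.difference(codegenned).copied().collect();
    unused.sort_unstable(); // make the example deterministic
    unused
}

fn main() {
    let eligible: HashSet<u32> = [1, 2, 3, 4].into_iter().collect();
    let codegenned: HashSet<u32> = [2, 4].into_iter().collect();
    assert_eq!(unused_functions(&eligible, &codegenned), vec![1, 3]);
}
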
+fn codegenned_and_inlined_items(tcx: TyCtxt<'_>) -> DefIdSet { + let (items, cgus) = tcx.collect_and_partition_mono_items(()); + let mut visited = DefIdSet::default(); + let mut result = items.clone(); + + for cgu in cgus { + for item in cgu.items().keys() { + if let mir::mono::MonoItem::Fn(ref instance) = item { + let did = instance.def_id(); + if !visited.insert(did) { + continue; + } + let body = tcx.instance_mir(instance.def); + for block in body.basic_blocks.iter() { + for statement in &block.statements { + let mir::StatementKind::Coverage(_) = statement.kind else { continue }; + let scope = statement.source_info.scope; + if let Some(inlined) = scope.inlined_instance(&body.source_scopes) { + result.insert(inlined.def_id()); + } + } + } + } } + } - debug!("generating unused fn: {:?}", non_codegenned_def_id); - cx.define_unused_fn(non_codegenned_def_id); + result +} + +fn declare_unused_fn<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> ty::Instance<'tcx> { + ty::Instance::new( + def_id, + ty::GenericArgs::for_item(tcx, def_id, |param, _| { + if let ty::GenericParamDefKind::Lifetime = param.kind { + tcx.lifetimes.re_erased.into() + } else { + tcx.mk_param_from_def(param) + } + }), + ) +} + +fn add_unused_function_coverage<'tcx>( + cx: &CodegenCx<'_, 'tcx>, + instance: ty::Instance<'tcx>, + function_coverage_info: &'tcx mir::coverage::FunctionCoverageInfo, +) { + // An unused function's mappings will automatically be rewritten to map to + // zero, because none of its counters/expressions are marked as seen. + let function_coverage = FunctionCoverageCollector::unused(instance, function_coverage_info); + + if let Some(coverage_context) = cx.coverage_context() { + coverage_context.function_coverage_map.borrow_mut().insert(instance, function_coverage); + } else { + bug!("Could not get the `coverage_context`"); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,10 +1,9 @@ use crate::llvm; -use crate::abi::Abi; use crate::builder::Builder; use crate::common::CodegenCx; use crate::coverageinfo::ffi::{CounterExpression, CounterMappingRegion}; -use crate::coverageinfo::map_data::FunctionCoverage; +use crate::coverageinfo::map_data::FunctionCoverageCollector; use libc::c_uint; use rustc_codegen_ssa::traits::{ @@ -12,17 +11,12 @@ StaticMethods, }; use rustc_data_structures::fx::FxHashMap; -use rustc_hir as hir; -use rustc_hir::def_id::DefId; use rustc_llvm::RustString; use rustc_middle::bug; -use rustc_middle::mir::coverage::{CounterId, CoverageKind}; +use rustc_middle::mir::coverage::CoverageKind; use rustc_middle::mir::Coverage; -use rustc_middle::ty; -use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt}; -use rustc_middle::ty::GenericArgs; +use rustc_middle::ty::layout::HasTyCtxt; use rustc_middle::ty::Instance; -use rustc_middle::ty::Ty; use std::cell::RefCell; @@ -30,14 +24,13 @@ pub(crate) mod map_data; pub mod mapgen; -const UNUSED_FUNCTION_COUNTER_ID: CounterId = CounterId::START; - const VAR_ALIGN_BYTES: usize = 8; /// A context object for maintaining all state needed by the coverageinfo module. pub struct CrateCoverageContext<'ll, 'tcx> { /// Coverage data for each instrumented function identified by DefId. 
- pub(crate) function_coverage_map: RefCell, FunctionCoverage<'tcx>>>, + pub(crate) function_coverage_map: + RefCell, FunctionCoverageCollector<'tcx>>>, pub(crate) pgo_func_name_var_map: RefCell, &'ll llvm::Value>>, } @@ -49,7 +42,9 @@ } } - pub fn take_function_coverage_map(&self) -> FxHashMap, FunctionCoverage<'tcx>> { + pub fn take_function_coverage_map( + &self, + ) -> FxHashMap, FunctionCoverageCollector<'tcx>> { self.function_coverage_map.replace(FxHashMap::default()) } } @@ -76,68 +71,56 @@ bug!("Could not get the `coverage_context`"); } } - - /// Functions with MIR-based coverage are normally codegenned _only_ if - /// called. LLVM coverage tools typically expect every function to be - /// defined (even if unused), with at least one call to LLVM intrinsic - /// `instrprof.increment`. - /// - /// Codegen a small function that will never be called, with one counter - /// that will never be incremented. - /// - /// For used/called functions, the coverageinfo was already added to the - /// `function_coverage_map` (keyed by function `Instance`) during codegen. - /// But in this case, since the unused function was _not_ previously - /// codegenned, collect the coverage `CodeRegion`s from the MIR and add - /// them. The first `CodeRegion` is used to add a single counter, with the - /// same counter ID used in the injected `instrprof.increment` intrinsic - /// call. Since the function is never called, all other `CodeRegion`s can be - /// added as `unreachable_region`s. - fn define_unused_fn(&self, def_id: DefId) { - let instance = declare_unused_fn(self, def_id); - codegen_unused_fn_and_counter(self, instance); - add_unused_function_coverage(self, instance, def_id); - } } impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> { + #[instrument(level = "debug", skip(self))] fn add_coverage(&mut self, instance: Instance<'tcx>, coverage: &Coverage) { + // Our caller should have already taken care of inlining subtleties, + // so we can assume that counter/expression IDs in this coverage + // statement are meaningful for the given instance. + // + // (Either the statement was not inlined and directly belongs to this + // instance, or it was inlined *from* this instance.) + let bx = self; + let Some(function_coverage_info) = + bx.tcx.instance_mir(instance.def).function_coverage_info.as_deref() + else { + debug!("function has a coverage statement but no coverage info"); + return; + }; + let Some(coverage_context) = bx.coverage_context() else { return }; let mut coverage_map = coverage_context.function_coverage_map.borrow_mut(); let func_coverage = coverage_map .entry(instance) - .or_insert_with(|| FunctionCoverage::new(bx.tcx(), instance)); - - let Coverage { kind, code_region } = coverage.clone(); - match kind { - CoverageKind::Counter { function_source_hash, id } => { - debug!( - "ensuring function source hash is set for instance={:?}; function_source_hash={}", - instance, function_source_hash, - ); - func_coverage.set_function_source_hash(function_source_hash); + .or_insert_with(|| FunctionCoverageCollector::new(instance, function_coverage_info)); - if let Some(code_region) = code_region { - // Note: Some counters do not have code regions, but may still be referenced - // from expressions. In that case, don't add the counter to the coverage map, - // but do inject the counter intrinsic. 
- debug!( - "adding counter to coverage_map: instance={:?}, id={:?}, region={:?}", - instance, id, code_region, - ); - func_coverage.add_counter(id, code_region); - } + let Coverage { kind } = coverage; + match *kind { + CoverageKind::CounterIncrement { id } => { + func_coverage.mark_counter_id_seen(id); // We need to explicitly drop the `RefMut` before calling into `instrprof_increment`, // as that needs an exclusive borrow. drop(coverage_map); - let coverageinfo = bx.tcx().coverageinfo(instance.def); + // The number of counters passed to `llvm.instrprof.increment` might + // be smaller than the number originally inserted by the instrumentor, + // if some high-numbered counters were removed by MIR optimizations. + // If so, LLVM's profiler runtime will use fewer physical counters. + let num_counters = + bx.tcx().coverage_ids_info(instance.def).max_counter_id.as_u32() + 1; + assert!( + num_counters as usize <= function_coverage_info.num_counters, + "num_counters disagreement: query says {num_counters} but function info only has {}", + function_coverage_info.num_counters + ); let fn_name = bx.get_pgo_func_name_var(instance); - let hash = bx.const_u64(function_source_hash); - let num_counters = bx.const_u32(coverageinfo.num_counters); + let hash = bx.const_u64(function_coverage_info.function_source_hash); + let num_counters = bx.const_u32(num_counters); let index = bx.const_u32(id.as_u32()); debug!( "codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})", @@ -145,105 +128,13 @@ ); bx.instrprof_increment(fn_name, hash, num_counters, index); } - CoverageKind::Expression { id, lhs, op, rhs } => { - debug!( - "adding counter expression to coverage_map: instance={:?}, id={:?}, {:?} {:?} {:?}; region: {:?}", - instance, id, lhs, op, rhs, code_region, - ); - func_coverage.add_counter_expression(id, lhs, op, rhs, code_region); - } - CoverageKind::Unreachable => { - let code_region = - code_region.expect("unreachable regions always have code regions"); - debug!( - "adding unreachable code to coverage_map: instance={:?}, at {:?}", - instance, code_region, - ); - func_coverage.add_unreachable_region(code_region); + CoverageKind::ExpressionUsed { id } => { + func_coverage.mark_expression_id_seen(id); } } } } -fn declare_unused_fn<'tcx>(cx: &CodegenCx<'_, 'tcx>, def_id: DefId) -> Instance<'tcx> { - let tcx = cx.tcx; - - let instance = Instance::new( - def_id, - GenericArgs::for_item(tcx, def_id, |param, _| { - if let ty::GenericParamDefKind::Lifetime = param.kind { - tcx.lifetimes.re_erased.into() - } else { - tcx.mk_param_from_def(param) - } - }), - ); - - let llfn = cx.declare_fn( - tcx.symbol_name(instance).name, - cx.fn_abi_of_fn_ptr( - ty::Binder::dummy(tcx.mk_fn_sig( - [Ty::new_unit(tcx)], - Ty::new_unit(tcx), - false, - hir::Unsafety::Unsafe, - Abi::Rust, - )), - ty::List::empty(), - ), - None, - ); - - llvm::set_linkage(llfn, llvm::Linkage::PrivateLinkage); - llvm::set_visibility(llfn, llvm::Visibility::Default); - - assert!(cx.instances.borrow_mut().insert(instance, llfn).is_none()); - - instance -} - -fn codegen_unused_fn_and_counter<'tcx>(cx: &CodegenCx<'_, 'tcx>, instance: Instance<'tcx>) { - let llfn = cx.get_fn(instance); - let llbb = Builder::append_block(cx, llfn, "unused_function"); - let mut bx = Builder::build(cx, llbb); - let fn_name = bx.get_pgo_func_name_var(instance); - let hash = bx.const_u64(0); - let num_counters = bx.const_u32(1); - let index = bx.const_u32(u32::from(UNUSED_FUNCTION_COUNTER_ID)); - debug!( - "codegen intrinsic 
instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, - index={:?}) for unused function: {:?}", - fn_name, hash, num_counters, index, instance - ); - bx.instrprof_increment(fn_name, hash, num_counters, index); - bx.ret_void(); -} - -fn add_unused_function_coverage<'tcx>( - cx: &CodegenCx<'_, 'tcx>, - instance: Instance<'tcx>, - def_id: DefId, -) { - let tcx = cx.tcx; - - let mut function_coverage = FunctionCoverage::unused(tcx, instance); - for (index, &code_region) in tcx.covered_code_regions(def_id).iter().enumerate() { - if index == 0 { - // Insert at least one real counter so the LLVM CoverageMappingReader will find expected - // definitions. - function_coverage.add_counter(UNUSED_FUNCTION_COUNTER_ID, code_region.clone()); - } else { - function_coverage.add_unreachable_region(code_region.clone()); - } - } - - if let Some(coverage_context) = cx.coverage_context() { - coverage_context.function_coverage_map.borrow_mut().insert(instance, function_coverage); - } else { - bug!("Could not get the `coverage_context`"); - } -} - /// Calls llvm::createPGOFuncNameVar() with the given function instance's /// mangled function name. The LLVM API returns an llvm::GlobalVariable /// containing the function name, with the specific variable name and linkage diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs 2023-12-21 16:55:28.000000000 +0000 @@ -75,7 +75,10 @@ return; }; - if let Some(vars) = variables && !vars.contains(scope) && scope_data.inlined.is_none() { + if let Some(vars) = variables + && !vars.contains(scope) + && scope_data.inlined.is_none() + { // Do not create a DIScope if there are no variables defined in this // MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat. debug_context.scopes[scope] = parent_scope; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs 2023-12-21 16:55:28.000000000 +0000 @@ -12,7 +12,7 @@ ty::{ self, layout::{LayoutOf, TyAndLayout}, - AdtDef, GeneratorArgs, Ty, + AdtDef, CoroutineArgs, Ty, }, }; use rustc_target::abi::{Align, Endian, Size, TagEncoding, VariantIdx, Variants}; @@ -268,18 +268,18 @@ ) } -/// A generator debuginfo node looks the same as a that of an enum type. +/// A coroutine debuginfo node looks the same as a that of an enum type. /// /// See [build_enum_type_di_node] for more information. 
-pub(super) fn build_generator_di_node<'ll, 'tcx>( +pub(super) fn build_coroutine_di_node<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, unique_type_id: UniqueTypeId<'tcx>, ) -> DINodeCreationResult<'ll> { - let generator_type = unique_type_id.expect_ty(); - let generator_type_and_layout = cx.layout_of(generator_type); - let generator_type_name = compute_debuginfo_type_name(cx.tcx, generator_type, false); + let coroutine_type = unique_type_id.expect_ty(); + let coroutine_type_and_layout = cx.layout_of(coroutine_type); + let coroutine_type_name = compute_debuginfo_type_name(cx.tcx, coroutine_type, false); - debug_assert!(!wants_c_like_enum_debuginfo(generator_type_and_layout)); + debug_assert!(!wants_c_like_enum_debuginfo(coroutine_type_and_layout)); type_map::build_type_with_children( cx, @@ -287,24 +287,24 @@ cx, type_map::Stub::Union, unique_type_id, - &generator_type_name, - size_and_align_of(generator_type_and_layout), + &coroutine_type_name, + size_and_align_of(coroutine_type_and_layout), NO_SCOPE_METADATA, DIFlags::FlagZero, ), - |cx, generator_type_di_node| match generator_type_and_layout.variants { + |cx, coroutine_type_di_node| match coroutine_type_and_layout.variants { Variants::Multiple { tag_encoding: TagEncoding::Direct, .. } => { - build_union_fields_for_direct_tag_generator( + build_union_fields_for_direct_tag_coroutine( cx, - generator_type_and_layout, - generator_type_di_node, + coroutine_type_and_layout, + coroutine_type_di_node, ) } Variants::Single { .. } | Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, .. } => { bug!( - "Encountered generator with non-direct-tag layout: {:?}", - generator_type_and_layout + "Encountered coroutine with non-direct-tag layout: {:?}", + coroutine_type_and_layout ) } }, @@ -428,7 +428,7 @@ }) .collect(); - build_union_fields_for_direct_tag_enum_or_generator( + build_union_fields_for_direct_tag_enum_or_coroutine( cx, enum_type_and_layout, enum_type_di_node, @@ -469,8 +469,8 @@ fn build_variant_struct_wrapper_type_di_node<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, - enum_or_generator_type_and_layout: TyAndLayout<'tcx>, - enum_or_generator_type_di_node: &'ll DIType, + enum_or_coroutine_type_and_layout: TyAndLayout<'tcx>, + enum_or_coroutine_type_di_node: &'ll DIType, variant_index: VariantIdx, untagged_variant_index: Option, variant_struct_type_di_node: &'ll DIType, @@ -486,13 +486,13 @@ Stub::Struct, UniqueTypeId::for_enum_variant_struct_type_wrapper( cx.tcx, - enum_or_generator_type_and_layout.ty, + enum_or_coroutine_type_and_layout.ty, variant_index, ), &variant_struct_wrapper_type_name(variant_index), // NOTE: We use size and align of enum_type, not from variant_layout: - size_and_align_of(enum_or_generator_type_and_layout), - Some(enum_or_generator_type_di_node), + size_and_align_of(enum_or_coroutine_type_and_layout), + Some(enum_or_coroutine_type_di_node), DIFlags::FlagZero, ), |cx, wrapper_struct_type_di_node| { @@ -535,7 +535,7 @@ cx, wrapper_struct_type_di_node, "value", - size_and_align_of(enum_or_generator_type_and_layout), + size_and_align_of(enum_or_coroutine_type_and_layout), Size::ZERO, DIFlags::FlagZero, variant_struct_type_di_node, @@ -662,40 +662,40 @@ Split128 { hi: (value >> 64) as u64, lo: value as u64 } } -fn build_union_fields_for_direct_tag_generator<'ll, 'tcx>( +fn build_union_fields_for_direct_tag_coroutine<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, - generator_type_and_layout: TyAndLayout<'tcx>, - generator_type_di_node: &'ll DIType, + coroutine_type_and_layout: TyAndLayout<'tcx>, + coroutine_type_di_node: &'ll DIType, 
) -> SmallVec<&'ll DIType> { let Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. } = - generator_type_and_layout.variants + coroutine_type_and_layout.variants else { bug!("This function only supports layouts with directly encoded tags.") }; - let (generator_def_id, generator_args) = match generator_type_and_layout.ty.kind() { - &ty::Generator(def_id, args, _) => (def_id, args.as_generator()), + let (coroutine_def_id, coroutine_args) = match coroutine_type_and_layout.ty.kind() { + &ty::Coroutine(def_id, args, _) => (def_id, args.as_coroutine()), _ => unreachable!(), }; - let generator_layout = cx.tcx.optimized_mir(generator_def_id).generator_layout().unwrap(); + let coroutine_layout = cx.tcx.optimized_mir(coroutine_def_id).coroutine_layout().unwrap(); - let common_upvar_names = cx.tcx.closure_saved_names_of_captured_variables(generator_def_id); - let variant_range = generator_args.variant_range(generator_def_id, cx.tcx); + let common_upvar_names = cx.tcx.closure_saved_names_of_captured_variables(coroutine_def_id); + let variant_range = coroutine_args.variant_range(coroutine_def_id, cx.tcx); let variant_count = (variant_range.start.as_u32()..variant_range.end.as_u32()).len(); - let tag_base_type = tag_base_type(cx, generator_type_and_layout); + let tag_base_type = tag_base_type(cx, coroutine_type_and_layout); let variant_names_type_di_node = build_variant_names_type_di_node( cx, - generator_type_di_node, + coroutine_type_di_node, variant_range .clone() - .map(|variant_index| (variant_index, GeneratorArgs::variant_name(variant_index))), + .map(|variant_index| (variant_index, CoroutineArgs::variant_name(variant_index))), ); let discriminants: IndexVec<VariantIdx, DiscrResult> = { - let discriminants_iter = generator_args.discriminants(generator_def_id, cx.tcx); + let discriminants_iter = coroutine_args.discriminants(coroutine_def_id, cx.tcx); let mut discriminants: IndexVec<VariantIdx, DiscrResult> = IndexVec::with_capacity(variant_count); for (variant_index, discr) in discriminants_iter { @@ -709,16 +709,16 @@ // Build the type node for each field. let variant_field_infos: SmallVec<VariantFieldInfo<'ll>> = variant_range .map(|variant_index| { - let variant_struct_type_di_node = super::build_generator_variant_struct_type_di_node( + let variant_struct_type_di_node = super::build_coroutine_variant_struct_type_di_node( cx, variant_index, - generator_type_and_layout, - generator_type_di_node, - generator_layout, + coroutine_type_and_layout, + coroutine_type_di_node, + coroutine_layout, &common_upvar_names, ); - let span = generator_layout.variant_source_info[variant_index].span; + let span = coroutine_layout.variant_source_info[variant_index].span; let source_info = if !span.is_dummy() { let loc = cx.lookup_debug_loc(span.lo()); Some((file_metadata(cx, &loc.file), loc.line as c_uint)) @@ -735,10 +735,10 @@ }) .collect(); - build_union_fields_for_direct_tag_enum_or_generator( + build_union_fields_for_direct_tag_enum_or_coroutine( cx, - generator_type_and_layout, - generator_type_di_node, + coroutine_type_and_layout, + coroutine_type_di_node, &variant_field_infos[..], variant_names_type_di_node, tag_base_type, @@ -747,9 +747,9 @@ ) } -/// This is a helper function shared between enums and generators that makes sure fields have the +/// This is a helper function shared between enums and coroutines that makes sure fields have the /// expect names.
-fn build_union_fields_for_direct_tag_enum_or_generator<'ll, 'tcx>( +fn build_union_fields_for_direct_tag_enum_or_coroutine<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, enum_type_and_layout: TyAndLayout<'tcx>, enum_type_di_node: &'ll DIType, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,11 +6,11 @@ use rustc_index::IndexSlice; use rustc_middle::{ bug, - mir::GeneratorLayout, + mir::CoroutineLayout, ty::{ self, layout::{IntegerExt, LayoutOf, PrimitiveExt, TyAndLayout}, - AdtDef, GeneratorArgs, Ty, VariantDef, + AdtDef, CoroutineArgs, Ty, VariantDef, }, }; use rustc_span::Symbol; @@ -66,14 +66,14 @@ } } -pub(super) fn build_generator_di_node<'ll, 'tcx>( +pub(super) fn build_coroutine_di_node<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, unique_type_id: UniqueTypeId<'tcx>, ) -> DINodeCreationResult<'ll> { if cpp_like_debuginfo(cx.tcx) { - cpp_like::build_generator_di_node(cx, unique_type_id) + cpp_like::build_coroutine_di_node(cx, unique_type_id) } else { - native::build_generator_di_node(cx, unique_type_id) + native::build_coroutine_di_node(cx, unique_type_id) } } @@ -101,13 +101,13 @@ } } -/// Extract the type with which we want to describe the tag of the given enum or generator. +/// Extract the type with which we want to describe the tag of the given enum or coroutine. fn tag_base_type<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, enum_type_and_layout: TyAndLayout<'tcx>, ) -> Ty<'tcx> { debug_assert!(match enum_type_and_layout.ty.kind() { - ty::Generator(..) => true, + ty::Coroutine(..) => true, ty::Adt(adt_def, _) => adt_def.is_enum(), _ => false, }); @@ -300,8 +300,8 @@ .di_node } -/// Build the struct type for describing a single generator state. -/// See [build_generator_variant_struct_type_di_node]. +/// Build the struct type for describing a single coroutine state. +/// See [build_coroutine_variant_struct_type_di_node]. 
/// /// ```txt /// @@ -317,25 +317,25 @@ /// ---> DW_TAG_structure_type (type of variant 3) /// /// ``` -pub fn build_generator_variant_struct_type_di_node<'ll, 'tcx>( +pub fn build_coroutine_variant_struct_type_di_node<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, variant_index: VariantIdx, - generator_type_and_layout: TyAndLayout<'tcx>, - generator_type_di_node: &'ll DIType, - generator_layout: &GeneratorLayout<'tcx>, + coroutine_type_and_layout: TyAndLayout<'tcx>, + coroutine_type_di_node: &'ll DIType, + coroutine_layout: &CoroutineLayout<'tcx>, common_upvar_names: &IndexSlice, ) -> &'ll DIType { - let variant_name = GeneratorArgs::variant_name(variant_index); + let variant_name = CoroutineArgs::variant_name(variant_index); let unique_type_id = UniqueTypeId::for_enum_variant_struct_type( cx.tcx, - generator_type_and_layout.ty, + coroutine_type_and_layout.ty, variant_index, ); - let variant_layout = generator_type_and_layout.for_variant(cx, variant_index); + let variant_layout = coroutine_type_and_layout.for_variant(cx, variant_index); - let generator_args = match generator_type_and_layout.ty.kind() { - ty::Generator(_, args, _) => args.as_generator(), + let coroutine_args = match coroutine_type_and_layout.ty.kind() { + ty::Coroutine(_, args, _) => args.as_coroutine(), _ => unreachable!(), }; @@ -346,17 +346,17 @@ Stub::Struct, unique_type_id, &variant_name, - size_and_align_of(generator_type_and_layout), - Some(generator_type_di_node), + size_and_align_of(coroutine_type_and_layout), + Some(coroutine_type_di_node), DIFlags::FlagZero, ), |cx, variant_struct_type_di_node| { // Fields that just belong to this variant/state let state_specific_fields: SmallVec<_> = (0..variant_layout.fields.count()) .map(|field_index| { - let generator_saved_local = generator_layout.variant_fields[variant_index] + let coroutine_saved_local = coroutine_layout.variant_fields[variant_index] [FieldIdx::from_usize(field_index)]; - let field_name_maybe = generator_layout.field_names[generator_saved_local]; + let field_name_maybe = coroutine_layout.field_names[coroutine_saved_local]; let field_name = field_name_maybe .as_ref() .map(|s| Cow::from(s.as_str())) @@ -377,7 +377,7 @@ .collect(); // Fields that are common to all states - let common_fields: SmallVec<_> = generator_args + let common_fields: SmallVec<_> = coroutine_args .prefix_tys() .iter() .zip(common_upvar_names) @@ -388,7 +388,7 @@ variant_struct_type_di_node, upvar_name.as_str(), cx.size_and_align_of(upvar_ty), - generator_type_and_layout.fields.offset(index), + coroutine_type_and_layout.fields.offset(index), DIFlags::FlagZero, type_di_node(cx, upvar_ty), ) @@ -397,7 +397,7 @@ state_specific_fields.into_iter().chain(common_fields.into_iter()).collect() }, - |cx| build_generic_type_param_di_nodes(cx, generator_type_and_layout.ty), + |cx| build_generic_type_param_di_nodes(cx, coroutine_type_and_layout.ty), ) .di_node } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs 2023-12-21 16:55:28.000000000 +0000 @@ -110,12 +110,12 @@ ) } -/// Build the debuginfo node for a generator environment. 
It looks the same as the debuginfo for +/// Build the debuginfo node for a coroutine environment. It looks the same as the debuginfo for /// an enum. See [build_enum_type_di_node] for more information. /// /// ```txt /// -/// ---> DW_TAG_structure_type (top-level type for the generator) +/// ---> DW_TAG_structure_type (top-level type for the coroutine) /// DW_TAG_variant_part (variant part) /// DW_AT_discr (reference to discriminant DW_TAG_member) /// DW_TAG_member (discriminant member) @@ -127,21 +127,21 @@ /// DW_TAG_structure_type (type of variant 3) /// /// ``` -pub(super) fn build_generator_di_node<'ll, 'tcx>( +pub(super) fn build_coroutine_di_node<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, unique_type_id: UniqueTypeId<'tcx>, ) -> DINodeCreationResult<'ll> { - let generator_type = unique_type_id.expect_ty(); - let &ty::Generator(generator_def_id, _, _) = generator_type.kind() else { - bug!("build_generator_di_node() called with non-generator type: `{:?}`", generator_type) + let coroutine_type = unique_type_id.expect_ty(); + let &ty::Coroutine(coroutine_def_id, _, _) = coroutine_type.kind() else { + bug!("build_coroutine_di_node() called with non-coroutine type: `{:?}`", coroutine_type) }; - let containing_scope = get_namespace_for_item(cx, generator_def_id); - let generator_type_and_layout = cx.layout_of(generator_type); + let containing_scope = get_namespace_for_item(cx, coroutine_def_id); + let coroutine_type_and_layout = cx.layout_of(coroutine_type); - debug_assert!(!wants_c_like_enum_debuginfo(generator_type_and_layout)); + debug_assert!(!wants_c_like_enum_debuginfo(coroutine_type_and_layout)); - let generator_type_name = compute_debuginfo_type_name(cx.tcx, generator_type, false); + let coroutine_type_name = compute_debuginfo_type_name(cx.tcx, coroutine_type, false); type_map::build_type_with_children( cx, @@ -149,37 +149,37 @@ cx, Stub::Struct, unique_type_id, - &generator_type_name, - size_and_align_of(generator_type_and_layout), + &coroutine_type_name, + size_and_align_of(coroutine_type_and_layout), Some(containing_scope), DIFlags::FlagZero, ), - |cx, generator_type_di_node| { - let generator_layout = - cx.tcx.optimized_mir(generator_def_id).generator_layout().unwrap(); + |cx, coroutine_type_di_node| { + let coroutine_layout = + cx.tcx.optimized_mir(coroutine_def_id).coroutine_layout().unwrap(); let Variants::Multiple { tag_encoding: TagEncoding::Direct, ref variants, .. } = - generator_type_and_layout.variants + coroutine_type_and_layout.variants else { bug!( - "Encountered generator with non-direct-tag layout: {:?}", - generator_type_and_layout + "Encountered coroutine with non-direct-tag layout: {:?}", + coroutine_type_and_layout ) }; let common_upvar_names = - cx.tcx.closure_saved_names_of_captured_variables(generator_def_id); + cx.tcx.closure_saved_names_of_captured_variables(coroutine_def_id); // Build variant struct types let variant_struct_type_di_nodes: SmallVec<_> = variants .indices() .map(|variant_index| { // FIXME: This is problematic because just a number is not a valid identifier. - // GeneratorArgs::variant_name(variant_index), would be consistent + // CoroutineArgs::variant_name(variant_index), would be consistent // with enums? 
let variant_name = format!("{}", variant_index.as_usize()).into(); - let span = generator_layout.variant_source_info[variant_index].span; + let span = coroutine_layout.variant_source_info[variant_index].span; let source_info = if !span.is_dummy() { let loc = cx.lookup_debug_loc(span.lo()); Some((file_metadata(cx, &loc.file), loc.line)) @@ -191,12 +191,12 @@ variant_index, variant_name, variant_struct_type_di_node: - super::build_generator_variant_struct_type_di_node( + super::build_coroutine_variant_struct_type_di_node( cx, variant_index, - generator_type_and_layout, - generator_type_di_node, - generator_layout, + coroutine_type_and_layout, + coroutine_type_di_node, + coroutine_layout, &common_upvar_names, ), source_info, @@ -206,18 +206,18 @@ smallvec![build_enum_variant_part_di_node( cx, - generator_type_and_layout, - generator_type_di_node, + coroutine_type_and_layout, + coroutine_type_di_node, &variant_struct_type_di_nodes[..], )] }, - // We don't seem to be emitting generic args on the generator type, it seems. Rather + // We don't seem to be emitting generic args on the coroutine type, it seems. Rather // they get attached to the struct type of each variant. NO_GENERICS, ) } -/// Builds the DW_TAG_variant_part of an enum or generator debuginfo node: +/// Builds the DW_TAG_variant_part of an enum or coroutine debuginfo node: /// /// ```txt /// DW_TAG_structure_type (top-level type for enum) @@ -306,11 +306,11 @@ /// ``` fn build_discr_member_di_node<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, - enum_or_generator_type_and_layout: TyAndLayout<'tcx>, - enum_or_generator_type_di_node: &'ll DIType, + enum_or_coroutine_type_and_layout: TyAndLayout<'tcx>, + enum_or_coroutine_type_di_node: &'ll DIType, ) -> Option<&'ll DIType> { - let tag_name = match enum_or_generator_type_and_layout.ty.kind() { - ty::Generator(..) => "__state", + let tag_name = match enum_or_coroutine_type_and_layout.ty.kind() { + ty::Coroutine(..) => "__state", _ => "", }; @@ -320,14 +320,14 @@ // In LLVM IR the wrong scope will be listed but when DWARF is // generated from it, the DW_TAG_member will be a child the // DW_TAG_variant_part. - let containing_scope = enum_or_generator_type_di_node; + let containing_scope = enum_or_coroutine_type_di_node; - match enum_or_generator_type_and_layout.layout.variants() { + match enum_or_coroutine_type_and_layout.layout.variants() { // A single-variant enum has no discriminant. &Variants::Single { .. } => None, &Variants::Multiple { tag_field, .. } => { - let tag_base_type = tag_base_type(cx, enum_or_generator_type_and_layout); + let tag_base_type = tag_base_type(cx, enum_or_coroutine_type_and_layout); let (size, align) = cx.size_and_align_of(tag_base_type); unsafe { @@ -340,7 +340,7 @@ UNKNOWN_LINE_NUMBER, size.bits(), align.bits() as u32, - enum_or_generator_type_and_layout.fields.offset(tag_field).bits(), + enum_or_coroutine_type_and_layout.fields.offset(tag_field).bits(), DIFlags::FlagArtificial, type_di_node(cx, tag_base_type), )) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata/type_map.rs 2023-12-21 16:55:28.000000000 +0000 @@ -43,7 +43,7 @@ /// The ID of a regular type as it shows up at the language level. 
Ty(Ty<'tcx>, private::HiddenZst), /// The ID for the single DW_TAG_variant_part nested inside the top-level - /// DW_TAG_structure_type that describes enums and generators. + /// DW_TAG_structure_type that describes enums and coroutines. VariantPart(Ty<'tcx>, private::HiddenZst), /// The ID for the artificial struct type describing a single enum variant. VariantStructType(Ty<'tcx>, VariantIdx, private::HiddenZst), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs 2023-12-21 16:55:28.000000000 +0000 @@ -335,12 +335,20 @@ // This is actually a function pointer, so wrap it in pointer DI. let name = compute_debuginfo_type_name(cx.tcx, fn_ty, false); + let (size, align) = match fn_ty.kind() { + ty::FnDef(..) => (0, 1), + ty::FnPtr(..) => ( + cx.tcx.data_layout.pointer_size.bits(), + cx.tcx.data_layout.pointer_align.abi.bits() as u32, + ), + _ => unreachable!(), + }; let di_node = unsafe { llvm::LLVMRustDIBuilderCreatePointerType( DIB(cx), fn_di_node, - cx.tcx.data_layout.pointer_size.bits(), - cx.tcx.data_layout.pointer_align.abi.bits() as u32, + size, + align, 0, // Ignore DWARF address space. name.as_ptr().cast(), name.len(), @@ -452,7 +460,7 @@ } ty::FnDef(..) | ty::FnPtr(_) => build_subroutine_type_di_node(cx, unique_type_id), ty::Closure(..) => build_closure_env_di_node(cx, unique_type_id), - ty::Generator(..) => enums::build_generator_di_node(cx, unique_type_id), + ty::Coroutine(..) => enums::build_coroutine_di_node(cx, unique_type_id), ty::Adt(def, ..) => match def.adt_kind() { AdtKind::Struct => build_struct_type_di_node(cx, unique_type_id), AdtKind::Union => build_union_type_di_node(cx, unique_type_id), @@ -539,48 +547,77 @@ ) -> &'ll DIFile { debug!(?source_file.name); + use rustc_session::RemapFileNameExt; let (directory, file_name) = match &source_file.name { FileName::Real(filename) => { let working_directory = &cx.sess().opts.working_dir; debug!(?working_directory); - let filename = cx - .sess() - .source_map() - .path_mapping() - .to_embeddable_absolute_path(filename.clone(), working_directory); - - // Construct the absolute path of the file - let abs_path = filename.remapped_path_if_available(); - debug!(?abs_path); - - if let Ok(rel_path) = - abs_path.strip_prefix(working_directory.remapped_path_if_available()) - { - // If the compiler's working directory (which also is the DW_AT_comp_dir of - // the compilation unit) is a prefix of the path we are about to emit, then - // only emit the part relative to the working directory. - // Because of path remapping we sometimes see strange things here: `abs_path` - // might actually look like a relative path - // (e.g. `/src/lib.rs`), so if we emit it without - // taking the working directory into account, downstream tooling will - // interpret it as `//src/lib.rs`, - // which makes no sense. Usually in such cases the working directory will also - // be remapped to `` or some other prefix of the path - // we are remapping, so we end up with - // `//src/lib.rs`. - // By moving the working directory portion into the `directory` part of the - // DIFile, we allow LLVM to emit just the relative path for DWARF, while - // still emitting the correct absolute path for CodeView. 
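// A minimal, self-contained sketch of the splitting described in the comment above (paths
// and the helper name are invented for the example, this is not the compiler's code): when
// the working directory is a prefix of the absolute source path, emit it as the DIFile
// directory and keep only the relative part as the file name; otherwise fall back to the
// full path with an empty directory.
use std::path::Path;

fn split_for_difile(working_dir: &Path, abs_path: &Path) -> (String, String) {
    match abs_path.strip_prefix(working_dir) {
        Ok(rel_path) => (working_dir.display().to_string(), rel_path.display().to_string()),
        Err(_) => (String::new(), abs_path.display().to_string()),
    }
}

fn main() {
    let (dir, file) =
        split_for_difile(Path::new("/build/mycrate"), Path::new("/build/mycrate/src/lib.rs"));
    assert_eq!((dir.as_str(), file.as_str()), ("/build/mycrate", "src/lib.rs"));
    println!("DW_AT_comp_dir = {dir}, DIFile name = {file}");
}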
- ( - working_directory.to_string_lossy(FileNameDisplayPreference::Remapped), - rel_path.to_string_lossy().into_owned(), - ) + if cx.sess().should_prefer_remapped_for_codegen() { + let filename = cx + .sess() + .source_map() + .path_mapping() + .to_embeddable_absolute_path(filename.clone(), working_directory); + + // Construct the absolute path of the file + let abs_path = filename.remapped_path_if_available(); + debug!(?abs_path); + + if let Ok(rel_path) = + abs_path.strip_prefix(working_directory.remapped_path_if_available()) + { + // If the compiler's working directory (which also is the DW_AT_comp_dir of + // the compilation unit) is a prefix of the path we are about to emit, then + // only emit the part relative to the working directory. + // Because of path remapping we sometimes see strange things here: `abs_path` + // might actually look like a relative path + // (e.g. `/src/lib.rs`), so if we emit it without + // taking the working directory into account, downstream tooling will + // interpret it as `//src/lib.rs`, + // which makes no sense. Usually in such cases the working directory will also + // be remapped to `` or some other prefix of the path + // we are remapping, so we end up with + // `//src/lib.rs`. + // By moving the working directory portion into the `directory` part of the + // DIFile, we allow LLVM to emit just the relative path for DWARF, while + // still emitting the correct absolute path for CodeView. + ( + working_directory.to_string_lossy(FileNameDisplayPreference::Remapped), + rel_path.to_string_lossy().into_owned(), + ) + } else { + ("".into(), abs_path.to_string_lossy().into_owned()) + } } else { - ("".into(), abs_path.to_string_lossy().into_owned()) + let working_directory = working_directory.local_path_if_available(); + let filename = filename.local_path_if_available(); + + debug!(?working_directory, ?filename); + + let abs_path: Cow<'_, Path> = if filename.is_absolute() { + filename.into() + } else { + let mut p = PathBuf::new(); + p.push(working_directory); + p.push(filename); + p.into() + }; + + if let Ok(rel_path) = abs_path.strip_prefix(working_directory) { + ( + working_directory.to_string_lossy().into(), + rel_path.to_string_lossy().into_owned(), + ) + } else { + ("".into(), abs_path.to_string_lossy().into_owned()) + } } } - other => ("".into(), other.prefer_remapped().to_string_lossy().into_owned()), + other => { + debug!(?other); + ("".into(), other.for_codegen(cx.sess()).to_string_lossy().into_owned()) + } }; let hash_kind = match source_file.src_hash.kind { @@ -814,8 +851,9 @@ // FIXME(#41252) Remove "clang LLVM" if we can get GDB and LLVM to play nice. 
let producer = format!("clang LLVM ({rustc_producer})"); + use rustc_session::RemapFileNameExt; let name_in_debuginfo = name_in_debuginfo.to_string_lossy(); - let work_dir = tcx.sess.opts.working_dir.to_string_lossy(FileNameDisplayPreference::Remapped); + let work_dir = tcx.sess.opts.working_dir.for_codegen(&tcx.sess).to_string_lossy(); let flags = "\0"; let output_filenames = tcx.output_filenames(()); let split_name = if tcx.sess.target_can_use_split_dwarf() { @@ -826,7 +864,13 @@ Some(codegen_unit_name), ) // We get a path relative to the working directory from split_dwarf_path - .map(|f| tcx.sess.source_map().path_mapping().map_prefix(f).0) + .map(|f| { + if tcx.sess.should_prefer_remapped_for_split_debuginfo_paths() { + tcx.sess.source_map().path_mapping().map_prefix(f).0 + } else { + f.into() + } + }) } else { None } @@ -982,20 +1026,20 @@ // Tuples //=----------------------------------------------------------------------------- -/// Builds the DW_TAG_member debuginfo nodes for the upvars of a closure or generator. -/// For a generator, this will handle upvars shared by all states. +/// Builds the DW_TAG_member debuginfo nodes for the upvars of a closure or coroutine. +/// For a coroutine, this will handle upvars shared by all states. fn build_upvar_field_di_nodes<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, - closure_or_generator_ty: Ty<'tcx>, - closure_or_generator_di_node: &'ll DIType, + closure_or_coroutine_ty: Ty<'tcx>, + closure_or_coroutine_di_node: &'ll DIType, ) -> SmallVec<&'ll DIType> { - let (&def_id, up_var_tys) = match closure_or_generator_ty.kind() { - ty::Generator(def_id, args, _) => (def_id, args.as_generator().prefix_tys()), + let (&def_id, up_var_tys) = match closure_or_coroutine_ty.kind() { + ty::Coroutine(def_id, args, _) => (def_id, args.as_coroutine().prefix_tys()), ty::Closure(def_id, args) => (def_id, args.as_closure().upvar_tys()), _ => { bug!( - "build_upvar_field_di_nodes() called with non-closure-or-generator-type: {:?}", - closure_or_generator_ty + "build_upvar_field_di_nodes() called with non-closure-or-coroutine-type: {:?}", + closure_or_coroutine_ty ) } }; @@ -1005,7 +1049,7 @@ ); let capture_names = cx.tcx.closure_saved_names_of_captured_variables(def_id); - let layout = cx.layout_of(closure_or_generator_ty); + let layout = cx.layout_of(closure_or_coroutine_ty); up_var_tys .into_iter() @@ -1014,7 +1058,7 @@ .map(|(index, (up_var_ty, capture_name))| { build_field_di_node( cx, - closure_or_generator_di_node, + closure_or_coroutine_di_node, capture_name.as_str(), cx.size_and_align_of(up_var_ty), layout.fields.offset(index), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -50,7 +50,6 @@ pub use self::create_scope_map::compute_mir_scopes; pub use self::metadata::build_global_var_di_node; -pub use self::metadata::extend_scope_to_file; #[allow(non_upper_case_globals)] const DW_TAG_auto_variable: c_uint = 0x100; @@ -342,7 +341,7 @@ // We look up the generics of the enclosing function and truncate the args // to their length in order to cut off extra stuff that might be in there for - // closures or generators. + // closures or coroutines. 
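// Rough sketch (toy data, not rustc's types) of the truncation described above: the
// substitution list of a closure or coroutine instance starts with the generic arguments of
// its enclosing function and then carries extra synthetic arguments, so cutting the list to
// the parent's parameter count leaves exactly the caller-visible generics.
fn truncate_to_parent<'a>(instance_args: &'a [&'a str], parent_param_count: usize) -> &'a [&'a str] {
    &instance_args[..parent_param_count.min(instance_args.len())]
}

fn main() {
    // E.g. `fn foo<T, U>` containing a closure: the closure's args append closure-specific parts.
    let closure_args = ["T=i32", "U=String", "closure-kind", "closure-signature", "upvars"];
    assert_eq!(truncate_to_parent(&closure_args, 2), &["T=i32", "U=String"][..]);
    println!("{:?}", truncate_to_parent(&closure_args, 2));
}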
let generics = tcx.generics_of(enclosing_fn_def_id); let args = instance.args.truncate_to(tcx, generics); @@ -537,7 +536,9 @@ // Only "class" methods are generally understood by LLVM, // so avoid methods on other types (e.g., `<*mut T>::null`). - if let ty::Adt(def, ..) = impl_self_ty.kind() && !def.is_box() { + if let ty::Adt(def, ..) = impl_self_ty.kind() + && !def.is_box() + { // Again, only create type information if full debuginfo is enabled if cx.sess().opts.debuginfo == DebugInfo::Full && !impl_self_ty.has_param() { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -26,6 +26,13 @@ pub rust_feature: PossibleFeature<'a>, } +#[derive(Diagnostic)] +#[diag(codegen_llvm_unstable_ctarget_feature)] +#[note] +pub(crate) struct UnstableCTargetFeature<'a> { + pub feature: &'a str, +} + #[derive(Subdiagnostic)] pub(crate) enum PossibleFeature<'a> { #[help(codegen_llvm_possible_feature)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/intrinsic.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/intrinsic.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/intrinsic.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/intrinsic.rs 2023-12-21 16:55:28.000000000 +0000 @@ -935,9 +935,10 @@ } macro_rules! require_simd { - ($ty: expr, $diag: expr) => { - require!($ty.is_simd(), $diag) - }; + ($ty: expr, $variant:ident) => {{ + require!($ty.is_simd(), InvalidMonomorphization::$variant { span, name, ty: $ty }); + $ty.simd_size_and_type(bx.tcx()) + }}; } let tcx = bx.tcx(); @@ -946,12 +947,7 @@ let arg_tys = sig.inputs(); if name == sym::simd_select_bitmask { - require_simd!( - arg_tys[1], - InvalidMonomorphization::SimdArgument { span, name, ty: arg_tys[1] } - ); - - let (len, _) = arg_tys[1].simd_size_and_type(bx.tcx()); + let (len, _) = require_simd!(arg_tys[1], SimdArgument); let expected_int_bits = (len.max(8) - 1).next_power_of_two(); let expected_bytes = len / 8 + ((len % 8 > 0) as u64); @@ -988,7 +984,7 @@ } // every intrinsic below takes a SIMD vector as its first argument - require_simd!(arg_tys[0], InvalidMonomorphization::SimdInput { span, name, ty: arg_tys[0] }); + let (in_len, in_elem) = require_simd!(arg_tys[0], SimdInput); let in_ty = arg_tys[0]; let comparison = match name { @@ -1001,11 +997,8 @@ _ => None, }; - let (in_len, in_elem) = arg_tys[0].simd_size_and_type(bx.tcx()); if let Some(cmp_op) = comparison { - require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty }); - - let (out_len, out_ty) = ret_ty.simd_size_and_type(bx.tcx()); + let (out_len, out_ty) = require_simd!(ret_ty, SimdReturn); require!( in_len == out_len, @@ -1041,8 +1034,7 @@ .unwrap_branch(); let n = idx.len() as u64; - require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty }); - let (out_len, out_ty) = ret_ty.simd_size_and_type(bx.tcx()); + let (out_len, out_ty) = require_simd!(ret_ty, SimdReturn); require!( out_len == n, InvalidMonomorphization::ReturnLength { span, name, in_len: n, ret_ty, out_len } @@ -1099,8 +1091,7 @@ }), }; - require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty }); 
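// Hedged sketch of the macro pattern adopted in the hunk above (the names here are invented,
// not rustc's): a check macro that expands to a block expression, so a single invocation both
// performs the validation (with an early return on failure) and yields the data that callers
// previously had to look up with a second call.
macro_rules! require_even {
    ($n:expr) => {{
        if $n % 2 != 0 {
            return Err(format!("{} is not even", $n));
        }
        $n / 2 // value of the block, handed back to the caller of the macro
    }};
}

fn half_of_even(n: u32) -> Result<u32, String> {
    let half = require_even!(n);
    Ok(half)
}

fn main() {
    assert_eq!(half_of_even(10), Ok(5));
    assert!(half_of_even(7).is_err());
    println!("ok");
}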
- let (out_len, out_ty) = ret_ty.simd_size_and_type(bx.tcx()); + let (out_len, out_ty) = require_simd!(ret_ty, SimdReturn); require!( out_len == n, InvalidMonomorphization::ReturnLength { span, name, in_len: n, ret_ty, out_len } @@ -1179,11 +1170,7 @@ if name == sym::simd_select { let m_elem_ty = in_elem; let m_len = in_len; - require_simd!( - arg_tys[1], - InvalidMonomorphization::SimdArgument { span, name, ty: arg_tys[1] } - ); - let (v_len, _) = arg_tys[1].simd_size_and_type(bx.tcx()); + let (v_len, _) = require_simd!(arg_tys[1], SimdArgument); require!( m_len == v_len, InvalidMonomorphization::MismatchedLengths { span, name, m_len, v_len } @@ -1401,20 +1388,16 @@ // * M: any integer width is supported, will be truncated to i1 // All types must be simd vector types - require_simd!(in_ty, InvalidMonomorphization::SimdFirst { span, name, ty: in_ty }); - require_simd!( - arg_tys[1], - InvalidMonomorphization::SimdSecond { span, name, ty: arg_tys[1] } - ); - require_simd!( - arg_tys[2], - InvalidMonomorphization::SimdThird { span, name, ty: arg_tys[2] } - ); - require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty }); + + // The second argument must be a simd vector with an element type that's a pointer + // to the element type of the first argument + let (_, element_ty0) = require_simd!(in_ty, SimdFirst); + let (out_len, element_ty1) = require_simd!(arg_tys[1], SimdSecond); + // The element type of the third argument must be a signed integer type of any width: + let (out_len2, element_ty2) = require_simd!(arg_tys[2], SimdThird); + require_simd!(ret_ty, SimdReturn); // Of the same length: - let (out_len, _) = arg_tys[1].simd_size_and_type(bx.tcx()); - let (out_len2, _) = arg_tys[2].simd_size_and_type(bx.tcx()); require!( in_len == out_len, InvalidMonomorphization::SecondArgumentLength { @@ -1444,11 +1427,6 @@ InvalidMonomorphization::ExpectedReturnType { span, name, in_ty, ret_ty } ); - // The second argument must be a simd vector with an element type that's a pointer - // to the element type of the first argument - let (_, element_ty0) = arg_tys[0].simd_size_and_type(bx.tcx()); - let (_, element_ty1) = arg_tys[1].simd_size_and_type(bx.tcx()); - require!( matches!( element_ty1.kind(), @@ -1465,20 +1443,15 @@ } ); - // The element type of the third argument must be a signed integer type of any width: - let (_, element_ty2) = arg_tys[2].simd_size_and_type(bx.tcx()); match element_ty2.kind() { ty::Int(_) => (), _ => { - require!( - false, - InvalidMonomorphization::ThirdArgElementType { - span, - name, - expected_element: element_ty2, - third_arg: arg_tys[2] - } - ); + return_error!(InvalidMonomorphization::ThirdArgElementType { + span, + name, + expected_element: element_ty2, + third_arg: arg_tys[2] + }); } } @@ -1527,19 +1500,13 @@ // * M: any integer width is supported, will be truncated to i1 // All types must be simd vector types - require_simd!(in_ty, InvalidMonomorphization::SimdFirst { span, name, ty: in_ty }); - require_simd!( - arg_tys[1], - InvalidMonomorphization::SimdSecond { span, name, ty: arg_tys[1] } - ); - require_simd!( - arg_tys[2], - InvalidMonomorphization::SimdThird { span, name, ty: arg_tys[2] } - ); + // The second argument must be a simd vector with an element type that's a pointer + // to the element type of the first argument + let (_, element_ty0) = require_simd!(in_ty, SimdFirst); + let (element_len1, element_ty1) = require_simd!(arg_tys[1], SimdSecond); + let (element_len2, element_ty2) = require_simd!(arg_tys[2], SimdThird); // Of the 
same length: - let (element_len1, _) = arg_tys[1].simd_size_and_type(bx.tcx()); - let (element_len2, _) = arg_tys[2].simd_size_and_type(bx.tcx()); require!( in_len == element_len1, InvalidMonomorphization::SecondArgumentLength { @@ -1563,12 +1530,6 @@ } ); - // The second argument must be a simd vector with an element type that's a pointer - // to the element type of the first argument - let (_, element_ty0) = arg_tys[0].simd_size_and_type(bx.tcx()); - let (_, element_ty1) = arg_tys[1].simd_size_and_type(bx.tcx()); - let (_, element_ty2) = arg_tys[2].simd_size_and_type(bx.tcx()); - require!( matches!( element_ty1.kind(), @@ -1590,15 +1551,12 @@ match element_ty2.kind() { ty::Int(_) => (), _ => { - require!( - false, - InvalidMonomorphization::ThirdArgElementType { - span, - name, - expected_element: element_ty2, - third_arg: arg_tys[2] - } - ); + return_error!(InvalidMonomorphization::ThirdArgElementType { + span, + name, + expected_element: element_ty2, + third_arg: arg_tys[2] + }); } } @@ -1794,8 +1752,7 @@ bitwise_red!(simd_reduce_any: vector_reduce_or, true); if name == sym::simd_cast_ptr { - require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty }); - let (out_len, out_elem) = ret_ty.simd_size_and_type(bx.tcx()); + let (out_len, out_elem) = require_simd!(ret_ty, SimdReturn); require!( in_len == out_len, InvalidMonomorphization::ReturnLengthInputType { @@ -1843,8 +1800,7 @@ } if name == sym::simd_expose_addr { - require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty }); - let (out_len, out_elem) = ret_ty.simd_size_and_type(bx.tcx()); + let (out_len, out_elem) = require_simd!(ret_ty, SimdReturn); require!( in_len == out_len, InvalidMonomorphization::ReturnLengthInputType { @@ -1872,8 +1828,7 @@ } if name == sym::simd_from_exposed_addr { - require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty }); - let (out_len, out_elem) = ret_ty.simd_size_and_type(bx.tcx()); + let (out_len, out_elem) = require_simd!(ret_ty, SimdReturn); require!( in_len == out_len, InvalidMonomorphization::ReturnLengthInputType { @@ -1901,8 +1856,7 @@ } if name == sym::simd_cast || name == sym::simd_as { - require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty }); - let (out_len, out_elem) = ret_ty.simd_size_and_type(bx.tcx()); + let (out_len, out_elem) = require_simd!(ret_ty, SimdReturn); require!( in_len == out_len, InvalidMonomorphization::ReturnLengthInputType { @@ -1989,17 +1943,14 @@ } _ => { /* Unsupported. Fallthrough. */ } } - require!( - false, - InvalidMonomorphization::UnsupportedCast { - span, - name, - in_ty, - in_elem, - ret_ty, - out_elem - } - ); + return_error!(InvalidMonomorphization::UnsupportedCast { + span, + name, + in_ty, + in_elem, + ret_ty, + out_elem + }); } macro_rules! 
arith_binary { ($($name: ident: $($($p: ident),* => $call: ident),*;)*) => { @@ -2010,8 +1961,7 @@ })* _ => {}, } - require!( - false, + return_error!( InvalidMonomorphization::UnsupportedOperation { span, name, in_ty, in_elem } ); })* @@ -2041,8 +1991,7 @@ })* _ => {}, } - require!( - false, + return_error!( InvalidMonomorphization::UnsupportedOperation { span, name, in_ty, in_elem } ); })* diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,13 +4,17 @@ //! //! This API is completely unstable and subject to change. +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![feature(exact_size_is_empty)] #![feature(extern_types)] #![feature(hash_raw_entry)] #![feature(iter_intersperse)] #![feature(let_chains)] +#![feature(min_specialization)] #![feature(never_type)] -#![feature(slice_group_by)] #![feature(impl_trait_in_assoc_type)] #![recursion_limit = "256"] #![allow(rustc::potential_query_instability)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/llvm/ffi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/llvm/ffi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/llvm/ffi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/llvm/ffi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -969,7 +969,6 @@ ConstantIndices: *const &'a Value, NumIndices: c_uint, ) -> &'a Value; - pub fn LLVMConstZExt<'a>(ConstantVal: &'a Value, ToType: &'a Type) -> &'a Value; pub fn LLVMConstPtrToInt<'a>(ConstantVal: &'a Value, ToType: &'a Type) -> &'a Value; pub fn LLVMConstIntToPtr<'a>(ConstantVal: &'a Value, ToType: &'a Type) -> &'a Value; pub fn LLVMConstBitCast<'a>(ConstantVal: &'a Value, ToType: &'a Type) -> &'a Value; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/llvm_util.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/llvm_util.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/llvm_util.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/llvm_util.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,7 @@ use crate::back::write::create_informational_target_machine; use crate::errors::{ PossibleFeature, TargetFeatureDisableOrEnable, UnknownCTargetFeature, - UnknownCTargetFeaturePrefix, + UnknownCTargetFeaturePrefix, UnstableCTargetFeature, }; use crate::llvm; use libc::c_int; @@ -531,25 +531,34 @@ }; let feature = backend_feature_name(s)?; - // Warn against use of LLVM specific feature names on the CLI. - if diagnostics && !supported_features.iter().any(|&(v, _)| v == feature) { - let rust_feature = supported_features.iter().find_map(|&(rust_feature, _)| { - let llvm_features = to_llvm_features(sess, rust_feature); - if llvm_features.contains(&feature) && !llvm_features.contains(&rust_feature) { - Some(rust_feature) + // Warn against use of LLVM specific feature names and unstable features on the CLI. 
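// Minimal sketch of the classification performed below (the table, names and messages are
// placeholders, not rustc's real feature list or diagnostics): a `-Ctarget-feature` request
// is either unknown to rustc, known but still gated behind a nightly feature (unstable), or
// known and stable.
fn classify(requested: &str, supported: &[(&str, Option<&str>)]) -> &'static str {
    match supported.iter().find(|&&(name, _)| name == requested) {
        None => "unknown: warn and suggest the closest Rust feature name",
        Some((_, Some(_feature_gate))) => "unstable: warn that its behaviour can change",
        Some((_, None)) => "stable: accepted silently",
    }
}

fn main() {
    let supported = [("sse2", None), ("avx512f", Some("avx512_target_feature"))];
    println!("sse2 -> {}", classify("sse2", &supported));
    println!("avx512f -> {}", classify("avx512f", &supported));
    println!("v9.4a -> {}", classify("v9.4a", &supported));
}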
+ if diagnostics { + let feature_state = supported_features.iter().find(|&&(v, _)| v == feature); + if feature_state.is_none() { + let rust_feature = supported_features.iter().find_map(|&(rust_feature, _)| { + let llvm_features = to_llvm_features(sess, rust_feature); + if llvm_features.contains(&feature) + && !llvm_features.contains(&rust_feature) + { + Some(rust_feature) + } else { + None + } + }); + let unknown_feature = if let Some(rust_feature) = rust_feature { + UnknownCTargetFeature { + feature, + rust_feature: PossibleFeature::Some { rust_feature }, + } } else { - None - } - }); - let unknown_feature = if let Some(rust_feature) = rust_feature { - UnknownCTargetFeature { - feature, - rust_feature: PossibleFeature::Some { rust_feature }, - } - } else { - UnknownCTargetFeature { feature, rust_feature: PossibleFeature::None } - }; - sess.emit_warning(unknown_feature); + UnknownCTargetFeature { feature, rust_feature: PossibleFeature::None } + }; + sess.emit_warning(unknown_feature); + } else if feature_state.is_some_and(|(_name, feature_gate)| feature_gate.is_some()) + { + // An unstable feature. Warn about using it. + sess.emit_warning(UnstableCTargetFeature { feature }); + } } if diagnostics { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/mono_item.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/mono_item.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/mono_item.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/mono_item.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,7 +6,6 @@ use crate::type_of::LayoutLlvmExt; use rustc_codegen_ssa::traits::*; use rustc_hir::def_id::{DefId, LOCAL_CRATE}; -pub use rustc_middle::mir::mono::MonoItem; use rustc_middle::mir::mono::{Linkage, Visibility}; use rustc_middle::ty::layout::{FnAbiOf, LayoutOf}; use rustc_middle::ty::{self, Instance, TypeVisitableExt}; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/type_.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/type_.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/type_.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/type_.rs 2023-12-21 16:55:28.000000000 +0000 @@ -112,7 +112,7 @@ } } - /// Return a LLVM type that has at most the required alignment, + /// Return an LLVM type that has at most the required alignment, /// and exactly the required size, as a best-effort padding array. pub(crate) fn type_padding_filler(&self, size: Size, align: Align) -> &'ll Type { let unit = Integer::approximate_align(self, align); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/type_of.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/type_of.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/type_of.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_llvm/src/type_of.rs 2023-12-21 16:55:28.000000000 +0000 @@ -42,7 +42,7 @@ // FIXME(eddyb) producing readable type names for trait objects can result // in problematically distinct types due to HRTB and subtyping (see #47638). // ty::Dynamic(..) | - ty::Adt(..) | ty::Closure(..) | ty::Foreign(..) | ty::Generator(..) | ty::Str + ty::Adt(..) | ty::Closure(..) | ty::Foreign(..) | ty::Coroutine(..) | ty::Str // For performance reasons we use names only when emitting LLVM IR. 
if !cx.sess().fewer_names() => { @@ -54,10 +54,10 @@ write!(&mut name, "::{}", def.variant(index).name).unwrap(); } } - if let (&ty::Generator(_, _, _), &Variants::Single { index }) = + if let (&ty::Coroutine(_, _, _), &Variants::Single { index }) = (layout.ty.kind(), &layout.variants) { - write!(&mut name, "::{}", ty::GeneratorArgs::variant_name(index)).unwrap(); + write!(&mut name, "::{}", ty::CoroutineArgs::variant_name(index)).unwrap(); } Some(name) } @@ -397,7 +397,12 @@ // extracts all the individual values. let ety = element.llvm_type(cx); - return Some(cx.type_vector(ety, *count)); + if *count == 1 { + // Emitting `<1 x T>` would be silly; just use the scalar. + return Some(ety); + } else { + return Some(cx.type_vector(ety, *count)); + } } // FIXME: The above only handled integer arrays; surely more things diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,42 +4,46 @@ edition = "2021" [dependencies] +# tidy-alphabetical-start ar_archive_writer = "0.1.5" bitflags = "1.2.1" cc = "1.0.69" itertools = "0.10.1" -tracing = "0.1" jobserver = "0.1.22" -tempfile = "3.2" -thorin-dwp = "0.7" pathdiff = "0.2.0" -serde_json = "1.0.59" -smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } regex = "1.4" - -rustc_serialize = { path = "../rustc_serialize" } rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } -rustc_span = { path = "../rustc_span" } -rustc_middle = { path = "../rustc_middle" } -rustc_type_ir = { path = "../rustc_type_ir" } rustc_attr = { path = "../rustc_attr" } -rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_symbol_mangling = { path = "../rustc_symbol_mangling" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } +rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_fs_util = { path = "../rustc_fs_util" } rustc_hir = { path = "../rustc_hir" } rustc_incremental = { path = "../rustc_incremental" } rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } rustc_metadata = { path = "../rustc_metadata" } +rustc_middle = { path = "../rustc_middle" } rustc_query_system = { path = "../rustc_query_system" } -rustc_target = { path = "../rustc_target" } +rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } +rustc_symbol_mangling = { path = "../rustc_symbol_mangling" } +rustc_target = { path = "../rustc_target" } +rustc_type_ir = { path = "../rustc_type_ir" } +serde_json = "1.0.59" +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +tempfile = "3.2" +thin-vec = "0.2.12" +thorin-dwp = "0.7" +tracing = "0.1" +# tidy-alphabetical-end [target.'cfg(unix)'.dependencies] +# tidy-alphabetical-start libc = "0.2.50" +# tidy-alphabetical-end [dependencies.object] version = "0.32.0" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/messages.ftl 2023-12-21 
16:55:28.000000000 +0000 @@ -11,6 +11,9 @@ codegen_ssa_binary_output_to_tty = option `-o` or `--emit` is used to write binary output type `{$shorthand}` to stdout, but stdout is a tty +codegen_ssa_cgu_not_recorded = + CGU-reuse for `{$cgu_user_name}` is (mangled: `{$cgu_name}`) was not recorded + codegen_ssa_check_installed_visual_studio = please ensure that Visual Studio 2017 or later, or Build Tools for Visual Studio were installed with the Visual C++ option. codegen_ssa_copy_path = could not copy {$from} to {$to}: {$error} @@ -39,6 +42,8 @@ codegen_ssa_failed_to_write = failed to write {$path}: {$error} +codegen_ssa_field_associated_value_expected = associated value expected for `{$name}` + codegen_ssa_ignoring_emit_path = ignoring emit path because multiple .{$extension} files were produced codegen_ssa_ignoring_output = ignoring -o because multiple .{$extension} files were produced @@ -46,6 +51,12 @@ codegen_ssa_illegal_link_ordinal_format = illegal ordinal format in `link_ordinal` .note = an unsuffixed integer value, e.g., `1`, is expected +codegen_ssa_incorrect_cgu_reuse_type = + CGU-reuse for `{$cgu_user_name}` is `{$actual_reuse}` but should be {$at_least -> + [one] {"at least "} + *[other] {""} + }`{$expected_reuse}` + codegen_ssa_insufficient_vs_code_product = VS Code is a different product, and is not sufficient. codegen_ssa_invalid_link_ordinal_nargs = incorrect number of arguments to `#[link_ordinal]` @@ -153,12 +164,18 @@ codegen_ssa_linking_failed = linking with `{$linker_path}` failed: {$exit_status} +codegen_ssa_malformed_cgu_name = + found malformed codegen unit name `{$user_path}`. codegen units names must always start with the name of the crate (`{$crate_name}` in this case). + codegen_ssa_metadata_object_file_write = error writing metadata object file: {$error} codegen_ssa_missing_cpp_build_tool_component = or a necessary component may be missing from the "C++ build tools" workload codegen_ssa_missing_memory_ordering = Atomic intrinsic missing memory ordering +codegen_ssa_missing_query_depgraph = + found CGU-reuse attribute but `-Zquery-dep-graph` was not specified + codegen_ssa_msvc_missing_linker = the msvc targets depend on the msvc linker but `link.exe` was not found codegen_ssa_multiple_external_func_decl = multiple declarations of external function `{$function}` from library `{$library_name}` have different calling conventions @@ -166,9 +183,12 @@ codegen_ssa_multiple_main_functions = entry symbol `main` declared multiple times .help = did you use `#[no_mangle]` on `fn main`? Use `#[start]` instead -codegen_ssa_no_natvis_directory = error enumerating natvis directory: {$error} +codegen_ssa_no_field = no field `{$name}` + +codegen_ssa_no_module_named = + no module named `{$user_path}` (mangled: {$cgu_name}). 
available modules: {$cgu_names} -codegen_ssa_option_gcc_only = option `-Z gcc-ld` is used even though linker flavor is not gcc +codegen_ssa_no_natvis_directory = error enumerating natvis directory: {$error} codegen_ssa_processing_dymutil_failed = processing debug info with `dsymutil` failed: {$status} .note = {$output} @@ -299,6 +319,8 @@ codegen_ssa_unknown_atomic_ordering = unknown ordering in atomic intrinsic +codegen_ssa_unknown_reuse_kind = unknown cgu-reuse-kind `{$kind}` specified + codegen_ssa_unsupported_arch = unsupported arch `{$arch}` for os `{$os}` codegen_ssa_unsupported_link_self_contained = option `-C link-self-contained` is not supported on this target diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/assert_module_sources.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/assert_module_sources.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/assert_module_sources.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/assert_module_sources.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,295 @@ +//! This pass is only used for UNIT TESTS related to incremental +//! compilation. It tests whether a particular `.o` file will be re-used +//! from a previous compilation or whether it must be regenerated. +//! +//! The user adds annotations to the crate of the following form: +//! +//! ``` +//! # #![feature(rustc_attrs)] +//! # #![allow(internal_features)] +//! #![rustc_partition_reused(module="spike", cfg="rpass2")] +//! #![rustc_partition_codegened(module="spike-x", cfg="rpass2")] +//! ``` +//! +//! The first indicates (in the cfg `rpass2`) that `spike.o` will be +//! reused, the second that `spike-x.o` will be recreated. If these +//! annotations are inaccurate, errors are reported. +//! +//! The reason that we use `cfg=...` and not `#[cfg_attr]` is so that +//! the HIR doesn't change as a result of the annotations, which might +//! perturb the reuse results. +//! +//! `#![rustc_expected_cgu_reuse(module="spike", cfg="rpass2", kind="post-lto")]` +//! allows for doing a more fine-grained check to see if pre- or post-lto data +//! was re-used. 
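// Illustrative sketch (reduced types, not the module's API) of the two comparison modes used
// by the reuse tracker below: `rustc_partition_codegened` expects exactly no reuse, while
// `rustc_partition_reused` and kind="any" only demand *at least* pre-LTO reuse. This works
// because the reuse kinds are ordered No < PreLto < PostLto.
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)]
enum Reuse {
    No,
    PreLto,
    PostLto,
}

fn expectation_met(expected: Reuse, actual: Reuse, at_least: bool) -> bool {
    if at_least { actual >= expected } else { actual == expected }
}

fn main() {
    // Post-LTO reuse satisfies an "at least pre-LTO" expectation...
    assert!(expectation_met(Reuse::PreLto, Reuse::PostLto, true));
    // ...but not an exact "pre-lto" expectation.
    assert!(!expectation_met(Reuse::PreLto, Reuse::PostLto, false));
    println!("ok");
}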
+ +use crate::errors; +use rustc_ast as ast; +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::unord::UnordSet; +use rustc_errors::{DiagnosticArgValue, IntoDiagnosticArg}; +use rustc_hir::def_id::LOCAL_CRATE; +use rustc_middle::mir::mono::CodegenUnitNameBuilder; +use rustc_middle::ty::TyCtxt; +use rustc_session::Session; +use rustc_span::symbol::sym; +use rustc_span::{Span, Symbol}; +use std::borrow::Cow; +use std::fmt; +use thin_vec::ThinVec; + +#[allow(missing_docs)] +pub fn assert_module_sources(tcx: TyCtxt<'_>, set_reuse: &dyn Fn(&mut CguReuseTracker)) { + tcx.dep_graph.with_ignore(|| { + if tcx.sess.opts.incremental.is_none() { + return; + } + + let available_cgus = + tcx.collect_and_partition_mono_items(()).1.iter().map(|cgu| cgu.name()).collect(); + + let mut ams = AssertModuleSource { + tcx, + available_cgus, + cgu_reuse_tracker: if tcx.sess.opts.unstable_opts.query_dep_graph { + CguReuseTracker::new() + } else { + CguReuseTracker::new_disabled() + }, + }; + + for attr in tcx.hir().attrs(rustc_hir::CRATE_HIR_ID) { + ams.check_attr(attr); + } + + set_reuse(&mut ams.cgu_reuse_tracker); + + ams.cgu_reuse_tracker.check_expected_reuse(tcx.sess); + }); +} + +struct AssertModuleSource<'tcx> { + tcx: TyCtxt<'tcx>, + available_cgus: UnordSet, + cgu_reuse_tracker: CguReuseTracker, +} + +impl<'tcx> AssertModuleSource<'tcx> { + fn check_attr(&mut self, attr: &ast::Attribute) { + let (expected_reuse, comp_kind) = if attr.has_name(sym::rustc_partition_reused) { + (CguReuse::PreLto, ComparisonKind::AtLeast) + } else if attr.has_name(sym::rustc_partition_codegened) { + (CguReuse::No, ComparisonKind::Exact) + } else if attr.has_name(sym::rustc_expected_cgu_reuse) { + match self.field(attr, sym::kind) { + sym::no => (CguReuse::No, ComparisonKind::Exact), + sym::pre_dash_lto => (CguReuse::PreLto, ComparisonKind::Exact), + sym::post_dash_lto => (CguReuse::PostLto, ComparisonKind::Exact), + sym::any => (CguReuse::PreLto, ComparisonKind::AtLeast), + other => { + self.tcx + .sess + .emit_fatal(errors::UnknownReuseKind { span: attr.span, kind: other }); + } + } + } else { + return; + }; + + if !self.tcx.sess.opts.unstable_opts.query_dep_graph { + self.tcx.sess.emit_fatal(errors::MissingQueryDepGraph { span: attr.span }); + } + + if !self.check_config(attr) { + debug!("check_attr: config does not match, ignoring attr"); + return; + } + + let user_path = self.field(attr, sym::module).to_string(); + let crate_name = self.tcx.crate_name(LOCAL_CRATE).to_string(); + + if !user_path.starts_with(&crate_name) { + self.tcx.sess.emit_fatal(errors::MalformedCguName { + span: attr.span, + user_path, + crate_name, + }); + } + + // Split of the "special suffix" if there is one. 
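// Small standalone sketch (example strings only, helper invented) of the name handling right
// below: a `module="..."` path such as "spike-mod1-mod2.volatile" is split into an optional
// "special suffix" after the last '.', and then into '-'-separated components, the first of
// which has to be the crate name.
fn split_user_path(user_path: &str) -> (Option<&str>, Vec<&str>) {
    let (path, suffix) = match user_path.rfind('.') {
        Some(index) => (&user_path[..index], Some(&user_path[index + 1..])),
        None => (user_path, None),
    };
    (suffix, path.split('-').collect())
}

fn main() {
    let (suffix, components) = split_user_path("spike-mod1-mod2.volatile");
    assert_eq!(suffix, Some("volatile"));
    assert_eq!(components, vec!["spike", "mod1", "mod2"]);
    println!("crate = {}, cgu path = {:?}, suffix = {:?}", components[0], &components[1..], suffix);
}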
+ let (user_path, cgu_special_suffix) = if let Some(index) = user_path.rfind('.') { + (&user_path[..index], Some(&user_path[index + 1..])) + } else { + (&user_path[..], None) + }; + + let mut iter = user_path.split('-'); + + // Remove the crate name + assert_eq!(iter.next().unwrap(), crate_name); + + let cgu_path_components = iter.collect::<Vec<_>>(); + + let cgu_name_builder = &mut CodegenUnitNameBuilder::new(self.tcx); + let cgu_name = + cgu_name_builder.build_cgu_name(LOCAL_CRATE, cgu_path_components, cgu_special_suffix); + + debug!("mapping '{}' to cgu name '{}'", self.field(attr, sym::module), cgu_name); + + if !self.available_cgus.contains(&cgu_name) { + let cgu_names: Vec<&str> = + self.available_cgus.items().map(|cgu| cgu.as_str()).into_sorted_stable_ord(); + self.tcx.sess.emit_err(errors::NoModuleNamed { + span: attr.span, + user_path, + cgu_name, + cgu_names: cgu_names.join(", "), + }); + } + + self.cgu_reuse_tracker.set_expectation( + cgu_name, + &user_path, + attr.span, + expected_reuse, + comp_kind, + ); + } + + fn field(&self, attr: &ast::Attribute, name: Symbol) -> Symbol { + for item in attr.meta_item_list().unwrap_or_else(ThinVec::new) { + if item.has_name(name) { + if let Some(value) = item.value_str() { + return value; + } else { + self.tcx.sess.emit_fatal(errors::FieldAssociatedValueExpected { + span: item.span(), + name, + }); + } + } + } + + self.tcx.sess.emit_fatal(errors::NoField { span: attr.span, name }); + } + + /// Scan for a `cfg="foo"` attribute and check whether we have a + /// cfg flag called `foo`. + fn check_config(&self, attr: &ast::Attribute) -> bool { + let config = &self.tcx.sess.parse_sess.config; + let value = self.field(attr, sym::cfg); + debug!("check_config(config={:?}, value={:?})", config, value); + if config.iter().any(|&(name, _)| name == value) { + debug!("check_config: matched"); + return true; + } + debug!("check_config: no match found"); + false + } +} + +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] +pub enum CguReuse { + No, + PreLto, + PostLto, +} + +impl fmt::Display for CguReuse { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + CguReuse::No => write!(f, "No"), + CguReuse::PreLto => write!(f, "PreLto "), + CguReuse::PostLto => write!(f, "PostLto "), + } + } +} + +impl IntoDiagnosticArg for CguReuse { + fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> { + DiagnosticArgValue::Str(Cow::Owned(self.to_string())) + } +} + +#[derive(Copy, Clone, Debug, PartialEq)] +pub enum ComparisonKind { + Exact, + AtLeast, +} + +struct TrackerData { + actual_reuse: FxHashMap<String, CguReuse>, + expected_reuse: FxHashMap<String, (String, Span, CguReuse, ComparisonKind)>, +} + +pub struct CguReuseTracker { + data: Option<TrackerData>, +} + +impl CguReuseTracker { + fn new() -> CguReuseTracker { + let data = + TrackerData { actual_reuse: Default::default(), expected_reuse: Default::default() }; + + CguReuseTracker { data: Some(data) } + } + + fn new_disabled() -> CguReuseTracker { + CguReuseTracker { data: None } + } + + pub fn set_actual_reuse(&mut self, cgu_name: &str, kind: CguReuse) { + if let Some(data) = &mut self.data { + debug!("set_actual_reuse({cgu_name:?}, {kind:?})"); + + let prev_reuse = data.actual_reuse.insert(cgu_name.to_string(), kind); + assert!(prev_reuse.is_none()); + } + } + + fn set_expectation( + &mut self, + cgu_name: Symbol, + cgu_user_name: &str, + error_span: Span, + expected_reuse: CguReuse, + comparison_kind: ComparisonKind, + ) { + if let Some(data) = &mut self.data { + debug!("set_expectation({cgu_name:?}, {expected_reuse:?}, {comparison_kind:?})"); +
data.expected_reuse.insert( + cgu_name.to_string(), + (cgu_user_name.to_string(), error_span, expected_reuse, comparison_kind), + ); + } + } + + fn check_expected_reuse(&self, sess: &Session) { + if let Some(ref data) = self.data { + for (cgu_name, &(ref cgu_user_name, ref error_span, expected_reuse, comparison_kind)) in + &data.expected_reuse + { + if let Some(&actual_reuse) = data.actual_reuse.get(cgu_name) { + let (error, at_least) = match comparison_kind { + ComparisonKind::Exact => (expected_reuse != actual_reuse, false), + ComparisonKind::AtLeast => (actual_reuse < expected_reuse, true), + }; + + if error { + let at_least = if at_least { 1 } else { 0 }; + errors::IncorrectCguReuseType { + span: *error_span, + cgu_user_name, + actual_reuse, + expected_reuse, + at_least, + }; + } + } else { + sess.emit_fatal(errors::CguNotRecorded { cgu_user_name, cgu_name }); + } + } + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/link.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/link.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/link.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/link.rs 2023-12-21 16:55:28.000000000 +0000 @@ -22,7 +22,9 @@ /// need out of the shared crate context before we get rid of it. use rustc_session::{filesearch, Session}; use rustc_span::symbol::Symbol; -use rustc_target::spec::crt_objects::{CrtObjects, LinkSelfContainedDefault}; +use rustc_target::spec::crt_objects::CrtObjects; +use rustc_target::spec::LinkSelfContainedComponents; +use rustc_target::spec::LinkSelfContainedDefault; use rustc_target::spec::{Cc, LinkOutputKind, LinkerFlavor, Lld, PanicStrategy}; use rustc_target::spec::{RelocModel, RelroLevel, SanitizerSet, SplitDebuginfo}; @@ -368,17 +370,25 @@ let NativeLibKind::Static { bundle: None | Some(true), .. } = lib.kind else { continue; }; - if flavor == RlibFlavor::Normal && let Some(filename) = lib.filename { + if flavor == RlibFlavor::Normal + && let Some(filename) = lib.filename + { let path = find_native_static_library(filename.as_str(), true, &lib_search_paths, sess); - let src = read(path).map_err(|e| sess.emit_fatal(errors::ReadFileError {message: e }))?; + let src = + read(path).map_err(|e| sess.emit_fatal(errors::ReadFileError { message: e }))?; let (data, _) = create_wrapper_file(sess, b".bundled_lib".to_vec(), &src); let wrapper_file = emit_wrapper_file(sess, &data, tmpdir, filename.as_str()); packed_bundled_libs.push(wrapper_file); } else { - let path = - find_native_static_library(lib.name.as_str(), lib.verbatim, &lib_search_paths, sess); + let path = find_native_static_library( + lib.name.as_str(), + lib.verbatim, + &lib_search_paths, + sess, + ); ab.add_archive(&path, Box::new(|_| false)).unwrap_or_else(|error| { - sess.emit_fatal(errors::AddNativeLibrary { library_path: path, error })}); + sess.emit_fatal(errors::AddNativeLibrary { library_path: path, error }) + }); } } @@ -720,6 +730,7 @@ ) -> Result<(), ErrorGuaranteed> { info!("preparing {:?} to {:?}", crate_type, out_filename); let (linker_path, flavor) = linker_and_flavor(sess); + let self_contained_components = self_contained_components(sess, crate_type); let mut cmd = linker_with_args( &linker_path, flavor, @@ -729,6 +740,7 @@ tmpdir, out_filename, codegen_results, + self_contained_components, )?; linker::disable_localization(&mut cmd); @@ -804,14 +816,14 @@ "Linker does not support -static-pie command line option. 
Retrying with -static instead." ); // Mirror `add_(pre,post)_link_objects` to replace CRT objects. - let self_contained = self_contained(sess, crate_type); + let self_contained_crt_objects = self_contained_components.is_crt_objects_enabled(); let opts = &sess.target; - let pre_objects = if self_contained { + let pre_objects = if self_contained_crt_objects { &opts.pre_link_objects_self_contained } else { &opts.pre_link_objects }; - let post_objects = if self_contained { + let post_objects = if self_contained_crt_objects { &opts.post_link_objects_self_contained } else { &opts.post_link_objects @@ -822,7 +834,9 @@ .iter() .copied() .flatten() - .map(|obj| get_object_file_path(sess, obj, self_contained).into_os_string()) + .map(|obj| { + get_object_file_path(sess, obj, self_contained_crt_objects).into_os_string() + }) .collect::>() }; let pre_objects_static_pie = get_objects(pre_objects, LinkOutputKind::StaticPicExe); @@ -1019,7 +1033,7 @@ SplitDebuginfo::Packed => link_dwarf_object(sess, codegen_results, out_filename), } - let strip = strip_value(sess); + let strip = sess.opts.cg.strip; if sess.target.is_like_osx { match (strip, crate_type) { @@ -1056,14 +1070,6 @@ Ok(()) } -// Temporarily support both -Z strip and -C strip -fn strip_value(sess: &Session) -> Strip { - match (sess.opts.unstable_opts.strip, sess.opts.cg.strip) { - (s, Strip::None) => s, - (_, s) => s, - } -} - fn strip_symbols_with_external_utility<'a>( sess: &'a Session, util: &str, @@ -1702,26 +1708,43 @@ /// Various toolchain components used during linking are used from rustc distribution /// instead of being found somewhere on the host system. /// We only provide such support for a very limited number of targets. -fn self_contained(sess: &Session, crate_type: CrateType) -> bool { - if let Some(self_contained) = sess.opts.cg.link_self_contained.explicitly_set { - if sess.target.link_self_contained == LinkSelfContainedDefault::False { - sess.emit_err(errors::UnsupportedLinkSelfContained); - } - return self_contained; - } - - match sess.target.link_self_contained { - LinkSelfContainedDefault::False => false, - LinkSelfContainedDefault::True => true, - // FIXME: Find a better heuristic for "native musl toolchain is available", - // based on host and linker path, for example. - // (https://github.com/rust-lang/rust/pull/71769#issuecomment-626330237). - LinkSelfContainedDefault::Musl => sess.crt_static(Some(crate_type)), - LinkSelfContainedDefault::Mingw => { - sess.host == sess.target - && sess.target.vendor != "uwp" - && detect_self_contained_mingw(&sess) - } +fn self_contained_components(sess: &Session, crate_type: CrateType) -> LinkSelfContainedComponents { + // Turn the backwards compatible bool values for `self_contained` into fully inferred + // `LinkSelfContainedComponents`. + let self_contained = + if let Some(self_contained) = sess.opts.cg.link_self_contained.explicitly_set { + // Emit an error if the user requested self-contained mode on the CLI but the target + // explicitly refuses it. + if sess.target.link_self_contained.is_disabled() { + sess.emit_err(errors::UnsupportedLinkSelfContained); + } + self_contained + } else { + match sess.target.link_self_contained { + LinkSelfContainedDefault::False => false, + LinkSelfContainedDefault::True => true, + + LinkSelfContainedDefault::WithComponents(components) => { + // For target specs with explicitly enabled components, we can return them + // directly. 
+ return components; + } + + // FIXME: Find a better heuristic for "native musl toolchain is available", + // based on host and linker path, for example. + // (https://github.com/rust-lang/rust/pull/71769#issuecomment-626330237). + LinkSelfContainedDefault::InferredForMusl => sess.crt_static(Some(crate_type)), + LinkSelfContainedDefault::InferredForMingw => { + sess.host == sess.target + && sess.target.vendor != "uwp" + && detect_self_contained_mingw(&sess) + } + } + }; + if self_contained { + LinkSelfContainedComponents::all() + } else { + LinkSelfContainedComponents::empty() } } @@ -1881,37 +1904,14 @@ return; }; - // NOTE(nbdd0121): MSVC will hang if the input object file contains no sections, - // so add an empty section. if file.format() == object::BinaryFormat::Coff { + // NOTE(nbdd0121): MSVC will hang if the input object file contains no sections, + // so add an empty section. file.add_section(Vec::new(), ".text".into(), object::SectionKind::Text); // We handle the name decoration of COFF targets in `symbol_export.rs`, so disable the // default mangler in `object` crate. file.set_mangling(object::write::Mangling::None); - - // Add feature flags to the object file. On MSVC this is optional but LLD will complain if - // not present. - let mut feature = 0; - - if file.architecture() == object::Architecture::I386 { - // Indicate that all SEH handlers are registered in .sxdata section. - // We don't have generate any code, so we don't need .sxdata section but LLD still - // expects us to set this bit (see #96498). - // Reference: https://docs.microsoft.com/en-us/windows/win32/debug/pe-format - feature |= 1; - } - - file.add_symbol(object::write::Symbol { - name: "@feat.00".into(), - value: feature, - size: 0, - kind: object::SymbolKind::Data, - scope: object::SymbolScope::Compilation, - weak: false, - section: object::write::SymbolSection::Absolute, - flags: object::SymbolFlags::None, - }); } for (sym, kind) in symbols.iter() { @@ -2045,13 +2045,14 @@ tmpdir: &Path, out_filename: &Path, codegen_results: &CodegenResults, + self_contained_components: LinkSelfContainedComponents, ) -> Result { - let self_contained = self_contained(sess, crate_type); + let self_contained_crt_objects = self_contained_components.is_crt_objects_enabled(); let cmd = &mut *super::linker::get_linker( sess, path, flavor, - self_contained, + self_contained_components.are_any_components_enabled(), &codegen_results.crate_info.target_cpu, ); let link_output_kind = link_output_kind(sess, crate_type); @@ -2078,7 +2079,7 @@ // ------------ Object code and libraries, order-dependent ------------ // Pre-link CRT objects. - add_pre_link_objects(cmd, sess, flavor, link_output_kind, self_contained); + add_pre_link_objects(cmd, sess, flavor, link_output_kind, self_contained_crt_objects); add_linked_symbol_object( cmd, @@ -2221,7 +2222,7 @@ cmd, sess, link_output_kind, - self_contained, + self_contained_components, flavor, crate_type, codegen_results, @@ -2237,7 +2238,7 @@ // ------------ Object code and libraries, order-dependent ------------ // Post-link CRT objects. 
- add_post_link_objects(cmd, sess, link_output_kind, self_contained); + add_post_link_objects(cmd, sess, link_output_kind, self_contained_crt_objects); // ------------ Late order-dependent options ------------ @@ -2254,7 +2255,7 @@ cmd: &mut dyn Linker, sess: &Session, link_output_kind: LinkOutputKind, - self_contained: bool, + self_contained_components: LinkSelfContainedComponents, flavor: LinkerFlavor, crate_type: CrateType, codegen_results: &CodegenResults, @@ -2262,7 +2263,7 @@ tmpdir: &Path, ) { // Take care of the flavors and CLI options requesting the `lld` linker. - add_lld_args(cmd, sess, flavor); + add_lld_args(cmd, sess, flavor, self_contained_components); add_apple_sdk(cmd, sess, flavor); @@ -2287,7 +2288,7 @@ // Make the binary compatible with data execution prevention schemes. cmd.add_no_exec(); - if self_contained { + if self_contained_components.is_crt_objects_enabled() { cmd.no_crt_objects(); } @@ -2318,7 +2319,7 @@ cmd.linker_plugin_lto(); - add_library_search_dirs(cmd, sess, self_contained); + add_library_search_dirs(cmd, sess, self_contained_components.are_any_components_enabled()); cmd.output_filename(out_filename); @@ -2361,7 +2362,7 @@ ); // Pass debuginfo, NatVis debugger visualizers and strip flags down to the linker. - cmd.debuginfo(strip_value(sess), &natvis_visualizers); + cmd.debuginfo(sess.opts.cg.strip, &natvis_visualizers); // We want to prevent the compiler from accidentally leaking in any system libraries, // so by default we tell linkers not to link to any default libraries. @@ -2871,6 +2872,7 @@ } let sdk_name = match (arch.as_ref(), os.as_ref()) { + ("aarch64", "tvos") if llvm_target.ends_with("-simulator") => "appletvsimulator", ("aarch64", "tvos") => "appletvos", ("x86_64", "tvos") => "appletvsimulator", ("arm", "ios") => "iphoneos", @@ -2964,31 +2966,54 @@ } } -/// When using the linker flavors opting in to `lld`, or the unstable `-Zgcc-ld=lld` flag, add the -/// necessary paths and arguments to invoke it: +/// When using the linker flavors opting in to `lld`, add the necessary paths and arguments to +/// invoke it: /// - when the self-contained linker flag is active: the build of `lld` distributed with rustc, /// - or any `lld` available to `cc`. -fn add_lld_args(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) { - let unstable_use_lld = sess.opts.unstable_opts.gcc_ld.is_some(); - debug!("add_lld_args requested, flavor: '{flavor:?}', `-Zgcc-ld=lld`: {unstable_use_lld}"); - - // Sanity check: using the old unstable `-Zgcc-ld=lld` option requires a `cc`-using flavor. - let flavor_uses_cc = flavor.uses_cc(); - if unstable_use_lld && !flavor_uses_cc { - sess.emit_fatal(errors::OptionGccOnly); - } +fn add_lld_args( + cmd: &mut dyn Linker, + sess: &Session, + flavor: LinkerFlavor, + self_contained_components: LinkSelfContainedComponents, +) { + debug!( + "add_lld_args requested, flavor: '{:?}', target self-contained components: {:?}", + flavor, self_contained_components, + ); // If the flavor doesn't use a C/C++ compiler to invoke the linker, or doesn't opt in to `lld`, // we don't need to do anything. - let use_lld = flavor.uses_lld() || unstable_use_lld; - if !flavor_uses_cc || !use_lld { + if !(flavor.uses_cc() && flavor.uses_lld()) { return; } // 1. Implement the "self-contained" part of this feature by adding rustc distribution - // directories to the tool's search path. 
- let self_contained_linker = sess.opts.cg.link_self_contained.linker() || unstable_use_lld; - if self_contained_linker { + // directories to the tool's search path, depending on a mix between what users can specify on + // the CLI, and what the target spec enables (as it can't disable components): + // - if the self-contained linker is enabled on the CLI or by the target spec, + // - and if the self-contained linker is not disabled on the CLI. + let self_contained_cli = sess.opts.cg.link_self_contained.is_linker_enabled(); + let self_contained_target = self_contained_components.is_linker_enabled(); + + // FIXME: in the future, codegen backends may need to have more control over this process: they + // don't always support all the features the linker expects here, and vice versa. For example, + // at the time of writing this, lld expects a newer style of aarch64 TLS relocations that + // cranelift doesn't implement yet. That in turn can impact whether linking would succeed on + // such a target when using the `cg_clif` backend and lld. + // + // Until interactions between backends and linker features are expressible, we limit target + // specs to opt-in to lld only when we're on the llvm backend, where it's expected to work and + // tested on CI. As usual, the CLI still has precedence over this, so that users and developers + // can still override this default when needed (e.g. for tests). + let uses_llvm_backend = + matches!(sess.opts.unstable_opts.codegen_backend.as_deref(), None | Some("llvm")); + if !uses_llvm_backend && !self_contained_cli && sess.opts.cg.linker_flavor.is_none() { + // We bail if we're not using llvm and lld was not explicitly requested on the CLI. + return; + } + + let self_contained_linker = self_contained_cli || self_contained_target; + if self_contained_linker && !sess.opts.cg.link_self_contained.is_linker_disabled() { for path in sess.get_tools_search_paths(false) { cmd.arg({ let mut arg = OsString::from("-B"); @@ -2999,7 +3024,7 @@ } // 2. Implement the "linker flavor" part of this feature by asking `cc` to use some kind of - // `lld` as the linker. + // `lld` as the linker. cmd.arg("-fuse-ld=lld"); if !flavor.is_gnu() { @@ -3012,13 +3037,13 @@ // shown in issue #101653 and the discussion in PR #101792. // // It could be required in some cases of cross-compiling with - // `-Zgcc-ld=lld`, but this is generally unspecified, and we don't know + // LLD, but this is generally unspecified, and we don't know // which specific versions of clang, macOS SDK, host and target OS // combinations impact us here. // // So we do a simple first-approximation until we know more of what the // Apple targets require (and which would be handled prior to hitting this - // `-Zgcc-ld=lld` codepath anyway), but the expectation is that until then + // LLD codepath anyway), but the expectation is that until then // this should be manually passed if needed. We specify the target when // targeting a different linker flavor on macOS, and that's also always // the case when targeting WASM. 
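Taken together, the link.rs hunks above replace the single self-contained boolean with the per-component LinkSelfContainedComponents set from rustc_target::spec, so CRT objects and the bundled linker can be controlled independently. The real type is defined in rustc_target and may differ; the stand-in below is only a sketch, assuming a bitflags-style representation, of how the queries used above relate to the all()/empty() values that the backwards-compatible boolean expands into:

// Illustrative stand-in for rustc_target's LinkSelfContainedComponents.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct LinkSelfContainedComponents(u8);

impl LinkSelfContainedComponents {
    const CRT_OBJECTS: Self = Self(1 << 0);
    const LINKER: Self = Self(1 << 1);

    fn empty() -> Self { Self(0) }
    fn all() -> Self { Self(Self::CRT_OBJECTS.0 | Self::LINKER.0) }
    fn contains(self, other: Self) -> bool { self.0 & other.0 == other.0 }

    // The queries link.rs asks of the component set:
    fn is_crt_objects_enabled(self) -> bool { self.contains(Self::CRT_OBJECTS) }
    fn is_linker_enabled(self) -> bool { self.contains(Self::LINKER) }
    fn are_any_components_enabled(self) -> bool { self.0 != 0 }
}

fn main() {
    // An explicit "self-contained on" request enables every component...
    let on = LinkSelfContainedComponents::all();
    assert!(on.is_crt_objects_enabled() && on.is_linker_enabled());
    // ...while the "off" case enables none of them.
    let off = LinkSelfContainedComponents::empty();
    assert!(!off.are_any_components_enabled());
}
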
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/linker.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/linker.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/linker.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/linker.rs 2023-12-21 16:55:28.000000000 +0000 @@ -626,6 +626,15 @@ self.linker_arg("--strip-all"); } } + match self.sess.opts.unstable_opts.debuginfo_compression { + config::DebugInfoCompression::None => {} + config::DebugInfoCompression::Zlib => { + self.linker_arg("--compress-debug-sections=zlib"); + } + config::DebugInfoCompression::Zstd => { + self.linker_arg("--compress-debug-sections=zstd"); + } + } } fn no_crt_objects(&mut self) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/metadata.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/metadata.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/metadata.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/metadata.rs 2023-12-21 16:55:28.000000000 +0000 @@ -228,6 +228,35 @@ if sess.target.is_like_osx { file.set_macho_build_version(macho_object_build_version_for_target(&sess.target)) } + if binary_format == BinaryFormat::Coff { + // Disable the default mangler to avoid mangling the special "@feat.00" symbol name. + let original_mangling = file.mangling(); + file.set_mangling(object::write::Mangling::None); + + let mut feature = 0; + + if file.architecture() == object::Architecture::I386 { + // When linking with /SAFESEH on x86, lld requires that all linker inputs be marked as + // safe exception handling compatible. Metadata files masquerade as regular COFF + // objects and are treated as linker inputs, despite containing no actual code. Thus, + // they still need to be marked as safe exception handling compatible. See #96498. 
+ // Reference: https://docs.microsoft.com/en-us/windows/win32/debug/pe-format + feature |= 1; + } + + file.add_symbol(object::write::Symbol { + name: "@feat.00".into(), + value: feature, + size: 0, + kind: object::SymbolKind::Data, + scope: object::SymbolScope::Compilation, + weak: false, + section: object::write::SymbolSection::Absolute, + flags: object::SymbolFlags::None, + }); + + file.set_mangling(original_mangling); + } let e_flags = match architecture { Architecture::Mips => { let arch = match sess.target.options.cpu.as_ref() { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/rpath.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/rpath.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/rpath.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/rpath.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,8 +1,7 @@ use pathdiff::diff_paths; use rustc_data_structures::fx::FxHashSet; -use std::env; +use rustc_fs_util::try_canonicalize; use std::ffi::OsString; -use std::fs; use std::path::{Path, PathBuf}; pub struct RPathConfig<'a> { @@ -82,12 +81,11 @@ // Mac doesn't appear to support $ORIGIN let prefix = if config.is_like_osx { "@loader_path" } else { "$ORIGIN" }; - let cwd = env::current_dir().unwrap(); - let mut lib = fs::canonicalize(&cwd.join(lib)).unwrap_or_else(|_| cwd.join(lib)); - lib.pop(); // strip filename - let mut output = cwd.join(&config.out_filename); - output.pop(); // strip filename - let output = fs::canonicalize(&output).unwrap_or(output); + // Strip filenames + let lib = lib.parent().unwrap(); + let output = config.out_filename.parent().unwrap(); + let lib = try_canonicalize(lib).unwrap(); + let output = try_canonicalize(output).unwrap(); let relative = path_relative_from(&lib, &output) .unwrap_or_else(|| panic!("couldn't create relative path from {output:?} to {lib:?}")); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/write.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/write.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/write.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/back/write.rs 2023-12-21 16:55:28.000000000 +0000 @@ -26,7 +26,6 @@ use rustc_middle::dep_graph::{WorkProduct, WorkProductId}; use rustc_middle::middle::exported_symbols::SymbolExportInfo; use rustc_middle::ty::TyCtxt; -use rustc_session::cgu_reuse_tracker::CguReuseTracker; use rustc_session::config::{self, CrateType, Lto, OutFileName, OutputFilenames, OutputType}; use rustc_session::config::{Passes, SwitchWithOptPath}; use rustc_session::Session; @@ -366,8 +365,6 @@ /// The incremental compilation session directory, or None if we are not /// compiling incrementally pub incr_comp_session_dir: Option, - /// Used to update CGU re-use information during the thinlto phase. 
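The rpath.rs hunk above also simplifies how the relative rpath entry is computed: instead of resolving everything against current_dir(), the library and the output file are reduced to their parent directories, canonicalized with try_canonicalize, and diffed. A minimal standalone sketch of that computation (canonicalization omitted), assuming the same pathdiff crate used by rpath.rs is available as a dependency; the paths are invented:

use std::path::{Path, PathBuf};

// Directory of the library, made relative to the directory of the output
// binary; on Linux the result is appended to "$ORIGIN/".
fn relative_rpath(lib: &Path, out_filename: &Path) -> Option<PathBuf> {
    let lib_dir = lib.parent()?;
    let out_dir = out_filename.parent()?;
    pathdiff::diff_paths(lib_dir, out_dir)
}

fn main() {
    let rel = relative_rpath(
        Path::new("/build/deps/libfoo.so"),
        Path::new("/build/bin/app"),
    );
    assert_eq!(rel.unwrap(), Path::new("../deps")); // i.e. "$ORIGIN/../deps"
}
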
- pub cgu_reuse_tracker: CguReuseTracker, /// Channel back to the main control thread to send messages to pub coordinator_send: Sender>, } @@ -1119,7 +1116,6 @@ remark: sess.opts.cg.remark.clone(), remark_dir, incr_comp_session_dir: sess.incr_comp_session_dir_opt().map(|r| r.clone()), - cgu_reuse_tracker: sess.cgu_reuse_tracker.clone(), coordinator_send, expanded_args: tcx.sess.expanded_args.clone(), diag_emitter: shared_emitter.clone(), @@ -1969,8 +1965,6 @@ } }); - sess.cgu_reuse_tracker.check_expected_reuse(sess); - sess.abort_if_errors(); let work_products = diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/base.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,4 @@ +use crate::assert_module_sources::CguReuse; use crate::back::link::are_upstream_rust_objects_already_included; use crate::back::metadata::create_compressed_metadata_file; use crate::back::write::{ @@ -31,7 +32,6 @@ use rustc_middle::query::Providers; use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, Instance, Ty, TyCtxt}; -use rustc_session::cgu_reuse_tracker::CguReuse; use rustc_session::config::{self, CrateType, EntryFnType, OutputType}; use rustc_session::Session; use rustc_span::symbol::sym; @@ -683,6 +683,13 @@ codegen_units.iter().map(|cgu| determine_cgu_reuse(tcx, &cgu)).collect::>() }); + crate::assert_module_sources::assert_module_sources(tcx, &|cgu_reuse_tracker| { + for (i, cgu) in codegen_units.iter().enumerate() { + let cgu_reuse = cgu_reuse[i]; + cgu_reuse_tracker.set_actual_reuse(cgu.name().as_str(), cgu_reuse); + } + }); + let mut total_codegen_time = Duration::new(0, 0); let start_rss = tcx.sess.opts.unstable_opts.time_passes.then(|| get_resident_set_size()); @@ -727,7 +734,6 @@ ongoing_codegen.check_for_errors(tcx.sess); let cgu_reuse = cgu_reuse[i]; - tcx.sess.cgu_reuse_tracker.set_actual_reuse(cgu.name().as_str(), cgu_reuse); match cgu_reuse { CguReuse::No => { @@ -994,7 +1000,7 @@ }; } -fn determine_cgu_reuse<'tcx>(tcx: TyCtxt<'tcx>, cgu: &CodegenUnit<'tcx>) -> CguReuse { +pub fn determine_cgu_reuse<'tcx>(tcx: TyCtxt<'tcx>, cgu: &CodegenUnit<'tcx>) -> CguReuse { if !tcx.dep_graph.is_fully_enabled() { return CguReuse::No; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/codegen_attrs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/codegen_attrs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/codegen_attrs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/codegen_attrs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -238,8 +238,13 @@ && let Some(fn_sig) = fn_sig() && fn_sig.skip_binder().abi() != abi::Abi::Rust { - struct_span_err!(tcx.sess, attr.span, E0737, "`#[track_caller]` requires Rust ABI") - .emit(); + struct_span_err!( + tcx.sess, + attr.span, + E0737, + "`#[track_caller]` requires Rust ABI" + ) + .emit(); } if is_closure && !tcx.features().closure_track_caller @@ -435,17 +440,18 @@ && let [item] = items.as_slice() && let Some((sym::align, literal)) = item.name_value_literal() { - rustc_attr::parse_alignment(&literal.kind).map_err(|msg| { - struct_span_err!( - tcx.sess.diagnostic(), - attr.span, - E0589, - "invalid `repr(align)` attribute: {}", - msg - ) - 
.emit(); - }) - .ok() + rustc_attr::parse_alignment(&literal.kind) + .map_err(|msg| { + struct_span_err!( + tcx.sess.diagnostic(), + attr.span, + E0589, + "invalid `repr(align)` attribute: {}", + msg + ) + .emit(); + }) + .ok() } else { None }; @@ -626,10 +632,7 @@ && let ty::AssocItemContainer::ImplContainer = impl_item.container && let Some(trait_item) = impl_item.trait_item_def_id { - return tcx - .codegen_fn_attrs(trait_item) - .flags - .intersects(CodegenFnAttrFlags::TRACK_CALLER); + return tcx.codegen_fn_attrs(trait_item).flags.intersects(CodegenFnAttrFlags::TRACK_CALLER); } false diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs 2023-12-21 16:55:28.000000000 +0000 @@ -15,7 +15,7 @@ use rustc_data_structures::stable_hasher::{Hash64, HashStable, StableHasher}; use rustc_hir::def_id::DefId; use rustc_hir::definitions::{DefPathData, DefPathDataName, DisambiguatedDefPathData}; -use rustc_hir::{AsyncGeneratorKind, GeneratorKind, Mutability}; +use rustc_hir::{CoroutineKind, CoroutineSource, Mutability}; use rustc_middle::ty::layout::{IntegerExt, TyAndLayout}; use rustc_middle::ty::{self, ExistentialProjection, ParamEnv, Ty, TyCtxt}; use rustc_middle::ty::{GenericArgKind, GenericArgsRef}; @@ -398,23 +398,23 @@ // processing visited.remove(&t); } - ty::Closure(def_id, args) | ty::Generator(def_id, args, ..) => { - // Name will be "{closure_env#0}", "{generator_env#0}", or + ty::Closure(def_id, args) | ty::Coroutine(def_id, args, ..) => { + // Name will be "{closure_env#0}", "{coroutine_env#0}", or // "{async_fn_env#0}", etc. // In the case of cpp-like debuginfo, the name additionally gets wrapped inside of // an artificial `enum2$<>` type, as defined in msvc_enum_fallback(). - if cpp_like_debuginfo && t.is_generator() { + if cpp_like_debuginfo && t.is_coroutine() { let ty_and_layout = tcx.layout_of(ParamEnv::reveal_all().and(t)).unwrap(); msvc_enum_fallback( ty_and_layout, &|output, visited| { - push_closure_or_generator_name(tcx, def_id, args, true, output, visited); + push_closure_or_coroutine_name(tcx, def_id, args, true, output, visited); }, output, visited, ); } else { - push_closure_or_generator_name(tcx, def_id, args, qualified, output, visited); + push_closure_or_coroutine_name(tcx, def_id, args, qualified, output, visited); } } // Type parameters from polymorphized functions. @@ -426,7 +426,7 @@ | ty::Placeholder(..) | ty::Alias(..) | ty::Bound(..) - | ty::GeneratorWitness(..) => { + | ty::CoroutineWitness(..) 
=> { bug!( "debuginfo: Trying to create type name for \ unexpected type: {:?}", @@ -558,12 +558,15 @@ push_unqualified_item_name(tcx, def_id, def_key.disambiguated_data, output); } -fn generator_kind_label(generator_kind: Option) -> &'static str { - match generator_kind { - Some(GeneratorKind::Async(AsyncGeneratorKind::Block)) => "async_block", - Some(GeneratorKind::Async(AsyncGeneratorKind::Closure)) => "async_closure", - Some(GeneratorKind::Async(AsyncGeneratorKind::Fn)) => "async_fn", - Some(GeneratorKind::Gen) => "generator", +fn coroutine_kind_label(coroutine_kind: Option) -> &'static str { + match coroutine_kind { + Some(CoroutineKind::Gen(CoroutineSource::Block)) => "gen_block", + Some(CoroutineKind::Gen(CoroutineSource::Closure)) => "gen_closure", + Some(CoroutineKind::Gen(CoroutineSource::Fn)) => "gen_fn", + Some(CoroutineKind::Async(CoroutineSource::Block)) => "async_block", + Some(CoroutineKind::Async(CoroutineSource::Closure)) => "async_closure", + Some(CoroutineKind::Async(CoroutineSource::Fn)) => "async_fn", + Some(CoroutineKind::Coroutine) => "coroutine", None => "closure", } } @@ -592,7 +595,7 @@ output.push_str(tcx.crate_name(def_id.krate).as_str()); } DefPathData::ClosureExpr => { - let label = generator_kind_label(tcx.generator_kind(def_id)); + let label = coroutine_kind_label(tcx.coroutine_kind(def_id)); push_disambiguated_special_name( label, @@ -707,7 +710,7 @@ push_generic_params_internal(tcx, args, def_id, output, &mut visited); } -fn push_closure_or_generator_name<'tcx>( +fn push_closure_or_coroutine_name<'tcx>( tcx: TyCtxt<'tcx>, def_id: DefId, args: GenericArgsRef<'tcx>, @@ -715,10 +718,10 @@ output: &mut String, visited: &mut FxHashSet>, ) { - // Name will be "{closure_env#0}", "{generator_env#0}", or + // Name will be "{closure_env#0}", "{coroutine_env#0}", or // "{async_fn_env#0}", etc. let def_key = tcx.def_key(def_id); - let generator_kind = tcx.generator_kind(def_id); + let coroutine_kind = tcx.coroutine_kind(def_id); if qualified { let parent_def_id = DefId { index: def_key.parent.unwrap(), ..def_id }; @@ -727,7 +730,7 @@ } let mut label = String::with_capacity(20); - write!(&mut label, "{}_env", generator_kind_label(generator_kind)).unwrap(); + write!(&mut label, "{}_env", coroutine_kind_label(coroutine_kind)).unwrap(); push_disambiguated_special_name( &label, @@ -736,7 +739,7 @@ output, ); - // We also need to add the generic arguments of the async fn/generator or + // We also need to add the generic arguments of the async fn/coroutine or // the enclosing function (for closures or async blocks), so that we end // up with a unique name for every instantiation. @@ -745,7 +748,7 @@ let generics = tcx.generics_of(enclosing_fn_def_id); // Truncate the args to the length of the above generics. This will cut off - // anything closure- or generator-specific. + // anything closure- or coroutine-specific. let args = args.truncate_to(tcx, generics); push_generic_params_internal(tcx, args, enclosing_fn_def_id, output, visited); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,5 +1,6 @@ //! 
Errors emitted by codegen_ssa +use crate::assert_module_sources::CguReuse; use crate::back::command::Command; use crate::fluent_generated as fluent; use rustc_errors::{ @@ -17,6 +18,74 @@ use std::process::ExitStatus; #[derive(Diagnostic)] +#[diag(codegen_ssa_incorrect_cgu_reuse_type)] +pub struct IncorrectCguReuseType<'a> { + #[primary_span] + pub span: Span, + pub cgu_user_name: &'a str, + pub actual_reuse: CguReuse, + pub expected_reuse: CguReuse, + pub at_least: u8, +} + +#[derive(Diagnostic)] +#[diag(codegen_ssa_cgu_not_recorded)] +pub struct CguNotRecorded<'a> { + pub cgu_user_name: &'a str, + pub cgu_name: &'a str, +} + +#[derive(Diagnostic)] +#[diag(codegen_ssa_unknown_reuse_kind)] +pub struct UnknownReuseKind { + #[primary_span] + pub span: Span, + pub kind: Symbol, +} + +#[derive(Diagnostic)] +#[diag(codegen_ssa_missing_query_depgraph)] +pub struct MissingQueryDepGraph { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(codegen_ssa_malformed_cgu_name)] +pub struct MalformedCguName { + #[primary_span] + pub span: Span, + pub user_path: String, + pub crate_name: String, +} + +#[derive(Diagnostic)] +#[diag(codegen_ssa_no_module_named)] +pub struct NoModuleNamed<'a> { + #[primary_span] + pub span: Span, + pub user_path: &'a str, + pub cgu_name: Symbol, + pub cgu_names: String, +} + +#[derive(Diagnostic)] +#[diag(codegen_ssa_field_associated_value_expected)] +pub struct FieldAssociatedValueExpected { + #[primary_span] + pub span: Span, + pub name: Symbol, +} + +#[derive(Diagnostic)] +#[diag(codegen_ssa_no_field)] +pub struct NoField { + #[primary_span] + pub span: Span, + pub name: Symbol, +} + +#[derive(Diagnostic)] #[diag(codegen_ssa_lib_def_write_failure)] pub struct LibDefWriteFailure { pub error: Error, @@ -490,10 +559,6 @@ } #[derive(Diagnostic)] -#[diag(codegen_ssa_option_gcc_only)] -pub struct OptionGccOnly; - -#[derive(Diagnostic)] pub enum ExtractBundledLibsError<'a> { #[diag(codegen_ssa_extract_bundled_libs_open_file)] OpenFile { rlib: &'a Path, error: Box }, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,7 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![feature(associated_type_bounds)] #![feature(box_patterns)] #![feature(if_let_guard)] @@ -43,6 +46,7 @@ use std::io; use std::path::{Path, PathBuf}; +pub mod assert_module_sources; pub mod back; pub mod base; pub mod codegen_attrs; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/analyze.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/analyze.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/analyze.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/analyze.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,7 +8,7 @@ use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::mir::traversal; use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}; -use rustc_middle::mir::{self, Location, TerminatorKind}; +use 
rustc_middle::mir::{self, DefLocation, Location, TerminatorKind}; use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf}; pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( @@ -67,21 +67,6 @@ SSA(DefLocation), } -#[derive(Copy, Clone, PartialEq, Eq)] -enum DefLocation { - Argument, - Body(Location), -} - -impl DefLocation { - fn dominates(self, location: Location, dominators: &Dominators) -> bool { - match self { - DefLocation::Argument => true, - DefLocation::Body(def) => def.successor_within_block().dominates(location, dominators), - } - } -} - struct LocalAnalyzer<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> { fx: &'mir FunctionCx<'a, 'tcx, Bx>, dominators: &'mir Dominators, @@ -287,7 +272,7 @@ | TerminatorKind::UnwindResume | TerminatorKind::UnwindTerminate(_) | TerminatorKind::Return - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::Unreachable | TerminatorKind::SwitchInt { .. } | TerminatorKind::Yield { .. } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/block.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/block.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/block.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/block.rs 2023-12-21 16:55:28.000000000 +0000 @@ -17,8 +17,7 @@ use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths}; use rustc_middle::ty::{self, Instance, Ty}; use rustc_session::config::OptLevel; -use rustc_span::source_map::Span; -use rustc_span::{sym, Symbol}; +use rustc_span::{sym, Span, Symbol}; use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode, Reg}; use rustc_target::abi::{self, HasDataLayout, WrappingRange}; use rustc_target::spec::abi::Abi; @@ -213,7 +212,7 @@ self.funclet(fx), ); if fx.mir[self.bb].is_cleanup { - bx.do_not_inline(invokeret); + bx.apply_attrs_to_cleanup_callsite(invokeret); } if let Some((ret_dest, target)) = destination { @@ -228,11 +227,7 @@ } else { let llret = bx.call(fn_ty, fn_attrs, Some(&fn_abi), fn_ptr, &llargs, self.funclet(fx)); if fx.mir[self.bb].is_cleanup { - // Cleanup is always the cold path. Don't inline - // drop glue. Also, when there is a deeply-nested - // struct, there are "symmetry" issues that cause - // exponential inlining - see issue #41696. - bx.do_not_inline(llret); + bx.apply_attrs_to_cleanup_callsite(llret); } if let Some((ret_dest, target)) = destination { @@ -1269,8 +1264,8 @@ fn_span, mergeable_succ(), ), - mir::TerminatorKind::GeneratorDrop | mir::TerminatorKind::Yield { .. } => { - bug!("generator ops in codegen") + mir::TerminatorKind::CoroutineDrop | mir::TerminatorKind::Yield { .. } => { + bug!("coroutine ops in codegen") } mir::TerminatorKind::FalseEdge { .. } | mir::TerminatorKind::FalseUnwind { .. 
} => { bug!("borrowck false edges in codegen") @@ -1453,46 +1448,12 @@ fn get_caller_location( &mut self, bx: &mut Bx, - mut source_info: mir::SourceInfo, + source_info: mir::SourceInfo, ) -> OperandRef<'tcx, Bx::Value> { - let tcx = bx.tcx(); - - let mut span_to_caller_location = |span: Span| { - let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span); - let caller = tcx.sess.source_map().lookup_char_pos(topmost.lo()); - let const_loc = tcx.const_caller_location(( - Symbol::intern(&caller.file.name.prefer_remapped().to_string_lossy()), - caller.line as u32, - caller.col_display as u32 + 1, - )); + self.mir.caller_location_span(source_info, self.caller_location, bx.tcx(), |span: Span| { + let const_loc = bx.tcx().span_as_caller_location(span); OperandRef::from_const(bx, const_loc, bx.tcx().caller_location_ty()) - }; - - // Walk up the `SourceScope`s, in case some of them are from MIR inlining. - // If so, the starting `source_info.span` is in the innermost inlined - // function, and will be replaced with outer callsite spans as long - // as the inlined functions were `#[track_caller]`. - loop { - let scope_data = &self.mir.source_scopes[source_info.scope]; - - if let Some((callee, callsite_span)) = scope_data.inlined { - // Stop inside the most nested non-`#[track_caller]` function, - // before ever reaching its caller (which is irrelevant). - if !callee.def.requires_caller_location(tcx) { - return span_to_caller_location(source_info.span); - } - source_info.span = callsite_span; - } - - // Skip past all of the parents with `inlined: None`. - match scope_data.inlined_parent_scope { - Some(parent) => source_info.scope = parent, - None => break, - } - } - - // No inlined `SourceScope`s, or all of them were `#[track_caller]`. - self.caller_location.unwrap_or_else(|| span_to_caller_location(source_info.span)) + }) } fn get_personality_slot(&mut self, bx: &mut Bx) -> PlaceRef<'tcx, Bx::Value> { @@ -1559,7 +1520,9 @@ } fn terminate_block(&mut self, reason: UnwindTerminateReason) -> Bx::BasicBlock { - if let Some((cached_bb, cached_reason)) = self.terminate_block && reason == cached_reason { + if let Some((cached_bb, cached_reason)) = self.terminate_block + && reason == cached_reason + { return cached_bb; } @@ -1627,7 +1590,7 @@ let fn_ty = bx.fn_decl_backend_type(&fn_abi); let llret = bx.call(fn_ty, None, Some(&fn_abi), fn_ptr, &[], funclet.as_ref()); - bx.do_not_inline(llret); + bx.apply_attrs_to_cleanup_callsite(llret); bx.unreachable(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/constant.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/constant.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/constant.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/constant.rs 2023-12-21 16:55:28.000000000 +0000 @@ -21,6 +21,8 @@ } pub fn eval_mir_constant(&self, constant: &mir::ConstOperand<'tcx>) -> mir::ConstValue<'tcx> { + // `MirUsedCollector` visited all constants before codegen began, so if we got here there + // can be no more constants that fail to evaluate. 
self.monomorphize(constant.const_) .eval(self.cx.tcx(), ty::ParamEnv::reveal_all(), Some(constant.span)) .expect("erroneous constant not captured by required_consts") diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs 2023-12-21 16:55:28.000000000 +0000 @@ -117,9 +117,11 @@ sym::vtable_size => { let size_bound = bx.data_layout().ptr_sized_integer().signed_max() as u128; bx.range_metadata(value, WrappingRange { start: 0, end: size_bound }); - }, + } // Alignment is always nonzero. - sym::vtable_align => bx.range_metadata(value, WrappingRange { start: 1, end: !0 }), + sym::vtable_align => { + bx.range_metadata(value, WrappingRange { start: 1, end: !0 }) + } _ => {} } value @@ -220,9 +222,13 @@ } else { bx.exactudiv(args[0].immediate(), args[1].immediate()) } - }, + } None => { - bx.tcx().sess.emit_err(InvalidMonomorphization::BasicIntegerType { span, name, ty }); + bx.tcx().sess.emit_err(InvalidMonomorphization::BasicIntegerType { + span, + name, + ty, + }); return; } } @@ -238,7 +244,11 @@ _ => bug!(), }, None => { - bx.tcx().sess.emit_err(InvalidMonomorphization::BasicFloatType { span, name, ty: arg_tys[0] }); + bx.tcx().sess.emit_err(InvalidMonomorphization::BasicFloatType { + span, + name, + ty: arg_tys[0], + }); return; } } @@ -246,11 +256,17 @@ sym::float_to_int_unchecked => { if float_type_width(arg_tys[0]).is_none() { - bx.tcx().sess.emit_err(InvalidMonomorphization::FloatToIntUnchecked { span, ty: arg_tys[0] }); + bx.tcx().sess.emit_err(InvalidMonomorphization::FloatToIntUnchecked { + span, + ty: arg_tys[0], + }); return; } let Some((_width, signed)) = int_type_width_signed(ret_ty, bx.tcx()) else { - bx.tcx().sess.emit_err(InvalidMonomorphization::FloatToIntUnchecked { span, ty: ret_ty }); + bx.tcx().sess.emit_err(InvalidMonomorphization::FloatToIntUnchecked { + span, + ty: ret_ty, + }); return; }; if signed { @@ -299,7 +315,11 @@ }; let invalid_monomorphization = |ty| { - bx.tcx().sess.emit_err(InvalidMonomorphization::BasicIntegerType { span, name, ty }); + bx.tcx().sess.emit_err(InvalidMonomorphization::BasicIntegerType { + span, + name, + ty, + }); }; match instruction { @@ -319,7 +339,14 @@ cmp = bx.ptrtoint(cmp, bx.type_isize()); src = bx.ptrtoint(src, bx.type_isize()); } - let pair = bx.atomic_cmpxchg(dst, cmp, src, parse_ordering(bx, success), parse_ordering(bx, failure), weak); + let pair = bx.atomic_cmpxchg( + dst, + cmp, + src, + parse_ordering(bx, success), + parse_ordering(bx, failure), + weak, + ); let val = bx.extract_value(pair, 0); let success = bx.extract_value(pair, 1); let val = bx.from_immediate(val); @@ -345,11 +372,21 @@ // Some platforms do not support atomic operations on pointers, // so we cast to integer first... let llty = bx.type_isize(); - let result = bx.atomic_load(llty, source, parse_ordering(bx, ordering), size); + let result = bx.atomic_load( + llty, + source, + parse_ordering(bx, ordering), + size, + ); // ... 
and then cast the result back to a pointer bx.inttoptr(result, bx.backend_type(layout)) } else { - bx.atomic_load(bx.backend_type(layout), source, parse_ordering(bx, ordering), size) + bx.atomic_load( + bx.backend_type(layout), + source, + parse_ordering(bx, ordering), + size, + ) } } else { return invalid_monomorphization(ty); @@ -375,12 +412,18 @@ } "fence" => { - bx.atomic_fence(parse_ordering(bx, ordering), SynchronizationScope::CrossThread); + bx.atomic_fence( + parse_ordering(bx, ordering), + SynchronizationScope::CrossThread, + ); return; } "singlethreadfence" => { - bx.atomic_fence(parse_ordering(bx, ordering), SynchronizationScope::SingleThread); + bx.atomic_fence( + parse_ordering(bx, ordering), + SynchronizationScope::SingleThread, + ); return; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -209,18 +209,11 @@ caller_location: None, }; - fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut start_bx); + // It may seem like we should iterate over `required_consts` to ensure they all successfully + // evaluate; however, the `MirUsedCollector` already did that during the collection phase of + // monomorphization so we don't have to do it again. - // Rust post-monomorphization checks; we later rely on them. - if let Err(err) = - mir.post_mono_checks(cx.tcx(), ty::ParamEnv::reveal_all(), |c| Ok(fx.monomorphize(c))) - { - err.emit_err(cx.tcx()); - // This IR shouldn't ever be emitted, but let's try to guard against any of this code - // ever running. 
- start_bx.abort(); - return; - } + fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut start_bx); let memory_locals = analyze::non_ssa_locals(&fx); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/rvalue.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/rvalue.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/rvalue.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/mir/rvalue.rs 2023-12-21 16:55:28.000000000 +0000 @@ -13,7 +13,7 @@ use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, adjustment::PointerCoercion, Instance, Ty, TyCtxt}; use rustc_session::config::OptLevel; -use rustc_span::source_map::{Span, DUMMY_SP}; +use rustc_span::{Span, DUMMY_SP}; use rustc_target::abi::{self, FIRST_VARIANT}; impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { @@ -239,17 +239,17 @@ }; if let OperandValueKind::Immediate(out_scalar) = cast_kind && in_scalar.size(self.cx) == out_scalar.size(self.cx) - { - let operand_bty = bx.backend_type(operand.layout); - let cast_bty = bx.backend_type(cast); - Some(OperandValue::Immediate(self.transmute_immediate( - bx, - imm, - in_scalar, - operand_bty, - out_scalar, - cast_bty, - ))) + { + let operand_bty = bx.backend_type(operand.layout); + let cast_bty = bx.backend_type(cast); + Some(OperandValue::Immediate(self.transmute_immediate( + bx, + imm, + in_scalar, + operand_bty, + out_scalar, + cast_bty, + ))) } else { None } @@ -680,7 +680,7 @@ layout.align.abi.bytes() } mir::NullOp::OffsetOf(fields) => { - layout.offset_of_subfield(bx.cx(), fields.iter().map(|f| f.index())).bytes() + layout.offset_of_subfield(bx.cx(), fields.iter()).bytes() } }; let val = bx.cx().const_usize(val); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/target_features.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/target_features.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/target_features.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/target_features.rs 2023-12-21 16:55:28.000000000 +0000 @@ -23,6 +23,15 @@ // check whether they're named already elsewhere in rust // e.g. in stdarch and whether the given name matches LLVM's // if it doesn't, to_llvm_feature in llvm_util in rustc_codegen_llvm needs to be adapted +// +// When adding a new feature, be particularly mindful of features that affect function ABIs. Those +// need to be treated very carefully to avoid introducing unsoundness! This often affects features +// that enable/disable hardfloat support (see https://github.com/rust-lang/rust/issues/116344 for an +// example of this going wrong), but features enabling new SIMD registers are also a concern (see +// https://github.com/rust-lang/rust/issues/116558 for an example of this going wrong). +// +// Stabilizing a target feature (setting the 2nd component of the pair to `None`) requires t-lang +// approval. 
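The target_features.rs tables that follow pair every permitted feature name with the feature gate guarding it, and the RISC-V hunk below flips a number of entries from Some(sym::riscv_target_feature) to None, i.e. stabilizes them, while a new LoongArch table is added. A small self-contained illustration of how such an allow-list answers lookups; the entries are an invented subset and plain &str stands in for the Symbol gate names:

// `None` in the second position means the feature is usable on stable.
const ALLOWED_FEATURES: &[(&str, Option<&str>)] = &[
    ("a", None),                         // stabilized: no gate required
    ("v", Some("riscv_target_feature")), // still behind a feature gate
];

/// `None` for an unknown feature, `Some(None)` for a stable one, and
/// `Some(Some(gate))` when the named gate must be enabled.
fn lookup_gate(feature: &str) -> Option<Option<&'static str>> {
    ALLOWED_FEATURES.iter().find(|&&(name, _)| name == feature).map(|&(_, gate)| gate)
}

fn main() {
    assert_eq!(lookup_gate("a"), Some(None));
    assert_eq!(lookup_gate("v"), Some(Some("riscv_target_feature")));
    assert_eq!(lookup_gate("does-not-exist"), None);
}
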
const ARM_ALLOWED_FEATURES: &[(&str, Option)] = &[ // tidy-alphabetical-start @@ -244,38 +253,38 @@ const RISCV_ALLOWED_FEATURES: &[(&str, Option)] = &[ // tidy-alphabetical-start - ("a", Some(sym::riscv_target_feature)), - ("c", Some(sym::riscv_target_feature)), + ("a", None), + ("c", None), ("d", Some(sym::riscv_target_feature)), ("e", Some(sym::riscv_target_feature)), ("f", Some(sym::riscv_target_feature)), - ("m", Some(sym::riscv_target_feature)), + ("m", None), ("relax", Some(sym::riscv_target_feature)), ("unaligned-scalar-mem", Some(sym::riscv_target_feature)), ("v", Some(sym::riscv_target_feature)), - ("zba", Some(sym::riscv_target_feature)), - ("zbb", Some(sym::riscv_target_feature)), - ("zbc", Some(sym::riscv_target_feature)), - ("zbkb", Some(sym::riscv_target_feature)), - ("zbkc", Some(sym::riscv_target_feature)), - ("zbkx", Some(sym::riscv_target_feature)), - ("zbs", Some(sym::riscv_target_feature)), + ("zba", None), + ("zbb", None), + ("zbc", None), + ("zbkb", None), + ("zbkc", None), + ("zbkx", None), + ("zbs", None), ("zdinx", Some(sym::riscv_target_feature)), ("zfh", Some(sym::riscv_target_feature)), ("zfhmin", Some(sym::riscv_target_feature)), ("zfinx", Some(sym::riscv_target_feature)), ("zhinx", Some(sym::riscv_target_feature)), ("zhinxmin", Some(sym::riscv_target_feature)), - ("zk", Some(sym::riscv_target_feature)), - ("zkn", Some(sym::riscv_target_feature)), - ("zknd", Some(sym::riscv_target_feature)), - ("zkne", Some(sym::riscv_target_feature)), - ("zknh", Some(sym::riscv_target_feature)), - ("zkr", Some(sym::riscv_target_feature)), - ("zks", Some(sym::riscv_target_feature)), - ("zksed", Some(sym::riscv_target_feature)), - ("zksh", Some(sym::riscv_target_feature)), - ("zkt", Some(sym::riscv_target_feature)), + ("zk", None), + ("zkn", None), + ("zknd", None), + ("zkne", None), + ("zknh", None), + ("zkr", None), + ("zks", None), + ("zksed", None), + ("zksh", None), + ("zkt", None), // tidy-alphabetical-end ]; @@ -342,6 +351,19 @@ ("hard-float-abi", Some(sym::csky_target_feature)), // tidy-alphabetical-end ]; + +const LOONGARCH_ALLOWED_FEATURES: &[(&str, Option)] = &[ + // tidy-alphabetical-start + ("d", Some(sym::loongarch_target_feature)), + ("f", Some(sym::loongarch_target_feature)), + ("lasx", Some(sym::loongarch_target_feature)), + ("lbt", Some(sym::loongarch_target_feature)), + ("lsx", Some(sym::loongarch_target_feature)), + ("lvz", Some(sym::loongarch_target_feature)), + ("ual", Some(sym::loongarch_target_feature)), + // tidy-alphabetical-end +]; + /// When rustdoc is running, provide a list of all known features so that all their respective /// primitives may be documented. 
/// @@ -358,6 +380,7 @@ .chain(WASM_ALLOWED_FEATURES.iter()) .chain(BPF_ALLOWED_FEATURES.iter()) .chain(CSKY_ALLOWED_FEATURES) + .chain(LOONGARCH_ALLOWED_FEATURES) .cloned() } @@ -373,6 +396,7 @@ "wasm32" | "wasm64" => WASM_ALLOWED_FEATURES, "bpf" => BPF_ALLOWED_FEATURES, "csky" => CSKY_ALLOWED_FEATURES, + "loongarch64" => LOONGARCH_ALLOWED_FEATURES, _ => &[], } } @@ -445,6 +469,7 @@ Some(sym::bpf_target_feature) => rust_features.bpf_target_feature, Some(sym::aarch64_ver_target_feature) => rust_features.aarch64_ver_target_feature, Some(sym::csky_target_feature) => rust_features.csky_target_feature, + Some(sym::loongarch_target_feature) => rust_features.loongarch_target_feature, Some(name) => bug!("unknown target feature gate {}", name), None => true, }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/backend.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/backend.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/backend.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/backend.rs 2023-12-21 16:55:28.000000000 +0000 @@ -104,11 +104,7 @@ outputs: &OutputFilenames, ) -> Result<(CodegenResults, FxIndexMap), ErrorGuaranteed>; - /// This is called on the returned `Box` from `join_codegen` - /// - /// # Panics - /// - /// Panics when the passed `Box` was not returned by `join_codegen`. + /// This is called on the returned `CodegenResults` from `join_codegen` fn link( &self, sess: &Session, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/builder.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/builder.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/builder.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/builder.rs 2023-12-21 16:55:28.000000000 +0000 @@ -332,5 +332,5 @@ ) -> Self::Value; fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value; - fn do_not_inline(&mut self, llret: Self::Value); + fn apply_attrs_to_cleanup_callsite(&mut self, llret: Self::Value); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/type_.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/type_.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/type_.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_codegen_ssa/src/traits/type_.rs 2023-12-21 16:55:28.000000000 +0000 @@ -30,7 +30,7 @@ fn type_ptr_ext(&self, address_space: AddressSpace) -> Self::Type; fn element_type(&self, ty: Self::Type) -> Self::Type; - /// Returns the number of elements in `self` if it is a LLVM vector type. + /// Returns the number of elements in `self` if it is an LLVM vector type. 
fn vector_length(&self, ty: Self::Type) -> usize; fn float_width(&self, ty: Self::Type) -> usize; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,25 +3,25 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] -tracing = "0.1" +# tidy-alphabetical-start either = "1" rustc_apfloat = "0.2.0" rustc_ast = { path = "../rustc_ast" } rustc_attr = { path = "../rustc_attr" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } -rustc_hir = { path = "../rustc_hir" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_infer = { path = "../rustc_infer" } rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } rustc_mir_dataflow = { path = "../rustc_mir_dataflow" } rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } -rustc_span = { path = "../rustc_span" } rustc_type_ir = { path = "../rustc_type_ir" } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -1,11 +1,15 @@ const_eval_address_space_full = there are no more free addresses in the address space -const_eval_align_check_failed = accessing memory with alignment {$has}, but alignment {$required} is required + const_eval_align_offset_invalid_align = `align_offset` called with non-power-of-two align: {$target_align} const_eval_alignment_check_failed = - accessing memory with alignment {$has}, but alignment {$required} is required + {$msg -> + [AccessedPtr] accessing memory + *[other] accessing memory based on pointer + } with alignment {$has}, but alignment {$required} is required + const_eval_already_reported = an error has already been reported elsewhere (this should not usually be printed) const_eval_assume_false = @@ -61,7 +65,6 @@ .target_note = deref defined here const_eval_deref_function_pointer = accessing {$allocation} which contains a function -const_eval_deref_test = dereferencing pointer failed const_eval_deref_vtable_pointer = accessing {$allocation} which contains a vtable const_eval_different_allocations = diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/eval_queries.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/eval_queries.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/eval_queries.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/eval_queries.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,21 +1,22 @@ -use crate::const_eval::CheckAlignment; -use crate::errors::ConstEvalError; +use std::mem; use either::{Left, Right}; use rustc_hir::def::DefKind; -use 
rustc_middle::mir::interpret::{ErrorHandled, InterpErrorInfo}; +use rustc_middle::mir::interpret::{AllocId, ErrorHandled, InterpErrorInfo}; use rustc_middle::mir::pretty::write_allocation_bytes; use rustc_middle::mir::{self, ConstAlloc, ConstValue}; use rustc_middle::traits::Reveal; use rustc_middle::ty::layout::LayoutOf; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::{self, TyCtxt}; -use rustc_span::source_map::Span; +use rustc_span::Span; use rustc_target::abi::{self, Abi}; use super::{CanAccessStatics, CompileTimeEvalContext, CompileTimeInterpreter}; +use crate::const_eval::CheckAlignment; use crate::errors; +use crate::errors::ConstEvalError; use crate::interpret::eval_nullary_intrinsic; use crate::interpret::{ intern_const_alloc_recursive, CtfeValidationMode, GlobalId, Immediate, InternKind, InterpCx, @@ -74,9 +75,9 @@ None => InternKind::Constant, } }; - ecx.machine.check_alignment = CheckAlignment::No; // interning doesn't need to respect alignment + let check_alignment = mem::replace(&mut ecx.machine.check_alignment, CheckAlignment::No); // interning doesn't need to respect alignment intern_const_alloc_recursive(ecx, intern_kind, &ret)?; - // we leave alignment checks off, since this `ecx` will not be used for further evaluation anyway + ecx.machine.check_alignment = check_alignment; debug!("eval_body_using_ecx done: {:?}", ret); Ok(ret) @@ -89,7 +90,7 @@ /// that inform us about the generic bounds of the constant. E.g., using an associated constant /// of a function's generic parameter will require knowledge about the bounds on the generic /// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument. -pub(super) fn mk_eval_cx<'mir, 'tcx>( +pub(crate) fn mk_eval_cx<'mir, 'tcx>( tcx: TyCtxt<'tcx>, root_span: Span, param_env: ty::ParamEnv<'tcx>, @@ -105,10 +106,16 @@ } /// This function converts an interpreter value into a MIR constant. +/// +/// The `for_diagnostics` flag turns the usual rules for returning `ConstValue::Scalar` into a +/// best-effort attempt. This is not okay for use in const-eval sine it breaks invariants rustc +/// relies on, but it is okay for diagnostics which will just give up gracefully when they +/// encounter an `Indirect` they cannot handle. #[instrument(skip(ecx), level = "debug")] pub(super) fn op_to_const<'tcx>( ecx: &CompileTimeEvalContext<'_, 'tcx>, op: &OpTy<'tcx>, + for_diagnostics: bool, ) -> ConstValue<'tcx> { // Handle ZST consistently and early. if op.layout.is_zst() { @@ -132,7 +139,13 @@ _ => false, }; let immediate = if force_as_immediate { - Right(ecx.read_immediate(op).expect("normalization works on validated constants")) + match ecx.read_immediate(op) { + Ok(imm) => Right(imm), + Err(err) if !for_diagnostics => { + panic!("normalization works on validated constants: {err:?}") + } + _ => op.as_mplace_or_imm(), + } } else { op.as_mplace_or_imm() }; @@ -204,7 +217,7 @@ ); // Turn this into a proper constant. - op_to_const(&ecx, &mplace.into()) + op_to_const(&ecx, &mplace.into(), /* for diagnostics */ false) } #[instrument(skip(tcx), level = "debug")] @@ -284,22 +297,22 @@ let def = cid.instance.def.def_id(); let is_static = tcx.is_static(def); - let mut ecx = InterpCx::new( + let ecx = InterpCx::new( tcx, tcx.def_span(def), key.param_env, // Statics (and promoteds inside statics) may access other statics, because unlike consts // they do not have to behave "as if" they were evaluated at runtime. 
- CompileTimeInterpreter::new( - CanAccessStatics::from(is_static), - if tcx.sess.opts.unstable_opts.extra_const_ub_checks { - CheckAlignment::Error - } else { - CheckAlignment::FutureIncompat - }, - ), + CompileTimeInterpreter::new(CanAccessStatics::from(is_static), CheckAlignment::Error), ); + eval_in_interpreter(ecx, cid, is_static) +} +pub fn eval_in_interpreter<'mir, 'tcx>( + mut ecx: InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>, + cid: GlobalId<'tcx>, + is_static: bool, +) -> ::rustc_middle::mir::interpret::EvalToAllocationRawResult<'tcx> { let res = ecx.load_mir(cid.instance.def, cid.promoted); match res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, &body)) { Err(error) => { @@ -312,7 +325,7 @@ // If the current item has generics, we'd like to enrich the message with the // instance and its args: to show the actual compile-time values, in addition to // the expression, leading to the const eval error. - let instance = &key.value.instance; + let instance = &cid.instance; if !instance.args.is_empty() { let instance = with_no_trimmed_paths!(instance.to_string()); ("const_with_path", instance) @@ -337,56 +350,14 @@ Ok(mplace) => { // Since evaluation had no errors, validate the resulting constant. // This is a separate `try` block to provide more targeted error reporting. - let validation: Result<_, InterpErrorInfo<'_>> = try { - let mut ref_tracking = RefTracking::new(mplace.clone()); - let mut inner = false; - while let Some((mplace, path)) = ref_tracking.todo.pop() { - let mode = match tcx.static_mutability(cid.instance.def_id()) { - Some(_) if cid.promoted.is_some() => { - // Promoteds in statics are allowed to point to statics. - CtfeValidationMode::Const { inner, allow_static_ptrs: true } - } - Some(_) => CtfeValidationMode::Regular, // a `static` - None => CtfeValidationMode::Const { inner, allow_static_ptrs: false }, - }; - ecx.const_validate_operand(&mplace.into(), path, &mut ref_tracking, mode)?; - inner = true; - } - }; + let validation = + const_validate_mplace(&ecx, &mplace, is_static, cid.promoted.is_some()); + let alloc_id = mplace.ptr().provenance.unwrap(); // Validation failed, report an error. if let Err(error) = validation { - let (error, backtrace) = error.into_parts(); - backtrace.print_backtrace(); - - let ub_note = matches!(error, InterpError::UndefinedBehavior(_)).then(|| {}); - - let alloc = ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner(); - let mut bytes = String::new(); - if alloc.size() != abi::Size::ZERO { - bytes = "\n".into(); - // FIXME(translation) there might be pieces that are translatable. 
- write_allocation_bytes(*ecx.tcx, alloc, &mut bytes, " ").unwrap(); - } - let raw_bytes = errors::RawBytesNote { - size: alloc.size().bytes(), - align: alloc.align.bytes(), - bytes, - }; - - Err(super::report( - *ecx.tcx, - error, - None, - || super::get_span_and_frames(&ecx), - move |span, frames| errors::UndefinedBehavior { - span, - ub_note, - frames, - raw_bytes, - }, - )) + Err(const_report_error(&ecx, error, alloc_id)) } else { // Convert to raw constant Ok(ConstAlloc { alloc_id, ty: mplace.layout.ty }) @@ -394,3 +365,61 @@ } } } + +#[inline(always)] +pub fn const_validate_mplace<'mir, 'tcx>( + ecx: &InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>, + mplace: &MPlaceTy<'tcx>, + is_static: bool, + is_promoted: bool, +) -> InterpResult<'tcx> { + let mut ref_tracking = RefTracking::new(mplace.clone()); + let mut inner = false; + while let Some((mplace, path)) = ref_tracking.todo.pop() { + let mode = if is_static { + if is_promoted { + // Promoteds in statics are allowed to point to statics. + CtfeValidationMode::Const { inner, allow_static_ptrs: true } + } else { + // a `static` + CtfeValidationMode::Regular + } + } else { + CtfeValidationMode::Const { inner, allow_static_ptrs: false } + }; + ecx.const_validate_operand(&mplace.into(), path, &mut ref_tracking, mode)?; + inner = true; + } + + Ok(()) +} + +#[inline(always)] +pub fn const_report_error<'mir, 'tcx>( + ecx: &InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>, + error: InterpErrorInfo<'tcx>, + alloc_id: AllocId, +) -> ErrorHandled { + let (error, backtrace) = error.into_parts(); + backtrace.print_backtrace(); + + let ub_note = matches!(error, InterpError::UndefinedBehavior(_)).then(|| {}); + + let alloc = ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner(); + let mut bytes = String::new(); + if alloc.size() != abi::Size::ZERO { + bytes = "\n".into(); + // FIXME(translation) there might be pieces that are translatable. + write_allocation_bytes(*ecx.tcx, alloc, &mut bytes, " ").unwrap(); + } + let raw_bytes = + errors::RawBytesNote { size: alloc.size().bytes(), align: alloc.align.bytes(), bytes }; + + crate::const_eval::report( + *ecx.tcx, + error, + None, + || crate::const_eval::get_span_and_frames(ecx), + move |span, frames| errors::UndefinedBehavior { span, ub_note, frames, raw_bytes }, + ) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/fn_queries.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/fn_queries.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/fn_queries.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/fn_queries.rs 2023-12-21 16:55:28.000000000 +0000 @@ -39,8 +39,13 @@ hir::Node::Ctor(_) | hir::Node::AnonConst(_) | hir::Node::ConstBlock(_) - | hir::Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Const(..), .. }) => hir::Constness::Const, - hir::Node::Item(hir::Item { kind: hir::ItemKind::Impl(_), .. }) => tcx.generics_of(def_id).host_effect_index.map_or(hir::Constness::NotConst, |_| hir::Constness::Const), + | hir::Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Const(..), .. }) => { + hir::Constness::Const + } + hir::Node::Item(hir::Item { kind: hir::ItemKind::Impl(_), .. }) => tcx + .generics_of(def_id) + .host_effect_index + .map_or(hir::Constness::NotConst, |_| hir::Constness::Const), hir::Node::ForeignItem(hir::ForeignItem { kind: hir::ForeignItemKind::Fn(..), .. 
}) => { // Intrinsics use `rustc_const_{un,}stable` attributes to indicate constness. All other // foreign items cannot be evaluated at compile-time. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/machine.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/machine.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/machine.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/machine.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,10 +1,10 @@ use rustc_hir::def::DefKind; -use rustc_hir::{LangItem, CRATE_HIR_ID}; +use rustc_hir::LangItem; use rustc_middle::mir; use rustc_middle::mir::interpret::PointerArithmetic; use rustc_middle::ty::layout::{FnAbiOf, TyAndLayout}; use rustc_middle::ty::{self, TyCtxt}; -use rustc_session::lint::builtin::INVALID_ALIGNMENT; +use rustc_span::Span; use std::borrow::Borrow; use std::hash::Hash; use std::ops::ControlFlow; @@ -21,11 +21,11 @@ use rustc_target::spec::abi::Abi as CallAbi; use crate::errors::{LongRunning, LongRunningWarn}; +use crate::fluent_generated as fluent; use crate::interpret::{ self, compile_time_machine, AllocId, ConstAllocation, FnArg, FnVal, Frame, ImmTy, InterpCx, InterpResult, OpTy, PlaceTy, Pointer, Scalar, }; -use crate::{errors, fluent_generated as fluent}; use super::error::*; @@ -65,22 +65,11 @@ #[derive(Copy, Clone)] pub enum CheckAlignment { - /// Ignore alignment when following relocations. + /// Ignore all alignment requirements. /// This is mainly used in interning. No, /// Hard error when dereferencing a misaligned pointer. Error, - /// Emit a future incompat lint when dereferencing a misaligned pointer. - FutureIncompat, -} - -impl CheckAlignment { - pub fn should_check(&self) -> bool { - match self { - CheckAlignment::No => false, - CheckAlignment::Error | CheckAlignment::FutureIncompat => true, - } - } } #[derive(Copy, Clone, PartialEq)] @@ -193,6 +182,24 @@ } impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> { + fn location_triple_for_span(&self, span: Span) -> (Symbol, u32, u32) { + let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span); + let caller = self.tcx.sess.source_map().lookup_char_pos(topmost.lo()); + + use rustc_session::{config::RemapPathScopeComponents, RemapFileNameExt}; + ( + Symbol::intern( + &caller + .file + .name + .for_scope(&self.tcx.sess, RemapPathScopeComponents::DIAGNOSTICS) + .to_string_lossy(), + ), + u32::try_from(caller.line).unwrap(), + u32::try_from(caller.col_display).unwrap().checked_add(1).unwrap(), + ) + } + /// "Intercept" a function call, because we have something special to do for it. /// All `#[rustc_do_not_const_check]` functions should be hooked here. 
/// If this returns `Some` function, which may be `instance` or a different function with @@ -207,7 +214,7 @@ ) -> InterpResult<'tcx, Option>> { let def_id = instance.def_id(); - if Some(def_id) == self.tcx.lang_items().panic_display() + if self.tcx.has_attr(def_id, sym::rustc_const_panic_str) || Some(def_id) == self.tcx.lang_items().begin_panic_fn() { let args = self.copy_fn_args(args)?; @@ -358,8 +365,8 @@ const PANIC_ON_ALLOC_FAIL: bool = false; // will be raised as a proper error #[inline(always)] - fn enforce_alignment(ecx: &InterpCx<'mir, 'tcx, Self>) -> CheckAlignment { - ecx.machine.check_alignment + fn enforce_alignment(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool { + matches!(ecx.machine.check_alignment, CheckAlignment::Error) } #[inline(always)] @@ -367,39 +374,6 @@ ecx.tcx.sess.opts.unstable_opts.extra_const_ub_checks || layout.abi.is_uninhabited() } - fn alignment_check_failed( - ecx: &InterpCx<'mir, 'tcx, Self>, - has: Align, - required: Align, - check: CheckAlignment, - ) -> InterpResult<'tcx, ()> { - let err = err_ub!(AlignmentCheckFailed { has, required }).into(); - match check { - CheckAlignment::Error => Err(err), - CheckAlignment::No => span_bug!( - ecx.cur_span(), - "`alignment_check_failed` called when no alignment check requested" - ), - CheckAlignment::FutureIncompat => { - let (_, backtrace) = err.into_parts(); - backtrace.print_backtrace(); - let (span, frames) = super::get_span_and_frames(&ecx); - - ecx.tcx.emit_spanned_lint( - INVALID_ALIGNMENT, - ecx.stack().iter().find_map(|frame| frame.lint_root()).unwrap_or(CRATE_HIR_ID), - span, - errors::AlignmentCheckFailed { - has: has.bytes(), - required: required.bytes(), - frames, - }, - ); - Ok(()) - } - } - } - fn load_mir( ecx: &InterpCx<'mir, 'tcx, Self>, instance: ty::InstanceDef<'tcx>, @@ -579,8 +553,8 @@ OverflowNeg(op) => OverflowNeg(eval_to_int(op)?), DivisionByZero(op) => DivisionByZero(eval_to_int(op)?), RemainderByZero(op) => RemainderByZero(eval_to_int(op)?), - ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind), - ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind), + ResumedAfterReturn(coroutine_kind) => ResumedAfterReturn(*coroutine_kind), + ResumedAfterPanic(coroutine_kind) => ResumedAfterPanic(*coroutine_kind), MisalignedPointerDereference { ref required, ref found } => { MisalignedPointerDereference { required: eval_to_int(required)?, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,12 +1,12 @@ // Not in interpret to make sure we do not use private implementation details use crate::errors::MaxNumNodesInConstErr; -use crate::interpret::{intern_const_alloc_recursive, InternKind, InterpCx, Scalar}; +use crate::interpret::InterpCx; use rustc_middle::mir; use rustc_middle::mir::interpret::{EvalToValTreeResult, GlobalId}; use rustc_middle::query::TyCtxtAt; use rustc_middle::ty::{self, Ty, TyCtxt}; -use rustc_span::{source_map::DUMMY_SP, symbol::Symbol}; +use rustc_span::DUMMY_SP; mod error; mod eval_queries; @@ -20,20 +20,6 @@ pub use machine::*; pub(crate) use valtrees::{const_to_valtree_inner, valtree_to_const_value}; -pub(crate) fn const_caller_location( - tcx: TyCtxt<'_>, - (file, line, col): (Symbol, 
u32, u32), -) -> mir::ConstValue<'_> { - trace!("const_caller_location: {}:{}:{}", file, line, col); - let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), CanAccessStatics::No); - - let loc_place = ecx.alloc_caller_location(file, line, col); - if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() { - bug!("intern_const_alloc_recursive should not error in this case") - } - mir::ConstValue::Scalar(Scalar::from_maybe_pointer(loc_place.ptr(), &tcx)) -} - // We forbid type-level constants that contain more than `VALTREE_MAX_NODES` nodes. const VALTREE_MAX_NODES: usize = 100000; @@ -86,7 +72,7 @@ } #[instrument(skip(tcx), level = "debug")] -pub(crate) fn try_destructure_mir_constant_for_diagnostics<'tcx>( +pub(crate) fn try_destructure_mir_constant_for_user_output<'tcx>( tcx: TyCtxtAt<'tcx>, val: mir::ConstValue<'tcx>, ty: Ty<'tcx>, @@ -113,7 +99,7 @@ let fields_iter = (0..field_count) .map(|i| { let field_op = ecx.project_field(&down, i).ok()?; - let val = op_to_const(&ecx, &field_op); + let val = op_to_const(&ecx, &field_op, /* for diagnostics */ true); Some((val, field_op.layout.ty)) }) .collect::>>()?; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/valtrees.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/valtrees.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/valtrees.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/const_eval/valtrees.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,7 +10,7 @@ use rustc_middle::mir; use rustc_middle::ty::layout::{LayoutCx, LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt}; -use rustc_span::source_map::DUMMY_SP; +use rustc_span::DUMMY_SP; use rustc_target::abi::VariantIdx; #[instrument(skip(ecx), level = "debug")] @@ -97,11 +97,27 @@ Ok(ty::ValTree::Leaf(val.assert_int())) } - // Raw pointers are not allowed in type level constants, as we cannot properly test them for - // equality at compile-time (see `ptr_guaranteed_cmp`). + ty::RawPtr(_) => { + // Not all raw pointers are allowed, as we cannot properly test them for + // equality at compile-time (see `ptr_guaranteed_cmp`). + // However we allow those that are just integers in disguise. + // (We could allow wide raw pointers where both sides are integers in the future, + // but for now we reject them.) + let Ok(val) = ecx.read_scalar(place) else { + return Err(ValTreeCreationError::Other); + }; + // We are in the CTFE machine, so ptr-to-int casts will fail. + // This can only be `Ok` if `val` already is an integer. + let Ok(val) = val.try_to_int() else { + return Err(ValTreeCreationError::Other); + }; + // It's just a ScalarInt! + Ok(ty::ValTree::Leaf(val)) + } + // Technically we could allow function pointers (represented as `ty::Instance`), but this is not guaranteed to // agree with runtime equality tests. - ty::FnPtr(_) | ty::RawPtr(_) => Err(ValTreeCreationError::NonSupportedType), + ty::FnPtr(_) => Err(ValTreeCreationError::NonSupportedType), ty::Ref(_, _, _) => { let Ok(derefd_place)= ecx.deref_pointer(place) else { @@ -151,8 +167,8 @@ | ty::Infer(_) // FIXME(oli-obk): we can probably encode closures just like structs | ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) => Err(ValTreeCreationError::NonSupportedType), + | ty::Coroutine(..) + | ty::CoroutineWitness(..) 
=> Err(ValTreeCreationError::NonSupportedType), } } @@ -222,17 +238,19 @@ assert!(valtree.unwrap_branch().is_empty()); mir::ConstValue::ZeroSized } - ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => match valtree { - ty::ValTree::Leaf(scalar_int) => mir::ConstValue::Scalar(Scalar::Int(scalar_int)), - ty::ValTree::Branch(_) => bug!( - "ValTrees for Bool, Int, Uint, Float or Char should have the form ValTree::Leaf" - ), - }, + ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char | ty::RawPtr(_) => { + match valtree { + ty::ValTree::Leaf(scalar_int) => mir::ConstValue::Scalar(Scalar::Int(scalar_int)), + ty::ValTree::Branch(_) => bug!( + "ValTrees for Bool, Int, Uint, Float, Char or RawPtr should have the form ValTree::Leaf" + ), + } + } ty::Ref(_, inner_ty, _) => { let mut ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, CanAccessStatics::No); let imm = valtree_to_ref(&mut ecx, valtree, *inner_ty); let imm = ImmTy::from_immediate(imm, tcx.layout_of(param_env_ty).unwrap()); - op_to_const(&ecx, &imm.into()) + op_to_const(&ecx, &imm.into(), /* for diagnostics */ false) } ty::Tuple(_) | ty::Array(_, _) | ty::Adt(..) => { let layout = tcx.layout_of(param_env_ty).unwrap(); @@ -265,7 +283,7 @@ dump_place(&ecx, &place); intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &place).unwrap(); - op_to_const(&ecx, &place.into()) + op_to_const(&ecx, &place.into(), /* for diagnostics */ false) } ty::Never | ty::Error(_) @@ -278,10 +296,9 @@ | ty::Placeholder(..) | ty::Infer(_) | ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::FnPtr(_) - | ty::RawPtr(_) | ty::Str | ty::Slice(_) | ty::Dynamic(..) => bug!("no ValTree should have been created for type {:?}", ty.kind()), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,8 +5,9 @@ use rustc_hir::ConstContext; use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; use rustc_middle::mir::interpret::{ - CheckInAllocMsg, ExpectedKind, InterpError, InvalidMetaKind, InvalidProgramInfo, PointerKind, - ResourceExhaustionInfo, UndefinedBehaviorInfo, UnsupportedOpInfo, ValidationErrorInfo, + CheckInAllocMsg, ExpectedKind, InterpError, InvalidMetaKind, InvalidProgramInfo, Misalignment, + PointerKind, ResourceExhaustionInfo, UndefinedBehaviorInfo, UnsupportedOpInfo, + ValidationErrorInfo, }; use rustc_middle::ty::{self, Ty}; use rustc_span::Span; @@ -389,15 +390,6 @@ pub dropped_at: Option, } -#[derive(LintDiagnostic)] -#[diag(const_eval_align_check_failed)] -pub struct AlignmentCheckFailed { - pub has: u64, - pub required: u64, - #[subdiagnostic] - pub frames: Vec, -} - #[derive(Diagnostic)] #[diag(const_eval_error, code = "E0080")] pub struct ConstEvalError { @@ -459,7 +451,6 @@ use crate::fluent_generated::*; let msg = match msg { - CheckInAllocMsg::DerefTest => const_eval_deref_test, CheckInAllocMsg::MemoryAccessTest => const_eval_memory_access_test, CheckInAllocMsg::PointerArithmeticTest => const_eval_pointer_arithmetic_test, CheckInAllocMsg::OffsetFromTest => const_eval_offset_from_test, @@ -568,9 +559,10 @@ builder.set_arg("bad_pointer_message", bad_pointer_message(msg, handler)); } - AlignmentCheckFailed { required, has } => { + 
AlignmentCheckFailed(Misalignment { required, has }, msg) => { builder.set_arg("required", required.bytes()); builder.set_arg("has", has.bytes()); + builder.set_arg("msg", format!("{msg:?}")); } WriteToReadOnly(alloc) | DerefFunctionPointer(alloc) | DerefVTablePointer(alloc) => { builder.set_arg("allocation", alloc); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/cast.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/cast.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/cast.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/cast.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,7 +8,7 @@ use rustc_middle::ty::layout::{IntegerExt, LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, FloatTy, Ty, TypeAndMut}; use rustc_target::abi::Integer; -use rustc_type_ir::sty::TyKind::*; +use rustc_type_ir::TyKind::*; use super::{ util::ensure_monomorphic_enough, FnVal, ImmTy, Immediate, InterpCx, Machine, OpTy, PlaceTy, @@ -145,16 +145,12 @@ assert!(dest.layout.is_sized()); assert_eq!(cast_ty, dest.layout.ty); // we otherwise ignore `cast_ty` enirely... if src.layout.size != dest.layout.size { - let src_bytes = src.layout.size.bytes(); - let dest_bytes = dest.layout.size.bytes(); - let src_ty = format!("{}", src.layout.ty); - let dest_ty = format!("{}", dest.layout.ty); throw_ub_custom!( fluent::const_eval_invalid_transmute, - src_bytes = src_bytes, - dest_bytes = dest_bytes, - src = src_ty, - dest = dest_ty, + src_bytes = src.layout.size.bytes(), + dest_bytes = dest.layout.size.bytes(), + src = src.layout.ty, + dest = dest.layout.ty, ); } @@ -185,7 +181,7 @@ src: &ImmTy<'tcx, M::Provenance>, cast_to: TyAndLayout<'tcx>, ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> { - use rustc_type_ir::sty::TyKind::*; + use rustc_type_ir::TyKind::*; let val = match src.layout.ty.kind() { // Floating point @@ -310,7 +306,22 @@ where F: Float + Into> + FloatConvert + FloatConvert, { - use rustc_type_ir::sty::TyKind::*; + use rustc_type_ir::TyKind::*; + + fn adjust_nan< + 'mir, + 'tcx: 'mir, + M: Machine<'mir, 'tcx>, + F1: rustc_apfloat::Float + FloatConvert, + F2: rustc_apfloat::Float, + >( + ecx: &InterpCx<'mir, 'tcx, M>, + f1: F1, + f2: F2, + ) -> F2 { + if f2.is_nan() { M::generate_nan(ecx, &[f1]) } else { f2 } + } + match *dest_ty.kind() { // float -> uint Uint(t) => { @@ -330,9 +341,13 @@ Scalar::from_int(v, size) } // float -> f32 - Float(FloatTy::F32) => Scalar::from_f32(f.convert(&mut false).value), + Float(FloatTy::F32) => { + Scalar::from_f32(adjust_nan(self, f, f.convert(&mut false).value)) + } // float -> f64 - Float(FloatTy::F64) => Scalar::from_f64(f.convert(&mut false).value), + Float(FloatTy::F64) => { + Scalar::from_f64(adjust_nan(self, f, f.convert(&mut false).value)) + } // That's it. _ => span_bug!(self.cur_span(), "invalid float to {} cast", dest_ty), } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/discriminant.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/discriminant.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/discriminant.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/discriminant.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,8 @@ -//! Functions for reading and writing discriminants of multi-variant layouts (enums and generators). +//! 
Functions for reading and writing discriminants of multi-variant layouts (enums and coroutines). -use rustc_middle::ty::layout::{LayoutOf, PrimitiveExt, TyAndLayout}; -use rustc_middle::{mir, ty}; +use rustc_middle::mir; +use rustc_middle::ty::layout::{LayoutOf, PrimitiveExt}; +use rustc_middle::ty::{self, Ty}; use rustc_target::abi::{self, TagEncoding}; use rustc_target::abi::{VariantIdx, Variants}; @@ -170,11 +171,11 @@ ty::Adt(adt, _) => { adt.discriminants(*self.tcx).find(|(_, var)| var.val == discr_bits) } - ty::Generator(def_id, args, _) => { - let args = args.as_generator(); + ty::Coroutine(def_id, args, _) => { + let args = args.as_coroutine(); args.discriminants(def_id, *self.tcx).find(|(_, var)| var.val == discr_bits) } - _ => span_bug!(self.cur_span(), "tagged layout for non-adt non-generator"), + _ => span_bug!(self.cur_span(), "tagged layout for non-adt non-coroutine"), } .ok_or_else(|| err_ub!(InvalidTag(Scalar::from_uint(tag_bits, tag_layout.size))))?; // Return the cast value, and the index. @@ -244,11 +245,11 @@ pub fn discriminant_for_variant( &self, - layout: TyAndLayout<'tcx>, + ty: Ty<'tcx>, variant: VariantIdx, ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> { - let discr_layout = self.layout_of(layout.ty.discriminant_ty(*self.tcx))?; - let discr_value = match layout.ty.discriminant_for_variant(*self.tcx, variant) { + let discr_layout = self.layout_of(ty.discriminant_ty(*self.tcx))?; + let discr_value = match ty.discriminant_for_variant(*self.tcx, variant) { Some(discr) => { // This type actually has discriminants. assert_eq!(discr.ty, discr_layout.ty); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/eval_context.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/eval_context.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/eval_context.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/eval_context.rs 2023-12-21 16:55:28.000000000 +0000 @@ -595,6 +595,50 @@ } } + /// Walks up the callstack from the intrinsic's callsite, searching for the first callsite in a + /// frame which is not `#[track_caller]`. This is the fancy version of `cur_span`. + pub(crate) fn find_closest_untracked_caller_location(&self) -> Span { + for frame in self.stack().iter().rev() { + debug!("find_closest_untracked_caller_location: checking frame {:?}", frame.instance); + + // Assert that the frame we look at is actually executing code currently + // (`loc` is `Right` when we are unwinding and the frame does not require cleanup). + let loc = frame.loc.left().unwrap(); + + // This could be a non-`Call` terminator (such as `Drop`), or not a terminator at all + // (such as `box`). Use the normal span by default. + let mut source_info = *frame.body.source_info(loc); + + // If this is a `Call` terminator, use the `fn_span` instead. + let block = &frame.body.basic_blocks[loc.block]; + if loc.statement_index == block.statements.len() { + debug!( + "find_closest_untracked_caller_location: got terminator {:?} ({:?})", + block.terminator(), + block.terminator().kind, + ); + if let mir::TerminatorKind::Call { fn_span, .. } = block.terminator().kind { + source_info.span = fn_span; + } + } + + let caller_location = if frame.instance.def.requires_caller_location(*self.tcx) { + // We use `Err(())` as indication that we should continue up the call stack since + // this is a `#[track_caller]` function. 
+ Some(Err(())) + } else { + None + }; + if let Ok(span) = + frame.body.caller_location_span(source_info, caller_location, *self.tcx, Ok) + { + return span; + } + } + + span_bug!(self.cur_span(), "no non-`#[track_caller]` frame found") + } + #[inline(always)] pub fn layout_of_local( &self, @@ -750,12 +794,14 @@ // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check). if M::POST_MONO_CHECKS { - // `ctfe_query` does some error message decoration that we want to be in effect here. - self.ctfe_query(None, |tcx| { - body.post_mono_checks(*tcx, self.param_env, |c| { - self.subst_from_current_frame_and_normalize_erasing_regions(c) - }) - })?; + for &const_ in &body.required_consts { + let c = + self.subst_from_current_frame_and_normalize_erasing_regions(const_.const_)?; + c.eval(*self.tcx, self.param_env, Some(const_.span)).map_err(|err| { + err.emit_note(*self.tcx); + err + })?; + } } // done @@ -961,8 +1007,8 @@ | ty::RawPtr(..) | ty::Char | ty::Ref(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Array(..) | ty::Closure(..) | ty::Never @@ -1008,7 +1054,7 @@ // Just make this an efficient immediate. // Note that not calling `layout_of` here does have one real consequence: // if the type is too big, we'll only notice this when the local is actually initialized, - // which is a bit too late -- we should ideally notice this alreayd here, when the memory + // which is a bit too late -- we should ideally notice this already here, when the memory // is conceptually allocated. But given how rare that error is and that this is a hot function, // we accept this downside for now. Operand::Immediate(Immediate::Uninit) @@ -1054,14 +1100,14 @@ Ok(()) } - /// Call a query that can return `ErrorHandled`. If `span` is `Some`, point to that span when an error occurs. + /// Call a query that can return `ErrorHandled`. Should be used for statics and other globals. + /// (`mir::Const`/`ty::Const` have `eval` methods that can be used directly instead.) pub fn ctfe_query( &self, - span: Option, query: impl FnOnce(TyCtxtAt<'tcx>) -> Result, ) -> Result { // Use a precise span for better cycle errors. - query(self.tcx.at(span.unwrap_or_else(|| self.cur_span()))).map_err(|err| { + query(self.tcx.at(self.cur_span())).map_err(|err| { err.emit_note(*self.tcx); err }) @@ -1072,17 +1118,14 @@ instance: ty::Instance<'tcx>, ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> { let gid = GlobalId { instance, promoted: None }; - // For statics we pick `ParamEnv::reveal_all`, because statics don't have generics - // and thus don't care about the parameter environment. While we could just use - // `self.param_env`, that would mean we invoke the query to evaluate the static - // with different parameter environments, thus causing the static to be evaluated - // multiple times. - let param_env = if self.tcx.is_static(gid.instance.def_id()) { - ty::ParamEnv::reveal_all() + let val = if self.tcx.is_static(gid.instance.def_id()) { + let alloc_id = self.tcx.reserve_and_set_static_alloc(gid.instance.def_id()); + + let ty = instance.ty(self.tcx.tcx, self.param_env); + mir::ConstAlloc { alloc_id, ty } } else { - self.param_env + self.ctfe_query(|tcx| tcx.eval_to_allocation_raw(self.param_env.and(gid)))? 
}; - let val = self.ctfe_query(None, |tcx| tcx.eval_to_allocation_raw(param_env.and(gid)))?; self.raw_const_to_mplace(val) } @@ -1092,7 +1135,12 @@ span: Option, layout: Option>, ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> { - let const_val = self.ctfe_query(span, |tcx| val.eval(*tcx, self.param_env, span))?; + let const_val = val.eval(*self.tcx, self.param_env, span).map_err(|err| { + // FIXME: somehow this is reachable even when POST_MONO_CHECKS is on. + // Are we not always populating `required_consts`? + err.emit_note(*self.tcx); + err + })?; self.const_val_to_op(const_val, val.ty(), layout) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intern.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intern.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intern.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intern.rs 2023-12-21 16:55:28.000000000 +0000 @@ -161,7 +161,7 @@ #[inline(always)] fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> { - &self.ecx + self.ecx } fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> { @@ -259,7 +259,7 @@ // to avoid could be expensive: on the potentially larger types, arrays and slices, // rather than on all aggregates unconditionally. if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) { - let Some((size, align)) = self.ecx.size_and_align_of_mplace(&mplace)? else { + let Some((size, _align)) = self.ecx.size_and_align_of_mplace(&mplace)? else { // We do the walk if we can't determine the size of the mplace: we may be // dealing with extern types here in the future. return Ok(true); @@ -267,7 +267,7 @@ // If there is no provenance in this allocation, it does not contain references // that point to another allocation, and we can avoid the interning walk. - if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr(), size, align)? { + if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr(), size)? { if !alloc.has_provenance() { return Ok(false); } @@ -450,6 +450,42 @@ Ok(()) } +/// Intern `ret`. This function assumes that `ret` references no other allocation. +#[instrument(level = "debug", skip(ecx))] +pub fn intern_const_alloc_for_constprop< + 'mir, + 'tcx: 'mir, + T, + M: CompileTimeMachine<'mir, 'tcx, T>, +>( + ecx: &mut InterpCx<'mir, 'tcx, M>, + alloc_id: AllocId, +) -> InterpResult<'tcx, ()> { + // Move allocation to `tcx`. + let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else { + // Pointer not found in local memory map. It is either a pointer to the global + // map, or dangling. + if ecx.tcx.try_get_global_alloc(alloc_id).is_none() { + throw_ub!(DeadLocal) + } + // The constant is already in global memory. Do nothing. + return Ok(()); + }; + + alloc.mutability = Mutability::Not; + + // We are not doing recursive interning, so we don't currently support provenance. + // (If this assertion ever triggers, we should just implement a + // proper recursive interning loop.) 
+ assert!(alloc.provenance().ptrs().is_empty()); + + // Link the alloc id to the actual allocation + let alloc = ecx.tcx.mk_const_alloc(alloc); + ecx.tcx.set_alloc_id_memory(alloc_id, alloc); + + Ok(()) +} + impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>> InterpCx<'mir, 'tcx, M> { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,128 +0,0 @@ -use rustc_ast::Mutability; -use rustc_hir::lang_items::LangItem; -use rustc_middle::mir::TerminatorKind; -use rustc_middle::ty::layout::LayoutOf; -use rustc_span::{Span, Symbol}; - -use crate::interpret::{ - intrinsics::{InterpCx, Machine}, - MPlaceTy, MemoryKind, Scalar, -}; - -impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { - /// Walks up the callstack from the intrinsic's callsite, searching for the first callsite in a - /// frame which is not `#[track_caller]`. - pub(crate) fn find_closest_untracked_caller_location(&self) -> Span { - for frame in self.stack().iter().rev() { - debug!("find_closest_untracked_caller_location: checking frame {:?}", frame.instance); - - // Assert that the frame we look at is actually executing code currently - // (`loc` is `Right` when we are unwinding and the frame does not require cleanup). - let loc = frame.loc.left().unwrap(); - - // This could be a non-`Call` terminator (such as `Drop`), or not a terminator at all - // (such as `box`). Use the normal span by default. - let mut source_info = *frame.body.source_info(loc); - - // If this is a `Call` terminator, use the `fn_span` instead. - let block = &frame.body.basic_blocks[loc.block]; - if loc.statement_index == block.statements.len() { - debug!( - "find_closest_untracked_caller_location: got terminator {:?} ({:?})", - block.terminator(), - block.terminator().kind - ); - if let TerminatorKind::Call { fn_span, .. } = block.terminator().kind { - source_info.span = fn_span; - } - } - - // Walk up the `SourceScope`s, in case some of them are from MIR inlining. - // If so, the starting `source_info.span` is in the innermost inlined - // function, and will be replaced with outer callsite spans as long - // as the inlined functions were `#[track_caller]`. - loop { - let scope_data = &frame.body.source_scopes[source_info.scope]; - - if let Some((callee, callsite_span)) = scope_data.inlined { - // Stop inside the most nested non-`#[track_caller]` function, - // before ever reaching its caller (which is irrelevant). - if !callee.def.requires_caller_location(*self.tcx) { - return source_info.span; - } - source_info.span = callsite_span; - } - - // Skip past all of the parents with `inlined: None`. - match scope_data.inlined_parent_scope { - Some(parent) => source_info.scope = parent, - None => break, - } - } - - // Stop inside the most nested non-`#[track_caller]` function, - // before ever reaching its caller (which is irrelevant). 
- if !frame.instance.def.requires_caller_location(*self.tcx) { - return source_info.span; - } - } - - span_bug!(self.cur_span(), "no non-`#[track_caller]` frame found") - } - - /// Allocate a `const core::panic::Location` with the provided filename and line/column numbers. - pub(crate) fn alloc_caller_location( - &mut self, - filename: Symbol, - line: u32, - col: u32, - ) -> MPlaceTy<'tcx, M::Provenance> { - let loc_details = self.tcx.sess.opts.unstable_opts.location_detail; - // This can fail if rustc runs out of memory right here. Trying to emit an error would be - // pointless, since that would require allocating more memory than these short strings. - let file = if loc_details.file { - self.allocate_str(filename.as_str(), MemoryKind::CallerLocation, Mutability::Not) - .unwrap() - } else { - // FIXME: This creates a new allocation each time. It might be preferable to - // perform this allocation only once, and re-use the `MPlaceTy`. - // See https://github.com/rust-lang/rust/pull/89920#discussion_r730012398 - self.allocate_str("", MemoryKind::CallerLocation, Mutability::Not).unwrap() - }; - let line = if loc_details.line { Scalar::from_u32(line) } else { Scalar::from_u32(0) }; - let col = if loc_details.column { Scalar::from_u32(col) } else { Scalar::from_u32(0) }; - - // Allocate memory for `CallerLocation` struct. - let loc_ty = self - .tcx - .type_of(self.tcx.require_lang_item(LangItem::PanicLocation, None)) - .instantiate(*self.tcx, self.tcx.mk_args(&[self.tcx.lifetimes.re_erased.into()])); - let loc_layout = self.layout_of(loc_ty).unwrap(); - let location = self.allocate(loc_layout, MemoryKind::CallerLocation).unwrap(); - - // Initialize fields. - self.write_immediate(file.to_ref(self), &self.project_field(&location, 0).unwrap()) - .expect("writing to memory we just allocated cannot fail"); - self.write_scalar(line, &self.project_field(&location, 1).unwrap()) - .expect("writing to memory we just allocated cannot fail"); - self.write_scalar(col, &self.project_field(&location, 2).unwrap()) - .expect("writing to memory we just allocated cannot fail"); - - location - } - - pub(crate) fn location_triple_for_span(&self, span: Span) -> (Symbol, u32, u32) { - let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span); - let caller = self.tcx.sess.source_map().lookup_char_pos(topmost.lo()); - ( - Symbol::intern(&caller.file.name.prefer_remapped().to_string_lossy()), - u32::try_from(caller.line).unwrap(), - u32::try_from(caller.col_display).unwrap().checked_add(1).unwrap(), - ) - } - - pub fn alloc_caller_location_for_span(&mut self, span: Span) -> MPlaceTy<'tcx, M::Provenance> { - let (file, line, column) = self.location_triple_for_span(span); - self.alloc_caller_location(file, line, column) - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intrinsics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intrinsics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intrinsics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/intrinsics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -13,7 +13,7 @@ use rustc_middle::ty::GenericArgsRef; use rustc_middle::ty::{Ty, TyCtxt}; use rustc_span::symbol::{sym, Symbol}; -use rustc_target::abi::{Abi, Align, Primitive, Size}; +use rustc_target::abi::{Abi, Primitive, Size}; use super::{ util::ensure_monomorphic_enough, CheckInAllocMsg, ImmTy, InterpCx, Machine, OpTy, PlaceTy, @@ -22,8 +22,6 @@ use 
crate::fluent_generated as fluent; -mod caller_location; - fn numeric_intrinsic(name: Symbol, bits: u128, kind: Primitive) -> Scalar { let size = match kind { Primitive::Int(integer, _) => integer.size(), @@ -99,8 +97,8 @@ | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Generator(_, _, _) - | ty::GeneratorWitness(..) + | ty::Coroutine(_, _, _) + | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) | ty::Error(_) => ConstValue::from_target_usize(0u64, &tcx), @@ -130,8 +128,10 @@ match intrinsic_name { sym::caller_location => { let span = self.find_closest_untracked_caller_location(); - let location = self.alloc_caller_location_for_span(span); - self.write_immediate(location.to_ref(self), dest)?; + let val = self.tcx.span_as_caller_location(span); + let val = + self.const_val_to_op(val, self.tcx.caller_location_ty(), Some(dest.layout))?; + self.copy_op(&val, dest, /* allow_transmute */ false)?; } sym::min_align_of_val | sym::size_of_val => { @@ -164,7 +164,7 @@ sym::type_name => Ty::new_static_str(self.tcx.tcx), _ => bug!(), }; - let val = self.ctfe_query(None, |tcx| { + let val = self.ctfe_query(|tcx| { tcx.const_eval_global_id(self.param_env, gid, Some(tcx.span)) })?; let val = self.const_val_to_op(val, ty, Some(dest.layout))?; @@ -218,7 +218,7 @@ sym::discriminant_value => { let place = self.deref_pointer(&args[0])?; let variant = self.read_discriminant(&place)?; - let discr = self.discriminant_for_variant(place.layout, variant)?; + let discr = self.discriminant_for_variant(place.layout.ty, variant)?; self.write_immediate(*discr, dest)?; } sym::exact_div => { @@ -349,10 +349,9 @@ // Check that the range between them is dereferenceable ("in-bounds or one past the // end of the same allocation"). This is like the check in ptr_offset_inbounds. let min_ptr = if dist >= 0 { b } else { a }; - self.check_ptr_access_align( + self.check_ptr_access( min_ptr, Size::from_bytes(dist.unsigned_abs()), - Align::ONE, CheckInAllocMsg::OffsetFromTest, )?; @@ -500,6 +499,9 @@ b: &ImmTy<'tcx, M::Provenance>, dest: &PlaceTy<'tcx, M::Provenance>, ) -> InterpResult<'tcx> { + assert_eq!(a.layout.ty, b.layout.ty); + assert!(matches!(a.layout.ty.kind(), ty::Int(..) | ty::Uint(..))); + // Performs an exact division, resulting in undefined behavior where // `x % y != 0` or `y == 0` or `x == T::MIN && y == -1`. // First, check x % y != 0 (or if that computation overflows). @@ -522,7 +524,10 @@ l: &ImmTy<'tcx, M::Provenance>, r: &ImmTy<'tcx, M::Provenance>, ) -> InterpResult<'tcx, Scalar> { + assert_eq!(l.layout.ty, r.layout.ty); + assert!(matches!(l.layout.ty.kind(), ty::Int(..) | ty::Uint(..))); assert!(matches!(mir_op, BinOp::Add | BinOp::Sub)); + let (val, overflowed) = self.overflowing_binary_op(mir_op, l, r)?; Ok(if overflowed { let size = l.layout.size; @@ -565,16 +570,8 @@ pub fn ptr_offset_inbounds( &self, ptr: Pointer>, - pointee_ty: Ty<'tcx>, - offset_count: i64, + offset_bytes: i64, ) -> InterpResult<'tcx, Pointer>> { - // We cannot overflow i64 as a type's size must be <= isize::MAX. - let pointee_size = i64::try_from(self.layout_of(pointee_ty)?.size.bytes()).unwrap(); - // The computed offset, in bytes, must not overflow an isize. - // `checked_mul` enforces a too small bound, but no actual allocation can be big enough for - // the difference to be noticeable. - let offset_bytes = - offset_count.checked_mul(pointee_size).ok_or(err_ub!(PointerArithOverflow))?; // The offset being in bounds cannot rely on "wrapping around" the address space. 
// So, first rule out overflows in the pointer arithmetic. let offset_ptr = ptr.signed_offset(offset_bytes, self)?; @@ -583,10 +580,9 @@ // pointers to be properly aligned (unlike a read/write operation). let min_ptr = if offset_bytes >= 0 { ptr } else { offset_ptr }; // This call handles checking for integer/null pointers. - self.check_ptr_access_align( + self.check_ptr_access( min_ptr, Size::from_bytes(offset_bytes.unsigned_abs()), - Align::ONE, CheckInAllocMsg::PointerArithmeticTest, )?; Ok(offset_ptr) @@ -615,7 +611,10 @@ let src = self.read_pointer(src)?; let dst = self.read_pointer(dst)?; - self.mem_copy(src, align, dst, align, size, nonoverlapping) + self.check_ptr_align(src, align)?; + self.check_ptr_align(dst, align)?; + + self.mem_copy(src, dst, size, nonoverlapping) } pub(crate) fn write_bytes_intrinsic( @@ -671,7 +670,7 @@ size| -> InterpResult<'tcx, &[u8]> { let ptr = this.read_pointer(op)?; - let Some(alloc_ref) = self.get_ptr_alloc(ptr, size, Align::ONE)? else { + let Some(alloc_ref) = self.get_ptr_alloc(ptr, size)? else { // zero-sized access return Ok(&[]); }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/machine.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/machine.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/machine.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/machine.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,16 +6,15 @@ use std::fmt::Debug; use std::hash::Hash; +use rustc_apfloat::{Float, FloatConvert}; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_middle::mir; use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::ty::{self, TyCtxt}; use rustc_span::def_id::DefId; -use rustc_target::abi::{Align, Size}; +use rustc_target::abi::Size; use rustc_target::spec::abi::Abi as CallAbi; -use crate::const_eval::CheckAlignment; - use super::{ AllocBytes, AllocId, AllocRange, Allocation, ConstAllocation, FnArg, Frame, ImmTy, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, PlaceTy, Pointer, Provenance, @@ -134,7 +133,7 @@ const POST_MONO_CHECKS: bool = true; /// Whether memory accesses should be alignment-checked. - fn enforce_alignment(ecx: &InterpCx<'mir, 'tcx, Self>) -> CheckAlignment; + fn enforce_alignment(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool; /// Whether, when checking alignment, we should look at the actual address and thus support /// custom alignment logic based on whatever the integer address happens to be. @@ -142,13 +141,6 @@ /// If this returns true, Provenance::OFFSET_IS_ADDR must be true. fn use_addr_for_alignment_check(ecx: &InterpCx<'mir, 'tcx, Self>) -> bool; - fn alignment_check_failed( - ecx: &InterpCx<'mir, 'tcx, Self>, - has: Align, - required: Align, - check: CheckAlignment, - ) -> InterpResult<'tcx, ()>; - /// Whether to enforce the validity invariant for a specific layout. fn enforce_validity(ecx: &InterpCx<'mir, 'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool; @@ -240,6 +232,16 @@ right: &ImmTy<'tcx, Self::Provenance>, ) -> InterpResult<'tcx, (ImmTy<'tcx, Self::Provenance>, bool)>; + /// Generate the NaN returned by a float operation, given the list of inputs. + /// (This is all inputs, not just NaN inputs!) + fn generate_nan, F2: Float>( + _ecx: &InterpCx<'mir, 'tcx, Self>, + _inputs: &[F1], + ) -> F2 { + // By default we always return the preferred NaN. + F2::NAN + } + /// Called before writing the specified `local` of the `frame`. 
/// Since writing a ZST is not actually accessing memory or locals, this is never invoked /// for ZST reads. @@ -434,6 +436,7 @@ place: &PlaceTy<'tcx, Self::Provenance>, ) -> InterpResult<'tcx> { // Without an aliasing model, all we can do is put `Uninit` into the place. + // Conveniently this also ensures that the place actually points to suitable memory. ecx.write_uninit(place) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/memory.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/memory.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/memory.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/memory.rs 2023-12-21 16:55:28.000000000 +0000 @@ -18,13 +18,12 @@ use rustc_middle::ty::{self, Instance, ParamEnv, Ty, TyCtxt}; use rustc_target::abi::{Align, HasDataLayout, Size}; -use crate::const_eval::CheckAlignment; use crate::fluent_generated as fluent; use super::{ - alloc_range, AllocBytes, AllocId, AllocMap, AllocRange, Allocation, CheckInAllocMsg, - GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak, Pointer, PointerArithmetic, Provenance, - Scalar, + alloc_range, AllocBytes, AllocId, AllocMap, AllocRange, Allocation, CheckAlignMsg, + CheckInAllocMsg, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak, Misalignment, Pointer, + PointerArithmetic, Provenance, Scalar, }; #[derive(Debug, PartialEq, Copy, Clone)] @@ -259,14 +258,7 @@ None => self.get_alloc_raw(alloc_id)?.size(), }; // This will also call the access hooks. - self.mem_copy( - ptr, - Align::ONE, - new_ptr.into(), - Align::ONE, - old_size.min(new_size), - /*nonoverlapping*/ true, - )?; + self.mem_copy(ptr, new_ptr.into(), old_size.min(new_size), /*nonoverlapping*/ true)?; self.deallocate_ptr(ptr, old_size_and_align, kind)?; Ok(new_ptr) @@ -368,13 +360,10 @@ &self, ptr: Pointer>, size: Size, - align: Align, ) -> InterpResult<'tcx, Option<(AllocId, Size, M::ProvenanceExtra)>> { self.check_and_deref_ptr( ptr, size, - align, - M::enforce_alignment(self), CheckInAllocMsg::MemoryAccessTest, |alloc_id, offset, prov| { let (size, align) = self @@ -384,43 +373,31 @@ ) } - /// Check if the given pointer points to live memory of given `size` and `align` - /// (ignoring `M::enforce_alignment`). The caller can control the error message for the - /// out-of-bounds case. + /// Check if the given pointer points to live memory of the given `size`. + /// The caller can control the error message for the out-of-bounds case. #[inline(always)] - pub fn check_ptr_access_align( + pub fn check_ptr_access( &self, ptr: Pointer>, size: Size, - align: Align, msg: CheckInAllocMsg, ) -> InterpResult<'tcx> { - self.check_and_deref_ptr( - ptr, - size, - align, - CheckAlignment::Error, - msg, - |alloc_id, _, _| { - let (size, align) = self.get_live_alloc_size_and_align(alloc_id, msg)?; - Ok((size, align, ())) - }, - )?; + self.check_and_deref_ptr(ptr, size, msg, |alloc_id, _, _| { + let (size, align) = self.get_live_alloc_size_and_align(alloc_id, msg)?; + Ok((size, align, ())) + })?; Ok(()) } /// Low-level helper function to check if a ptr is in-bounds and potentially return a reference /// to the allocation it points to. Supports both shared and mutable references, as the actual - /// checking is offloaded to a helper closure. `align` defines whether and which alignment check - /// is done. + /// checking is offloaded to a helper closure. 
/// /// If this returns `None`, the size is 0; it can however return `Some` even for size 0. fn check_and_deref_ptr( &self, ptr: Pointer>, size: Size, - align: Align, - check: CheckAlignment, msg: CheckInAllocMsg, alloc_size: impl FnOnce( AllocId, @@ -435,14 +412,10 @@ if size.bytes() > 0 || addr == 0 { throw_ub!(DanglingIntPointer(addr, msg)); } - // Must be aligned. - if check.should_check() { - self.check_offset_align(addr, align, check)?; - } None } Ok((alloc_id, offset, prov)) => { - let (alloc_size, alloc_align, ret_val) = alloc_size(alloc_id, offset, prov)?; + let (alloc_size, _alloc_align, ret_val) = alloc_size(alloc_id, offset, prov)?; // Test bounds. This also ensures non-null. // It is sufficient to check this for the end pointer. Also check for overflow! if offset.checked_add(size, &self.tcx).map_or(true, |end| end > alloc_size) { @@ -458,20 +431,6 @@ if M::Provenance::OFFSET_IS_ADDR { assert_ne!(ptr.addr(), Size::ZERO); } - // Test align. Check this last; if both bounds and alignment are violated - // we want the error to be about the bounds. - if check.should_check() { - if M::use_addr_for_alignment_check(self) { - // `use_addr_for_alignment_check` can only be true if `OFFSET_IS_ADDR` is true. - self.check_offset_align(ptr.addr().bytes(), align, check)?; - } else { - // Check allocation alignment and offset alignment. - if alloc_align.bytes() < align.bytes() { - M::alignment_check_failed(self, alloc_align, align, check)?; - } - self.check_offset_align(offset.bytes(), align, check)?; - } - } // We can still be zero-sized in this branch, in which case we have to // return `None`. @@ -480,19 +439,65 @@ }) } - fn check_offset_align( + pub(super) fn check_misalign( &self, - offset: u64, - align: Align, - check: CheckAlignment, + misaligned: Option, + msg: CheckAlignMsg, ) -> InterpResult<'tcx> { - if offset % align.bytes() == 0 { - Ok(()) - } else { - // The biggest power of two through which `offset` is divisible. - let offset_pow2 = 1 << offset.trailing_zeros(); - M::alignment_check_failed(self, Align::from_bytes(offset_pow2).unwrap(), align, check) + if let Some(misaligned) = misaligned { + throw_ub!(AlignmentCheckFailed(misaligned, msg)) } + Ok(()) + } + + pub(super) fn is_ptr_misaligned( + &self, + ptr: Pointer>, + align: Align, + ) -> Option { + if !M::enforce_alignment(self) || align.bytes() == 1 { + return None; + } + + #[inline] + fn offset_misalignment(offset: u64, align: Align) -> Option { + if offset % align.bytes() == 0 { + None + } else { + // The biggest power of two through which `offset` is divisible. + let offset_pow2 = 1 << offset.trailing_zeros(); + Some(Misalignment { has: Align::from_bytes(offset_pow2).unwrap(), required: align }) + } + } + + match self.ptr_try_get_alloc_id(ptr) { + Err(addr) => offset_misalignment(addr, align), + Ok((alloc_id, offset, _prov)) => { + let (_size, alloc_align, _kind) = self.get_alloc_info(alloc_id); + if M::use_addr_for_alignment_check(self) { + // `use_addr_for_alignment_check` can only be true if `OFFSET_IS_ADDR` is true. + offset_misalignment(ptr.addr().bytes(), align) + } else { + // Check allocation alignment and offset alignment. + if alloc_align.bytes() < align.bytes() { + Some(Misalignment { has: alloc_align, required: align }) + } else { + offset_misalignment(offset.bytes(), align) + } + } + } + } + } + + /// Checks a pointer for misalignment. + /// + /// The error assumes this is checking the pointer used directly for an access. 
+ pub fn check_ptr_align( + &self, + ptr: Pointer>, + align: Align, + ) -> InterpResult<'tcx> { + self.check_misalign(self.is_ptr_misaligned(ptr, align), CheckAlignMsg::AccessedPtr) } } @@ -536,7 +541,7 @@ } // We don't give a span -- statics don't need that, they cannot be generic or associated. - let val = self.ctfe_query(None, |tcx| tcx.eval_static_initializer(def_id))?; + let val = self.ctfe_query(|tcx| tcx.eval_static_initializer(def_id))?; (val, Some(def_id)) } }; @@ -550,17 +555,6 @@ ) } - /// Get the base address for the bytes in an `Allocation` specified by the - /// `AllocID` passed in; error if no such allocation exists. - /// - /// It is up to the caller to take sufficient care when using this address: - /// there could be provenance or uninit memory in there, and other memory - /// accesses could invalidate the exposed pointer. - pub fn alloc_base_addr(&self, id: AllocId) -> InterpResult<'tcx, *const u8> { - let alloc = self.get_alloc_raw(id)?; - Ok(alloc.base_addr()) - } - /// Gives raw access to the `Allocation`, without bounds or alignment checks. /// The caller is responsible for calling the access hooks! /// @@ -598,19 +592,16 @@ } } - /// "Safe" (bounds and align-checked) allocation access. + /// Bounds-checked *but not align-checked* allocation access. pub fn get_ptr_alloc<'a>( &'a self, ptr: Pointer>, size: Size, - align: Align, ) -> InterpResult<'tcx, Option>> { let ptr_and_alloc = self.check_and_deref_ptr( ptr, size, - align, - M::enforce_alignment(self), CheckInAllocMsg::MemoryAccessTest, |alloc_id, offset, prov| { let alloc = self.get_alloc_raw(alloc_id)?; @@ -671,15 +662,14 @@ Ok((alloc, &mut self.machine)) } - /// "Safe" (bounds and align-checked) allocation access. + /// Bounds-checked *but not align-checked* allocation access. pub fn get_ptr_alloc_mut<'a>( &'a mut self, ptr: Pointer>, size: Size, - align: Align, ) -> InterpResult<'tcx, Option>> { - let parts = self.get_ptr_access(ptr, size, align)?; + let parts = self.get_ptr_access(ptr, size)?; if let Some((alloc_id, offset, prov)) = parts { let tcx = *self.tcx; // FIXME: can we somehow avoid looking up the allocation twice here? @@ -1021,7 +1011,7 @@ } /// Returns whether the allocation has provenance anywhere in the range of the `AllocRef`. - pub(crate) fn has_provenance(&self) -> bool { + pub fn has_provenance(&self) -> bool { !self.alloc.provenance().range_empty(self.range, &self.tcx) } } @@ -1036,7 +1026,7 @@ ptr: Pointer>, size: Size, ) -> InterpResult<'tcx, &[u8]> { - let Some(alloc_ref) = self.get_ptr_alloc(ptr, size, Align::ONE)? else { + let Some(alloc_ref) = self.get_ptr_alloc(ptr, size)? else { // zero-sized access return Ok(&[]); }; @@ -1062,7 +1052,7 @@ assert_eq!(lower, len, "can only write iterators with a precise length"); let size = Size::from_bytes(len); - let Some(alloc_ref) = self.get_ptr_alloc_mut(ptr, size, Align::ONE)? else { + let Some(alloc_ref) = self.get_ptr_alloc_mut(ptr, size)? 
else { // zero-sized access assert_matches!(src.next(), None, "iterator said it was empty but returned an element"); return Ok(()); @@ -1087,29 +1077,25 @@ pub fn mem_copy( &mut self, src: Pointer>, - src_align: Align, dest: Pointer>, - dest_align: Align, size: Size, nonoverlapping: bool, ) -> InterpResult<'tcx> { - self.mem_copy_repeatedly(src, src_align, dest, dest_align, size, 1, nonoverlapping) + self.mem_copy_repeatedly(src, dest, size, 1, nonoverlapping) } pub fn mem_copy_repeatedly( &mut self, src: Pointer>, - src_align: Align, dest: Pointer>, - dest_align: Align, size: Size, num_copies: u64, nonoverlapping: bool, ) -> InterpResult<'tcx> { let tcx = self.tcx; // We need to do our own bounds-checks. - let src_parts = self.get_ptr_access(src, size, src_align)?; - let dest_parts = self.get_ptr_access(dest, size * num_copies, dest_align)?; // `Size` multiplication + let src_parts = self.get_ptr_access(src, size)?; + let dest_parts = self.get_ptr_access(dest, size * num_copies)?; // `Size` multiplication // FIXME: we look up both allocations twice here, once before for the `check_ptr_access` // and once below to get the underlying `&[mut] Allocation`. @@ -1249,6 +1235,11 @@ /// Turning a "maybe pointer" into a proper pointer (and some information /// about where it points), or an absolute address. + /// + /// The result must be used immediately; it is not allowed to convert + /// the returned data back into a `Pointer` and store that in machine state. + /// (In fact that's not even possible since `M::ProvenanceExtra` is generic and + /// we don't have an operation to turn it back into `M::Provenance`.) pub fn ptr_try_get_alloc_id( &self, ptr: Pointer>, @@ -1267,6 +1258,11 @@ } /// Turning a "maybe pointer" into a proper pointer (and some information about where it points). + /// + /// The result must be used immediately; it is not allowed to convert + /// the returned data back into a `Pointer` and store that in machine state. + /// (In fact that's not even possible since `M::ProvenanceExtra` is generic and + /// we don't have an operation to turn it back into `M::Provenance`.) 
#[inline(always)] pub fn ptr_get_alloc_id( &self, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -21,12 +21,14 @@ pub use rustc_middle::mir::interpret::*; // have all the `interpret` symbols in one place: here pub use self::eval_context::{Frame, FrameInfo, InterpCx, StackPopCleanup}; -pub use self::intern::{intern_const_alloc_recursive, InternKind}; +pub use self::intern::{ + intern_const_alloc_for_constprop, intern_const_alloc_recursive, InternKind, +}; pub use self::machine::{compile_time_machine, AllocMap, Machine, MayLeak, StackPopJump}; pub use self::memory::{AllocKind, AllocRef, AllocRefMut, FnVal, Memory, MemoryKind}; pub use self::operand::{ImmTy, Immediate, OpTy, Readable}; pub use self::place::{MPlaceTy, MemPlaceMeta, PlaceTy, Writeable}; -pub use self::projection::Projectable; +pub use self::projection::{OffsetMode, Projectable}; pub use self::terminator::FnArg; pub use self::validity::{CtfeValidationMode, RefTracking}; pub use self::visitor::ValueVisitor; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/operand.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/operand.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/operand.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/operand.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,11 +10,12 @@ use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter}; use rustc_middle::ty::{ConstInt, Ty, TyCtxt}; use rustc_middle::{mir, ty}; -use rustc_target::abi::{self, Abi, Align, HasDataLayout, Size}; +use rustc_target::abi::{self, Abi, HasDataLayout, Size}; use super::{ alloc_range, from_known_layout, mir_assign_valid_types, AllocId, Frame, InterpCx, InterpResult, - MPlaceTy, Machine, MemPlace, MemPlaceMeta, PlaceTy, Pointer, Projectable, Provenance, Scalar, + MPlaceTy, Machine, MemPlace, MemPlaceMeta, OffsetMode, PlaceTy, Pointer, Projectable, + Provenance, Scalar, }; /// An `Immediate` represents a single immediate self-contained Rust value. 
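The memory.rs hunks above rework how alignment is enforced: the old combined `check_ptr_access_align` is split into a bounds-only `check_ptr_access`, plus `is_ptr_misaligned` (which computes a possible `Misalignment` as data) and `check_misalign` (which turns it into an error), so that out-of-bounds errors take precedence and misalignment is reported separately. The following is a minimal, standalone sketch of that offset-misalignment calculation only; `Align` and `Misalignment` here are simplified stand-ins for the rustc types and this code is illustrative, not part of the patch.

// Sketch of the "misalignment as data" pattern used by the new is_ptr_misaligned helper.
#[derive(Debug, Clone, Copy)]
struct Align(u64); // power-of-two alignment in bytes (stand-in for rustc's Align)

#[derive(Debug, Clone, Copy)]
struct Misalignment { has: Align, required: Align }

// Returns None if `offset` satisfies `required`, otherwise records the largest
// alignment the offset does satisfy, mirroring the patched offset_misalignment.
fn offset_misalignment(offset: u64, required: Align) -> Option<Misalignment> {
    if offset % required.0 == 0 {
        None
    } else {
        // The biggest power of two through which `offset` is divisible.
        let has = Align(1 << offset.trailing_zeros());
        Some(Misalignment { has, required })
    }
}

fn main() {
    // Address 6 with a 4-byte requirement is only 2-aligned: reported as data.
    println!("{:?}", offset_misalignment(6, Align(4)));
    // Address 8 satisfies the requirement: no misalignment recorded.
    println!("{:?}", offset_misalignment(8, Align(4)));
}

In the patch itself, callers such as `get_place_alloc` first perform the bounds-checked access and only then call `check_misalign` on the stored `Option<Misalignment>`, which is how a simultaneously out-of-bounds and misaligned access ends up reporting the bounds error.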
@@ -43,12 +44,16 @@ } impl Immediate { - pub fn from_pointer(ptr: Pointer, cx: &impl HasDataLayout) -> Self { - Immediate::Scalar(Scalar::from_pointer(ptr, cx)) - } - - pub fn from_maybe_pointer(ptr: Pointer>, cx: &impl HasDataLayout) -> Self { - Immediate::Scalar(Scalar::from_maybe_pointer(ptr, cx)) + pub fn new_pointer_with_meta( + ptr: Pointer>, + meta: MemPlaceMeta, + cx: &impl HasDataLayout, + ) -> Self { + let ptr = Scalar::from_maybe_pointer(ptr, cx); + match meta { + MemPlaceMeta::None => Immediate::from(ptr), + MemPlaceMeta::Meta(meta) => Immediate::ScalarPair(ptr, meta), + } } pub fn new_slice(ptr: Pointer>, len: u64, cx: &impl HasDataLayout) -> Self { @@ -102,10 +107,10 @@ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { /// Helper function for printing a scalar to a FmtPrinter fn p<'a, 'tcx, Prov: Provenance>( - cx: FmtPrinter<'a, 'tcx>, + cx: &mut FmtPrinter<'a, 'tcx>, s: Scalar, ty: Ty<'tcx>, - ) -> Result, std::fmt::Error> { + ) -> Result<(), std::fmt::Error> { match s { Scalar::Int(int) => cx.pretty_print_const_scalar_int(int, ty, true), Scalar::Ptr(ptr, _sz) => { @@ -120,8 +125,9 @@ match self.imm { Immediate::Scalar(s) => { if let Some(ty) = tcx.lift(self.layout.ty) { - let cx = FmtPrinter::new(tcx, Namespace::ValueNS); - f.write_str(&p(cx, s, ty)?.into_buffer())?; + let s = + FmtPrinter::print_string(tcx, Namespace::ValueNS, |cx| p(cx, s, ty))?; + f.write_str(&s)?; return Ok(()); } write!(f, "{:x}: {}", s, self.layout.ty) @@ -163,6 +169,16 @@ ImmTy { imm: val.into(), layout } } + #[inline] + pub fn from_scalar_pair(a: Scalar, b: Scalar, layout: TyAndLayout<'tcx>) -> Self { + debug_assert!( + matches!(layout.abi, Abi::ScalarPair(..)), + "`ImmTy::from_scalar_pair` on non-scalar-pair layout" + ); + let imm = Immediate::ScalarPair(a, b); + ImmTy { imm, layout } + } + #[inline(always)] pub fn from_immediate(imm: Immediate, layout: TyAndLayout<'tcx>) -> Self { debug_assert!( @@ -219,6 +235,17 @@ /// given layout. // Not called `offset` to avoid confusion with the trait method. fn offset_(&self, offset: Size, layout: TyAndLayout<'tcx>, cx: &impl HasDataLayout) -> Self { + debug_assert!(layout.is_sized(), "unsized immediates are not a thing"); + // `ImmTy` have already been checked to be in-bounds, so we can just check directly if this + // remains in-bounds. This cannot actually be violated since projections are type-checked + // and bounds-checked. + assert!( + offset + layout.size <= self.layout.size, + "attempting to project to field at offset {} with size {} into immediate with layout {:#?}", + offset.bytes(), + layout.size.bytes(), + self.layout, + ); // This makes several assumptions about what layouts we will encounter; we match what // codegen does as good as we can (see `extract_field` in `rustc_codegen_ssa/src/mir/operand.rs`). let inner_val: Immediate<_> = match (**self, self.layout.abi) { @@ -286,6 +313,7 @@ fn offset_with_meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>( &self, offset: Size, + _mode: OffsetMode, meta: MemPlaceMeta, layout: TyAndLayout<'tcx>, ecx: &InterpCx<'mir, 'tcx, M>, @@ -315,14 +343,6 @@ pub struct OpTy<'tcx, Prov: Provenance = AllocId> { op: Operand, // Keep this private; it helps enforce invariants. pub layout: TyAndLayout<'tcx>, - /// rustc does not have a proper way to represent the type of a field of a `repr(packed)` struct: - /// it needs to have a different alignment than the field type would usually have. - /// So we represent this here with a separate field that "overwrites" `layout.align`. 
- /// This means `layout.align` should never be used for an `OpTy`! - /// `None` means "alignment does not matter since this is a by-value operand" - /// (`Operand::Immediate`); this field is only relevant for `Operand::Indirect`. - /// Also CTFE ignores alignment anyway, so this is for Miri only. - pub align: Option, } impl std::fmt::Debug for OpTy<'_, Prov> { @@ -338,18 +358,14 @@ impl<'tcx, Prov: Provenance> From> for OpTy<'tcx, Prov> { #[inline(always)] fn from(val: ImmTy<'tcx, Prov>) -> Self { - OpTy { op: Operand::Immediate(val.imm), layout: val.layout, align: None } + OpTy { op: Operand::Immediate(val.imm), layout: val.layout } } } impl<'tcx, Prov: Provenance> From> for OpTy<'tcx, Prov> { #[inline(always)] fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self { - OpTy { - op: Operand::Indirect(*mplace.mplace()), - layout: mplace.layout, - align: Some(mplace.align), - } + OpTy { op: Operand::Indirect(*mplace.mplace()), layout: mplace.layout } } } @@ -380,14 +396,14 @@ fn offset_with_meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>( &self, offset: Size, + mode: OffsetMode, meta: MemPlaceMeta, layout: TyAndLayout<'tcx>, ecx: &InterpCx<'mir, 'tcx, M>, ) -> InterpResult<'tcx, Self> { match self.as_mplace_or_imm() { - Left(mplace) => Ok(mplace.offset_with_meta(offset, meta, layout, ecx)?.into()), + Left(mplace) => Ok(mplace.offset_with_meta(offset, mode, meta, layout, ecx)?.into()), Right(imm) => { - debug_assert!(layout.is_sized(), "unsized immediates are not a thing"); assert_matches!(meta, MemPlaceMeta::None); // no place to store metadata here // Every part of an uninit is uninit. Ok(imm.offset_(offset, layout, ecx).into()) @@ -622,7 +638,7 @@ throw_inval!(ConstPropNonsense); } } - Ok(OpTy { op, layout, align: Some(layout.align.abi) }) + Ok(OpTy { op, layout }) } /// Every place can be read from, so we can turn them into an operand. @@ -637,16 +653,14 @@ Right((frame, local, offset)) => { debug_assert!(place.layout.is_sized()); // only sized locals can ever be `Place::Local`. let base = self.local_to_op(&self.stack()[frame], local, None)?; - let mut field = match offset { + Ok(match offset { Some(offset) => base.offset(offset, place.layout, self)?, None => { // In the common case this hasn't been projected. debug_assert_eq!(place.layout, base.layout); base } - }; - field.align = Some(place.align); - Ok(field) + }) } } } @@ -670,19 +684,24 @@ trace!("eval_place_to_op: got {:?}", op); // Sanity-check the type we ended up with. 
- debug_assert!( - mir_assign_valid_types( + if cfg!(debug_assertions) { + let normalized_place_ty = self.subst_from_current_frame_and_normalize_erasing_regions( + mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty, + )?; + if !mir_assign_valid_types( *self.tcx, self.param_env, - self.layout_of(self.subst_from_current_frame_and_normalize_erasing_regions( - mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty - )?)?, + self.layout_of(normalized_place_ty)?, op.layout, - ), - "eval_place of a MIR place with type {:?} produced an interpreter operand with type {}", - mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty, - op.layout.ty, - ); + ) { + span_bug!( + self.cur_span(), + "eval_place of a MIR place with type {} produced an interpreter operand with type {}", + normalized_place_ty, + op.layout.ty, + ) + } + } Ok(op) } @@ -729,27 +748,23 @@ }) }; let layout = from_known_layout(self.tcx, self.param_env, layout, || self.layout_of(ty))?; - let op = match val_val { + let imm = match val_val { mir::ConstValue::Indirect { alloc_id, offset } => { // We rely on mutability being set correctly in that allocation to prevent writes // where none should happen. let ptr = self.global_base_pointer(Pointer::new(alloc_id, offset))?; - Operand::Indirect(MemPlace::from_ptr(ptr.into())) + return Ok(self.ptr_to_mplace(ptr.into(), layout).into()); } - mir::ConstValue::Scalar(x) => Operand::Immediate(adjust_scalar(x)?.into()), - mir::ConstValue::ZeroSized => Operand::Immediate(Immediate::Uninit), + mir::ConstValue::Scalar(x) => adjust_scalar(x)?.into(), + mir::ConstValue::ZeroSized => Immediate::Uninit, mir::ConstValue::Slice { data, meta } => { // We rely on mutability being set correctly in `data` to prevent writes // where none should happen. let ptr = Pointer::new(self.tcx.reserve_and_set_memory_alloc(data), Size::ZERO); - Operand::Immediate(Immediate::new_slice( - self.global_base_pointer(ptr)?.into(), - meta, - self, - )) + Immediate::new_slice(self.global_base_pointer(ptr)?.into(), meta, self) } }; - Ok(OpTy { op, layout, align: Some(layout.align.abi) }) + Ok(OpTy { op: Operand::Immediate(imm), layout }) } } @@ -762,6 +777,6 @@ static_assert_size!(Immediate, 48); static_assert_size!(ImmTy<'_>, 64); static_assert_size!(Operand, 56); - static_assert_size!(OpTy<'_>, 80); + static_assert_size!(OpTy<'_>, 72); // tidy-alphabetical-end } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/operator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/operator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/operator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/operator.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,7 @@ -use rustc_apfloat::Float; +use rustc_apfloat::{Float, FloatConvert}; use rustc_middle::mir; use rustc_middle::mir::interpret::{InterpResult, Scalar}; -use rustc_middle::ty::layout::TyAndLayout; +use rustc_middle::ty::layout::{LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, FloatTy, Ty}; use rustc_span::symbol::sym; use rustc_target::abi::Abi; @@ -104,7 +104,7 @@ (ImmTy::from_bool(res, *self.tcx), false) } - fn binary_float_op>>( + fn binary_float_op + Into>>( &self, bin_op: mir::BinOp, layout: TyAndLayout<'tcx>, @@ -113,6 +113,11 @@ ) -> (ImmTy<'tcx, M::Provenance>, bool) { use rustc_middle::mir::BinOp::*; + // Performs appropriate non-deterministic adjustments of NaN results. 
+ let adjust_nan = |f: F| -> F { + if f.is_nan() { M::generate_nan(self, &[l, r]) } else { f } + }; + let val = match bin_op { Eq => ImmTy::from_bool(l == r, *self.tcx), Ne => ImmTy::from_bool(l != r, *self.tcx), @@ -120,11 +125,11 @@ Le => ImmTy::from_bool(l <= r, *self.tcx), Gt => ImmTy::from_bool(l > r, *self.tcx), Ge => ImmTy::from_bool(l >= r, *self.tcx), - Add => ImmTy::from_scalar((l + r).value.into(), layout), - Sub => ImmTy::from_scalar((l - r).value.into(), layout), - Mul => ImmTy::from_scalar((l * r).value.into(), layout), - Div => ImmTy::from_scalar((l / r).value.into(), layout), - Rem => ImmTy::from_scalar((l % r).value.into(), layout), + Add => ImmTy::from_scalar(adjust_nan((l + r).value).into(), layout), + Sub => ImmTy::from_scalar(adjust_nan((l - r).value).into(), layout), + Mul => ImmTy::from_scalar(adjust_nan((l * r).value).into(), layout), + Div => ImmTy::from_scalar(adjust_nan((l / r).value).into(), layout), + Rem => ImmTy::from_scalar(adjust_nan((l % r).value).into(), layout), _ => span_bug!(self.cur_span(), "invalid float op: `{:?}`", bin_op), }; (val, false) @@ -332,7 +337,15 @@ let offset_count = right.to_scalar().to_target_isize(self)?; let pointee_ty = left.layout.ty.builtin_deref(true).unwrap().ty; - let offset_ptr = self.ptr_offset_inbounds(ptr, pointee_ty, offset_count)?; + // We cannot overflow i64 as a type's size must be <= isize::MAX. + let pointee_size = i64::try_from(self.layout_of(pointee_ty)?.size.bytes()).unwrap(); + // The computed offset, in bytes, must not overflow an isize. + // `checked_mul` enforces a too small bound, but no actual allocation can be big enough for + // the difference to be noticeable. + let offset_bytes = + offset_count.checked_mul(pointee_size).ok_or(err_ub!(PointerArithOverflow))?; + + let offset_ptr = self.ptr_offset_inbounds(ptr, offset_bytes)?; Ok(( ImmTy::from_scalar(Scalar::from_maybe_pointer(offset_ptr, self), left.layout), false, @@ -456,6 +469,7 @@ Ok((ImmTy::from_bool(res, *self.tcx), false)) } ty::Float(fty) => { + // No NaN adjustment here, `-` is a bitwise operation! let res = match (un_op, fty) { (Neg, FloatTy::F32) => Scalar::from_f32(-val.to_f32()?), (Neg, FloatTy::F64) => Scalar::from_f64(-val.to_f64()?), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/place.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/place.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/place.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/place.rs 2023-12-21 16:55:28.000000000 +0000 @@ -15,9 +15,9 @@ use rustc_target::abi::{Abi, Align, FieldIdx, HasDataLayout, Size, FIRST_VARIANT}; use super::{ - alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckInAllocMsg, ImmTy, - Immediate, InterpCx, InterpResult, Machine, MemoryKind, OpTy, Operand, Pointer, - PointerArithmetic, Projectable, Provenance, Readable, Scalar, + alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckAlignMsg, ImmTy, + Immediate, InterpCx, InterpResult, Machine, MemoryKind, Misalignment, OffsetMode, OpTy, + Operand, Pointer, PointerArithmetic, Projectable, Provenance, Readable, Scalar, }; #[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)] @@ -57,19 +57,11 @@ /// Must not be present for sized types, but can be missing for unsized types /// (e.g., `extern type`). pub meta: MemPlaceMeta, + /// Stores whether this place was created based on a sufficiently aligned pointer. 
+ misaligned: Option, } impl MemPlace { - #[inline(always)] - pub fn from_ptr(ptr: Pointer>) -> Self { - MemPlace { ptr, meta: MemPlaceMeta::None } - } - - #[inline(always)] - pub fn from_ptr_with_meta(ptr: Pointer>, meta: MemPlaceMeta) -> Self { - MemPlace { ptr, meta } - } - /// Adjust the provenance of the main pointer (metadata is unaffected). pub fn map_provenance(self, f: impl FnOnce(Option) -> Option) -> Self { MemPlace { ptr: self.ptr.map_provenance(f), ..self } @@ -78,27 +70,32 @@ /// Turn a mplace into a (thin or wide) pointer, as a reference, pointing to the same space. #[inline] pub fn to_ref(self, cx: &impl HasDataLayout) -> Immediate { - match self.meta { - MemPlaceMeta::None => Immediate::from(Scalar::from_maybe_pointer(self.ptr, cx)), - MemPlaceMeta::Meta(meta) => { - Immediate::ScalarPair(Scalar::from_maybe_pointer(self.ptr, cx), meta) - } - } + Immediate::new_pointer_with_meta(self.ptr, self.meta, cx) } #[inline] // Not called `offset_with_meta` to avoid confusion with the trait method. - fn offset_with_meta_<'tcx>( + fn offset_with_meta_<'mir, 'tcx, M: Machine<'mir, 'tcx, Provenance = Prov>>( self, offset: Size, + mode: OffsetMode, meta: MemPlaceMeta, - cx: &impl HasDataLayout, + ecx: &InterpCx<'mir, 'tcx, M>, ) -> InterpResult<'tcx, Self> { debug_assert!( !meta.has_meta() || self.meta.has_meta(), "cannot use `offset_with_meta` to add metadata to a place" ); - Ok(MemPlace { ptr: self.ptr.offset(offset, cx)?, meta }) + if offset > ecx.data_layout().max_size_of_val() { + throw_ub!(PointerArithOverflow); + } + let ptr = match mode { + OffsetMode::Inbounds => { + ecx.ptr_offset_inbounds(self.ptr, offset.bytes().try_into().unwrap())? + } + OffsetMode::Wrapping => self.ptr.wrapping_offset(offset, ecx), + }; + Ok(MemPlace { ptr, meta, misaligned: self.misaligned }) } } @@ -107,11 +104,6 @@ pub struct MPlaceTy<'tcx, Prov: Provenance = AllocId> { mplace: MemPlace, pub layout: TyAndLayout<'tcx>, - /// rustc does not have a proper way to represent the type of a field of a `repr(packed)` struct: - /// it needs to have a different alignment than the field type would usually have. - /// So we represent this here with a separate field that "overwrites" `layout.align`. - /// This means `layout.align` should never be used for a `MPlaceTy`! - pub align: Align, } impl std::fmt::Debug for MPlaceTy<'_, Prov> { @@ -133,25 +125,7 @@ assert!(layout.is_zst()); let align = layout.align.abi; let ptr = Pointer::from_addr_invalid(align.bytes()); // no provenance, absolute address - MPlaceTy { mplace: MemPlace { ptr, meta: MemPlaceMeta::None }, layout, align } - } - - #[inline] - pub fn from_aligned_ptr(ptr: Pointer>, layout: TyAndLayout<'tcx>) -> Self { - MPlaceTy { mplace: MemPlace::from_ptr(ptr), layout, align: layout.align.abi } - } - - #[inline] - pub fn from_aligned_ptr_with_meta( - ptr: Pointer>, - layout: TyAndLayout<'tcx>, - meta: MemPlaceMeta, - ) -> Self { - MPlaceTy { - mplace: MemPlace::from_ptr_with_meta(ptr, meta), - layout, - align: layout.align.abi, - } + MPlaceTy { mplace: MemPlace { ptr, meta: MemPlaceMeta::None, misaligned: None }, layout } } /// Adjust the provenance of the main pointer (metadata is unaffected). 
@@ -189,15 +163,12 @@ fn offset_with_meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>( &self, offset: Size, + mode: OffsetMode, meta: MemPlaceMeta, layout: TyAndLayout<'tcx>, ecx: &InterpCx<'mir, 'tcx, M>, ) -> InterpResult<'tcx, Self> { - Ok(MPlaceTy { - mplace: self.mplace.offset_with_meta_(offset, meta, ecx)?, - align: self.align.restrict_for_offset(offset), - layout, - }) + Ok(MPlaceTy { mplace: self.mplace.offset_with_meta_(offset, mode, meta, ecx)?, layout }) } fn to_op<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>( @@ -228,11 +199,6 @@ pub struct PlaceTy<'tcx, Prov: Provenance = AllocId> { place: Place, // Keep this private; it helps enforce invariants. pub layout: TyAndLayout<'tcx>, - /// rustc does not have a proper way to represent the type of a field of a `repr(packed)` struct: - /// it needs to have a different alignment than the field type would usually have. - /// So we represent this here with a separate field that "overwrites" `layout.align`. - /// This means `layout.align` should never be used for a `PlaceTy`! - pub align: Align, } impl std::fmt::Debug for PlaceTy<'_, Prov> { @@ -248,7 +214,7 @@ impl<'tcx, Prov: Provenance> From> for PlaceTy<'tcx, Prov> { #[inline(always)] fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self { - PlaceTy { place: Place::Ptr(mplace.mplace), layout: mplace.layout, align: mplace.align } + PlaceTy { place: Place::Ptr(mplace.mplace), layout: mplace.layout } } } @@ -264,7 +230,7 @@ &self, ) -> Either, (usize, mir::Local, Option)> { match self.place { - Place::Ptr(mplace) => Left(MPlaceTy { mplace, layout: self.layout, align: self.align }), + Place::Ptr(mplace) => Left(MPlaceTy { mplace, layout: self.layout }), Place::Local { frame, local, offset } => Right((frame, local, offset)), } } @@ -301,27 +267,27 @@ fn offset_with_meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>( &self, offset: Size, + mode: OffsetMode, meta: MemPlaceMeta, layout: TyAndLayout<'tcx>, ecx: &InterpCx<'mir, 'tcx, M>, ) -> InterpResult<'tcx, Self> { Ok(match self.as_mplace_or_local() { - Left(mplace) => mplace.offset_with_meta(offset, meta, layout, ecx)?.into(), + Left(mplace) => mplace.offset_with_meta(offset, mode, meta, layout, ecx)?.into(), Right((frame, local, old_offset)) => { debug_assert!(layout.is_sized(), "unsized locals should live in memory"); assert_matches!(meta, MemPlaceMeta::None); // we couldn't store it anyway... - let new_offset = ecx - .data_layout() - .offset(old_offset.unwrap_or(Size::ZERO).bytes(), offset.bytes())?; - PlaceTy { - place: Place::Local { - frame, - local, - offset: Some(Size::from_bytes(new_offset)), - }, - align: self.align.restrict_for_offset(offset), - layout, - } + // `Place::Local` are always in-bounds of their surrounding local, so we can just + // check directly if this remains in-bounds. This cannot actually be violated since + // projections are type-checked and bounds-checked. 
+ assert!(offset + layout.size <= self.layout.size); + + let new_offset = Size::from_bytes( + ecx.data_layout() + .offset(old_offset.unwrap_or(Size::ZERO).bytes(), offset.bytes())?, + ); + + PlaceTy { place: Place::Local { frame, local, offset: Some(new_offset) }, layout } } }) } @@ -339,9 +305,7 @@ #[inline(always)] pub fn as_mplace_or_imm(&self) -> Either, ImmTy<'tcx, Prov>> { match self.op() { - Operand::Indirect(mplace) => { - Left(MPlaceTy { mplace: *mplace, layout: self.layout, align: self.align.unwrap() }) - } + Operand::Indirect(mplace) => Left(MPlaceTy { mplace: *mplace, layout: self.layout }), Operand::Immediate(imm) => Right(ImmTy::from_immediate(*imm, self.layout)), } } @@ -362,7 +326,7 @@ pub trait Writeable<'tcx, Prov: Provenance>: Projectable<'tcx, Prov> { fn as_mplace_or_local( &self, - ) -> Either, (usize, mir::Local, Option, Align, TyAndLayout<'tcx>)>; + ) -> Either, (usize, mir::Local, Option, TyAndLayout<'tcx>)>; fn force_mplace<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>( &self, @@ -374,10 +338,9 @@ #[inline(always)] fn as_mplace_or_local( &self, - ) -> Either, (usize, mir::Local, Option, Align, TyAndLayout<'tcx>)> - { + ) -> Either, (usize, mir::Local, Option, TyAndLayout<'tcx>)> { self.as_mplace_or_local() - .map_right(|(frame, local, offset)| (frame, local, offset, self.align, self.layout)) + .map_right(|(frame, local, offset)| (frame, local, offset, self.layout)) } #[inline(always)] @@ -393,8 +356,7 @@ #[inline(always)] fn as_mplace_or_local( &self, - ) -> Either, (usize, mir::Local, Option, Align, TyAndLayout<'tcx>)> - { + ) -> Either, (usize, mir::Local, Option, TyAndLayout<'tcx>)> { Left(self.clone()) } @@ -413,6 +375,25 @@ Prov: Provenance, M: Machine<'mir, 'tcx, Provenance = Prov>, { + pub fn ptr_with_meta_to_mplace( + &self, + ptr: Pointer>, + meta: MemPlaceMeta, + layout: TyAndLayout<'tcx>, + ) -> MPlaceTy<'tcx, M::Provenance> { + let misaligned = self.is_ptr_misaligned(ptr, layout.align.abi); + MPlaceTy { mplace: MemPlace { ptr, meta, misaligned }, layout } + } + + pub fn ptr_to_mplace( + &self, + ptr: Pointer>, + layout: TyAndLayout<'tcx>, + ) -> MPlaceTy<'tcx, M::Provenance> { + assert!(layout.is_sized()); + self.ptr_with_meta_to_mplace(ptr, MemPlaceMeta::None, layout) + } + /// Take a value, which represents a (thin or wide) reference, and make it a place. /// Alignment is just based on the type. This is the inverse of `mplace_to_ref()`. /// @@ -434,7 +415,8 @@ // `ref_to_mplace` is called on raw pointers even if they don't actually get dereferenced; // we hence can't call `size_and_align_of` since that asserts more validity than we want. - Ok(MPlaceTy::from_aligned_ptr_with_meta(ptr.to_pointer(self)?, layout, meta)) + let ptr = ptr.to_pointer(self)?; + Ok(self.ptr_with_meta_to_mplace(ptr, meta, layout)) } /// Turn a mplace into a (thin or wide) mutable raw pointer, pointing to the same space. @@ -464,7 +446,6 @@ } let mplace = self.ref_to_mplace(&val)?; - self.check_mplace(&mplace)?; Ok(mplace) } @@ -477,8 +458,11 @@ let (size, _align) = self .size_and_align_of_mplace(&mplace)? .unwrap_or((mplace.layout.size, mplace.layout.align.abi)); - // Due to packed places, only `mplace.align` matters. - self.get_ptr_alloc(mplace.ptr(), size, mplace.align) + // We check alignment separately, and *after* checking everything else. + // If an access is both OOB and misaligned, we want to see the bounds error. 
+ let a = self.get_ptr_alloc(mplace.ptr(), size)?; + self.check_misalign(mplace.mplace.misaligned, CheckAlignMsg::BasedOn)?; + Ok(a) } #[inline] @@ -490,20 +474,13 @@ let (size, _align) = self .size_and_align_of_mplace(&mplace)? .unwrap_or((mplace.layout.size, mplace.layout.align.abi)); - // Due to packed places, only `mplace.align` matters. - self.get_ptr_alloc_mut(mplace.ptr(), size, mplace.align) - } - - /// Check if this mplace is dereferenceable and sufficiently aligned. - pub fn check_mplace(&self, mplace: &MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> { - let (size, _align) = self - .size_and_align_of_mplace(&mplace)? - .unwrap_or((mplace.layout.size, mplace.layout.align.abi)); - // Due to packed places, only `mplace.align` matters. - let align = - if M::enforce_alignment(self).should_check() { mplace.align } else { Align::ONE }; - self.check_ptr_access_align(mplace.ptr(), size, align, CheckInAllocMsg::DerefTest)?; - Ok(()) + // We check alignment separately, and raise that error *after* checking everything else. + // If an access is both OOB and misaligned, we want to see the bounds error. + // However we have to call `check_misalign` first to make the borrow checker happy. + let misalign_err = self.check_misalign(mplace.mplace.misaligned, CheckAlignMsg::BasedOn); + let a = self.get_ptr_alloc_mut(mplace.ptr(), size)?; + misalign_err?; + Ok(a) } /// Converts a repr(simd) place into a place where `place_index` accesses the SIMD elements. @@ -518,8 +495,8 @@ let (len, e_ty) = mplace.layout.ty.simd_size_and_type(*self.tcx); let array = Ty::new_array(self.tcx.tcx, e_ty, len); let layout = self.layout_of(array)?; - assert_eq!(layout.size, mplace.layout.size); - Ok((MPlaceTy { layout, ..*mplace }, len)) + let mplace = mplace.transmute(layout, self)?; + Ok((mplace, len)) } /// Converts a repr(simd) place into a place where `place_index` accesses the SIMD elements. @@ -555,7 +532,7 @@ Operand::Indirect(mplace) => Place::Ptr(*mplace), } }; - Ok(PlaceTy { place, layout, align: layout.align.abi }) + Ok(PlaceTy { place, layout }) } /// Computes a place. You should only use this if you intend to write into this @@ -573,19 +550,24 @@ trace!("{:?}", self.dump_place(&place)); // Sanity-check the type we ended up with. - debug_assert!( - mir_assign_valid_types( + if cfg!(debug_assertions) { + let normalized_place_ty = self.subst_from_current_frame_and_normalize_erasing_regions( + mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty, + )?; + if !mir_assign_valid_types( *self.tcx, self.param_env, - self.layout_of(self.subst_from_current_frame_and_normalize_erasing_regions( - mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty - )?)?, + self.layout_of(normalized_place_ty)?, place.layout, - ), - "eval_place of a MIR place with type {:?} produced an interpreter place with type {}", - mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty, - place.layout.ty, - ); + ) { + span_bug!( + self.cur_span(), + "eval_place of a MIR place with type {} produced an interpreter place with type {}", + normalized_place_ty, + place.layout.ty, + ) + } + } Ok(place) } @@ -640,7 +622,7 @@ // See if we can avoid an allocation. This is the counterpart to `read_immediate_raw`, // but not factored as a separate function. let mplace = match dest.as_mplace_or_local() { - Right((frame, local, offset, align, layout)) => { + Right((frame, local, offset, layout)) => { if offset.is_some() { // This has been projected to a part of this local. 
We could have complicated // logic to still keep this local as an `Operand`... but it's much easier to @@ -681,7 +663,7 @@ } Operand::Indirect(mplace) => { // The local is in memory, go on below. - MPlaceTy { mplace: *mplace, align, layout } + MPlaceTy { mplace: *mplace, layout } } } } @@ -690,7 +672,7 @@ }; // This is already in memory, write there. - self.write_immediate_to_mplace_no_validate(src, mplace.layout, mplace.align, mplace.mplace) + self.write_immediate_to_mplace_no_validate(src, mplace.layout, mplace.mplace) } /// Write an immediate to memory. @@ -700,7 +682,6 @@ &mut self, value: Immediate, layout: TyAndLayout<'tcx>, - align: Align, dest: MemPlace, ) -> InterpResult<'tcx> { // Note that it is really important that the type here is the right one, and matches the @@ -709,9 +690,7 @@ // wrong type. let tcx = *self.tcx; - let Some(mut alloc) = - self.get_place_alloc_mut(&MPlaceTy { mplace: dest, layout, align })? - else { + let Some(mut alloc) = self.get_place_alloc_mut(&MPlaceTy { mplace: dest, layout })? else { // zero-sized access return Ok(()); }; @@ -729,9 +708,6 @@ alloc.write_scalar(alloc_range(Size::ZERO, size), scalar) } Immediate::ScalarPair(a_val, b_val) => { - // We checked `ptr_align` above, so all fields will have the alignment they need. - // We would anyway check against `ptr_align.restrict_for_offset(b_offset)`, - // which `ptr.offset(b_offset)` cannot possibly fail to satisfy. let Abi::ScalarPair(a, b) = layout.abi else { span_bug!( self.cur_span(), @@ -760,7 +736,7 @@ ) -> InterpResult<'tcx> { let mplace = match dest.as_mplace_or_local() { Left(mplace) => mplace, - Right((frame, local, offset, align, layout)) => { + Right((frame, local, offset, layout)) => { if offset.is_some() { // This has been projected to a part of this local. We could have complicated // logic to still keep this local as an `Operand`... but it's much easier to @@ -776,7 +752,7 @@ } Operand::Indirect(mplace) => { // The local is in memory, go on below. - MPlaceTy { mplace: *mplace, layout, align } + MPlaceTy { mplace: *mplace, layout } } } } @@ -869,7 +845,6 @@ self.write_immediate_to_mplace_no_validate( *src_val, src.layout(), - dest_mem.align, dest_mem.mplace, ) }; @@ -896,14 +871,12 @@ // type does not have Scalar/ScalarPair layout. // (Or as the `Assign` docs put it, assignments "not producing primitives" must be // non-overlapping.) - self.mem_copy( - src.ptr(), - src.align, - dest.ptr(), - dest.align, - dest_size, - /*nonoverlapping*/ true, - ) + // We check alignment separately, and *after* checking everything else. + // If an access is both OOB and misaligned, we want to see the bounds error. + self.mem_copy(src.ptr(), dest.ptr(), dest_size, /*nonoverlapping*/ true)?; + self.check_misalign(src.mplace.misaligned, CheckAlignMsg::BasedOn)?; + self.check_misalign(dest.mplace.misaligned, CheckAlignMsg::BasedOn)?; + Ok(()) } /// Ensures that a place is in memory, and returns where it is. @@ -937,7 +910,6 @@ self.write_immediate_to_mplace_no_validate( local_val, local_layout, - local_layout.align.abi, mplace.mplace, )?; } @@ -952,7 +924,13 @@ &mut Operand::Indirect(mplace) => mplace, // this already was an indirect local }; if let Some(offset) = offset { - whole_local.offset_with_meta_(offset, MemPlaceMeta::None, self)? + // This offset is always inbounds, no need to check it again. + whole_local.offset_with_meta_( + offset, + OffsetMode::Wrapping, + MemPlaceMeta::None, + self, + )? } else { // Preserve wide place metadata, do not call `offset`. 
whole_local @@ -961,7 +939,7 @@ Place::Ptr(mplace) => mplace, }; // Return with the original layout and align, so that the caller can go on - Ok(MPlaceTy { mplace, layout: place.layout, align: place.align }) + Ok(MPlaceTy { mplace, layout: place.layout }) } pub fn allocate_dyn( @@ -974,7 +952,7 @@ span_bug!(self.cur_span(), "cannot allocate space for `extern` type, size is not known") }; let ptr = self.allocate_ptr(size, align, kind)?; - Ok(MPlaceTy::from_aligned_ptr_with_meta(ptr.into(), layout, meta)) + Ok(self.ptr_with_meta_to_mplace(ptr.into(), meta, layout)) } pub fn allocate( @@ -986,7 +964,7 @@ self.allocate_dyn(layout, kind, MemPlaceMeta::None) } - /// Returns a wide MPlace of type `&'static [mut] str` to a new 1-aligned allocation. + /// Returns a wide MPlace of type `str` to a new 1-aligned allocation. pub fn allocate_str( &mut self, str: &str, @@ -995,15 +973,8 @@ ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> { let ptr = self.allocate_bytes_ptr(str.as_bytes(), Align::ONE, kind, mutbl)?; let meta = Scalar::from_target_usize(u64::try_from(str.len()).unwrap(), self); - let mplace = MemPlace { ptr: ptr.into(), meta: MemPlaceMeta::Meta(meta) }; - - let ty = Ty::new_ref( - self.tcx.tcx, - self.tcx.lifetimes.re_static, - ty::TypeAndMut { ty: self.tcx.types.str_, mutbl }, - ); - let layout = self.layout_of(ty).unwrap(); - Ok(MPlaceTy { mplace, layout, align: layout.align.abi }) + let layout = self.layout_of(self.tcx.types.str_).unwrap(); + Ok(self.ptr_with_meta_to_mplace(ptr.into(), MemPlaceMeta::Meta(meta), layout)) } /// Writes the aggregate to the destination. @@ -1042,7 +1013,7 @@ let _ = self.tcx.global_alloc(raw.alloc_id); let ptr = self.global_base_pointer(Pointer::from(raw.alloc_id))?; let layout = self.layout_of(raw.ty)?; - Ok(MPlaceTy::from_aligned_ptr(ptr.into(), layout)) + Ok(self.ptr_to_mplace(ptr.into(), layout)) } /// Turn a place with a `dyn Trait` type into a place with the actual dynamic type. @@ -1058,12 +1029,10 @@ let vtable = mplace.meta().unwrap_meta().to_pointer(self)?; let (ty, _) = self.get_ptr_vtable(vtable)?; let layout = self.layout_of(ty)?; - - let mplace = MPlaceTy { - mplace: MemPlace { meta: MemPlaceMeta::None, ..mplace.mplace }, - layout, - align: layout.align.abi, - }; + // This is a kind of transmute, from a place with unsized type and metadata to + // a place with sized type and no metadata. + let mplace = + MPlaceTy { mplace: MemPlace { meta: MemPlaceMeta::None, ..mplace.mplace }, layout }; Ok((mplace, vtable)) } @@ -1095,10 +1064,10 @@ use super::*; use rustc_data_structures::static_assert_size; // tidy-alphabetical-start - static_assert_size!(MemPlace, 40); + static_assert_size!(MemPlace, 48); static_assert_size!(MemPlaceMeta, 24); static_assert_size!(MPlaceTy<'_>, 64); - static_assert_size!(Place, 40); + static_assert_size!(Place, 48); static_assert_size!(PlaceTy<'_>, 64); // tidy-alphabetical-end } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/projection.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/projection.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/projection.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/projection.rs 2023-12-21 16:55:28.000000000 +0000 @@ -19,6 +19,15 @@ use super::{InterpCx, InterpResult, MPlaceTy, Machine, MemPlaceMeta, OpTy, Provenance, Scalar}; +/// Describes the constraints placed on offset-projections. 
+#[derive(Copy, Clone, Debug)] +pub enum OffsetMode { + /// The offset has to be inbounds, like `ptr::offset`. + Inbounds, + /// No constraints, just wrap around the edge of the address space. + Wrapping, +} + /// A thing that we can project into, and that has a layout. pub trait Projectable<'tcx, Prov: Provenance>: Sized + std::fmt::Debug { /// Get the layout. @@ -53,12 +62,12 @@ fn offset_with_meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>( &self, offset: Size, + mode: OffsetMode, meta: MemPlaceMeta, layout: TyAndLayout<'tcx>, ecx: &InterpCx<'mir, 'tcx, M>, ) -> InterpResult<'tcx, Self>; - #[inline] fn offset<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>( &self, offset: Size, @@ -66,10 +75,9 @@ ecx: &InterpCx<'mir, 'tcx, M>, ) -> InterpResult<'tcx, Self> { assert!(layout.is_sized()); - self.offset_with_meta(offset, MemPlaceMeta::None, layout, ecx) + self.offset_with_meta(offset, OffsetMode::Inbounds, MemPlaceMeta::None, layout, ecx) } - #[inline] fn transmute<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>( &self, layout: TyAndLayout<'tcx>, @@ -77,7 +85,7 @@ ) -> InterpResult<'tcx, Self> { assert!(self.layout().is_sized() && layout.is_sized()); assert_eq!(self.layout().size, layout.size); - self.offset_with_meta(Size::ZERO, MemPlaceMeta::None, layout, ecx) + self.offset_with_meta(Size::ZERO, OffsetMode::Wrapping, MemPlaceMeta::None, layout, ecx) } /// Convert this to an `OpTy`. This might be an irreversible transformation, but is useful for @@ -104,7 +112,17 @@ ecx: &InterpCx<'mir, 'tcx, M>, ) -> InterpResult<'tcx, Option<(u64, P)>> { let Some(idx) = self.range.next() else { return Ok(None) }; - Ok(Some((idx, self.base.offset(self.stride * idx, self.field_layout, ecx)?))) + // We use `Wrapping` here since the offset has already been checked when the iterator was created. + Ok(Some(( + idx, + self.base.offset_with_meta( + self.stride * idx, + OffsetMode::Wrapping, + MemPlaceMeta::None, + self.field_layout, + ecx, + )?, + ))) } } @@ -159,7 +177,7 @@ (MemPlaceMeta::None, offset) }; - base.offset_with_meta(offset, meta, field_layout, self) + base.offset_with_meta(offset, OffsetMode::Inbounds, meta, field_layout, self) } /// Downcasting to an enum variant. @@ -248,6 +266,10 @@ }; let len = base.len(self)?; let field_layout = base.layout().field(self, 0); + // Ensure that all the offsets are in-bounds once, up-front. + debug!("project_array_fields: {base:?} {len}"); + base.offset(len * stride, self.layout_of(self.tcx.types.unit).unwrap(), self)?; + // Create the iterator. 
Ok(ArrayIterator { base, range: 0..len, stride, field_layout, _phantom: PhantomData }) } @@ -305,7 +327,7 @@ }; let layout = self.layout_of(ty)?; - base.offset_with_meta(from_offset, meta, layout, self) + base.offset_with_meta(from_offset, OffsetMode::Inbounds, meta, layout, self) } /// Applying a general projection diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/step.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/step.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/step.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/step.rs 2023-12-21 16:55:28.000000000 +0000 @@ -206,15 +206,10 @@ let elem_size = first.layout.size; let first_ptr = first.ptr(); let rest_ptr = first_ptr.offset(elem_size, self)?; - // For the alignment of `rest_ptr`, we crucially do *not* use `first.align` as - // that place might be more aligned than its type mandates (a `u8` array could - // be 4-aligned if it sits at the right spot in a struct). We have to also factor - // in element size. + // No alignment requirement since `copy_op` above already checked it. self.mem_copy_repeatedly( first_ptr, - dest.align, rest_ptr, - dest.align.restrict_for_offset(elem_size), elem_size, length - 1, /*nonoverlapping:*/ true, @@ -268,7 +263,9 @@ NullaryOp(ref null_op, ty) => { let ty = self.subst_from_current_frame_and_normalize_erasing_regions(ty)?; let layout = self.layout_of(ty)?; - if let mir::NullOp::SizeOf | mir::NullOp::AlignOf = null_op && layout.is_unsized() { + if let mir::NullOp::SizeOf | mir::NullOp::AlignOf = null_op + && layout.is_unsized() + { span_bug!( self.frame().current_span(), "{null_op:?} MIR operator called for unsized type {ty}", @@ -278,7 +275,7 @@ mir::NullOp::SizeOf => layout.size.bytes(), mir::NullOp::AlignOf => layout.align.abi.bytes(), mir::NullOp::OffsetOf(fields) => { - layout.offset_of_subfield(self, fields.iter().map(|f| f.index())).bytes() + layout.offset_of_subfield(self, fields.iter()).bytes() } }; self.write_scalar(Scalar::from_target_usize(val, self), &dest)?; @@ -300,7 +297,7 @@ Discriminant(place) => { let op = self.eval_place_to_op(place, None)?; let variant = self.read_discriminant(&op)?; - let discr = self.discriminant_for_variant(op.layout, variant)?; + let discr = self.discriminant_for_variant(op.layout.ty, variant)?; self.write_immediate(*discr, &dest)?; } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/terminator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/terminator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/terminator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/terminator.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,5 @@ use std::borrow::Cow; -use either::Either; use rustc_ast::ast::InlineAsmOptions; use rustc_middle::{ mir, @@ -219,7 +218,7 @@ Unreachable => throw_ub!(Unreachable), // These should never occur for MIR we actually run. - FalseEdge { .. } | FalseUnwind { .. } | Yield { .. } | GeneratorDrop => span_bug!( + FalseEdge { .. } | FalseUnwind { .. } | Yield { .. 
} | CoroutineDrop => span_bug!( terminator.source_info.span, "{:#?} should have been eliminated by MIR pass", terminator.kind @@ -729,13 +728,7 @@ callee_ty: callee_fn_abi.ret.layout.ty }); } - // Ensure the return place is aligned and dereferenceable, and protect it for - // in-place return value passing. - if let Either::Left(mplace) = destination.as_mplace_or_local() { - self.check_mplace(&mplace)?; - } else { - // Nothing to do for locals, they are always properly allocated and aligned. - } + // Protect return place for in-place return value passing. M::protect_in_place_function_argument(self, destination)?; // Don't forget to mark "initially live" locals as live. @@ -890,11 +883,13 @@ } fn check_fn_target_features(&self, instance: ty::Instance<'tcx>) -> InterpResult<'tcx, ()> { + // Calling functions with `#[target_feature]` is not unsafe on WASM, see #84988 let attrs = self.tcx.codegen_fn_attrs(instance.def_id()); - if attrs - .target_features - .iter() - .any(|feature| !self.tcx.sess.target_features.contains(feature)) + if !self.tcx.sess.target.is_like_wasm + && attrs + .target_features + .iter() + .any(|feature| !self.tcx.sess.target_features.contains(feature)) { throw_ub_custom!( fluent::const_eval_unavailable_target_features_for_fn, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/util.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/util.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/util.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/util.rs 2023-12-21 16:55:28.000000000 +0000 @@ -34,7 +34,7 @@ match *ty.kind() { ty::Param(_) => ControlFlow::Break(FoundParam), ty::Closure(def_id, args) - | ty::Generator(def_id, args, ..) + | ty::Coroutine(def_id, args, ..) | ty::FnDef(def_id, args) => { let instance = ty::InstanceDef::Item(def_id); let unused_params = self.tcx.unused_generic_params(instance); @@ -42,10 +42,10 @@ let index = index .try_into() .expect("more generic parameters than can fit into a `u32`"); - // Only recurse when generic parameters in fns, closures and generators + // Only recurse when generic parameters in fns, closures and coroutines // are used and have to be instantiated. // - // Just in case there are closures or generators within this subst, + // Just in case there are closures or coroutines within this subst, // recurse. 
if unused_params.is_used(index) && subst.has_param() { return subst.visit_with(self); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/validity.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/validity.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/validity.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/interpret/validity.rs 2023-12-21 16:55:28.000000000 +0000 @@ -13,7 +13,7 @@ use rustc_data_structures::fx::FxHashSet; use rustc_hir as hir; use rustc_middle::mir::interpret::{ - ExpectedKind, InterpError, InvalidMetaKind, PointerKind, ValidationErrorInfo, + ExpectedKind, InterpError, InvalidMetaKind, Misalignment, PointerKind, ValidationErrorInfo, ValidationErrorKind, ValidationErrorKind::*, }; use rustc_middle::ty; @@ -112,13 +112,13 @@ pub enum PathElem { Field(Symbol), Variant(Symbol), - GeneratorState(VariantIdx), + CoroutineState(VariantIdx), CapturedVar(Symbol), ArrayElem(usize), TupleElem(usize), Deref, EnumTag, - GeneratorTag, + CoroutineTag, DynDowncast, } @@ -171,8 +171,8 @@ Field(name) => write!(out, ".{name}"), EnumTag => write!(out, "."), Variant(name) => write!(out, "."), - GeneratorTag => write!(out, "."), - GeneratorState(idx) => write!(out, ".", idx.index()), + CoroutineTag => write!(out, "."), + CoroutineState(idx) => write!(out, ".", idx.index()), CapturedVar(name) => write!(out, "."), TupleElem(idx) => write!(out, ".{idx}"), ArrayElem(idx) => write!(out, "[{idx}]"), @@ -206,7 +206,7 @@ if tag_field == field { return match layout.ty.kind() { ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag, - ty::Generator(..) => PathElem::GeneratorTag, + ty::Coroutine(..) => PathElem::CoroutineTag, _ => bug!("non-variant type {:?}", layout.ty), }; } @@ -216,8 +216,8 @@ // Now we know we are projecting to a field, so figure out which one. match layout.ty.kind() { - // generators and closures. - ty::Closure(def_id, _) | ty::Generator(def_id, _, _) => { + // coroutines and closures. + ty::Closure(def_id, _) | ty::Coroutine(def_id, _, _) => { let mut name = None; // FIXME this should be more descriptive i.e. CapturePlace instead of CapturedVar // https://github.com/rust-lang/project-rfc-2229/issues/46 @@ -225,7 +225,7 @@ let captures = self.ecx.tcx.closure_captures(local_def_id); if let Some(captured_place) = captures.get(field) { // Sometimes the index is beyond the number of upvars (seen - // for a generator). + // for a coroutine). let var_hir_id = captured_place.get_root_variable(); let node = self.ecx.tcx.hir().get(var_hir_id); if let hir::Node::Pat(pat) = node { @@ -355,7 +355,7 @@ value: &OpTy<'tcx, M::Provenance>, ptr_kind: PointerKind, ) -> InterpResult<'tcx> { - // Not using `deref_pointer` since we do the dereferenceable check ourselves below. + // Not using `deref_pointer` since we want to use our `read_immediate` wrapper. let place = self.ecx.ref_to_mplace(&self.read_immediate(value, ptr_kind.into())?)?; // Handle wide pointers. // Check metadata early, for better diagnostics @@ -378,18 +378,12 @@ .unwrap_or_else(|| (place.layout.size, place.layout.align.abi)); // Direct call to `check_ptr_access_align` checks alignment even on CTFE machines. 
try_validation!( - self.ecx.check_ptr_access_align( + self.ecx.check_ptr_access( place.ptr(), size, - align, CheckInAllocMsg::InboundsTest, // will anyway be replaced by validity message ), self.path, - Ub(AlignmentCheckFailed { required, has }) => UnalignedPtr { - ptr_kind, - required_bytes: required.bytes(), - found_bytes: has.bytes() - }, Ub(DanglingIntPointer(0, _)) => NullPtr { ptr_kind }, Ub(DanglingIntPointer(i, _)) => DanglingPtrNoProvenance { ptr_kind, @@ -405,6 +399,18 @@ ptr_kind, }, ); + try_validation!( + self.ecx.check_ptr_align( + place.ptr(), + align, + ), + self.path, + Ub(AlignmentCheckFailed(Misalignment { required, has }, _msg)) => UnalignedPtr { + ptr_kind, + required_bytes: required.bytes(), + found_bytes: has.bytes() + }, + ); // Do not allow pointers to uninhabited types. if place.layout.abi.is_uninhabited() { let ty = place.layout.ty; @@ -574,7 +580,7 @@ | ty::Str | ty::Dynamic(..) | ty::Closure(..) - | ty::Generator(..) => Ok(false), + | ty::Coroutine(..) => Ok(false), // Some types only occur during typechecking, they have no layout. // We should not see them here and we could not check them anyway. ty::Error(_) @@ -583,7 +589,7 @@ | ty::Bound(..) | ty::Param(..) | ty::Alias(..) - | ty::GeneratorWitness(..) => bug!("Encountered invalid type {:?}", ty), + | ty::CoroutineWitness(..) => bug!("Encountered invalid type {:?}", ty), } } @@ -645,7 +651,7 @@ #[inline(always)] fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> { - &self.ecx + self.ecx } fn read_discriminant( @@ -686,8 +692,8 @@ ) -> InterpResult<'tcx> { let name = match old_op.layout.ty.kind() { ty::Adt(adt, _) => PathElem::Variant(adt.variant(variant_id).name), - // Generators also have variants - ty::Generator(..) => PathElem::GeneratorState(variant_id), + // Coroutines also have variants + ty::Coroutine(..) => PathElem::CoroutineState(variant_id), _ => bug!("Unexpected type with variant: {:?}", old_op.layout.ty), }; self.with_elem(name, move |this| this.visit_value(new_op)) @@ -781,14 +787,8 @@ // Optimization: we just check the entire range at once. // NOTE: Keep this in sync with the handling of integer and float // types above, in `visit_primitive`. - // In run-time mode, we accept pointers in here. This is actually more - // permissive than a per-element check would be, e.g., we accept - // a &[u8] that contains a pointer even though bytewise checking would - // reject it. However, that's good: We don't inherently want - // to reject those pointers, we just do not have the machinery to - // talk about parts of a pointer. - // We also accept uninit, for consistency with the slow path. - let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size, mplace.align)?.expect("we already excluded size 0"); + // No need for an alignment check here, this is not an actual memory access. + let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size)?.expect("we already excluded size 0"); match alloc.get_bytes_strip_provenance() { // In the happy case, we needn't check anything else. @@ -929,7 +929,7 @@ /// - no pointers to statics. /// - no `UnsafeCell` or non-ZST `&mut`. 
#[inline(always)] - pub fn const_validate_operand( + pub(crate) fn const_validate_operand( &self, op: &OpTy<'tcx, M::Provenance>, path: Vec, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,6 +4,9 @@ */ +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] #![deny(rustc::untranslatable_diagnostic)] #![feature(assert_matches)] #![feature(box_patterns)] @@ -46,13 +49,13 @@ const_eval::provide(providers); providers.eval_to_const_value_raw = const_eval::eval_to_const_value_raw_provider; providers.eval_to_allocation_raw = const_eval::eval_to_allocation_raw_provider; - providers.const_caller_location = const_eval::const_caller_location; + providers.hooks.const_caller_location = util::caller_location::const_caller_location_provider; providers.eval_to_valtree = |tcx, param_env_and_value| { let (param_env, raw) = param_env_and_value.into_parts(); const_eval::eval_to_valtree(tcx, param_env, raw) }; - providers.hooks.try_destructure_mir_constant_for_diagnostics = - const_eval::try_destructure_mir_constant_for_diagnostics; + providers.hooks.try_destructure_mir_constant_for_user_output = + const_eval::try_destructure_mir_constant_for_user_output; providers.valtree_to_const_val = |tcx, (ty, valtree)| { const_eval::valtree_to_const_value(tcx, ty::ParamEnv::empty().and(ty), valtree) }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/check.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/check.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/check.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/check.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,16 +9,17 @@ use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; use rustc_middle::traits::BuiltinImplSource; +use rustc_middle::ty::GenericArgs; use rustc_middle::ty::{self, adjustment::PointerCoercion, Instance, InstanceDef, Ty, TyCtxt}; -use rustc_middle::ty::{GenericArgKind, GenericArgs}; use rustc_middle::ty::{TraitRef, TypeVisitableExt}; use rustc_mir_dataflow::{self, Analysis}; use rustc_span::{sym, Span, Symbol}; use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _; use rustc_trait_selection::traits::{self, ObligationCauseCode, ObligationCtxt, SelectionContext}; +use rustc_type_ir::visit::{TypeSuperVisitable, TypeVisitor}; use std::mem; -use std::ops::Deref; +use std::ops::{ControlFlow, Deref}; use super::ops::{self, NonConstOp, Status}; use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop}; @@ -188,6 +189,24 @@ } } +struct LocalReturnTyVisitor<'ck, 'mir, 'tcx> { + kind: LocalKind, + checker: &'ck mut Checker<'mir, 'tcx>, +} + +impl<'ck, 'mir, 'tcx> TypeVisitor> for LocalReturnTyVisitor<'ck, 'mir, 'tcx> { + fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow { + match t.kind() { + ty::FnPtr(_) => ControlFlow::Continue(()), + ty::Ref(_, _, hir::Mutability::Mut) => { + self.checker.check_op(ops::ty::MutRef(self.kind)); + 
t.super_visit_with(self) + } + _ => t.super_visit_with(self), + } + } +} + pub struct Checker<'mir, 'tcx> { ccx: &'mir ConstCx<'mir, 'tcx>, qualifs: Qualifs<'mir, 'tcx>, @@ -228,7 +247,7 @@ // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's // no need to emit duplicate errors here. - if self.ccx.is_async() || body.generator.is_some() { + if self.ccx.is_async() || body.coroutine.is_some() { tcx.sess.delay_span_bug(body.span, "`async` functions cannot be `const fn`"); return; } @@ -237,7 +256,7 @@ if self.const_kind() == hir::ConstContext::ConstFn { for (idx, local) in body.local_decls.iter_enumerated() { // Handle the return place below. - if idx == RETURN_PLACE || local.internal { + if idx == RETURN_PLACE { continue; } @@ -304,7 +323,7 @@ let gate = match op.status_in_item(self.ccx) { Status::Allowed => return, - Status::Unstable(gate) if self.tcx.features().enabled(gate) => { + Status::Unstable(gate) if self.tcx.features().active(gate) => { let unstable_in_stable = self.ccx.is_const_stable_const_fn() && !super::rustc_allow_const_fn_unstable(self.tcx, self.def_id(), gate); if unstable_in_stable { @@ -346,20 +365,9 @@ fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>, local: Local) { let kind = self.body.local_kind(local); - for ty in ty.walk() { - let ty = match ty.unpack() { - GenericArgKind::Type(ty) => ty, - - // No constraints on lifetimes or constants, except potentially - // constants' types, but `walk` will get to them as well. - GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue, - }; - - match *ty.kind() { - ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef(kind)), - _ => {} - } - } + let mut visitor = LocalReturnTyVisitor { kind, checker: self }; + + visitor.visit_ty(ty); } fn check_mut_borrow(&mut self, local: Local, kind: hir::BorrowKind) { @@ -455,10 +463,11 @@ | Rvalue::Len(_) => {} Rvalue::Aggregate(kind, ..) => { - if let AggregateKind::Generator(def_id, ..) = kind.as_ref() - && let Some(generator_kind @ hir::GeneratorKind::Async(..)) = self.tcx.generator_kind(def_id) + if let AggregateKind::Coroutine(def_id, ..) = kind.as_ref() + && let Some(coroutine_kind @ hir::CoroutineKind::Async(..)) = + self.tcx.coroutine_kind(def_id) { - self.check_op(ops::Generator(generator_kind)); + self.check_op(ops::Coroutine(coroutine_kind)); } } @@ -571,8 +580,7 @@ } } - Rvalue::BinaryOp(op, box (lhs, rhs)) - | Rvalue::CheckedBinaryOp(op, box (lhs, rhs)) => { + Rvalue::BinaryOp(op, box (lhs, rhs)) | Rvalue::CheckedBinaryOp(op, box (lhs, rhs)) => { let lhs_ty = lhs.ty(self.body, self.tcx); let rhs_ty = rhs.ty(self.body, self.tcx); @@ -580,18 +588,16 @@ // Int, bool, and char operations are fine. } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() { assert_eq!(lhs_ty, rhs_ty); - assert!( - matches!( - op, - BinOp::Eq + assert!(matches!( + op, + BinOp::Eq | BinOp::Ne | BinOp::Le | BinOp::Lt | BinOp::Ge | BinOp::Gt | BinOp::Offset - ) - ); + )); self.check_op(ops::RawPtrComparison); } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() { @@ -743,7 +749,7 @@ let errors = ocx.select_all_or_error(); if !errors.is_empty() { - infcx.err_ctxt().report_fulfillment_errors(&errors); + infcx.err_ctxt().report_fulfillment_errors(errors); } // Attempting to call a trait method? @@ -887,7 +893,7 @@ // At this point, we are calling a function, `callee`, whose `DefId` is known... 
- // `begin_panic` and `panic_display` are generic functions that accept + // `begin_panic` and `#[rustc_const_panic_str]` functions accept generic // types other than str. Check to enforce that only str can be used in // const-eval. @@ -899,8 +905,8 @@ } } - // const-eval of the `panic_display` fn assumes the argument is `&&str` - if Some(callee) == tcx.lang_items().panic_display() { + // const-eval of `#[rustc_const_panic_str]` functions assumes the argument is `&&str` + if tcx.has_attr(callee, sym::rustc_const_panic_str) { match args[0].ty(&self.ccx.body.local_decls, tcx).kind() { ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) => { @@ -939,7 +945,9 @@ if self.span.allows_unstable(gate) { return; } - if let Some(implied_by_gate) = implied_by && self.span.allows_unstable(implied_by_gate) { + if let Some(implied_by_gate) = implied_by + && self.span.allows_unstable(implied_by_gate) + { return; } @@ -1034,8 +1042,8 @@ TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm), - TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => { - self.check_op(ops::Generator(hir::GeneratorKind::Gen)) + TerminatorKind::CoroutineDrop | TerminatorKind::Yield { .. } => { + self.check_op(ops::Coroutine(hir::CoroutineKind::Coroutine)) } TerminatorKind::UnwindTerminate(_) => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/ops.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/ops.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/ops.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/ops.rs 2023-12-21 16:55:28.000000000 +0000 @@ -311,10 +311,10 @@ ccx.const_kind(), )); - if let Some(feature) = feature && ccx.tcx.sess.is_nightly_build() { - err.help(format!( - "add `#![feature({feature})]` to the crate attributes to enable", - )); + if let Some(feature) = feature + && ccx.tcx.sess.is_nightly_build() + { + err.help(format!("add `#![feature({feature})]` to the crate attributes to enable",)); } if let ConstContext::Static(_) = ccx.const_kind() { @@ -357,10 +357,10 @@ } #[derive(Debug)] -pub struct Generator(pub hir::GeneratorKind); -impl<'tcx> NonConstOp<'tcx> for Generator { +pub struct Coroutine(pub hir::CoroutineKind); +impl<'tcx> NonConstOp<'tcx> for Coroutine { fn status_in_item(&self, _: &ConstCx<'_, 'tcx>) -> Status { - if let hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block) = self.0 { + if let hir::CoroutineKind::Async(hir::CoroutineSource::Block) = self.0 { Status::Unstable(sym::const_async_blocks) } else { Status::Forbidden @@ -372,8 +372,8 @@ ccx: &ConstCx<'_, 'tcx>, span: Span, ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { - let msg = format!("{}s are not allowed in {}s", self.0.descr(), ccx.const_kind()); - if let hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block) = self.0 { + let msg = format!("{:#}s are not allowed in {}s", self.0, ccx.const_kind()); + if let hir::CoroutineKind::Async(hir::CoroutineSource::Block) = self.0 { ccx.tcx.sess.create_feature_err( errors::UnallowedOpInConstContext { span, msg }, sym::const_async_blocks, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/post_drop_elaboration.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/post_drop_elaboration.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/post_drop_elaboration.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/check_consts/post_drop_elaboration.rs 2023-12-21 16:55:28.000000000 +0000 @@ -111,7 +111,7 @@ | mir::TerminatorKind::Assert { .. } | mir::TerminatorKind::FalseEdge { .. } | mir::TerminatorKind::FalseUnwind { .. } - | mir::TerminatorKind::GeneratorDrop + | mir::TerminatorKind::CoroutineDrop | mir::TerminatorKind::Goto { .. } | mir::TerminatorKind::InlineAsm { .. } | mir::TerminatorKind::UnwindResume diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/promote_consts.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/promote_consts.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/promote_consts.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/promote_consts.rs 2023-12-21 16:55:28.000000000 +0000 @@ -970,7 +970,7 @@ 0, vec![], body.span, - body.generator_kind(), + body.coroutine_kind(), body.tainted_by_errors, ); promoted.phase = MirPhase::Analysis(AnalysisPhase::Initial); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/validate.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/validate.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/validate.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/transform/validate.rs 2023-12-21 16:55:28.000000000 +0000 @@ -67,7 +67,7 @@ let body_abi = match body_ty.kind() { ty::FnDef(..) => body_ty.fn_sig(tcx).abi(), ty::Closure(..) => Abi::RustCall, - ty::Generator(..) => Abi::Rust, + ty::Coroutine(..) => Abi::Rust, _ => { span_bug!(body.span, "unexpected body ty: {:?} phase {:?}", body_ty, mir_phase) } @@ -472,11 +472,11 @@ self.check_unwind_edge(location, *unwind); } TerminatorKind::Yield { resume, drop, .. 
} => { - if self.body.generator.is_none() { - self.fail(location, "`Yield` cannot appear outside generator bodies"); + if self.body.coroutine.is_none() { + self.fail(location, "`Yield` cannot appear outside coroutine bodies"); } if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) { - self.fail(location, "`Yield` should have been replaced by generator lowering"); + self.fail(location, "`Yield` should have been replaced by coroutine lowering"); } self.check_edge(location, *resume, EdgeKind::Normal); if let Some(drop) = drop { @@ -509,14 +509,14 @@ } self.check_unwind_edge(location, *unwind); } - TerminatorKind::GeneratorDrop => { - if self.body.generator.is_none() { - self.fail(location, "`GeneratorDrop` cannot appear outside generator bodies"); + TerminatorKind::CoroutineDrop => { + if self.body.coroutine.is_none() { + self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies"); } if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) { self.fail( location, - "`GeneratorDrop` should have been replaced by generator lowering", + "`CoroutineDrop` should have been replaced by coroutine lowering", ); } } @@ -716,7 +716,7 @@ }; check_equal(self, location, f_ty); } - &ty::Generator(def_id, args, _) => { + &ty::Coroutine(def_id, args, _) => { let f_ty = if let Some(var) = parent_ty.variant_index { let gen_body = if def_id == self.body.source.def_id() { self.body @@ -724,10 +724,10 @@ self.tcx.optimized_mir(def_id) }; - let Some(layout) = gen_body.generator_layout() else { + let Some(layout) = gen_body.coroutine_layout() else { self.fail( location, - format!("No generator layout for {parent_ty:?}"), + format!("No coroutine layout for {parent_ty:?}"), ); return; }; @@ -747,7 +747,7 @@ ty::EarlyBinder::bind(f_ty.ty).instantiate(self.tcx, args) } else { - let Some(&f_ty) = args.as_generator().prefix_tys().get(f.index()) + let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index()) else { fail_out_of_bounds(self, location); return; @@ -1056,16 +1056,23 @@ } } } - Rvalue::NullaryOp(NullOp::OffsetOf(fields), container) => { + Rvalue::NullaryOp(NullOp::OffsetOf(indices), container) => { let fail_out_of_bounds = |this: &mut Self, location, field, ty| { this.fail(location, format!("Out of bounds field {field:?} for {ty:?}")); }; let mut current_ty = *container; - for field in fields.iter() { + for (variant, field) in indices.iter() { match current_ty.kind() { ty::Tuple(fields) => { + if variant != FIRST_VARIANT { + self.fail( + location, + format!("tried to get variant {variant:?} of tuple"), + ); + return; + } let Some(&f_ty) = fields.get(field.as_usize()) else { fail_out_of_bounds(self, location, field, current_ty); return; @@ -1074,15 +1081,7 @@ current_ty = self.tcx.normalize_erasing_regions(self.param_env, f_ty); } ty::Adt(adt_def, args) => { - if adt_def.is_enum() { - self.fail( - location, - format!("Cannot get field offset from enum {current_ty:?}"), - ); - return; - } - - let Some(field) = adt_def.non_enum_variant().fields.get(field) else { + let Some(field) = adt_def.variant(variant).fields.get(field) else { fail_out_of_bounds(self, location, field, current_ty); return; }; @@ -1093,7 +1092,7 @@ _ => { self.fail( location, - format!("Cannot get field offset from non-adt type {current_ty:?}"), + format!("Cannot get offset ({variant:?}, {field:?}) from type {current_ty:?}"), ); return; } @@ -1211,11 +1210,11 @@ self.fail(location, "`SetDiscriminant`is not allowed until deaggregation"); } let pty = place.ty(&self.body.local_decls, self.tcx).ty.kind(); - if 
!matches!(pty, ty::Adt(..) | ty::Generator(..) | ty::Alias(ty::Opaque, ..)) { + if !matches!(pty, ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..)) { self.fail( location, format!( - "`SetDiscriminant` is only allowed on ADTs and generators, not {pty:?}" + "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty:?}" ), ); } @@ -1295,7 +1294,7 @@ | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } | TerminatorKind::InlineAsm { .. } - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::UnwindResume | TerminatorKind::UnwindTerminate(_) | TerminatorKind::Return diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/alignment.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/alignment.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/alignment.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/alignment.rs 2023-12-21 16:55:28.000000000 +0000 @@ -21,10 +21,18 @@ }; let ty = place.ty(local_decls, tcx).ty; + let unsized_tail = || tcx.struct_tail_with_normalize(ty, |ty| ty, || {}); match tcx.layout_of(param_env.and(ty)) { - Ok(layout) if layout.align.abi <= pack => { + Ok(layout) + if layout.align.abi <= pack + && (layout.is_sized() + || matches!(unsized_tail().kind(), ty::Slice(..) | ty::Str)) => + { // If the packed alignment is greater or equal to the field alignment, the type won't be // further disaligned. + // However we need to ensure the field is sized; for unsized fields, `layout.align` is + // just an approximation -- except when the unsized tail is a slice, where the alignment + // is fully determined by the type. debug!( "is_disaligned({:?}) - align = {}, packed = {}; not disaligned", place, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/caller_location.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/caller_location.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/caller_location.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/caller_location.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,66 @@ +use rustc_hir::LangItem; +use rustc_middle::mir; +use rustc_middle::query::TyCtxtAt; +use rustc_middle::ty; +use rustc_middle::ty::layout::LayoutOf; +use rustc_span::symbol::Symbol; +use rustc_type_ir::Mutability; + +use crate::const_eval::{mk_eval_cx, CanAccessStatics, CompileTimeEvalContext}; +use crate::interpret::*; + +/// Allocate a `const core::panic::Location` with the provided filename and line/column numbers. +fn alloc_caller_location<'mir, 'tcx>( + ecx: &mut CompileTimeEvalContext<'mir, 'tcx>, + filename: Symbol, + line: u32, + col: u32, +) -> MPlaceTy<'tcx> { + let loc_details = ecx.tcx.sess.opts.unstable_opts.location_detail; + // This can fail if rustc runs out of memory right here. Trying to emit an error would be + // pointless, since that would require allocating more memory than these short strings. + let file = if loc_details.file { + ecx.allocate_str(filename.as_str(), MemoryKind::CallerLocation, Mutability::Not).unwrap() + } else { + // FIXME: This creates a new allocation each time. It might be preferable to + // perform this allocation only once, and re-use the `MPlaceTy`. 
+ // See https://github.com/rust-lang/rust/pull/89920#discussion_r730012398 + ecx.allocate_str("", MemoryKind::CallerLocation, Mutability::Not).unwrap() + }; + let line = if loc_details.line { Scalar::from_u32(line) } else { Scalar::from_u32(0) }; + let col = if loc_details.column { Scalar::from_u32(col) } else { Scalar::from_u32(0) }; + + // Allocate memory for `CallerLocation` struct. + let loc_ty = ecx + .tcx + .type_of(ecx.tcx.require_lang_item(LangItem::PanicLocation, None)) + .instantiate(*ecx.tcx, ecx.tcx.mk_args(&[ecx.tcx.lifetimes.re_erased.into()])); + let loc_layout = ecx.layout_of(loc_ty).unwrap(); + let location = ecx.allocate(loc_layout, MemoryKind::CallerLocation).unwrap(); + + // Initialize fields. + ecx.write_immediate(file.to_ref(ecx), &ecx.project_field(&location, 0).unwrap()) + .expect("writing to memory we just allocated cannot fail"); + ecx.write_scalar(line, &ecx.project_field(&location, 1).unwrap()) + .expect("writing to memory we just allocated cannot fail"); + ecx.write_scalar(col, &ecx.project_field(&location, 2).unwrap()) + .expect("writing to memory we just allocated cannot fail"); + + location +} + +pub(crate) fn const_caller_location_provider( + tcx: TyCtxtAt<'_>, + file: Symbol, + line: u32, + col: u32, +) -> mir::ConstValue<'_> { + trace!("const_caller_location: {}:{}:{}", file, line, col); + let mut ecx = mk_eval_cx(tcx.tcx, tcx.span, ty::ParamEnv::reveal_all(), CanAccessStatics::No); + + let loc_place = alloc_caller_location(&mut ecx, file, line, col); + if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() { + bug!("intern_const_alloc_recursive should not error in this case") + } + mir::ConstValue::Scalar(Scalar::from_maybe_pointer(loc_place.ptr(), &tcx)) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,7 @@ use rustc_middle::mir; mod alignment; +pub(crate) mod caller_location; mod check_validity_requirement; mod compare_types; mod type_name; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/type_name.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/type_name.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/type_name.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_const_eval/src/util/type_name.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,7 +3,7 @@ use rustc_hir::definitions::DisambiguatedDefPathData; use rustc_middle::ty::{ self, - print::{PrettyPrinter, Print, Printer}, + print::{PrettyPrinter, Print, PrintError, Printer}, GenericArg, GenericArgKind, Ty, TyCtxt, }; use std::fmt::Write; @@ -14,23 +14,15 @@ } impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { - type Error = std::fmt::Error; - - type Path = Self; - type Region = Self; - type Type = Self; - type DynExistential = Self; - type Const = Self; - fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } - fn print_region(self, _region: ty::Region<'_>) -> Result { - Ok(self) + fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { + Ok(()) } - fn print_type(mut self, ty: Ty<'tcx>) -> Result { + fn print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError> { match *ty.kind() { // Types 
without identity. ty::Bool @@ -51,7 +43,7 @@ // Placeholders (all printed as `_` to uniformize them). ty::Param(_) | ty::Bound(..) | ty::Placeholder(_) | ty::Infer(_) | ty::Error(_) => { write!(self, "_")?; - Ok(self) + Ok(()) } // Types with identity (print the module path). @@ -59,53 +51,53 @@ | ty::FnDef(def_id, args) | ty::Alias(ty::Projection | ty::Opaque, ty::AliasTy { def_id, args, .. }) | ty::Closure(def_id, args) - | ty::Generator(def_id, args, _) => self.print_def_path(def_id, args), + | ty::Coroutine(def_id, args, _) => self.print_def_path(def_id, args), ty::Foreign(def_id) => self.print_def_path(def_id, &[]), ty::Alias(ty::Weak, _) => bug!("type_name: unexpected weak projection"), ty::Alias(ty::Inherent, _) => bug!("type_name: unexpected inherent projection"), - ty::GeneratorWitness(..) => bug!("type_name: unexpected `GeneratorWitness`"), + ty::CoroutineWitness(..) => bug!("type_name: unexpected `CoroutineWitness`"), } } - fn print_const(self, ct: ty::Const<'tcx>) -> Result { + fn print_const(&mut self, ct: ty::Const<'tcx>) -> Result<(), PrintError> { self.pretty_print_const(ct, false) } fn print_dyn_existential( - self, + &mut self, predicates: &'tcx ty::List>, - ) -> Result { + ) -> Result<(), PrintError> { self.pretty_print_dyn_existential(predicates) } - fn path_crate(mut self, cnum: CrateNum) -> Result { + fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.path.push_str(self.tcx.crate_name(cnum).as_str()); - Ok(self) + Ok(()) } fn path_qualified( - self, + &mut self, self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result { + ) -> Result<(), PrintError> { self.pretty_path_qualified(self_ty, trait_ref) } fn path_append_impl( - self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, _disambiguated_data: &DisambiguatedDefPathData, self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result { + ) -> Result<(), PrintError> { self.pretty_path_append_impl( - |mut cx| { - cx = print_prefix(cx)?; + |cx| { + print_prefix(cx)?; cx.path.push_str("::"); - Ok(cx) + Ok(()) }, self_ty, trait_ref, @@ -113,29 +105,29 @@ } fn path_append( - mut self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, - ) -> Result { - self = print_prefix(self)?; + ) -> Result<(), PrintError> { + print_prefix(self)?; write!(self.path, "::{}", disambiguated_data.data).unwrap(); - Ok(self) + Ok(()) } fn path_generic_args( - mut self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, args: &[GenericArg<'tcx>], - ) -> Result { - self = print_prefix(self)?; + ) -> Result<(), PrintError> { + print_prefix(self)?; let args = args.iter().cloned().filter(|arg| !matches!(arg.unpack(), GenericArgKind::Lifetime(_))); if args.clone().next().is_some() { self.generic_delimiters(|cx| cx.comma_sep(args)) } else { - Ok(self) + Ok(()) } } } @@ -144,31 +136,31 @@ fn should_print_region(&self, _region: ty::Region<'_>) -> bool { false } - fn comma_sep(mut self, mut elems: impl Iterator) -> Result + fn comma_sep(&mut self, mut elems: impl Iterator) -> Result<(), PrintError> where - T: Print<'tcx, Self, Output = Self, Error = Self::Error>, + T: Print<'tcx, Self>, { if let Some(first) = elems.next() { - self = first.print(self)?; + first.print(self)?; for elem in elems { self.path.push_str(", "); - self = elem.print(self)?; + elem.print(self)?; } } - Ok(self) 
+ Ok(()) } fn generic_delimiters( - mut self, - f: impl FnOnce(Self) -> Result, - ) -> Result { + &mut self, + f: impl FnOnce(&mut Self) -> Result<(), PrintError>, + ) -> Result<(), PrintError> { write!(self, "<")?; - self = f(self)?; + f(self)?; write!(self, ">")?; - Ok(self) + Ok(()) } fn should_print_verbose(&self) -> bool { @@ -185,5 +177,7 @@ } pub fn type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> String { - AbsolutePathPrinter { tcx, path: String::new() }.print_type(ty).unwrap().path + let mut printer = AbsolutePathPrinter { tcx, path: String::new() }; + printer.print_type(ty).unwrap(); + printer.path } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,36 +3,31 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start arrayvec = { version = "0.7", default-features = false } bitflags = "1.2.1" -cfg-if = "1.0" +elsa = "=1.7.1" ena = "0.14.2" indexmap = { version = "2.0.0" } +itertools = "0.10.1" jobserver_crate = { version = "0.1.13", package = "jobserver" } libc = "0.2" measureme = "10.0.0" -rustc-rayon-core = { version = "0.5.0", optional = true } +rustc-hash = "1.1.0" rustc-rayon = { version = "0.5.0", optional = true } +rustc-rayon-core = { version = "0.5.0", optional = true } rustc_arena = { path = "../rustc_arena" } rustc_graphviz = { path = "../rustc_graphviz" } -rustc-hash = "1.1.0" rustc_index = { path = "../rustc_index", package = "rustc_index" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } -smallvec = { version = "1.8.1", features = [ - "const_generics", - "union", - "may_dangle", -] } +smallvec = { version = "1.8.1", features = ["const_generics", "union", "may_dangle"] } stacker = "0.1.15" tempfile = "3.2" thin-vec = "0.2.12" tracing = "0.1" -elsa = "=1.7.1" -itertools = "0.10.1" +# tidy-alphabetical-end [dependencies.parking_lot] version = "0.12" @@ -48,7 +43,14 @@ ] [target.'cfg(not(target_arch = "wasm32"))'.dependencies] +# tidy-alphabetical-start memmap2 = "0.2.1" +# tidy-alphabetical-end + +[target.'cfg(any(target_arch = "powerpc", target_arch = "mips"))'.dependencies] +portable-atomic = "1.5.1" [features] +# tidy-alphabetical-start rustc_use_parallel_compiler = ["indexmap/rustc-rayon", "rustc-rayon", "rustc-rayon-core"] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/flock.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/flock.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/flock.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/flock.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,17 +4,20 @@ //! green/native threading. This is just a bare-bones enough solution for //! librustdoc, it is not production quality at all. -cfg_if! { - if #[cfg(target_os = "linux")] { +cfg_match! 
{ + cfg(target_os = "linux") => { mod linux; use linux as imp; - } else if #[cfg(unix)] { + } + cfg(unix) => { mod unix; use unix as imp; - } else if #[cfg(windows)] { + } + cfg(windows) => { mod windows; use self::windows as imp; - } else { + } + _ => { mod unsupported; use unsupported as imp; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/functor.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/functor.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/functor.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/functor.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,116 +0,0 @@ -use rustc_index::{Idx, IndexVec}; -use std::{mem, rc::Rc, sync::Arc}; - -pub trait IdFunctor: Sized { - type Inner; - - fn try_map_id(self, f: F) -> Result - where - F: FnMut(Self::Inner) -> Result; -} - -impl IdFunctor for Box { - type Inner = T; - - #[inline] - fn try_map_id(self, mut f: F) -> Result - where - F: FnMut(Self::Inner) -> Result, - { - let raw = Box::into_raw(self); - Ok(unsafe { - // SAFETY: The raw pointer points to a valid value of type `T`. - let value = raw.read(); - // SAFETY: Converts `Box` to `Box>` which is the - // inverse of `Box::assume_init()` and should be safe. - let raw: Box> = Box::from_raw(raw.cast()); - // SAFETY: Write the mapped value back into the `Box`. - Box::write(raw, f(value)?) - }) - } -} - -impl IdFunctor for Vec { - type Inner = T; - - #[inline] - fn try_map_id(self, f: F) -> Result - where - F: FnMut(Self::Inner) -> Result, - { - self.into_iter().map(f).collect() - } -} - -impl IdFunctor for Box<[T]> { - type Inner = T; - - #[inline] - fn try_map_id(self, f: F) -> Result - where - F: FnMut(Self::Inner) -> Result, - { - Vec::from(self).try_map_id(f).map(Into::into) - } -} - -impl IdFunctor for IndexVec { - type Inner = T; - - #[inline] - fn try_map_id(self, f: F) -> Result - where - F: FnMut(Self::Inner) -> Result, - { - self.raw.try_map_id(f).map(IndexVec::from_raw) - } -} - -macro_rules! rc { - ($($rc:ident),+) => {$( - impl IdFunctor for $rc { - type Inner = T; - - #[inline] - fn try_map_id(mut self, mut f: F) -> Result - where - F: FnMut(Self::Inner) -> Result, - { - // We merely want to replace the contained `T`, if at all possible, - // so that we don't needlessly allocate a new `$rc` or indeed clone - // the contained type. - unsafe { - // First step is to ensure that we have a unique reference to - // the contained type, which `$rc::make_mut` will accomplish (by - // allocating a new `$rc` and cloning the `T` only if required). - // This is done *before* casting to `$rc>` so that - // panicking during `make_mut` does not leak the `T`. - $rc::make_mut(&mut self); - - // Casting to `$rc>` is safe because `ManuallyDrop` - // is `repr(transparent)`. - let ptr = $rc::into_raw(self).cast::>(); - let mut unique = $rc::from_raw(ptr); - - // Call to `$rc::make_mut` above guarantees that `unique` is the - // sole reference to the contained value, so we can avoid doing - // a checked `get_mut` here. - let slot = $rc::get_mut_unchecked(&mut unique); - - // Semantically move the contained type out from `unique`, fold - // it, then move the folded value back into `unique`. Should - // folding fail, `ManuallyDrop` ensures that the "moved-out" - // value is not re-dropped. - let owned = mem::ManuallyDrop::take(slot); - let folded = f(owned)?; - *slot = mem::ManuallyDrop::new(folded); - - // Cast back to `$rc`. 
- Ok($rc::from_raw($rc::into_raw(unique).cast())) - } - } - } - )+}; -} - -rc! { Rc, Arc } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/graph/dominators/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/graph/dominators/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/graph/dominators/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/graph/dominators/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -26,7 +26,42 @@ struct PreorderIndex {} } -pub fn dominators(graph: &G) -> Dominators { +#[derive(Clone, Debug)] +pub struct Dominators { + kind: Kind, +} + +#[derive(Clone, Debug)] +enum Kind { + /// A representation optimized for a small path graphs. + Path, + General(Inner), +} + +pub fn dominators(g: &G) -> Dominators { + // We often encounter MIR bodies with 1 or 2 basic blocks. Special case the dominators + // computation and representation for those cases. + if is_small_path_graph(g) { + Dominators { kind: Kind::Path } + } else { + Dominators { kind: Kind::General(dominators_impl(g)) } + } +} + +fn is_small_path_graph(g: &G) -> bool { + if g.start_node().index() != 0 { + return false; + } + if g.num_nodes() == 1 { + return true; + } + if g.num_nodes() == 2 { + return g.successors(g.start_node()).any(|n| n.index() == 1); + } + false +} + +fn dominators_impl(graph: &G) -> Inner { // compute the post order index (rank) for each node let mut post_order_rank = IndexVec::from_elem_n(0, graph.num_nodes()); @@ -245,7 +280,7 @@ let time = compute_access_time(start_node, &immediate_dominators); - Dominators { start_node, post_order_rank, immediate_dominators, time } + Inner { post_order_rank, immediate_dominators, time } } /// Evaluate the link-eval virtual forest, providing the currently minimum semi @@ -310,12 +345,11 @@ /// Tracks the list of dominators for each node. #[derive(Clone, Debug)] -pub struct Dominators { - start_node: N, +struct Inner { post_order_rank: IndexVec, // Even though we track only the immediate dominator of each node, it's // possible to get its full list of dominators by looking up the dominator - // of each dominator. (See the `impl Iterator for Iter` definition). + // of each dominator. immediate_dominators: IndexVec>, time: IndexVec, } @@ -323,19 +357,24 @@ impl Dominators { /// Returns true if node is reachable from the start node. pub fn is_reachable(&self, node: Node) -> bool { - node == self.start_node || self.immediate_dominators[node].is_some() + match &self.kind { + Kind::Path => true, + Kind::General(g) => g.time[node].start != 0, + } } /// Returns the immediate dominator of node, if any. pub fn immediate_dominator(&self, node: Node) -> Option { - self.immediate_dominators[node] - } - - /// Provides an iterator over each dominator up the CFG, for the given Node. - /// See the `impl Iterator for Iter` definition to understand how this works. - pub fn dominators(&self, node: Node) -> Iter<'_, Node> { - assert!(self.is_reachable(node), "node {node:?} is not reachable"); - Iter { dom_tree: self, node: Some(node) } + match &self.kind { + Kind::Path => { + if 0 < node.index() { + Some(Node::new(node.index() - 1)) + } else { + None + } + } + Kind::General(g) => g.immediate_dominators[node], + } } /// Provide deterministic ordering of nodes such that, if any two nodes have a dominator @@ -343,7 +382,10 @@ /// of two unrelated nodes will also be consistent, but otherwise the order has no /// meaning.) 
This method cannot be used to determine if either Node dominates the other. pub fn cmp_in_dominator_order(&self, lhs: Node, rhs: Node) -> Ordering { - self.post_order_rank[rhs].cmp(&self.post_order_rank[lhs]) + match &self.kind { + Kind::Path => lhs.index().cmp(&rhs.index()), + Kind::General(g) => g.post_order_rank[rhs].cmp(&g.post_order_rank[lhs]), + } } /// Returns true if `a` dominates `b`. @@ -352,27 +394,14 @@ /// /// Panics if `b` is unreachable. pub fn dominates(&self, a: Node, b: Node) -> bool { - let a = self.time[a]; - let b = self.time[b]; - assert!(b.start != 0, "node {b:?} is not reachable"); - a.start <= b.start && b.finish <= a.finish - } -} - -pub struct Iter<'dom, Node: Idx> { - dom_tree: &'dom Dominators, - node: Option, -} - -impl<'dom, Node: Idx> Iterator for Iter<'dom, Node> { - type Item = Node; - - fn next(&mut self) -> Option { - if let Some(node) = self.node { - self.node = self.dom_tree.immediate_dominator(node); - Some(node) - } else { - None + match &self.kind { + Kind::Path => a.index() <= b.index(), + Kind::General(g) => { + let a = g.time[a]; + let b = g.time[b]; + assert!(b.start != 0, "node {b:?} is not reachable"); + a.start <= b.start && b.finish <= a.finish + } } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/graph/dominators/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/graph/dominators/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/graph/dominators/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/graph/dominators/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,12 +6,11 @@ fn diamond() { let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]); - let dominators = dominators(&graph); - let immediate_dominators = &dominators.immediate_dominators; - assert_eq!(immediate_dominators[0], None); - assert_eq!(immediate_dominators[1], Some(0)); - assert_eq!(immediate_dominators[2], Some(0)); - assert_eq!(immediate_dominators[3], Some(0)); + let d = dominators(&graph); + assert_eq!(d.immediate_dominator(0), None); + assert_eq!(d.immediate_dominator(1), Some(0)); + assert_eq!(d.immediate_dominator(2), Some(0)); + assert_eq!(d.immediate_dominator(3), Some(0)); } #[test] @@ -22,15 +21,14 @@ &[(6, 5), (6, 4), (5, 1), (4, 2), (4, 3), (1, 2), (2, 3), (3, 2), (2, 1)], ); - let dominators = dominators(&graph); - let immediate_dominators = &dominators.immediate_dominators; - assert_eq!(immediate_dominators[0], None); // <-- note that 0 is not in graph - assert_eq!(immediate_dominators[1], Some(6)); - assert_eq!(immediate_dominators[2], Some(6)); - assert_eq!(immediate_dominators[3], Some(6)); - assert_eq!(immediate_dominators[4], Some(6)); - assert_eq!(immediate_dominators[5], Some(6)); - assert_eq!(immediate_dominators[6], None); + let d = dominators(&graph); + assert_eq!(d.immediate_dominator(0), None); // <-- note that 0 is not in graph + assert_eq!(d.immediate_dominator(1), Some(6)); + assert_eq!(d.immediate_dominator(2), Some(6)); + assert_eq!(d.immediate_dominator(3), Some(6)); + assert_eq!(d.immediate_dominator(4), Some(6)); + assert_eq!(d.immediate_dominator(5), Some(6)); + assert_eq!(d.immediate_dominator(6), None); } #[test] @@ -47,11 +45,11 @@ #[test] fn immediate_dominator() { let graph = TestGraph::new(1, &[(1, 2), (2, 3)]); - let dominators = dominators(&graph); - assert_eq!(dominators.immediate_dominator(0), None); - assert_eq!(dominators.immediate_dominator(1), None); - 
assert_eq!(dominators.immediate_dominator(2), Some(1)); - assert_eq!(dominators.immediate_dominator(3), Some(2)); + let d = dominators(&graph); + assert_eq!(d.immediate_dominator(0), None); + assert_eq!(d.immediate_dominator(1), None); + assert_eq!(d.immediate_dominator(2), Some(1)); + assert_eq!(d.immediate_dominator(3), Some(2)); } #[test] @@ -75,8 +73,7 @@ ], ); - let dom_tree = dominators(&graph); - let immediate_dominators = &dom_tree.immediate_dominators; - assert_eq!(immediate_dominators[2], Some(0)); - assert_eq!(immediate_dominators[3], Some(0)); // This used to return Some(1). + let d = dominators(&graph); + assert_eq!(d.immediate_dominator(2), Some(0)); + assert_eq!(d.immediate_dominator(3), Some(0)); // This used to return Some(1). } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,45 +6,44 @@ //! //! This API is completely unstable and subject to change. +// tidy-alphabetical-start +#![allow(internal_features)] +#![allow(rustc::default_hash_types)] +#![allow(rustc::potential_query_instability)] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![deny(rustc::diagnostic_outside_of_impl)] +#![deny(rustc::untranslatable_diagnostic)] +#![deny(unsafe_op_in_unsafe_fn)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![feature(allocator_api)] #![feature(array_windows)] -#![feature(associated_type_bounds)] #![feature(auto_traits)] #![feature(cell_leak)] +#![feature(cfg_match)] #![feature(core_intrinsics)] #![feature(extend_one)] #![feature(hash_raw_entry)] #![feature(hasher_prefixfree_extras)] +#![feature(lazy_cell)] +#![feature(lint_reasons)] +#![feature(macro_metavar_expr)] #![feature(maybe_uninit_uninit_array)] #![feature(min_specialization)] +#![feature(negative_impls)] #![feature(never_type)] -#![feature(type_alias_impl_trait)] -#![feature(new_uninit)] -#![feature(lazy_cell)] +#![feature(ptr_alignment_type)] #![feature(rustc_attrs)] -#![feature(negative_impls)] +#![feature(strict_provenance)] #![feature(test)] #![feature(thread_id_value)] -#![feature(vec_into_raw_parts)] -#![feature(allocator_api)] -#![feature(get_mut_unchecked)] -#![feature(lint_reasons)] +#![feature(type_alias_impl_trait)] #![feature(unwrap_infallible)] -#![feature(strict_provenance)] -#![feature(ptr_alignment_type)] -#![feature(macro_metavar_expr)] -#![allow(rustc::default_hash_types)] -#![allow(rustc::potential_query_instability)] -#![deny(rustc::untranslatable_diagnostic)] -#![deny(rustc::diagnostic_outside_of_impl)] -#![allow(internal_features)] -#![deny(unsafe_op_in_unsafe_fn)] +// tidy-alphabetical-end #[macro_use] extern crate tracing; #[macro_use] -extern crate cfg_if; -#[macro_use] extern crate rustc_macros; use std::fmt; @@ -63,7 +62,6 @@ pub mod captures; pub mod flat_map_in_place; pub mod flock; -pub mod functor; pub mod fx; pub mod graph; pub mod intern; @@ -129,6 +127,9 @@ } } +/// This is a marker for a fatal compiler error used with `resume_unwind`. +pub struct FatalErrorMarker; + /// Turns a closure that takes an `&mut Formatter` into something that can be display-formatted. 
pub fn make_display(f: impl Fn(&mut fmt::Formatter<'_>) -> fmt::Result) -> impl fmt::Display { struct Printer { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/marker.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/marker.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/marker.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/marker.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,11 +1,12 @@ -cfg_if!( - if #[cfg(not(parallel_compiler))] { +cfg_match! { + cfg(not(parallel_compiler)) => { pub auto trait DynSend {} pub auto trait DynSync {} impl DynSend for T {} impl DynSync for T {} - } else { + } + _ => { #[rustc_on_unimplemented( message = "`{Self}` doesn't implement `DynSend`. \ Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`" @@ -48,13 +49,10 @@ [std::io::StdoutLock<'_>] [std::io::StderrLock<'_>] ); - cfg_if!( - // Consistent with `std` - // `os_imp::Env` is `!Send` in these platforms - if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] { - impl !DynSend for std::env::VarsOs {} - } - ); + + #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] + // Consistent with `std`, `os_imp::Env` is `!Sync` in these platforms + impl !DynSend for std::env::VarsOs {} macro_rules! already_send { ($([$ty: ty])*) => { @@ -123,13 +121,10 @@ [std::sync::mpsc::Receiver where T] [std::sync::mpsc::Sender where T] ); - cfg_if!( - // Consistent with `std` - // `os_imp::Env` is `!Sync` in these platforms - if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] { - impl !DynSync for std::env::VarsOs {} - } - ); + + #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] + // Consistent with `std`, `os_imp::Env` is `!Sync` in these platforms + impl !DynSync for std::env::VarsOs {} macro_rules! already_sync { ($([$ty: ty])*) => { @@ -143,7 +138,6 @@ [std::sync::atomic::AtomicUsize] [std::sync::atomic::AtomicU8] [std::sync::atomic::AtomicU32] - [std::sync::atomic::AtomicU64] [std::backtrace::Backtrace] [std::io::Error] [std::fs::File] @@ -153,6 +147,18 @@ [crate::owned_slice::OwnedSlice] ); + // PowerPC and MIPS platforms with 32-bit pointers do not + // have AtomicU64 type. + #[cfg(not(any(target_arch = "powerpc", target_arch = "mips")))] + already_sync!( + [std::sync::atomic::AtomicU64] + ); + + #[cfg(any(target_arch = "powerpc", target_arch = "mips"))] + already_sync!( + [portable_atomic::AtomicU64] + ); + macro_rules! impl_dyn_sync { ($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => { $(unsafe impl<$($generics2)*> DynSync for $ty {})* @@ -183,7 +189,7 @@ [thin_vec::ThinVec where T: DynSync] ); } -); +} pub fn assert_dyn_sync() {} pub fn assert_dyn_send() {} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/profiling.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/profiling.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/profiling.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/profiling.rs 2023-12-21 16:55:28.000000000 +0000 @@ -859,8 +859,8 @@ } // Memory reporting -cfg_if! { - if #[cfg(windows)] { +cfg_match! 
{ + cfg(windows) => { pub fn get_resident_set_size() -> Option { use std::mem; @@ -885,7 +885,8 @@ Some(pmc.WorkingSetSize) } - } else if #[cfg(target_os = "macos")] { + } + cfg(target_os = "macos") => { pub fn get_resident_set_size() -> Option { use libc::{c_int, c_void, getpid, proc_pidinfo, proc_taskinfo, PROC_PIDTASKINFO}; use std::mem; @@ -903,7 +904,8 @@ } } } - } else if #[cfg(unix)] { + } + cfg(unix) => { pub fn get_resident_set_size() -> Option { let field = 1; let contents = fs::read("/proc/self/statm").ok()?; @@ -912,7 +914,8 @@ let npages = s.parse::().ok()?; Some(npages * 4096) } - } else { + } + _ => { pub fn get_resident_set_size() -> Option { None } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/sync/parallel.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/sync/parallel.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/sync/parallel.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/sync/parallel.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,6 +3,8 @@ #![allow(dead_code)] +use crate::sync::IntoDynSyncSend; +use crate::FatalErrorMarker; use parking_lot::Mutex; use std::any::Any; use std::panic::{catch_unwind, resume_unwind, AssertUnwindSafe}; @@ -18,14 +20,17 @@ /// continuing with unwinding. It's also used for the non-parallel code to ensure error message /// output match the parallel compiler for testing purposes. pub struct ParallelGuard { - panic: Mutex>>, + panic: Mutex>>>, } impl ParallelGuard { pub fn run(&self, f: impl FnOnce() -> R) -> Option { catch_unwind(AssertUnwindSafe(f)) .map_err(|err| { - *self.panic.lock() = Some(err); + let mut panic = self.panic.lock(); + if panic.is_none() || !(*err).is::() { + *panic = Some(IntoDynSyncSend(err)); + } }) .ok() } @@ -37,7 +42,7 @@ pub fn parallel_guard(f: impl FnOnce(&ParallelGuard) -> R) -> R { let guard = ParallelGuard { panic: Mutex::new(None) }; let ret = f(&guard); - if let Some(panic) = guard.panic.into_inner() { + if let Some(IntoDynSyncSend(panic)) = guard.panic.into_inner() { resume_unwind(panic); } ret @@ -77,6 +82,15 @@ }) } + pub fn try_par_for_each_in( + t: T, + mut for_each: impl FnMut(T::Item) -> Result<(), E>, + ) -> Result<(), E> { + parallel_guard(|guard| { + t.into_iter().filter_map(|i| guard.run(|| for_each(i))).fold(Ok(()), Result::and) + }) + } + pub fn par_map>( t: T, mut map: impl FnMut(<::IntoIter as Iterator>::Item) -> R, @@ -97,14 +111,20 @@ parallel!(impl $fblock [$block, $($c,)*] [$($rest),*]) }; (impl $fblock:block [$($blocks:expr,)*] []) => { - ::rustc_data_structures::sync::scope(|s| { - $(let block = rustc_data_structures::sync::FromDyn::from(|| $blocks); - s.spawn(move |_| block.into_inner()());)* - (|| $fblock)(); + $crate::sync::parallel_guard(|guard| { + $crate::sync::scope(|s| { + $( + let block = $crate::sync::FromDyn::from(|| $blocks); + s.spawn(move |_| { + guard.run(move || block.into_inner()()); + }); + )* + guard.run(|| $fblock); + }); }); }; ($fblock:block, $($blocks:block),*) => { - if rustc_data_structures::sync::is_dyn_thread_safe() { + if $crate::sync::is_dyn_thread_safe() { // Reverse the order of the later blocks since Rayon executes them in reverse order // when using a single thread. This ensures the execution order matches that // of a single threaded rustc. 
@@ -137,11 +157,13 @@ if mode::is_dyn_thread_safe() { let oper_a = FromDyn::from(oper_a); let oper_b = FromDyn::from(oper_b); - let (a, b) = rayon::join( - move || FromDyn::from(oper_a.into_inner()()), - move || FromDyn::from(oper_b.into_inner()()), - ); - (a.into_inner(), b.into_inner()) + let (a, b) = parallel_guard(|guard| { + rayon::join( + move || guard.run(move || FromDyn::from(oper_a.into_inner()())), + move || guard.run(move || FromDyn::from(oper_b.into_inner()())), + ) + }); + (a.unwrap().into_inner(), b.unwrap().into_inner()) } else { super::disabled::join(oper_a, oper_b) } @@ -167,6 +189,25 @@ }); } + pub fn try_par_for_each_in< + T: IntoIterator + IntoParallelIterator::Item>, + E: Send, + >( + t: T, + for_each: impl Fn(::Item) -> Result<(), E> + DynSync + DynSend, + ) -> Result<(), E> { + parallel_guard(|guard| { + if mode::is_dyn_thread_safe() { + let for_each = FromDyn::from(for_each); + t.into_par_iter() + .filter_map(|i| guard.run(|| for_each(i))) + .reduce(|| Ok(()), Result::and) + } else { + t.into_iter().filter_map(|i| guard.run(|| for_each(i))).fold(Ok(()), Result::and) + } + }) + } + pub fn par_map< I, T: IntoIterator + IntoParallelIterator, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/sync.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/sync.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/sync.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_data_structures/src/sync.rs 2023-12-21 16:55:28.000000000 +0000 @@ -54,7 +54,7 @@ mod parallel; #[cfg(parallel_compiler)] pub use parallel::scope; -pub use parallel::{join, par_for_each_in, par_map, parallel_guard}; +pub use parallel::{join, par_for_each_in, par_map, parallel_guard, try_par_for_each_in}; pub use std::sync::atomic::Ordering; pub use std::sync::atomic::Ordering::SeqCst; @@ -109,8 +109,8 @@ pub use mode::{is_dyn_thread_safe, set_dyn_thread_safe_mode}; -cfg_if! { - if #[cfg(not(parallel_compiler))] { +cfg_match! { + cfg(not(parallel_compiler)) => { use std::ops::Add; use std::cell::Cell; @@ -251,7 +251,8 @@ MTLock(self.0.clone()) } } - } else { + } + _ => { pub use std::marker::Send as Send; pub use std::marker::Sync as Sync; @@ -264,7 +265,15 @@ pub use std::sync::OnceLock; - pub use std::sync::atomic::{AtomicBool, AtomicUsize, AtomicU32, AtomicU64}; + pub use std::sync::atomic::{AtomicBool, AtomicUsize, AtomicU32}; + + // PowerPC and MIPS platforms with 32-bit pointers do not + // have AtomicU64 type. 
+ #[cfg(not(any(target_arch = "powerpc", target_arch = "mips")))] + pub use std::sync::atomic::AtomicU64; + + #[cfg(any(target_arch = "powerpc", target_arch = "mips"))] + pub use portable_atomic::AtomicU64; pub use std::sync::Arc as Lrc; pub use std::sync::Weak as Weak; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -7,4 +7,6 @@ crate-type = ["dylib"] [dependencies] +# tidy-alphabetical-start rustc_driver_impl = { path = "../rustc_driver_impl" } +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,8 @@ // This crate is intentionally empty and a re-export of `rustc_driver_impl` to allow the code in // `rustc_driver_impl` to be compiled in parallel with other crates. +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] + pub use rustc_driver_impl::*; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,59 +3,60 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] -time = { version = "0.3", default-features = false, features = ["formatting", ] } -tracing = { version = "0.1.35" } -serde_json = "1.0.59" -rustc_log = { path = "../rustc_log" } +# tidy-alphabetical-start +rustc_ast = { path = "../rustc_ast" } rustc_ast_lowering = { path = "../rustc_ast_lowering" } rustc_ast_passes = { path = "../rustc_ast_passes" } +rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_attr = { path = "../rustc_attr" } rustc_borrowck = { path = "../rustc_borrowck" } rustc_builtin_macros = { path = "../rustc_builtin_macros" } +rustc_codegen_ssa = { path = "../rustc_codegen_ssa" } rustc_const_eval = { path = "../rustc_const_eval" } +rustc_data_structures = { path = "../rustc_data_structures" } +rustc_error_codes = { path = "../rustc_error_codes" } rustc_error_messages = { path = "../rustc_error_messages" } +rustc_errors = { path = "../rustc_errors" } rustc_expand = { path = "../rustc_expand" } -rustc_hir_typeck = { path = "../rustc_hir_typeck" } +rustc_feature = { path = "../rustc_feature" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } +rustc_hir_analysis = { path = "../rustc_hir_analysis" } +rustc_hir_pretty = { path = "../rustc_hir_pretty" } +rustc_hir_typeck = { path = "../rustc_hir_typeck" } rustc_incremental = { path = "../rustc_incremental" } rustc_infer = { path = "../rustc_infer" } +rustc_interface = { path = "../rustc_interface" } +rustc_lint = { path = "../rustc_lint" } +rustc_log = { path = "../rustc_log" } +rustc_macros = { path = "../rustc_macros" } +rustc_metadata = { path = 
"../rustc_metadata" } +rustc_middle = { path = "../rustc_middle" } rustc_mir_build = { path = "../rustc_mir_build" } rustc_mir_dataflow = { path = "../rustc_mir_dataflow" } +rustc_mir_transform = { path = "../rustc_mir_transform" } rustc_monomorphize = { path = "../rustc_monomorphize" } +rustc_parse = { path = "../rustc_parse" } rustc_passes = { path = "../rustc_passes" } rustc_privacy = { path = "../rustc_privacy" } rustc_query_system = { path = "../rustc_query_system" } rustc_resolve = { path = "../rustc_resolve" } +rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } rustc_symbol_mangling = { path = "../rustc_symbol_mangling" } +rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_ty_utils = { path = "../rustc_ty_utils" } -rustc_middle = { path = "../rustc_middle" } -rustc_ast_pretty = { path = "../rustc_ast_pretty" } -rustc_target = { path = "../rustc_target" } -rustc_lint = { path = "../rustc_lint" } -rustc_data_structures = { path = "../rustc_data_structures" } -rustc_errors = { path = "../rustc_errors" } -rustc_feature = { path = "../rustc_feature" } -rustc_hir = { path = "../rustc_hir" } -rustc_hir_pretty = { path = "../rustc_hir_pretty" } -rustc_macros = { path = "../rustc_macros" } -rustc_metadata = { path = "../rustc_metadata" } -rustc_parse = { path = "../rustc_parse" } -rustc_plugin_impl = { path = "../rustc_plugin_impl" } -rustc_codegen_ssa = { path = "../rustc_codegen_ssa" } -rustc_session = { path = "../rustc_session" } -rustc_error_codes = { path = "../rustc_error_codes" } -rustc_interface = { path = "../rustc_interface" } -rustc_ast = { path = "../rustc_ast" } -rustc_span = { path = "../rustc_span" } -rustc_hir_analysis = { path = "../rustc_hir_analysis" } -rustc_mir_transform = { path = "../rustc_mir_transform" } +serde_json = "1.0.59" +time = { version = "0.3", default-features = false, features = ["alloc", "formatting"] } +tracing = { version = "0.1.35" } +# tidy-alphabetical-end [target.'cfg(unix)'.dependencies] +# tidy-alphabetical-start libc = "0.2" +# tidy-alphabetical-end [target.'cfg(windows)'.dependencies.windows] version = "0.48.0" @@ -64,6 +65,7 @@ ] [features] +# tidy-alphabetical-start llvm = ['rustc_interface/llvm'] max_level_info = ['rustc_log/max_level_info'] rustc_use_parallel_compiler = [ @@ -71,3 +73,4 @@ 'rustc_interface/rustc_use_parallel_compiler', 'rustc_middle/rustc_use_parallel_compiler' ] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -1,5 +1,6 @@ driver_impl_ice = the compiler unexpectedly panicked. this is a bug. 
driver_impl_ice_bug_report = we would appreciate a bug report: {$bug_report_url} +driver_impl_ice_bug_report_internal_feature = using internal features is not supported and expected to cause internal compiler errors when used incorrectly driver_impl_ice_exclude_cargo_defaults = some of the compiler flags provided by cargo are hidden driver_impl_ice_flags = compiler flags: {$flags} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,10 +5,13 @@ //! This API is completely unstable and subject to change. #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] -#![feature(lazy_cell)] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![feature(decl_macro)] -#![feature(panic_update_hook)] +#![feature(lazy_cell)] #![feature(let_chains)] +#![feature(panic_update_hook)] #![recursion_limit = "256"] #![allow(rustc::potential_query_instability)] #![deny(rustc::untranslatable_diagnostic)] @@ -17,8 +20,6 @@ #[macro_use] extern crate tracing; -pub extern crate rustc_plugin_impl as plugin; - use rustc_ast as ast; use rustc_codegen_ssa::{traits::CodegenBackend, CodegenErrors, CodegenResults}; use rustc_data_structures::profiling::{ @@ -40,8 +41,10 @@ use rustc_session::getopts::{self, Matches}; use rustc_session::lint::{Lint, LintId}; use rustc_session::{config, EarlyErrorHandler, Session}; -use rustc_span::source_map::{FileLoader, FileName}; +use rustc_span::def_id::LOCAL_CRATE; +use rustc_span::source_map::FileLoader; use rustc_span::symbol::sym; +use rustc_span::FileName; use rustc_target::json::ToJson; use rustc_target::spec::{Target, TargetTriple}; @@ -57,9 +60,8 @@ use std::process::{self, Command, Stdio}; use std::str; use std::sync::atomic::{AtomicBool, Ordering}; -use std::sync::OnceLock; +use std::sync::{Arc, OnceLock}; use std::time::{Instant, SystemTime}; -use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; #[allow(unused_macros)] @@ -129,12 +131,10 @@ rustc_monomorphize::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE, rustc_passes::DEFAULT_LOCALE_RESOURCE, - rustc_plugin_impl::DEFAULT_LOCALE_RESOURCE, rustc_privacy::DEFAULT_LOCALE_RESOURCE, rustc_query_system::DEFAULT_LOCALE_RESOURCE, rustc_resolve::DEFAULT_LOCALE_RESOURCE, rustc_session::DEFAULT_LOCALE_RESOURCE, - rustc_symbol_mangling::DEFAULT_LOCALE_RESOURCE, rustc_trait_selection::DEFAULT_LOCALE_RESOURCE, rustc_ty_utils::DEFAULT_LOCALE_RESOURCE, // tidy-alphabetical-end @@ -221,11 +221,18 @@ file_loader: Option>, make_codegen_backend: Option Box + Send>>, + using_internal_features: Arc, } impl<'a, 'b> RunCompiler<'a, 'b> { pub fn new(at_args: &'a [String], callbacks: &'b mut (dyn Callbacks + Send)) -> Self { - Self { at_args, callbacks, file_loader: None, make_codegen_backend: None } + Self { + at_args, + callbacks, + file_loader: None, + make_codegen_backend: None, + using_internal_features: Arc::default(), + } } /// Set a custom codegen backend. @@ -257,9 +264,23 @@ self } + /// Set the session-global flag that checks whether internal features have been used, + /// suppressing the message about submitting an issue in ICEs when enabled. 
+ #[must_use] + pub fn set_using_internal_features(mut self, using_internal_features: Arc) -> Self { + self.using_internal_features = using_internal_features; + self + } + /// Parse args and run the compiler. pub fn run(self) -> interface::Result<()> { - run_compiler(self.at_args, self.callbacks, self.file_loader, self.make_codegen_backend) + run_compiler( + self.at_args, + self.callbacks, + self.file_loader, + self.make_codegen_backend, + self.using_internal_features, + ) } } @@ -270,6 +291,7 @@ make_codegen_backend: Option< Box Box + Send>, >, + using_internal_features: Arc, ) -> interface::Result<()> { let mut early_error_handler = EarlyErrorHandler::new(ErrorOutputType::default()); @@ -294,13 +316,11 @@ return Ok(()); } - let cfg = interface::parse_cfgspecs(&early_error_handler, matches.opt_strs("cfg")); - let check_cfg = interface::parse_check_cfg(&early_error_handler, matches.opt_strs("check-cfg")); let (odir, ofile) = make_output(&matches); let mut config = interface::Config { opts: sopts, - crate_cfg: cfg, - crate_check_cfg: check_cfg, + crate_cfg: matches.opt_strs("cfg"), + crate_check_cfg: matches.opt_strs("check-cfg"), input: Input::File(PathBuf::new()), output_file: ofile, output_dir: odir, @@ -309,10 +329,12 @@ locale_resources: DEFAULT_LOCALE_RESOURCES, lint_caps: Default::default(), parse_sess_created: None, + hash_untracked_state: None, register_lints: None, override_queries: None, make_codegen_backend, registry: diagnostics_registry(), + using_internal_features, expanded_args: args, }; @@ -392,7 +414,7 @@ if ppm.needs_ast_map() { queries.global_ctxt()?.enter(|tcx| { tcx.ensure().early_lint_checks(()); - pretty::print_after_hir_lowering(tcx, *ppm); + pretty::print(sess, *ppm, pretty::PrintExtra::NeedsAstMap { tcx }); Ok(()) })?; @@ -400,8 +422,12 @@ // effects of writing the dep-info and reporting errors. 
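The hunks above grow `RunCompiler` into a small builder that carries an `Arc<AtomicBool>` ("using internal features") alongside the existing file-loader and codegen-backend hooks, and thread that flag through `run_compiler` into `interface::Config`. A minimal, self-contained sketch of that builder shape, using only std; `Runner` and its methods are hypothetical stand-ins for illustration, not rustc's real types.

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

// Hypothetical stand-in for the RunCompiler builder shown in the hunk above.
struct Runner {
    using_internal_features: Arc<AtomicBool>,
}

impl Runner {
    fn new() -> Self {
        Self { using_internal_features: Arc::default() }
    }

    // Mirrors `set_using_internal_features`: store the shared flag and return Self
    // so calls can be chained.
    #[must_use]
    fn set_using_internal_features(mut self, flag: Arc<AtomicBool>) -> Self {
        self.using_internal_features = flag;
        self
    }

    fn run(self) {
        // Later stages only read the flag; whoever created it may set it.
        if self.using_internal_features.load(Ordering::Relaxed) {
            println!("internal features were used");
        } else {
            println!("no internal features used");
        }
    }
}

fn main() {
    let flag = Arc::new(AtomicBool::new(false));
    flag.store(true, Ordering::Relaxed); // e.g. set when an internal feature is seen
    Runner::new().set_using_internal_features(flag).run();
}
```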
queries.global_ctxt()?.enter(|tcx| tcx.output_filenames(())); } else { - let krate = queries.parse()?.steal(); - pretty::print_after_parsing(sess, &krate, *ppm); + let krate = queries.parse()?; + pretty::print( + sess, + *ppm, + pretty::PrintExtra::AfterParsing { krate: &*krate.borrow() }, + ); } trace!("finished pretty-printing"); return early_exit(); @@ -456,8 +482,7 @@ } if sess.opts.unstable_opts.print_vtable_sizes { - let crate_name = - compiler.session().opts.crate_name.as_deref().unwrap_or(""); + let crate_name = queries.global_ctxt()?.enter(|tcx| tcx.crate_name(LOCAL_CRATE)); sess.code_stats.print_vtable_sizes(crate_name); } @@ -542,7 +567,7 @@ } impl Compilation { - pub fn and_then Compilation>(self, next: F) -> Compilation { + fn and_then Compilation>(self, next: F) -> Compilation { match self { Compilation::Stop => Compilation::Stop, Compilation::Continue => next(), @@ -654,7 +679,7 @@ } } -pub fn try_process_rlink(sess: &Session, compiler: &interface::Compiler) -> Compilation { +fn try_process_rlink(sess: &Session, compiler: &interface::Compiler) -> Compilation { if sess.opts.unstable_opts.link_only { if let Input::File(file) = &sess.io.input { let outputs = compiler.build_output_filenames(sess, &[]); @@ -695,7 +720,7 @@ } } -pub fn list_metadata( +fn list_metadata( handler: &EarlyErrorHandler, sess: &Session, metadata_loader: &dyn MetadataLoader, @@ -970,16 +995,14 @@ } /// Write to stdout lint command options, together with a list of all available lints -pub fn describe_lints(sess: &Session, lint_store: &LintStore, loaded_plugins: bool) { +pub fn describe_lints(sess: &Session, lint_store: &LintStore, loaded_lints: bool) { safe_println!( " Available lint options: -W Warn about - -A \ - Allow + -A Allow -D Deny - -F Forbid \ - (deny and all attempts to override) + -F Forbid (deny and all attempts to override) " ); @@ -998,18 +1021,18 @@ lints } - let (plugin, builtin): (Vec<_>, _) = - lint_store.get_lints().iter().cloned().partition(|&lint| lint.is_plugin); - let plugin = sort_lints(sess, plugin); + let (loaded, builtin): (Vec<_>, _) = + lint_store.get_lints().iter().cloned().partition(|&lint| lint.is_loaded); + let loaded = sort_lints(sess, loaded); let builtin = sort_lints(sess, builtin); - let (plugin_groups, builtin_groups): (Vec<_>, _) = + let (loaded_groups, builtin_groups): (Vec<_>, _) = lint_store.get_lint_groups().partition(|&(.., p)| p); - let plugin_groups = sort_lint_groups(plugin_groups); + let loaded_groups = sort_lint_groups(loaded_groups); let builtin_groups = sort_lint_groups(builtin_groups); let max_name_len = - plugin.iter().chain(&builtin).map(|&s| s.name.chars().count()).max().unwrap_or(0); + loaded.iter().chain(&builtin).map(|&s| s.name.chars().count()).max().unwrap_or(0); let padded = |x: &str| { let mut s = " ".repeat(max_name_len - x.chars().count()); s.push_str(x); @@ -1037,7 +1060,7 @@ let max_name_len = max( "warnings".len(), - plugin_groups + loaded_groups .iter() .chain(&builtin_groups) .map(|&(s, _)| s.chars().count()) @@ -1075,20 +1098,22 @@ print_lint_groups(builtin_groups, true); - match (loaded_plugins, plugin.len(), plugin_groups.len()) { + match (loaded_lints, loaded.len(), loaded_groups.len()) { (false, 0, _) | (false, _, 0) => { - safe_println!("Lint tools like Clippy can provide additional lints and lint groups."); + safe_println!("Lint tools like Clippy can load additional lints and lint groups."); + } + (false, ..) 
=> panic!("didn't load additional lints but got them anyway!"), + (true, 0, 0) => { + safe_println!("This crate does not load any additional lints or lint groups.") } - (false, ..) => panic!("didn't load lint plugins but got them anyway!"), - (true, 0, 0) => safe_println!("This crate does not load any lint plugins or lint groups."), (true, l, g) => { if l > 0 { - safe_println!("Lint checks provided by plugins loaded by this crate:\n"); - print_lints(plugin); + safe_println!("Lint checks loaded by this crate:\n"); + print_lints(loaded); } if g > 0 { - safe_println!("Lint groups provided by plugins loaded by this crate:\n"); - print_lint_groups(plugin_groups, false); + safe_println!("Lint groups loaded by this crate:\n"); + print_lint_groups(loaded_groups, false); } } } @@ -1105,7 +1130,7 @@ rustc_errors::FatalError.raise(); } - // Don't handle -W help here, because we might first load plugins. + // Don't handle -W help here, because we might first load additional lints. let debug_flags = matches.opt_strs("Z"); if debug_flags.iter().any(|x| *x == "help") { describe_debug_flags(); @@ -1181,6 +1206,10 @@ /// /// So with all that in mind, the comments below have some more detail about the /// contortions done here to get things to work out correctly. +/// +/// This does not need to be `pub` for rustc itself, but @chaosite needs it to +/// be public when using rustc as a library, see +/// pub fn handle_options(handler: &EarlyErrorHandler, args: &[String]) -> Option { if args.is_empty() { // user did not write `-v` nor `-Z unstable-options`, so do not @@ -1280,24 +1309,36 @@ } } -pub static ICE_PATH: OnceLock> = OnceLock::new(); +static ICE_PATH: OnceLock> = OnceLock::new(); -pub fn ice_path() -> &'static Option { +fn ice_path() -> &'static Option { ICE_PATH.get_or_init(|| { if !rustc_feature::UnstableFeatures::from_environment(None).is_nightly_build() { return None; } - if let Ok("0") = std::env::var("RUST_BACKTRACE").as_deref() { + if let Some(s) = std::env::var_os("RUST_BACKTRACE") + && s == "0" + { return None; } - let mut path = match std::env::var("RUSTC_ICE").as_deref() { - // Explicitly opting out of writing ICEs to disk. - Ok("0") => return None, - Ok(s) => PathBuf::from(s), - Err(_) => std::env::current_dir().unwrap_or_default(), + let mut path = match std::env::var_os("RUSTC_ICE") { + Some(s) => { + if s == "0" { + // Explicitly opting out of writing ICEs to disk. + return None; + } + PathBuf::from(s) + } + None => std::env::current_dir().unwrap_or_default(), }; let now: OffsetDateTime = SystemTime::now().into(); - let file_now = now.format(&Rfc3339).unwrap_or(String::new()); + let file_now = now + .format( + // Don't use a standard datetime format because Windows doesn't support `:` in paths + &time::format_description::parse("[year]-[month]-[day]T[hour]_[minute]_[second]") + .unwrap(), + ) + .unwrap_or_default(); let pid = std::process::id(); path.push(format!("rustc-ice-{file_now}-{pid}.txt")); Some(path) @@ -1314,18 +1355,24 @@ /// If you have no extra info to report, pass the empty closure `|_| ()` as the argument to /// extra_info. /// +/// Returns a flag that can be set to disable the note for submitting a bug. This can be passed to +/// [`RunCompiler::set_using_internal_features`] to let macro expansion set it when encountering +/// internal features. +/// /// A custom rustc driver can skip calling this to set up a custom ICE hook. 
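The `ice_path()` hunk above now reads `RUSTC_ICE` and `RUST_BACKTRACE` with `var_os`, and builds the dump file name with a custom `time` format so the timestamp contains no `:` (which Windows paths reject). A hedged, standalone sketch of that file-name logic, assuming the `time` 0.3 crate with default features; `ice_dump_path` is a made-up helper name, not rustc's API.

```rust
use std::path::PathBuf;

// Sketch of how an ICE dump path could be built, modelled on the hunk above.
fn ice_dump_path() -> Option<PathBuf> {
    let mut path = match std::env::var_os("RUSTC_ICE") {
        Some(s) if s == "0" => return None, // explicitly opted out of ICE dumps
        Some(s) => PathBuf::from(s),
        None => std::env::current_dir().unwrap_or_default(),
    };
    // No `:` in the timestamp, because `:` is not allowed in Windows file names.
    let format =
        time::format_description::parse("[year]-[month]-[day]T[hour]_[minute]_[second]").unwrap();
    let now: time::OffsetDateTime = std::time::SystemTime::now().into();
    let stamp = now.format(&format).unwrap_or_default();
    path.push(format!("rustc-ice-{stamp}-{}.txt", std::process::id()));
    Some(path)
}

fn main() {
    println!("{:?}", ice_dump_path());
}
```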
-pub fn install_ice_hook(bug_report_url: &'static str, extra_info: fn(&Handler)) { +pub fn install_ice_hook(bug_report_url: &'static str, extra_info: fn(&Handler)) -> Arc { // If the user has not explicitly overridden "RUST_BACKTRACE", then produce // full backtraces. When a compiler ICE happens, we want to gather // as much information as possible to present in the issue opened // by the user. Compiler developers and other rustc users can // opt in to less-verbose backtraces by manually setting "RUST_BACKTRACE" // (e.g. `RUST_BACKTRACE=1`) - if std::env::var("RUST_BACKTRACE").is_err() { + if std::env::var_os("RUST_BACKTRACE").is_none() { std::env::set_var("RUST_BACKTRACE", "full"); } + let using_internal_features = Arc::new(std::sync::atomic::AtomicBool::default()); + let using_internal_features_hook = using_internal_features.clone(); panic::update_hook(Box::new( move |default_hook: &(dyn Fn(&PanicInfo<'_>) + Send + Sync + 'static), info: &PanicInfo<'_>| { @@ -1350,8 +1397,7 @@ eprintln!(); if let Some(ice_path) = ice_path() - && let Ok(mut out) = - File::options().create(true).append(true).open(&ice_path) + && let Ok(mut out) = File::options().create(true).append(true).open(&ice_path) { // The current implementation always returns `Some`. let location = info.location().unwrap(); @@ -1376,9 +1422,11 @@ } // Print the ICE message - report_ice(info, bug_report_url, extra_info); + report_ice(info, bug_report_url, extra_info, &using_internal_features_hook); }, )); + + using_internal_features } /// Prints the ICE message, including query stack, but without backtrace. @@ -1387,7 +1435,12 @@ /// /// When `install_ice_hook` is called, this function will be called as the panic /// hook. -pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str, extra_info: fn(&Handler)) { +fn report_ice( + info: &panic::PanicInfo<'_>, + bug_report_url: &str, + extra_info: fn(&Handler), + using_internal_features: &AtomicBool, +) { let fallback_bundle = rustc_errors::fallback_fluent_bundle(crate::DEFAULT_LOCALE_RESOURCES.to_vec(), false); let emitter = Box::new(rustc_errors::emitter::EmitterWriter::stderr( @@ -1404,19 +1457,22 @@ handler.emit_err(session_diagnostics::Ice); } - handler.emit_note(session_diagnostics::IceBugReport { bug_report_url }); + if using_internal_features.load(std::sync::atomic::Ordering::Relaxed) { + handler.emit_note(session_diagnostics::IceBugReportInternalFeature); + } else { + handler.emit_note(session_diagnostics::IceBugReport { bug_report_url }); + } let version = util::version_str!().unwrap_or("unknown_version"); let triple = config::host_triple(); static FIRST_PANIC: AtomicBool = AtomicBool::new(true); - let file = if let Some(path) = ice_path().as_ref() { + let file = if let Some(path) = ice_path() { // Create the ICE dump target file. match crate::fs::File::options().create(true).append(true).open(&path) { Ok(mut file) => { - handler - .emit_note(session_diagnostics::IcePath { path: path.display().to_string() }); + handler.emit_note(session_diagnostics::IcePath { path: path.clone() }); if FIRST_PANIC.swap(false, Ordering::SeqCst) { let _ = write!(file, "\n\nrustc version: {version}\nplatform: {triple}"); } @@ -1425,10 +1481,10 @@ Err(err) => { // The path ICE couldn't be written to disk, provide feedback to the user as to why. 
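As the hunk above shows, `install_ice_hook` now returns the flag it created, after cloning it into the panic hook so `report_ice` can choose between the "internal features" note and the usual bug-report note. A self-contained, std-only sketch of that pattern; `install_hook` is a made-up name, the real code uses the unstable `panic::update_hook` to chain with the default hook and also writes backtraces and ICE dump files.

```rust
use std::panic;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

// Hypothetical stand-in for install_ice_hook: create the flag, capture a clone
// of it in the panic hook, and hand the original back to the caller.
fn install_hook(bug_report_url: &'static str) -> Arc<AtomicBool> {
    let using_internal_features = Arc::new(AtomicBool::new(false));
    let hook_flag = using_internal_features.clone();
    panic::set_hook(Box::new(move |info| {
        eprintln!("error: internal error: {info}");
        if hook_flag.load(Ordering::Relaxed) {
            eprintln!("note: using internal features is not supported");
        } else {
            eprintln!("note: we would appreciate a bug report: {bug_report_url}");
        }
    }));
    using_internal_features
}

fn main() {
    let flag = install_hook("https://example.invalid/new-issue");
    flag.store(true, Ordering::Relaxed); // pretend an internal feature was used
    panic!("boom");
}
```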
handler.emit_warning(session_diagnostics::IcePathError { - path: path.display().to_string(), + path: path.clone(), error: err.to_string(), - env_var: std::env::var("RUSTC_ICE") - .ok() + env_var: std::env::var_os("RUSTC_ICE") + .map(PathBuf::from) .map(|env_var| session_diagnostics::IcePathErrorEnv { env_var }), }); handler.emit_note(session_diagnostics::IceVersion { version, triple }); @@ -1489,7 +1545,7 @@ init_rustc_env_logger(&handler); signal_handler::install(); let mut callbacks = TimePassesCallbacks::default(); - install_ice_hook(DEFAULT_BUG_REPORT_URL, |_| ()); + let using_internal_features = install_ice_hook(DEFAULT_BUG_REPORT_URL, |_| ()); let exit_code = catch_with_exit_code(|| { let args = env::args_os() .enumerate() @@ -1499,7 +1555,9 @@ }) }) .collect::>(); - RunCompiler::new(&args, &mut callbacks).run() + RunCompiler::new(&args, &mut callbacks) + .set_using_internal_features(using_internal_features) + .run() }); if let Some(format) = callbacks.time_passes { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/pretty.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/pretty.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/pretty.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/pretty.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,14 +1,13 @@ //! The various pretty-printing routines. use rustc_ast as ast; -use rustc_ast_pretty::pprust; -use rustc_errors::ErrorGuaranteed; +use rustc_ast_pretty::pprust as pprust_ast; use rustc_hir as hir; use rustc_hir_pretty as pprust_hir; -use rustc_middle::hir::map as hir_map; +use rustc_middle::bug; use rustc_middle::mir::{write_mir_graphviz, write_mir_pretty}; use rustc_middle::ty::{self, TyCtxt}; -use rustc_session::config::{OutFileName, PpAstTreeMode, PpHirMode, PpMode, PpSourceMode}; +use rustc_session::config::{OutFileName, PpHirMode, PpMode, PpSourceMode}; use rustc_session::Session; use rustc_span::symbol::Ident; use rustc_span::FileName; @@ -20,174 +19,57 @@ pub use self::PpSourceMode::*; use crate::abort_on_err; -// This slightly awkward construction is to allow for each PpMode to -// choose whether it needs to do analyses (which can consume the -// Session) and then pass through the session (now attached to the -// analysis results) on to the chosen pretty-printer, along with the -// `&PpAnn` object. -// -// Note that since the `&PrinterSupport` is freshly constructed on each -// call, it would not make sense to try to attach the lifetime of `self` -// to the lifetime of the `&PrinterObject`. +struct AstNoAnn; -/// Constructs a `PrinterSupport` object and passes it to `f`. 
-fn call_with_pp_support<'tcx, A, F>( - ppmode: &PpSourceMode, - sess: &'tcx Session, - tcx: Option>, - f: F, -) -> A -where - F: FnOnce(&dyn PrinterSupport) -> A, -{ - match *ppmode { - Normal | Expanded => { - let annotation = NoAnn { sess, tcx }; - f(&annotation) - } - - Identified | ExpandedIdentified => { - let annotation = IdentifiedAnnotation { sess, tcx }; - f(&annotation) - } - ExpandedHygiene => { - let annotation = HygieneAnnotation { sess }; - f(&annotation) - } - } -} -fn call_with_pp_support_hir(ppmode: &PpHirMode, tcx: TyCtxt<'_>, f: F) -> A -where - F: FnOnce(&dyn HirPrinterSupport<'_>, hir_map::Map<'_>) -> A, -{ - match *ppmode { - PpHirMode::Normal => { - let annotation = NoAnn { sess: tcx.sess, tcx: Some(tcx) }; - f(&annotation, tcx.hir()) - } - - PpHirMode::Identified => { - let annotation = IdentifiedAnnotation { sess: tcx.sess, tcx: Some(tcx) }; - f(&annotation, tcx.hir()) - } - PpHirMode::Typed => { - abort_on_err(tcx.analysis(()), tcx.sess); - - let annotation = TypedAnnotation { tcx, maybe_typeck_results: Cell::new(None) }; - tcx.dep_graph.with_ignore(|| f(&annotation, tcx.hir())) - } - } -} - -trait PrinterSupport: pprust::PpAnn { - /// Provides a uniform interface for re-extracting a reference to a - /// `Session` from a value that now owns it. - fn sess(&self) -> &Session; - - /// Produces the pretty-print annotation object. - /// - /// (Rust does not yet support upcasting from a trait object to - /// an object for one of its supertraits.) - fn pp_ann(&self) -> &dyn pprust::PpAnn; -} - -trait HirPrinterSupport<'hir>: pprust_hir::PpAnn { - /// Provides a uniform interface for re-extracting a reference to a - /// `Session` from a value that now owns it. - fn sess(&self) -> &Session; - - /// Provides a uniform interface for re-extracting a reference to an - /// `hir_map::Map` from a value that now owns it. - fn hir_map(&self) -> Option>; - - /// Produces the pretty-print annotation object. - /// - /// (Rust does not yet support upcasting from a trait object to - /// an object for one of its supertraits.) 
- fn pp_ann(&self) -> &dyn pprust_hir::PpAnn; -} - -struct NoAnn<'hir> { - sess: &'hir Session, - tcx: Option>, -} - -impl<'hir> PrinterSupport for NoAnn<'hir> { - fn sess(&self) -> &Session { - self.sess - } - - fn pp_ann(&self) -> &dyn pprust::PpAnn { - self - } -} +impl pprust_ast::PpAnn for AstNoAnn {} -impl<'hir> HirPrinterSupport<'hir> for NoAnn<'hir> { - fn sess(&self) -> &Session { - self.sess - } - - fn hir_map(&self) -> Option> { - self.tcx.map(|tcx| tcx.hir()) - } - - fn pp_ann(&self) -> &dyn pprust_hir::PpAnn { - self - } +struct HirNoAnn<'tcx> { + tcx: TyCtxt<'tcx>, } -impl<'hir> pprust::PpAnn for NoAnn<'hir> {} -impl<'hir> pprust_hir::PpAnn for NoAnn<'hir> { +impl<'tcx> pprust_hir::PpAnn for HirNoAnn<'tcx> { fn nested(&self, state: &mut pprust_hir::State<'_>, nested: pprust_hir::Nested) { - if let Some(tcx) = self.tcx { - pprust_hir::PpAnn::nested(&(&tcx.hir() as &dyn hir::intravisit::Map<'_>), state, nested) - } + pprust_hir::PpAnn::nested( + &(&self.tcx.hir() as &dyn hir::intravisit::Map<'_>), + state, + nested, + ) } } -struct IdentifiedAnnotation<'hir> { - sess: &'hir Session, - tcx: Option>, -} - -impl<'hir> PrinterSupport for IdentifiedAnnotation<'hir> { - fn sess(&self) -> &Session { - self.sess - } +struct AstIdentifiedAnn; - fn pp_ann(&self) -> &dyn pprust::PpAnn { - self - } -} - -impl<'hir> pprust::PpAnn for IdentifiedAnnotation<'hir> { - fn pre(&self, s: &mut pprust::State<'_>, node: pprust::AnnNode<'_>) { - if let pprust::AnnNode::Expr(_) = node { +impl pprust_ast::PpAnn for AstIdentifiedAnn { + fn pre(&self, s: &mut pprust_ast::State<'_>, node: pprust_ast::AnnNode<'_>) { + if let pprust_ast::AnnNode::Expr(_) = node { s.popen(); } } - fn post(&self, s: &mut pprust::State<'_>, node: pprust::AnnNode<'_>) { + + fn post(&self, s: &mut pprust_ast::State<'_>, node: pprust_ast::AnnNode<'_>) { match node { - pprust::AnnNode::Crate(_) | pprust::AnnNode::Ident(_) | pprust::AnnNode::Name(_) => {} + pprust_ast::AnnNode::Crate(_) + | pprust_ast::AnnNode::Ident(_) + | pprust_ast::AnnNode::Name(_) => {} - pprust::AnnNode::Item(item) => { + pprust_ast::AnnNode::Item(item) => { s.s.space(); s.synth_comment(item.id.to_string()) } - pprust::AnnNode::SubItem(id) => { + pprust_ast::AnnNode::SubItem(id) => { s.s.space(); s.synth_comment(id.to_string()) } - pprust::AnnNode::Block(blk) => { + pprust_ast::AnnNode::Block(blk) => { s.s.space(); s.synth_comment(format!("block {}", blk.id)) } - pprust::AnnNode::Expr(expr) => { + pprust_ast::AnnNode::Expr(expr) => { s.s.space(); s.synth_comment(expr.id.to_string()); s.pclose() } - pprust::AnnNode::Pat(pat) => { + pprust_ast::AnnNode::Pat(pat) => { s.s.space(); s.synth_comment(format!("pat {}", pat.id)); } @@ -195,31 +77,25 @@ } } -impl<'hir> HirPrinterSupport<'hir> for IdentifiedAnnotation<'hir> { - fn sess(&self) -> &Session { - self.sess - } - - fn hir_map(&self) -> Option> { - self.tcx.map(|tcx| tcx.hir()) - } - - fn pp_ann(&self) -> &dyn pprust_hir::PpAnn { - self - } +struct HirIdentifiedAnn<'tcx> { + tcx: TyCtxt<'tcx>, } -impl<'hir> pprust_hir::PpAnn for IdentifiedAnnotation<'hir> { +impl<'tcx> pprust_hir::PpAnn for HirIdentifiedAnn<'tcx> { fn nested(&self, state: &mut pprust_hir::State<'_>, nested: pprust_hir::Nested) { - if let Some(ref tcx) = self.tcx { - pprust_hir::PpAnn::nested(&(&tcx.hir() as &dyn hir::intravisit::Map<'_>), state, nested) - } + pprust_hir::PpAnn::nested( + &(&self.tcx.hir() as &dyn hir::intravisit::Map<'_>), + state, + nested, + ) } + fn pre(&self, s: &mut pprust_hir::State<'_>, node: pprust_hir::AnnNode<'_>) { 
if let pprust_hir::AnnNode::Expr(_) = node { s.popen(); } } + fn post(&self, s: &mut pprust_hir::State<'_>, node: pprust_hir::AnnNode<'_>) { match node { pprust_hir::AnnNode::Name(_) => {} @@ -252,32 +128,22 @@ } } -struct HygieneAnnotation<'a> { +struct AstHygieneAnn<'a> { sess: &'a Session, } -impl<'a> PrinterSupport for HygieneAnnotation<'a> { - fn sess(&self) -> &Session { - self.sess - } - - fn pp_ann(&self) -> &dyn pprust::PpAnn { - self - } -} - -impl<'a> pprust::PpAnn for HygieneAnnotation<'a> { - fn post(&self, s: &mut pprust::State<'_>, node: pprust::AnnNode<'_>) { +impl<'a> pprust_ast::PpAnn for AstHygieneAnn<'a> { + fn post(&self, s: &mut pprust_ast::State<'_>, node: pprust_ast::AnnNode<'_>) { match node { - pprust::AnnNode::Ident(&Ident { name, span }) => { + pprust_ast::AnnNode::Ident(&Ident { name, span }) => { s.s.space(); s.synth_comment(format!("{}{:?}", name.as_u32(), span.ctxt())) } - pprust::AnnNode::Name(&name) => { + pprust_ast::AnnNode::Name(&name) => { s.s.space(); s.synth_comment(name.as_u32().to_string()) } - pprust::AnnNode::Crate(_) => { + pprust_ast::AnnNode::Crate(_) => { s.s.hardbreak(); let verbose = self.sess.verbose(); s.synth_comment(rustc_span::hygiene::debug_hygiene_data(verbose)); @@ -288,26 +154,12 @@ } } -struct TypedAnnotation<'tcx> { +struct HirTypedAnn<'tcx> { tcx: TyCtxt<'tcx>, maybe_typeck_results: Cell>>, } -impl<'tcx> HirPrinterSupport<'tcx> for TypedAnnotation<'tcx> { - fn sess(&self) -> &Session { - self.tcx.sess - } - - fn hir_map(&self) -> Option> { - Some(self.tcx.hir()) - } - - fn pp_ann(&self) -> &dyn pprust_hir::PpAnn { - self - } -} - -impl<'tcx> pprust_hir::PpAnn for TypedAnnotation<'tcx> { +impl<'tcx> pprust_hir::PpAnn for HirTypedAnn<'tcx> { fn nested(&self, state: &mut pprust_hir::State<'_>, nested: pprust_hir::Nested) { let old_maybe_typeck_results = self.maybe_typeck_results.get(); if let pprust_hir::Nested::Body(id) = nested { @@ -317,11 +169,13 @@ pprust_hir::PpAnn::nested(pp_ann, state, nested); self.maybe_typeck_results.set(old_maybe_typeck_results); } + fn pre(&self, s: &mut pprust_hir::State<'_>, node: pprust_hir::AnnNode<'_>) { if let pprust_hir::AnnNode::Expr(_) = node { s.popen(); } } + fn post(&self, s: &mut pprust_hir::State<'_>, node: pprust_hir::AnnNode<'_>) { if let pprust_hir::AnnNode::Expr(expr) = node { let typeck_results = self.maybe_typeck_results.get().or_else(|| { @@ -360,119 +214,119 @@ sess.io.output_file.as_ref().unwrap_or(&OutFileName::Stdout).overwrite(out, sess); } -pub fn print_after_parsing(sess: &Session, krate: &ast::Crate, ppm: PpMode) { - let (src, src_name) = get_source(sess); +// Extra data for pretty-printing, the form of which depends on what kind of +// pretty-printing we are doing. +pub enum PrintExtra<'tcx> { + AfterParsing { krate: &'tcx ast::Crate }, + NeedsAstMap { tcx: TyCtxt<'tcx> }, +} - let out = match ppm { - Source(s) => { - // Silently ignores an identified node. - call_with_pp_support(&s, sess, None, move |annotation| { - debug!("pretty printing source code {:?}", s); - let sess = annotation.sess(); - let parse = &sess.parse_sess; - pprust::print_crate( - sess.source_map(), - krate, - src_name, - src, - annotation.pp_ann(), - false, - parse.edition, - &sess.parse_sess.attr_id_generator, - ) - }) - } - AstTree(PpAstTreeMode::Normal) => { - debug!("pretty printing AST tree"); - format!("{krate:#?}") +impl<'tcx> PrintExtra<'tcx> { + fn with_krate(&self, f: F) -> R + where + F: FnOnce(&ast::Crate) -> R, + { + match self { + PrintExtra::AfterParsing { krate, .. 
} => f(krate), + PrintExtra::NeedsAstMap { tcx } => f(&tcx.resolver_for_lowering(()).borrow().1), } - _ => unreachable!(), - }; + } - write_or_print(&out, sess); + fn tcx(&self) -> TyCtxt<'tcx> { + match self { + PrintExtra::AfterParsing { .. } => bug!("PrintExtra::tcx"), + PrintExtra::NeedsAstMap { tcx } => *tcx, + } + } } -pub fn print_after_hir_lowering<'tcx>(tcx: TyCtxt<'tcx>, ppm: PpMode) { +pub fn print<'tcx>(sess: &Session, ppm: PpMode, ex: PrintExtra<'tcx>) { if ppm.needs_analysis() { - abort_on_err(print_with_analysis(tcx, ppm), tcx.sess); - return; + abort_on_err(ex.tcx().analysis(()), sess); } - let (src, src_name) = get_source(tcx.sess); + let (src, src_name) = get_source(sess); let out = match ppm { Source(s) => { - // Silently ignores an identified node. - call_with_pp_support(&s, tcx.sess, Some(tcx), move |annotation| { - debug!("pretty printing source code {:?}", s); - let sess = annotation.sess(); - let parse = &sess.parse_sess; - pprust::print_crate( + debug!("pretty printing source code {:?}", s); + let annotation: Box = match s { + Normal => Box::new(AstNoAnn), + Expanded => Box::new(AstNoAnn), + Identified => Box::new(AstIdentifiedAnn), + ExpandedIdentified => Box::new(AstIdentifiedAnn), + ExpandedHygiene => Box::new(AstHygieneAnn { sess }), + }; + let parse = &sess.parse_sess; + let is_expanded = ppm.needs_ast_map(); + ex.with_krate(|krate| { + pprust_ast::print_crate( sess.source_map(), - &tcx.resolver_for_lowering(()).borrow().1, + krate, src_name, src, - annotation.pp_ann(), - true, + &*annotation, + is_expanded, parse.edition, &sess.parse_sess.attr_id_generator, ) }) } - - AstTree(PpAstTreeMode::Expanded) => { + AstTree => { + debug!("pretty printing AST tree"); + ex.with_krate(|krate| format!("{krate:#?}")) + } + AstTreeExpanded => { debug!("pretty-printing expanded AST"); - format!("{:#?}", tcx.resolver_for_lowering(()).borrow().1) + format!("{:#?}", ex.tcx().resolver_for_lowering(()).borrow().1) } - - Hir(s) => call_with_pp_support_hir(&s, tcx, move |annotation, hir_map| { + Hir(s) => { debug!("pretty printing HIR {:?}", s); - let sess = annotation.sess(); - let sm = sess.source_map(); - let attrs = |id| hir_map.attrs(id); - pprust_hir::print_crate( - sm, - hir_map.root_module(), - src_name, - src, - &attrs, - annotation.pp_ann(), - ) - }), - + let tcx = ex.tcx(); + let f = |annotation: &dyn pprust_hir::PpAnn| { + let sm = sess.source_map(); + let hir_map = tcx.hir(); + let attrs = |id| hir_map.attrs(id); + pprust_hir::print_crate( + sm, + hir_map.root_module(), + src_name, + src, + &attrs, + annotation, + ) + }; + match s { + PpHirMode::Normal => { + let annotation = HirNoAnn { tcx }; + f(&annotation) + } + PpHirMode::Identified => { + let annotation = HirIdentifiedAnn { tcx }; + f(&annotation) + } + PpHirMode::Typed => { + let annotation = HirTypedAnn { tcx, maybe_typeck_results: Cell::new(None) }; + tcx.dep_graph.with_ignore(|| f(&annotation)) + } + } + } HirTree => { - call_with_pp_support_hir(&PpHirMode::Normal, tcx, move |_annotation, hir_map| { - debug!("pretty printing HIR tree"); - format!("{:#?}", hir_map.krate()) - }) + debug!("pretty printing HIR tree"); + format!("{:#?}", ex.tcx().hir().krate()) } - - _ => unreachable!(), - }; - - write_or_print(&out, tcx.sess); -} - -// In an ideal world, this would be a public function called by the driver after -// analysis is performed. However, we want to call `phase_3_run_analysis_passes` -// with a different callback than the standard driver, so that isn't easy. -// Instead, we call that function ourselves. 
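The rewritten `print` function above replaces the old `call_with_pp_support*` plumbing: it selects one annotation object per mode (as a boxed trait object) and passes it to a single generic printing routine. A small self-contained sketch of that shape, with invented `Mode`/`Ann` types standing in for `PpSourceMode` and the `pprust` annotation traits.

```rust
// Invented stand-ins for PpSourceMode and the pprust annotation traits.
enum Mode {
    Normal,
    Identified,
}

trait Ann {
    fn comment(&self, node: &str) -> String;
}

struct NoAnn;
impl Ann for NoAnn {
    fn comment(&self, _node: &str) -> String {
        String::new()
    }
}

struct IdentifiedAnn;
impl Ann for IdentifiedAnn {
    fn comment(&self, node: &str) -> String {
        format!(" /* {node} */")
    }
}

fn print(mode: Mode, nodes: &[&str]) -> String {
    // Pick the annotation object once, then run one printing routine with it.
    let ann: Box<dyn Ann> = match mode {
        Mode::Normal => Box::new(NoAnn),
        Mode::Identified => Box::new(IdentifiedAnn),
    };
    nodes.iter().map(|n| format!("{n}{}\n", ann.comment(n))).collect()
}

fn main() {
    print!("{}", print(Mode::Identified, &["fn main() {}", "struct S;"]));
}
```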
-fn print_with_analysis(tcx: TyCtxt<'_>, ppm: PpMode) -> Result<(), ErrorGuaranteed> { - tcx.analysis(())?; - let out = match ppm { Mir => { let mut out = Vec::new(); - write_mir_pretty(tcx, None, &mut out).unwrap(); + write_mir_pretty(ex.tcx(), None, &mut out).unwrap(); String::from_utf8(out).unwrap() } - MirCFG => { let mut out = Vec::new(); - write_mir_graphviz(tcx, None, &mut out).unwrap(); + write_mir_graphviz(ex.tcx(), None, &mut out).unwrap(); String::from_utf8(out).unwrap() } - ThirTree => { + let tcx = ex.tcx(); let mut out = String::new(); abort_on_err(rustc_hir_analysis::check_crate(tcx), tcx.sess); debug!("pretty printing THIR tree"); @@ -481,8 +335,8 @@ } out } - ThirFlat => { + let tcx = ex.tcx(); let mut out = String::new(); abort_on_err(rustc_hir_analysis::check_crate(tcx), tcx.sess); debug!("pretty printing THIR flat"); @@ -491,11 +345,7 @@ } out } - - _ => unreachable!(), }; - write_or_print(&out, tcx.sess); - - Ok(()) + write_or_print(&out, sess); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/session_diagnostics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/session_diagnostics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/session_diagnostics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_driver_impl/src/session_diagnostics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -43,6 +43,10 @@ } #[derive(Diagnostic)] +#[diag(driver_impl_ice_bug_report_internal_feature)] +pub(crate) struct IceBugReportInternalFeature; + +#[derive(Diagnostic)] #[diag(driver_impl_ice_version)] pub(crate) struct IceVersion<'a> { pub version: &'a str, @@ -52,13 +56,13 @@ #[derive(Diagnostic)] #[diag(driver_impl_ice_path)] pub(crate) struct IcePath { - pub path: String, + pub path: std::path::PathBuf, } #[derive(Diagnostic)] #[diag(driver_impl_ice_path_error)] pub(crate) struct IcePathError { - pub path: String, + pub path: std::path::PathBuf, pub error: String, #[subdiagnostic] pub env_var: Option, @@ -67,7 +71,7 @@ #[derive(Subdiagnostic)] #[note(driver_impl_ice_path_error_env)] pub(crate) struct IcePathErrorEnv { - pub env_var: String, + pub env_var: std::path::PathBuf, } #[derive(Diagnostic)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -2,3 +2,7 @@ name = "rustc_error_codes" version = "0.0.0" edition = "2021" + +[dependencies] +# tidy-alphabetical-start +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0282.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0282.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0282.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0282.md 2023-12-21 16:55:28.000000000 +0000 @@ -3,7 +3,7 @@ Erroneous code example: ```compile_fail,E0282 -let x = "hello".chars().rev().collect(); +let x = Vec::new(); ``` This error indicates that type inference did not result in one unique possible @@ -11,21 +11,24 @@ by adding a type annotation. Sometimes you need to specify a generic type parameter manually. -A common example is the `collect` method on `Iterator`. 
It has a generic type -parameter with a `FromIterator` bound, which for a `char` iterator is -implemented by `Vec` and `String` among others. Consider the following snippet -that reverses the characters of a string: +In the example above, type `Vec` has a type parameter `T`. When calling +`Vec::new`, barring any other later usage of the variable `x` that allows the +compiler to infer what type `T` is, the compiler needs to be told what it is. -In the first code example, the compiler cannot infer what the type of `x` should -be: `Vec` and `String` are both suitable candidates. To specify which type -to use, you can use a type annotation on `x`: +The type can be specified on the variable: ``` -let x: Vec = "hello".chars().rev().collect(); +let x: Vec = Vec::new(); ``` -It is not necessary to annotate the full type. Once the ambiguity is resolved, -the compiler can infer the rest: +The type can also be specified in the path of the expression: + +``` +let x = Vec::::new(); +``` + +In cases with more complex types, it is not necessary to annotate the full +type. Once the ambiguity is resolved, the compiler can infer the rest: ``` let x: Vec<_> = "hello".chars().rev().collect(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0283.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0283.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0283.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0283.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,8 +1,52 @@ -An implementation cannot be chosen unambiguously because of lack of information. +The compiler could not infer a type and asked for a type annotation. Erroneous code example: ```compile_fail,E0283 +let x = "hello".chars().rev().collect(); +``` + +This error indicates that type inference did not result in one unique possible +type, and extra information is required. In most cases this can be provided +by adding a type annotation. Sometimes you need to specify a generic type +parameter manually. + +A common example is the `collect` method on `Iterator`. It has a generic type +parameter with a `FromIterator` bound, which for a `char` iterator is +implemented by `Vec` and `String` among others. Consider the following snippet +that reverses the characters of a string: + +In the first code example, the compiler cannot infer what the type of `x` should +be: `Vec` and `String` are both suitable candidates. To specify which type +to use, you can use a type annotation on `x`: + +``` +let x: Vec = "hello".chars().rev().collect(); +``` + +It is not necessary to annotate the full type. 
Once the ambiguity is resolved, +the compiler can infer the rest: + +``` +let x: Vec<_> = "hello".chars().rev().collect(); +``` + +Another way to provide the compiler with enough information, is to specify the +generic type parameter: + +``` +let x = "hello".chars().rev().collect::>(); +``` + +Again, you need not specify the full type if the compiler can infer it: + +``` +let x = "hello".chars().rev().collect::>(); +``` + +We can see a self-contained example below: + +```compile_fail,E0283 struct Foo; impl Into for Foo { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0457.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0457.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0457.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0457.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,5 @@ +#### Note: this error code is no longer emitted by the compiler` + Plugin `..` only found in rlib format, but must be available in dylib format. Erroneous code example: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0463.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0463.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0463.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0463.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,16 +1,13 @@ -A plugin/crate was declared but cannot be found. +A crate was declared but cannot be found. Erroneous code example: ```compile_fail,E0463 -#![feature(plugin)] -#![plugin(cookie_monster)] // error: can't find crate for `cookie_monster` -extern crate cake_is_a_lie; // error: can't find crate for `cake_is_a_lie` +extern crate foo; // error: can't find crate ``` You need to link your code to the relevant crate in order to be able to use it -(through Cargo or the `-L` option of rustc example). Plugins are crates as -well, and you link to them the same way. +(through Cargo or the `-L` option of rustc, for example). ## Common causes diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0498.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0498.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0498.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0498.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,8 +1,10 @@ +#### Note: this error code is no longer emitted by the compiler. + The `plugin` attribute was malformed. Erroneous code example: -```compile_fail,E0498 +```ignore (E0498 is no longer emitted) #![feature(plugin)] #![plugin(foo(args))] // error: invalid argument #![plugin(bar="test")] // error: invalid argument diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0551.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0551.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0551.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0551.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,8 +1,10 @@ +#### Note: this error code is no longer emitted by the compiler + An invalid meta-item was used inside an attribute. 
Erroneous code example: -```compile_fail,E0551 +```compile_fail,E0539 #[deprecated(note)] // error! fn i_am_deprecated() {} ``` diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0626.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0626.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0626.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0626.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,11 +1,11 @@ -This error occurs because a borrow in a generator persists across a +This error occurs because a borrow in a coroutine persists across a yield point. Erroneous code example: ```compile_fail,E0626 -# #![feature(generators, generator_trait, pin)] -# use std::ops::Generator; +# #![feature(coroutines, coroutine_trait, pin)] +# use std::ops::Coroutine; # use std::pin::Pin; let mut b = || { let a = &String::new(); // <-- This borrow... @@ -23,8 +23,8 @@ the integer by value: ``` -# #![feature(generators, generator_trait, pin)] -# use std::ops::Generator; +# #![feature(coroutines, coroutine_trait, pin)] +# use std::ops::Coroutine; # use std::pin::Pin; let mut b = || { let a = 3; @@ -41,8 +41,8 @@ This error also frequently arises with iteration: ```compile_fail,E0626 -# #![feature(generators, generator_trait, pin)] -# use std::ops::Generator; +# #![feature(coroutines, coroutine_trait, pin)] +# use std::ops::Coroutine; # use std::pin::Pin; let mut b = || { let v = vec![1,2,3]; @@ -57,8 +57,8 @@ `into_iter()`) to avoid borrowing: ``` -# #![feature(generators, generator_trait, pin)] -# use std::ops::Generator; +# #![feature(coroutines, coroutine_trait, pin)] +# use std::ops::Coroutine; # use std::pin::Pin; let mut b = || { let v = vec![1,2,3]; @@ -72,8 +72,8 @@ If taking ownership is not an option, using indices can work too: ``` -# #![feature(generators, generator_trait, pin)] -# use std::ops::Generator; +# #![feature(coroutines, coroutine_trait, pin)] +# use std::ops::Coroutine; # use std::pin::Pin; let mut b = || { let v = vec![1,2,3]; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0627.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0627.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0627.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0627.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,28 +1,28 @@ -A yield expression was used outside of the generator literal. +A yield expression was used outside of the coroutine literal. Erroneous code example: ```compile_fail,E0627 -#![feature(generators, generator_trait)] +#![feature(coroutines, coroutine_trait)] -fn fake_generator() -> &'static str { +fn fake_coroutine() -> &'static str { yield 1; return "foo" } fn main() { - let mut generator = fake_generator; + let mut coroutine = fake_coroutine; } ``` -The error occurs because keyword `yield` can only be used inside the generator -literal. This can be fixed by constructing the generator correctly. +The error occurs because keyword `yield` can only be used inside the coroutine +literal. This can be fixed by constructing the coroutine correctly. 
``` -#![feature(generators, generator_trait)] +#![feature(coroutines, coroutine_trait)] fn main() { - let mut generator = || { + let mut coroutine = || { yield 1; return "foo" }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0628.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0628.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0628.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0628.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,13 +1,13 @@ -More than one parameter was used for a generator. +More than one parameter was used for a coroutine. Erroneous code example: ```compile_fail,E0628 -#![feature(generators, generator_trait)] +#![feature(coroutines, coroutine_trait)] fn main() { - let generator = |a: i32, b: i32| { - // error: too many parameters for a generator + let coroutine = |a: i32, b: i32| { + // error: too many parameters for a coroutine // Allowed only 0 or 1 parameter yield a; }; @@ -15,15 +15,15 @@ ``` At present, it is not permitted to pass more than one explicit -parameter for a generator.This can be fixed by using -at most 1 parameter for the generator. For example, we might resolve +parameter for a coroutine.This can be fixed by using +at most 1 parameter for the coroutine. For example, we might resolve the previous example by passing only one parameter. ``` -#![feature(generators, generator_trait)] +#![feature(coroutines, coroutine_trait)] fn main() { - let generator = |a: i32| { + let coroutine = |a: i32| { yield a; }; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0698.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0698.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0698.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0698.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,7 @@ #### Note: this error code is no longer emitted by the compiler. -When using generators (or async) all type variables must be bound so a -generator can be constructed. +When using coroutines (or async) all type variables must be bound so a +coroutine can be constructed. Erroneous code example: @@ -15,7 +15,7 @@ In the above example `T` is unknowable by the compiler. To fix this you must bind `T` to a concrete type such as `String` -so that a generator can then be constructed: +so that a coroutine can then be constructed: ```edition2018 async fn bar() -> () {} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0706.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0706.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0706.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0706.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,8 +1,10 @@ +#### Note: this error code is no longer emitted by the compiler. + `async fn`s are not yet supported in traits in Rust. Erroneous code example: -```compile_fail,edition2018 +```ignore,edition2018 trait T { // Neither case is currently supported. 
async fn foo() {} @@ -13,7 +15,7 @@ `async fn`s return an `impl Future`, making the following two examples equivalent: -```edition2018,ignore (example-of-desugaring-equivalence) +```ignore,edition2018 (example-of-desugaring-equivalence) async fn foo() -> User { unimplemented!() } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0727.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0727.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0727.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0727.md 2023-12-21 16:55:28.000000000 +0000 @@ -3,10 +3,10 @@ Erroneous code example: ```compile_fail,E0727,edition2018 -#![feature(generators)] +#![feature(coroutines)] fn main() { - let generator = || { + let coroutine = || { async { yield; } @@ -20,10 +20,10 @@ To fix this error, you have to move `yield` out of the `async` block: ```edition2018 -#![feature(generators)] +#![feature(coroutines)] fn main() { - let generator = || { + let coroutine = || { yield; }; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0790.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0790.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0790.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0790.md 2023-12-21 16:55:28.000000000 +0000 @@ -4,24 +4,24 @@ Erroneous code example: ```compile_fail,E0790 -trait Generator { +trait Coroutine { fn create() -> u32; } struct Impl; -impl Generator for Impl { +impl Coroutine for Impl { fn create() -> u32 { 1 } } struct AnotherImpl; -impl Generator for AnotherImpl { +impl Coroutine for AnotherImpl { fn create() -> u32 { 2 } } -let cont: u32 = Generator::create(); -// error, impossible to choose one of Generator trait implementation +let cont: u32 = Coroutine::create(); +// error, impossible to choose one of Coroutine trait implementation // Should it be Impl or AnotherImpl, maybe something else? ``` @@ -30,18 +30,18 @@ type: ``` -trait Generator { +trait Coroutine { fn create() -> u32; } struct AnotherImpl; -impl Generator for AnotherImpl { +impl Coroutine for AnotherImpl { fn create() -> u32 { 2 } } let gen1 = AnotherImpl::create(); // if there are multiple methods with same name (different traits) -let gen2 = ::create(); +let gen2 = ::create(); ``` diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0795.md rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0795.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0795.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes/E0795.md 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,28 @@ +Invalid argument for the `offset_of!` macro. + +Erroneous code example: + +```compile_fail,E0795 +#![feature(offset_of, offset_of_enum)] + +let x = std::mem::offset_of!(Option, Some); +``` + +The `offset_of!` macro gives the offset of a field within a type. It can +navigate through enum variants, but the final component of its second argument +must be a field and not a variant. 
+ +The offset of the contained `u8` in the `Option` can be found by specifying +the field name `0`: + +``` +#![feature(offset_of, offset_of_enum)] + +let x: usize = std::mem::offset_of!(Option, Some.0); +``` + +The discriminant of an enumeration may be read with `core::mem::discriminant`, +but this is not always a value physically present within the enum. + +Further information about enum layout may be found at +https://rust-lang.github.io/unsafe-code-guidelines/layout/enums.html. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/error_codes.rs 2023-12-21 16:55:28.000000000 +0000 @@ -514,6 +514,7 @@ E0792: include_str!("./error_codes/E0792.md"), E0793: include_str!("./error_codes/E0793.md"), E0794: include_str!("./error_codes/E0794.md"), +E0795: include_str!("./error_codes/E0795.md"), } // Undocumented removed error codes. Note that many removed error codes are kept in the list above diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_codes/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,6 @@ +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] #![deny(rustdoc::invalid_codeblock_attributes)] #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_messages/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_messages/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_messages/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_messages/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,23 +3,25 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start fluent-bundle = "0.15.2" fluent-syntax = "0.11" +icu_list = "1.2" +icu_locid = "1.2" +icu_provider_adapters = "1.2" intl-memoizer = "0.5.1" rustc_baked_icu_data = { path = "../rustc_baked_icu_data" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } -rustc_macros = { path = "../rustc_macros" } tracing = "0.1" unic-langid = { version = "0.9.0", features = ["macros"] } -icu_list = "1.2" -icu_locid = "1.2" -icu_provider_adapters = "1.2" +# tidy-alphabetical-end [features] +# tidy-alphabetical-start rustc_use_parallel_compiler = ['rustc_baked_icu_data/rustc_use_parallel_compiler'] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_messages/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_messages/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_error_messages/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_error_messages/src/lib.rs 2023-12-21 16:55:28.000000000 
+0000 @@ -1,3 +1,5 @@ +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature(let_chains)] #![feature(lazy_cell)] #![feature(rustc_attrs)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_errors/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_errors/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_errors/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_errors/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,29 +3,29 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] -tracing = "0.1" +# tidy-alphabetical-start +annotate-snippets = "0.9" +derive_setters = "0.1.6" rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } +rustc_data_structures = { path = "../rustc_data_structures" } rustc_error_messages = { path = "../rustc_error_messages" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } +rustc_lint_defs = { path = "../rustc_lint_defs" } +rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } -rustc_macros = { path = "../rustc_macros" } -rustc_data_structures = { path = "../rustc_data_structures" } rustc_target = { path = "../rustc_target" } -rustc_hir = { path = "../rustc_hir" } -rustc_lint_defs = { path = "../rustc_lint_defs" } rustc_type_ir = { path = "../rustc_type_ir" } -unicode-width = "0.1.4" -termcolor = "1.2.0" -annotate-snippets = "0.9" -termize = "0.1.1" serde = { version = "1.0.125", features = [ "derive" ] } serde_json = "1.0.59" -derive_setters = "0.1.6" +termcolor = "1.2.0" +termize = "0.1.1" +tracing = "0.1" +unicode-width = "0.1.4" +# tidy-alphabetical-end [target.'cfg(windows)'.dependencies.windows] version = "0.48.0" @@ -36,4 +36,6 @@ ] [features] +# tidy-alphabetical-start rustc_use_parallel_compiler = ['rustc_error_messages/rustc_use_parallel_compiler'] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/diagnostic_builder.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/diagnostic_builder.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/diagnostic_builder.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/diagnostic_builder.rs 2023-12-21 16:55:28.000000000 +0000 @@ -659,6 +659,7 @@ msg: impl Into, ) -> &mut Self); forward!(pub fn help(&mut self, msg: impl Into) -> &mut Self); + forward!(pub fn help_once(&mut self, msg: impl Into) -> &mut Self); forward!(pub fn span_help( &mut self, sp: impl Into, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/emitter.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/emitter.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/emitter.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/emitter.rs 2023-12-21 16:55:28.000000000 +0000 @@ -23,7 +23,7 @@ use rustc_lint_defs::pluralize; use derive_setters::Setters; -use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; +use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet}; use rustc_data_structures::sync::{DynSend, IntoDynSyncSend, Lrc}; use rustc_error_messages::{FluentArgs, SpanLabel}; use rustc_span::hygiene::{ExpnKind, MacroKind}; @@ -337,9 +337,7 @@ && last_name != name { let descr = macro_kind.descr(); - format!( - " which comes from the expansion of the 
{descr} `{last_name}`", - ) + format!(" which comes from the expansion of the {descr} `{last_name}`",) } else { "".to_string() }; @@ -372,7 +370,7 @@ } fn render_multispan_macro_backtrace(&self, span: &mut MultiSpan, always_backtrace: bool) { - let mut new_labels: Vec<(Span, String)> = vec![]; + let mut new_labels = FxIndexSet::default(); for &sp in span.primary_spans() { if sp.is_dummy() { @@ -389,7 +387,7 @@ } if always_backtrace { - new_labels.push(( + new_labels.insert(( trace.def_site, format!( "in this expansion of `{}`{}", @@ -433,7 +431,7 @@ format!("this {} desugaring", kind.descr()).into() } }; - new_labels.push(( + new_labels.insert(( trace.call_site, format!( "in {}{}", @@ -1350,7 +1348,14 @@ buffer.append(0, "]", Style::Level(*level)); label_width += 2 + code.len(); } - let header_style = if is_secondary { Style::HeaderMsg } else { Style::MainHeaderMsg }; + let header_style = if is_secondary { + Style::HeaderMsg + } else if self.short_message { + // For short messages avoid bolding the message, as it doesn't look great (#63835). + Style::NoStyle + } else { + Style::MainHeaderMsg + }; if *level != Level::FailureNote { buffer.append(0, ": ", header_style); label_width += 2; @@ -1935,7 +1940,9 @@ is_multiline, ) } - if let DisplaySuggestion::Add = show_code_change && is_item_attribute { + if let DisplaySuggestion::Add = show_code_change + && is_item_attribute + { // The suggestion adds an entire line of code, ending on a newline, so we'll also // print the *following* line, to provide context of what we're advising people to // do. Otherwise you would only see contextless code that can be confused for @@ -2355,11 +2362,7 @@ let label = label.as_ref().map(|m| { normalize_whitespace( - &emitter - .translate_message(m, &args) - .map_err(Report::new) - .unwrap() - .to_string(), + &emitter.translate_message(m, &args).map_err(Report::new).unwrap(), ) }); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,6 +3,8 @@ //! This module contains the code for creating and emitting diagnostics. #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature(array_windows)] #![feature(extract_if)] #![feature(if_let_guard)] @@ -505,6 +507,9 @@ CallAssocMethod, TraitMissingMethod, OpaqueHiddenTypeMismatch, + MaybeForgetReturn, + /// Query cycle detected, stashing in favor of a better error. + Cycle, } fn default_track_diagnostic(d: &mut Diagnostic, f: &mut dyn FnMut(&mut Diagnostic)) { @@ -551,7 +556,7 @@ // instead of "require some error happened". Sadly that isn't ideal, as // lints can be `#[allow]`'d, potentially leading to this triggering. // Also, "good path" should be replaced with a better naming. 
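The emitter hunk above changes `new_labels` from a `Vec` to an `FxIndexSet`, so identical macro-backtrace labels are recorded only once while their order is preserved. A std-only sketch of the same idea; the real code relies on the indexmap-backed `FxIndexSet` rather than a manual `Vec` plus `HashSet` pair.

```rust
use std::collections::HashSet;

// Keep the first occurrence of each label, preserving insertion order,
// roughly what an insertion-ordered set buys the emitter.
fn dedup_labels(labels: Vec<(usize, String)>) -> Vec<(usize, String)> {
    let mut seen = HashSet::new();
    let mut out = Vec::new();
    for label in labels {
        if seen.insert(label.clone()) {
            out.push(label);
        }
    }
    out
}

fn main() {
    let labels = vec![
        (1, "in this macro invocation".to_string()),
        (1, "in this macro invocation".to_string()),
        (2, "in this expansion of `m!`".to_string()),
    ];
    assert_eq!(dedup_labels(labels).len(), 2);
}
```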
- if !self.has_any_message() && !self.suppressed_expected_diag { + if !self.has_any_message() && !self.suppressed_expected_diag && !std::thread::panicking() { let bugs = std::mem::replace(&mut self.delayed_good_path_bugs, Vec::new()); self.flush_delayed( bugs, @@ -1376,16 +1381,16 @@ self.emitted_diagnostic_codes.insert(code.clone()); } - let already_emitted = |this: &mut Self| { + let already_emitted = { let mut hasher = StableHasher::new(); diagnostic.hash(&mut hasher); let diagnostic_hash = hasher.finish(); - !this.emitted_diagnostics.insert(diagnostic_hash) + !self.emitted_diagnostics.insert(diagnostic_hash) }; // Only emit the diagnostic if we've been asked to deduplicate or // haven't already emitted an equivalent diagnostic. - if !(self.flags.deduplicate_diagnostics && already_emitted(self)) { + if !(self.flags.deduplicate_diagnostics && already_emitted) { debug!(?diagnostic); debug!(?self.emitted_diagnostics); let already_emitted_sub = |sub: &mut SubDiagnostic| { @@ -1401,6 +1406,11 @@ }; diagnostic.children.extract_if(already_emitted_sub).for_each(|_| {}); + if already_emitted { + diagnostic.note( + "duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`", + ); + } self.emitter.emit_diagnostic(diagnostic); if diagnostic.is_error() { @@ -1666,7 +1676,11 @@ let _ = write!( &mut out, "delayed span bug: {}\n{}\n", - bug.inner.styled_message().iter().filter_map(|(msg, _)| msg.as_str()).collect::(), + bug.inner + .styled_message() + .iter() + .filter_map(|(msg, _)| msg.as_str()) + .collect::(), &bug.note ); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_errors/src/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -151,12 +151,14 @@ primary: box TranslateError::One { kind: TranslateErrorKind::PrimaryBundleMissing, .. }, fallback: box TranslateError::One { kind: TranslateErrorKind::Fluent { errs }, .. }, } = &err - && let [FluentError::ResolverError(ResolverError::Reference( - ReferenceKind::Message { id, .. } - | ReferenceKind::Variable { id, .. }, - ))] = &**errs + && let [ + FluentError::ResolverError(ResolverError::Reference( + ReferenceKind::Message { id, .. } | ReferenceKind::Variable { id, .. }, + )), + ] = &**errs && id == "name" - {} else { + { + } else { panic!("{err:#?}") }; assert_eq!( @@ -176,12 +178,14 @@ primary: box TranslateError::One { kind: TranslateErrorKind::PrimaryBundleMissing, .. }, fallback: box TranslateError::One { kind: TranslateErrorKind::Fluent { errs }, .. }, } = &err - && let [FluentError::ResolverError(ResolverError::Reference( - ReferenceKind::Message { id, .. } - | ReferenceKind::Variable { id, .. }, - ))] = &**errs + && let [ + FluentError::ResolverError(ResolverError::Reference( + ReferenceKind::Message { id, .. } | ReferenceKind::Variable { id, .. 
}, + )), + ] = &**errs && id == "oops" - {} else { + { + } else { panic!("{err:#?}") }; assert_eq!( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -8,9 +8,10 @@ doctest = false [dependencies] +# tidy-alphabetical-start crossbeam-channel = "0.5.0" -rustc_ast_passes = { path = "../rustc_ast_passes" } rustc_ast = { path = "../rustc_ast" } +rustc_ast_passes = { path = "../rustc_ast_passes" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_attr = { path = "../rustc_attr" } rustc_data_structures = { path = "../rustc_data_structures" } @@ -25,6 +26,7 @@ rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +termcolor = "1.2" thin-vec = "0.2.12" tracing = "0.1" -termcolor = "1.2" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -86,6 +86,7 @@ expand_module_file_not_found = file not found for module `{$name}` .help = to create the module `{$name}`, create file "{$default_path}" or "{$secondary_path}" + .note = if there is a `mod {$name}` elsewhere in the crate already, import it with `use crate::...` instead expand_module_in_block = cannot declare a non-inline module inside a block unless it has a path attribute diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/config.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/config.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/config.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/config.rs 2023-12-21 16:55:28.000000000 +0000 @@ -13,17 +13,16 @@ use rustc_ast::{self as ast, AttrStyle, Attribute, HasAttrs, HasTokens, MetaItem}; use rustc_attr as attr; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; -use rustc_data_structures::fx::FxHashMap; -use rustc_feature::{Feature, Features, State as FeatureState}; -use rustc_feature::{ - ACCEPTED_FEATURES, ACTIVE_FEATURES, REMOVED_FEATURES, STABLE_REMOVED_FEATURES, -}; +use rustc_data_structures::fx::FxHashSet; +use rustc_feature::Features; +use rustc_feature::{ACCEPTED_FEATURES, REMOVED_FEATURES, UNSTABLE_FEATURES}; use rustc_parse::validate_attr; use rustc_session::parse::feature_err; use rustc_session::Session; -use rustc_span::edition::{Edition, ALL_EDITIONS}; +use rustc_span::edition::ALL_EDITIONS; use rustc_span::symbol::{sym, Symbol}; -use rustc_span::{Span, DUMMY_SP}; +use rustc_span::Span; +use thin_vec::ThinVec; /// A folder that strips out items that do not belong in the current configuration. 
pub struct StripUnconfigured<'a> { @@ -36,85 +35,56 @@ pub lint_node_id: NodeId, } -pub fn features(sess: &Session, krate_attrs: &[Attribute]) -> Features { - fn feature_removed(sess: &Session, span: Span, reason: Option<&str>) { - sess.emit_err(FeatureRemoved { - span, - reason: reason.map(|reason| FeatureRemovedReason { reason }), - }); - } - - fn active_features_up_to(edition: Edition) -> impl Iterator { - ACTIVE_FEATURES.iter().filter(move |feature| { - if let Some(feature_edition) = feature.edition { - feature_edition <= edition - } else { - false - } - }) +pub fn features(sess: &Session, krate_attrs: &[Attribute], crate_name: Symbol) -> Features { + fn feature_list(attr: &Attribute) -> ThinVec { + if attr.has_name(sym::feature) + && let Some(list) = attr.meta_item_list() + { + list + } else { + ThinVec::new() + } } let mut features = Features::default(); - let mut edition_enabled_features = FxHashMap::default(); - let crate_edition = sess.edition(); - for &edition in ALL_EDITIONS { - if edition <= crate_edition { - // The `crate_edition` implies its respective umbrella feature-gate - // (i.e., `#![feature(rust_20XX_preview)]` isn't needed on edition 20XX). - edition_enabled_features.insert(edition.feature_name(), edition); - } - } - - for feature in active_features_up_to(crate_edition) { - feature.set(&mut features, DUMMY_SP); - edition_enabled_features.insert(feature.name, crate_edition); - } + // The edition from `--edition`. + let crate_edition = sess.edition(); - // Process the edition umbrella feature-gates first, to ensure - // `edition_enabled_features` is completed before it's queried. + // The maximum of (a) the edition from `--edition` and (b) any edition + // umbrella feature-gates declared in the code. + // - E.g. if `crate_edition` is 2015 but `rust_2018_preview` is present, + // `feature_edition` is 2018 + let mut features_edition = crate_edition; for attr in krate_attrs { - if !attr.has_name(sym::feature) { - continue; - } - - let Some(list) = attr.meta_item_list() else { - continue; - }; - - for mi in list { - if !mi.is_word() { - continue; - } - - let name = mi.name_or_empty(); - - let edition = ALL_EDITIONS.iter().find(|e| name == e.feature_name()).copied(); - if let Some(edition) = edition { - if edition <= crate_edition { - continue; - } - - for feature in active_features_up_to(edition) { - // FIXME(Manishearth) there is currently no way to set - // lib features by edition - feature.set(&mut features, DUMMY_SP); - edition_enabled_features.insert(feature.name, edition); + for mi in feature_list(attr) { + if mi.is_word() { + let name = mi.name_or_empty(); + let edition = ALL_EDITIONS.iter().find(|e| name == e.feature_name()).copied(); + if let Some(edition) = edition + && edition > features_edition + { + features_edition = edition; } } } } - for attr in krate_attrs { - if !attr.has_name(sym::feature) { - continue; + // Enable edition-dependent features based on `features_edition`. + // - E.g. enable `test_2018_feature` if `features_edition` is 2018 or higher + let mut edition_enabled_features = FxHashSet::default(); + for f in UNSTABLE_FEATURES { + if let Some(edition) = f.feature.edition && edition <= features_edition { + // FIXME(Manishearth) there is currently no way to set lib features by + // edition. + edition_enabled_features.insert(f.feature.name); + (f.set_enabled)(&mut features); } + } - let Some(list) = attr.meta_item_list() else { - continue; - }; - - for mi in list { + // Process all features declared in the code. 
+ for attr in krate_attrs { + for mi in feature_list(attr) { let name = match mi.ident() { Some(ident) if mi.is_word() => ident.name, Some(ident) => { @@ -136,38 +106,57 @@ } }; - if let Some(&edition) = edition_enabled_features.get(&name) { + // If the declared feature is an edition umbrella feature-gate, + // warn if it was redundant w.r.t. `crate_edition`. + // - E.g. warn if `rust_2018_preview` is declared when + // `crate_edition` is 2018 + // - E.g. don't warn if `rust_2018_preview` is declared when + // `crate_edition` is 2015. + if let Some(&edition) = ALL_EDITIONS.iter().find(|e| name == e.feature_name()) { + if edition <= crate_edition { + sess.emit_warning(FeatureIncludedInEdition { + span: mi.span(), + feature: name, + edition, + }); + } + features.set_declared_lang_feature(name, mi.span(), None); + continue; + } + + // If the declared feature is edition-dependent and was already + // enabled due to `feature_edition`, give a warning. + // - E.g. warn if `test_2018_feature` is declared when + // `feature_edition` is 2018 or higher. + if edition_enabled_features.contains(&name) { sess.emit_warning(FeatureIncludedInEdition { span: mi.span(), feature: name, - edition, + edition: features_edition, }); + features.set_declared_lang_feature(name, mi.span(), None); continue; } - if ALL_EDITIONS.iter().any(|e| name == e.feature_name()) { - // Handled in the separate loop above. + // If the declared feature has been removed, issue an error. + if let Some(f) = REMOVED_FEATURES.iter().find(|f| name == f.feature.name) { + sess.emit_err(FeatureRemoved { + span: mi.span(), + reason: f.reason.map(|reason| FeatureRemovedReason { reason }), + }); continue; } - let removed = REMOVED_FEATURES.iter().find(|f| name == f.name); - let stable_removed = STABLE_REMOVED_FEATURES.iter().find(|f| name == f.name); - if let Some(Feature { state, .. }) = removed.or(stable_removed) { - if let FeatureState::Removed { reason } | FeatureState::Stabilized { reason } = - state - { - feature_removed(sess, mi.span(), *reason); - continue; - } - } - - if let Some(Feature { since, .. }) = ACCEPTED_FEATURES.iter().find(|f| name == f.name) { - let since = Some(Symbol::intern(since)); - features.declared_lang_features.push((name, mi.span(), since)); - features.active_features.insert(name); + // If the declared feature is stable, record it. + if let Some(f) = ACCEPTED_FEATURES.iter().find(|f| name == f.name) { + let since = Some(Symbol::intern(f.since)); + features.set_declared_lang_feature(name, mi.span(), since); continue; } + // If `-Z allow-features` is used and the declared feature is + // unstable and not also listed as one of the allowed features, + // issue an error. if let Some(allowed) = sess.opts.unstable_opts.allow_features.as_ref() { if allowed.iter().all(|f| name.as_str() != f) { sess.emit_err(FeatureNotAllowed { span: mi.span(), name }); @@ -175,15 +164,25 @@ } } - if let Some(f) = ACTIVE_FEATURES.iter().find(|f| name == f.name) { - f.set(&mut features, mi.span()); - features.declared_lang_features.push((name, mi.span(), None)); - features.active_features.insert(name); + // If the declared feature is unstable, record it. + if let Some(f) = UNSTABLE_FEATURES.iter().find(|f| name == f.feature.name) { + (f.set_enabled)(&mut features); + // When the ICE comes from core, alloc or std (approximation of the standard library), there's a chance + // that the person hitting the ICE may be using -Zbuild-std or similar with an untested target. 
+ // The bug is probably in the standard library and not the compiler in that case, but that doesn't + // really matter - we want a bug report. + if features.internal(name) + && ![sym::core, sym::alloc, sym::std].contains(&crate_name) + { + sess.using_internal_features.store(true, std::sync::atomic::Ordering::Relaxed); + } + features.set_declared_lang_feature(name, mi.span(), None); continue; } - features.declared_lib_features.push((name, mi.span())); - features.active_features.insert(name); + // Otherwise, the feature is unknown. Record it as a lib feature. + // It will be checked later. + features.set_declared_lib_feature(name, mi.span()); } } @@ -252,7 +251,8 @@ let trees: Vec<_> = stream .0 .iter() - .flat_map(|tree| match tree.clone() { + .flat_map(|tree| { + match tree.clone() { AttrTokenTree::Attributes(mut data) => { data.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr)); @@ -267,18 +267,17 @@ } AttrTokenTree::Delimited(sp, delim, mut inner) => { inner = self.configure_tokens(&inner); - Some(AttrTokenTree::Delimited(sp, delim, inner)) - .into_iter() + Some(AttrTokenTree::Delimited(sp, delim, inner)).into_iter() } - AttrTokenTree::Token(ref token, _) if let TokenKind::Interpolated(nt) = &token.kind => { - panic!( - "Nonterminal should have been flattened at {:?}: {:?}", - token.span, nt - ); + AttrTokenTree::Token(ref token, _) + if let TokenKind::Interpolated(nt) = &token.kind => + { + panic!("Nonterminal should have been flattened at {:?}: {:?}", token.span, nt); } AttrTokenTree::Token(token, spacing) => { Some(AttrTokenTree::Token(token, spacing)).into_iter() } + } }) .collect(); AttrTokenStream::new(trees) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -350,6 +350,7 @@ #[derive(Diagnostic)] #[diag(expand_module_file_not_found, code = "E0583")] #[help] +#[note] pub(crate) struct ModuleFileNotFound { #[primary_span] pub span: Span, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,5 @@ +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature(array_windows)] #![feature(associated_type_bounds)] #![feature(associated_type_defaults)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/mbe/macro_rules.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/mbe/macro_rules.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/mbe/macro_rules.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/mbe/macro_rules.rs 2023-12-21 16:55:28.000000000 +0000 @@ -716,18 +716,18 @@ match rhs { mbe::TokenTree::Delimited(_sp, d) => { let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| { - if let mbe::TokenTree::Token(ident) = ident && - let TokenKind::Ident(ident, _) = ident.kind && - ident == sym::compile_error && - let mbe::TokenTree::Token(bang) = bang && - let TokenKind::Not = bang.kind && - let 
mbe::TokenTree::Delimited(_, del) = args && - del.delim != Delimiter::Invisible - { - true - } else { - false - } + if let mbe::TokenTree::Token(ident) = ident + && let TokenKind::Ident(ident, _) = ident.kind + && ident == sym::compile_error + && let mbe::TokenTree::Token(bang) = bang + && let TokenKind::Not = bang.kind + && let mbe::TokenTree::Delimited(_, del) = args + && del.delim != Delimiter::Invisible + { + true + } else { + false + } }); if has_compile_error { true } else { d.tts.iter().any(has_compile_error_macro) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/mbe/metavar_expr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/mbe/metavar_expr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/mbe/metavar_expr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/mbe/metavar_expr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -124,8 +124,7 @@ && let Ok(n_usize) = usize::try_from(n_u128) { Ok(n_usize) - } - else { + } else { let msg = "only unsuffixes integer literals are supported in meta-variable expressions"; Err(sess.span_diagnostic.struct_span_err(span, msg)) } @@ -137,15 +136,16 @@ sess: &'sess ParseSess, span: Span, ) -> PResult<'sess, Ident> { - if let Some(tt) = iter.next() && let TokenTree::Token(token, _) = tt { + if let Some(tt) = iter.next() + && let TokenTree::Token(token, _) = tt + { if let Some((elem, false)) = token.ident() { return Ok(elem); } let token_str = pprust::token_to_string(token); - let mut err = sess.span_diagnostic.struct_span_err( - span, - format!("expected identifier, found `{}`", &token_str) - ); + let mut err = sess + .span_diagnostic + .struct_span_err(span, format!("expected identifier, found `{}`", &token_str)); err.span_suggestion( token.span, format!("try removing `{}`", &token_str), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/module.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/module.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/module.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/module.rs 2023-12-21 16:55:28.000000000 +0000 @@ -91,7 +91,9 @@ inline: Inline, ) -> (PathBuf, DirOwnership) { match inline { - Inline::Yes if let Some(file_path) = mod_file_path_from_attr(sess, attrs, &module.dir_path) => { + Inline::Yes + if let Some(file_path) = mod_file_path_from_attr(sess, attrs, &module.dir_path) => + { // For inline modules file path from `#[path]` is actually the directory path // for historical reasons, so we don't pop the last segment here. 
(file_path, DirOwnership::Owned { relative: None }) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/placeholders.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/placeholders.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/placeholders.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/placeholders.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,8 +4,8 @@ use rustc_ast::ptr::P; use rustc_ast::token::Delimiter; use rustc_data_structures::fx::FxHashMap; -use rustc_span::source_map::DUMMY_SP; use rustc_span::symbol::Ident; +use rustc_span::DUMMY_SP; use smallvec::{smallvec, SmallVec}; use thin_vec::ThinVec; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/proc_macro_server.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/proc_macro_server.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/proc_macro_server.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/proc_macro_server.rs 2023-12-21 16:55:28.000000000 +0000 @@ -226,9 +226,8 @@ })); } - Interpolated(nt) if let NtIdent(ident, is_raw) = *nt => { - trees.push(TokenTree::Ident(Ident { sym: ident.name, is_raw, span: ident.span })) - } + Interpolated(nt) if let NtIdent(ident, is_raw) = *nt => trees + .push(TokenTree::Ident(Ident { sym: ident.name, is_raw, span: ident.span })), Interpolated(nt) => { let stream = TokenStream::from_nonterminal_ast(&nt); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_expand/src/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,7 +2,7 @@ use rustc_ast::tokenstream::TokenStream; use rustc_parse::{new_parser_from_source_str, parser::Parser, source_file_to_stream}; use rustc_session::parse::ParseSess; -use rustc_span::create_default_session_if_not_set_then; +use rustc_span::create_default_session_globals_then; use rustc_span::source_map::{FilePathMapping, SourceMap}; use rustc_span::{BytePos, Span}; @@ -181,7 +181,7 @@ } fn test_harness(file_text: &str, span_labels: Vec, expected_output: &str) { - create_default_session_if_not_set_then(|_| { + create_default_session_globals_then(|| { let (handler, source_map, output) = create_test_handler(); source_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,8 +3,8 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start rustc_data_structures = { path = "../rustc_data_structures" } rustc_span = { path = "../rustc_span" } +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/accepted.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/accepted.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/accepted.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/accepted.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,23 
+1,20 @@ //! List of the accepted feature gates. -use super::{to_nonzero, Feature, State}; +use super::{to_nonzero, Feature}; use rustc_span::symbol::sym; macro_rules! declare_features { ($( $(#[doc = $doc:tt])* (accepted, $feature:ident, $ver:expr, $issue:expr, None), )+) => { - /// Those language feature has since been Accepted (it was once Active) + /// Formerly unstable features that have now been accepted (stabilized). pub const ACCEPTED_FEATURES: &[Feature] = &[ - $( - Feature { - state: State::Accepted, - name: sym::$feature, - since: $ver, - issue: to_nonzero($issue), - edition: None, - } - ),+ + $(Feature { + name: sym::$feature, + since: $ver, + issue: to_nonzero($issue), + edition: None, + }),+ ]; } } @@ -67,6 +64,8 @@ (accepted, associated_types, "1.0.0", None, None), /// Allows free and inherent `async fn`s, `async` blocks, and `.await` expressions. (accepted, async_await, "1.39.0", Some(50547), None), + /// Allows async functions to be declared, implemented, and used in traits. + (accepted, async_fn_in_trait, "1.75.0", Some(91611), None), /// Allows all literals in attribute lists and values of key-value pairs. (accepted, attr_literals, "1.30.0", Some(34981), None), /// Allows overloading augmented assignment operations like `a += b`. @@ -306,6 +305,8 @@ (accepted, repr_packed, "1.33.0", Some(33158), None), /// Allows `#[repr(transparent)]` attribute on newtype structs. (accepted, repr_transparent, "1.28.0", Some(43036), None), + /// Allows return-position `impl Trait` in traits. + (accepted, return_position_impl_trait_in_trait, "1.75.0", Some(91611), None), /// Allows code like `let x: &'static u32 = &42` to work (RFC 1414). (accepted, rvalue_static_promotion, "1.21.0", Some(38865), None), /// Allows `Self` in type definitions (RFC 2300). diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/active.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/active.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/active.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/active.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,613 +0,0 @@ -//! List of the active feature gates. - -use super::{to_nonzero, Feature, State}; - -use rustc_data_structures::fx::FxHashSet; -use rustc_span::edition::Edition; -use rustc_span::symbol::{sym, Symbol}; -use rustc_span::Span; - -macro_rules! set { - ($field: ident) => {{ - fn f(features: &mut Features, _: Span) { - features.$field = true; - } - f as fn(&mut Features, Span) - }}; -} - -#[derive(PartialEq)] -enum FeatureStatus { - Default, - Incomplete, - Internal, -} - -macro_rules! declare_features { - (__status_to_enum active) => { - FeatureStatus::Default - }; - (__status_to_enum incomplete) => { - FeatureStatus::Incomplete - }; - (__status_to_enum internal) => { - FeatureStatus::Internal - }; - ($( - $(#[doc = $doc:tt])* ($status:ident, $feature:ident, $ver:expr, $issue:expr, $edition:expr), - )+) => { - /// Represents active features that are currently being implemented or - /// currently being considered for addition/removal. - pub const ACTIVE_FEATURES: - &[Feature] = - &[$( - // (sym::$feature, $ver, $issue, $edition, set!($feature)) - Feature { - state: State::Active { set: set!($feature) }, - name: sym::$feature, - since: $ver, - issue: to_nonzero($issue), - edition: $edition, - } - ),+]; - - /// A set of features to be used by later passes. 
- #[derive(Clone, Default, Debug)] - pub struct Features { - /// `#![feature]` attrs for language features, for error reporting. - pub declared_lang_features: Vec<(Symbol, Span, Option)>, - /// `#![feature]` attrs for non-language (library) features. - pub declared_lib_features: Vec<(Symbol, Span)>, - /// Features enabled for this crate. - pub active_features: FxHashSet, - $( - $(#[doc = $doc])* - pub $feature: bool - ),+ - } - - impl Features { - pub fn walk_feature_fields(&self, mut f: impl FnMut(&str, bool)) { - $(f(stringify!($feature), self.$feature);)+ - } - - /// Is the given feature active? - pub fn active(&self, feature: Symbol) -> bool { - self.active_features.contains(&feature) - } - - /// Is the given feature enabled? - /// - /// Panics if the symbol doesn't correspond to a declared feature. - pub fn enabled(&self, feature: Symbol) -> bool { - match feature { - $( sym::$feature => self.$feature, )* - - _ => panic!("`{}` was not listed in `declare_features`", feature), - } - } - - /// Some features are known to be incomplete and using them is likely to have - /// unanticipated results, such as compiler crashes. We warn the user about these - /// to alert them. - pub fn incomplete(&self, feature: Symbol) -> bool { - match feature { - $( - sym::$feature => declare_features!(__status_to_enum $status) == FeatureStatus::Incomplete, - )* - // accepted and removed features aren't in this file but are never incomplete - _ if self.declared_lang_features.iter().any(|f| f.0 == feature) => false, - _ if self.declared_lib_features.iter().any(|f| f.0 == feature) => false, - _ => panic!("`{}` was not listed in `declare_features`", feature), - } - } - - /// Some features are internal to the compiler and standard library and should not - /// be used in normal projects. We warn the user about these - /// to alert them. - pub fn internal(&self, feature: Symbol) -> bool { - match feature { - $( - sym::$feature => declare_features!(__status_to_enum $status) == FeatureStatus::Internal, - )* - // accepted and removed features aren't in this file but are never internal - // (a removed feature might have been internal, but it doesn't matter anymore) - _ if self.declared_lang_features.iter().any(|f| f.0 == feature) => false, - _ if self.declared_lib_features.iter().any(|f| f.0 == feature) => false, - _ => panic!("`{}` was not listed in `declare_features`", feature), - } - } - } - }; -} - -impl Feature { - /// Sets this feature in `Features`. Panics if called on a non-active feature. - pub fn set(&self, features: &mut Features, span: Span) { - match self.state { - State::Active { set } => set(features, span), - _ => panic!("called `set` on feature `{}` which is not `active`", self.name), - } - } -} - -// See https://rustc-dev-guide.rust-lang.org/feature-gates.html#feature-gates for more -// documentation about handling feature gates. -// -// If you change this, please modify `src/doc/unstable-book` as well. -// -// Don't ever remove anything from this list; move them to `accepted.rs` if -// accepted or `removed.rs` if removed. -// -// The version numbers here correspond to the version in which the current status -// was set. This is most important for knowing when a particular feature became -// stable (active). -// -// Note that the features are grouped into internal/user-facing and then -// sorted by version inside those groups. This is enforced with tidy. -// -// N.B., `tools/tidy/src/features.rs` parses this information directly out of the -// source, so take care when modifying it. 
- -#[rustfmt::skip] -declare_features! ( - // ------------------------------------------------------------------------- - // feature-group-start: internal feature gates (no tracking issue) - // ------------------------------------------------------------------------- - // no-tracking-issue-start - - /// Allows using the `unadjusted` ABI; perma-unstable. - (active, abi_unadjusted, "1.16.0", None, None), - /// Allows using the `vectorcall` ABI. - (active, abi_vectorcall, "1.7.0", None, None), - /// Allows using `#![needs_allocator]`, an implementation detail of `#[global_allocator]`. - (internal, allocator_internals, "1.20.0", None, None), - /// Allows using `#[allow_internal_unsafe]`. This is an - /// attribute on `macro_rules!` and can't use the attribute handling - /// below (it has to be checked before expansion possibly makes - /// macros disappear). - (internal, allow_internal_unsafe, "1.0.0", None, None), - /// Allows using `#[allow_internal_unstable]`. This is an - /// attribute on `macro_rules!` and can't use the attribute handling - /// below (it has to be checked before expansion possibly makes - /// macros disappear). - (internal, allow_internal_unstable, "1.0.0", None, None), - /// Allows using anonymous lifetimes in argument-position impl-trait. - (active, anonymous_lifetime_in_impl_trait, "1.63.0", None, None), - /// Allows identifying the `compiler_builtins` crate. - (internal, compiler_builtins, "1.13.0", None, None), - /// Allows writing custom MIR - (internal, custom_mir, "1.65.0", None, None), - /// Outputs useful `assert!` messages - (active, generic_assert, "1.63.0", None, None), - /// Allows using the `rust-intrinsic`'s "ABI". - (internal, intrinsics, "1.0.0", None, None), - /// Allows using `#[lang = ".."]` attribute for linking items to special compiler logic. - (internal, lang_items, "1.0.0", None, None), - /// Allows `#[link(..., cfg(..))]`; perma-unstable per #37406 - (active, link_cfg, "1.14.0", None, None), - /// Allows the `multiple_supertrait_upcastable` lint. - (active, multiple_supertrait_upcastable, "1.69.0", None, None), - /// Allow negative trait bounds. This is an internal-only feature for testing the trait solver! - (incomplete, negative_bounds, "1.71.0", None, None), - /// Allows using `#[omit_gdb_pretty_printer_section]`. - (internal, omit_gdb_pretty_printer_section, "1.5.0", None, None), - /// Allows using `#[prelude_import]` on glob `use` items. - (internal, prelude_import, "1.2.0", None, None), - /// Used to identify crates that contain the profiler runtime. - (internal, profiler_runtime, "1.18.0", None, None), - /// Allows using `rustc_*` attributes (RFC 572). - (internal, rustc_attrs, "1.0.0", None, None), - /// Allows using the `#[stable]` and `#[unstable]` attributes. - (internal, staged_api, "1.0.0", None, None), - /// Added for testing E0705; perma-unstable. - (internal, test_2018_feature, "1.31.0", None, Some(Edition::Edition2018)), - /// Added for testing unstable lints; perma-unstable. - (internal, test_unstable_lint, "1.60.0", None, None), - /// Allows non-`unsafe` —and thus, unsound— access to `Pin` constructions. - /// Marked `internal` since perma-unstable and unsound. - (internal, unsafe_pin_internals, "1.60.0", None, None), - /// Use for stable + negative coherence and strict coherence depending on trait's - /// rustc_strict_coherence value. - (active, with_negative_coherence, "1.60.0", None, None), - // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! - // Features are listed in alphabetical order. 
Tidy will fail if you don't keep it this way. - // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! - - // no-tracking-issue-end - // ------------------------------------------------------------------------- - // feature-group-end: internal feature gates (no tracking issue) - // ------------------------------------------------------------------------- - - // ------------------------------------------------------------------------- - // feature-group-start: internal feature gates - // ------------------------------------------------------------------------- - - /// Allows features specific to auto traits. - /// Renamed from `optin_builtin_traits`. - (active, auto_traits, "1.50.0", Some(13231), None), - /// Allows using `box` in patterns (RFC 469). - (active, box_patterns, "1.0.0", Some(29641), None), - /// Allows `#[doc(notable_trait)]`. - /// Renamed from `doc_spotlight`. - (active, doc_notable_trait, "1.52.0", Some(45040), None), - /// Allows using the `may_dangle` attribute (RFC 1327). - (active, dropck_eyepatch, "1.10.0", Some(34761), None), - /// Allows using the `#[fundamental]` attribute. - (active, fundamental, "1.0.0", Some(29635), None), - /// Allows using `#[link_name="llvm.*"]`. - (internal, link_llvm_intrinsics, "1.0.0", Some(29602), None), - /// Allows using the `#[linkage = ".."]` attribute. - (active, linkage, "1.0.0", Some(29603), None), - /// Allows declaring with `#![needs_panic_runtime]` that a panic runtime is needed. - (internal, needs_panic_runtime, "1.10.0", Some(32837), None), - /// Allows using the `#![panic_runtime]` attribute. - (internal, panic_runtime, "1.10.0", Some(32837), None), - /// Allows `extern "platform-intrinsic" { ... }`. - (internal, platform_intrinsics, "1.4.0", Some(27731), None), - /// Allows using `#[rustc_allow_const_fn_unstable]`. - /// This is an attribute on `const fn` for the same - /// purpose as `#[allow_internal_unstable]`. - (internal, rustc_allow_const_fn_unstable, "1.49.0", Some(69399), None), - /// Allows using compiler's own crates. - (active, rustc_private, "1.0.0", Some(27812), None), - /// Allows using internal rustdoc features like `doc(keyword)`. - (internal, rustdoc_internals, "1.58.0", Some(90418), None), - /// Allows using the `rustdoc::missing_doc_code_examples` lint - (active, rustdoc_missing_doc_code_examples, "1.31.0", Some(101730), None), - /// Allows using `#[start]` on a function indicating that it is the program entrypoint. - (active, start, "1.0.0", Some(29633), None), - /// Allows using `#[structural_match]` which indicates that a type is structurally matchable. - /// FIXME: Subsumed by trait `StructuralPartialEq`, cannot move to removed until a library - /// feature with the same name exists. - (active, structural_match, "1.8.0", Some(31434), None), - /// Allows using the `rust-call` ABI. - (active, unboxed_closures, "1.0.0", Some(29625), None), - // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! - // Features are listed in alphabetical order. Tidy will fail if you don't keep it this way. - // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! 
- - // ------------------------------------------------------------------------- - // feature-group-end: internal feature gates - // ------------------------------------------------------------------------- - - // ------------------------------------------------------------------------- - // feature-group-start: actual feature gates (target features) - // ------------------------------------------------------------------------- - - // FIXME: Document these and merge with the list below. - - // Unstable `#[target_feature]` directives. - (active, aarch64_ver_target_feature, "1.27.0", Some(44839), None), - (active, arm_target_feature, "1.27.0", Some(44839), None), - (active, avx512_target_feature, "1.27.0", Some(44839), None), - (active, bpf_target_feature, "1.54.0", Some(44839), None), - (active, csky_target_feature, "1.73.0", Some(44839), None), - (active, ermsb_target_feature, "1.49.0", Some(44839), None), - (active, hexagon_target_feature, "1.27.0", Some(44839), None), - (active, mips_target_feature, "1.27.0", Some(44839), None), - (active, powerpc_target_feature, "1.27.0", Some(44839), None), - (active, riscv_target_feature, "1.45.0", Some(44839), None), - (active, rtm_target_feature, "1.35.0", Some(44839), None), - (active, sse4a_target_feature, "1.27.0", Some(44839), None), - (active, tbm_target_feature, "1.27.0", Some(44839), None), - (active, wasm_target_feature, "1.30.0", Some(44839), None), - // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! - // Features are listed in alphabetical order. Tidy will fail if you don't keep it this way. - // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! - - // ------------------------------------------------------------------------- - // feature-group-end: actual feature gates (target features) - // ------------------------------------------------------------------------- - - // ------------------------------------------------------------------------- - // feature-group-start: actual feature gates - // ------------------------------------------------------------------------- - - /// Allows using the `amdgpu-kernel` ABI. - (active, abi_amdgpu_kernel, "1.29.0", Some(51575), None), - /// Allows `extern "avr-interrupt" fn()` and `extern "avr-non-blocking-interrupt" fn()`. - (active, abi_avr_interrupt, "1.45.0", Some(69664), None), - /// Allows `extern "C-cmse-nonsecure-call" fn()`. - (active, abi_c_cmse_nonsecure_call, "1.51.0", Some(81391), None), - /// Allows `extern "msp430-interrupt" fn()`. - (active, abi_msp430_interrupt, "1.16.0", Some(38487), None), - /// Allows `extern "ptx-*" fn()`. - (active, abi_ptx, "1.15.0", Some(38788), None), - /// Allows `extern "riscv-interrupt-m" fn()` and `extern "riscv-interrupt-s" fn()`. - (active, abi_riscv_interrupt, "1.73.0", Some(111889), None), - /// Allows `extern "x86-interrupt" fn()`. - (active, abi_x86_interrupt, "1.17.0", Some(40180), None), - /// Allows additional const parameter types, such as `&'static str` or user defined types - (incomplete, adt_const_params, "1.56.0", Some(95174), None), - /// Allows defining an `#[alloc_error_handler]`. - (active, alloc_error_handler, "1.29.0", Some(51540), None), - /// Allows trait methods with arbitrary self types. - (active, arbitrary_self_types, "1.23.0", Some(44874), None), - /// Allows using `const` operands in inline assembly. - (active, asm_const, "1.58.0", Some(93332), None), - /// Enables experimental inline assembly support for additional architectures. 
- (active, asm_experimental_arch, "1.58.0", Some(93335), None), - /// Allows the `may_unwind` option in inline assembly. - (active, asm_unwind, "1.58.0", Some(93334), None), - /// Allows users to enforce equality of associated constants `TraitImpl`. - (active, associated_const_equality, "1.58.0", Some(92827), None), - /// Allows the user of associated type bounds. - (active, associated_type_bounds, "1.34.0", Some(52662), None), - /// Allows associated type defaults. - (active, associated_type_defaults, "1.2.0", Some(29661), None), - /// Allows `async || body` closures. - (active, async_closure, "1.37.0", Some(62290), None), - /// Allows async functions to be declared, implemented, and used in traits. - (active, async_fn_in_trait, "1.66.0", Some(91611), None), - /// Allows `#[track_caller]` on async functions. - (active, async_fn_track_caller, "1.73.0", Some(110011), None), - /// Allows builtin # foo() syntax - (active, builtin_syntax, "1.71.0", Some(110680), None), - /// Allows `c"foo"` literals. - (active, c_str_literals, "1.71.0", Some(105723), None), - /// Treat `extern "C"` function as nounwind. - (active, c_unwind, "1.52.0", Some(74990), None), - /// Allows using C-variadics. - (active, c_variadic, "1.34.0", Some(44930), None), - /// Allows the use of `#[cfg(overflow_checks)` to check if integer overflow behaviour. - (active, cfg_overflow_checks, "1.71.0", Some(111466), None), - /// Provides the relocation model information as cfg entry - (active, cfg_relocation_model, "1.73.0", Some(114929), None), - /// Allows the use of `#[cfg(sanitize = "option")]`; set when -Zsanitizer is used. - (active, cfg_sanitize, "1.41.0", Some(39699), None), - /// Allows `cfg(target_abi = "...")`. - (active, cfg_target_abi, "1.55.0", Some(80970), None), - /// Allows `cfg(target(abi = "..."))`. - (active, cfg_target_compact, "1.63.0", Some(96901), None), - /// Allows `cfg(target_has_atomic_load_store = "...")`. - (active, cfg_target_has_atomic, "1.60.0", Some(94039), None), - /// Allows `cfg(target_has_atomic_equal_alignment = "...")`. - (active, cfg_target_has_atomic_equal_alignment, "1.60.0", Some(93822), None), - /// Allows `cfg(target_thread_local)`. - (active, cfg_target_thread_local, "1.7.0", Some(29594), None), - /// Allow conditional compilation depending on rust version - (active, cfg_version, "1.45.0", Some(64796), None), - /// Allows to use the `#[cfi_encoding = ""]` attribute. - (active, cfi_encoding, "1.71.0", Some(89653), None), - /// Allows `for<...>` on closures and generators. - (active, closure_lifetime_binder, "1.64.0", Some(97362), None), - /// Allows `#[track_caller]` on closures and generators. - (active, closure_track_caller, "1.57.0", Some(87417), None), - /// Allows to use the `#[cmse_nonsecure_entry]` attribute. - (active, cmse_nonsecure_entry, "1.48.0", Some(75835), None), - /// Allows use of the `#[collapse_debuginfo]` attribute. - (active, collapse_debuginfo, "1.65.0", Some(100758), None), - /// Allows `async {}` expressions in const contexts. - (active, const_async_blocks, "1.53.0", Some(85368), None), - /// Allows `const || {}` closures in const contexts. - (incomplete, const_closures, "1.68.0", Some(106003), None), - /// Allows the definition of `const extern fn` and `const unsafe extern fn`. - (active, const_extern_fn, "1.40.0", Some(64926), None), - /// Allows basic arithmetic on floating point types in a `const fn`. - (active, const_fn_floating_point_arithmetic, "1.48.0", Some(57241), None), - /// Allows `for _ in _` loops in const contexts. 
- (active, const_for, "1.56.0", Some(87575), None), - /// Allows using `&mut` in constant functions. - (active, const_mut_refs, "1.41.0", Some(57349), None), - /// Be more precise when looking for live drops in a const context. - (active, const_precise_live_drops, "1.46.0", Some(73255), None), - /// Allows references to types with interior mutability within constants - (active, const_refs_to_cell, "1.51.0", Some(80384), None), - /// Allows `impl const Trait for T` syntax. - (active, const_trait_impl, "1.42.0", Some(67792), None), - /// Allows the `?` operator in const contexts. - (active, const_try, "1.56.0", Some(74935), None), - /// Allows function attribute `#[coverage(on/off)]`, to control coverage - /// instrumentation of that function. - (active, coverage_attribute, "1.74.0", Some(84605), None), - /// Allows users to provide classes for fenced code block using `class:classname`. - (active, custom_code_classes_in_docs, "1.74.0", Some(79483), None), - /// Allows non-builtin attributes in inner attribute position. - (active, custom_inner_attributes, "1.30.0", Some(54726), None), - /// Allows custom test frameworks with `#![test_runner]` and `#[test_case]`. - (active, custom_test_frameworks, "1.30.0", Some(50297), None), - /// Allows declarative macros 2.0 (`macro`). - (active, decl_macro, "1.17.0", Some(39412), None), - /// Allows default type parameters to influence type inference. - (active, default_type_parameter_fallback, "1.3.0", Some(27336), None), - /// Allows using `#[deprecated_safe]` to deprecate the safeness of a function or trait - (active, deprecated_safe, "1.61.0", Some(94978), None), - /// Allows having using `suggestion` in the `#[deprecated]` attribute. - (active, deprecated_suggestion, "1.61.0", Some(94785), None), - /// Allows using the `#[diagnostic]` attribute tool namespace - (active, diagnostic_namespace, "1.73.0", Some(111996), None), - /// Controls errors in trait implementations. - (active, do_not_recommend, "1.67.0", Some(51992), None), - /// Tells rustdoc to automatically generate `#[doc(cfg(...))]`. - (active, doc_auto_cfg, "1.58.0", Some(43781), None), - /// Allows `#[doc(cfg(...))]`. - (active, doc_cfg, "1.21.0", Some(43781), None), - /// Allows `#[doc(cfg_hide(...))]`. - (active, doc_cfg_hide, "1.57.0", Some(43781), None), - /// Allows `#[doc(masked)]`. - (active, doc_masked, "1.21.0", Some(44027), None), - /// Allows `dyn* Trait` objects. - (incomplete, dyn_star, "1.65.0", Some(102425), None), - // Uses generic effect parameters for ~const bounds - (active, effects, "1.72.0", Some(102090), None), - /// Allows `X..Y` patterns. - (active, exclusive_range_pattern, "1.11.0", Some(37854), None), - /// Allows exhaustive pattern matching on types that contain uninhabited types. - (active, exhaustive_patterns, "1.13.0", Some(51085), None), - /// Allows explicit tail calls via `become` expression. - (incomplete, explicit_tail_calls, "1.72.0", Some(112788), None), - /// Allows using `efiapi`, `sysv64` and `win64` as calling convention - /// for functions with varargs. - (active, extended_varargs_abi_support, "1.65.0", Some(100189), None), - /// Allows defining `extern type`s. - (active, extern_types, "1.23.0", Some(43467), None), - /// Allows the use of `#[ffi_const]` on foreign functions. - (active, ffi_const, "1.45.0", Some(58328), None), - /// Allows the use of `#[ffi_pure]` on foreign functions. - (active, ffi_pure, "1.45.0", Some(58329), None), - /// Allows using `#[ffi_returns_twice]` on foreign functions. 
- (active, ffi_returns_twice, "1.34.0", Some(58314), None), - /// Allows using `#[repr(align(...))]` on function items - (active, fn_align, "1.53.0", Some(82232), None), - /// Allows generators to be cloned. - (active, generator_clone, "1.65.0", Some(95360), None), - /// Allows defining generators. - (active, generators, "1.21.0", Some(43122), None), - /// Infer generic args for both consts and types. - (active, generic_arg_infer, "1.55.0", Some(85077), None), - /// An extension to the `generic_associated_types` feature, allowing incomplete features. - (incomplete, generic_associated_types_extended, "1.61.0", Some(95451), None), - /// Allows non-trivial generic constants which have to have wfness manually propagated to callers - (incomplete, generic_const_exprs, "1.56.0", Some(76560), None), - /// Allows generic parameters and where-clauses on free & associated const items. - (incomplete, generic_const_items, "1.73.0", Some(113521), None), - /// Allows using `..=X` as a patterns in slices. - (active, half_open_range_patterns_in_slices, "1.66.0", Some(67264), None), - /// Allows `if let` guard in match arms. - (active, if_let_guard, "1.47.0", Some(51114), None), - /// Allows `impl Trait` to be used inside associated types (RFC 2515). - (active, impl_trait_in_assoc_type, "1.70.0", Some(63063), None), - /// Allows `impl Trait` as output type in `Fn` traits in return position of functions. - (active, impl_trait_in_fn_trait_return, "1.64.0", Some(99697), None), - /// Allows using imported `main` function - (active, imported_main, "1.53.0", Some(28937), None), - /// Allows associated types in inherent impls. - (incomplete, inherent_associated_types, "1.52.0", Some(8995), None), - /// Allow anonymous constants from an inline `const` block - (active, inline_const, "1.49.0", Some(76001), None), - /// Allow anonymous constants from an inline `const` block in pattern position - (incomplete, inline_const_pat, "1.58.0", Some(76001), None), - /// Allows using `pointer` and `reference` in intra-doc links - (active, intra_doc_pointers, "1.51.0", Some(80896), None), - // Allows setting the threshold for the `large_assignments` lint. - (active, large_assignments, "1.52.0", Some(83518), None), - /// Allow to have type alias types for inter-crate use. - (incomplete, lazy_type_alias, "1.72.0", Some(112792), None), - /// Allows `if/while p && let q = r && ...` chains. - (active, let_chains, "1.37.0", Some(53667), None), - /// Allows using `reason` in lint attributes and the `#[expect(lint)]` lint check. - (active, lint_reasons, "1.31.0", Some(54503), None), - /// Give access to additional metadata about declarative macro meta-variables. - (active, macro_metavar_expr, "1.61.0", Some(83527), None), - /// Allows `#[marker]` on certain traits allowing overlapping implementations. - (active, marker_trait_attr, "1.30.0", Some(29864), None), - /// A minimal, sound subset of specialization intended to be used by the - /// standard library until the soundness issues with specialization - /// are fixed. - (active, min_specialization, "1.7.0", Some(31844), None), - /// Allows qualified paths in struct expressions, struct patterns and tuple struct patterns. - (active, more_qualified_paths, "1.54.0", Some(86935), None), - /// Allows the `#[must_not_suspend]` attribute. - (active, must_not_suspend, "1.57.0", Some(83310), None), - /// Allows using `#[naked]` on functions. 
- (active, naked_functions, "1.9.0", Some(32408), None), - /// Allows specifying the as-needed link modifier - (active, native_link_modifiers_as_needed, "1.53.0", Some(81490), None), - /// Allow negative trait implementations. - (active, negative_impls, "1.44.0", Some(68318), None), - /// Allows the `!` type. Does not imply 'exhaustive_patterns' (below) any more. - (active, never_type, "1.13.0", Some(35121), None), - /// Allows diverging expressions to fall back to `!` rather than `()`. - (active, never_type_fallback, "1.41.0", Some(65992), None), - /// Allows `#![no_core]`. - (active, no_core, "1.3.0", Some(29639), None), - /// Allows the use of `no_sanitize` attribute. - (active, no_sanitize, "1.42.0", Some(39699), None), - /// Allows using the `non_exhaustive_omitted_patterns` lint. - (active, non_exhaustive_omitted_patterns_lint, "1.57.0", Some(89554), None), - /// Allows `for` binders in where-clauses - (incomplete, non_lifetime_binders, "1.69.0", Some(108185), None), - /// Allows making `dyn Trait` well-formed even if `Trait` is not object safe. - /// In that case, `dyn Trait: Trait` does not hold. Moreover, coercions and - /// casts in safe Rust to `dyn Trait` for such a `Trait` is also forbidden. - (active, object_safe_for_dispatch, "1.40.0", Some(43561), None), - /// Allows using `#[optimize(X)]`. - (active, optimize_attribute, "1.34.0", Some(54882), None), - /// Allows using `#![plugin(myplugin)]`. - (active, plugin, "1.0.0", Some(29597), None), - /// Allows exhaustive integer pattern matching on `usize` and `isize`. - (active, precise_pointer_size_matching, "1.32.0", Some(56354), None), - /// Allows macro attributes on expressions, statements and non-inline modules. - (active, proc_macro_hygiene, "1.30.0", Some(54727), None), - /// Allows `&raw const $place_expr` and `&raw mut $place_expr` expressions. - (active, raw_ref_op, "1.41.0", Some(64490), None), - /// Allows using the `#[register_tool]` attribute. - (active, register_tool, "1.41.0", Some(66079), None), - /// Allows the `#[repr(i128)]` attribute for enums. - (incomplete, repr128, "1.16.0", Some(56071), None), - /// Allows `repr(simd)` and importing the various simd intrinsics. - (active, repr_simd, "1.4.0", Some(27731), None), - /// Allows return-position `impl Trait` in traits. - (active, return_position_impl_trait_in_trait, "1.65.0", Some(91611), None), - /// Allows bounding the return type of AFIT/RPITIT. - (incomplete, return_type_notation, "1.70.0", Some(109417), None), - /// Allows `extern "rust-cold"`. - (active, rust_cold_cc, "1.63.0", Some(97544), None), - /// Allows the use of SIMD types in functions declared in `extern` blocks. - (active, simd_ffi, "1.0.0", Some(27731), None), - /// Allows specialization of implementations (RFC 1210). - (incomplete, specialization, "1.7.0", Some(31844), None), - /// Allows attributes on expressions and non-item statements. - (active, stmt_expr_attributes, "1.6.0", Some(15701), None), - /// Allows lints part of the strict provenance effort. - (active, strict_provenance, "1.61.0", Some(95228), None), - /// Allows string patterns to dereference values to match them. - (active, string_deref_patterns, "1.67.0", Some(87121), None), - /// Allows the use of `#[target_feature]` on safe functions. - (active, target_feature_11, "1.45.0", Some(69098), None), - /// Allows using `#[thread_local]` on `static` items. - (active, thread_local, "1.0.0", Some(29594), None), - /// Allows defining `trait X = A + B;` alias items. 
- (active, trait_alias, "1.24.0", Some(41517), None), - /// Allows dyn upcasting trait objects via supertraits. - /// Dyn upcasting is casting, e.g., `dyn Foo -> dyn Bar` where `Foo: Bar`. - (active, trait_upcasting, "1.56.0", Some(65991), None), - /// Allows for transmuting between arrays with sizes that contain generic consts. - (active, transmute_generic_consts, "1.70.0", Some(109929), None), - /// Allows #[repr(transparent)] on unions (RFC 2645). - (active, transparent_unions, "1.37.0", Some(60405), None), - /// Allows inconsistent bounds in where clauses. - (active, trivial_bounds, "1.28.0", Some(48214), None), - /// Allows using `try {...}` expressions. - (active, try_blocks, "1.29.0", Some(31436), None), - /// Allows `impl Trait` to be used inside type aliases (RFC 2515). - (active, type_alias_impl_trait, "1.38.0", Some(63063), None), - /// Allows the use of type ascription in expressions. - (active, type_ascription, "1.6.0", Some(23416), None), - /// Allows creation of instances of a struct by moving fields that have - /// not changed from prior instances of the same struct (RFC #2528) - (active, type_changing_struct_update, "1.58.0", Some(86555), None), - /// Allows using type privacy lints (`private_interfaces`, `private_bounds`, `unnameable_types`). - (active, type_privacy_lints, "1.72.0", Some(48054), None), - /// Enables rustc to generate code that instructs libstd to NOT ignore SIGPIPE. - (active, unix_sigpipe, "1.65.0", Some(97889), None), - /// Allows unnamed fields of struct and union type - (incomplete, unnamed_fields, "1.74.0", Some(49804), None), - /// Allows unsized fn parameters. - (active, unsized_fn_params, "1.49.0", Some(48055), None), - /// Allows unsized rvalues at arguments and parameters. - (incomplete, unsized_locals, "1.30.0", Some(48055), None), - /// Allows unsized tuple coercion. - (active, unsized_tuple_coercion, "1.20.0", Some(42877), None), - /// Allows using the `#[used(linker)]` (or `#[used(compiler)]`) attribute. - (active, used_with_arg, "1.60.0", Some(93798), None), - /// Allows `extern "wasm" fn` - (active, wasm_abi, "1.53.0", Some(83788), None), - /// Allows `do yeet` expressions - (active, yeet_expr, "1.62.0", Some(96373), None), - // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! - // Features are listed in alphabetical order. Tidy will fail if you don't keep it this way. - // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! - - // ------------------------------------------------------------------------- - // feature-group-end: actual feature gates - // ------------------------------------------------------------------------- -); - -/// Some features are not allowed to be used together at the same time, if -/// the two are present, produce an error. -/// -/// Currently empty, but we will probably need this again in the future, -/// so let's keep it in for now. 
-pub const INCOMPATIBLE_FEATURES: &[(Symbol, Symbol)] = &[]; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/builtin_attrs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/builtin_attrs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/builtin_attrs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/builtin_attrs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -417,24 +417,6 @@ naked_functions, experimental!(naked) ), - // Plugins: - BuiltinAttribute { - name: sym::plugin, - only_local: false, - type_: CrateLevel, - template: template!(List: "name"), - duplicates: DuplicatesOk, - gate: Gated( - Stability::Deprecated( - "https://github.com/rust-lang/rust/pull/64675", - Some("may be removed in a future compiler version"), - ), - sym::plugin, - "compiler plugins are deprecated", - cfg_fn!(plugin) - ), - }, - // Testing: gated!( test_runner, CrateLevel, template!(List: "path"), ErrorFollowing, custom_test_frameworks, @@ -666,6 +648,10 @@ rustc_attr!( rustc_do_not_const_check, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE ), + // Ensure the argument to this function is &&str during const-check. + rustc_attr!( + rustc_const_panic_str, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE + ), // ========================================================================== // Internal attributes, Layout related: @@ -821,6 +807,7 @@ rustc_attr!(TEST, rustc_strict_coherence, Normal, template!(Word), WarnFollowing), rustc_attr!(TEST, rustc_variance, Normal, template!(Word), WarnFollowing), rustc_attr!(TEST, rustc_variance_of_opaques, Normal, template!(Word), WarnFollowing), + rustc_attr!(TEST, rustc_hidden_type_of_opaques, Normal, template!(Word), WarnFollowing), rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ..."), WarnFollowing), rustc_attr!(TEST, rustc_abi, Normal, template!(List: "field1, field2, ..."), WarnFollowing), rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,44 +11,26 @@ //! even if it is stabilized or removed, *do not remove it*. Instead, move the //! symbol to the `accepted` or `removed` modules respectively. +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] #![feature(lazy_cell)] #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] mod accepted; -mod active; mod builtin_attrs; mod removed; +mod unstable; #[cfg(test)] mod tests; -use rustc_span::{edition::Edition, symbol::Symbol, Span}; -use std::fmt; +use rustc_span::{edition::Edition, symbol::Symbol}; use std::num::NonZeroU32; -#[derive(Clone, Copy)] -pub enum State { - Accepted, - Active { set: fn(&mut Features, Span) }, - Removed { reason: Option<&'static str> }, - Stabilized { reason: Option<&'static str> }, -} - -impl fmt::Debug for State { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - State::Accepted { .. } => write!(f, "accepted"), - State::Active { .. } => write!(f, "active"), - State::Removed { .. 
} => write!(f, "removed"), - State::Stabilized { .. } => write!(f, "stabilized"), - } - } -} - #[derive(Debug, Clone)] pub struct Feature { - pub state: State, pub name: Symbol, pub since: &'static str, issue: Option<NonZeroU32>, @@ -65,9 +47,9 @@ #[derive(Clone, Copy, Debug, Hash)] pub enum UnstableFeatures { - /// Hard errors for unstable features are active, as on beta/stable channels. + /// Disallow use of unstable features, as on beta/stable channels. Disallow, - /// Allow features to be activated, as on nightly. + /// Allow use of unstable features, as on nightly. Allow, /// Errors are bypassed for bootstrapping. This is required any time /// during the build that feature-related lints are set to warn or above @@ -79,8 +61,8 @@ impl UnstableFeatures { /// This takes into account `RUSTC_BOOTSTRAP`. /// - /// If `krate` is [`Some`], then setting `RUSTC_BOOTSTRAP=krate` will enable the nightly features. - /// Otherwise, only `RUSTC_BOOTSTRAP=1` will work. + /// If `krate` is [`Some`], then setting `RUSTC_BOOTSTRAP=krate` will enable the nightly + /// features. Otherwise, only `RUSTC_BOOTSTRAP=1` will work. pub fn from_environment(krate: Option<&str>) -> Self { // `true` if this is a feature-staged build, i.e., on the beta or stable channel. let disable_unstable_features = @@ -107,20 +89,17 @@ } fn find_lang_feature_issue(feature: Symbol) -> Option<NonZeroU32> { - if let Some(info) = ACTIVE_FEATURES.iter().find(|t| t.name == feature) { - info.issue - } else { - // search in Accepted, Removed, or Stable Removed features - let found = ACCEPTED_FEATURES - .iter() - .chain(REMOVED_FEATURES) - .chain(STABLE_REMOVED_FEATURES) - .find(|t| t.name == feature); - match found { - Some(found) => found.issue, - None => panic!("feature `{feature}` is not declared anywhere"), - } + // Search in all the feature lists. + if let Some(f) = UNSTABLE_FEATURES.iter().find(|f| f.feature.name == feature) { + return f.feature.issue; + } + if let Some(f) = ACCEPTED_FEATURES.iter().find(|f| f.name == feature) { + return f.issue; + } + if let Some(f) = REMOVED_FEATURES.iter().find(|f| f.feature.name == feature) { + return f.feature.issue; } + panic!("feature `{feature}` is not declared anywhere"); } const fn to_nonzero(n: Option<u32>) -> Option<NonZeroU32> { @@ -145,11 +124,11 @@ } pub use accepted::ACCEPTED_FEATURES; -pub use active::{Features, ACTIVE_FEATURES, INCOMPATIBLE_FEATURES}; pub use builtin_attrs::AttributeDuplicates; pub use builtin_attrs::{ deprecated_attributes, find_gated_cfg, is_builtin_attr_name, is_builtin_only_local, is_valid_for_get_attr, AttributeGate, AttributeTemplate, AttributeType, BuiltinAttribute, GatedCfg, BUILTIN_ATTRIBUTES, BUILTIN_ATTRIBUTE_MAP, }; -pub use removed::{REMOVED_FEATURES, STABLE_REMOVED_FEATURES}; +pub use removed::REMOVED_FEATURES; +pub use unstable::{Features, INCOMPATIBLE_FEATURES, UNSTABLE_FEATURES}; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/removed.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/removed.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/removed.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/removed.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,40 +1,28 @@ //! List of the removed feature gates. -use super::{to_nonzero, Feature, State}; +use super::{to_nonzero, Feature}; use rustc_span::symbol::sym; +pub struct RemovedFeature { + pub feature: Feature, + pub reason: Option<&'static str>, +} + macro_rules!
declare_features { ($( $(#[doc = $doc:tt])* (removed, $feature:ident, $ver:expr, $issue:expr, None, $reason:expr), )+) => { - /// Represents unstable features which have since been removed (it was once Active) - pub const REMOVED_FEATURES: &[Feature] = &[ - $( - Feature { - state: State::Removed { reason: $reason }, + /// Formerly unstable features that have now been removed. + pub const REMOVED_FEATURES: &[RemovedFeature] = &[ + $(RemovedFeature { + feature: Feature { name: sym::$feature, since: $ver, issue: to_nonzero($issue), edition: None, - } - ),+ - ]; - }; - - ($( - $(#[doc = $doc:tt])* (stable_removed, $feature:ident, $ver:expr, $issue:expr, None), - )+) => { - /// Represents stable features which have since been removed (it was once Accepted) - pub const STABLE_REMOVED_FEATURES: &[Feature] = &[ - $( - Feature { - state: State::Stabilized { reason: None }, - name: sym::$feature, - since: $ver, - issue: to_nonzero($issue), - edition: None, - } - ),+ + }, + reason: $reason + }),+ ]; }; } @@ -54,7 +42,7 @@ Some("subsumed by `.await` syntax")), /// Allows using the `box $expr` syntax. (removed, box_syntax, "1.70.0", Some(49733), None, Some("replaced with `#[rustc_box]`")), - /// Allows capturing disjoint fields in a closure/generator (RFC 2229). + /// Allows capturing disjoint fields in a closure/coroutine (RFC 2229). (removed, capture_disjoint_fields, "1.49.0", Some(53488), None, Some("stabilized in Rust 2021")), /// Allows comparing raw pointers during const eval. (removed, const_compare_raw_pointers, "1.46.0", Some(53020), None, @@ -108,6 +96,10 @@ /// Allows `#[doc(include = "some-file")]`. (removed, external_doc, "1.54.0", Some(44732), None, Some("use #[doc = include_str!(\"filename\")] instead, which handles macro invocations")), + /// Allows generators to be cloned. + (removed, generator_clone, "1.65.0", Some(95360), None, Some("renamed to `coroutine_clone`")), + /// Allows defining generators. + (removed, generators, "1.21.0", Some(43122), None, Some("renamed to `coroutines`")), /// Allows `impl Trait` in bindings (`let`, `const`, `static`). (removed, impl_trait_in_bindings, "1.55.0", Some(63065), None, Some("the implementation was not maintainable, the feature may get reintroduced once the current refactorings are done")), @@ -141,6 +133,11 @@ (removed, no_coverage, "1.74.0", Some(84605), None, Some("renamed to `coverage_attribute`")), /// Allows `#[no_debug]`. (removed, no_debug, "1.43.0", Some(29721), None, Some("removed due to lack of demand")), + /// Note: this feature was previously recorded in a separate + /// `STABLE_REMOVED` list because it, uniquely, was once stable but was + /// then removed. But there was no utility storing it separately, so now + /// it's in this list. + (removed, no_stack_check, "1.0.0", None, None, None), /// Allows using `#[on_unimplemented(..)]` on traits. /// (Moved to `rustc_attrs`.) (removed, on_unimplemented, "1.40.0", None, None, None), @@ -155,9 +152,12 @@ Some("removed in favor of `#![feature(marker_trait_attr)]`")), (removed, panic_implementation, "1.28.0", Some(44489), None, Some("subsumed by `#[panic_handler]`")), + /// Allows using `#![plugin(myplugin)]`. + (removed, plugin, "1.75.0", Some(29597), None, + Some("plugins are no longer supported")), /// Allows using `#[plugin_registrar]` on functions. 
(removed, plugin_registrar, "1.54.0", Some(29597), None, - Some("a __rustc_plugin_registrar symbol must now be defined instead")), + Some("plugins are no longer supported")), (removed, proc_macro_expr, "1.27.0", Some(54727), None, Some("subsumed by `#![feature(proc_macro_hygiene)]`")), (removed, proc_macro_gen, "1.27.0", Some(54727), None, @@ -208,8 +208,3 @@ // feature-group-end: removed features // ------------------------------------------------------------------------- ); - -#[rustfmt::skip] -declare_features! ( - (stable_removed, no_stack_check, "1.0.0", None, None), -); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/unstable.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/unstable.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/unstable.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_feature/src/unstable.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,615 @@ +//! List of the unstable feature gates. + +use super::{to_nonzero, Feature}; + +use rustc_data_structures::fx::FxHashSet; +use rustc_span::edition::Edition; +use rustc_span::symbol::{sym, Symbol}; +use rustc_span::Span; + +pub struct UnstableFeature { + pub feature: Feature, + pub set_enabled: fn(&mut Features), +} + +#[derive(PartialEq)] +enum FeatureStatus { + Default, + Incomplete, + Internal, +} + +macro_rules! status_to_enum { + (unstable) => { + FeatureStatus::Default + }; + (incomplete) => { + FeatureStatus::Incomplete + }; + (internal) => { + FeatureStatus::Internal + }; +} + +macro_rules! declare_features { + ($( + $(#[doc = $doc:tt])* ($status:ident, $feature:ident, $ver:expr, $issue:expr, $edition:expr), + )+) => { + /// Unstable language features that are being implemented or being + /// considered for acceptance (stabilization) or removal. + pub const UNSTABLE_FEATURES: &[UnstableFeature] = &[ + $(UnstableFeature { + feature: Feature { + name: sym::$feature, + since: $ver, + issue: to_nonzero($issue), + edition: $edition, + }, + // Sets this feature's corresponding bool within `features`. + set_enabled: |features| features.$feature = true, + }),+ + ]; + + /// A set of features to be used by later passes. + #[derive(Clone, Default, Debug)] + pub struct Features { + /// `#![feature]` attrs for language features, for error reporting. + pub declared_lang_features: Vec<(Symbol, Span, Option<Symbol>)>, + /// `#![feature]` attrs for non-language (library) features. + pub declared_lib_features: Vec<(Symbol, Span)>, + /// `declared_lang_features` + `declared_lib_features`. + pub declared_features: FxHashSet<Symbol>, + /// Active state of individual features (unstable only). + $( + $(#[doc = $doc])* + pub $feature: bool + ),+ + } + + impl Features { + pub fn set_declared_lang_feature( + &mut self, + symbol: Symbol, + span: Span, + since: Option<Symbol> + ) { + self.declared_lang_features.push((symbol, span, since)); + self.declared_features.insert(symbol); + } + + pub fn set_declared_lib_feature(&mut self, symbol: Symbol, span: Span) { + self.declared_lib_features.push((symbol, span)); + self.declared_features.insert(symbol); + } + + pub fn walk_feature_fields(&self, mut f: impl FnMut(&str, bool)) { + $(f(stringify!($feature), self.$feature);)+ + } + + /// Is the given feature explicitly declared, i.e. named in a + /// `#![feature(...)]` within the code? + pub fn declared(&self, feature: Symbol) -> bool { + self.declared_features.contains(&feature) + } + + /// Is the given feature active, i.e. declared or automatically + /// enabled due to the edition?
+ /// + /// Panics if the symbol doesn't correspond to a declared feature. + pub fn active(&self, feature: Symbol) -> bool { + match feature { + $( sym::$feature => self.$feature, )* + + _ => panic!("`{}` was not listed in `declare_features`", feature), + } + } + + /// Some features are known to be incomplete and using them is likely to have + /// unanticipated results, such as compiler crashes. We warn the user about these + /// to alert them. + pub fn incomplete(&self, feature: Symbol) -> bool { + match feature { + $( + sym::$feature => status_to_enum!($status) == FeatureStatus::Incomplete, + )* + // Accepted/removed features aren't in this file but are never incomplete. + _ if self.declared_features.contains(&feature) => false, + _ => panic!("`{}` was not listed in `declare_features`", feature), + } + } + + /// Some features are internal to the compiler and standard library and should not + /// be used in normal projects. We warn the user about these to alert them. + pub fn internal(&self, feature: Symbol) -> bool { + match feature { + $( + sym::$feature => status_to_enum!($status) == FeatureStatus::Internal, + )* + // Accepted/removed features aren't in this file but are never internal + // (a removed feature might have been internal, but that's now irrelevant). + _ if self.declared_features.contains(&feature) => false, + _ => panic!("`{}` was not listed in `declare_features`", feature), + } + } + } + }; +} + +// See https://rustc-dev-guide.rust-lang.org/feature-gates.html#feature-gates for more +// documentation about handling feature gates. +// +// If you change this, please modify `src/doc/unstable-book` as well. +// +// Don't ever remove anything from this list; move them to `accepted.rs` if +// accepted or `removed.rs` if removed. +// +// The version numbers here correspond to the version in which the current status +// was set. +// +// Note that the features are grouped into internal/user-facing and then +// sorted by version inside those groups. This is enforced with tidy. +// +// N.B., `tools/tidy/src/features.rs` parses this information directly out of the +// source, so take care when modifying it. + +#[rustfmt::skip] +declare_features! ( + // ------------------------------------------------------------------------- + // feature-group-start: internal feature gates (no tracking issue) + // ------------------------------------------------------------------------- + // no-tracking-issue-start + + /// Allows using the `unadjusted` ABI; perma-unstable. + (unstable, abi_unadjusted, "1.16.0", None, None), + /// Allows using the `vectorcall` ABI. + (unstable, abi_vectorcall, "1.7.0", None, None), + /// Allows using `#![needs_allocator]`, an implementation detail of `#[global_allocator]`. + (internal, allocator_internals, "1.20.0", None, None), + /// Allows using `#[allow_internal_unsafe]`. This is an + /// attribute on `macro_rules!` and can't use the attribute handling + /// below (it has to be checked before expansion possibly makes + /// macros disappear). + (internal, allow_internal_unsafe, "1.0.0", None, None), + /// Allows using `#[allow_internal_unstable]`. This is an + /// attribute on `macro_rules!` and can't use the attribute handling + /// below (it has to be checked before expansion possibly makes + /// macros disappear). + (internal, allow_internal_unstable, "1.0.0", None, None), + /// Allows using anonymous lifetimes in argument-position impl-trait. 
+ (unstable, anonymous_lifetime_in_impl_trait, "1.63.0", None, None), + /// Allows identifying the `compiler_builtins` crate. + (internal, compiler_builtins, "1.13.0", None, None), + /// Allows writing custom MIR + (internal, custom_mir, "1.65.0", None, None), + /// Outputs useful `assert!` messages + (unstable, generic_assert, "1.63.0", None, None), + /// Allows using the `rust-intrinsic`'s "ABI". + (internal, intrinsics, "1.0.0", None, None), + /// Allows using `#[lang = ".."]` attribute for linking items to special compiler logic. + (internal, lang_items, "1.0.0", None, None), + /// Allows `#[link(..., cfg(..))]`; perma-unstable per #37406 + (unstable, link_cfg, "1.14.0", None, None), + /// Allows the `multiple_supertrait_upcastable` lint. + (unstable, multiple_supertrait_upcastable, "1.69.0", None, None), + /// Allow negative trait bounds. This is an internal-only feature for testing the trait solver! + (incomplete, negative_bounds, "1.71.0", None, None), + /// Allows using `#[omit_gdb_pretty_printer_section]`. + (internal, omit_gdb_pretty_printer_section, "1.5.0", None, None), + /// Allows using `#[prelude_import]` on glob `use` items. + (internal, prelude_import, "1.2.0", None, None), + /// Used to identify crates that contain the profiler runtime. + (internal, profiler_runtime, "1.18.0", None, None), + /// Allows using `rustc_*` attributes (RFC 572). + (internal, rustc_attrs, "1.0.0", None, None), + /// Allows using the `#[stable]` and `#[unstable]` attributes. + (internal, staged_api, "1.0.0", None, None), + /// Added for testing E0705; perma-unstable. + (internal, test_2018_feature, "1.31.0", None, Some(Edition::Edition2018)), + /// Added for testing unstable lints; perma-unstable. + (internal, test_unstable_lint, "1.60.0", None, None), + /// Allows non-`unsafe` —and thus, unsound— access to `Pin` constructions. + /// Marked `internal` since perma-unstable and unsound. + (internal, unsafe_pin_internals, "1.60.0", None, None), + /// Use for stable + negative coherence and strict coherence depending on trait's + /// rustc_strict_coherence value. + (unstable, with_negative_coherence, "1.60.0", None, None), + // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! + // Features are listed in alphabetical order. Tidy will fail if you don't keep it this way. + // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! + + // no-tracking-issue-end + // ------------------------------------------------------------------------- + // feature-group-end: internal feature gates (no tracking issue) + // ------------------------------------------------------------------------- + + // ------------------------------------------------------------------------- + // feature-group-start: internal feature gates + // ------------------------------------------------------------------------- + + /// Allows features specific to auto traits. + /// Renamed from `optin_builtin_traits`. + (unstable, auto_traits, "1.50.0", Some(13231), None), + /// Allows using `box` in patterns (RFC 469). + (unstable, box_patterns, "1.0.0", Some(29641), None), + /// Allows `#[doc(notable_trait)]`. + /// Renamed from `doc_spotlight`. + (unstable, doc_notable_trait, "1.52.0", Some(45040), None), + /// Allows using the `may_dangle` attribute (RFC 1327). + (unstable, dropck_eyepatch, "1.10.0", Some(34761), None), + /// Allows using the `#[fundamental]` attribute. + (unstable, fundamental, "1.0.0", Some(29635), None), + /// Allows using `#[link_name="llvm.*"]`. 
+ (internal, link_llvm_intrinsics, "1.0.0", Some(29602), None), + /// Allows using the `#[linkage = ".."]` attribute. + (unstable, linkage, "1.0.0", Some(29603), None), + /// Allows declaring with `#![needs_panic_runtime]` that a panic runtime is needed. + (internal, needs_panic_runtime, "1.10.0", Some(32837), None), + /// Allows using the `#![panic_runtime]` attribute. + (internal, panic_runtime, "1.10.0", Some(32837), None), + /// Allows `extern "platform-intrinsic" { ... }`. + (internal, platform_intrinsics, "1.4.0", Some(27731), None), + /// Allows using `#[rustc_allow_const_fn_unstable]`. + /// This is an attribute on `const fn` for the same + /// purpose as `#[allow_internal_unstable]`. + (internal, rustc_allow_const_fn_unstable, "1.49.0", Some(69399), None), + /// Allows using compiler's own crates. + (unstable, rustc_private, "1.0.0", Some(27812), None), + /// Allows using internal rustdoc features like `doc(keyword)`. + (internal, rustdoc_internals, "1.58.0", Some(90418), None), + /// Allows using the `rustdoc::missing_doc_code_examples` lint + (unstable, rustdoc_missing_doc_code_examples, "1.31.0", Some(101730), None), + /// Allows using `#[start]` on a function indicating that it is the program entrypoint. + (unstable, start, "1.0.0", Some(29633), None), + /// Allows using `#[structural_match]` which indicates that a type is structurally matchable. + /// FIXME: Subsumed by trait `StructuralPartialEq`, cannot move to removed until a library + /// feature with the same name exists. + (unstable, structural_match, "1.8.0", Some(31434), None), + /// Allows using the `rust-call` ABI. + (unstable, unboxed_closures, "1.0.0", Some(29625), None), + // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! + // Features are listed in alphabetical order. Tidy will fail if you don't keep it this way. + // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! + + // ------------------------------------------------------------------------- + // feature-group-end: internal feature gates + // ------------------------------------------------------------------------- + + // ------------------------------------------------------------------------- + // feature-group-start: actual feature gates (target features) + // ------------------------------------------------------------------------- + + // FIXME: Document these and merge with the list below. + + // Unstable `#[target_feature]` directives. + (unstable, aarch64_ver_target_feature, "1.27.0", Some(44839), None), + (unstable, arm_target_feature, "1.27.0", Some(44839), None), + (unstable, avx512_target_feature, "1.27.0", Some(44839), None), + (unstable, bpf_target_feature, "1.54.0", Some(44839), None), + (unstable, csky_target_feature, "1.73.0", Some(44839), None), + (unstable, ermsb_target_feature, "1.49.0", Some(44839), None), + (unstable, hexagon_target_feature, "1.27.0", Some(44839), None), + (unstable, loongarch_target_feature, "1.73.0", Some(44839), None), + (unstable, mips_target_feature, "1.27.0", Some(44839), None), + (unstable, powerpc_target_feature, "1.27.0", Some(44839), None), + (unstable, riscv_target_feature, "1.45.0", Some(44839), None), + (unstable, rtm_target_feature, "1.35.0", Some(44839), None), + (unstable, sse4a_target_feature, "1.27.0", Some(44839), None), + (unstable, tbm_target_feature, "1.27.0", Some(44839), None), + (unstable, wasm_target_feature, "1.30.0", Some(44839), None), + // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! + // Features are listed in alphabetical order. 
Tidy will fail if you don't keep it this way. + // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! + + // ------------------------------------------------------------------------- + // feature-group-end: actual feature gates (target features) + // ------------------------------------------------------------------------- + + // ------------------------------------------------------------------------- + // feature-group-start: actual feature gates + // ------------------------------------------------------------------------- + + /// Allows using the `amdgpu-kernel` ABI. + (unstable, abi_amdgpu_kernel, "1.29.0", Some(51575), None), + /// Allows `extern "avr-interrupt" fn()` and `extern "avr-non-blocking-interrupt" fn()`. + (unstable, abi_avr_interrupt, "1.45.0", Some(69664), None), + /// Allows `extern "C-cmse-nonsecure-call" fn()`. + (unstable, abi_c_cmse_nonsecure_call, "1.51.0", Some(81391), None), + /// Allows `extern "msp430-interrupt" fn()`. + (unstable, abi_msp430_interrupt, "1.16.0", Some(38487), None), + /// Allows `extern "ptx-*" fn()`. + (unstable, abi_ptx, "1.15.0", Some(38788), None), + /// Allows `extern "riscv-interrupt-m" fn()` and `extern "riscv-interrupt-s" fn()`. + (unstable, abi_riscv_interrupt, "1.73.0", Some(111889), None), + /// Allows `extern "x86-interrupt" fn()`. + (unstable, abi_x86_interrupt, "1.17.0", Some(40180), None), + /// Allows additional const parameter types, such as `&'static str` or user defined types + (incomplete, adt_const_params, "1.56.0", Some(95174), None), + /// Allows defining an `#[alloc_error_handler]`. + (unstable, alloc_error_handler, "1.29.0", Some(51540), None), + /// Allows trait methods with arbitrary self types. + (unstable, arbitrary_self_types, "1.23.0", Some(44874), None), + /// Allows using `const` operands in inline assembly. + (unstable, asm_const, "1.58.0", Some(93332), None), + /// Enables experimental inline assembly support for additional architectures. + (unstable, asm_experimental_arch, "1.58.0", Some(93335), None), + /// Allows the `may_unwind` option in inline assembly. + (unstable, asm_unwind, "1.58.0", Some(93334), None), + /// Allows users to enforce equality of associated constants `TraitImpl`. + (unstable, associated_const_equality, "1.58.0", Some(92827), None), + /// Allows the user of associated type bounds. + (unstable, associated_type_bounds, "1.34.0", Some(52662), None), + /// Allows associated type defaults. + (unstable, associated_type_defaults, "1.2.0", Some(29661), None), + /// Allows `async || body` closures. + (unstable, async_closure, "1.37.0", Some(62290), None), + /// Allows `#[track_caller]` on async functions. + (unstable, async_fn_track_caller, "1.73.0", Some(110011), None), + /// Allows builtin # foo() syntax + (unstable, builtin_syntax, "1.71.0", Some(110680), None), + /// Allows `c"foo"` literals. + (unstable, c_str_literals, "1.71.0", Some(105723), None), + /// Treat `extern "C"` function as nounwind. + (unstable, c_unwind, "1.52.0", Some(74990), None), + /// Allows using C-variadics. + (unstable, c_variadic, "1.34.0", Some(44930), None), + /// Allows the use of `#[cfg(overflow_checks)` to check if integer overflow behaviour. + (unstable, cfg_overflow_checks, "1.71.0", Some(111466), None), + /// Provides the relocation model information as cfg entry + (unstable, cfg_relocation_model, "1.73.0", Some(114929), None), + /// Allows the use of `#[cfg(sanitize = "option")]`; set when -Zsanitizer is used. 
+ (unstable, cfg_sanitize, "1.41.0", Some(39699), None), + /// Allows `cfg(target_abi = "...")`. + (unstable, cfg_target_abi, "1.55.0", Some(80970), None), + /// Allows `cfg(target(abi = "..."))`. + (unstable, cfg_target_compact, "1.63.0", Some(96901), None), + /// Allows `cfg(target_has_atomic_load_store = "...")`. + (unstable, cfg_target_has_atomic, "1.60.0", Some(94039), None), + /// Allows `cfg(target_has_atomic_equal_alignment = "...")`. + (unstable, cfg_target_has_atomic_equal_alignment, "1.60.0", Some(93822), None), + /// Allows `cfg(target_thread_local)`. + (unstable, cfg_target_thread_local, "1.7.0", Some(29594), None), + /// Allow conditional compilation depending on rust version + (unstable, cfg_version, "1.45.0", Some(64796), None), + /// Allows to use the `#[cfi_encoding = ""]` attribute. + (unstable, cfi_encoding, "1.71.0", Some(89653), None), + /// Allows `for<...>` on closures and coroutines. + (unstable, closure_lifetime_binder, "1.64.0", Some(97362), None), + /// Allows `#[track_caller]` on closures and coroutines. + (unstable, closure_track_caller, "1.57.0", Some(87417), None), + /// Allows to use the `#[cmse_nonsecure_entry]` attribute. + (unstable, cmse_nonsecure_entry, "1.48.0", Some(75835), None), + /// Allows use of the `#[collapse_debuginfo]` attribute. + (unstable, collapse_debuginfo, "1.65.0", Some(100758), None), + /// Allows `async {}` expressions in const contexts. + (unstable, const_async_blocks, "1.53.0", Some(85368), None), + /// Allows `const || {}` closures in const contexts. + (incomplete, const_closures, "1.68.0", Some(106003), None), + /// Allows the definition of `const extern fn` and `const unsafe extern fn`. + (unstable, const_extern_fn, "1.40.0", Some(64926), None), + /// Allows basic arithmetic on floating point types in a `const fn`. + (unstable, const_fn_floating_point_arithmetic, "1.48.0", Some(57241), None), + /// Allows `for _ in _` loops in const contexts. + (unstable, const_for, "1.56.0", Some(87575), None), + /// Allows using `&mut` in constant functions. + (unstable, const_mut_refs, "1.41.0", Some(57349), None), + /// Be more precise when looking for live drops in a const context. + (unstable, const_precise_live_drops, "1.46.0", Some(73255), None), + /// Allows references to types with interior mutability within constants + (unstable, const_refs_to_cell, "1.51.0", Some(80384), None), + /// Allows `impl const Trait for T` syntax. + (unstable, const_trait_impl, "1.42.0", Some(67792), None), + /// Allows the `?` operator in const contexts. + (unstable, const_try, "1.56.0", Some(74935), None), + /// Allows coroutines to be cloned. + (unstable, coroutine_clone, "1.65.0", Some(95360), None), + /// Allows defining coroutines. + (unstable, coroutines, "1.21.0", Some(43122), None), + /// Allows function attribute `#[coverage(on/off)]`, to control coverage + /// instrumentation of that function. + (unstable, coverage_attribute, "1.74.0", Some(84605), None), + /// Allows users to provide classes for fenced code block using `class:classname`. + (unstable, custom_code_classes_in_docs, "1.74.0", Some(79483), None), + /// Allows non-builtin attributes in inner attribute position. + (unstable, custom_inner_attributes, "1.30.0", Some(54726), None), + /// Allows custom test frameworks with `#![test_runner]` and `#[test_case]`. + (unstable, custom_test_frameworks, "1.30.0", Some(50297), None), + /// Allows declarative macros 2.0 (`macro`). 
+ (unstable, decl_macro, "1.17.0", Some(39412), None), + /// Allows default type parameters to influence type inference. + (unstable, default_type_parameter_fallback, "1.3.0", Some(27336), None), + /// Allows using `#[deprecated_safe]` to deprecate the safeness of a function or trait + (unstable, deprecated_safe, "1.61.0", Some(94978), None), + /// Allows having using `suggestion` in the `#[deprecated]` attribute. + (unstable, deprecated_suggestion, "1.61.0", Some(94785), None), + /// Allows using the `#[diagnostic]` attribute tool namespace + (unstable, diagnostic_namespace, "1.73.0", Some(111996), None), + /// Controls errors in trait implementations. + (unstable, do_not_recommend, "1.67.0", Some(51992), None), + /// Tells rustdoc to automatically generate `#[doc(cfg(...))]`. + (unstable, doc_auto_cfg, "1.58.0", Some(43781), None), + /// Allows `#[doc(cfg(...))]`. + (unstable, doc_cfg, "1.21.0", Some(43781), None), + /// Allows `#[doc(cfg_hide(...))]`. + (unstable, doc_cfg_hide, "1.57.0", Some(43781), None), + /// Allows `#[doc(masked)]`. + (unstable, doc_masked, "1.21.0", Some(44027), None), + /// Allows `dyn* Trait` objects. + (incomplete, dyn_star, "1.65.0", Some(102425), None), + // Uses generic effect parameters for ~const bounds + (unstable, effects, "1.72.0", Some(102090), None), + /// Allows `X..Y` patterns. + (unstable, exclusive_range_pattern, "1.11.0", Some(37854), None), + /// Allows exhaustive pattern matching on types that contain uninhabited types. + (unstable, exhaustive_patterns, "1.13.0", Some(51085), None), + /// Allows explicit tail calls via `become` expression. + (incomplete, explicit_tail_calls, "1.72.0", Some(112788), None), + /// Allows using `efiapi`, `sysv64` and `win64` as calling convention + /// for functions with varargs. + (unstable, extended_varargs_abi_support, "1.65.0", Some(100189), None), + /// Allows defining `extern type`s. + (unstable, extern_types, "1.23.0", Some(43467), None), + /// Allows the use of `#[ffi_const]` on foreign functions. + (unstable, ffi_const, "1.45.0", Some(58328), None), + /// Allows the use of `#[ffi_pure]` on foreign functions. + (unstable, ffi_pure, "1.45.0", Some(58329), None), + /// Allows using `#[ffi_returns_twice]` on foreign functions. + (unstable, ffi_returns_twice, "1.34.0", Some(58314), None), + /// Allows using `#[repr(align(...))]` on function items + (unstable, fn_align, "1.53.0", Some(82232), None), + /// Allows defining gen blocks and `gen fn`. + (unstable, gen_blocks, "1.75.0", Some(117078), None), + /// Infer generic args for both consts and types. + (unstable, generic_arg_infer, "1.55.0", Some(85077), None), + /// An extension to the `generic_associated_types` feature, allowing incomplete features. + (incomplete, generic_associated_types_extended, "1.61.0", Some(95451), None), + /// Allows non-trivial generic constants which have to have wfness manually propagated to callers + (incomplete, generic_const_exprs, "1.56.0", Some(76560), None), + /// Allows generic parameters and where-clauses on free & associated const items. + (incomplete, generic_const_items, "1.73.0", Some(113521), None), + /// Allows using `..=X` as a patterns in slices. + (unstable, half_open_range_patterns_in_slices, "1.66.0", Some(67264), None), + /// Allows `if let` guard in match arms. + (unstable, if_let_guard, "1.47.0", Some(51114), None), + /// Allows `impl Trait` to be used inside associated types (RFC 2515). 
+ (unstable, impl_trait_in_assoc_type, "1.70.0", Some(63063), None), + /// Allows `impl Trait` as output type in `Fn` traits in return position of functions. + (unstable, impl_trait_in_fn_trait_return, "1.64.0", Some(99697), None), + /// Allows using imported `main` function + (unstable, imported_main, "1.53.0", Some(28937), None), + /// Allows associated types in inherent impls. + (incomplete, inherent_associated_types, "1.52.0", Some(8995), None), + /// Allow anonymous constants from an inline `const` block + (unstable, inline_const, "1.49.0", Some(76001), None), + /// Allow anonymous constants from an inline `const` block in pattern position + (incomplete, inline_const_pat, "1.58.0", Some(76001), None), + /// Allows using `pointer` and `reference` in intra-doc links + (unstable, intra_doc_pointers, "1.51.0", Some(80896), None), + // Allows setting the threshold for the `large_assignments` lint. + (unstable, large_assignments, "1.52.0", Some(83518), None), + /// Allow to have type alias types for inter-crate use. + (incomplete, lazy_type_alias, "1.72.0", Some(112792), None), + /// Allows `if/while p && let q = r && ...` chains. + (unstable, let_chains, "1.37.0", Some(53667), None), + /// Allows using `reason` in lint attributes and the `#[expect(lint)]` lint check. + (unstable, lint_reasons, "1.31.0", Some(54503), None), + /// Give access to additional metadata about declarative macro meta-variables. + (unstable, macro_metavar_expr, "1.61.0", Some(83527), None), + /// Allows `#[marker]` on certain traits allowing overlapping implementations. + (unstable, marker_trait_attr, "1.30.0", Some(29864), None), + /// A minimal, sound subset of specialization intended to be used by the + /// standard library until the soundness issues with specialization + /// are fixed. + (unstable, min_specialization, "1.7.0", Some(31844), None), + /// Allows qualified paths in struct expressions, struct patterns and tuple struct patterns. + (unstable, more_qualified_paths, "1.54.0", Some(86935), None), + /// Allows the `#[must_not_suspend]` attribute. + (unstable, must_not_suspend, "1.57.0", Some(83310), None), + /// Allows using `#[naked]` on functions. + (unstable, naked_functions, "1.9.0", Some(32408), None), + /// Allows specifying the as-needed link modifier + (unstable, native_link_modifiers_as_needed, "1.53.0", Some(81490), None), + /// Allow negative trait implementations. + (unstable, negative_impls, "1.44.0", Some(68318), None), + /// Allows the `!` type. Does not imply 'exhaustive_patterns' (below) any more. + (unstable, never_type, "1.13.0", Some(35121), None), + /// Allows diverging expressions to fall back to `!` rather than `()`. + (unstable, never_type_fallback, "1.41.0", Some(65992), None), + /// Allows `#![no_core]`. + (unstable, no_core, "1.3.0", Some(29639), None), + /// Allows the use of `no_sanitize` attribute. + (unstable, no_sanitize, "1.42.0", Some(39699), None), + /// Allows using the `non_exhaustive_omitted_patterns` lint. + (unstable, non_exhaustive_omitted_patterns_lint, "1.57.0", Some(89554), None), + /// Allows `for` binders in where-clauses + (incomplete, non_lifetime_binders, "1.69.0", Some(108185), None), + /// Allows making `dyn Trait` well-formed even if `Trait` is not object safe. + /// In that case, `dyn Trait: Trait` does not hold. Moreover, coercions and + /// casts in safe Rust to `dyn Trait` for such a `Trait` is also forbidden. + (unstable, object_safe_for_dispatch, "1.40.0", Some(43561), None), + /// Allows using enums in offset_of! 
+ (unstable, offset_of_enum, "1.75.0", Some(106655), None), + /// Allows using `#[optimize(X)]`. + (unstable, optimize_attribute, "1.34.0", Some(54882), None), + /// Allows exhaustive integer pattern matching on `usize` and `isize`. + (unstable, precise_pointer_size_matching, "1.32.0", Some(56354), None), + /// Allows macro attributes on expressions, statements and non-inline modules. + (unstable, proc_macro_hygiene, "1.30.0", Some(54727), None), + /// Allows `&raw const $place_expr` and `&raw mut $place_expr` expressions. + (unstable, raw_ref_op, "1.41.0", Some(64490), None), + /// Allows using the `#[register_tool]` attribute. + (unstable, register_tool, "1.41.0", Some(66079), None), + /// Allows the `#[repr(i128)]` attribute for enums. + (incomplete, repr128, "1.16.0", Some(56071), None), + /// Allows `repr(simd)` and importing the various simd intrinsics. + (unstable, repr_simd, "1.4.0", Some(27731), None), + /// Allows bounding the return type of AFIT/RPITIT. + (incomplete, return_type_notation, "1.70.0", Some(109417), None), + /// Allows `extern "rust-cold"`. + (unstable, rust_cold_cc, "1.63.0", Some(97544), None), + /// Allows the use of SIMD types in functions declared in `extern` blocks. + (unstable, simd_ffi, "1.0.0", Some(27731), None), + /// Allows specialization of implementations (RFC 1210). + (incomplete, specialization, "1.7.0", Some(31844), None), + /// Allows attributes on expressions and non-item statements. + (unstable, stmt_expr_attributes, "1.6.0", Some(15701), None), + /// Allows lints part of the strict provenance effort. + (unstable, strict_provenance, "1.61.0", Some(95228), None), + /// Allows string patterns to dereference values to match them. + (unstable, string_deref_patterns, "1.67.0", Some(87121), None), + /// Allows the use of `#[target_feature]` on safe functions. + (unstable, target_feature_11, "1.45.0", Some(69098), None), + /// Allows using `#[thread_local]` on `static` items. + (unstable, thread_local, "1.0.0", Some(29594), None), + /// Allows defining `trait X = A + B;` alias items. + (unstable, trait_alias, "1.24.0", Some(41517), None), + /// Allows dyn upcasting trait objects via supertraits. + /// Dyn upcasting is casting, e.g., `dyn Foo -> dyn Bar` where `Foo: Bar`. + (unstable, trait_upcasting, "1.56.0", Some(65991), None), + /// Allows for transmuting between arrays with sizes that contain generic consts. + (unstable, transmute_generic_consts, "1.70.0", Some(109929), None), + /// Allows #[repr(transparent)] on unions (RFC 2645). + (unstable, transparent_unions, "1.37.0", Some(60405), None), + /// Allows inconsistent bounds in where clauses. + (unstable, trivial_bounds, "1.28.0", Some(48214), None), + /// Allows using `try {...}` expressions. + (unstable, try_blocks, "1.29.0", Some(31436), None), + /// Allows `impl Trait` to be used inside type aliases (RFC 2515). + (unstable, type_alias_impl_trait, "1.38.0", Some(63063), None), + /// Allows the use of type ascription in expressions. + (unstable, type_ascription, "1.6.0", Some(23416), None), + /// Allows creation of instances of a struct by moving fields that have + /// not changed from prior instances of the same struct (RFC #2528) + (unstable, type_changing_struct_update, "1.58.0", Some(86555), None), + /// Allows using type privacy lints (`private_interfaces`, `private_bounds`, `unnameable_types`). + (unstable, type_privacy_lints, "1.72.0", Some(48054), None), + /// Enables rustc to generate code that instructs libstd to NOT ignore SIGPIPE. 
+ (unstable, unix_sigpipe, "1.65.0", Some(97889), None), + /// Allows unnamed fields of struct and union type + (incomplete, unnamed_fields, "1.74.0", Some(49804), None), + /// Allows unsized fn parameters. + (unstable, unsized_fn_params, "1.49.0", Some(48055), None), + /// Allows unsized rvalues at arguments and parameters. + (incomplete, unsized_locals, "1.30.0", Some(48055), None), + /// Allows unsized tuple coercion. + (unstable, unsized_tuple_coercion, "1.20.0", Some(42877), None), + /// Allows using the `#[used(linker)]` (or `#[used(compiler)]`) attribute. + (unstable, used_with_arg, "1.60.0", Some(93798), None), + /// Allows `extern "wasm" fn` + (unstable, wasm_abi, "1.53.0", Some(83788), None), + /// Allows `do yeet` expressions + (unstable, yeet_expr, "1.62.0", Some(96373), None), + // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! + // Features are listed in alphabetical order. Tidy will fail if you don't keep it this way. + // !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! + + // ------------------------------------------------------------------------- + // feature-group-end: actual feature gates + // ------------------------------------------------------------------------- +); + +/// Some features are not allowed to be used together at the same time, if +/// the two are present, produce an error. +/// +/// Currently empty, but we will probably need this again in the future, +/// so let's keep it in for now. +pub const INCOMPATIBLE_FEATURES: &[(Symbol, Symbol)] = &[]; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -1,16 +1,18 @@ [package] name = "rustc_fluent_macro" -version = "0.1.0" +version = "0.0.0" edition = "2021" [lib] proc-macro = true [dependencies] +# tidy-alphabetical-start annotate-snippets = "0.9" fluent-bundle = "0.15.2" fluent-syntax = "0.11" -syn = { version = "2", features = ["full"] } proc-macro2 = "1" quote = "1" +syn = { version = "2", features = ["full"] } unic-langid = { version = "0.9.0", features = ["macros"] } +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/src/fluent.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/src/fluent.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/src/fluent.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/src/fluent.rs 2023-12-21 16:55:28.000000000 +0000 @@ -40,26 +40,35 @@ } } -/// Tokens to be returned when the macro cannot proceed. -fn failed(crate_name: &Ident) -> proc_macro::TokenStream { +/// Final tokens. +fn finish(body: TokenStream, resource: TokenStream) -> proc_macro::TokenStream { quote! { - pub static DEFAULT_LOCALE_RESOURCE: &'static str = ""; + /// Raw content of Fluent resource for this crate, generated by `fluent_messages` macro, + /// imported by `rustc_driver` to include all crates' resources in one bundle. + pub static DEFAULT_LOCALE_RESOURCE: &'static str = #resource; #[allow(non_upper_case_globals)] #[doc(hidden)] + /// Auto-generated constants for type-checked references to Fluent messages. 
pub(crate) mod fluent_generated { - pub mod #crate_name { - } + #body + /// Constants expected to exist by the diagnostic derive macros to use as default Fluent + /// identifiers for different subdiagnostic kinds. pub mod _subdiag { + /// Default for `#[help]` pub const help: crate::SubdiagnosticMessage = crate::SubdiagnosticMessage::FluentAttr(std::borrow::Cow::Borrowed("help")); + /// Default for `#[note]` pub const note: crate::SubdiagnosticMessage = crate::SubdiagnosticMessage::FluentAttr(std::borrow::Cow::Borrowed("note")); + /// Default for `#[warn]` pub const warn: crate::SubdiagnosticMessage = crate::SubdiagnosticMessage::FluentAttr(std::borrow::Cow::Borrowed("warn")); + /// Default for `#[label]` pub const label: crate::SubdiagnosticMessage = crate::SubdiagnosticMessage::FluentAttr(std::borrow::Cow::Borrowed("label")); + /// Default for `#[suggestion]` pub const suggestion: crate::SubdiagnosticMessage = crate::SubdiagnosticMessage::FluentAttr(std::borrow::Cow::Borrowed("suggestion")); } @@ -68,6 +77,11 @@ .into() } +/// Tokens to be returned when the macro cannot proceed. +fn failed(crate_name: &Ident) -> proc_macro::TokenStream { + finish(quote! { pub mod #crate_name {} }, quote! { "" }) +} + /// See [rustc_fluent_macro::fluent_messages]. pub(crate) fn fluent_messages(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let crate_name = std::env::var("CARGO_PKG_NAME") @@ -311,39 +325,7 @@ } } - quote! { - /// Raw content of Fluent resource for this crate, generated by `fluent_messages` macro, - /// imported by `rustc_driver` to include all crates' resources in one bundle. - pub static DEFAULT_LOCALE_RESOURCE: &'static str = include_str!(#relative_ftl_path); - - #[allow(non_upper_case_globals)] - #[doc(hidden)] - /// Auto-generated constants for type-checked references to Fluent messages. - pub(crate) mod fluent_generated { - #constants - - /// Constants expected to exist by the diagnostic derive macros to use as default Fluent - /// identifiers for different subdiagnostic kinds. - pub mod _subdiag { - /// Default for `#[help]` - pub const help: crate::SubdiagnosticMessage = - crate::SubdiagnosticMessage::FluentAttr(std::borrow::Cow::Borrowed("help")); - /// Default for `#[note]` - pub const note: crate::SubdiagnosticMessage = - crate::SubdiagnosticMessage::FluentAttr(std::borrow::Cow::Borrowed("note")); - /// Default for `#[warn]` - pub const warn: crate::SubdiagnosticMessage = - crate::SubdiagnosticMessage::FluentAttr(std::borrow::Cow::Borrowed("warn")); - /// Default for `#[label]` - pub const label: crate::SubdiagnosticMessage = - crate::SubdiagnosticMessage::FluentAttr(std::borrow::Cow::Borrowed("label")); - /// Default for `#[suggestion]` - pub const suggestion: crate::SubdiagnosticMessage = - crate::SubdiagnosticMessage::FluentAttr(std::borrow::Cow::Borrowed("suggestion")); - } - } - } - .into() + finish(constants, quote! 
{ include_str!(#relative_ftl_path) }) } fn variable_references<'a>(msg: &Message<&'a str>) -> Vec<&'a str> { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_fluent_macro/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,7 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature(proc_macro_diagnostic)] #![feature(proc_macro_span)] #![deny(rustc::untranslatable_diagnostic)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_fs_util/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_fs_util/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_fs_util/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_fs_util/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -2,3 +2,7 @@ name = "rustc_fs_util" version = "0.0.0" edition = "2021" + +[dependencies] +# tidy-alphabetical-start +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_graphviz/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_graphviz/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_graphviz/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_graphviz/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -2,3 +2,7 @@ name = "rustc_graphviz" version = "0.0.0" edition = "2021" + +[dependencies] +# tidy-alphabetical-start +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_graphviz/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_graphviz/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_graphviz/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_graphviz/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -273,6 +273,9 @@ html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", test(attr(allow(unused_variables), deny(warnings))) )] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,18 +3,18 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start +odht = { version = "0.3.1", features = ["nightly"] } rustc_arena = { path = "../rustc_arena" } -rustc_target = { path = "../rustc_target" } -rustc_macros = { path = "../rustc_macros" } +rustc_ast = { path = "../rustc_ast" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_error_messages = { path = "../rustc_error_messages" } rustc_index = { path = "../rustc_index" } -rustc_span = { path = "../rustc_span" } +rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = 
"../rustc_serialize" } -rustc_ast = { path = "../rustc_ast" } -tracing = "0.1" +rustc_span = { path = "../rustc_span" } +rustc_target = { path = "../rustc_target" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -odht = { version = "0.3.1", features = ["nightly"] } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/def.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/def.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/def.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/def.rs 2023-12-21 16:55:28.000000000 +0000 @@ -118,7 +118,7 @@ of_trait: bool, }, Closure, - Generator, + Coroutine, } impl DefKind { @@ -126,7 +126,7 @@ /// /// If you have access to `TyCtxt`, use `TyCtxt::def_descr` or /// `TyCtxt::def_kind_descr` instead, because they give better - /// information for generators and associated functions. + /// information for coroutines and associated functions. pub fn descr(self, def_id: DefId) -> &'static str { match self { DefKind::Fn => "function", @@ -161,7 +161,7 @@ DefKind::Field => "field", DefKind::Impl { .. } => "implementation", DefKind::Closure => "closure", - DefKind::Generator => "generator", + DefKind::Coroutine => "coroutine", DefKind::ExternCrate => "extern crate", DefKind::GlobalAsm => "global assembly block", } @@ -171,7 +171,7 @@ /// /// If you have access to `TyCtxt`, use `TyCtxt::def_descr_article` or /// `TyCtxt::def_kind_descr_article` instead, because they give better - /// information for generators and associated functions. + /// information for coroutines and associated functions. pub fn article(&self) -> &'static str { match *self { DefKind::AssocTy @@ -220,7 +220,7 @@ | DefKind::LifetimeParam | DefKind::ExternCrate | DefKind::Closure - | DefKind::Generator + | DefKind::Coroutine | DefKind::Use | DefKind::ForeignMod | DefKind::GlobalAsm @@ -230,7 +230,7 @@ #[inline] pub fn is_fn_like(self) -> bool { - matches!(self, DefKind::Fn | DefKind::AssocFn | DefKind::Closure | DefKind::Generator) + matches!(self, DefKind::Fn | DefKind::AssocFn | DefKind::Closure | DefKind::Coroutine) } /// Whether `query get_codegen_attrs` should be used with this definition. @@ -240,7 +240,7 @@ | DefKind::AssocFn | DefKind::Ctor(..) | DefKind::Closure - | DefKind::Generator + | DefKind::Coroutine | DefKind::Static(_) => true, DefKind::Mod | DefKind::Struct diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/hir.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/hir.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/hir.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/hir.rs 2023-12-21 16:55:28.000000000 +0000 @@ -246,6 +246,8 @@ pub struct ConstArg { pub value: AnonConst, pub span: Span, + /// Indicates whether this comes from a `~const` desugaring. + pub is_desugared_from_effects: bool, } #[derive(Clone, Copy, Debug, HashStable_Generic)] @@ -400,7 +402,14 @@ /// This function returns the number of type and const generic params. /// It should only be used for diagnostics. pub fn num_generic_params(&self) -> usize { - self.args.iter().filter(|arg| !matches!(arg, GenericArg::Lifetime(_))).count() + self.args + .iter() + .filter(|arg| match arg { + GenericArg::Lifetime(_) + | GenericArg::Const(ConstArg { is_desugared_from_effects: true, .. }) => false, + _ => true, + }) + .count() } /// The span encompassing the text inside the surrounding brackets. 
@@ -1485,7 +1494,7 @@ /// /// - an `params` array containing the `(x, y)` pattern /// - a `value` containing the `x + y` expression (maybe wrapped in a block) -/// - `generator_kind` would be `None` +/// - `coroutine_kind` would be `None` /// /// All bodies have an **owner**, which can be accessed via the HIR /// map using `body_owner_def_id()`. @@ -1493,7 +1502,7 @@ pub struct Body<'hir> { pub params: &'hir [Param<'hir>], pub value: &'hir Expr<'hir>, - pub generator_kind: Option<GeneratorKind>, + pub coroutine_kind: Option<CoroutineKind>, } impl<'hir> Body<'hir> { @@ -1501,75 +1510,75 @@ BodyId { hir_id: self.value.hir_id } } - pub fn generator_kind(&self) -> Option<GeneratorKind> { - self.generator_kind + pub fn coroutine_kind(&self) -> Option<CoroutineKind> { + self.coroutine_kind } } -/// The type of source expression that caused this generator to be created. +/// The type of source expression that caused this coroutine to be created. #[derive(Clone, PartialEq, Eq, Debug, Copy, Hash)] #[derive(HashStable_Generic, Encodable, Decodable)] -pub enum GeneratorKind { +pub enum CoroutineKind { /// An explicit `async` block or the body of an async function. - Async(AsyncGeneratorKind), + Async(CoroutineSource), - /// A generator literal created via a `yield` inside a closure. - Gen, -} + /// An explicit `gen` block or the body of a `gen` function. + Gen(CoroutineSource), -impl fmt::Display for GeneratorKind { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - GeneratorKind::Async(k) => fmt::Display::fmt(k, f), - GeneratorKind::Gen => f.write_str("generator"), - } - } + /// A coroutine literal created via a `yield` inside a closure. + Coroutine, } -impl GeneratorKind { - pub fn descr(&self) -> &'static str { +impl fmt::Display for CoroutineKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - GeneratorKind::Async(ask) => ask.descr(), - GeneratorKind::Gen => "generator", + CoroutineKind::Async(k) => { + if f.alternate() { + f.write_str("`async` ")?; + } else { + f.write_str("async ")? + } + k.fmt(f) + } + CoroutineKind::Coroutine => f.write_str("coroutine"), + CoroutineKind::Gen(k) => { + if f.alternate() { + f.write_str("`gen` ")?; + } else { + f.write_str("gen ")? + } + k.fmt(f) + } } } } -/// In the case of a generator created as part of an async construct, -/// which kind of async construct caused it to be created? +/// In the case of a coroutine created as part of an async/gen construct, +/// which kind of async/gen construct caused it to be created? /// /// This helps error messages but is also used to drive coercions in /// type-checking (see #60424). #[derive(Clone, PartialEq, Eq, Hash, Debug, Copy)] #[derive(HashStable_Generic, Encodable, Decodable)] -pub enum AsyncGeneratorKind { - /// An explicit `async` block written by the user. +pub enum CoroutineSource { + /// An explicit `async`/`gen` block written by the user. Block, - /// An explicit `async` closure written by the user. + /// An explicit `async`/`gen` closure written by the user. Closure, - /// The `async` block generated as the body of an async function. + /// The `async`/`gen` block generated as the body of an async/gen function.
Fn, } -impl fmt::Display for AsyncGeneratorKind { +impl fmt::Display for CoroutineSource { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(match self { - AsyncGeneratorKind::Block => "async block", - AsyncGeneratorKind::Closure => "async closure body", - AsyncGeneratorKind::Fn => "async fn body", - }) - } -} - -impl AsyncGeneratorKind { - pub fn descr(&self) -> &'static str { match self { - AsyncGeneratorKind::Block => "`async` block", - AsyncGeneratorKind::Closure => "`async` closure body", - AsyncGeneratorKind::Fn => "`async fn` body", + CoroutineSource::Block => "block", + CoroutineSource::Closure => "closure body", + CoroutineSource::Fn => "fn body", } + .fmt(f) } } @@ -2004,7 +2013,7 @@ /// /// The `Span` is the argument block `|...|`. /// - /// This may also be a generator literal or an `async block` as indicated by the + /// This may also be a coroutine literal or an `async block` as indicated by the /// `Option<Movability>`. Closure(&'hir Closure<'hir>), /// A block (e.g., `'label: { ... }`). @@ -2055,7 +2064,7 @@ /// to be repeated; the second is the number of times to repeat it. Repeat(&'hir Expr<'hir>, ArrayLen), - /// A suspension point for generators (i.e., `yield <expr>`). + /// A suspension point for coroutines (i.e., `yield <expr>`). Yield(&'hir Expr<'hir>, YieldSource), /// A placeholder for an expression that wasn't syntactically well formed in some way. @@ -2247,12 +2256,13 @@ } } -impl From<GeneratorKind> for YieldSource { - fn from(kind: GeneratorKind) -> Self { +impl From<CoroutineKind> for YieldSource { + fn from(kind: CoroutineKind) -> Self { match kind { - // Guess based on the kind of the current generator. - GeneratorKind::Gen => Self::Yield, - GeneratorKind::Async(_) => Self::Await { expr: None }, + // Guess based on the kind of the current coroutine. + CoroutineKind::Coroutine => Self::Yield, + CoroutineKind::Async(_) => Self::Await { expr: None }, + CoroutineKind::Gen(_) => Self::Yield, } } } @@ -3556,6 +3566,15 @@ } } + pub fn fn_sig(self) -> Option<&'hir FnSig<'hir>> { + match self { + OwnerNode::TraitItem(TraitItem { kind: TraitItemKind::Fn(fn_sig, _), .. }) + | OwnerNode::ImplItem(ImplItem { kind: ImplItemKind::Fn(fn_sig, _), .. }) + | OwnerNode::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) => Some(fn_sig), + _ => None, + } + } + pub fn fn_decl(self) -> Option<&'hir FnDecl<'hir>> { match self { OwnerNode::TraitItem(TraitItem { kind: TraitItemKind::Fn(fn_sig, _), .. }) @@ -3781,6 +3800,7 @@ ItemKind::TyAlias(ty, _) | ItemKind::Static(ty, _, _) | ItemKind::Const(ty, _, _) => Some(ty), + ItemKind::Impl(impl_item) => Some(&impl_item.self_ty), _ => None, }, Node::TraitItem(it) => match it.kind { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/intravisit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/intravisit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/intravisit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/intravisit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -62,7 +62,7 @@ //! respectively. (This follows from RPO respecting CFG domination). //! //! This order consistency is required in a few places in rustc, for -//! example generator inference, and possibly also HIR borrowck. +//! example coroutine inference, and possibly also HIR borrowck.
use crate::hir::*; use rustc_ast::walk_list; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/lang_items.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/lang_items.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/lang_items.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/lang_items.rs 2023-12-21 16:55:28.000000000 +0000 @@ -210,9 +210,10 @@ FnOnceOutput, sym::fn_once_output, fn_once_output, Target::AssocTy, GenericRequirement::None; + Iterator, sym::iterator, iterator_trait, Target::Trait, GenericRequirement::Exact(0); Future, sym::future_trait, future_trait, Target::Trait, GenericRequirement::Exact(0); - GeneratorState, sym::generator_state, gen_state, Target::Enum, GenericRequirement::None; - Generator, sym::generator, gen_trait, Target::Trait, GenericRequirement::Minimum(1); + CoroutineState, sym::coroutine_state, coroutine_state, Target::Enum, GenericRequirement::None; + Coroutine, sym::coroutine, coroutine_trait, Target::Trait, GenericRequirement::Minimum(1); Unpin, sym::unpin, unpin_trait, Target::Trait, GenericRequirement::None; Pin, sym::pin, pin_type, Target::Struct, GenericRequirement::None; @@ -230,7 +231,6 @@ Panic, sym::panic, panic_fn, Target::Fn, GenericRequirement::Exact(0); PanicNounwind, sym::panic_nounwind, panic_nounwind, Target::Fn, GenericRequirement::Exact(0); PanicFmt, sym::panic_fmt, panic_fmt, Target::Fn, GenericRequirement::None; - PanicDisplay, sym::panic_display, panic_display, Target::Fn, GenericRequirement::None; ConstPanicFmt, sym::const_panic_fmt, const_panic_fmt, Target::Fn, GenericRequirement::None; PanicBoundsCheck, sym::panic_bounds_check, panic_bounds_check_fn, Target::Fn, GenericRequirement::Exact(0); PanicMisalignedPointerDereference, sym::panic_misaligned_pointer_dereference, panic_misaligned_pointer_dereference_fn, Target::Fn, GenericRequirement::Exact(0); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir/src/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,7 +2,7 @@ use rustc_data_structures::stable_hasher::Hash64; use rustc_span::def_id::{DefPathHash, StableCrateId}; use rustc_span::edition::Edition; -use rustc_span::{create_session_if_not_set_then, Symbol}; +use rustc_span::{create_session_globals_then, Symbol}; #[test] fn def_path_hash_depends_on_crate_id() { @@ -14,7 +14,7 @@ // the crate by changing the crate disambiguator (e.g. via bumping the // crate's version number). 
- create_session_if_not_set_then(Edition::Edition2024, |_| { + create_session_globals_then(Edition::Edition2024, || { let id0 = StableCrateId::new(Symbol::intern("foo"), false, vec!["1".to_string()], ""); let id1 = StableCrateId::new(Symbol::intern("foo"), false, vec!["2".to_string()], ""); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -8,23 +8,25 @@ doctest = false [dependencies] +# tidy-alphabetical-start rustc_arena = { path = "../rustc_arena" } -rustc_macros = { path = "../rustc_macros" } -rustc_middle = { path = "../rustc_middle" } +rustc_ast = { path = "../rustc_ast" } rustc_attr = { path = "../rustc_attr" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } -rustc_hir = { path = "../rustc_hir" } +rustc_feature = { path = "../rustc_feature" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_target = { path = "../rustc_target" } -rustc_session = { path = "../rustc_session" } -smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -rustc_ast = { path = "../rustc_ast" } -rustc_span = { path = "../rustc_span" } +rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_infer = { path = "../rustc_infer" } -rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_lint_defs = { path = "../rustc_lint_defs" } +rustc_macros = { path = "../rustc_macros" } +rustc_middle = { path = "../rustc_middle" } +rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } +rustc_target = { path = "../rustc_target" } +rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_type_ir = { path = "../rustc_type_ir" } -rustc_feature = { path = "../rustc_feature" } +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -72,6 +72,12 @@ the trait `Copy` cannot be implemented for this type; the type has a destructor .label = `Copy` not allowed on types with destructors +hir_analysis_cross_crate_traits = cross-crate traits with a default impl, like `{$traits}`, can only be implemented for a struct/enum type, not `{$self_ty}` + .label = can't implement cross-crate trait with a default impl for non-struct/enum type + +hir_analysis_cross_crate_traits_defined = cross-crate traits with a default impl, like `{$traits}`, can only be implemented for a struct/enum type defined in the current crate + .label = can't implement cross-crate trait for type in another crate + hir_analysis_dispatch_from_dyn_multi = implementing the `DispatchFromDyn` trait requires multiple coercions .note = the trait `DispatchFromDyn` may only be implemented for a coercion between structures with a single field being coerced .coercions_note = currently, {$number} fields need coercions: {$coercions} @@ -96,8 +102,6 @@ .label = overflowed 
on value after {$discr} .note = explicitly set `{$item_name} = {$wrapped_discr}` if that is desired outcome -hir_analysis_expected_used_symbol = expected `used`, `used(compiler)` or `used(linker)` - hir_analysis_field_already_declared = field `{$field_name}` is already declared .label = field already declared @@ -239,6 +243,28 @@ hir_analysis_must_implement_one_of_attribute = the `#[rustc_must_implement_one_of]` attribute must be used with at least 2 args +hir_analysis_only_current_traits_arbitrary = only traits defined in the current crate can be implemented for arbitrary types + +hir_analysis_only_current_traits_foreign = this is not defined in the current crate because this is a foreign trait + +hir_analysis_only_current_traits_label = impl doesn't use only types from inside the current crate + +hir_analysis_only_current_traits_name = this is not defined in the current crate because {$name} are always foreign + +hir_analysis_only_current_traits_note = define and implement a trait or new type instead + +hir_analysis_only_current_traits_opaque = type alias impl trait is treated as if it were foreign, because its hidden type could be from a foreign crate + +hir_analysis_only_current_traits_outside = only traits defined in the current crate can be implemented for types defined outside of the crate + +hir_analysis_only_current_traits_pointer = `{$pointer}` is not defined in the current crate because raw pointers are always foreign + +hir_analysis_only_current_traits_pointer_sugg = consider introducing a new wrapper type + +hir_analysis_only_current_traits_primitive = only traits defined in the current crate can be implemented for primitive types + +hir_analysis_only_current_traits_ty = `{$ty}` is not defined in the current crate + hir_analysis_paren_sugar_attribute = the `#[rustc_paren_sugar]` attribute is a temporary means of controlling which traits can use parenthetical notation .help = add `#![feature(unboxed_closures)]` to the crate attributes to use it @@ -328,6 +354,9 @@ at least one trait is required for an object type .alias_span = this alias does not contain a trait +hir_analysis_traits_with_defualt_impl = traits with a default impl, like `{$traits}`, cannot be implemented for {$problematic_kind} `{$self_ty}` + .note = a trait object implements `{$traits}` if and only if `{$traits}` is one of the trait object's trait bounds + hir_analysis_transparent_enum_variant = transparent enum needs exactly one variant, but has {$number} .label = needs exactly one variant, but has {$number} .many_label = too many variants in `{$path}` @@ -341,6 +370,18 @@ .label = needs at most one field with non-trivial size or alignment, but has {$field_count} .labels = this field has non-zero size or requires alignment +hir_analysis_ty_param_first_local = type parameter `{$param_ty}` must be covered by another type when it appears before the first local type (`{$local_type}`) + .label = type parameter `{$param_ty}` must be covered by another type when it appears before the first local type (`{$local_type}`) + .note = implementing a foreign trait is only possible if at least one of the types for which it is implemented is local, and no uncovered type parameters appear before that first local type + .case_note = in this case, 'before' refers to the following order: `impl<..> ForeignTrait for T0`, where `T0` is the first and `Tn` is the last + +hir_analysis_ty_param_some = type parameter `{$param_ty}` must be used as the type parameter for some local type (e.g., `MyStruct<{$param_ty}>`) + .label = type 
parameter `{$param_ty}` must be used as the type parameter for some local type + .note = implementing a foreign trait is only possible if at least one of the types for which it is implemented is local + .only_note = only traits defined in the current crate can be implemented for a type parameter + +hir_analysis_type_of = {$type_of} + hir_analysis_typeof_reserved_keyword_used = `typeof` is a reserved keyword but unimplemented .suggestion = consider replacing `typeof(...)` with an actual type @@ -363,7 +404,7 @@ .suggestion = remove this bound hir_analysis_value_of_associated_struct_already_specified = - the value of the associated type `{$item_name}` (from trait `{$def_path}`) is already specified + the value of the associated type `{$item_name}` in trait `{$def_path}` is already specified .label = re-bound here .previous_bound_label = `{$item_name}` bound here first diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/bounds.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/bounds.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/bounds.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/bounds.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,6 +8,7 @@ use rustc_span::symbol::Ident; use rustc_span::{ErrorGuaranteed, Span}; use rustc_trait_selection::traits; +use smallvec::SmallVec; use crate::astconv::{ AstConv, ConvertedBinding, ConvertedBindingKind, OnlySelfBounds, PredicateFilter, @@ -28,15 +29,11 @@ let tcx = self.tcx(); // Try to find an unbound in bounds. - let mut unbound = None; + let mut unbounds: SmallVec<[_; 1]> = SmallVec::new(); let mut search_bounds = |ast_bounds: &'tcx [hir::GenericBound<'tcx>]| { for ab in ast_bounds { if let hir::GenericBound::Trait(ptr, hir::TraitBoundModifier::Maybe) = ab { - if unbound.is_none() { - unbound = Some(&ptr.trait_ref); - } else { - tcx.sess.emit_err(errors::MultipleRelaxedDefaultBounds { span }); - } + unbounds.push(ptr) } } }; @@ -51,33 +48,41 @@ } } + if unbounds.len() > 1 { + tcx.sess.emit_err(errors::MultipleRelaxedDefaultBounds { + spans: unbounds.iter().map(|ptr| ptr.span).collect(), + }); + } + let sized_def_id = tcx.lang_items().sized_trait(); - match (&sized_def_id, unbound) { - (Some(sized_def_id), Some(tpb)) - if tpb.path.res == Res::Def(DefKind::Trait, *sized_def_id) => - { - // There was in fact a `?Sized` bound, return without doing anything - return; - } - (_, Some(_)) => { - // There was a `?Trait` bound, but it was not `?Sized`; warn. - tcx.sess.span_warn( - span, - "default bound relaxed for a type parameter, but \ - this does nothing because the given bound is not \ - a default; only `?Sized` is supported", - ); - // Otherwise, add implicitly sized if `Sized` is available. - } - _ => { - // There was no `?Sized` bound; add implicitly sized if `Sized` is available. + + let mut seen_sized_unbound = false; + for unbound in unbounds { + if let Some(sized_def_id) = sized_def_id { + if unbound.trait_ref.path.res == Res::Def(DefKind::Trait, sized_def_id) { + seen_sized_unbound = true; + continue; + } } + // There was a `?Trait` bound, but it was not `?Sized`; warn. + tcx.sess.span_warn( + unbound.span, + "relaxing a default bound only does something for `?Sized`; \ + all other traits are not bound by default", + ); } + + // If the above loop finished there was no `?Sized` bound; add implicitly sized if `Sized` is available. 
if sized_def_id.is_none() { // No lang item for `Sized`, so we can't add it as a bound. return; } - bounds.push_sized(tcx, self_ty, span); + if seen_sized_unbound { + // There was in fact a `?Sized` bound, return without doing anything + } else { + // There was no `?Sized` bound; add implicitly sized if `Sized` is available. + bounds.push_sized(tcx, self_ty, span); + } } /// This helper takes a *converted* parameter type (`param_ty`) @@ -284,6 +289,7 @@ self.one_bound_for_assoc_type( || traits::supertraits(tcx, trait_ref), trait_ref.skip_binder().print_only_trait_name(), + None, binding.item_name, path_span, match binding.kind { @@ -447,7 +453,7 @@ debug!(?args_trait_ref_and_assoc_item); - tcx.mk_alias_ty(assoc_item.def_id, args_trait_ref_and_assoc_item) + ty::AliasTy::new(tcx, assoc_item.def_id, args_trait_ref_and_assoc_item) }) }; @@ -517,8 +523,10 @@ ); if let DefKind::AssocConst = def_kind - && let Some(t) = term.ty() && (t.is_enum() || t.references_error()) - && tcx.features().associated_const_equality { + && let Some(t) = term.ty() + && (t.is_enum() || t.references_error()) + && tcx.features().associated_const_equality + { err.span_suggestion( binding.span, "if equating a const, try wrapping with braces", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,17 +3,18 @@ AssocTypeBindingNotAllowed, ManualImplementation, MissingTypeParams, ParenthesizedFnTraitExpansion, }; +use crate::traits::error_reporting::report_object_safety_error; use rustc_data_structures::fx::FxHashMap; use rustc_errors::{pluralize, struct_span_err, Applicability, Diagnostic, ErrorGuaranteed}; use rustc_hir as hir; -use rustc_hir::def_id::DefId; +use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_infer::traits::FulfillmentError; -use rustc_middle::ty::TyCtxt; -use rustc_middle::ty::{self, Ty}; +use rustc_middle::ty::{self, suggest_constraining_type_param, Ty, TyCtxt}; use rustc_session::parse::feature_err; use rustc_span::edit_distance::find_best_match_for_name; use rustc_span::symbol::{sym, Ident}; use rustc_span::{Span, Symbol, DUMMY_SP}; +use rustc_trait_selection::traits::object_safety_violations_for_assoc_item; use std::collections::BTreeSet; @@ -102,6 +103,7 @@ &self, all_candidates: impl Fn() -> I, ty_param_name: &str, + ty_param_def_id: Option, assoc_name: Ident, span: Span, ) -> ErrorGuaranteed @@ -190,13 +192,61 @@ }) .collect::>()[..] { + let trait_name = self.tcx().def_path_str(*best_trait); + let an = if suggested_name != assoc_name.name { "a similarly named" } else { "an" }; err.span_label( assoc_name.span, format!( - "there is a similarly named associated type `{suggested_name}` in the trait `{}`", - self.tcx().def_path_str(*best_trait) + "there is {an} associated type `{suggested_name}` in the \ + trait `{trait_name}`", ), ); + let hir = self.tcx().hir(); + if let Some(def_id) = ty_param_def_id + && let parent = hir.get_parent_item(hir.local_def_id_to_hir_id(def_id)) + && let Some(generics) = hir.get_generics(parent.def_id) + { + if generics.bounds_for_param(def_id) + .flat_map(|pred| pred.bounds.iter()) + .any(|b| match b { + hir::GenericBound::Trait(t, ..) 
=> { + t.trait_ref.trait_def_id().as_ref() == Some(best_trait) + } + _ => false, + }) + { + // The type param already has a bound for `trait_name`, we just need to + // change the associated type. + err.span_suggestion_verbose( + assoc_name.span, + format!( + "change the associated type name to use `{suggested_name}` from \ + `{trait_name}`", + ), + suggested_name.to_string(), + Applicability::MaybeIncorrect, + ); + } else if suggest_constraining_type_param( + self.tcx(), + generics, + &mut err, + &ty_param_name, + &trait_name, + None, + None, + ) + && suggested_name != assoc_name.name + { + // We suggested constraining a type parameter, but the associated type on it + // was also not an exact match, so we also suggest changing it. + err.span_suggestion_verbose( + assoc_name.span, + "and also change the associated type name", + suggested_name.to_string(), + Applicability::MaybeIncorrect, + ); + } + } return err.emit(); } } @@ -389,7 +439,7 @@ ); let quiet_projection_ty = - tcx.mk_alias_ty(projection_ty.def_id, args_with_infer_self); + ty::AliasTy::new(tcx, projection_ty.def_id, args_with_infer_self); let term = pred.skip_binder().term; @@ -472,24 +522,33 @@ (span, def_ids.into_iter().map(|did| tcx.associated_item(did)).collect()) }) .collect(); - let mut names = vec![]; + let mut names: FxHashMap> = Default::default(); + let mut names_len = 0; // Account for things like `dyn Foo + 'a`, like in tests `issue-22434.rs` and // `issue-22560.rs`. let mut trait_bound_spans: Vec = vec![]; + let mut object_safety_violations = false; for (span, items) in &associated_types { if !items.is_empty() { trait_bound_spans.push(*span); } for assoc_item in items { let trait_def_id = assoc_item.container_id(tcx); - names.push(format!( - "`{}` (from trait `{}`)", - assoc_item.name, - tcx.def_path_str(trait_def_id), - )); + names.entry(tcx.def_path_str(trait_def_id)).or_default().push(assoc_item.name); + names_len += 1; + + let violations = + object_safety_violations_for_assoc_item(tcx, trait_def_id, *assoc_item); + if !violations.is_empty() { + report_object_safety_error(tcx, *span, trait_def_id, &violations).emit(); + object_safety_violations = true; + } } } + if object_safety_violations { + return; + } if let ([], [bound]) = (&potential_assoc_types[..], &trait_bounds) { match bound.trait_ref.path.segments { // FIXME: `trait_ref.path.span` can point to a full path with multiple @@ -525,15 +584,35 @@ _ => {} } } + + let mut names = names + .into_iter() + .map(|(trait_, mut assocs)| { + assocs.sort(); + format!( + "{} in `{trait_}`", + match &assocs[..] 
{ + [] => String::new(), + [only] => format!("`{only}`"), + [assocs @ .., last] => format!( + "{} and `{last}`", + assocs.iter().map(|a| format!("`{a}`")).collect::>().join(", ") + ), + } + ) + }) + .collect::>(); names.sort(); + let names = names.join(", "); + trait_bound_spans.sort(); let mut err = struct_span_err!( tcx.sess, trait_bound_spans, E0191, "the value of the associated type{} {} must be specified", - pluralize!(names.len()), - names.join(", "), + pluralize!(names_len), + names, ); let mut suggestions = vec![]; let mut types_count = 0; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/generics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/generics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/generics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/generics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -429,12 +429,22 @@ .filter(|param| matches!(param.kind, ty::GenericParamDefKind::Type { synthetic: true, .. })) .count(); let named_type_param_count = param_counts.types - has_self as usize - synth_type_param_count; + let synth_const_param_count = gen_params + .params + .iter() + .filter(|param| { + matches!(param.kind, ty::GenericParamDefKind::Const { is_host_effect: true, .. }) + }) + .count(); + let named_const_param_count = param_counts.consts - synth_const_param_count; let infer_lifetimes = (gen_pos != GenericArgPosition::Type || infer_args) && !gen_args.has_lifetime_params(); - if gen_pos != GenericArgPosition::Type && let Some(b) = gen_args.bindings.first() { - prohibit_assoc_ty_binding(tcx, b.span, None); - } + if gen_pos != GenericArgPosition::Type + && let Some(b) = gen_args.bindings.first() + { + prohibit_assoc_ty_binding(tcx, b.span, None); + } let explicit_late_bound = prohibit_explicit_late_bound_lifetimes(tcx, gen_params, gen_args, gen_pos); @@ -571,11 +581,13 @@ debug!(?expected_min); debug!(arg_counts.lifetimes=?gen_args.num_lifetime_params()); + let provided = gen_args.num_generic_params(); + check_types_and_consts( expected_min, - param_counts.consts + named_type_param_count, - param_counts.consts + named_type_param_count + synth_type_param_count, - gen_args.num_generic_params(), + named_const_param_count + named_type_param_count, + named_const_param_count + named_type_param_count + synth_type_param_count, + provided, param_counts.lifetimes + has_self as usize, gen_args.num_lifetime_params(), ) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/lint.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/lint.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/lint.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/lint.rs 2023-12-21 16:55:28.000000000 +0000 @@ -18,18 +18,26 @@ if let hir::Node::Item(hir::Item { kind: hir::ItemKind::Impl(hir::Impl { - self_ty: impl_self_ty, of_trait: Some(of_trait_ref), generics, .. + self_ty: impl_self_ty, + of_trait: Some(of_trait_ref), + generics, + .. }), .. 
- }) = tcx.hir().get_by_def_id(parent_id) && self_ty.hir_id == impl_self_ty.hir_id + }) = tcx.hir().get_by_def_id(parent_id) + && self_ty.hir_id == impl_self_ty.hir_id { if !of_trait_ref.trait_def_id().is_some_and(|def_id| def_id.is_local()) { return; } let of_trait_span = of_trait_ref.path.span; // make sure that we are not calling unwrap to abort during the compilation - let Ok(impl_trait_name) = tcx.sess.source_map().span_to_snippet(self_ty.span) else { return; }; - let Ok(of_trait_name) = tcx.sess.source_map().span_to_snippet(of_trait_span) else { return; }; + let Ok(impl_trait_name) = tcx.sess.source_map().span_to_snippet(self_ty.span) else { + return; + }; + let Ok(of_trait_name) = tcx.sess.source_map().span_to_snippet(of_trait_span) else { + return; + }; // check if the trait has generics, to make a correct suggestion let param_name = generics.params.next_type_param_name(None); @@ -39,13 +47,12 @@ (generics.span, format!("<{param_name}: {impl_trait_name}>")) }; diag.multipart_suggestion( - format!("alternatively use a blanket \ + format!( + "alternatively use a blanket \ implementation to implement `{of_trait_name}` for \ - all types that also implement `{impl_trait_name}`"), - vec![ - (self_ty.span, param_name), - add_generic_sugg, - ], + all types that also implement `{impl_trait_name}`" + ), + vec![(self_ty.span, param_name), add_generic_sugg], Applicability::MaybeIncorrect, ); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -36,7 +36,7 @@ use rustc_session::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS; use rustc_span::edit_distance::find_best_match_for_name; use rustc_span::symbol::{kw, Ident, Symbol}; -use rustc_span::{sym, Span, DUMMY_SP}; +use rustc_span::{sym, BytePos, Span, DUMMY_SP}; use rustc_target::spec::abi; use rustc_trait_selection::traits::wf::object_region_bounds; use rustc_trait_selection::traits::{self, NormalizeExt, ObligationCtxt}; @@ -567,9 +567,10 @@ ); if let ty::BoundConstness::ConstIfConst = constness - && generics.has_self && !tcx.has_attr(def_id, sym::const_trait) + && generics.has_self + && !tcx.has_attr(def_id, sym::const_trait) { - tcx.sess.emit_err(crate::errors::ConstBoundForNonConstTrait { span } ); + tcx.sess.emit_err(crate::errors::ConstBoundForNonConstTrait { span }); } (args, arg_count) @@ -915,7 +916,7 @@ // Type aliases defined in crates that have the // feature `lazy_type_alias` enabled get encoded as a type alias that normalization will // then actually instantiate the where bounds of. 
- let alias_ty = tcx.mk_alias_ty(did, args); + let alias_ty = ty::AliasTy::new(tcx, did, args); Ty::new_alias(tcx, ty::Weak, alias_ty) } else { tcx.at(span).type_of(did).instantiate(tcx, args) @@ -1017,7 +1018,7 @@ } err.span_suggestions( span, - "use the fully-qualified path", + "use fully-qualified syntax", suggestions, Applicability::MachineApplicable, ); @@ -1061,6 +1062,7 @@ ) }, param_name, + Some(ty_param_def_id), assoc_name, span, None, @@ -1074,6 +1076,7 @@ &self, all_candidates: impl Fn() -> I, ty_param_name: impl Display, + ty_param_def_id: Option, assoc_name: Ident, span: Span, is_equality: Option>, @@ -1088,13 +1091,15 @@ self.trait_defines_associated_item_named(r.def_id(), ty::AssocKind::Const, assoc_name) }); - let (bound, next_cand) = match (matching_candidates.next(), const_candidates.next()) { + let (mut bound, mut next_cand) = match (matching_candidates.next(), const_candidates.next()) + { (Some(bound), _) => (bound, matching_candidates.next()), (None, Some(bound)) => (bound, const_candidates.next()), (None, None) => { let reported = self.complain_about_assoc_type_not_found( all_candidates, &ty_param_name.to_string(), + ty_param_def_id, assoc_name, span, ); @@ -1103,6 +1108,37 @@ }; debug!(?bound); + // look for a candidate that is not the same as our first bound, disregarding + // whether the bound is const. + while let Some(mut bound2) = next_cand { + debug!(?bound2); + let tcx = self.tcx(); + if bound2.bound_vars() != bound.bound_vars() { + break; + } + + let generics = tcx.generics_of(bound.def_id()); + let Some(host_index) = generics.host_effect_index else { break }; + + // always return the bound that contains the host param. + if let ty::ConstKind::Param(_) = bound2.skip_binder().args.const_at(host_index).kind() { + (bound, bound2) = (bound2, bound); + } + + let unconsted_args = bound + .skip_binder() + .args + .iter() + .enumerate() + .map(|(n, arg)| if host_index == n { tcx.consts.true_.into() } else { arg }); + + if unconsted_args.eq(bound2.skip_binder().args.iter()) { + next_cand = matching_candidates.next().or_else(|| const_candidates.next()); + } else { + break; + } + } + if let Some(bound2) = next_cand { debug!(?bound2); @@ -1142,30 +1178,26 @@ err.span_label( bound_span, format!( - "ambiguous `{}` from `{}`", - assoc_name, + "ambiguous `{assoc_name}` from `{}`", bound.print_only_trait_path(), ), ); if let Some(constraint) = &is_equality { where_bounds.push(format!( - " T: {trait}::{assoc} = {constraint}", + " T: {trait}::{assoc_name} = {constraint}", trait=bound.print_only_trait_path(), - assoc=assoc_name, - constraint=constraint, )); } else { err.span_suggestion_verbose( span.with_hi(assoc_name.span.lo()), - "use fully qualified syntax to disambiguate", - format!("<{} as {}>::", ty_param_name, bound.print_only_trait_path()), + "use fully-qualified syntax to disambiguate", + format!("<{ty_param_name} as {}>::", bound.print_only_trait_path()), Applicability::MaybeIncorrect, ); } } else { err.note(format!( - "associated type `{}` could derive from `{}`", - ty_param_name, + "associated type `{ty_param_name}` could derive from `{}`", bound.print_only_trait_path(), )); } @@ -1173,8 +1205,7 @@ if !where_bounds.is_empty() { err.help(format!( "consider introducing a new type parameter `T` and adding `where` constraints:\ - \n where\n T: {},\n{}", - ty_param_name, + \n where\n T: {ty_param_name},\n{}", where_bounds.join(",\n"), )); } @@ -1275,8 +1306,10 @@ return; }; // Get the span of the generics args *including* the leading `::`. 
- let args_span = - assoc_segment.ident.span.shrink_to_hi().to(args.span_ext); + // We do so by stretching args.span_ext to the left by 2. Earlier + // it was done based on the end of assoc segment but that sometimes + // led to impossible spans and caused issues like #116473 + let args_span = args.span_ext.with_lo(args.span_ext.lo() - BytePos(2)); if tcx.generics_of(adt_def.did()).count() == 0 { // FIXME(estebank): we could also verify that the arguments being // work for the `enum`, instead of just looking if it takes *any*. @@ -1394,6 +1427,7 @@ ) }, kw::SelfUpper, + None, assoc_ident, span, None, @@ -1684,7 +1718,7 @@ .chain(args.into_iter().skip(parent_args.len())), ); - let ty = Ty::new_alias(tcx, ty::Inherent, tcx.mk_alias_ty(assoc_item, args)); + let ty = Ty::new_alias(tcx, ty::Inherent, ty::AliasTy::new(tcx, assoc_item, args)); return Ok(Some((ty, assoc_item))); } @@ -1917,9 +1951,12 @@ } else { Some(( match segment.res { - Res::PrimTy(ty) => format!("{} `{}`", segment.res.descr(), ty.name()), + Res::PrimTy(ty) => { + format!("{} `{}`", segment.res.descr(), ty.name()) + } Res::Def(_, def_id) - if let Some(name) = self.tcx().opt_item_name(def_id) => { + if let Some(name) = self.tcx().opt_item_name(def_id) => + { format!("{} `{name}`", segment.res.descr()) } Res::Err => "this type".to_string(), @@ -2249,7 +2286,9 @@ err.note(msg); } for segment in path.segments { - if let Some(args) = segment.args && segment.ident.name == kw::SelfUpper { + if let Some(args) = segment.args + && segment.ident.name == kw::SelfUpper + { if generics == 0 { // FIXME(estebank): we could also verify that the arguments being // work for the `enum`, instead of just looking if it takes *any*. @@ -2631,7 +2670,9 @@ .iter() .enumerate() .map(|(i, a)| { - if let hir::TyKind::Infer = a.kind && !self.allow_ty_infer() { + if let hir::TyKind::Infer = a.kind + && !self.allow_ty_infer() + { if let Some(suggested_ty) = self.suggest_trait_fn_ty_for_impl_fn_infer(hir_id, Some(i)) { @@ -2660,7 +2701,7 @@ self.ast_ty_to_ty(output) } } - hir::FnRetTy::DefaultReturn(..) => Ty::new_unit(tcx,), + hir::FnRetTy::DefaultReturn(..) => Ty::new_unit(tcx), }; debug!(?output_ty); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/object_safety.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/object_safety.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/object_safety.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/astconv/object_safety.rs 2023-12-21 16:55:28.000000000 +0000 @@ -380,7 +380,7 @@ span, E0228, "the lifetime bound for this object type cannot be deduced \ - from context; please supply an explicit bound" + from context; please supply an explicit bound" ); let e = if borrowed { // We will have already emitted an error E0106 complaining about a diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/bounds.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/bounds.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/bounds.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/bounds.rs 2023-12-21 16:55:28.000000000 +0000 @@ -44,6 +44,34 @@ span: Span, polarity: ty::ImplPolarity, ) { + self.push_trait_bound_inner(tcx, trait_ref, span, polarity); + + // push a non-const (`host = true`) version of the bound if it is `~const`. 
+ if tcx.features().effects + && let Some(host_effect_idx) = tcx.generics_of(trait_ref.def_id()).host_effect_index + && trait_ref.skip_binder().args.const_at(host_effect_idx) != tcx.consts.true_ + { + let generics = tcx.generics_of(trait_ref.def_id()); + let Some(host_index) = generics.host_effect_index else { return }; + let trait_ref = trait_ref.map_bound(|mut trait_ref| { + trait_ref.args = + tcx.mk_args_from_iter(trait_ref.args.iter().enumerate().map(|(n, arg)| { + if host_index == n { tcx.consts.true_.into() } else { arg } + })); + trait_ref + }); + + self.push_trait_bound_inner(tcx, trait_ref, span, polarity); + } + } + + fn push_trait_bound_inner( + &mut self, + tcx: TyCtxt<'tcx>, + trait_ref: ty::PolyTraitRef<'tcx>, + span: Span, + polarity: ty::ImplPolarity, + ) { self.clauses.push(( trait_ref .map_bound(|trait_ref| { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/check.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/check.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/check.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/check.rs 2023-12-21 16:55:28.000000000 +0000 @@ -128,7 +128,11 @@ let param_env = tcx.param_env(item_def_id); for field in &def.non_enum_variant().fields { - let field_ty = tcx.normalize_erasing_regions(param_env, field.ty(tcx, args)); + let Ok(field_ty) = tcx.try_normalize_erasing_regions(param_env, field.ty(tcx, args)) + else { + tcx.sess.delay_span_bug(span, "could not normalize field type"); + continue; + }; if !allowed_union_field(field_ty, tcx, param_env) { let (field_span, ty_span) = match tcx.hir().get_if_local(field.did) { @@ -291,7 +295,7 @@ let opaque_ty = Ty::new_opaque(tcx, def_id.to_def_id(), args); - // `ReErased` regions appear in the "parent_args" of closures/generators. + // `ReErased` regions appear in the "parent_args" of closures/coroutines. // We're ignoring them here and replacing them with fresh region variables. // See tests in ui/type-alias-impl-trait/closure_{parent_args,wf_outlives}.rs. // @@ -327,7 +331,7 @@ // version. 
let errors = ocx.select_all_or_error(); if !errors.is_empty() { - let guar = infcx.err_ctxt().report_fulfillment_errors(&errors); + let guar = infcx.err_ctxt().report_fulfillment_errors(errors); return Err(guar); } match origin { @@ -481,8 +485,7 @@ fn_maybe_err(tcx, assoc_item.ident(tcx).span, abi); } ty::AssocKind::Type if assoc_item.defaultness(tcx).has_value() => { - let trait_args = - GenericArgs::identity_for_item(tcx, id.owner_id); + let trait_args = GenericArgs::identity_for_item(tcx, id.owner_id); let _: Result<_, rustc_errors::ErrorGuaranteed> = check_type_bounds( tcx, assoc_item, @@ -502,7 +505,8 @@ } DefKind::OpaqueTy => { let origin = tcx.opaque_type_origin(id.owner_id.def_id); - if let hir::OpaqueTyOrigin::FnReturn(fn_def_id) | hir::OpaqueTyOrigin::AsyncFn(fn_def_id) = origin + if let hir::OpaqueTyOrigin::FnReturn(fn_def_id) + | hir::OpaqueTyOrigin::AsyncFn(fn_def_id) = origin && let hir::Node::TraitItem(trait_item) = tcx.hir().get_by_def_id(fn_def_id) && let (_, hir::TraitFn::Required(..)) = trait_item.expect_fn() { @@ -589,7 +593,9 @@ } DefKind::GlobalAsm => { let it = tcx.hir().item(id); - let hir::ItemKind::GlobalAsm(asm) = it.kind else { span_bug!(it.span, "DefKind::GlobalAsm but got {:#?}", it) }; + let hir::ItemKind::GlobalAsm(asm) = it.kind else { + span_bug!(it.span, "DefKind::GlobalAsm but got {:#?}", it) + }; InlineAsmCtxt::new_global_asm(tcx).check_asm(asm, id.owner_id.def_id); } _ => {} @@ -783,21 +789,21 @@ let (msg, feature) = if tcx.asyncness(def_id).is_async() { ( format!("async {descr} in trait cannot be specialized"), - sym::async_fn_in_trait, + "async functions in traits", ) } else { ( format!( "{descr} with return-position `impl Trait` in trait cannot be specialized" ), - sym::return_position_impl_trait_in_trait, + "return position `impl Trait` in traits", ) }; tcx.sess .struct_span_err(tcx.def_span(def_id), msg) .note(format!( "specialization behaves in inconsistent and \ - surprising ways with `#![feature({feature})]`, \ + surprising ways with {feature}, \ and for now is disallowed" )) .emit(); @@ -873,10 +879,7 @@ ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::RawPtr(_) => (), // struct(u8, u8, u8, u8) is ok ty::Array(t, _) if matches!(t.kind(), ty::Param(_)) => (), // pass struct([T; N]) through, let monomorphization catch errors ty::Array(t, _clen) - if matches!( - t.kind(), - ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::RawPtr(_) - ) => + if matches!(t.kind(), ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::RawPtr(_)) => { /* struct([f32; 4]) is ok */ } _ => { struct_span_err!( @@ -899,17 +902,17 @@ for attr in tcx.get_attrs(def.did(), sym::repr) { for r in attr::parse_repr_attr(&tcx.sess, attr) { if let attr::ReprPacked(pack) = r - && let Some(repr_pack) = repr.pack - && pack as u64 != repr_pack.bytes() - { - struct_span_err!( - tcx.sess, - sp, - E0634, - "type has conflicting packed representation hints" - ) - .emit(); - } + && let Some(repr_pack) = repr.pack + && pack as u64 != repr_pack.bytes() + { + struct_span_err!( + tcx.sess, + sp, + E0634, + "type has conflicting packed representation hints" + ) + .emit(); + } } } if repr.align.is_some() { @@ -1174,7 +1177,8 @@ let (span, display_discr) = match var.discr { ty::VariantDiscr::Explicit(discr_def_id) => { // In the case the discriminant is both a duplicate and overflowed, let the user know - if let hir::Node::AnonConst(expr) = tcx.hir().get_by_def_id(discr_def_id.expect_local()) + if let hir::Node::AnonConst(expr) = + tcx.hir().get_by_def_id(discr_def_id.expect_local()) && let 
hir::ExprKind::Lit(lit) = &tcx.hir().body(expr.body).value.kind && let rustc_ast::LitKind::Int(lit_value, _int_kind) = &lit.node && *lit_value != dis.val @@ -1303,15 +1307,9 @@ && let ty::GenericParamDefKind::Type { .. } = param.kind { let span = tcx.def_span(param.def_id); - struct_span_err!( - tcx.sess, - span, - E0091, - "type parameter `{}` is unused", - param.name, - ) - .span_label(span, "unused type parameter") - .emit(); + struct_span_err!(tcx.sess, span, E0091, "type parameter `{}` is unused", param.name,) + .span_label(span, "unused type parameter") + .emit(); } } } @@ -1400,7 +1398,7 @@ self.opaques.push(def); ControlFlow::Continue(()) } - ty::Closure(def_id, ..) | ty::Generator(def_id, ..) => { + ty::Closure(def_id, ..) | ty::Coroutine(def_id, ..) => { self.closures.push(def_id); t.super_visit_with(self) } @@ -1430,7 +1428,10 @@ let mut label_match = |ty: Ty<'_>, span| { for arg in ty.walk() { if let ty::GenericArgKind::Type(ty) = arg.unpack() - && let ty::Alias(ty::Opaque, ty::AliasTy { def_id: captured_def_id, .. }) = *ty.kind() + && let ty::Alias( + ty::Opaque, + ty::AliasTy { def_id: captured_def_id, .. }, + ) = *ty.kind() && captured_def_id == opaque_def_id.to_def_id() { err.span_label( @@ -1449,11 +1450,11 @@ { label_match(capture.place.ty(), capture.get_path_span(tcx)); } - // Label any generator locals that capture the opaque - if let DefKind::Generator = tcx.def_kind(closure_def_id) - && let Some(generator_layout) = tcx.mir_generator_witnesses(closure_def_id) + // Label any coroutine locals that capture the opaque + if let DefKind::Coroutine = tcx.def_kind(closure_def_id) + && let Some(coroutine_layout) = tcx.mir_coroutine_witnesses(closure_def_id) { - for interior_ty in &generator_layout.field_tys { + for interior_ty in &coroutine_layout.field_tys { label_match(interior_ty.ty, interior_ty.source_info.span); } } @@ -1467,14 +1468,14 @@ err.emit() } -pub(super) fn check_generator_obligations(tcx: TyCtxt<'_>, def_id: LocalDefId) { - debug_assert!(matches!(tcx.def_kind(def_id), DefKind::Generator)); +pub(super) fn check_coroutine_obligations(tcx: TyCtxt<'_>, def_id: LocalDefId) { + debug_assert!(matches!(tcx.def_kind(def_id), DefKind::Coroutine)); let typeck = tcx.typeck(def_id); let param_env = tcx.param_env(def_id); - let generator_interior_predicates = &typeck.generator_interior_predicates[&def_id]; - debug!(?generator_interior_predicates); + let coroutine_interior_predicates = &typeck.coroutine_interior_predicates[&def_id]; + debug!(?coroutine_interior_predicates); let infcx = tcx .infer_ctxt() @@ -1486,15 +1487,15 @@ .build(); let mut fulfillment_cx = >::new(&infcx); - for (predicate, cause) in generator_interior_predicates { + for (predicate, cause) in coroutine_interior_predicates { let obligation = Obligation::new(tcx, cause.clone(), param_env, *predicate); fulfillment_cx.register_predicate_obligation(&infcx, obligation); } if (tcx.features().unsized_locals || tcx.features().unsized_fn_params) - && let Some(generator) = tcx.mir_generator_witnesses(def_id) + && let Some(coroutine) = tcx.mir_coroutine_witnesses(def_id) { - for field_ty in generator.field_tys.iter() { + for field_ty in coroutine.field_tys.iter() { fulfillment_cx.register_bound( &infcx, param_env, @@ -1503,7 +1504,7 @@ ObligationCause::new( field_ty.source_info.span, def_id, - ObligationCauseCode::SizedGeneratorInterior(def_id), + ObligationCauseCode::SizedCoroutineInterior(def_id), ), ); } @@ -1512,6 +1513,6 @@ let errors = fulfillment_cx.select_all_or_error(&infcx); debug!(?errors); if 
!errors.is_empty() { - infcx.err_ctxt().report_fulfillment_errors(&errors); + infcx.err_ctxt().report_fulfillment_errors(errors); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs 2023-12-21 16:55:28.000000000 +0000 @@ -23,8 +23,12 @@ if !tcx.impl_method_has_trait_impl_trait_tys(impl_m.def_id) { return; } - // crate-private traits don't have any library guarantees, there's no need to do this check. - if !tcx.visibility(trait_m.container_id(tcx)).is_public() { + // unreachable traits don't have any library guarantees, there's no need to do this check. + if trait_m + .container_id(tcx) + .as_local() + .is_some_and(|trait_def_id| !tcx.effective_visibilities(()).is_reachable(trait_def_id)) + { return; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs 2023-12-21 16:55:28.000000000 +0000 @@ -16,6 +16,7 @@ use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::fold::BottomUpFolder; use rustc_middle::ty::util::ExplicitSelf; +use rustc_middle::ty::ToPredicate; use rustc_middle::ty::{ self, GenericArgs, Ty, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, }; @@ -322,7 +323,7 @@ // FIXME(-Ztrait-solver=next): Not needed when the hack below is removed. let errors = ocx.select_where_possible(); if !errors.is_empty() { - let reported = infcx.err_ctxt().report_fulfillment_errors(&errors); + let reported = infcx.err_ctxt().report_fulfillment_errors(errors); return Err(reported); } @@ -393,7 +394,7 @@ }); } CheckImpliedWfMode::Skip => { - let reported = infcx.err_ctxt().report_fulfillment_errors(&errors); + let reported = infcx.err_ctxt().report_fulfillment_errors(errors); return Err(reported); } } @@ -632,8 +633,6 @@ /// For example, given the sample code: /// /// ``` -/// #![feature(return_position_impl_trait_in_trait)] -/// /// use std::ops::Deref; /// /// trait Foo { @@ -873,7 +872,7 @@ // RPITs. let errors = ocx.select_all_or_error(); if !errors.is_empty() { - let reported = infcx.err_ctxt().report_fulfillment_errors(&errors); + let reported = infcx.err_ctxt().report_fulfillment_errors(errors); return Err(reported); } @@ -1010,7 +1009,11 @@ }); self.types.insert(proj.def_id, (infer_ty, proj.args)); // Recurse into bounds - for (pred, pred_span) in self.interner().explicit_item_bounds(proj.def_id).iter_instantiated_copied(self.interner(), proj.args) { + for (pred, pred_span) in self + .interner() + .explicit_item_bounds(proj.def_id) + .iter_instantiated_copied(self.interner(), proj.args) + { let pred = pred.fold_with(self); let pred = self.ocx.normalize( &ObligationCause::misc(self.span, self.body_id), @@ -1181,14 +1184,15 @@ if trait_sig.inputs().len() == *i { // Suggestion to change output type. We do not suggest in `async` functions // to avoid complex logic or incorrect output. 
- if let ImplItemKind::Fn(sig, _) = &tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind + if let ImplItemKind::Fn(sig, _) = + &tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind && !sig.header.asyncness.is_async() { let msg = "change the output type to match the trait"; let ap = Applicability::MachineApplicable; match sig.decl.output { hir::FnRetTy::DefaultReturn(sp) => { - let sugg = format!("-> {} ", trait_sig.output()); + let sugg = format!(" -> {}", trait_sig.output()); diag.span_suggestion_verbose(sp, msg, sugg, ap); } hir::FnRetTy::Return(hir_ty) => { @@ -1553,38 +1557,24 @@ DiagnosticId::Error("E0049".into()), ); - let mut suffix = None; - + let msg = + format!("expected {trait_count} {kind} parameter{}", pluralize!(trait_count),); if let Some(spans) = trait_spans { let mut spans = spans.iter(); if let Some(span) = spans.next() { - err.span_label( - *span, - format!( - "expected {} {} parameter{}", - trait_count, - kind, - pluralize!(trait_count), - ), - ); + err.span_label(*span, msg); } for span in spans { err.span_label(*span, ""); } } else { - suffix = Some(format!(", expected {trait_count}")); + err.span_label(tcx.def_span(trait_.def_id), msg); } if let Some(span) = span { err.span_label( span, - format!( - "found {} {} parameter{}{}", - impl_count, - kind, - pluralize!(impl_count), - suffix.unwrap_or_default(), - ), + format!("found {} {} parameter{}", impl_count, kind, pluralize!(impl_count),), ); } @@ -2049,7 +2039,7 @@ // version. let errors = ocx.select_all_or_error(); if !errors.is_empty() { - return Err(infcx.err_ctxt().report_fulfillment_errors(&errors)); + return Err(infcx.err_ctxt().report_fulfillment_errors(errors)); } let outlives_env = OutlivesEnvironment::new(param_env); @@ -2142,7 +2132,7 @@ // version. let errors = ocx.select_all_or_error(); if !errors.is_empty() { - let reported = infcx.err_ctxt().report_fulfillment_errors(&errors); + let reported = infcx.err_ctxt().report_fulfillment_errors(errors); return Err(reported); } @@ -2173,127 +2163,9 @@ impl_trait_ref: ty::TraitRef<'tcx>, ) -> Result<(), ErrorGuaranteed> { let param_env = tcx.param_env(impl_ty.def_id); - let container_id = impl_ty.container_id(tcx); - // Given - // - // impl Foo for (A, B) { - // type Bar = Wrapper - // } - // - // - `impl_trait_ref` would be `<(A, B) as Foo>` - // - `normalize_impl_ty_args` would be `[A, B, ^0.0]` (`^0.0` here is the bound var with db 0 and index 0) - // - `normalize_impl_ty` would be `Wrapper` - // - `rebased_args` would be `[(A, B), u32, ^0.0]`, combining the args from - // the *trait* with the generic associated type parameters (as bound vars). - // - // A note regarding the use of bound vars here: - // Imagine as an example - // ``` - // trait Family { - // type Member; - // } - // - // impl Family for VecFamily { - // type Member = i32; - // } - // ``` - // Here, we would generate - // ```notrust - // forall { Normalize(::Member => i32) } - // ``` - // when we really would like to generate - // ```notrust - // forall { Normalize(::Member => i32) :- Implemented(C: Eq) } - // ``` - // But, this is probably fine, because although the first clause can be used with types C that - // do not implement Eq, for it to cause some kind of problem, there would have to be a - // VecFamily::Member for some type X where !(X: Eq), that appears in the value of type - // Member = .... 
That type would fail a well-formedness check that we ought to be doing - // elsewhere, which would check that any ::Member meets the bounds declared in - // the trait (notably, that X: Eq and T: Family). - let mut bound_vars: smallvec::SmallVec<[ty::BoundVariableKind; 8]> = - smallvec::SmallVec::with_capacity(tcx.generics_of(impl_ty.def_id).params.len()); - // Extend the impl's identity args with late-bound GAT vars - let normalize_impl_ty_args = ty::GenericArgs::identity_for_item(tcx, container_id).extend_to( - tcx, - impl_ty.def_id, - |param, _| match param.kind { - GenericParamDefKind::Type { .. } => { - let kind = ty::BoundTyKind::Param(param.def_id, param.name); - let bound_var = ty::BoundVariableKind::Ty(kind); - bound_vars.push(bound_var); - Ty::new_bound( - tcx, - ty::INNERMOST, - ty::BoundTy { var: ty::BoundVar::from_usize(bound_vars.len() - 1), kind }, - ) - .into() - } - GenericParamDefKind::Lifetime => { - let kind = ty::BoundRegionKind::BrNamed(param.def_id, param.name); - let bound_var = ty::BoundVariableKind::Region(kind); - bound_vars.push(bound_var); - ty::Region::new_late_bound( - tcx, - ty::INNERMOST, - ty::BoundRegion { var: ty::BoundVar::from_usize(bound_vars.len() - 1), kind }, - ) - .into() - } - GenericParamDefKind::Const { .. } => { - let bound_var = ty::BoundVariableKind::Const; - bound_vars.push(bound_var); - ty::Const::new_bound( - tcx, - ty::INNERMOST, - ty::BoundVar::from_usize(bound_vars.len() - 1), - tcx.type_of(param.def_id) - .no_bound_vars() - .expect("const parameter types cannot be generic"), - ) - .into() - } - }, - ); - // When checking something like - // - // trait X { type Y: PartialEq<::Y> } - // impl X for T { default type Y = S; } - // - // We will have to prove the bound S: PartialEq<::Y>. In this case - // we want ::Y to normalize to S. This is valid because we are - // checking the default value specifically here. Add this equality to the - // ParamEnv for normalization specifically. - let normalize_impl_ty = tcx.type_of(impl_ty.def_id).instantiate(tcx, normalize_impl_ty_args); - let rebased_args = normalize_impl_ty_args.rebase_onto(tcx, container_id, impl_trait_ref.args); - let bound_vars = tcx.mk_bound_variable_kinds(&bound_vars); - let normalize_param_env = { - let mut predicates = param_env.caller_bounds().iter().collect::>(); - match normalize_impl_ty.kind() { - ty::Alias(ty::Projection, proj) - if proj.def_id == trait_ty.def_id && proj.args == rebased_args => - { - // Don't include this predicate if the projected type is - // exactly the same as the projection. This can occur in - // (somewhat dubious) code like this: - // - // impl X for T where T: X { type Y = ::Y; } - } - _ => predicates.push(ty::Clause::from_projection_clause( - tcx, - ty::Binder::bind_with_vars( - ty::ProjectionPredicate { - projection_ty: tcx.mk_alias_ty(trait_ty.def_id, rebased_args), - term: normalize_impl_ty.into(), - }, - bound_vars, - ), - )), - }; - ty::ParamEnv::new(tcx.mk_clauses(&predicates), Reveal::UserFacing) - }; - debug!(?normalize_param_env); + debug!(?param_env); + let container_id = impl_ty.container_id(tcx); let impl_ty_def_id = impl_ty.def_id.expect_local(); let impl_ty_args = GenericArgs::identity_for_item(tcx, impl_ty.def_id); let rebased_args = impl_ty_args.rebase_onto(tcx, container_id, impl_trait_ref.args); @@ -2345,6 +2217,11 @@ .collect(); debug!("check_type_bounds: item_bounds={:?}", obligations); + // Normalize predicates with the assumption that the GAT may always normalize + // to its definition type. 
This should be the param-env we use to *prove* the + // predicate too, but we don't do that because of performance issues. + // See . + let normalize_param_env = param_env_with_gat_bounds(tcx, impl_ty, impl_trait_ref); for mut obligation in util::elaborate(tcx, obligations) { let normalized_predicate = ocx.normalize(&normalize_cause, normalize_param_env, obligation.predicate); @@ -2357,7 +2234,7 @@ // version. let errors = ocx.select_all_or_error(); if !errors.is_empty() { - let reported = infcx.err_ctxt().report_fulfillment_errors(&errors); + let reported = infcx.err_ctxt().report_fulfillment_errors(errors); return Err(reported); } @@ -2368,6 +2245,171 @@ ocx.resolve_regions_and_report_errors(impl_ty_def_id, &outlives_env) } +/// Install projection predicates that allow GATs to project to their own +/// definition types. This is not allowed in general in cases of default +/// associated types in trait definitions, or when specialization is involved, +/// but is needed when checking these definition types actually satisfy the +/// trait bounds of the GAT. +/// +/// # How it works +/// +/// ```ignore (example) +/// impl Foo for (A, B) { +/// type Bar = Wrapper +/// } +/// ``` +/// +/// - `impl_trait_ref` would be `<(A, B) as Foo>` +/// - `normalize_impl_ty_args` would be `[A, B, ^0.0]` (`^0.0` here is the bound var with db 0 and index 0) +/// - `normalize_impl_ty` would be `Wrapper` +/// - `rebased_args` would be `[(A, B), u32, ^0.0]`, combining the args from +/// the *trait* with the generic associated type parameters (as bound vars). +/// +/// A note regarding the use of bound vars here: +/// Imagine as an example +/// ``` +/// trait Family { +/// type Member; +/// } +/// +/// impl Family for VecFamily { +/// type Member = i32; +/// } +/// ``` +/// Here, we would generate +/// ```ignore (pseudo-rust) +/// forall { Normalize(::Member => i32) } +/// ``` +/// +/// when we really would like to generate +/// ```ignore (pseudo-rust) +/// forall { Normalize(::Member => i32) :- Implemented(C: Eq) } +/// ``` +/// +/// But, this is probably fine, because although the first clause can be used with types `C` that +/// do not implement `Eq`, for it to cause some kind of problem, there would have to be a +/// `VecFamily::Member` for some type `X` where `!(X: Eq)`, that appears in the value of type +/// `Member = ....` That type would fail a well-formedness check that we ought to be doing +/// elsewhere, which would check that any `::Member` meets the bounds declared in +/// the trait (notably, that `X: Eq` and `T: Family`). +fn param_env_with_gat_bounds<'tcx>( + tcx: TyCtxt<'tcx>, + impl_ty: ty::AssocItem, + impl_trait_ref: ty::TraitRef<'tcx>, +) -> ty::ParamEnv<'tcx> { + let param_env = tcx.param_env(impl_ty.def_id); + let container_id = impl_ty.container_id(tcx); + let mut predicates = param_env.caller_bounds().to_vec(); + + // for RPITITs, we should install predicates that allow us to project all + // of the RPITITs associated with the same body. This is because checking + // the item bounds of RPITITs often involves nested RPITITs having to prove + // bounds about themselves. + let impl_tys_to_install = match impl_ty.opt_rpitit_info { + None => vec![impl_ty], + Some( + ty::ImplTraitInTraitData::Impl { fn_def_id } + | ty::ImplTraitInTraitData::Trait { fn_def_id, .. 
}, + ) => tcx + .associated_types_for_impl_traits_in_associated_fn(fn_def_id) + .iter() + .map(|def_id| tcx.associated_item(*def_id)) + .collect(), + }; + + for impl_ty in impl_tys_to_install { + let trait_ty = match impl_ty.container { + ty::AssocItemContainer::TraitContainer => impl_ty, + ty::AssocItemContainer::ImplContainer => { + tcx.associated_item(impl_ty.trait_item_def_id.unwrap()) + } + }; + + let mut bound_vars: smallvec::SmallVec<[ty::BoundVariableKind; 8]> = + smallvec::SmallVec::with_capacity(tcx.generics_of(impl_ty.def_id).params.len()); + // Extend the impl's identity args with late-bound GAT vars + let normalize_impl_ty_args = ty::GenericArgs::identity_for_item(tcx, container_id) + .extend_to(tcx, impl_ty.def_id, |param, _| match param.kind { + GenericParamDefKind::Type { .. } => { + let kind = ty::BoundTyKind::Param(param.def_id, param.name); + let bound_var = ty::BoundVariableKind::Ty(kind); + bound_vars.push(bound_var); + Ty::new_bound( + tcx, + ty::INNERMOST, + ty::BoundTy { var: ty::BoundVar::from_usize(bound_vars.len() - 1), kind }, + ) + .into() + } + GenericParamDefKind::Lifetime => { + let kind = ty::BoundRegionKind::BrNamed(param.def_id, param.name); + let bound_var = ty::BoundVariableKind::Region(kind); + bound_vars.push(bound_var); + ty::Region::new_late_bound( + tcx, + ty::INNERMOST, + ty::BoundRegion { + var: ty::BoundVar::from_usize(bound_vars.len() - 1), + kind, + }, + ) + .into() + } + GenericParamDefKind::Const { .. } => { + let bound_var = ty::BoundVariableKind::Const; + bound_vars.push(bound_var); + ty::Const::new_bound( + tcx, + ty::INNERMOST, + ty::BoundVar::from_usize(bound_vars.len() - 1), + tcx.type_of(param.def_id) + .no_bound_vars() + .expect("const parameter types cannot be generic"), + ) + .into() + } + }); + // When checking something like + // + // trait X { type Y: PartialEq<::Y> } + // impl X for T { default type Y = S; } + // + // We will have to prove the bound S: PartialEq<::Y>. In this case + // we want ::Y to normalize to S. This is valid because we are + // checking the default value specifically here. Add this equality to the + // ParamEnv for normalization specifically. + let normalize_impl_ty = + tcx.type_of(impl_ty.def_id).instantiate(tcx, normalize_impl_ty_args); + let rebased_args = + normalize_impl_ty_args.rebase_onto(tcx, container_id, impl_trait_ref.args); + let bound_vars = tcx.mk_bound_variable_kinds(&bound_vars); + + match normalize_impl_ty.kind() { + ty::Alias(ty::Projection, proj) + if proj.def_id == trait_ty.def_id && proj.args == rebased_args => + { + // Don't include this predicate if the projected type is + // exactly the same as the projection. 
This can occur in + // (somewhat dubious) code like this: + // + // impl X for T where T: X { type Y = ::Y; } + } + _ => predicates.push( + ty::Binder::bind_with_vars( + ty::ProjectionPredicate { + projection_ty: ty::AliasTy::new(tcx, trait_ty.def_id, rebased_args), + term: normalize_impl_ty.into(), + }, + bound_vars, + ) + .to_predicate(tcx), + ), + }; + } + + ty::ParamEnv::new(tcx.mk_clauses(&predicates), Reveal::UserFacing) +} + fn assoc_item_kind_str(impl_item: &ty::AssocItem) -> &'static str { match impl_item.kind { ty::AssocKind::Const => "const", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/entry.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/entry.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/entry.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/entry.rs 2023-12-21 16:55:28.000000000 +0000 @@ -158,7 +158,7 @@ ocx.register_bound(cause, param_env, norm_return_ty, term_did); let errors = ocx.select_all_or_error(); if !errors.is_empty() { - infcx.err_ctxt().report_fulfillment_errors(&errors); + infcx.err_ctxt().report_fulfillment_errors(errors); error = true; } // now we can take the return type of the given main function diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -90,9 +90,8 @@ use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_middle::ty::{GenericArgs, GenericArgsRef}; use rustc_session::parse::feature_err; -use rustc_span::source_map::DUMMY_SP; use rustc_span::symbol::{kw, Ident}; -use rustc_span::{self, def_id::CRATE_DEF_ID, BytePos, Span, Symbol}; +use rustc_span::{self, def_id::CRATE_DEF_ID, BytePos, Span, Symbol, DUMMY_SP}; use rustc_target::abi::VariantIdx; use rustc_target::spec::abi::Abi; use rustc_trait_selection::traits::error_reporting::suggestions::ReturnsVisitor; @@ -114,7 +113,7 @@ region_scope_tree, collect_return_position_impl_trait_in_trait_tys, compare_impl_const: compare_impl_item::compare_impl_const_raw, - check_generator_obligations: check::check_generator_obligations, + check_coroutine_obligations: check::check_coroutine_obligations, ..*providers }; } @@ -588,7 +587,7 @@ Ok(()) => { let errors = ocx.select_all_or_error(); if !errors.is_empty() { - infcx.err_ctxt().report_fulfillment_errors(&errors); + infcx.err_ctxt().report_fulfillment_errors(errors); return; } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/region.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/region.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/region.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/region.rs 2023-12-21 16:55:28.000000000 +0000 @@ -598,7 +598,7 @@ } // Make sure we visit the initializer first, so expr_and_pat_count remains correct. - // The correct order, as shared between generator_interior, drop_ranges and intravisitor, + // The correct order, as shared between coroutine_interior, drop_ranges and intravisitor, // is to walk initializer, followed by pattern bindings, finally followed by the `else` block. 
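To make the GAT-projection machinery of `param_env_with_gat_bounds` above concrete, here is a small, self-contained sketch in ordinary user code (an assumed example, not taken from the patched sources). Checking the impl requires proving `Vec<C>: PartialEq<<() as Foo>::Bar<C>>`, which only succeeds because the projection is allowed to normalize to its own definition type:

```rust
trait Foo {
    // The GAT's item bound mentions the GAT itself.
    type Bar<C: PartialEq>: PartialEq<Self::Bar<C>>;
}

impl Foo for () {
    // Checking this definition needs `<() as Foo>::Bar<C>` to normalize to
    // `Vec<C>`, i.e. exactly the kind of projection predicate installed above.
    type Bar<C: PartialEq> = Vec<C>;
}
```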
if let Some(expr) = init { visitor.visit_expr(expr); @@ -825,7 +825,7 @@ resolve_local(self, None, Some(&body.value)); } - if body.generator_kind.is_some() { + if body.coroutine_kind.is_some() { self.scope_tree.body_expr_count.insert(body_id, self.expr_and_pat_count); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/wfcheck.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/wfcheck.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/wfcheck.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/check/wfcheck.rs 2023-12-21 16:55:28.000000000 +0000 @@ -93,8 +93,9 @@ span: Span, body_def_id: LocalDefId, f: F, -) where - F: for<'a> FnOnce(&WfCheckingCtxt<'a, 'tcx>), +) -> Result<(), ErrorGuaranteed> +where + F: for<'a> FnOnce(&WfCheckingCtxt<'a, 'tcx>) -> Result<(), ErrorGuaranteed>, { let param_env = tcx.param_env(body_def_id); let infcx = &tcx.infer_ctxt().build(); @@ -105,42 +106,48 @@ if !tcx.features().trivial_bounds { wfcx.check_false_global_bounds() } - f(&mut wfcx); + f(&mut wfcx)?; - let assumed_wf_types = match wfcx.ocx.assumed_wf_types_and_report_errors(param_env, body_def_id) - { - Ok(wf_types) => wf_types, - Err(_guar) => return, - }; + let assumed_wf_types = wfcx.ocx.assumed_wf_types_and_report_errors(param_env, body_def_id)?; let implied_bounds = infcx.implied_bounds_tys(param_env, body_def_id, assumed_wf_types); let errors = wfcx.select_all_or_error(); if !errors.is_empty() { - infcx.err_ctxt().report_fulfillment_errors(&errors); - return; + let err = infcx.err_ctxt().report_fulfillment_errors(errors); + if tcx.sess.err_count() > 0 { + return Err(err); + } else { + // HACK(oli-obk): tests/ui/specialization/min_specialization/specialize_on_type_error.rs causes an + // error (delay_span_bug) during normalization, without reporting an error, so we need to act as if + // no error happened, in order to let our callers continue and report an error later in + // check_impl_items_against_trait. + return Ok(()); + } } let outlives_env = OutlivesEnvironment::with_bounds(param_env, implied_bounds); - let _ = wfcx.ocx.resolve_regions_and_report_errors(body_def_id, &outlives_env); + wfcx.ocx.resolve_regions_and_report_errors(body_def_id, &outlives_env)?; + infcx.tainted_by_errors().error_reported() } -fn check_well_formed(tcx: TyCtxt<'_>, def_id: hir::OwnerId) { +fn check_well_formed(tcx: TyCtxt<'_>, def_id: hir::OwnerId) -> Result<(), ErrorGuaranteed> { let node = tcx.hir().owner(def_id); - match node { - hir::OwnerNode::Crate(_) => {} + let mut res = match node { + hir::OwnerNode::Crate(_) => bug!("check_well_formed cannot be applied to the crate root"), hir::OwnerNode::Item(item) => check_item(tcx, item), hir::OwnerNode::TraitItem(item) => check_trait_item(tcx, item), hir::OwnerNode::ImplItem(item) => check_impl_item(tcx, item), hir::OwnerNode::ForeignItem(item) => check_foreign_item(tcx, item), - } + }; if let Some(generics) = node.generics() { for param in generics.params { - check_param_wf(tcx, param) + res = res.and(check_param_wf(tcx, param)); } } + res } /// Checks that the field types (in a struct def'n) or argument types (in an enum def'n) are @@ -157,7 +164,7 @@ /// not included it frequently leads to confusing errors in fn bodies. So it's better to check /// the types first. 
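The new `Result<(), ErrorGuaranteed>` plumbing in this hunk follows a simple accumulate-with-`and` pattern: every check still runs, but the first `Err` is kept and propagated to the caller. A minimal standalone sketch of that pattern, with `&'static str` standing in for `ErrorGuaranteed` purely for illustration:

```rust
// Run every check, remember the first error (`Result::and` keeps the
// earlier `Err`), and return it at the end -- the same shape as
// `res = res.and(check_...(..))` in the well-formedness checks above.
fn run_checks(checks: &[fn() -> Result<(), &'static str>]) -> Result<(), &'static str> {
    let mut res = Ok(());
    for check in checks {
        res = res.and(check());
    }
    res
}

fn main() {
    let checks: [fn() -> Result<(), &'static str>; 2] =
        [|| Ok(()), || Err("well-formedness error")];
    assert_eq!(run_checks(&checks), Err("well-formedness error"));
}
```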
#[instrument(skip(tcx), level = "debug")] -fn check_item<'tcx>(tcx: TyCtxt<'tcx>, item: &'tcx hir::Item<'tcx>) { +fn check_item<'tcx>(tcx: TyCtxt<'tcx>, item: &'tcx hir::Item<'tcx>) -> Result<(), ErrorGuaranteed> { let def_id = item.owner_id.def_id; debug!( @@ -187,31 +194,32 @@ let is_auto = tcx .impl_trait_ref(def_id) .is_some_and(|trait_ref| tcx.trait_is_auto(trait_ref.skip_binder().def_id)); + let mut res = Ok(()); if let (hir::Defaultness::Default { .. }, true) = (impl_.defaultness, is_auto) { let sp = impl_.of_trait.as_ref().map_or(item.span, |t| t.path.span); let mut err = tcx.sess.struct_span_err(sp, "impls of auto traits cannot be default"); err.span_labels(impl_.defaultness_span, "default because of this"); err.span_label(sp, "auto trait"); - err.emit(); + res = Err(err.emit()); } // We match on both `ty::ImplPolarity` and `ast::ImplPolarity` just to get the `!` span. match (tcx.impl_polarity(def_id), impl_.polarity) { (ty::ImplPolarity::Positive, _) => { - check_impl(tcx, item, impl_.self_ty, &impl_.of_trait); + res = res.and(check_impl(tcx, item, impl_.self_ty, &impl_.of_trait)); } (ty::ImplPolarity::Negative, ast::ImplPolarity::Negative(span)) => { // FIXME(#27579): what amount of WF checking do we need for neg impls? if let hir::Defaultness::Default { .. } = impl_.defaultness { let mut spans = vec![span]; spans.extend(impl_.defaultness_span); - struct_span_err!( + res = Err(struct_span_err!( tcx.sess, spans, E0750, "negative impls cannot be default impls" ) - .emit(); + .emit()); } } (ty::ImplPolarity::Reservation, _) => { @@ -219,49 +227,52 @@ } _ => unreachable!(), } + res } hir::ItemKind::Fn(ref sig, ..) => { - check_item_fn(tcx, def_id, item.ident, item.span, sig.decl); + check_item_fn(tcx, def_id, item.ident, item.span, sig.decl) } hir::ItemKind::Static(ty, ..) => { - check_item_type(tcx, def_id, ty.span, UnsizedHandling::Forbid); + check_item_type(tcx, def_id, ty.span, UnsizedHandling::Forbid) } hir::ItemKind::Const(ty, ..) => { - check_item_type(tcx, def_id, ty.span, UnsizedHandling::Forbid); + check_item_type(tcx, def_id, ty.span, UnsizedHandling::Forbid) } hir::ItemKind::Struct(_, ast_generics) => { - check_type_defn(tcx, item, false); + let res = check_type_defn(tcx, item, false); check_variances_for_type_defn(tcx, item, ast_generics); + res } hir::ItemKind::Union(_, ast_generics) => { - check_type_defn(tcx, item, true); + let res = check_type_defn(tcx, item, true); check_variances_for_type_defn(tcx, item, ast_generics); + res } hir::ItemKind::Enum(_, ast_generics) => { - check_type_defn(tcx, item, true); + let res = check_type_defn(tcx, item, true); check_variances_for_type_defn(tcx, item, ast_generics); + res } - hir::ItemKind::Trait(..) => { - check_trait(tcx, item); - } - hir::ItemKind::TraitAlias(..) => { - check_trait(tcx, item); - } + hir::ItemKind::Trait(..) => check_trait(tcx, item), + hir::ItemKind::TraitAlias(..) => check_trait(tcx, item), // `ForeignItem`s are handled separately. - hir::ItemKind::ForeignMod { .. } => {} + hir::ItemKind::ForeignMod { .. } => Ok(()), hir::ItemKind::TyAlias(hir_ty, ast_generics) => { if tcx.type_alias_is_lazy(item.owner_id) { // Bounds of lazy type aliases and of eager ones that contain opaque types are respected. // E.g: `type X = impl Trait;`, `type X = (impl Trait, Y);`. 
- check_item_type(tcx, def_id, hir_ty.span, UnsizedHandling::Allow); + let res = check_item_type(tcx, def_id, hir_ty.span, UnsizedHandling::Allow); check_variances_for_type_defn(tcx, item, ast_generics); + res + } else { + Ok(()) } } - _ => {} + _ => Ok(()), } } -fn check_foreign_item(tcx: TyCtxt<'_>, item: &hir::ForeignItem<'_>) { +fn check_foreign_item(tcx: TyCtxt<'_>, item: &hir::ForeignItem<'_>) -> Result<(), ErrorGuaranteed> { let def_id = item.owner_id.def_id; debug!( @@ -276,11 +287,14 @@ hir::ForeignItemKind::Static(ty, ..) => { check_item_type(tcx, def_id, ty.span, UnsizedHandling::AllowIfForeignTail) } - hir::ForeignItemKind::Type => (), + hir::ForeignItemKind::Type => Ok(()), } } -fn check_trait_item(tcx: TyCtxt<'_>, trait_item: &hir::TraitItem<'_>) { +fn check_trait_item( + tcx: TyCtxt<'_>, + trait_item: &hir::TraitItem<'_>, +) -> Result<(), ErrorGuaranteed> { let def_id = trait_item.owner_id.def_id; let (method_sig, span) = match trait_item.kind { @@ -289,18 +303,19 @@ _ => (None, trait_item.span), }; check_object_unsafe_self_trait_by_name(tcx, trait_item); - check_associated_item(tcx, def_id, span, method_sig); + let mut res = check_associated_item(tcx, def_id, span, method_sig); if matches!(trait_item.kind, hir::TraitItemKind::Fn(..)) { for &assoc_ty_def_id in tcx.associated_types_for_impl_traits_in_associated_fn(def_id) { - check_associated_item( + res = res.and(check_associated_item( tcx, assoc_ty_def_id.expect_local(), tcx.def_span(assoc_ty_def_id), None, - ); + )); } } + res } /// Require that the user writes where clauses on GATs for the implicit @@ -315,9 +330,10 @@ /// fn into_iter<'a>(&'a self) -> Self::Iter<'a>; /// } /// ``` -fn check_gat_where_clauses(tcx: TyCtxt<'_>, associated_items: &[hir::TraitItemRef]) { +fn check_gat_where_clauses(tcx: TyCtxt<'_>, trait_def_id: LocalDefId) { // Associates every GAT's def_id to a list of possibly missing bounds detected by this lint. let mut required_bounds_by_item = FxHashMap::default(); + let associated_items = tcx.associated_items(trait_def_id); // Loop over all GATs together, because if this lint suggests adding a where-clause bound // to one GAT, it might then require us to an additional bound on another GAT. @@ -326,8 +342,8 @@ // those GATs. loop { let mut should_continue = false; - for gat_item in associated_items { - let gat_def_id = gat_item.id.owner_id; + for gat_item in associated_items.in_definition_order() { + let gat_def_id = gat_item.def_id.expect_local(); let gat_item = tcx.associated_item(gat_def_id); // If this item is not an assoc ty, or has no args, then it's not a GAT if gat_item.kind != ty::AssocKind::Type { @@ -343,8 +359,8 @@ // This is calculated by taking the intersection of the bounds that each item // constrains the GAT with individually. let mut new_required_bounds: Option>> = None; - for item in associated_items { - let item_def_id = item.id.owner_id; + for item in associated_items.in_definition_order() { + let item_def_id = item.def_id.expect_local(); // Skip our own GAT, since it does not constrain itself at all. if item_def_id == gat_def_id { continue; @@ -352,9 +368,9 @@ let param_env = tcx.param_env(item_def_id); - let item_required_bounds = match item.kind { + let item_required_bounds = match tcx.associated_item(item_def_id).kind { // In our example, this corresponds to `into_iter` method - hir::AssocItemKind::Fn { .. } => { + ty::AssocKind::Fn => { // For methods, we check the function signature's return type for any GATs // to constrain. 
In the `into_iter` case, we see that the return type // `Self::Iter<'a>` is a GAT we want to gather any potential missing bounds from. @@ -370,12 +386,12 @@ // We also assume that all of the function signature's parameter types // are well formed. &sig.inputs().iter().copied().collect(), - gat_def_id.def_id, + gat_def_id, gat_generics, ) } // In our example, this corresponds to the `Iter` and `Item` associated types - hir::AssocItemKind::Type => { + ty::AssocKind::Type => { // If our associated item is a GAT with missing bounds, add them to // the param-env here. This allows this GAT to propagate missing bounds // to other GATs. @@ -392,11 +408,11 @@ .instantiate_identity_iter_copied() .collect::>(), &FxIndexSet::default(), - gat_def_id.def_id, + gat_def_id, gat_generics, ) } - hir::AssocItemKind::Const => None, + ty::AssocKind::Const => None, }; if let Some(item_required_bounds) = item_required_bounds { @@ -432,7 +448,12 @@ } for (gat_def_id, required_bounds) in required_bounds_by_item { - let gat_item_hir = tcx.hir().expect_trait_item(gat_def_id.def_id); + // Don't suggest adding `Self: 'a` to a GAT that can't be named + if tcx.is_impl_trait_in_trait(gat_def_id.to_def_id()) { + continue; + } + + let gat_item_hir = tcx.hir().expect_trait_item(gat_def_id); debug!(?required_bounds); let param_env = tcx.param_env(gat_def_id); @@ -442,21 +463,16 @@ ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(a, b)) => { !region_known_to_outlive( tcx, - gat_def_id.def_id, + gat_def_id, param_env, &FxIndexSet::default(), a, b, ) } - ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(a, b)) => !ty_known_to_outlive( - tcx, - gat_def_id.def_id, - param_env, - &FxIndexSet::default(), - a, - b, - ), + ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(a, b)) => { + !ty_known_to_outlive(tcx, gat_def_id, param_env, &FxIndexSet::default(), a, b) + } _ => bug!("Unexpected ClauseKind"), }) .map(|clause| clause.to_string()) @@ -535,7 +551,7 @@ fn gather_gat_bounds<'tcx, T: TypeFoldable>>( tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, - item_def_id: hir::OwnerId, + item_def_id: LocalDefId, to_check: T, wf_tys: &FxIndexSet>, gat_def_id: LocalDefId, @@ -568,7 +584,7 @@ // reflected in a where clause on the GAT itself. for (ty, ty_idx) in &types { // In our example, requires that `Self: 'a` - if ty_known_to_outlive(tcx, item_def_id.def_id, param_env, &wf_tys, *ty, *region_a) { + if ty_known_to_outlive(tcx, item_def_id, param_env, &wf_tys, *ty, *region_a) { debug!(?ty_idx, ?region_a_idx); debug!("required clause: {ty} must outlive {region_a}"); // Translate into the generic parameters of the GAT. In @@ -607,14 +623,7 @@ if matches!(**region_b, ty::ReStatic | ty::ReError(_)) || region_a == region_b { continue; } - if region_known_to_outlive( - tcx, - item_def_id.def_id, - param_env, - &wf_tys, - *region_a, - *region_b, - ) { + if region_known_to_outlive(tcx, item_def_id, param_env, &wf_tys, *region_a, *region_b) { debug!(?region_a_idx, ?region_b_idx); debug!("required clause: {region_a} must outlive {region_b}"); // Translate into the generic parameters of the GAT. @@ -833,7 +842,7 @@ } } -fn check_impl_item(tcx: TyCtxt<'_>, impl_item: &hir::ImplItem<'_>) { +fn check_impl_item(tcx: TyCtxt<'_>, impl_item: &hir::ImplItem<'_>) -> Result<(), ErrorGuaranteed> { let (method_sig, span) = match impl_item.kind { hir::ImplItemKind::Fn(ref sig, _) => (Some(sig), impl_item.span), // Constrain binding and overflow error spans to `` in `type foo = `. 
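The required-bounds computation above is what drives the missing `where Self: 'a` diagnostics on GATs. A standalone sketch of the shape it targets, analogous to the `Iter<'a>`/`into_iter` example in the doc comment (the trait name is made up): because `next` takes `&'a mut self`, every use of the GAT forces the implementor to outlive `'a`, so the where-clause written below is required.

```rust
trait LendingIterator {
    // Without `where Self: 'a` the compiler reports the missing bound that
    // the analysis above computes.
    type Item<'a>
    where
        Self: 'a;

    fn next<'a>(&'a mut self) -> Option<Self::Item<'a>>;
}
```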
@@ -841,13 +850,13 @@ _ => (None, impl_item.span), }; - check_associated_item(tcx, impl_item.owner_id.def_id, span, method_sig); + check_associated_item(tcx, impl_item.owner_id.def_id, span, method_sig) } -fn check_param_wf(tcx: TyCtxt<'_>, param: &hir::GenericParam<'_>) { +fn check_param_wf(tcx: TyCtxt<'_>, param: &hir::GenericParam<'_>) -> Result<(), ErrorGuaranteed> { match param.kind { // We currently only check wf of const params here. - hir::GenericParamKind::Lifetime { .. } | hir::GenericParamKind::Type { .. } => (), + hir::GenericParamKind::Lifetime { .. } | hir::GenericParamKind::Type { .. } => Ok(()), // Const parameters are well formed if their type is structural match. hir::GenericParamKind::Const { ty: hir_ty, default: _ } => { @@ -867,68 +876,67 @@ ty, trait_def_id, ); - }); + Ok(()) + }) } else { - let diag = match ty.kind() { - ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Error(_) => None, - ty::FnPtr(_) => Some(tcx.sess.struct_span_err( + let mut diag = match ty.kind() { + ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Error(_) => return Ok(()), + ty::FnPtr(_) => tcx.sess.struct_span_err( hir_ty.span, "using function pointers as const generic parameters is forbidden", - )), - ty::RawPtr(_) => Some(tcx.sess.struct_span_err( + ), + ty::RawPtr(_) => tcx.sess.struct_span_err( hir_ty.span, "using raw pointers as const generic parameters is forbidden", - )), - _ => Some(tcx.sess.struct_span_err( + ), + _ => tcx.sess.struct_span_err( hir_ty.span, format!("`{}` is forbidden as the type of a const generic parameter", ty), - )), + ), }; - if let Some(mut diag) = diag { - diag.note("the only supported types are integers, `bool` and `char`"); + diag.note("the only supported types are integers, `bool` and `char`"); - let cause = ObligationCause::misc(hir_ty.span, param.def_id); - let may_suggest_feature = match type_allowed_to_implement_const_param_ty( - tcx, - tcx.param_env(param.def_id), - ty, - cause, - ) { - // Can never implement `ConstParamTy`, don't suggest anything. - Err(ConstParamTyImplementationError::NotAnAdtOrBuiltinAllowed) => false, - // May be able to implement `ConstParamTy`. Only emit the feature help - // if the type is local, since the user may be able to fix the local type. - Err(ConstParamTyImplementationError::InfrigingFields(..)) => { - fn ty_is_local(ty: Ty<'_>) -> bool { - match ty.kind() { - ty::Adt(adt_def, ..) => adt_def.did().is_local(), - // Arrays and slices use the inner type's `ConstParamTy`. - ty::Array(ty, ..) => ty_is_local(*ty), - ty::Slice(ty) => ty_is_local(*ty), - // `&` references use the inner type's `ConstParamTy`. - // `&mut` are not supported. - ty::Ref(_, ty, ast::Mutability::Not) => ty_is_local(*ty), - // Say that a tuple is local if any of its components are local. - // This is not strictly correct, but it's likely that the user can fix the local component. - ty::Tuple(tys) => tys.iter().any(|ty| ty_is_local(ty)), - _ => false, - } + let cause = ObligationCause::misc(hir_ty.span, param.def_id); + let may_suggest_feature = match type_allowed_to_implement_const_param_ty( + tcx, + tcx.param_env(param.def_id), + ty, + cause, + ) { + // Can never implement `ConstParamTy`, don't suggest anything. + Err(ConstParamTyImplementationError::NotAnAdtOrBuiltinAllowed) => false, + // May be able to implement `ConstParamTy`. Only emit the feature help + // if the type is local, since the user may be able to fix the local type. 
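For orientation on the `check_param_wf` logic in this hunk (continued just below): on stable Rust only integers, `bool` and `char` are accepted as const generic parameter types, and richer types are pointed at the nightly `adt_const_params` feature. A small illustrative sketch with made-up names:

```rust
// Accepted on stable: integer, `bool` and `char` const parameters.
struct Bitmap<const WIDTH: usize, const PACKED: bool>;

// Rejected without `#![feature(adt_const_params)]`:
// error: `&'static str` is forbidden as the type of a const generic parameter
// struct Tagged<const TAG: &'static str>;

fn main() {
    let _m: Bitmap<32, false> = Bitmap;
}
```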
+ Err(ConstParamTyImplementationError::InfrigingFields(..)) => { + fn ty_is_local(ty: Ty<'_>) -> bool { + match ty.kind() { + ty::Adt(adt_def, ..) => adt_def.did().is_local(), + // Arrays and slices use the inner type's `ConstParamTy`. + ty::Array(ty, ..) => ty_is_local(*ty), + ty::Slice(ty) => ty_is_local(*ty), + // `&` references use the inner type's `ConstParamTy`. + // `&mut` are not supported. + ty::Ref(_, ty, ast::Mutability::Not) => ty_is_local(*ty), + // Say that a tuple is local if any of its components are local. + // This is not strictly correct, but it's likely that the user can fix the local component. + ty::Tuple(tys) => tys.iter().any(|ty| ty_is_local(ty)), + _ => false, } - - ty_is_local(ty) } - // Implments `ConstParamTy`, suggest adding the feature to enable. - Ok(..) => true, - }; - if may_suggest_feature && tcx.sess.is_nightly_build() { - diag.help( + + ty_is_local(ty) + } + // Implments `ConstParamTy`, suggest adding the feature to enable. + Ok(..) => true, + }; + if may_suggest_feature && tcx.sess.is_nightly_build() { + diag.help( "add `#![feature(adt_const_params)]` to the crate attributes to enable more complex and user defined types", ); - } - - diag.emit(); } + + Err(diag.emit()) } } } @@ -940,7 +948,7 @@ item_id: LocalDefId, span: Span, sig_if_method: Option<&hir::FnSig<'_>>, -) { +) -> Result<(), ErrorGuaranteed> { let loc = Some(WellFormedLoc::Ty(item_id)); enter_wf_checking_ctxt(tcx, span, item_id, |wfcx| { let item = tcx.associated_item(item_id); @@ -955,6 +963,7 @@ let ty = tcx.type_of(item.def_id).instantiate_identity(); let ty = wfcx.normalize(span, Some(WellFormedLoc::Ty(item_id)), ty); wfcx.register_wf_obligation(span, loc, ty.into()); + Ok(()) } ty::AssocKind::Fn => { let sig = tcx.fn_sig(item.def_id).instantiate_identity(); @@ -966,7 +975,7 @@ hir_sig.decl, item.def_id.expect_local(), ); - check_method_receiver(wfcx, hir_sig, item, self_ty); + check_method_receiver(wfcx, hir_sig, item, self_ty) } ty::AssocKind::Type => { if let ty::AssocItemContainer::TraitContainer = item.container { @@ -977,6 +986,7 @@ let ty = wfcx.normalize(span, Some(WellFormedLoc::Ty(item_id)), ty); wfcx.register_wf_obligation(span, loc, ty.into()); } + Ok(()) } } }) @@ -992,7 +1002,11 @@ } /// In a type definition, we check that to ensure that the types of the fields are well-formed. -fn check_type_defn<'tcx>(tcx: TyCtxt<'tcx>, item: &hir::Item<'tcx>, all_sized: bool) { +fn check_type_defn<'tcx>( + tcx: TyCtxt<'tcx>, + item: &hir::Item<'tcx>, + all_sized: bool, +) -> Result<(), ErrorGuaranteed> { let _ = tcx.representability(item.owner_id.def_id); let adt_def = tcx.adt_def(item.owner_id); @@ -1087,11 +1101,12 @@ } check_where_clauses(wfcx, item.span, item.owner_id.def_id); - }); + Ok(()) + }) } #[instrument(skip(tcx, item))] -fn check_trait(tcx: TyCtxt<'_>, item: &hir::Item<'_>) { +fn check_trait(tcx: TyCtxt<'_>, item: &hir::Item<'_>) -> Result<(), ErrorGuaranteed> { debug!(?item.owner_id); let def_id = item.owner_id.def_id; @@ -1110,14 +1125,16 @@ } } - enter_wf_checking_ctxt(tcx, item.span, def_id, |wfcx| { - check_where_clauses(wfcx, item.span, def_id) + let res = enter_wf_checking_ctxt(tcx, item.span, def_id, |wfcx| { + check_where_clauses(wfcx, item.span, def_id); + Ok(()) }); // Only check traits, don't check trait aliases - if let hir::ItemKind::Trait(_, _, _, _, items) = item.kind { - check_gat_where_clauses(tcx, items); + if let hir::ItemKind::Trait(..) 
= item.kind { + check_gat_where_clauses(tcx, item.owner_id.def_id); } + res } /// Checks all associated type defaults of trait `trait_def_id`. @@ -1131,11 +1148,11 @@ let wf_obligations = bounds.instantiate_identity_iter_copied().flat_map(|(bound, bound_span)| { let normalized_bound = wfcx.normalize(span, None, bound); - traits::wf::predicate_obligations( + traits::wf::clause_obligations( wfcx.infcx, wfcx.param_env, wfcx.body_def_id, - normalized_bound.as_predicate(), + normalized_bound, bound_span, ) }); @@ -1149,10 +1166,11 @@ ident: Ident, span: Span, decl: &hir::FnDecl<'_>, -) { +) -> Result<(), ErrorGuaranteed> { enter_wf_checking_ctxt(tcx, span, def_id, |wfcx| { let sig = tcx.fn_sig(def_id).instantiate_identity(); check_fn_or_method(wfcx, ident.span, sig, decl, def_id); + Ok(()) }) } @@ -1167,7 +1185,7 @@ item_id: LocalDefId, ty_span: Span, unsized_handling: UnsizedHandling, -) { +) -> Result<(), ErrorGuaranteed> { debug!("check_item_type: {:?}", item_id); enter_wf_checking_ctxt(tcx, ty_span, item_id, |wfcx| { @@ -1207,7 +1225,8 @@ tcx.require_lang_item(LangItem::Sync, Some(ty_span)), ); } - }); + Ok(()) + }) } #[instrument(level = "debug", skip(tcx, ast_self_ty, ast_trait_ref))] @@ -1216,7 +1235,7 @@ item: &'tcx hir::Item<'tcx>, ast_self_ty: &hir::Ty<'_>, ast_trait_ref: &Option>, -) { +) -> Result<(), ErrorGuaranteed> { enter_wf_checking_ctxt(tcx, item.span, item.owner_id.def_id, |wfcx| { match ast_trait_ref { Some(ast_trait_ref) => { @@ -1235,7 +1254,7 @@ wfcx.infcx, wfcx.param_env, wfcx.body_def_id, - &trait_pred, + trait_pred, ast_trait_ref.path.span, item, ); @@ -1265,7 +1284,8 @@ } check_where_clauses(wfcx, item.span, item.owner_id.def_id); - }); + Ok(()) + }) } /// Checks where-clauses and inline bounds that are declared on `def_id`. @@ -1444,13 +1464,7 @@ debug!(?predicates.predicates); assert_eq!(predicates.predicates.len(), predicates.spans.len()); let wf_obligations = predicates.into_iter().flat_map(|(p, sp)| { - traits::wf::predicate_obligations( - infcx, - wfcx.param_env, - wfcx.body_def_id, - p.as_predicate(), - sp, - ) + traits::wf::clause_obligations(infcx, wfcx.param_env, wfcx.body_def_id, p, sp) }); let obligations: Vec<_> = wf_obligations.chain(default_obligations).collect(); wfcx.register_obligations(obligations); @@ -1543,11 +1557,11 @@ fn_sig: &hir::FnSig<'_>, method: ty::AssocItem, self_ty: Ty<'tcx>, -) { +) -> Result<(), ErrorGuaranteed> { let tcx = wfcx.tcx(); if !method.fn_has_self_parameter { - return; + return Ok(()); } let span = fn_sig.decl.inputs[0].span; @@ -1566,11 +1580,11 @@ if tcx.features().arbitrary_self_types { if !receiver_is_valid(wfcx, span, receiver_ty, self_ty, true) { // Report error; `arbitrary_self_types` was enabled. - e0307(tcx, span, receiver_ty); + return Err(e0307(tcx, span, receiver_ty)); } } else { if !receiver_is_valid(wfcx, span, receiver_ty, self_ty, false) { - if receiver_is_valid(wfcx, span, receiver_ty, self_ty, true) { + return Err(if receiver_is_valid(wfcx, span, receiver_ty, self_ty, true) { // Report error; would have worked with `arbitrary_self_types`. feature_err( &tcx.sess.parse_sess, @@ -1582,16 +1596,17 @@ ), ) .help(HELP_FOR_SELF_TYPE) - .emit(); + .emit() } else { // Report error; would not have worked with `arbitrary_self_types`. 
- e0307(tcx, span, receiver_ty); - } + e0307(tcx, span, receiver_ty) + }); } } + Ok(()) } -fn e0307(tcx: TyCtxt<'_>, span: Span, receiver_ty: Ty<'_>) { +fn e0307(tcx: TyCtxt<'_>, span: Span, receiver_ty: Ty<'_>) -> ErrorGuaranteed { struct_span_err!( tcx.sess.diagnostic(), span, @@ -1600,7 +1615,7 @@ ) .note("type of `self` must be `Self` or a type that dereferences to it") .help(HELP_FOR_SELF_TYPE) - .emit(); + .emit() } /// Returns whether `receiver_ty` would be considered a valid receiver type for `self_ty`. If @@ -1892,12 +1907,12 @@ } } -fn check_mod_type_wf(tcx: TyCtxt<'_>, module: LocalModDefId) { +fn check_mod_type_wf(tcx: TyCtxt<'_>, module: LocalModDefId) -> Result<(), ErrorGuaranteed> { let items = tcx.hir_module_items(module); - items.par_items(|item| tcx.ensure().check_well_formed(item.owner_id)); - items.par_impl_items(|item| tcx.ensure().check_well_formed(item.owner_id)); - items.par_trait_items(|item| tcx.ensure().check_well_formed(item.owner_id)); - items.par_foreign_items(|item| tcx.ensure().check_well_formed(item.owner_id)); + let mut res = items.par_items(|item| tcx.ensure().check_well_formed(item.owner_id)); + res = res.and(items.par_impl_items(|item| tcx.ensure().check_well_formed(item.owner_id))); + res = res.and(items.par_trait_items(|item| tcx.ensure().check_well_formed(item.owner_id))); + res.and(items.par_foreign_items(|item| tcx.ensure().check_well_formed(item.owner_id))) } fn error_392( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/builtin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/builtin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/builtin.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/builtin.rs 2023-12-21 16:55:28.000000000 +0000 @@ -162,7 +162,7 @@ // trait, they *do* satisfy the repr(transparent) rules, and then we assume that everything else // in the compiler (in particular, all the call ABI logic) will treat them as repr(transparent) // even if they do not carry that attribute. - use rustc_type_ir::sty::TyKind::*; + use rustc_type_ir::TyKind::*; match (source.kind(), target.kind()) { (&Ref(r_a, _, mutbl_a), Ref(r_b, _, mutbl_b)) if infcx.at(&cause, param_env).eq(DefineOpaqueTypes::No, r_a, *r_b).is_ok() @@ -261,7 +261,7 @@ } let errors = ocx.select_all_or_error(); if !errors.is_empty() { - infcx.err_ctxt().report_fulfillment_errors(&errors); + infcx.err_ctxt().report_fulfillment_errors(errors); } // Finally, resolve all regions. @@ -470,7 +470,7 @@ ocx.register_obligation(obligation); let errors = ocx.select_all_or_error(); if !errors.is_empty() { - infcx.err_ctxt().report_fulfillment_errors(&errors); + infcx.err_ctxt().report_fulfillment_errors(errors); } // Finally, resolve all regions. 
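As a quick illustration of the receiver rule enforced by `check_method_receiver` and `e0307` above (an assumed example, not taken from the patched sources): the type of `self` must be `Self` or a type that dereferences to it; anything else needs the unstable `arbitrary_self_types` feature.

```rust
use std::rc::Rc;

struct Widget;

impl Widget {
    fn by_ref(&self) {}          // `&Self` is always a valid receiver
    fn by_rc(self: Rc<Self>) {}  // `Rc<Self>` dereferences to `Self`
    // fn by_ptr(self: *const Self) {} // E0307 unless `arbitrary_self_types` is enabled
}

fn main() {
    Widget.by_ref();
    Rc::new(Widget).by_rc();
}
```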
@@ -550,9 +550,11 @@ .entry((ty.clone(), predicate.clone())) .or_default() .push(origin.span()); - if let ty::RegionKind::ReEarlyBound(ebr) = *b && ebr.has_name() { - bounds.push((b.to_string(), a.to_string(), None)); - } + if let ty::RegionKind::ReEarlyBound(ebr) = *b + && ebr.has_name() + { + bounds.push((b.to_string(), a.to_string(), None)); + } } RegionResolutionError::GenericBoundFailure(origin, a, b) => { let predicate = format!("{a}: {b}"); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs 2023-12-21 16:55:28.000000000 +0000 @@ -155,8 +155,8 @@ } ty::FnDef(..) | ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Bound(..) | ty::Placeholder(_) | ty::Infer(_) => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/orphan.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/orphan.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/orphan.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/coherence/orphan.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,8 +2,7 @@ //! crate or pertains to a type defined in this crate. use rustc_data_structures::fx::FxHashSet; -use rustc_errors::{struct_span_err, DelayDm}; -use rustc_errors::{Diagnostic, ErrorGuaranteed}; +use rustc_errors::{DelayDm, ErrorGuaranteed}; use rustc_hir as hir; use rustc_middle::ty::util::CheckRegions; use rustc_middle::ty::GenericArgs; @@ -17,6 +16,8 @@ use rustc_trait_selection::traits; use std::ops::ControlFlow; +use crate::errors; + #[instrument(skip(tcx), level = "debug")] pub(crate) fn orphan_check_impl( tcx: TyCtxt<'_>, @@ -243,8 +244,8 @@ | ty::Tuple(..) => (LocalImpl::Allow, NonlocalImpl::DisallowOther), ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Bound(..) | ty::Placeholder(..) | ty::Infer(..) 
=> { @@ -259,49 +260,30 @@ match local_impl { LocalImpl::Allow => {} LocalImpl::Disallow { problematic_kind } => { - let msg = format!( - "traits with a default impl, like `{trait}`, \ - cannot be implemented for {problematic_kind} `{self_ty}`", - trait = tcx.def_path_str(trait_def_id), - ); - let label = format!( - "a trait object implements `{trait}` if and only if `{trait}` \ - is one of the trait object's trait bounds", - trait = tcx.def_path_str(trait_def_id), - ); - let sp = tcx.def_span(def_id); - let reported = - struct_span_err!(tcx.sess, sp, E0321, "{}", msg).note(label).emit(); - return Err(reported); + return Err(tcx.sess.emit_err(errors::TraitsWithDefaultImpl { + span: tcx.def_span(def_id), + traits: tcx.def_path_str(trait_def_id), + problematic_kind, + self_ty, + })); } } } else { - if let Some((msg, label)) = match nonlocal_impl { - NonlocalImpl::Allow => None, - NonlocalImpl::DisallowBecauseNonlocal => Some(( - format!( - "cross-crate traits with a default impl, like `{}`, \ - can only be implemented for a struct/enum type \ - defined in the current crate", - tcx.def_path_str(trait_def_id) - ), - "can't implement cross-crate trait for type in another crate", - )), - NonlocalImpl::DisallowOther => Some(( - format!( - "cross-crate traits with a default impl, like `{}`, can \ - only be implemented for a struct/enum type, not `{}`", - tcx.def_path_str(trait_def_id), - self_ty - ), - "can't implement cross-crate trait with a default impl for \ - non-struct/enum type", - )), - } { - let sp = tcx.def_span(def_id); - let reported = - struct_span_err!(tcx.sess, sp, E0321, "{}", msg).span_label(sp, label).emit(); - return Err(reported); + match nonlocal_impl { + NonlocalImpl::Allow => {} + NonlocalImpl::DisallowBecauseNonlocal => { + return Err(tcx.sess.emit_err(errors::CrossCrateTraitsDefined { + span: tcx.def_span(def_id), + traits: tcx.def_path_str(trait_def_id), + })); + } + NonlocalImpl::DisallowOther => { + return Err(tcx.sess.emit_err(errors::CrossCrateTraits { + span: tcx.def_span(def_id), + traits: tcx.def_path_str(trait_def_id), + self_ty, + })); + } } } } @@ -322,19 +304,18 @@ let self_ty = trait_ref.self_ty(); Err(match err { traits::OrphanCheckErr::NonLocalInputType(tys) => { - let msg = match self_ty.kind() { - ty::Adt(..) => "can be implemented for types defined outside of the crate", - _ if self_ty.is_primitive() => "can be implemented for primitive types", - _ => "can be implemented for arbitrary types", - }; - let mut err = struct_span_err!( - tcx.sess, - sp, - E0117, - "only traits defined in the current crate {msg}" - ); - err.span_label(sp, "impl doesn't use only types from inside the current crate"); + let (mut opaque, mut foreign, mut name, mut pointer, mut ty_diag) = + (Vec::new(), Vec::new(), Vec::new(), Vec::new(), Vec::new()); + let mut sugg = None; for &(mut ty, is_target_ty) in &tys { + let span = if is_target_ty { + // Point at `D
` in `impl for C in D` + self_ty_span + } else { + // Point at `C` in `impl for C in D` + trait_span + }; + ty = tcx.erase_regions(ty); ty = match ty.kind() { // Remove the type arguments from the output, as they are not relevant. @@ -345,50 +326,103 @@ ty::Adt(def, _) => Ty::new_adt(tcx, *def, ty::List::empty()), _ => ty, }; - let msg = |ty: &str, postfix: &str| { - format!("{ty} is not defined in the current crate{postfix}") - }; - let this = |name: &str| { - if !trait_ref.def_id.is_local() && !is_target_ty { - msg("this", " because this is a foreign trait") + fn push_to_foreign_or_name<'tcx>( + is_foreign: bool, + foreign: &mut Vec, + name: &mut Vec>, + span: Span, + sname: &'tcx str, + ) { + if is_foreign { + foreign.push(errors::OnlyCurrentTraitsForeign { span }) } else { - msg("this", &format!(" because {name} are always foreign")) + name.push(errors::OnlyCurrentTraitsName { span, name: sname }); + } + } + + let is_foreign = !trait_ref.def_id.is_local() && !is_target_ty; + + match &ty.kind() { + ty::Slice(_) => { + push_to_foreign_or_name( + is_foreign, + &mut foreign, + &mut name, + span, + "slices", + ); + } + ty::Array(..) => { + push_to_foreign_or_name( + is_foreign, + &mut foreign, + &mut name, + span, + "arrays", + ); + } + ty::Tuple(..) => { + push_to_foreign_or_name( + is_foreign, + &mut foreign, + &mut name, + span, + "tuples", + ); } - }; - let msg = match &ty.kind() { - ty::Slice(_) => this("slices"), - ty::Array(..) => this("arrays"), - ty::Tuple(..) => this("tuples"), ty::Alias(ty::Opaque, ..) => { - "type alias impl trait is treated as if it were foreign, \ - because its hidden type could be from a foreign crate" - .to_string() + opaque.push(errors::OnlyCurrentTraitsOpaque { span }) } ty::RawPtr(ptr_ty) => { - emit_newtype_suggestion_for_raw_ptr( - full_impl_span, - self_ty, - self_ty_span, - ptr_ty, - &mut err, - ); - - msg(&format!("`{ty}`"), " because raw pointers are always foreign") + if !self_ty.has_param() { + let mut_key = ptr_ty.mutbl.prefix_str(); + sugg = Some(errors::OnlyCurrentTraitsPointerSugg { + wrapper_span: self_ty_span, + struct_span: full_impl_span.shrink_to_lo(), + mut_key, + ptr_ty: ptr_ty.ty, + }); + } + pointer.push(errors::OnlyCurrentTraitsPointer { span, pointer: ty }); } - _ => msg(&format!("`{ty}`"), ""), - }; - - if is_target_ty { - // Point at `D` in `impl for C in D` - err.span_label(self_ty_span, msg); - } else { - // Point at `C` in `impl for C in D` - err.span_label(trait_span, msg); + _ => ty_diag.push(errors::OnlyCurrentTraitsTy { span, ty }), } } - err.note("define and implement a trait or new type instead"); - err.emit() + + let err_struct = match self_ty.kind() { + ty::Adt(..) 
=> errors::OnlyCurrentTraits::Outside { + span: sp, + note: (), + opaque, + foreign, + name, + pointer, + ty: ty_diag, + sugg, + }, + _ if self_ty.is_primitive() => errors::OnlyCurrentTraits::Primitive { + span: sp, + note: (), + opaque, + foreign, + name, + pointer, + ty: ty_diag, + sugg, + }, + _ => errors::OnlyCurrentTraits::Arbitrary { + span: sp, + note: (), + opaque, + foreign, + name, + pointer, + ty: ty_diag, + sugg, + }, + }; + tcx.sess.emit_err(err_struct) } traits::OrphanCheckErr::UncoveredTy(param_ty, local_type) => { let mut sp = sp; @@ -399,85 +433,18 @@ } match local_type { - Some(local_type) => struct_span_err!( - tcx.sess, - sp, - E0210, - "type parameter `{}` must be covered by another type \ - when it appears before the first local type (`{}`)", + Some(local_type) => tcx.sess.emit_err(errors::TyParamFirstLocal { + span: sp, + note: (), param_ty, - local_type - ) - .span_label( - sp, - format!( - "type parameter `{param_ty}` must be covered by another type \ - when it appears before the first local type (`{local_type}`)" - ), - ) - .note( - "implementing a foreign trait is only possible if at \ - least one of the types for which it is implemented is local, \ - and no uncovered type parameters appear before that first \ - local type", - ) - .note( - "in this case, 'before' refers to the following order: \ - `impl<..> ForeignTrait for T0`, \ - where `T0` is the first and `Tn` is the last", - ) - .emit(), - None => struct_span_err!( - tcx.sess, - sp, - E0210, - "type parameter `{}` must be used as the type parameter for some \ - local type (e.g., `MyStruct<{}>`)", - param_ty, - param_ty - ) - .span_label( - sp, - format!( - "type parameter `{param_ty}` must be used as the type parameter for some \ - local type", - ), - ) - .note( - "implementing a foreign trait is only possible if at \ - least one of the types for which it is implemented is local", - ) - .note( - "only traits defined in the current crate can be \ - implemented for a type parameter", - ) - .emit(), + local_type, + }), + None => tcx.sess.emit_err(errors::TyParamSome { span: sp, note: (), param_ty }), } } }) } -fn emit_newtype_suggestion_for_raw_ptr( - full_impl_span: Span, - self_ty: Ty<'_>, - self_ty_span: Span, - ptr_ty: &ty::TypeAndMut<'_>, - diag: &mut Diagnostic, -) { - if !self_ty.has_param() { - let mut_key = ptr_ty.mutbl.prefix_str(); - let msg_sugg = "consider introducing a new wrapper type".to_owned(); - let sugg = vec![ - ( - full_impl_span.shrink_to_lo(), - format!("struct WrapperType(*{}{});\n\n", mut_key, ptr_ty.ty), - ), - (self_ty_span, "WrapperType".to_owned()), - ]; - diag.multipart_suggestion(msg_sugg, sugg, rustc_errors::Applicability::MaybeIncorrect); - } -} - /// Lint impls of auto traits if they are likely to have /// unsound or surprising effects on auto impls. fn lint_auto_trait_impl<'tcx>( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/item_bounds.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/item_bounds.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/item_bounds.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/item_bounds.rs 2023-12-21 16:55:28.000000000 +0000 @@ -169,8 +169,8 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { if let ty::Alias(ty::Projection, projection_ty) = ty.kind() - && let Some(ty::ImplTraitInTraitData::Trait { fn_def_id, .. 
}) - = self.tcx.opt_rpitit_info(projection_ty.def_id) + && let Some(ty::ImplTraitInTraitData::Trait { fn_def_id, .. }) = + self.tcx.opt_rpitit_info(projection_ty.def_id) && fn_def_id == self.fn_def_id { self.tcx.type_of(projection_ty.def_id).instantiate(self.tcx, projection_ty.args) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/predicates_of.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/predicates_of.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/predicates_of.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/predicates_of.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,7 +11,7 @@ use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_middle::ty::{GenericPredicates, ImplTraitInTraitData, ToPredicate}; use rustc_span::symbol::Ident; -use rustc_span::{Span, DUMMY_SP}; +use rustc_span::{sym, Span, DUMMY_SP}; /// Returns a list of all type predicates (explicit and implicit) for the definition with /// ID `def_id`. This includes all predicates returned by `predicates_defined_on`, plus @@ -38,11 +38,38 @@ // an obligation and instead be skipped. Otherwise we'd use // `tcx.def_span(def_id);` let span = rustc_span::DUMMY_SP; - result.predicates = - tcx.arena.alloc_from_iter(result.predicates.iter().copied().chain(std::iter::once(( - ty::TraitRef::identity(tcx, def_id).to_predicate(tcx), + let non_const_bound = if tcx.features().effects && tcx.has_attr(def_id, sym::const_trait) { + // when `Self` is a const trait, also add `Self: Trait<.., true>` as implied bound, + // because only implementing `Self: Trait<.., false>` is currently not possible. + Some(( + ty::TraitRef::new( + tcx, + def_id, + ty::GenericArgs::for_item(tcx, def_id, |param, _| { + if param.is_host_effect() { + tcx.consts.true_.into() + } else { + tcx.mk_param_from_def(param) + } + }), + ) + .to_predicate(tcx), span, - )))); + )) + } else { + None + }; + result.predicates = tcx.arena.alloc_from_iter( + result + .predicates + .iter() + .copied() + .chain(std::iter::once(( + ty::TraitRef::identity(tcx, def_id).to_predicate(tcx), + span, + ))) + .chain(non_const_bound), + ); } debug!("predicates_of(def_id={:?}) = {:?}", def_id, result); result @@ -180,7 +207,7 @@ // // default impl Foo for Bar { .. } // - // we add a default where clause `Foo: Bar`. We do a similar thing for traits + // we add a default where clause `Bar: Foo`. We do a similar thing for traits // (see below). Recall that a default impl is not itself an impl, but rather a // set of defaults that can be incorporated into another impl. 
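For context on `predicates_of` above: the predicate list of every trait contains the identity predicate `Self: Trait` (which the new `effects` branch additionally pairs with a host-effect variant on nightly). A tiny sketch, in plain user code, of what that implicit predicate buys inside a trait's default bodies:

```rust
fn describe<T: Named + ?Sized>(t: &T) -> String {
    format!("[{}]", t.name())
}

trait Named {
    fn name(&self) -> String;

    // `Self: Named` is one of the trait's own (elaborated) predicates, so
    // `Self` satisfies the `T: Named` bound of `describe` in default bodies.
    fn describe_self(&self) -> String {
        describe(self)
    }
}
```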
if let Some(trait_ref) = is_default_impl_trait { @@ -389,7 +416,9 @@ let node = tcx.hir().get(hir_id); let mut collector = ConstCollector { tcx, preds: FxIndexSet::default() }; - if let hir::Node::Item(item) = node && let hir::ItemKind::Impl(impl_) = item.kind { + if let hir::Node::Item(item) = node + && let hir::ItemKind::Impl(impl_) = item.kind + { if let Some(of_trait) = &impl_.of_trait { debug!("const_evaluatable_predicates_of({:?}): visit impl trait_ref", def_id); collector.visit_trait_ref(of_trait); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs 2023-12-21 16:55:28.000000000 +0000 @@ -880,7 +880,7 @@ (pair, r) }) .unzip(); - self.record_late_bound_vars(hir_id, binders.clone()); + self.record_late_bound_vars(hir_id, binders); // Even if there are no lifetimes defined here, we still wrap it in a binder // scope. If there happens to be a nested poly trait ref (an error), that // will be `Concatenating` anyways, so we don't have to worry about the depth @@ -1190,7 +1190,9 @@ Scope::Root { opt_parent_item } => { if let Some(parent_item) = opt_parent_item && let parent_generics = self.tcx.generics_of(parent_item) - && parent_generics.param_def_id_to_index(self.tcx, region_def_id.to_def_id()).is_some() + && parent_generics + .param_def_id_to_index(self.tcx, region_def_id.to_def_id()) + .is_some() { break Some(ResolvedArg::EarlyBound(region_def_id.to_def_id())); } @@ -1209,13 +1211,14 @@ // regular fns. 
if let Some(hir::PredicateOrigin::ImplTrait) = where_bound_origin && let hir::LifetimeName::Param(param_id) = lifetime_ref.res - && let Some(generics) = self.tcx.hir().get_generics(self.tcx.local_parent(param_id)) + && let Some(generics) = + self.tcx.hir().get_generics(self.tcx.local_parent(param_id)) && let Some(param) = generics.params.iter().find(|p| p.def_id == param_id) && param.is_elided_lifetime() && !self.tcx.asyncness(lifetime_ref.hir_id.owner.def_id).is_async() && !self.tcx.features().anonymous_lifetime_in_impl_trait { - let mut diag = rustc_session::parse::feature_err( + let mut diag = rustc_session::parse::feature_err( &self.tcx.sess.parse_sess, sym::anonymous_lifetime_in_impl_trait, lifetime_ref.ident.span, @@ -1225,25 +1228,31 @@ if let Some(generics) = self.tcx.hir().get_generics(lifetime_ref.hir_id.owner.def_id) { - let new_param_sugg = if let Some(span) = - generics.span_for_lifetime_suggestion() - { - (span, "'a, ".to_owned()) - } else { - (generics.span, "<'a>".to_owned()) - }; + let new_param_sugg = + if let Some(span) = generics.span_for_lifetime_suggestion() { + (span, "'a, ".to_owned()) + } else { + (generics.span, "<'a>".to_owned()) + }; let lifetime_sugg = match lifetime_ref.suggestion_position() { - (hir::LifetimeSuggestionPosition::Normal, span) => (span, "'a".to_owned()), - (hir::LifetimeSuggestionPosition::Ampersand, span) => (span, "'a ".to_owned()), - (hir::LifetimeSuggestionPosition::ElidedPath, span) => (span, "<'a>".to_owned()), - (hir::LifetimeSuggestionPosition::ElidedPathArgument, span) => (span, "'a, ".to_owned()), - (hir::LifetimeSuggestionPosition::ObjectDefault, span) => (span, "+ 'a".to_owned()), + (hir::LifetimeSuggestionPosition::Normal, span) => { + (span, "'a".to_owned()) + } + (hir::LifetimeSuggestionPosition::Ampersand, span) => { + (span, "'a ".to_owned()) + } + (hir::LifetimeSuggestionPosition::ElidedPath, span) => { + (span, "<'a>".to_owned()) + } + (hir::LifetimeSuggestionPosition::ElidedPathArgument, span) => { + (span, "'a, ".to_owned()) + } + (hir::LifetimeSuggestionPosition::ObjectDefault, span) => { + (span, "+ 'a".to_owned()) + } }; - let suggestions = vec![ - lifetime_sugg, - new_param_sugg, - ]; + let suggestions = vec![lifetime_sugg, new_param_sugg]; diag.span_label( lifetime_ref.ident.span, @@ -1378,7 +1387,9 @@ Scope::Root { opt_parent_item } => { if let Some(parent_item) = opt_parent_item && let parent_generics = self.tcx.generics_of(parent_item) - && parent_generics.param_def_id_to_index(self.tcx, param_def_id.to_def_id()).is_some() + && parent_generics + .param_def_id_to_index(self.tcx, param_def_id.to_def_id()) + .is_some() { break Some(ResolvedArg::EarlyBound(param_def_id.to_def_id())); } @@ -1689,14 +1700,12 @@ if binding.gen_args.parenthesized == hir::GenericArgsParentheses::ReturnTypeNotation { let bound_vars = if let Some(type_def_id) = type_def_id && self.tcx.def_kind(type_def_id) == DefKind::Trait - && let Some((mut bound_vars, assoc_fn)) = - BoundVarContext::supertrait_hrtb_vars( - self.tcx, - type_def_id, - binding.ident, - ty::AssocKind::Fn, - ) - { + && let Some((mut bound_vars, assoc_fn)) = BoundVarContext::supertrait_hrtb_vars( + self.tcx, + type_def_id, + binding.ident, + ty::AssocKind::Fn, + ) { bound_vars.extend(self.tcx.generics_of(assoc_fn.def_id).params.iter().map( |param| match param.kind { ty::GenericParamDefKind::Lifetime => ty::BoundVariableKind::Region( @@ -1708,14 +1717,14 @@ ty::GenericParamDefKind::Const { .. 
} => ty::BoundVariableKind::Const, }, )); - bound_vars - .extend(self.tcx.fn_sig(assoc_fn.def_id).instantiate_identity().bound_vars()); + bound_vars.extend( + self.tcx.fn_sig(assoc_fn.def_id).instantiate_identity().bound_vars(), + ); bound_vars } else { - self.tcx.sess.delay_span_bug( - binding.ident.span, - "bad return type notation here", - ); + self.tcx + .sess + .delay_span_bug(binding.ident.span, "bad return type notation here"); vec![] }; self.with(scope, |this| { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,12 +1,25 @@ use rustc_errors::StashKey; -use rustc_hir::def_id::LocalDefId; +use rustc_hir::def::DefKind; +use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID}; use rustc_hir::intravisit::{self, Visitor}; use rustc_hir::{self as hir, def, Expr, ImplItem, Item, Node, TraitItem}; use rustc_middle::hir::nested_filter; use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt}; -use rustc_span::DUMMY_SP; +use rustc_span::{sym, DUMMY_SP}; -use crate::errors::{TaitForwardCompat, UnconstrainedOpaqueType}; +use crate::errors::{TaitForwardCompat, TypeOf, UnconstrainedOpaqueType}; + +pub fn test_opaque_hidden_types(tcx: TyCtxt<'_>) { + if tcx.has_attr(CRATE_DEF_ID, sym::rustc_hidden_type_of_opaques) { + for id in tcx.hir().items() { + if matches!(tcx.def_kind(id.owner_id), DefKind::OpaqueTy) { + let type_of = tcx.type_of(id.owner_id).instantiate_identity(); + + tcx.sess.emit_err(TypeOf { span: tcx.def_span(id.owner_id), type_of }); + } + } + } +} /// Checks "defining uses" of opaque `impl Trait` types to ensure that they meet the restrictions /// laid for "higher-order pattern unification". @@ -170,9 +183,17 @@ }; // Use borrowck to get the type with unerased regions. - let concrete_opaque_types = &self.tcx.mir_borrowck(item_def_id).concrete_opaque_types; - debug!(?concrete_opaque_types); - if let Some(&concrete_type) = concrete_opaque_types.get(&self.def_id) { + let borrowck_results = &self.tcx.mir_borrowck(item_def_id); + + // If the body was tainted, then assume the opaque may have been constrained and just set it to error. 
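To ground the hidden-type collection above (which now also bails out with an error type when borrowck is tainted): a "defining use" is a body whose value pins down the concrete type behind an opaque `impl Trait`. A standalone sketch:

```rust
fn evens() -> impl Iterator<Item = u32> {
    // This body is the defining use: borrowck records the hidden type of
    // the opaque `impl Iterator<Item = u32>` (a `Map<Range<u32>, _>` here).
    (0..5).map(|n| n * 2)
}

fn main() {
    assert_eq!(evens().collect::<Vec<_>>(), vec![0, 2, 4, 6, 8]);
}
```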
+ if let Some(guar) = borrowck_results.tainted_by_errors { + self.found = + Some(ty::OpaqueHiddenType { span: DUMMY_SP, ty: Ty::new_error(self.tcx, guar) }); + return; + } + + debug!(?borrowck_results.concrete_opaque_types); + if let Some(&concrete_type) = borrowck_results.concrete_opaque_types.get(&self.def_id) { debug!(?concrete_type, "found constraint"); if let Some(prev) = &mut self.found { if concrete_type.ty != prev.ty && !(concrete_type, prev.ty).references_error() { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/type_of.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/type_of.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/type_of.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect/type_of.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,6 +11,7 @@ use super::ItemCtxt; use super::{bad_placeholder, is_suggestable_infer_ty}; +pub use opaque::test_opaque_hidden_types; mod opaque; @@ -30,10 +31,10 @@ | Node::Expr(&Expr { kind: ExprKind::Repeat(_, ref constant), .. }) if constant.hir_id() == hir_id => { - return tcx.types.usize + return tcx.types.usize; } Node::Ty(&hir::Ty { kind: TyKind::Typeof(ref e), .. }) if e.hir_id == hir_id => { - return tcx.typeck(def_id).node_type(e.hir_id) + return tcx.typeck(def_id).node_type(e.hir_id); } Node::Expr(&Expr { kind: ExprKind::InlineAsm(asm), .. }) | Node::Item(&Item { kind: ItemKind::GlobalAsm(asm), .. }) @@ -43,36 +44,38 @@ _ => false, }) => { - return tcx.typeck(def_id).node_type(hir_id) + return tcx.typeck(def_id).node_type(hir_id); } Node::Variant(Variant { disr_expr: Some(ref e), .. }) if e.hir_id == hir_id => { - return tcx - .adt_def(tcx.hir().get_parent_item(hir_id)) - .repr() - .discr_type() - .to_ty(tcx) + return tcx.adt_def(tcx.hir().get_parent_item(hir_id)).repr().discr_type().to_ty(tcx); } Node::GenericParam(&GenericParam { def_id: param_def_id, kind: GenericParamKind::Const { default: Some(ct), .. }, .. }) if ct.hir_id == hir_id => { - return tcx.type_of(param_def_id) + return tcx + .type_of(param_def_id) .no_bound_vars() - .expect("const parameter types cannot be generic") + .expect("const parameter types cannot be generic"); } - Node::TypeBinding(binding @ &TypeBinding { hir_id: binding_id, .. }) - if let Node::TraitRef(trait_ref) = tcx.hir().get( - tcx.hir().parent_id(binding_id) - ) => + Node::TypeBinding(binding @ &TypeBinding { hir_id: binding_id, .. }) + if let Node::TraitRef(trait_ref) = tcx.hir().get(tcx.hir().parent_id(binding_id)) => { let Some(trait_def_id) = trait_ref.trait_def_id() else { - return Ty::new_error_with_message(tcx,tcx.def_span(def_id), "Could not find trait"); + return Ty::new_error_with_message( + tcx, + tcx.def_span(def_id), + "Could not find trait", + ); }; let assoc_items = tcx.associated_items(trait_def_id); let assoc_item = assoc_items.find_by_name_and_kind( - tcx, binding.ident, ty::AssocKind::Const, def_id.to_def_id(), + tcx, + binding.ident, + ty::AssocKind::Const, + def_id.to_def_id(), ); return if let Some(assoc_item) = assoc_item { tcx.type_of(assoc_item.def_id) @@ -80,8 +83,12 @@ .expect("const parameter types cannot be generic") } else { // FIXME(associated_const_equality): add a useful error message here. 
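The large `match` in `type_of` continued above derives an anonymous constant's type from the position it occupies. Two of the simple cases visible in this hunk, as a standalone sketch: array lengths and repeat counts are typed as `usize`, and enum discriminant expressions take the enum's `repr` discriminant type.

```rust
// Both `4 * 2` occurrences are anonymous constants typed as `usize`
// (array length and repeat count).
const BUF: [u8; 4 * 2] = [0; 4 * 2];

// Discriminant expressions are anonymous constants typed with the enum's
// repr discriminant type, `i16` here.
#[repr(i16)]
enum Level {
    Low = -1,
    High = 1 + 1,
}

fn main() {
    assert_eq!(BUF.len(), 8);
    assert_eq!(Level::Low as i16, -1);
    assert_eq!(Level::High as i16, 2);
}
```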
- Ty::new_error_with_message(tcx,tcx.def_span(def_id), "Could not find associated const on trait") - } + Ty::new_error_with_message( + tcx, + tcx.def_span(def_id), + "Could not find associated const on trait", + ) + }; } // This match arm is for when the def_id appears in a GAT whose @@ -138,7 +145,8 @@ (generics, arg_index) } else { // I dont think it's possible to reach this but I'm not 100% sure - BoxyUwU - return Ty::new_error_with_message(tcx, + return Ty::new_error_with_message( + tcx, tcx.def_span(def_id), "unexpected non-GAT usage of an anon const", ); @@ -155,7 +163,8 @@ // As there is no relevant param for `def_id`, we simply return // `None` here. let Some(type_dependent_def) = tables.type_dependent_def_id(parent_node_id) else { - return Ty::new_error_with_message(tcx, + return Ty::new_error_with_message( + tcx, tcx.def_span(def_id), format!("unable to find type-dependent def for {parent_node_id:?}"), ); @@ -196,14 +205,16 @@ if let Some(path) = get_path_containing_arg_in_pat(pat, hir_id) { path } else { - return Ty::new_error_with_message(tcx, + return Ty::new_error_with_message( + tcx, tcx.def_span(def_id), format!("unable to find const parent for {hir_id} in pat {pat:?}"), ); } } _ => { - return Ty::new_error_with_message(tcx, + return Ty::new_error_with_message( + tcx, tcx.def_span(def_id), format!("unexpected const parent path {parent_node:?}"), ); @@ -216,16 +227,20 @@ let Some((arg_index, segment)) = path.segments.iter().find_map(|seg| { let args = seg.args?; args.args - .iter() - .filter(|arg| arg.is_ty_or_const()) - .position(|arg| arg.hir_id() == hir_id) - .map(|index| (index, seg)).or_else(|| args.bindings .iter() - .filter_map(TypeBinding::opt_const) - .position(|ct| ct.hir_id == hir_id) - .map(|idx| (idx, seg))) + .filter(|arg| arg.is_ty_or_const()) + .position(|arg| arg.hir_id() == hir_id) + .map(|index| (index, seg)) + .or_else(|| { + args.bindings + .iter() + .filter_map(TypeBinding::opt_const) + .position(|ct| ct.hir_id == hir_id) + .map(|idx| (idx, seg)) + }) }) else { - return Ty::new_error_with_message(tcx, + return Ty::new_error_with_message( + tcx, tcx.def_span(def_id), "no arg matching AnonConst in path", ); @@ -234,7 +249,8 @@ let generics = match tcx.res_generics_def_id(segment.res) { Some(def_id) => tcx.generics_of(def_id), None => { - return Ty::new_error_with_message(tcx, + return Ty::new_error_with_message( + tcx, tcx.def_span(def_id), format!("unexpected anon const res {:?} in path: {:?}", segment.res, path), ); @@ -244,10 +260,13 @@ (generics, arg_index) } - _ => return Ty::new_error_with_message(tcx, - tcx.def_span(def_id), - format!("unexpected const parent in type_of(): {parent_node:?}"), - ), + _ => { + return Ty::new_error_with_message( + tcx, + tcx.def_span(def_id), + format!("unexpected const parent in type_of(): {parent_node:?}"), + ); + } }; debug!(?parent_node); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/collect.rs 2023-12-21 16:55:28.000000000 +0000 @@ -14,14 +14,11 @@ //! At present, however, we do run collection across all items in the //! crate as a kind of pass. This should eventually be factored away. 
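The path-argument cases handled just above cover anonymous constants written as explicit generic arguments: the constant's type is looked up from the generic parameter it instantiates. A minimal sketch (function and parameter names are made up):

```rust
fn first_half<const LEN: usize>(_xs: [u8; LEN]) -> usize {
    LEN / 2
}

fn main() {
    // `{ 2 + 2 }` is an anonymous constant used as a generic argument; its
    // type (`usize`) comes from the `LEN` parameter it instantiates.
    let n = first_half::<{ 2 + 2 }>([1, 2, 3, 4]);
    assert_eq!(n, 2);
}
```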
-use crate::astconv::AstConv; -use crate::check::intrinsic::intrinsic_operation_unsafety; -use crate::errors; -use hir::def::DefKind; use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, StashKey}; use rustc_hir as hir; +use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId}; use rustc_hir::intravisit::{self, Visitor}; use rustc_hir::{GenericParamKind, Node}; @@ -40,6 +37,11 @@ use std::iter; use std::ops::Bound; +use crate::astconv::AstConv; +use crate::check::intrinsic::intrinsic_operation_unsafety; +use crate::errors; +pub use type_of::test_opaque_hidden_types; + mod generics_of; mod item_bounds; mod predicates_of; @@ -76,7 +78,7 @@ fn_sig, impl_trait_ref, impl_polarity, - generator_kind, + coroutine_kind, collect_mod_item_types, is_type_alias_impl_trait, ..*providers @@ -212,7 +214,9 @@ let mut is_fn = false; let mut is_const_or_static = false; - if let Some(hir_ty) = hir_ty && let hir::TyKind::BareFn(_) = hir_ty.kind { + if let Some(hir_ty) = hir_ty + && let hir::TyKind::BareFn(_) = hir_ty.kind + { is_fn = true; // Check if parent is const or static @@ -224,10 +228,8 @@ Node::Item(&hir::Item { kind: hir::ItemKind::Const(..) | hir::ItemKind::Static(..), .. - }) | Node::TraitItem(&hir::TraitItem { - kind: hir::TraitItemKind::Const(..), - .. - }) | Node::ImplItem(&hir::ImplItem { kind: hir::ImplItemKind::Const(..), .. }) + }) | Node::TraitItem(&hir::TraitItem { kind: hir::TraitItemKind::Const(..), .. }) + | Node::ImplItem(&hir::ImplItem { kind: hir::ImplItemKind::Const(..), .. }) ); } @@ -1004,10 +1006,7 @@ && let Some(lit) = meta.name_value_literal() { if seen_attr { - tcx.sess.span_err( - meta.span, - "duplicated `implement_via_object` meta item", - ); + tcx.sess.span_err(meta.span, "duplicated `implement_via_object` meta item"); } seen_attr = true; @@ -1021,7 +1020,10 @@ _ => { tcx.sess.span_err( meta.span, - format!("unknown literal passed to `implement_via_object` attribute: {}", lit.symbol), + format!( + "unknown literal passed to `implement_via_object` attribute: {}", + lit.symbol + ), ); } } @@ -1115,8 +1117,7 @@ ImplItem(hir::ImplItem { kind: ImplItemKind::Fn(sig, _), generics, .. }) => { // Do not try to infer the return type for a impl method coming from a trait - if let Item(hir::Item { kind: ItemKind::Impl(i), .. }) = - tcx.hir().get_parent(hir_id) + if let Item(hir::Item { kind: ItemKind::Impl(i), .. }) = tcx.hir().get_parent(hir_id) && i.of_trait.is_some() { icx.astconv().ty_of_fn( @@ -1343,7 +1344,13 @@ if ocx.select_where_possible().is_empty() && let item_ty = infcx.resolve_vars_if_possible(item_ty) && let Some(item_ty) = item_ty.make_suggestable(tcx, false) - && let Some(sugg) = formatter(tcx, infcx.resolve_vars_if_possible(args), trait_def_id, assoc_item_def_id, item_ty) + && let Some(sugg) = formatter( + tcx, + infcx.resolve_vars_if_possible(args), + trait_def_id, + assoc_item_def_id, + item_ty, + ) { return Some(sugg); } @@ -1543,12 +1550,12 @@ fty } -fn generator_kind(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option { +fn coroutine_kind(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option { match tcx.hir().get_by_def_id(def_id) { Node::Expr(&rustc_hir::Expr { kind: rustc_hir::ExprKind::Closure(&rustc_hir::Closure { body, .. }), .. 
- }) => tcx.hir().body(body).generator_kind(), + }) => tcx.hir().body(body).coroutine_kind(), _ => None, } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/constrained_generic_params.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/constrained_generic_params.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/constrained_generic_params.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/constrained_generic_params.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,7 @@ use rustc_data_structures::fx::FxHashSet; use rustc_middle::ty::visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}; use rustc_middle::ty::{self, Ty, TyCtxt}; -use rustc_span::source_map::Span; +use rustc_span::Span; use std::ops::ControlFlow; #[derive(Clone, PartialEq, Eq, Hash, Debug)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -96,7 +96,7 @@ #[diag(hir_analysis_multiple_relaxed_default_bounds, code = "E0203")] pub struct MultipleRelaxedDefaultBounds { #[primary_span] - pub span: Span, + pub spans: Vec<Span>, } #[derive(Diagnostic)] @@ -226,7 +226,9 @@ let mut suggested = false; // Don't suggest setting the type params if there are some already: the order is // tricky to get right and the user will already know what the syntax is. - if let Some(snippet) = self.span_snippet && self.empty_generic_args { + if let Some(snippet) = self.span_snippet + && self.empty_generic_args + { if snippet.ends_with('>') { // The user wrote `Trait<'a, T>` or similar. To provide an accurate suggestion // we would have to preserve the right order.
For now, as clearly the user is @@ -466,6 +468,14 @@ } #[derive(Diagnostic)] +#[diag(hir_analysis_type_of)] +pub(crate) struct TypeOf<'tcx> { + #[primary_span] + pub span: Span, + pub type_of: Ty<'tcx>, +} + +#[derive(Diagnostic)] #[diag(hir_analysis_pass_to_variadic_function, code = "E0617")] pub(crate) struct PassToVariadicFunction<'tcx, 'a> { #[primary_span] @@ -673,7 +683,6 @@ } #[derive(Diagnostic)] - pub enum ImplNotMarkedDefault { #[diag(hir_analysis_impl_not_marked_default, code = "E0520")] #[note] @@ -1149,3 +1158,174 @@ pub trait_name: String, pub ty: String, } + +#[derive(Diagnostic)] +#[diag(hir_analysis_traits_with_defualt_impl, code = "E0321")] +#[note] +pub struct TraitsWithDefaultImpl<'a> { + #[primary_span] + pub span: Span, + pub traits: String, + pub problematic_kind: &'a str, + pub self_ty: Ty<'a>, +} + +#[derive(Diagnostic)] +#[diag(hir_analysis_cross_crate_traits, code = "E0321")] +pub struct CrossCrateTraits<'a> { + #[primary_span] + #[label] + pub span: Span, + pub traits: String, + pub self_ty: Ty<'a>, +} + +#[derive(Diagnostic)] +#[diag(hir_analysis_cross_crate_traits_defined, code = "E0321")] +pub struct CrossCrateTraitsDefined { + #[primary_span] + #[label] + pub span: Span, + pub traits: String, +} + +#[derive(Diagnostic)] +#[diag(hir_analysis_ty_param_first_local, code = "E0210")] +#[note] +pub struct TyParamFirstLocal<'a> { + #[primary_span] + #[label] + pub span: Span, + #[note(hir_analysis_case_note)] + pub note: (), + pub param_ty: Ty<'a>, + pub local_type: Ty<'a>, +} + +#[derive(Diagnostic)] +#[diag(hir_analysis_ty_param_some, code = "E0210")] +#[note] +pub struct TyParamSome<'a> { + #[primary_span] + #[label] + pub span: Span, + #[note(hir_analysis_only_note)] + pub note: (), + pub param_ty: Ty<'a>, +} + +#[derive(Diagnostic)] +pub enum OnlyCurrentTraits<'a> { + #[diag(hir_analysis_only_current_traits_outside, code = "E0117")] + Outside { + #[primary_span] + #[label(hir_analysis_only_current_traits_label)] + span: Span, + #[note(hir_analysis_only_current_traits_note)] + note: (), + #[subdiagnostic] + opaque: Vec<OnlyCurrentTraitsOpaque>, + #[subdiagnostic] + foreign: Vec<OnlyCurrentTraitsForeign>, + #[subdiagnostic] + name: Vec<OnlyCurrentTraitsName<'a>>, + #[subdiagnostic] + pointer: Vec<OnlyCurrentTraitsPointer<'a>>, + #[subdiagnostic] + ty: Vec<OnlyCurrentTraitsTy<'a>>, + #[subdiagnostic] + sugg: Option<OnlyCurrentTraitsPointerSugg<'a>>, + }, + #[diag(hir_analysis_only_current_traits_primitive, code = "E0117")] + Primitive { + #[primary_span] + #[label(hir_analysis_only_current_traits_label)] + span: Span, + #[note(hir_analysis_only_current_traits_note)] + note: (), + #[subdiagnostic] + opaque: Vec<OnlyCurrentTraitsOpaque>, + #[subdiagnostic] + foreign: Vec<OnlyCurrentTraitsForeign>, + #[subdiagnostic] + name: Vec<OnlyCurrentTraitsName<'a>>, + #[subdiagnostic] + pointer: Vec<OnlyCurrentTraitsPointer<'a>>, + #[subdiagnostic] + ty: Vec<OnlyCurrentTraitsTy<'a>>, + #[subdiagnostic] + sugg: Option<OnlyCurrentTraitsPointerSugg<'a>>, + }, + #[diag(hir_analysis_only_current_traits_arbitrary, code = "E0117")] + Arbitrary { + #[primary_span] + #[label(hir_analysis_only_current_traits_label)] + span: Span, + #[note(hir_analysis_only_current_traits_note)] + note: (), + #[subdiagnostic] + opaque: Vec<OnlyCurrentTraitsOpaque>, + #[subdiagnostic] + foreign: Vec<OnlyCurrentTraitsForeign>, + #[subdiagnostic] + name: Vec<OnlyCurrentTraitsName<'a>>, + #[subdiagnostic] + pointer: Vec<OnlyCurrentTraitsPointer<'a>>, + #[subdiagnostic] + ty: Vec<OnlyCurrentTraitsTy<'a>>, + #[subdiagnostic] + sugg: Option<OnlyCurrentTraitsPointerSugg<'a>>, + }, +} + +#[derive(Subdiagnostic)] +#[label(hir_analysis_only_current_traits_opaque)] +pub struct OnlyCurrentTraitsOpaque { + #[primary_span] + pub span: Span, +} + +#[derive(Subdiagnostic)] +#[label(hir_analysis_only_current_traits_foreign)] +pub struct OnlyCurrentTraitsForeign { + #[primary_span] + pub span: Span, +} + +#[derive(Subdiagnostic)] +#[label(hir_analysis_only_current_traits_name)] +pub struct
OnlyCurrentTraitsName<'a> { + #[primary_span] + pub span: Span, + pub name: &'a str, +} + +#[derive(Subdiagnostic)] +#[label(hir_analysis_only_current_traits_pointer)] +pub struct OnlyCurrentTraitsPointer<'a> { + #[primary_span] + pub span: Span, + pub pointer: Ty<'a>, +} + +#[derive(Subdiagnostic)] +#[label(hir_analysis_only_current_traits_ty)] +pub struct OnlyCurrentTraitsTy<'a> { + #[primary_span] + pub span: Span, + pub ty: Ty<'a>, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion( + hir_analysis_only_current_traits_pointer_sugg, + applicability = "maybe-incorrect" +)] +pub struct OnlyCurrentTraitsPointerSugg<'a> { + #[suggestion_part(code = "WrapperType")] + pub wrapper_span: Span, + #[suggestion_part(code = "struct WrapperType(*{mut_key}{ptr_ty});\n\n")] + pub struct_span: Span, + pub mut_key: &'a str, + pub ptr_ty: Ty<'a>, +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs 2023-12-21 16:55:28.000000000 +0000 @@ -131,7 +131,9 @@ } fn check_has_items(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node: Node, span: Span) { - if let Node::Impl(impl2_id) = impl2_node && tcx.associated_item_def_ids(impl1_def_id).is_empty() { + if let Node::Impl(impl2_id) = impl2_node + && tcx.associated_item_def_ids(impl1_def_id).is_empty() + { let base_impl_span = tcx.def_span(impl2_id); tcx.sess.emit_err(errors::EmptySpecialization { span, base_impl_span }); } @@ -196,7 +198,7 @@ let errors = ocx.select_all_or_error(); if !errors.is_empty() { - let guar = ocx.infcx.err_ctxt().report_fulfillment_errors(&errors); + let guar = ocx.infcx.err_ctxt().report_fulfillment_errors(errors); return Err(guar); } @@ -376,9 +378,9 @@ let always_applicable_traits = impl1_predicates .iter() .copied() - .filter(|(clause, _span)| { + .filter(|&(clause, _span)| { matches!( - trait_predicate_kind(tcx, clause.as_predicate()), + trait_specialization_kind(tcx, clause), Some(TraitSpecializationKind::AlwaysApplicable) ) }) @@ -402,7 +404,7 @@ .iter() .any(|pred2| trait_predicates_eq(tcx, clause.as_predicate(), *pred2, span)) { - check_specialization_on(tcx, clause.as_predicate(), span) + check_specialization_on(tcx, clause, span) } } } @@ -441,19 +443,16 @@ } #[instrument(level = "debug", skip(tcx))] -fn check_specialization_on<'tcx>(tcx: TyCtxt<'tcx>, predicate: ty::Predicate<'tcx>, span: Span) { - match predicate.kind().skip_binder() { +fn check_specialization_on<'tcx>(tcx: TyCtxt<'tcx>, clause: ty::Clause<'tcx>, span: Span) { + match clause.kind().skip_binder() { // Global predicates are either always true or always false, so we // are fine to specialize on. - _ if predicate.is_global() => (), + _ if clause.is_global() => (), // We allow specializing on explicitly marked traits with no associated // items. 
- ty::PredicateKind::Clause(ty::ClauseKind::Trait(ty::TraitPredicate { - trait_ref, - polarity: _, - })) => { + ty::ClauseKind::Trait(ty::TraitPredicate { trait_ref, polarity: _ }) => { if !matches!( - trait_predicate_kind(tcx, predicate), + trait_specialization_kind(tcx, clause), Some(TraitSpecializationKind::Marker) ) { tcx.sess @@ -467,10 +466,7 @@ .emit(); } } - ty::PredicateKind::Clause(ty::ClauseKind::Projection(ty::ProjectionPredicate { - projection_ty, - term, - })) => { + ty::ClauseKind::Projection(ty::ProjectionPredicate { projection_ty, term }) => { tcx.sess .struct_span_err( span, @@ -478,7 +474,7 @@ ) .emit(); } - ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(..)) => { + ty::ClauseKind::ConstArgHasType(..) => { // FIXME(min_specialization), FIXME(const_generics): // It probably isn't right to allow _every_ `ConstArgHasType` but I am somewhat unsure // about the actual rules that would be sound. Can't just always error here because otherwise @@ -490,33 +486,25 @@ } _ => { tcx.sess - .struct_span_err(span, format!("cannot specialize on predicate `{predicate}`")) + .struct_span_err(span, format!("cannot specialize on predicate `{clause}`")) .emit(); } } } -fn trait_predicate_kind<'tcx>( +fn trait_specialization_kind<'tcx>( tcx: TyCtxt<'tcx>, - predicate: ty::Predicate<'tcx>, + clause: ty::Clause<'tcx>, ) -> Option<TraitSpecializationKind> { - match predicate.kind().skip_binder() { - ty::PredicateKind::Clause(ty::ClauseKind::Trait(ty::TraitPredicate { - trait_ref, - polarity: _, - })) => Some(tcx.trait_def(trait_ref.def_id).specialization_kind), - ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(_)) - | ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(_)) - | ty::PredicateKind::Clause(ty::ClauseKind::Projection(_)) - | ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(..)) - | ty::PredicateKind::AliasRelate(..) - | ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(_)) - | ty::PredicateKind::Subtype(_) - | ty::PredicateKind::Coerce(_) - | ty::PredicateKind::ObjectSafe(_) - | ty::PredicateKind::ClosureKind(..) - | ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(..)) - | ty::PredicateKind::ConstEquate(..) - | ty::PredicateKind::Ambiguous => None, + match clause.kind().skip_binder() { + ty::ClauseKind::Trait(ty::TraitPredicate { trait_ref, polarity: _ }) => { + Some(tcx.trait_def(trait_ref.def_id).specialization_kind) + } + ty::ClauseKind::RegionOutlives(_) + | ty::ClauseKind::TypeOutlives(_) + | ty::ClauseKind::Projection(_) + | ty::ClauseKind::ConstArgHasType(..) + | ty::ClauseKind::WellFormed(_) + | ty::ClauseKind::ConstEvaluatable(..)
=> None, } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -57,6 +57,9 @@ #![allow(rustc::potential_query_instability)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![feature(box_patterns)] #![feature(control_flow_enum)] #![feature(if_let_guard)] @@ -179,13 +182,10 @@ } tcx.sess.track_errors(|| { - tcx.sess.time("impl_wf_inference", || { - tcx.hir().for_each_module(|module| tcx.ensure().check_mod_impl_wf(module)) - }); - })?; - - tcx.sess.track_errors(|| { tcx.sess.time("coherence_checking", || { + // Check impls constrain their parameters + tcx.hir().for_each_module(|module| tcx.ensure().check_mod_impl_wf(module)); + for &trait_def_id in tcx.all_local_trait_impls(()).keys() { tcx.ensure().coherent_trait(trait_def_id); } @@ -202,17 +202,23 @@ })?; } - tcx.sess.track_errors(|| { - tcx.sess.time("wf_checking", || { - tcx.hir().par_for_each_module(|module| tcx.ensure().check_mod_type_wf(module)) - }); - })?; + let errs = tcx.sess.time("wf_checking", || { + tcx.hir().try_par_for_each_module(|module| tcx.ensure().check_mod_type_wf(module)) + }); // NOTE: This is copy/pasted in librustdoc/core.rs and should be kept in sync. tcx.sess.time("item_types_checking", || { tcx.hir().for_each_module(|module| tcx.ensure().check_mod_item_types(module)) }); + // HACK: `check_mod_type_wf` may spuriously emit errors due to `delay_span_bug`, even if those errors + // only actually get emitted in `check_mod_item_types`. + errs?; + + if tcx.features().rustc_attrs { + tcx.sess.track_errors(|| collect::test_opaque_hidden_types(tcx))?; + } + // Freeze definitions as we don't add new ones at this point. This improves performance by // allowing lock-free access to them. tcx.untracked().definitions.freeze(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs 2023-12-21 16:55:28.000000000 +0000 @@ -129,6 +129,44 @@ if self.missing_lifetimes() { "lifetime" } else { "generic" } } + /// Returns true if the generic type is a trait + /// and is being referred to from one of its trait impls + fn is_in_trait_impl(&self) -> bool { + if self.tcx.is_trait(self.def_id) { + // Here we check if the reference to the generic type + // is from the 'of_trait' field of the enclosing impl + + let parent = self.tcx.hir().get_parent(self.path_segment.hir_id); + let parent_item = self + .tcx + .hir() + .get_by_def_id(self.tcx.hir().get_parent_item(self.path_segment.hir_id).def_id); + + // Get the HIR id of the trait ref + let hir::Node::TraitRef(hir::TraitRef { hir_ref_id: trait_ref_id, .. 
}) = parent else { + return false; + }; + + // Get the HIR id of the 'of_trait' field of the impl + let hir::Node::Item(hir::Item { + kind: + hir::ItemKind::Impl(hir::Impl { + of_trait: Some(hir::TraitRef { hir_ref_id: id_in_of_trait, .. }), + .. + }), + .. + }) = parent_item + else { + return false; + }; + + // Check that trait is referred to from the of_trait field of impl + trait_ref_id == id_in_of_trait + } else { + false + } + } + fn num_provided_args(&self) -> usize { if self.missing_lifetimes() { self.num_provided_lifetime_args() @@ -316,12 +354,18 @@ } // Suggest `'_` when in function parameter or elided function return. - if let Some(fn_decl) = node.fn_decl() && let Some(ty_id) = ty_id { + if let Some(fn_decl) = node.fn_decl() + && let Some(ty_id) = ty_id + { let in_arg = fn_decl.inputs.iter().any(|t| t.hir_id == ty_id); - let in_ret = matches!(fn_decl.output, hir::FnRetTy::Return(ty) if ty.hir_id == ty_id); + let in_ret = + matches!(fn_decl.output, hir::FnRetTy::Return(ty) if ty.hir_id == ty_id); if in_arg || (in_ret && fn_decl.lifetime_elision_allowed) { - return std::iter::repeat("'_".to_owned()).take(num_params_to_take).collect::>().join(", "); + return std::iter::repeat("'_".to_owned()) + .take(num_params_to_take) + .collect::>() + .join(", "); } } @@ -730,28 +774,27 @@ ); if let Some(parent_node) = self.tcx.hir().opt_parent_id(self.path_segment.hir_id) - && let Some(parent_node) = self.tcx.hir().find(parent_node) - && let hir::Node::Expr(expr) = parent_node { + && let Some(parent_node) = self.tcx.hir().find(parent_node) + && let hir::Node::Expr(expr) = parent_node + { match &expr.kind { - hir::ExprKind::Path(qpath) => { - self.suggest_moving_args_from_assoc_fn_to_trait_for_qualified_path( + hir::ExprKind::Path(qpath) => self + .suggest_moving_args_from_assoc_fn_to_trait_for_qualified_path( err, qpath, msg, num_assoc_fn_excess_args, - num_trait_generics_except_self - ) - }, - hir::ExprKind::MethodCall(..) => { - self.suggest_moving_args_from_assoc_fn_to_trait_for_method_call( + num_trait_generics_except_self, + ), + hir::ExprKind::MethodCall(..) 
=> self + .suggest_moving_args_from_assoc_fn_to_trait_for_method_call( err, trait_, expr, msg, num_assoc_fn_excess_args, - num_trait_generics_except_self - ) - }, + num_trait_generics_except_self, + ), _ => return, } } @@ -766,23 +809,25 @@ num_trait_generics_except_self: usize, ) { if let hir::QPath::Resolved(_, path) = qpath - && let Some(trait_path_segment) = path.segments.get(0) { + && let Some(trait_path_segment) = path.segments.get(0) + { let num_generic_args_supplied_to_trait = trait_path_segment.args().num_generic_params(); - if num_generic_args_supplied_to_trait + num_assoc_fn_excess_args == num_trait_generics_except_self + if num_generic_args_supplied_to_trait + num_assoc_fn_excess_args + == num_trait_generics_except_self { if let Some(span) = self.gen_args.span_ext() - && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { + && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) + { let sugg = vec![ - (self.path_segment.ident.span, format!("{}::{}", snippet, self.path_segment.ident)), - (span.with_lo(self.path_segment.ident.span.hi()), "".to_owned()) + ( + self.path_segment.ident.span, + format!("{}::{}", snippet, self.path_segment.ident), + ), + (span.with_lo(self.path_segment.ident.span.hi()), "".to_owned()), ]; - err.multipart_suggestion( - msg, - sugg, - Applicability::MaybeIncorrect - ); + err.multipart_suggestion(msg, sugg, Applicability::MaybeIncorrect); } } } @@ -948,20 +993,26 @@ // If there is a single unbound associated type and a single excess generic param // suggest replacing the generic param with the associated type bound if provided_args_matches_unbound_traits && !unbound_types.is_empty() { - let unused_generics = &self.gen_args.args[self.num_expected_type_or_const_args()..]; - let suggestions = iter::zip(unused_generics, &unbound_types) - .map(|(potential, name)| (potential.span().shrink_to_lo(), format!("{name} = "))) - .collect::>(); - - if !suggestions.is_empty() { - err.multipart_suggestion_verbose( - format!( - "replace the generic bound{s} with the associated type{s}", - s = pluralize!(unbound_types.len()) - ), - suggestions, - Applicability::MaybeIncorrect, - ); + // Don't suggest if we're in a trait impl as + // that would result in invalid syntax (fixes #116464) + if !self.is_in_trait_impl() { + let unused_generics = &self.gen_args.args[self.num_expected_type_or_const_args()..]; + let suggestions = iter::zip(unused_generics, &unbound_types) + .map(|(potential, name)| { + (potential.span().shrink_to_lo(), format!("{name} = ")) + }) + .collect::>(); + + if !suggestions.is_empty() { + err.multipart_suggestion_verbose( + format!( + "replace the generic bound{s} with the associated type{s}", + s = pluralize!(unbound_types.len()) + ), + suggestions, + Applicability::MaybeIncorrect, + ); + } } } else if remove_entire_generics { let span = self diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/variance/constraints.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/variance/constraints.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/variance/constraints.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/variance/constraints.rs 2023-12-21 16:55:28.000000000 +0000 @@ -235,7 +235,7 @@ // leaf type -- noop } - ty::FnDef(..) | ty::Generator(..) | ty::Closure(..) => { + ty::FnDef(..) | ty::Coroutine(..) | ty::Closure(..) 
=> { bug!("Unexpected closure type in variance computation"); } @@ -312,7 +312,7 @@ // types, where we use Error as the Self type } - ty::Placeholder(..) | ty::GeneratorWitness(..) | ty::Bound(..) | ty::Infer(..) => { + ty::Placeholder(..) | ty::CoroutineWitness(..) | ty::Bound(..) | ty::Infer(..) => { bug!("unexpected type encountered in variance inference: {}", ty); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/variance/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/variance/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/variance/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_analysis/src/variance/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -192,5 +192,5 @@ } } } - tcx.arena.alloc_from_iter(collector.variances.into_iter()) + tcx.arena.alloc_from_iter(collector.variances) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_pretty/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_pretty/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_pretty/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_pretty/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,11 +3,11 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start +rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_hir = { path = "../rustc_hir" } -rustc_target = { path = "../rustc_target" } rustc_span = { path = "../rustc_span" } -rustc_ast = { path = "../rustc_ast" } +rustc_target = { path = "../rustc_target" } +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_pretty/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_pretty/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_pretty/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_pretty/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -52,8 +52,6 @@ impl PpAnn for NoAnn {} pub const NO_ANN: &dyn PpAnn = &NoAnn; -/// Identical to the `PpAnn` implementation for `hir::Crate`, -/// except it avoids creating a dependency on the whole crate. impl PpAnn for &dyn rustc_hir::intravisit::Map<'_> { fn nested(&self, state: &mut State<'_>, nested: Nested) { match nested { @@ -75,7 +73,11 @@ } impl<'a> State<'a> { - pub fn print_node(&mut self, node: Node<'_>) { + fn attrs(&self, id: hir::HirId) -> &'a [ast::Attribute] { + (self.attrs)(id) + } + + fn print_node(&mut self, node: Node<'_>) { match node { Node::Param(a) => self.print_param(a), Node::Item(a) => self.print_item(a), @@ -144,7 +146,7 @@ } } -pub const INDENT_UNIT: isize = 4; +const INDENT_UNIT: isize = 4; /// Requires you to pass an input filename and reader so that /// it can scan the input text for comments to copy forward. @@ -156,7 +158,12 @@ attrs: &'a dyn Fn(hir::HirId) -> &'a [ast::Attribute], ann: &'a dyn PpAnn, ) -> String { - let mut s = State::new_from_input(sm, filename, input, attrs, ann); + let mut s = State { + s: pp::Printer::new(), + comments: Some(Comments::new(sm, filename, input)), + attrs, + ann, + }; // When printing the AST, we sometimes need to inject `#[no_std]` here. // Since you can't compile the HIR, it's not necessary. 
@@ -166,28 +173,7 @@ s.s.eof() } -impl<'a> State<'a> { - pub fn new_from_input( - sm: &'a SourceMap, - filename: FileName, - input: String, - attrs: &'a dyn Fn(hir::HirId) -> &'a [ast::Attribute], - ann: &'a dyn PpAnn, - ) -> State<'a> { - State { - s: pp::Printer::new(), - comments: Some(Comments::new(sm, filename, input)), - attrs, - ann, - } - } - - fn attrs(&self, id: hir::HirId) -> &'a [ast::Attribute] { - (self.attrs)(id) - } -} - -pub fn to_string(ann: &dyn PpAnn, f: F) -> String +fn to_string(ann: &dyn PpAnn, f: F) -> String where F: FnOnce(&mut State<'_>), { @@ -196,52 +182,20 @@ printer.s.eof() } -pub fn generic_params_to_string(generic_params: &[GenericParam<'_>]) -> String { - to_string(NO_ANN, |s| s.print_generic_params(generic_params)) -} - -pub fn bounds_to_string<'b>(bounds: impl IntoIterator>) -> String { - to_string(NO_ANN, |s| s.print_bounds("", bounds)) -} - pub fn ty_to_string(ty: &hir::Ty<'_>) -> String { to_string(NO_ANN, |s| s.print_type(ty)) } -pub fn path_segment_to_string(segment: &hir::PathSegment<'_>) -> String { - to_string(NO_ANN, |s| s.print_path_segment(segment)) -} - -pub fn path_to_string(segment: &hir::Path<'_>) -> String { - to_string(NO_ANN, |s| s.print_path(segment, false)) -} - pub fn qpath_to_string(segment: &hir::QPath<'_>) -> String { to_string(NO_ANN, |s| s.print_qpath(segment, false)) } -pub fn fn_to_string( - decl: &hir::FnDecl<'_>, - header: hir::FnHeader, - name: Option, - generics: &hir::Generics<'_>, - arg_names: &[Ident], - body_id: Option, -) -> String { - to_string(NO_ANN, |s| s.print_fn(decl, header, name, generics, arg_names, body_id)) -} - -pub fn enum_def_to_string( - enum_definition: &hir::EnumDef<'_>, - generics: &hir::Generics<'_>, - name: Symbol, - span: rustc_span::Span, -) -> String { - to_string(NO_ANN, |s| s.print_enum_def(enum_definition, generics, name, span)) +pub fn pat_to_string(pat: &hir::Pat<'_>) -> String { + to_string(NO_ANN, |s| s.print_pat(pat)) } impl<'a> State<'a> { - pub fn bclose_maybe_open(&mut self, span: rustc_span::Span, close_box: bool) { + fn bclose_maybe_open(&mut self, span: rustc_span::Span, close_box: bool) { self.maybe_print_comment(span.hi()); self.break_offset_if_not_bol(1, -INDENT_UNIT); self.word("}"); @@ -250,11 +204,11 @@ } } - pub fn bclose(&mut self, span: rustc_span::Span) { + fn bclose(&mut self, span: rustc_span::Span) { self.bclose_maybe_open(span, true) } - pub fn commasep_cmnt(&mut self, b: Breaks, elts: &[T], mut op: F, mut get_span: G) + fn commasep_cmnt(&mut self, b: Breaks, elts: &[T], mut op: F, mut get_span: G) where F: FnMut(&mut State<'_>, &T), G: FnMut(&T) -> rustc_span::Span, @@ -275,25 +229,25 @@ self.end(); } - pub fn commasep_exprs(&mut self, b: Breaks, exprs: &[hir::Expr<'_>]) { + fn commasep_exprs(&mut self, b: Breaks, exprs: &[hir::Expr<'_>]) { self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span); } - pub fn print_mod(&mut self, _mod: &hir::Mod<'_>, attrs: &[ast::Attribute]) { + fn print_mod(&mut self, _mod: &hir::Mod<'_>, attrs: &[ast::Attribute]) { self.print_inner_attributes(attrs); for &item_id in _mod.item_ids { self.ann.nested(self, Nested::Item(item_id)); } } - pub fn print_opt_lifetime(&mut self, lifetime: &hir::Lifetime) { + fn print_opt_lifetime(&mut self, lifetime: &hir::Lifetime) { if !lifetime.is_elided() { self.print_lifetime(lifetime); self.nbsp(); } } - pub fn print_type(&mut self, ty: &hir::Ty<'_>) { + fn print_type(&mut self, ty: &hir::Ty<'_>) { self.maybe_print_comment(ty.span.lo()); self.ibox(0); match ty.kind { @@ -371,7 +325,7 @@ 
self.end() } - pub fn print_foreign_item(&mut self, item: &hir::ForeignItem<'_>) { + fn print_foreign_item(&mut self, item: &hir::ForeignItem<'_>) { self.hardbreak_if_not_bol(); self.maybe_print_comment(item.span.lo()); self.print_outer_attributes(self.attrs(item.hir_id())); @@ -478,8 +432,7 @@ self.end(); // end the outer ibox } - /// Pretty-print an item - pub fn print_item(&mut self, item: &hir::Item<'_>) { + fn print_item(&mut self, item: &hir::Item<'_>) { self.hardbreak_if_not_bol(); self.maybe_print_comment(item.span.lo()); let attrs = self.attrs(item.hir_id()); @@ -549,13 +502,13 @@ self.word(";"); self.end(); // end the outer cbox } - hir::ItemKind::Fn(ref sig, param_names, body) => { + hir::ItemKind::Fn(ref sig, generics, body) => { self.head(""); self.print_fn( sig.decl, sig.header, Some(item.ident.name), - param_names, + generics, &[], Some(body), ); @@ -704,7 +657,7 @@ self.ann.post(self, AnnNode::Item(item)) } - pub fn print_trait_ref(&mut self, t: &hir::TraitRef<'_>) { + fn print_trait_ref(&mut self, t: &hir::TraitRef<'_>) { self.print_path(t.path, false); } @@ -721,7 +674,7 @@ self.print_trait_ref(&t.trait_ref); } - pub fn print_enum_def( + fn print_enum_def( &mut self, enum_definition: &hir::EnumDef<'_>, generics: &hir::Generics<'_>, @@ -736,7 +689,7 @@ self.print_variants(enum_definition.variants, span); } - pub fn print_variants(&mut self, variants: &[hir::Variant<'_>], span: rustc_span::Span) { + fn print_variants(&mut self, variants: &[hir::Variant<'_>], span: rustc_span::Span) { self.bopen(); for v in variants { self.space_if_not_bol(); @@ -751,14 +704,14 @@ self.bclose(span) } - pub fn print_defaultness(&mut self, defaultness: hir::Defaultness) { + fn print_defaultness(&mut self, defaultness: hir::Defaultness) { match defaultness { hir::Defaultness::Default { .. 
} => self.word_nbsp("default"), hir::Defaultness::Final => (), } } - pub fn print_struct( + fn print_struct( &mut self, struct_def: &hir::VariantData<'_>, generics: &hir::Generics<'_>, @@ -807,7 +760,7 @@ } } - pub fn print_variant(&mut self, v: &hir::Variant<'_>) { + fn print_variant(&mut self, v: &hir::Variant<'_>) { self.head(""); let generics = hir::Generics::empty(); self.print_struct(&v.data, generics, v.ident.name, v.span, false); @@ -817,7 +770,8 @@ self.print_anon_const(d); } } - pub fn print_method_sig( + + fn print_method_sig( &mut self, ident: Ident, m: &hir::FnSig<'_>, @@ -828,7 +782,7 @@ self.print_fn(m.decl, m.header, Some(ident.name), generics, arg_names, body_id); } - pub fn print_trait_item(&mut self, ti: &hir::TraitItem<'_>) { + fn print_trait_item(&mut self, ti: &hir::TraitItem<'_>) { self.ann.pre(self, AnnNode::SubItem(ti.hir_id())); self.hardbreak_if_not_bol(); self.maybe_print_comment(ti.span.lo()); @@ -856,7 +810,7 @@ self.ann.post(self, AnnNode::SubItem(ti.hir_id())) } - pub fn print_impl_item(&mut self, ii: &hir::ImplItem<'_>) { + fn print_impl_item(&mut self, ii: &hir::ImplItem<'_>) { self.ann.pre(self, AnnNode::SubItem(ii.hir_id())); self.hardbreak_if_not_bol(); self.maybe_print_comment(ii.span.lo()); @@ -881,7 +835,7 @@ self.ann.post(self, AnnNode::SubItem(ii.hir_id())) } - pub fn print_local( + fn print_local( &mut self, init: Option<&hir::Expr<'_>>, els: Option<&hir::Block<'_>>, @@ -914,7 +868,7 @@ self.end() } - pub fn print_stmt(&mut self, st: &hir::Stmt<'_>) { + fn print_stmt(&mut self, st: &hir::Stmt<'_>) { self.maybe_print_comment(st.span.lo()); match st.kind { hir::StmtKind::Local(loc) => { @@ -937,19 +891,19 @@ self.maybe_print_trailing_comment(st.span, None) } - pub fn print_block(&mut self, blk: &hir::Block<'_>) { + fn print_block(&mut self, blk: &hir::Block<'_>) { self.print_block_with_attrs(blk, &[]) } - pub fn print_block_unclosed(&mut self, blk: &hir::Block<'_>) { + fn print_block_unclosed(&mut self, blk: &hir::Block<'_>) { self.print_block_maybe_unclosed(blk, &[], false) } - pub fn print_block_with_attrs(&mut self, blk: &hir::Block<'_>, attrs: &[ast::Attribute]) { + fn print_block_with_attrs(&mut self, blk: &hir::Block<'_>, attrs: &[ast::Attribute]) { self.print_block_maybe_unclosed(blk, attrs, true) } - pub fn print_block_maybe_unclosed( + fn print_block_maybe_unclosed( &mut self, blk: &hir::Block<'_>, attrs: &[ast::Attribute], @@ -1005,7 +959,7 @@ } } - pub fn print_if( + fn print_if( &mut self, test: &hir::Expr<'_>, blk: &hir::Expr<'_>, @@ -1018,14 +972,14 @@ self.print_else(elseopt) } - pub fn print_array_length(&mut self, len: &hir::ArrayLen) { + fn print_array_length(&mut self, len: &hir::ArrayLen) { match len { hir::ArrayLen::Infer(_, _) => self.word("_"), hir::ArrayLen::Body(ct) => self.print_anon_const(ct), } } - pub fn print_anon_const(&mut self, constant: &hir::AnonConst) { + fn print_anon_const(&mut self, constant: &hir::AnonConst) { self.ann.nested(self, Nested::Body(constant.body)) } @@ -1041,7 +995,7 @@ /// Prints an expr using syntax that's acceptable in a condition position, such as the `cond` in /// `if cond { ... }`. 
- pub fn print_expr_as_cond(&mut self, expr: &hir::Expr<'_>) { + fn print_expr_as_cond(&mut self, expr: &hir::Expr<'_>) { self.print_expr_cond_paren(expr, Self::cond_needs_par(expr)) } @@ -1360,7 +1314,7 @@ self.pclose(); } - pub fn print_expr(&mut self, expr: &hir::Expr<'_>) { + fn print_expr(&mut self, expr: &hir::Expr<'_>) { self.maybe_print_comment(expr.span.lo()); self.print_outer_attributes(self.attrs(expr.hir_id)); self.ibox(INDENT_UNIT); @@ -1593,7 +1547,7 @@ self.end() } - pub fn print_local_decl(&mut self, loc: &hir::Local<'_>) { + fn print_local_decl(&mut self, loc: &hir::Local<'_>) { self.print_pat(loc.pat); if let Some(ty) = loc.ty { self.word_space(":"); @@ -1601,11 +1555,11 @@ } } - pub fn print_name(&mut self, name: Symbol) { + fn print_name(&mut self, name: Symbol) { self.print_ident(Ident::with_dummy_span(name)) } - pub fn print_path(&mut self, path: &hir::Path<'_, R>, colons_before_params: bool) { + fn print_path(&mut self, path: &hir::Path<'_, R>, colons_before_params: bool) { self.maybe_print_comment(path.span.lo()); for (i, segment) in path.segments.iter().enumerate() { @@ -1619,14 +1573,14 @@ } } - pub fn print_path_segment(&mut self, segment: &hir::PathSegment<'_>) { + fn print_path_segment(&mut self, segment: &hir::PathSegment<'_>) { if segment.ident.name != kw::PathRoot { self.print_ident(segment.ident); self.print_generic_args(segment.args(), false); } } - pub fn print_qpath(&mut self, qpath: &hir::QPath<'_>, colons_before_params: bool) { + fn print_qpath(&mut self, qpath: &hir::QPath<'_>, colons_before_params: bool) { match *qpath { hir::QPath::Resolved(None, path) => self.print_path(path, colons_before_params), hir::QPath::Resolved(Some(qself), path) => { @@ -1743,7 +1697,7 @@ } } - pub fn print_type_binding(&mut self, binding: &hir::TypeBinding<'_>) { + fn print_type_binding(&mut self, binding: &hir::TypeBinding<'_>) { self.print_ident(binding.ident); self.print_generic_args(binding.gen_args, false); self.space(); @@ -1761,7 +1715,7 @@ } } - pub fn print_pat(&mut self, pat: &hir::Pat<'_>) { + fn print_pat(&mut self, pat: &hir::Pat<'_>) { self.maybe_print_comment(pat.span.lo()); self.ann.pre(self, AnnNode::Pat(pat)); // Pat isn't normalized, but the beauty of it @@ -1905,7 +1859,7 @@ self.ann.post(self, AnnNode::Pat(pat)) } - pub fn print_patfield(&mut self, field: &hir::PatField<'_>) { + fn print_patfield(&mut self, field: &hir::PatField<'_>) { if self.attrs(field.hir_id).is_empty() { self.space(); } @@ -1919,12 +1873,12 @@ self.end(); } - pub fn print_param(&mut self, arg: &hir::Param<'_>) { + fn print_param(&mut self, arg: &hir::Param<'_>) { self.print_outer_attributes(self.attrs(arg.hir_id)); self.print_pat(arg.pat); } - pub fn print_arm(&mut self, arm: &hir::Arm<'_>) { + fn print_arm(&mut self, arm: &hir::Arm<'_>) { // I have no idea why this check is necessary, but here it // is :( if self.attrs(arm.hir_id).is_empty() { @@ -1976,7 +1930,7 @@ self.end() // close enclosing cbox } - pub fn print_fn( + fn print_fn( &mut self, decl: &hir::FnDecl<'_>, header: hir::FnHeader, @@ -1994,11 +1948,10 @@ self.print_generic_params(generics.params); self.popen(); - let mut i = 0; // Make sure we aren't supplied *both* `arg_names` and `body_id`. 
assert!(arg_names.is_empty() || body_id.is_none()); - self.commasep(Inconsistent, decl.inputs, |s, ty| { - s.ibox(INDENT_UNIT); + let mut i = 0; + let mut print_arg = |s: &mut Self| { if let Some(arg_name) = arg_names.get(i) { s.word(arg_name.to_string()); s.word(":"); @@ -2009,11 +1962,17 @@ s.space(); } i += 1; + }; + self.commasep(Inconsistent, decl.inputs, |s, ty| { + s.ibox(INDENT_UNIT); + print_arg(s); s.print_type(ty); - s.end() + s.end(); }); if decl.c_variadic { - self.word(", ..."); + self.word(", "); + print_arg(self); + self.word("..."); } self.pclose(); @@ -2056,14 +2015,14 @@ } } - pub fn print_capture_clause(&mut self, capture_clause: hir::CaptureBy) { + fn print_capture_clause(&mut self, capture_clause: hir::CaptureBy) { match capture_clause { - hir::CaptureBy::Value => self.word_space("move"), + hir::CaptureBy::Value { .. } => self.word_space("move"), hir::CaptureBy::Ref => {} } } - pub fn print_closure_binder( + fn print_closure_binder( &mut self, binder: hir::ClosureBinder, generic_params: &[GenericParam<'_>], @@ -2083,7 +2042,8 @@ match binder { hir::ClosureBinder::Default => {} - // we need to distinguish `|...| {}` from `for<> |...| {}` as `for<>` adds additional restrictions + // We need to distinguish `|...| {}` from `for<> |...| {}` as `for<>` adds additional + // restrictions. hir::ClosureBinder::For { .. } if generic_params.is_empty() => self.word("for<>"), hir::ClosureBinder::For { .. } => { self.word("for"); @@ -2099,7 +2059,7 @@ } } - pub fn print_bounds<'b>( + fn print_bounds<'b>( &mut self, prefix: &'static str, bounds: impl IntoIterator>, @@ -2137,7 +2097,7 @@ } } - pub fn print_generic_params(&mut self, generic_params: &[GenericParam<'_>]) { + fn print_generic_params(&mut self, generic_params: &[GenericParam<'_>]) { if !generic_params.is_empty() { self.word("<"); @@ -2147,7 +2107,7 @@ } } - pub fn print_generic_param(&mut self, param: &GenericParam<'_>) { + fn print_generic_param(&mut self, param: &GenericParam<'_>) { if let GenericParamKind::Const { .. } = param.kind { self.word_space("const"); } @@ -2175,11 +2135,11 @@ } } - pub fn print_lifetime(&mut self, lifetime: &hir::Lifetime) { + fn print_lifetime(&mut self, lifetime: &hir::Lifetime) { self.print_ident(lifetime.ident) } - pub fn print_where_clause(&mut self, generics: &hir::Generics<'_>) { + fn print_where_clause(&mut self, generics: &hir::Generics<'_>) { if generics.predicates.is_empty() { return; } @@ -2236,7 +2196,7 @@ } } - pub fn print_mutability(&mut self, mutbl: hir::Mutability, print_const: bool) { + fn print_mutability(&mut self, mutbl: hir::Mutability, print_const: bool) { match mutbl { hir::Mutability::Mut => self.word_nbsp("mut"), hir::Mutability::Not => { @@ -2247,12 +2207,12 @@ } } - pub fn print_mt(&mut self, mt: &hir::MutTy<'_>, print_const: bool) { + fn print_mt(&mut self, mt: &hir::MutTy<'_>, print_const: bool) { self.print_mutability(mt.mutbl, print_const); self.print_type(mt.ty); } - pub fn print_fn_output(&mut self, decl: &hir::FnDecl<'_>) { + fn print_fn_output(&mut self, decl: &hir::FnDecl<'_>) { if let hir::FnRetTy::DefaultReturn(..) 
= decl.output { return; } @@ -2271,7 +2231,7 @@ } } - pub fn print_ty_fn( + fn print_ty_fn( &mut self, abi: Abi, unsafety: hir::Unsafety, @@ -2299,7 +2259,7 @@ self.end(); } - pub fn print_fn_header_info(&mut self, header: hir::FnHeader) { + fn print_fn_header_info(&mut self, header: hir::FnHeader) { self.print_constness(header.constness); match header.asyncness { @@ -2317,21 +2277,21 @@ self.word("fn") } - pub fn print_constness(&mut self, s: hir::Constness) { + fn print_constness(&mut self, s: hir::Constness) { match s { hir::Constness::NotConst => {} hir::Constness::Const => self.word_nbsp("const"), } } - pub fn print_unsafety(&mut self, s: hir::Unsafety) { + fn print_unsafety(&mut self, s: hir::Unsafety) { match s { hir::Unsafety::Normal => {} hir::Unsafety::Unsafe => self.word_nbsp("unsafe"), } } - pub fn print_is_auto(&mut self, s: hir::IsAuto) { + fn print_is_auto(&mut self, s: hir::IsAuto) { match s { hir::IsAuto::Yes => self.word_nbsp("auto"), hir::IsAuto::No => {} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -1,30 +1,30 @@ [package] name = "rustc_hir_typeck" -version = "0.1.0" +version = "0.0.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -tracing = "0.1" +# tidy-alphabetical-start rustc_ast = { path = "../rustc_ast" } rustc_attr = { path = "../rustc_attr" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } -rustc_graphviz = { path = "../rustc_graphviz" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_index = { path = "../rustc_index" } -rustc_infer = { path = "../rustc_infer" } +rustc_graphviz = { path = "../rustc_graphviz" } rustc_hir = { path = "../rustc_hir" } rustc_hir_analysis = { path = "../rustc_hir_analysis" } rustc_hir_pretty = { path = "../rustc_hir_pretty" } +rustc_index = { path = "../rustc_index" } +rustc_infer = { path = "../rustc_infer" } rustc_lint = { path = "../rustc_lint" } -rustc_middle = { path = "../rustc_middle" } rustc_macros = { path = "../rustc_macros" } +rustc_middle = { path = "../rustc_middle" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_type_ir = { path = "../rustc_type_ir" } +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -150,5 +150,5 @@ hir_typeck_use_is_empty = consider using the `is_empty` method on `{$expr_ty}` to determine if it contains anything -hir_typeck_yield_expr_outside_of_generator = - yield expression outside of generator literal 
+hir_typeck_yield_expr_outside_of_coroutine = + yield expression outside of coroutine literal diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/_match.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/_match.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/_match.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/_match.rs 2023-12-21 16:55:28.000000000 +0000 @@ -121,6 +121,7 @@ prior_arm_ty, prior_arm_span, scrut_span: scrut.span, + scrut_hir_id: scrut.hir_id, source: match_src, prior_arms: other_arms.clone(), opt_suggest_box_span, @@ -373,7 +374,8 @@ // 6 | | }; // | |_____^ expected integer, found `()` // ``` - if block.expr.is_none() && block.stmts.is_empty() + if block.expr.is_none() + && block.stmts.is_empty() && let Some(outer_span) = &mut outer_span && let Some(cond_span) = cond_span.find_ancestor_inside(*outer_span) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/callee.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/callee.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/callee.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/callee.rs 2023-12-21 16:55:28.000000000 +0000 @@ -304,8 +304,8 @@ hir::ExprKind::Block(..), ) = (parent_node, callee_node) { - let fn_decl_span = if hir.body(body).generator_kind - == Some(hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Closure)) + let fn_decl_span = if hir.body(body).coroutine_kind + == Some(hir::CoroutineKind::Async(hir::CoroutineSource::Closure)) { // Actually need to unwrap one more layer of HIR to get to // the _real_ closure... @@ -421,13 +421,7 @@ .steal_diagnostic(segment.ident.span, StashKey::CallIntoMethod) { // Try suggesting `foo(a)` -> `a.foo()` if possible. - self.suggest_call_as_method( - &mut diag, - segment, - arg_exprs, - call_expr, - expected - ); + self.suggest_call_as_method(&mut diag, segment, arg_exprs, call_expr, expected); diag.emit(); } @@ -656,7 +650,7 @@ .sess .source_map() .is_multiline(call_expr.span.with_lo(callee_expr.span.hi())) - && call_expr.span.ctxt() == callee_expr.span.ctxt(); + && call_expr.span.eq_ctxt(callee_expr.span); if call_is_multiline { err.span_suggestion( callee_expr.span.shrink_to_hi(), @@ -792,8 +786,11 @@ tcx.consts.false_ } Some(hir::ConstContext::ConstFn) => { - let args = ty::GenericArgs::identity_for_item(tcx, context); - args.host_effect_param().expect("ConstContext::Maybe must have host effect param") + let host_idx = tcx + .generics_of(context) + .host_effect_index + .expect("ConstContext::Maybe must have host effect param"); + ty::GenericArgs::identity_for_item(tcx, context).const_at(host_idx) } None => tcx.consts.true_, }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/cast.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/cast.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/cast.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/cast.rs 2023-12-21 16:55:28.000000000 +0000 @@ -128,13 +128,13 @@ | ty::Uint(..) | ty::Float(_) | ty::Array(..) - | ty::GeneratorWitness(..) + | ty::CoroutineWitness(..) | ty::RawPtr(_) | ty::Ref(..) | ty::FnDef(..) | ty::FnPtr(..) | ty::Closure(..) - | ty::Generator(..) + | ty::Coroutine(..) | ty::Adt(..) 
| ty::Never | ty::Dynamic(_, _, ty::DynStar) @@ -373,50 +373,49 @@ let mut sugg_mutref = false; if let ty::Ref(reg, cast_ty, mutbl) = *self.cast_ty.kind() { if let ty::RawPtr(TypeAndMut { ty: expr_ty, .. }) = *self.expr_ty.kind() - && fcx - .can_coerce( - Ty::new_ref(fcx.tcx, - fcx.tcx.lifetimes.re_erased, - TypeAndMut { ty: expr_ty, mutbl }, - ), - self.cast_ty, - ) + && fcx.can_coerce( + Ty::new_ref( + fcx.tcx, + fcx.tcx.lifetimes.re_erased, + TypeAndMut { ty: expr_ty, mutbl }, + ), + self.cast_ty, + ) { sugg = Some((format!("&{}*", mutbl.prefix_str()), cast_ty == expr_ty)); } else if let ty::Ref(expr_reg, expr_ty, expr_mutbl) = *self.expr_ty.kind() && expr_mutbl == Mutability::Not && mutbl == Mutability::Mut - && fcx - .can_coerce( - Ty::new_ref(fcx.tcx, - expr_reg, - TypeAndMut { ty: expr_ty, mutbl: Mutability::Mut }, - ), - self.cast_ty, - ) + && fcx.can_coerce( + Ty::new_ref( + fcx.tcx, + expr_reg, + TypeAndMut { ty: expr_ty, mutbl: Mutability::Mut }, + ), + self.cast_ty, + ) { sugg_mutref = true; } if !sugg_mutref && sugg == None - && fcx - .can_coerce( - Ty::new_ref(fcx.tcx,reg, TypeAndMut { ty: self.expr_ty, mutbl }), - self.cast_ty, - ) + && fcx.can_coerce( + Ty::new_ref(fcx.tcx, reg, TypeAndMut { ty: self.expr_ty, mutbl }), + self.cast_ty, + ) { sugg = Some((format!("&{}", mutbl.prefix_str()), false)); } } else if let ty::RawPtr(TypeAndMut { mutbl, .. }) = *self.cast_ty.kind() - && fcx - .can_coerce( - Ty::new_ref(fcx.tcx, - fcx.tcx.lifetimes.re_erased, - TypeAndMut { ty: self.expr_ty, mutbl }, - ), - self.cast_ty, - ) + && fcx.can_coerce( + Ty::new_ref( + fcx.tcx, + fcx.tcx.lifetimes.re_erased, + TypeAndMut { ty: self.expr_ty, mutbl }, + ), + self.cast_ty, + ) { sugg = Some((format!("&{}", mutbl.prefix_str()), false)); } @@ -661,9 +660,21 @@ } else { match self.try_coercion_cast(fcx) { Ok(()) => { - self.trivial_cast_lint(fcx); - debug!(" -> CoercionCast"); - fcx.typeck_results.borrow_mut().set_coercion_cast(self.expr.hir_id.local_id); + if self.expr_ty.is_unsafe_ptr() && self.cast_ty.is_unsafe_ptr() { + // When casting a raw pointer to another raw pointer, we cannot convert the cast into + // a coercion because the pointee types might only differ in regions, which HIR typeck + // cannot distinguish. This would cause us to erroneously discard a cast which will + // lead to a borrowck error like #113257. + // We still did a coercion above to unify inference variables for `ptr as _` casts. + // This does cause us to miss some trivial casts in the trival cast lint. 
+ debug!(" -> PointerCast"); + } else { + self.trivial_cast_lint(fcx); + debug!(" -> CoercionCast"); + fcx.typeck_results + .borrow_mut() + .set_coercion_cast(self.expr.hir_id.local_id); + } } Err(_) => { match self.do_check(fcx) { @@ -942,10 +953,7 @@ lint::builtin::CENUM_IMPL_DROP_CAST, self.expr.hir_id, self.span, - errors::CastEnumDrop { - expr_ty, - cast_ty, - } + errors::CastEnumDrop { expr_ty, cast_ty }, ); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/check.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/check.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/check.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/check.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,8 +2,8 @@ use crate::coercion::CoerceMany; use crate::gather_locals::GatherLocalsVisitor; +use crate::CoroutineTypes; use crate::FnCtxt; -use crate::GeneratorTypes; use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::intravisit::Visitor; @@ -31,9 +31,9 @@ decl: &'tcx hir::FnDecl<'tcx>, fn_def_id: LocalDefId, body: &'tcx hir::Body<'tcx>, - can_be_generator: Option<hir::Movability>, + can_be_coroutine: Option<hir::Movability>, params_can_be_unsized: bool, -) -> Option<GeneratorTypes<'tcx>> { +) -> Option<CoroutineTypes<'tcx>> { let fn_id = fcx.tcx.hir().local_def_id_to_hir_id(fn_def_id); let tcx = fcx.tcx; @@ -55,18 +55,23 @@ fn_maybe_err(tcx, span, fn_sig.abi); - if let Some(kind) = body.generator_kind && can_be_generator.is_some() { - let yield_ty = if kind == hir::GeneratorKind::Gen { - let yield_ty = fcx - .next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span }); - fcx.require_type_is_sized(yield_ty, span, traits::SizedYieldType); - yield_ty - } else { - Ty::new_unit(tcx,) + if let Some(kind) = body.coroutine_kind + && can_be_coroutine.is_some() + { + let yield_ty = match kind { + hir::CoroutineKind::Gen(..) | hir::CoroutineKind::Coroutine => { + let yield_ty = fcx.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::TypeInference, + span, + }); + fcx.require_type_is_sized(yield_ty, span, traits::SizedYieldType); + yield_ty + } + hir::CoroutineKind::Async(..) => Ty::new_unit(tcx), }; - // Resume type defaults to `()` if the generator has no argument. - let resume_ty = fn_sig.inputs().get(0).copied().unwrap_or_else(|| Ty::new_unit(tcx,)); + // Resume type defaults to `()` if the coroutine has no argument. + let resume_ty = fn_sig.inputs().get(0).copied().unwrap_or_else(|| Ty::new_unit(tcx)); fcx.resume_yield_tys = Some((resume_ty, yield_ty)); } @@ -113,28 +118,34 @@ fcx.typeck_results.borrow_mut().liberated_fn_sigs_mut().insert(fn_id, fn_sig); - fcx.require_type_is_sized(declared_ret_ty, decl.output.span(), traits::SizedReturnType); + let return_or_body_span = match decl.output { + hir::FnRetTy::DefaultReturn(_) => body.value.span, + hir::FnRetTy::Return(ty) => ty.span, + }; + fcx.require_type_is_sized(declared_ret_ty, return_or_body_span, traits::SizedReturnType); fcx.check_return_expr(&body.value, false); - // We insert the deferred_generator_interiors entry after visiting the body. - // This ensures that all nested generators appear before the entry of this generator. - // resolve_generator_interiors relies on this property. - let gen_ty = if let (Some(_), Some(gen_kind)) = (can_be_generator, body.generator_kind) { + // We insert the deferred_coroutine_interiors entry after visiting the body. + // This ensures that all nested coroutines appear before the entry of this coroutine.
+ // resolve_coroutine_interiors relies on this property. + let coroutine_ty = if let (Some(_), Some(coroutine_kind)) = + (can_be_coroutine, body.coroutine_kind) + { let interior = fcx .next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, span }); - fcx.deferred_generator_interiors.borrow_mut().push(( + fcx.deferred_coroutine_interiors.borrow_mut().push(( fn_def_id, body.id(), interior, - gen_kind, + coroutine_kind, )); let (resume_ty, yield_ty) = fcx.resume_yield_tys.unwrap(); - Some(GeneratorTypes { + Some(CoroutineTypes { resume_ty, yield_ty, interior, - movability: can_be_generator.unwrap(), + movability: can_be_coroutine.unwrap(), }) } else { None @@ -169,11 +180,13 @@ check_panic_info_fn(tcx, panic_impl_did.expect_local(), fn_sig); } - if let Some(lang_start_defid) = tcx.lang_items().start_fn() && lang_start_defid == fn_def_id.to_def_id() { + if let Some(lang_start_defid) = tcx.lang_items().start_fn() + && lang_start_defid == fn_def_id.to_def_id() + { check_lang_start_fn(tcx, fn_sig, fn_def_id); } - gen_ty + coroutine_ty } fn check_panic_info_fn(tcx: TyCtxt<'_>, fn_id: LocalDefId, fn_sig: ty::FnSig<'_>) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/closure.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/closure.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/closure.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/closure.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,6 @@ //! Code for type-checking closure expressions. -use super::{check_fn, Expectation, FnCtxt, GeneratorTypes}; +use super::{check_fn, CoroutineTypes, Expectation, FnCtxt}; use rustc_errors::ErrorGuaranteed; use rustc_hir as hir; @@ -14,8 +14,7 @@ use rustc_middle::ty::GenericArgs; use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitor}; use rustc_span::def_id::LocalDefId; -use rustc_span::source_map::Span; -use rustc_span::sym; +use rustc_span::{sym, Span}; use rustc_target::spec::abi::Abi; use rustc_trait_selection::traits; use rustc_trait_selection::traits::error_reporting::ArgKind; @@ -56,7 +55,9 @@ // closure sooner rather than later, so first examine the expected // type, and see if can glean a closure kind from there. let (expected_sig, expected_kind) = match expected.to_option(self) { - Some(ty) => self.deduce_closure_signature(ty), + Some(ty) => { + self.deduce_closure_signature(self.try_structurally_resolve_type(expr_span, ty)) + } None => (None, None), }; let body = self.tcx.hir().body(closure.body); @@ -82,7 +83,7 @@ debug!(?bound_sig, ?liberated_sig); let mut fcx = FnCtxt::new(self, self.param_env, closure.def_id); - let generator_types = check_fn( + let coroutine_types = check_fn( &mut fcx, liberated_sig, closure.fn_decl, @@ -103,11 +104,11 @@ span: self.tcx.def_span(expr_def_id), }); - if let Some(GeneratorTypes { resume_ty, yield_ty, interior, movability }) = generator_types + if let Some(CoroutineTypes { resume_ty, yield_ty, interior, movability }) = coroutine_types { - let generator_args = ty::GeneratorArgs::new( + let coroutine_args = ty::CoroutineArgs::new( self.tcx, - ty::GeneratorArgsParts { + ty::CoroutineArgsParts { parent_args, resume_ty, yield_ty, @@ -117,10 +118,10 @@ }, ); - return Ty::new_generator( + return Ty::new_coroutine( self.tcx, expr_def_id.to_def_id(), - generator_args.args, + coroutine_args.args, movability, ); } @@ -226,7 +227,8 @@ // Given a Projection predicate, we can potentially infer // the complete signature. 
if expected_sig.is_none() - && let ty::PredicateKind::Clause(ty::ClauseKind::Projection(proj_predicate)) = bound_predicate.skip_binder() + && let ty::PredicateKind::Clause(ty::ClauseKind::Projection(proj_predicate)) = + bound_predicate.skip_binder() { let inferred_sig = self.normalize( span, @@ -282,7 +284,7 @@ } /// Given a projection like "<F as Fn(X)>::Result == Y", we can deduce - /// everything we need to know about a closure or generator. + /// everything we need to know about a closure or coroutine. /// /// The `cause_span` should be the span that caused us to /// have this expected signature, or `None` if we can't readily @@ -299,18 +301,18 @@ let is_fn = tcx.is_fn_trait(trait_def_id); - let gen_trait = tcx.lang_items().gen_trait(); - let is_gen = gen_trait == Some(trait_def_id); + let coroutine_trait = tcx.lang_items().coroutine_trait(); + let is_gen = coroutine_trait == Some(trait_def_id); if !is_fn && !is_gen { - debug!("not fn or generator"); + debug!("not fn or coroutine"); return None; } - // Check that we deduce the signature from the `<_ as std::ops::Generator>::Return` + // Check that we deduce the signature from the `<_ as std::ops::Coroutine>::Return` // associated item and not yield. if is_gen && self.tcx.associated_item(projection.projection_def_id()).name != sym::Return { - debug!("not `Return` assoc item of `Generator`"); + debug!("not `Return` assoc item of `Coroutine`"); return None; } @@ -324,7 +326,7 @@ _ => return None, } } else { - // Generators with a `()` resume type may be defined with 0 or 1 explicit arguments, + // Coroutines with a `()` resume type may be defined with 0 or 1 explicit arguments, // else they must have exactly 1 argument. For now though, just give up in this case. return None; }; @@ -620,7 +622,7 @@ let astconv: &dyn AstConv<'_> = self; trace!("decl = {:#?}", decl); - debug!(?body.generator_kind); + debug!(?body.coroutine_kind); let hir_id = self.tcx.hir().local_def_id_to_hir_id(expr_def_id); let bound_vars = self.tcx.late_bound_vars(hir_id); @@ -629,11 +631,11 @@ let supplied_arguments = decl.inputs.iter().map(|a| astconv.ast_ty_to_ty(a)); let supplied_return = match decl.output { hir::FnRetTy::Return(ref output) => astconv.ast_ty_to_ty(&output), - hir::FnRetTy::DefaultReturn(_) => match body.generator_kind { + hir::FnRetTy::DefaultReturn(_) => match body.coroutine_kind { // In the case of the async block that we create for a function body, // we expect the return type of the block to match that of the enclosing // function. - Some(hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Fn)) => { + Some(hir::CoroutineKind::Async(hir::CoroutineSource::Fn)) => { debug!("closure is async fn body"); let def_id = self.tcx.hir().body_owner_def_id(body.id()); self.deduce_future_output_from_obligations(expr_def_id, def_id).unwrap_or_else( @@ -649,6 +651,9 @@ }, ) } + Some(hir::CoroutineKind::Gen(hir::CoroutineSource::Fn)) => { + todo!("gen closures do not exist yet") + } _ => astconv.ty_infer(None, decl.output.span()), }, @@ -672,7 +677,7 @@ self.normalize(self.tcx.hir().span(hir_id), result) } - /// Invoked when we are translating the generator that results + /// Invoked when we are translating the coroutine that results /// from desugaring an `async fn`. Returns the "sugared" return /// type of the `async fn` -- that is, the return type that the /// user specified.
The "desugared" return type is an `impl @@ -685,11 +690,12 @@ body_def_id: LocalDefId, ) -> Option> { let ret_coercion = self.ret_coercion.as_ref().unwrap_or_else(|| { - span_bug!(self.tcx.def_span(expr_def_id), "async fn generator outside of a fn") + span_bug!(self.tcx.def_span(expr_def_id), "async fn coroutine outside of a fn") }); + let closure_span = self.tcx.def_span(expr_def_id); let ret_ty = ret_coercion.borrow().expected_ty(); - let ret_ty = self.inh.infcx.shallow_resolve(ret_ty); + let ret_ty = self.try_structurally_resolve_type(closure_span, ret_ty); let get_future_output = |predicate: ty::Predicate<'tcx>, span| { // Search for a pending obligation like @@ -711,8 +717,6 @@ } }; - let span = self.tcx.def_span(expr_def_id); - let output_ty = match *ret_ty.kind() { ty::Infer(ty::TyVar(ret_vid)) => { self.obligations_for_self_ty(ret_vid).find_map(|obligation| { @@ -726,17 +730,22 @@ .find_map(|(p, s)| get_future_output(p.as_predicate(), s))?, ty::Error(_) => return None, _ => span_bug!( - span, - "async fn generator return type not an inference variable: {ret_ty}" + closure_span, + "async fn coroutine return type not an inference variable: {ret_ty}" ), }; - let output_ty = self.normalize(span, output_ty); + let output_ty = self.normalize(closure_span, output_ty); // async fn that have opaque types in their return type need to redo the conversion to inference variables // as they fetch the still opaque version from the signature. let InferOk { value: output_ty, obligations } = self - .replace_opaque_types_with_inference_vars(output_ty, body_def_id, span, self.param_env); + .replace_opaque_types_with_inference_vars( + output_ty, + body_def_id, + closure_span, + self.param_env, + ); self.register_predicates(obligations); Some(output_ty) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/coercion.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/coercion.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/coercion.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/coercion.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1619,8 +1619,9 @@ unsized_return = self.is_return_ty_definitely_unsized(fcx); } if let Some(expression) = expression - && let hir::ExprKind::Loop(loop_blk, ..) = expression.kind { - intravisit::walk_block(& mut visitor, loop_blk); + && let hir::ExprKind::Loop(loop_blk, ..) = expression.kind + { + intravisit::walk_block(&mut visitor, loop_blk); } } ObligationCauseCode::ReturnValue(id) => { @@ -1661,7 +1662,9 @@ ); } - if visitor.ret_exprs.len() > 0 && let Some(expr) = expression { + if visitor.ret_exprs.len() > 0 + && let Some(expr) = expression + { self.note_unreachable_loop_return(&mut err, &expr, &visitor.ret_exprs); } @@ -1723,7 +1726,10 @@ let parent_id = fcx.tcx.hir().parent_id(id); let parent = fcx.tcx.hir().get(parent_id); if let Some(expr) = expression - && let hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(&hir::Closure { body, .. }), .. }) = parent + && let hir::Node::Expr(hir::Expr { + kind: hir::ExprKind::Closure(&hir::Closure { body, .. }), + .. 
+ }) = parent && !matches!(fcx.tcx.hir().body(body).value.kind, hir::ExprKind::Block(..)) { fcx.suggest_missing_semicolon(&mut err, expr, expected, true); @@ -1798,12 +1804,7 @@ && let Some(fn_sig) = fcx.body_fn_sig() && fn_sig.output().is_ty_var() { - err.span_note( - sp, - format!( - "return type inferred to be `{expected}` here" - ), - ); + err.span_note(sp, format!("return type inferred to be `{expected}` here")); } err diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/demand.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/demand.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/demand.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/demand.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,30 +1,21 @@ use crate::FnCtxt; -use rustc_ast::util::parser::PREC_POSTFIX; use rustc_errors::MultiSpan; use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed}; use rustc_hir as hir; -use rustc_hir::def::{CtorKind, Res}; +use rustc_hir::def::Res; use rustc_hir::intravisit::Visitor; -use rustc_hir::lang_items::LangItem; -use rustc_hir::{is_range_literal, Node}; use rustc_infer::infer::{DefineOpaqueTypes, InferOk}; -use rustc_middle::lint::in_external_macro; -use rustc_middle::middle::stability::EvalResult; use rustc_middle::ty::adjustment::AllowTwoPhase; use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::fold::BottomUpFolder; use rustc_middle::ty::print::with_no_trimmed_paths; -use rustc_middle::ty::{self, Article, AssocItem, Ty, TypeAndMut, TypeFoldable}; +use rustc_middle::ty::{self, AssocItem, Ty, TypeFoldable, TypeVisitableExt}; use rustc_span::symbol::sym; -use rustc_span::{BytePos, Span, DUMMY_SP}; -use rustc_trait_selection::infer::InferCtxtExt as _; +use rustc_span::{Span, DUMMY_SP}; use rustc_trait_selection::traits::ObligationCause; use super::method::probe; -use std::cmp::min; -use std::iter; - impl<'a, 'tcx> FnCtxt<'a, 'tcx> { pub fn emit_type_mismatch_suggestions( &self, @@ -58,7 +49,8 @@ || self.suggest_into(err, expr, expr_ty, expected) || self.suggest_floating_point_literal(err, expr, expected) || self.suggest_null_ptr_for_literal_zero_given_to_ptr_arg(err, expr, expected) - || self.suggest_coercing_result_via_try_operator(err, expr, expected, expr_ty); + || self.suggest_coercing_result_via_try_operator(err, expr, expected, expr_ty) + || self.suggest_missing_unwrap_expect(err, expr, expected, expr_ty); if !suggested { self.note_source_of_type_mismatch_constraint( @@ -151,7 +143,8 @@ && let [segment] = path.segments && segment.ident.name.as_str() == name && let Res::Local(hir_id) = path.res - && let Some((_, hir::Node::Expr(match_expr))) = self.tcx.hir().parent_iter(hir_id).nth(2) + && let Some((_, hir::Node::Expr(match_expr))) = + self.tcx.hir().parent_iter(hir_id).nth(2) && let hir::ExprKind::Match(scrutinee, _, _) = match_expr.kind && let hir::ExprKind::Tup(exprs) = scrutinee.kind && let hir::ExprKind::AddrOf(_, _, macro_arg) = exprs[idx].kind @@ -450,20 +443,33 @@ // If our binding became incompatible while it was a receiver // to a method call, we may be able to make a better guess to // the source of a type mismatch. 
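[Illustrative aside, not part of the patch: the `suggest_missing_unwrap_expect` arm added to the suggestion chain in the demand.rs hunk above targets mismatches like the following; whether the suggestion fires depends on the exact expected/found types:

    fn main() {
        // `let n: i32 = "42".parse();` is a mismatch: `i32` expected, but
        // `Result<i32, ParseIntError>` found. The new arm can suggest
        // appending `.expect(...)` (or `.unwrap()`), as written here.
        let n: i32 = "42".parse().expect("not a number");
        assert_eq!(n, 42);
    }
]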
- let Some(rcvr_ty) = self.node_ty_opt(rcvr.hir_id) else { continue; }; + let Some(rcvr_ty) = self.node_ty_opt(rcvr.hir_id) else { + continue; + }; let rcvr_ty = rcvr_ty.fold_with(&mut fudger); - let Ok(method) = - self.lookup_method_for_diagnostic(rcvr_ty, segment, DUMMY_SP, parent_expr, rcvr) - else { + let Ok(method) = self.lookup_method_for_diagnostic( + rcvr_ty, + segment, + DUMMY_SP, + parent_expr, + rcvr, + ) else { continue; }; let ideal_rcvr_ty = rcvr_ty.fold_with(&mut fudger); let ideal_method = self - .lookup_method_for_diagnostic(ideal_rcvr_ty, segment, DUMMY_SP, parent_expr, rcvr) + .lookup_method_for_diagnostic( + ideal_rcvr_ty, + segment, + DUMMY_SP, + parent_expr, + rcvr, + ) .ok() .and_then(|method| { - let _ = self.at(&ObligationCause::dummy(), self.param_env) + let _ = self + .at(&ObligationCause::dummy(), self.param_env) .eq(DefineOpaqueTypes::No, ideal_rcvr_ty, expected_ty) .ok()?; Some(method) @@ -474,15 +480,12 @@ for (idx, (expected_arg_ty, arg_expr)) in std::iter::zip(&method.sig.inputs()[1..], args).enumerate() { - let Some(arg_ty) = self.node_ty_opt(arg_expr.hir_id) else { continue; }; + let Some(arg_ty) = self.node_ty_opt(arg_expr.hir_id) else { + continue; + }; let arg_ty = arg_ty.fold_with(&mut fudger); - let _ = self.coerce( - arg_expr, - arg_ty, - *expected_arg_ty, - AllowTwoPhase::No, - None, - ); + let _ = + self.coerce(arg_expr, arg_ty, *expected_arg_ty, AllowTwoPhase::No, None); self.select_obligations_where_possible(|errs| { // Yeet the errors, we're already reporting errors. errs.clear(); @@ -504,12 +507,18 @@ // incompatible fix at the original mismatch site. if matches!(source, TypeMismatchSource::Ty(_)) && let Some(ideal_method) = ideal_method + && let ideal_arg_ty = self.resolve_vars_if_possible(ideal_method.sig.inputs()[idx + 1]) + // HACK(compiler-errors): We don't actually consider the implications + // of our inference guesses in `emit_type_mismatch_suggestions`, so + // only suggest things when we know our type error is precisely due to + // a type mismatch, and not via some projection or something. See #116155. + && !ideal_arg_ty.has_non_region_infer() { self.emit_type_mismatch_suggestions( err, arg_expr, arg_ty, - self.resolve_vars_if_possible(ideal_method.sig.inputs()[idx + 1]), + ideal_arg_ty, None, None, ); @@ -642,10 +651,7 @@ None => self.tcx.types.unit, }; if self.can_eq(self.param_env, ty, expected) { - err.span_label( - ex.span, - "expected because of this `break`", - ); + err.span_label(ex.span, "expected because of this `break`"); exit = true; } } @@ -940,310 +946,6 @@ ); } - pub(crate) fn suggest_coercing_result_via_try_operator( - &self, - err: &mut Diagnostic, - expr: &hir::Expr<'tcx>, - expected: Ty<'tcx>, - found: Ty<'tcx>, - ) -> bool { - let ty::Adt(e, args_e) = expected.kind() else { - return false; - }; - let ty::Adt(f, args_f) = found.kind() else { - return false; - }; - if e.did() != f.did() { - return false; - } - if Some(e.did()) != self.tcx.get_diagnostic_item(sym::Result) { - return false; - } - let map = self.tcx.hir(); - if let Some(hir::Node::Expr(expr)) = map.find_parent(expr.hir_id) - && let hir::ExprKind::Ret(_) = expr.kind - { - // `return foo;` - } else if map.get_return_block(expr.hir_id).is_some() { - // Function's tail expression. 
- } else { - return false; - } - let e = args_e.type_at(1); - let f = args_f.type_at(1); - if self - .infcx - .type_implements_trait( - self.tcx.get_diagnostic_item(sym::Into).unwrap(), - [f, e], - self.param_env, - ) - .must_apply_modulo_regions() - { - err.multipart_suggestion( - "use `?` to coerce and return an appropriate `Err`, and wrap the resulting value \ - in `Ok` so the expression remains of type `Result`", - vec![ - (expr.span.shrink_to_lo(), "Ok(".to_string()), - (expr.span.shrink_to_hi(), "?)".to_string()), - ], - Applicability::MaybeIncorrect, - ); - return true; - } - false - } - - /// If the expected type is an enum (Issue #55250) with any variants whose - /// sole field is of the found type, suggest such variants. (Issue #42764) - fn suggest_compatible_variants( - &self, - err: &mut Diagnostic, - expr: &hir::Expr<'_>, - expected: Ty<'tcx>, - expr_ty: Ty<'tcx>, - ) -> bool { - if in_external_macro(self.tcx.sess, expr.span) { - return false; - } - if let ty::Adt(expected_adt, args) = expected.kind() { - if let hir::ExprKind::Field(base, ident) = expr.kind { - let base_ty = self.typeck_results.borrow().expr_ty(base); - if self.can_eq(self.param_env, base_ty, expected) - && let Some(base_span) = base.span.find_ancestor_inside(expr.span) - { - err.span_suggestion_verbose( - expr.span.with_lo(base_span.hi()), - format!("consider removing the tuple struct field `{ident}`"), - "", - Applicability::MaybeIncorrect, - ); - return true; - } - } - - // If the expression is of type () and it's the return expression of a block, - // we suggest adding a separate return expression instead. - // (To avoid things like suggesting `Ok(while .. { .. })`.) - if expr_ty.is_unit() { - let mut id = expr.hir_id; - let mut parent; - - // Unroll desugaring, to make sure this works for `for` loops etc. - loop { - parent = self.tcx.hir().parent_id(id); - if let Some(parent_span) = self.tcx.hir().opt_span(parent) { - if parent_span.find_ancestor_inside(expr.span).is_some() { - // The parent node is part of the same span, so is the result of the - // same expansion/desugaring and not the 'real' parent node. - id = parent; - continue; - } - } - break; - } - - if let Some(hir::Node::Block(&hir::Block { - span: block_span, expr: Some(e), .. - })) = self.tcx.hir().find(parent) - { - if e.hir_id == id { - if let Some(span) = expr.span.find_ancestor_inside(block_span) { - let return_suggestions = if self - .tcx - .is_diagnostic_item(sym::Result, expected_adt.did()) - { - vec!["Ok(())"] - } else if self.tcx.is_diagnostic_item(sym::Option, expected_adt.did()) { - vec!["None", "Some(())"] - } else { - return false; - }; - if let Some(indent) = - self.tcx.sess.source_map().indentation_before(span.shrink_to_lo()) - { - // Add a semicolon, except after `}`. 
- let semicolon = - match self.tcx.sess.source_map().span_to_snippet(span) { - Ok(s) if s.ends_with('}') => "", - _ => ";", - }; - err.span_suggestions( - span.shrink_to_hi(), - "try adding an expression at the end of the block", - return_suggestions - .into_iter() - .map(|r| format!("{semicolon}\n{indent}{r}")), - Applicability::MaybeIncorrect, - ); - } - return true; - } - } - } - } - - let compatible_variants: Vec<(String, _, _, Option<String>)> = expected_adt - .variants() - .iter() - .filter(|variant| { - variant.fields.len() == 1 - }) - .filter_map(|variant| { - let sole_field = &variant.single_field(); - - let field_is_local = sole_field.did.is_local(); - let field_is_accessible = - sole_field.vis.is_accessible_from(expr.hir_id.owner.def_id, self.tcx) - // Skip suggestions for unstable public fields (for example `Pin::pointer`) - && matches!(self.tcx.eval_stability(sole_field.did, None, expr.span, None), EvalResult::Allow | EvalResult::Unmarked); - - if !field_is_local && !field_is_accessible { - return None; - } - - let note_about_variant_field_privacy = (field_is_local && !field_is_accessible) - .then(|| " (its field is private, but it's local to this crate and its privacy can be changed)".to_string()); - - let sole_field_ty = sole_field.ty(self.tcx, args); - if self.can_coerce(expr_ty, sole_field_ty) { - let variant_path = - with_no_trimmed_paths!(self.tcx.def_path_str(variant.def_id)); - // FIXME #56861: DRYer prelude filtering - if let Some(path) = variant_path.strip_prefix("std::prelude::") - && let Some((_, path)) = path.split_once("::") - { - return Some((path.to_string(), variant.ctor_kind(), sole_field.name, note_about_variant_field_privacy)); - } - Some((variant_path, variant.ctor_kind(), sole_field.name, note_about_variant_field_privacy)) - } else { - None - } - }) - .collect(); - - let suggestions_for = |variant: &_, ctor_kind, field_name| { - let prefix = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { - Some(ident) => format!("{ident}: "), - None => String::new(), - }; - - let (open, close) = match ctor_kind { - Some(CtorKind::Fn) => ("(".to_owned(), ")"), - None => (format!(" {{ {field_name}: "), " }"), - - // unit variants don't have fields - Some(CtorKind::Const) => unreachable!(), - }; - - // Suggest constructor as deep into the block tree as possible. - // This fixes https://github.com/rust-lang/rust/issues/101065, - // and also just helps make the most minimal suggestions. - let mut expr = expr; - while let hir::ExprKind::Block(block, _) = &expr.kind - && let Some(expr_) = &block.expr - { - expr = expr_ - } - - vec![ - (expr.span.shrink_to_lo(), format!("{prefix}{variant}{open}")), - (expr.span.shrink_to_hi(), close.to_owned()), - ] - }; - - match &compatible_variants[..] { - [] => { /* No variants to format */ } - [(variant, ctor_kind, field_name, note)] => { - // Just a single matching variant. - err.multipart_suggestion_verbose( - format!( - "try wrapping the expression in `{variant}`{note}", - note = note.as_deref().unwrap_or("") - ), - suggestions_for(&**variant, *ctor_kind, *field_name), - Applicability::MaybeIncorrect, - ); - return true; - } - _ => { - // More than one matching variant.
- err.multipart_suggestions( - format!( - "try wrapping the expression in a variant of `{}`", - self.tcx.def_path_str(expected_adt.did()) - ), - compatible_variants.into_iter().map( - |(variant, ctor_kind, field_name, _)| { - suggestions_for(&variant, ctor_kind, field_name) - }, - ), - Applicability::MaybeIncorrect, - ); - return true; - } - } - } - - false - } - - fn suggest_non_zero_new_unwrap( - &self, - err: &mut Diagnostic, - expr: &hir::Expr<'_>, - expected: Ty<'tcx>, - expr_ty: Ty<'tcx>, - ) -> bool { - let tcx = self.tcx; - let (adt, unwrap) = match expected.kind() { - // In case Option is wanted, but * is provided, suggest calling new - ty::Adt(adt, args) if tcx.is_diagnostic_item(sym::Option, adt.did()) => { - // Unwrap option - let ty::Adt(adt, _) = args.type_at(0).kind() else { - return false; - }; - - (adt, "") - } - // In case NonZero* is wanted, but * is provided also add `.unwrap()` to satisfy types - ty::Adt(adt, _) => (adt, ".unwrap()"), - _ => return false, - }; - - let map = [ - (sym::NonZeroU8, tcx.types.u8), - (sym::NonZeroU16, tcx.types.u16), - (sym::NonZeroU32, tcx.types.u32), - (sym::NonZeroU64, tcx.types.u64), - (sym::NonZeroU128, tcx.types.u128), - (sym::NonZeroI8, tcx.types.i8), - (sym::NonZeroI16, tcx.types.i16), - (sym::NonZeroI32, tcx.types.i32), - (sym::NonZeroI64, tcx.types.i64), - (sym::NonZeroI128, tcx.types.i128), - ]; - - let Some((s, _)) = map.iter().find(|&&(s, t)| { - self.tcx.is_diagnostic_item(s, adt.did()) && self.can_coerce(expr_ty, t) - }) else { - return false; - }; - - let path = self.tcx.def_path_str(adt.non_enum_variant().def_id); - - err.multipart_suggestion( - format!("consider calling `{s}::new`"), - vec![ - (expr.span.shrink_to_lo(), format!("{path}::new(")), - (expr.span.shrink_to_hi(), format!("){unwrap}")), - ], - Applicability::MaybeIncorrect, - ); - - true - } - pub fn get_conversion_methods( &self, span: Span, @@ -1290,82 +992,6 @@ } } - /// Identify some cases where `as_ref()` would be appropriate and suggest it. - /// - /// Given the following code: - /// ```compile_fail,E0308 - /// struct Foo; - /// fn takes_ref(_: &Foo) {} - /// let ref opt = Some(Foo); - /// - /// opt.map(|param| takes_ref(param)); - /// ``` - /// Suggest using `opt.as_ref().map(|param| takes_ref(param));` instead. - /// - /// It only checks for `Option` and `Result` and won't work with - /// ```ignore (illustrative) - /// opt.map(|param| { takes_ref(param) }); - /// ``` - fn can_use_as_ref(&self, expr: &hir::Expr<'_>) -> Option<(Vec<(Span, String)>, &'static str)> { - let hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) = expr.kind else { - return None; - }; - - let hir::def::Res::Local(local_id) = path.res else { - return None; - }; - - let local_parent = self.tcx.hir().parent_id(local_id); - let Some(Node::Param(hir::Param { hir_id: param_hir_id, .. })) = - self.tcx.hir().find(local_parent) - else { - return None; - }; - - let param_parent = self.tcx.hir().parent_id(*param_hir_id); - let Some(Node::Expr(hir::Expr { - hir_id: expr_hir_id, - kind: hir::ExprKind::Closure(hir::Closure { fn_decl: closure_fn_decl, .. }), - .. - })) = self.tcx.hir().find(param_parent) - else { - return None; - }; - - let expr_parent = self.tcx.hir().parent_id(*expr_hir_id); - let hir = self.tcx.hir().find(expr_parent); - let closure_params_len = closure_fn_decl.inputs.len(); - let ( - Some(Node::Expr(hir::Expr { - kind: hir::ExprKind::MethodCall(method_path, receiver, ..), - .. 
- })), - 1, - ) = (hir, closure_params_len) - else { - return None; - }; - - let self_ty = self.typeck_results.borrow().expr_ty(receiver); - let name = method_path.ident.name; - let is_as_ref_able = match self_ty.peel_refs().kind() { - ty::Adt(def, _) => { - (self.tcx.is_diagnostic_item(sym::Option, def.did()) - || self.tcx.is_diagnostic_item(sym::Result, def.did())) - && (name == sym::map || name == sym::and_then) - } - _ => false, - }; - if is_as_ref_able { - Some(( - vec![(method_path.ident.span.shrink_to_lo(), "as_ref().".to_string())], - "consider using `as_ref` instead", - )) - } else { - None - } - } - /// If the given `HirId` corresponds to a block with a trailing expression, return that expression pub(crate) fn maybe_get_block_expr( &self, @@ -1377,21 +1003,6 @@ } } - /// Returns whether the given expression is an `else if`. - pub(crate) fn is_else_if_block(&self, expr: &hir::Expr<'_>) -> bool { - if let hir::ExprKind::If(..) = expr.kind { - let parent_id = self.tcx.hir().parent_id(expr.hir_id); - if let Some(Node::Expr(hir::Expr { - kind: hir::ExprKind::If(_, _, Some(else_expr)), - .. - })) = self.tcx.hir().find(parent_id) - { - return else_expr.hir_id == expr.hir_id; - } - } - false - } - // Returns whether the given expression is a destruct assignment desugaring. // For example, `(a, b) = (1, &2);` // Here we try to find the pattern binding of the expression, @@ -1404,884 +1015,16 @@ { let bind = self.tcx.hir().find(*bind_hir_id); let parent = self.tcx.hir().find(self.tcx.hir().parent_id(*bind_hir_id)); - if let Some(hir::Node::Pat(hir::Pat { kind: hir::PatKind::Binding(_, _hir_id, _, _), .. })) = bind && - let Some(hir::Node::Pat(hir::Pat { default_binding_modes: false, .. })) = parent { - return true; - } - } - return false; - } - - /// This function is used to determine potential "simple" improvements or users' errors and - /// provide them useful help. For example: - /// - /// ```compile_fail,E0308 - /// fn some_fn(s: &str) {} - /// - /// let x = "hey!".to_owned(); - /// some_fn(x); // error - /// ``` - /// - /// No need to find every potential function which could make a coercion to transform a - /// `String` into a `&str` since a `&` would do the trick! - /// - /// In addition of this check, it also checks between references mutability state. If the - /// expected is mutable but the provided isn't, maybe we could just say "Hey, try with - /// `&mut`!". - pub fn suggest_deref_or_ref( - &self, - expr: &hir::Expr<'tcx>, - checked_ty: Ty<'tcx>, - expected: Ty<'tcx>, - ) -> Option<( - Vec<(Span, String)>, - String, - Applicability, - bool, /* verbose */ - bool, /* suggest `&` or `&mut` type annotation */ - )> { - let sess = self.sess(); - let sp = expr.span; - - // If the span is from an external macro, there's no suggestion we can make. - if in_external_macro(sess, sp) { - return None; - } - - let sm = sess.source_map(); - - let replace_prefix = |s: &str, old: &str, new: &str| { - s.strip_prefix(old).map(|stripped| new.to_string() + stripped) - }; - - // `ExprKind::DropTemps` is semantically irrelevant for these suggestions. 
- let expr = expr.peel_drop_temps(); - - match (&expr.kind, expected.kind(), checked_ty.kind()) { - (_, &ty::Ref(_, exp, _), &ty::Ref(_, check, _)) => match (exp.kind(), check.kind()) { - (&ty::Str, &ty::Array(arr, _) | &ty::Slice(arr)) if arr == self.tcx.types.u8 => { - if let hir::ExprKind::Lit(_) = expr.kind - && let Ok(src) = sm.span_to_snippet(sp) - && replace_prefix(&src, "b\"", "\"").is_some() - { - let pos = sp.lo() + BytePos(1); - return Some(( - vec![(sp.with_hi(pos), String::new())], - "consider removing the leading `b`".to_string(), - Applicability::MachineApplicable, - true, - false, - )); - } - } - (&ty::Array(arr, _) | &ty::Slice(arr), &ty::Str) if arr == self.tcx.types.u8 => { - if let hir::ExprKind::Lit(_) = expr.kind - && let Ok(src) = sm.span_to_snippet(sp) - && replace_prefix(&src, "\"", "b\"").is_some() - { - return Some(( - vec![(sp.shrink_to_lo(), "b".to_string())], - "consider adding a leading `b`".to_string(), - Applicability::MachineApplicable, - true, - false, - )); - } - } - _ => {} - }, - (_, &ty::Ref(_, _, mutability), _) => { - // Check if it can work when put into a ref. For example: - // - // ``` - // fn bar(x: &mut i32) {} - // - // let x = 0u32; - // bar(&x); // error, expected &mut - // ``` - let ref_ty = match mutability { - hir::Mutability::Mut => { - Ty::new_mut_ref(self.tcx,self.tcx.lifetimes.re_static, checked_ty) - } - hir::Mutability::Not => { - Ty::new_imm_ref(self.tcx,self.tcx.lifetimes.re_static, checked_ty) - } - }; - if self.can_coerce(ref_ty, expected) { - let mut sugg_sp = sp; - if let hir::ExprKind::MethodCall(ref segment, receiver, args, _) = expr.kind { - let clone_trait = - self.tcx.require_lang_item(LangItem::Clone, Some(segment.ident.span)); - if args.is_empty() - && self.typeck_results.borrow().type_dependent_def_id(expr.hir_id).map( - |did| { - let ai = self.tcx.associated_item(did); - ai.trait_container(self.tcx) == Some(clone_trait) - }, - ) == Some(true) - && segment.ident.name == sym::clone - { - // If this expression had a clone call when suggesting borrowing - // we want to suggest removing it because it'd now be unnecessary. - sugg_sp = receiver.span; - } - } - - if let hir::ExprKind::Unary(hir::UnOp::Deref, ref inner) = expr.kind - && let Some(1) = self.deref_steps(expected, checked_ty) - { - // We have `*&T`, check if what was expected was `&T`. - // If so, we may want to suggest removing a `*`. - sugg_sp = sugg_sp.with_hi(inner.span.lo()); - return Some(( - vec![(sugg_sp, String::new())], - "consider removing deref here".to_string(), - Applicability::MachineApplicable, - true, - false, - )); - } - - let needs_parens = match expr.kind { - // parenthesize if needed (Issue #46756) - hir::ExprKind::Cast(_, _) | hir::ExprKind::Binary(_, _, _) => true, - // parenthesize borrows of range literals (Issue #54505) - _ if is_range_literal(expr) => true, - _ => false, - }; - - if let Some((sugg, msg)) = self.can_use_as_ref(expr) { - return Some(( - sugg, - msg.to_string(), - Applicability::MachineApplicable, - true, - false, - )); - } - - let prefix = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { - Some(ident) => format!("{ident}: "), - None => String::new(), - }; - - if let Some(hir::Node::Expr(hir::Expr { - kind: hir::ExprKind::Assign(..), - .. 
- })) = self.tcx.hir().find_parent(expr.hir_id) - { - if mutability.is_mut() { - // Suppressing this diagnostic, we'll properly print it in `check_expr_assign` - return None; - } - } - - let sugg = mutability.ref_prefix_str(); - let (sugg, verbose) = if needs_parens { - ( - vec![ - (sp.shrink_to_lo(), format!("{prefix}{sugg}(")), - (sp.shrink_to_hi(), ")".to_string()), - ], - false, - ) - } else { - (vec![(sp.shrink_to_lo(), format!("{prefix}{sugg}"))], true) - }; - return Some(( - sugg, - format!("consider {}borrowing here", mutability.mutably_str()), - Applicability::MachineApplicable, - verbose, - false, - )); - } - } - ( - hir::ExprKind::AddrOf(hir::BorrowKind::Ref, _, ref expr), - _, - &ty::Ref(_, checked, _), - ) if self.can_sub(self.param_env, checked, expected) => { - let make_sugg = |start: Span, end: BytePos| { - // skip `(` for tuples such as `(c) = (&123)`. - // make sure we won't suggest like `(c) = 123)` which is incorrect. - let sp = sm.span_extend_while(start.shrink_to_lo(), |c| c == '(' || c.is_whitespace()) - .map_or(start, |s| s.shrink_to_hi()); - Some(( - vec![(sp.with_hi(end), String::new())], - "consider removing the borrow".to_string(), - Applicability::MachineApplicable, - true, - true, - )) - }; - - // We have `&T`, check if what was expected was `T`. If so, - // we may want to suggest removing a `&`. - if sm.is_imported(expr.span) { - // Go through the spans from which this span was expanded, - // and find the one that's pointing inside `sp`. - // - // E.g. for `&format!("")`, where we want the span to the - // `format!()` invocation instead of its expansion. - if let Some(call_span) = - iter::successors(Some(expr.span), |s| s.parent_callsite()) - .find(|&s| sp.contains(s)) - && sm.is_span_accessible(call_span) - { - return make_sugg(sp, call_span.lo()) - } - return None; - } - if sp.contains(expr.span) && sm.is_span_accessible(expr.span) { - return make_sugg(sp, expr.span.lo()) - } - } - ( - _, - &ty::RawPtr(TypeAndMut { ty: ty_b, mutbl: mutbl_b }), - &ty::Ref(_, ty_a, mutbl_a), - ) => { - if let Some(steps) = self.deref_steps(ty_a, ty_b) - // Only suggest valid if dereferencing needed. - && steps > 0 - // The pointer type implements `Copy` trait so the suggestion is always valid. 
- && let Ok(src) = sm.span_to_snippet(sp) - { - let derefs = "*".repeat(steps); - let old_prefix = mutbl_a.ref_prefix_str(); - let new_prefix = mutbl_b.ref_prefix_str().to_owned() + &derefs; - - let suggestion = replace_prefix(&src, old_prefix, &new_prefix).map(|_| { - // skip `&` or `&mut ` if both mutabilities are mutable - let lo = sp.lo() - + BytePos(min(old_prefix.len(), mutbl_b.ref_prefix_str().len()) as _); - // skip `&` or `&mut ` - let hi = sp.lo() + BytePos(old_prefix.len() as _); - let sp = sp.with_lo(lo).with_hi(hi); - - ( - sp, - format!( - "{}{derefs}", - if mutbl_a != mutbl_b { mutbl_b.prefix_str() } else { "" } - ), - if mutbl_b <= mutbl_a { - Applicability::MachineApplicable - } else { - Applicability::MaybeIncorrect - }, - ) - }); - - if let Some((span, src, applicability)) = suggestion { - return Some(( - vec![(span, src)], - "consider dereferencing".to_string(), - applicability, - true, - false, - )); - } - } - } - _ if sp == expr.span => { - if let Some(mut steps) = self.deref_steps(checked_ty, expected) { - let mut expr = expr.peel_blocks(); - let mut prefix_span = expr.span.shrink_to_lo(); - let mut remove = String::new(); - - // Try peeling off any existing `&` and `&mut` to reach our target type - while steps > 0 { - if let hir::ExprKind::AddrOf(_, mutbl, inner) = expr.kind { - // If the expression has `&`, removing it would fix the error - prefix_span = prefix_span.with_hi(inner.span.lo()); - expr = inner; - remove.push_str(mutbl.ref_prefix_str()); - steps -= 1; - } else { - break; - } - } - // If we've reached our target type with just removing `&`, then just print now. - if steps == 0 && !remove.trim().is_empty() { - return Some(( - vec![(prefix_span, String::new())], - format!("consider removing the `{}`", remove.trim()), - // Do not remove `&&` to get to bool, because it might be something like - // { a } && b, which we have a separate fixup suggestion that is more - // likely correct... - if remove.trim() == "&&" && expected == self.tcx.types.bool { - Applicability::MaybeIncorrect - } else { - Applicability::MachineApplicable - }, - true, - false, - )); - } - - // For this suggestion to make sense, the type would need to be `Copy`, - // or we have to be moving out of a `Box` - if self.type_is_copy_modulo_regions(self.param_env, expected) - // FIXME(compiler-errors): We can actually do this if the checked_ty is - // `steps` layers of boxes, not just one, but this is easier and most likely. - || (checked_ty.is_box() && steps == 1) - // We can always deref a binop that takes its arguments by ref. - || matches!( - self.tcx.hir().get_parent(expr.hir_id), - hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Binary(op, ..), .. }) - if !op.node.is_by_value() - ) - { - let deref_kind = if checked_ty.is_box() { - "unboxing the value" - } else if checked_ty.is_ref() { - "dereferencing the borrow" - } else { - "dereferencing the type" - }; - - // Suggest removing `&` if we have removed any, otherwise suggest just - // dereferencing the remaining number of steps. 
- let message = if remove.is_empty() { - format!("consider {deref_kind}") - } else { - format!( - "consider removing the `{}` and {} instead", - remove.trim(), - deref_kind - ) - }; - - let prefix = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { - Some(ident) => format!("{ident}: "), - None => String::new(), - }; - - let (span, suggestion) = if self.is_else_if_block(expr) { - // Don't suggest nonsense like `else *if` - return None; - } else if let Some(expr) = self.maybe_get_block_expr(expr) { - // prefix should be empty here.. - (expr.span.shrink_to_lo(), "*".to_string()) - } else { - (prefix_span, format!("{}{}", prefix, "*".repeat(steps))) - }; - if suggestion.trim().is_empty() { - return None; - } - - return Some(( - vec![(span, suggestion)], - message, - Applicability::MachineApplicable, - true, - false, - )); - } - } - } - _ => {} - } - None - } - - pub fn suggest_cast( - &self, - err: &mut Diagnostic, - expr: &hir::Expr<'_>, - checked_ty: Ty<'tcx>, - expected_ty: Ty<'tcx>, - expected_ty_expr: Option<&'tcx hir::Expr<'tcx>>, - ) -> bool { - if self.tcx.sess.source_map().is_imported(expr.span) { - // Ignore if span is from within a macro. - return false; - } - - let Ok(src) = self.tcx.sess.source_map().span_to_snippet(expr.span) else { - return false; - }; - - // If casting this expression to a given numeric type would be appropriate in case of a type - // mismatch. - // - // We want to minimize the amount of casting operations that are suggested, as it can be a - // lossy operation with potentially bad side effects, so we only suggest when encountering - // an expression that indicates that the original type couldn't be directly changed. - // - // For now, don't suggest casting with `as`. - let can_cast = false; - - let mut sugg = vec![]; - - if let Some(hir::Node::ExprField(field)) = self.tcx.hir().find_parent(expr.hir_id) { - // `expr` is a literal field for a struct, only suggest if appropriate - if field.is_shorthand { - // This is a field literal - sugg.push((field.ident.span.shrink_to_lo(), format!("{}: ", field.ident))); - } else { - // Likely a field was meant, but this field wasn't found. Do not suggest anything. - return false; - } - }; - - if let hir::ExprKind::Call(path, args) = &expr.kind - && let (hir::ExprKind::Path(hir::QPath::TypeRelative(base_ty, path_segment)), 1) = - (&path.kind, args.len()) - // `expr` is a conversion like `u32::from(val)`, do not suggest anything (#63697). 
- && let (hir::TyKind::Path(hir::QPath::Resolved(None, base_ty_path)), sym::from) = - (&base_ty.kind, path_segment.ident.name) - { - if let Some(ident) = &base_ty_path.segments.iter().map(|s| s.ident).next() { - match ident.name { - sym::i128 - | sym::i64 - | sym::i32 - | sym::i16 - | sym::i8 - | sym::u128 - | sym::u64 - | sym::u32 - | sym::u16 - | sym::u8 - | sym::isize - | sym::usize - if base_ty_path.segments.len() == 1 => - { - return false; - } - _ => {} - } - } - } - - let msg = format!( - "you can convert {} `{}` to {} `{}`", - checked_ty.kind().article(), - checked_ty, - expected_ty.kind().article(), - expected_ty, - ); - let cast_msg = format!( - "you can cast {} `{}` to {} `{}`", - checked_ty.kind().article(), - checked_ty, - expected_ty.kind().article(), - expected_ty, - ); - let lit_msg = format!( - "change the type of the numeric literal from `{checked_ty}` to `{expected_ty}`", - ); - - let close_paren = if expr.precedence().order() < PREC_POSTFIX { - sugg.push((expr.span.shrink_to_lo(), "(".to_string())); - ")" - } else { - "" - }; - - let mut cast_suggestion = sugg.clone(); - cast_suggestion.push((expr.span.shrink_to_hi(), format!("{close_paren} as {expected_ty}"))); - let mut into_suggestion = sugg.clone(); - into_suggestion.push((expr.span.shrink_to_hi(), format!("{close_paren}.into()"))); - let mut suffix_suggestion = sugg.clone(); - suffix_suggestion.push(( - if matches!( - (&expected_ty.kind(), &checked_ty.kind()), - (ty::Int(_) | ty::Uint(_), ty::Float(_)) - ) { - // Remove fractional part from literal, for example `42.0f32` into `42` - let src = src.trim_end_matches(&checked_ty.to_string()); - let len = src.split('.').next().unwrap().len(); - expr.span.with_lo(expr.span.lo() + BytePos(len as u32)) - } else { - let len = src.trim_end_matches(&checked_ty.to_string()).len(); - expr.span.with_lo(expr.span.lo() + BytePos(len as u32)) - }, - if expr.precedence().order() < PREC_POSTFIX { - // Readd `)` - format!("{expected_ty})") - } else { - expected_ty.to_string() - }, - )); - let literal_is_ty_suffixed = |expr: &hir::Expr<'_>| { - if let hir::ExprKind::Lit(lit) = &expr.kind { lit.node.is_suffixed() } else { false } - }; - let is_negative_int = - |expr: &hir::Expr<'_>| matches!(expr.kind, hir::ExprKind::Unary(hir::UnOp::Neg, ..)); - let is_uint = |ty: Ty<'_>| matches!(ty.kind(), ty::Uint(..)); - - let in_const_context = self.tcx.hir().is_inside_const_context(expr.hir_id); - - let suggest_fallible_into_or_lhs_from = - |err: &mut Diagnostic, exp_to_found_is_fallible: bool| { - // If we know the expression the expected type is derived from, we might be able - // to suggest a widening conversion rather than a narrowing one (which may - // panic). For example, given x: u8 and y: u32, if we know the span of "x", - // x > y - // can be given the suggestion "u32::from(x) > y" rather than - // "x > y.try_into().unwrap()". 
- let lhs_expr_and_src = expected_ty_expr.and_then(|expr| { - self.tcx - .sess - .source_map() - .span_to_snippet(expr.span) - .ok() - .map(|src| (expr, src)) - }); - let (msg, suggestion) = if let (Some((lhs_expr, lhs_src)), false) = - (lhs_expr_and_src, exp_to_found_is_fallible) - { - let msg = format!( - "you can convert `{lhs_src}` from `{expected_ty}` to `{checked_ty}`, matching the type of `{src}`", - ); - let suggestion = vec![ - (lhs_expr.span.shrink_to_lo(), format!("{checked_ty}::from(")), - (lhs_expr.span.shrink_to_hi(), ")".to_string()), - ]; - (msg, suggestion) - } else { - let msg = - format!("{} and panic if the converted value doesn't fit", msg.clone()); - let mut suggestion = sugg.clone(); - suggestion.push(( - expr.span.shrink_to_hi(), - format!("{close_paren}.try_into().unwrap()"), - )); - (msg, suggestion) - }; - err.multipart_suggestion_verbose(msg, suggestion, Applicability::MachineApplicable); - }; - - let suggest_to_change_suffix_or_into = - |err: &mut Diagnostic, - found_to_exp_is_fallible: bool, - exp_to_found_is_fallible: bool| { - let exp_is_lhs = expected_ty_expr.is_some_and(|e| self.tcx.hir().is_lhs(e.hir_id)); - - if exp_is_lhs { - return; - } - - let always_fallible = found_to_exp_is_fallible - && (exp_to_found_is_fallible || expected_ty_expr.is_none()); - let msg = if literal_is_ty_suffixed(expr) { - lit_msg.clone() - } else if always_fallible && (is_negative_int(expr) && is_uint(expected_ty)) { - // We now know that converting either the lhs or rhs is fallible. Before we - // suggest a fallible conversion, check if the value can never fit in the - // expected type. - let msg = format!("`{src}` cannot fit into type `{expected_ty}`"); - err.note(msg); - return; - } else if in_const_context { - // Do not recommend `into` or `try_into` in const contexts. 
- return; - } else if found_to_exp_is_fallible { - return suggest_fallible_into_or_lhs_from(err, exp_to_found_is_fallible); - } else { - msg.clone() - }; - let suggestion = if literal_is_ty_suffixed(expr) { - suffix_suggestion.clone() - } else { - into_suggestion.clone() - }; - err.multipart_suggestion_verbose(msg, suggestion, Applicability::MachineApplicable); - }; - - match (&expected_ty.kind(), &checked_ty.kind()) { - (ty::Int(exp), ty::Int(found)) => { - let (f2e_is_fallible, e2f_is_fallible) = match (exp.bit_width(), found.bit_width()) - { - (Some(exp), Some(found)) if exp < found => (true, false), - (Some(exp), Some(found)) if exp > found => (false, true), - (None, Some(8 | 16)) => (false, true), - (Some(8 | 16), None) => (true, false), - (None, _) | (_, None) => (true, true), - _ => (false, false), - }; - suggest_to_change_suffix_or_into(err, f2e_is_fallible, e2f_is_fallible); - true - } - (ty::Uint(exp), ty::Uint(found)) => { - let (f2e_is_fallible, e2f_is_fallible) = match (exp.bit_width(), found.bit_width()) - { - (Some(exp), Some(found)) if exp < found => (true, false), - (Some(exp), Some(found)) if exp > found => (false, true), - (None, Some(8 | 16)) => (false, true), - (Some(8 | 16), None) => (true, false), - (None, _) | (_, None) => (true, true), - _ => (false, false), - }; - suggest_to_change_suffix_or_into(err, f2e_is_fallible, e2f_is_fallible); - true - } - (&ty::Int(exp), &ty::Uint(found)) => { - let (f2e_is_fallible, e2f_is_fallible) = match (exp.bit_width(), found.bit_width()) - { - (Some(exp), Some(found)) if found < exp => (false, true), - (None, Some(8)) => (false, true), - _ => (true, true), - }; - suggest_to_change_suffix_or_into(err, f2e_is_fallible, e2f_is_fallible); - true - } - (&ty::Uint(exp), &ty::Int(found)) => { - let (f2e_is_fallible, e2f_is_fallible) = match (exp.bit_width(), found.bit_width()) - { - (Some(exp), Some(found)) if found > exp => (true, false), - (Some(8), None) => (true, false), - _ => (true, true), - }; - suggest_to_change_suffix_or_into(err, f2e_is_fallible, e2f_is_fallible); - true - } - (ty::Float(exp), ty::Float(found)) => { - if found.bit_width() < exp.bit_width() { - suggest_to_change_suffix_or_into(err, false, true); - } else if literal_is_ty_suffixed(expr) { - err.multipart_suggestion_verbose( - lit_msg, - suffix_suggestion, - Applicability::MachineApplicable, - ); - } else if can_cast { - // Missing try_into implementation for `f64` to `f32` - err.multipart_suggestion_verbose( - format!("{cast_msg}, producing the closest possible value"), - cast_suggestion, - Applicability::MaybeIncorrect, // lossy conversion - ); - } - true - } - (&ty::Uint(_) | &ty::Int(_), &ty::Float(_)) => { - if literal_is_ty_suffixed(expr) { - err.multipart_suggestion_verbose( - lit_msg, - suffix_suggestion, - Applicability::MachineApplicable, - ); - } else if can_cast { - // Missing try_into implementation for `{float}` to `{integer}` - err.multipart_suggestion_verbose( - format!("{msg}, rounding the float towards zero"), - cast_suggestion, - Applicability::MaybeIncorrect, // lossy conversion - ); - } - true - } - (ty::Float(exp), ty::Uint(found)) => { - // if `found` is `None` (meaning found is `usize`), don't suggest `.into()` - if exp.bit_width() > found.bit_width().unwrap_or(256) { - err.multipart_suggestion_verbose( - format!( - "{msg}, producing the floating point representation of the integer", - ), - into_suggestion, - Applicability::MachineApplicable, - ); - } else if literal_is_ty_suffixed(expr) { - err.multipart_suggestion_verbose( - lit_msg, - 
suffix_suggestion, - Applicability::MachineApplicable, - ); - } else { - // Missing try_into implementation for `{integer}` to `{float}` - err.multipart_suggestion_verbose( - format!( - "{cast_msg}, producing the floating point representation of the integer, \ - rounded if necessary", - ), - cast_suggestion, - Applicability::MaybeIncorrect, // lossy conversion - ); - } - true - } - (ty::Float(exp), ty::Int(found)) => { - // if `found` is `None` (meaning found is `isize`), don't suggest `.into()` - if exp.bit_width() > found.bit_width().unwrap_or(256) { - err.multipart_suggestion_verbose( - format!( - "{}, producing the floating point representation of the integer", - msg.clone(), - ), - into_suggestion, - Applicability::MachineApplicable, - ); - } else if literal_is_ty_suffixed(expr) { - err.multipart_suggestion_verbose( - lit_msg, - suffix_suggestion, - Applicability::MachineApplicable, - ); - } else { - // Missing try_into implementation for `{integer}` to `{float}` - err.multipart_suggestion_verbose( - format!( - "{}, producing the floating point representation of the integer, \ - rounded if necessary", - &msg, - ), - cast_suggestion, - Applicability::MaybeIncorrect, // lossy conversion - ); - } - true - } - ( - &ty::Uint(ty::UintTy::U32 | ty::UintTy::U64 | ty::UintTy::U128) - | &ty::Int(ty::IntTy::I32 | ty::IntTy::I64 | ty::IntTy::I128), - &ty::Char, - ) => { - err.multipart_suggestion_verbose( - format!("{cast_msg}, since a `char` always occupies 4 bytes"), - cast_suggestion, - Applicability::MachineApplicable, - ); - true + if let Some(hir::Node::Pat(hir::Pat { + kind: hir::PatKind::Binding(_, _hir_id, _, _), + .. + })) = bind + && let Some(hir::Node::Pat(hir::Pat { default_binding_modes: false, .. })) = parent + { + return true; } - _ => false, - } - } - - /// Identify when the user has written `foo..bar()` instead of `foo.bar()`. - pub fn suggest_method_call_on_range_literal( - &self, - err: &mut Diagnostic, - expr: &hir::Expr<'tcx>, - checked_ty: Ty<'tcx>, - expected_ty: Ty<'tcx>, - ) { - if !hir::is_range_literal(expr) { - return; - } - let hir::ExprKind::Struct(hir::QPath::LangItem(LangItem::Range, ..), [start, end], _) = - expr.kind - else { - return; - }; - let parent = self.tcx.hir().parent_id(expr.hir_id); - if let Some(hir::Node::ExprField(_)) = self.tcx.hir().find(parent) { - // Ignore `Foo { field: a..Default::default() }` - return; - } - let mut expr = end.expr; - let mut expectation = Some(expected_ty); - while let hir::ExprKind::MethodCall(_, rcvr, ..) = expr.kind { - // Getting to the root receiver and asserting it is a fn call let's us ignore cases in - // `tests/ui/methods/issues/issue-90315.stderr`. - expr = rcvr; - // If we have more than one layer of calls, then the expected ty - // cannot guide the method probe. - expectation = None; - } - let hir::ExprKind::Call(method_name, _) = expr.kind else { - return; - }; - let ty::Adt(adt, _) = checked_ty.kind() else { - return; - }; - if self.tcx.lang_items().range_struct() != Some(adt.did()) { - return; - } - if let ty::Adt(adt, _) = expected_ty.kind() - && self.tcx.lang_items().range_struct() == Some(adt.did()) - { - return; - } - // Check if start has method named end. - let hir::ExprKind::Path(hir::QPath::Resolved(None, p)) = method_name.kind else { - return; - }; - let [hir::PathSegment { ident, .. 
}] = p.segments else { - return; - }; - let self_ty = self.typeck_results.borrow().expr_ty(start.expr); - let Ok(_pick) = self.lookup_probe_for_diagnostic( - *ident, - self_ty, - expr, - probe::ProbeScope::AllTraits, - expectation, - ) else { - return; - }; - let mut sugg = "."; - let mut span = start.expr.span.between(end.expr.span); - if span.lo() + BytePos(2) == span.hi() { - // There's no space between the start, the range op and the end, suggest removal which - // will be more noticeable than the replacement of `..` with `.`. - span = span.with_lo(span.lo() + BytePos(1)); - sugg = ""; - } - err.span_suggestion_verbose( - span, - "you likely meant to write a method call instead of a range", - sugg, - Applicability::MachineApplicable, - ); - } - - /// Identify when the type error is because `()` is found in a binding that was assigned a - /// block without a tail expression. - fn suggest_return_binding_for_missing_tail_expr( - &self, - err: &mut Diagnostic, - expr: &hir::Expr<'_>, - checked_ty: Ty<'tcx>, - expected_ty: Ty<'tcx>, - ) { - if !checked_ty.is_unit() { - return; - } - let hir::ExprKind::Path(hir::QPath::Resolved(None, path)) = expr.kind else { - return; - }; - let hir::def::Res::Local(hir_id) = path.res else { - return; - }; - let Some(hir::Node::Pat(pat)) = self.tcx.hir().find(hir_id) else { - return; - }; - let Some(hir::Node::Local(hir::Local { ty: None, init: Some(init), .. })) = - self.tcx.hir().find_parent(pat.hir_id) - else { - return; - }; - let hir::ExprKind::Block(block, None) = init.kind else { - return; - }; - if block.expr.is_some() { - return; - } - let [.., stmt] = block.stmts else { - err.span_label(block.span, "this empty block is missing a tail expression"); - return; - }; - let hir::StmtKind::Semi(tail_expr) = stmt.kind else { - return; - }; - let Some(ty) = self.node_ty_opt(tail_expr.hir_id) else { - return; - }; - if self.can_eq(self.param_env, expected_ty, ty) { - err.span_suggestion_short( - stmt.span.with_lo(tail_expr.span.hi()), - "remove this semicolon", - "", - Applicability::MachineApplicable, - ); - } else { - err.span_label(block.span, "this block is missing a tail expression"); } + return false; } fn note_wrong_return_ty_due_to_generic_arg( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/diverges.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/diverges.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/diverges.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/diverges.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,5 +1,4 @@ -use rustc_span::source_map::DUMMY_SP; -use rustc_span::{self, Span}; +use rustc_span::{self, Span, DUMMY_SP}; use std::{cmp, ops}; /// Tracks whether executing a node may exit normally (versus diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -62,8 +62,8 @@ } #[derive(Diagnostic)] -#[diag(hir_typeck_yield_expr_outside_of_generator, code = "E0627")] -pub struct YieldExprOutsideOfGenerator { +#[diag(hir_typeck_yield_expr_outside_of_coroutine, code = "E0627")] +pub struct YieldExprOutsideOfCoroutine { #[primary_span] pub span: Span, } @@ -110,7 +110,7 @@ pub enum AddReturnTypeSuggestion { 
#[suggestion( hir_typeck_add_return_type_add, - code = "-> {found} ", + code = " -> {found}", applicability = "machine-applicable" )] Add { @@ -120,7 +120,7 @@ }, #[suggestion( hir_typeck_add_return_type_missing_here, - code = "-> _ ", + code = " -> _", applicability = "has-placeholders" )] MissingHere { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/expr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/expr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/expr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/expr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,10 +10,10 @@ use crate::errors::{AddressOfTemporaryTaken, ReturnStmtOutsideOfFnBody, StructExprNonExhaustive}; use crate::errors::{ FieldMultiplySpecifiedInInitializer, FunctionalRecordUpdateOnNonStruct, HelpUseLatestEdition, - YieldExprOutsideOfGenerator, + YieldExprOutsideOfCoroutine, }; use crate::fatally_break_rust; -use crate::method::{MethodCallComponents, SelfSource}; +use crate::method::SelfSource; use crate::type_error_struct; use crate::Expectation::{self, ExpectCastableToType, ExpectHasType, NoExpectation}; use crate::{ @@ -41,7 +41,6 @@ use rustc_infer::infer::InferOk; use rustc_infer::traits::query::NoSolution; use rustc_infer::traits::ObligationCause; -use rustc_middle::middle::stability; use rustc_middle::ty::adjustment::{Adjust, Adjustment, AllowTwoPhase}; use rustc_middle::ty::error::{ ExpectedFound, @@ -53,9 +52,10 @@ use rustc_session::parse::feature_err; use rustc_span::edit_distance::find_best_match_for_name; use rustc_span::hygiene::DesugaringKind; -use rustc_span::source_map::{Span, Spanned}; +use rustc_span::source_map::Spanned; use rustc_span::symbol::{kw, sym, Ident, Symbol}; -use rustc_target::abi::FieldIdx; +use rustc_span::Span; +use rustc_target::abi::{FieldIdx, FIRST_VARIANT}; use rustc_target::spec::abi::Abi::RustIntrinsic; use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt; @@ -512,7 +512,7 @@ ) -> Ty<'tcx> { let tcx = self.tcx; let (res, opt_ty, segs) = - self.resolve_ty_and_res_fully_qualified_call(qpath, expr.hir_id, expr.span); + self.resolve_ty_and_res_fully_qualified_call(qpath, expr.hir_id, expr.span, Some(args)); let ty = match res { Res::Err => { self.suggest_assoc_method_call(segs); @@ -626,10 +626,15 @@ } }; - // If the loop context is not a `loop { }`, then break with - // a value is illegal, and `opt_coerce_to` will be `None`. - // Just set expectation to error in that case. - let coerce_to = opt_coerce_to.unwrap_or_else(|| Ty::new_misc_error(tcx)); + let coerce_to = match opt_coerce_to { + Some(c) => c, + None => { + // If the loop context is not a `loop { }`, then break with + // a value is illegal, and `opt_coerce_to` will be `None`. + // Return error in that case (#114529). + return Ty::new_misc_error(tcx); + } + }; // Recurse without `enclosing_breakables` borrowed. e_ty = self.check_expr_with_hint(e, coerce_to); @@ -718,7 +723,9 @@ // ... except when we try to 'break rust;'. // ICE this expression in particular (see #43162). 
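[Illustrative aside, not part of the patch: the `opt_coerce_to` change in the expr.rs hunk above (#114529) concerns `break` with a value, which is only accepted inside `loop { ... }`; the hunk bails out with an error type instead of continuing. A small sketch of the accepted and rejected forms:

    fn main() {
        // Legal: `break` with a value inside `loop { ... }`.
        let n = loop {
            break 42;
        };
        assert_eq!(n, 42);

        // Rejected with E0571 (`break` with value from a `while` loop):
        // let m = while n > 0 { break 42; };
    }
]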
if let ExprKind::Path(QPath::Resolved(_, path)) = e.kind { - if let [segment] = path.segments && segment.ident.name == sym::rust { + if let [segment] = path.segments + && segment.ident.name == sym::rust + { fatally_break_rust(self.tcx); } } @@ -827,7 +834,7 @@ && let ExprKind::Block(body, _) = return_expr.kind && let Some(last_expr) = body.expr { - span = last_expr.span; + span = last_expr.span; } ret_coercion.borrow_mut().coerce( self, @@ -842,7 +849,12 @@ // Point any obligations that were registered due to opaque type // inference at the return expression. self.select_obligations_where_possible(|errors| { - self.point_at_return_for_opaque_ty_error(errors, span, return_expr_ty, return_expr.span); + self.point_at_return_for_opaque_ty_error( + errors, + span, + return_expr_ty, + return_expr.span, + ); }); } } @@ -947,12 +959,39 @@ Applicability::MachineApplicable, ); }); + self.check_for_missing_semi(lhs, &mut err); adjust_err(&mut err); err.emit(); } + /// Check if the expression that could not be assigned to was a typoed expression that + pub fn check_for_missing_semi( + &self, + expr: &'tcx hir::Expr<'tcx>, + err: &mut DiagnosticBuilder<'_, ErrorGuaranteed>, + ) -> bool { + if let hir::ExprKind::Binary(binop, lhs, rhs) = expr.kind + && let hir::BinOpKind::Mul = binop.node + && self.tcx.sess.source_map().is_multiline(lhs.span.between(rhs.span)) + && rhs.is_syntactic_place_expr() + { + // v missing semicolon here + // foo() + // *bar = baz; + // (#80446). + err.span_suggestion_verbose( + lhs.span.shrink_to_hi(), + "you might have meant to write a semicolon here", + ";".to_string(), + Applicability::MachineApplicable, + ); + return true; + } + false + } + // Check if an expression `original_expr_id` comes from the condition of a while loop, /// as opposed from the body of a while loop, which we can naively check by iterating /// parents until we find a loop... @@ -1293,7 +1332,7 @@ segment.ident, SelfSource::MethodCall(rcvr), error, - Some(MethodCallComponents { receiver: rcvr, args, full_expr: expr }), + Some(args), expected, false, ) { @@ -1403,7 +1442,12 @@ && let hir::ArrayLen::Body(hir::AnonConst { hir_id, .. }) = length && let Some(span) = self.tcx.hir().opt_span(hir_id) { - match self.tcx.sess.diagnostic().steal_diagnostic(span, StashKey::UnderscoreForArrayLengths) { + match self + .tcx + .sess + .diagnostic() + .steal_diagnostic(span, StashKey::UnderscoreForArrayLengths) + { Some(mut err) => { err.span_suggestion( span, @@ -1413,7 +1457,7 @@ ); err.emit(); } - None => () + None => (), } } } @@ -1507,21 +1551,42 @@ } _ => {} } - // If someone calls a const fn, they can extract that call out into a separate constant (or a const - // block in the future), so we check that to tell them that in the diagnostic. Does not affect typeck. - let is_const_fn = match element.kind { + // If someone calls a const fn or constructs a const value, they can extract that + // out into a separate constant (or a const block in the future), so we check that + // to tell them that in the diagnostic. Does not affect typeck. 
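[Illustrative aside, not part of the patch: the `is_constable` classification below feeds the diagnostic for array repeat expressions whose element type is not `Copy`; when the element is a `const fn` call (or a const constructor), the error can point at extracting it into a constant. A rough sketch, with made-up names (`Pad`, `zero_pad`):

    struct Pad([u8; 16]); // deliberately not `Copy`

    const fn zero_pad() -> Pad {
        Pad([0; 16])
    }

    fn main() {
        // `let _buf = [zero_pad(); 4];` fails with E0277 (`Pad: Copy` not
        // satisfied); because the element is a `const fn` call, the
        // diagnostic can suggest an extraction like the one below.
        const INIT: Pad = zero_pad();
        let _buf = [INIT; 4]; // fine: a path to a constant item is allowed in repeat position
    }
]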
+ let is_constable = match element.kind { hir::ExprKind::Call(func, _args) => match *self.node_ty(func.hir_id).kind() { - ty::FnDef(def_id, _) => tcx.is_const_fn(def_id), - _ => false, + ty::FnDef(def_id, _) if tcx.is_const_fn(def_id) => traits::IsConstable::Fn, + _ => traits::IsConstable::No, }, - _ => false, + hir::ExprKind::Path(qpath) => { + match self.typeck_results.borrow().qpath_res(&qpath, element.hir_id) { + Res::Def(DefKind::Ctor(_, CtorKind::Const), _) => traits::IsConstable::Ctor, + _ => traits::IsConstable::No, + } + } + _ => traits::IsConstable::No, }; // If the length is 0, we don't create any elements, so we don't copy any. If the length is 1, we // don't copy that one element, we move it. Only check for Copy if the length is larger. if count.try_eval_target_usize(tcx, self.param_env).map_or(true, |len| len > 1) { let lang_item = self.tcx.require_lang_item(LangItem::Copy, None); - let code = traits::ObligationCauseCode::RepeatElementCopy { is_const_fn }; + let code = traits::ObligationCauseCode::RepeatElementCopy { + is_constable, + elt_type: element_ty, + elt_span: element.span, + elt_stmt_span: self + .tcx + .hir() + .parent_iter(element.hir_id) + .find_map(|(_, node)| match node { + hir::Node::Item(it) => Some(it.span), + hir::Node::Stmt(stmt) => Some(stmt.span), + _ => None, + }) + .expect("array repeat expressions must be inside an item or statement"), + }; self.require_type_meets(element_ty, element.span, code, lang_item); } } @@ -1585,12 +1650,11 @@ self.check_expr_struct_fields( adt_ty, expected, - expr.hir_id, + expr, qpath.span(), variant, fields, base_expr, - expr.span, ); self.require_type_is_sized(adt_ty, expr.span, traits::StructInitializerSized); @@ -1601,12 +1665,11 @@ &self, adt_ty: Ty<'tcx>, expected: Expectation<'tcx>, - expr_id: hir::HirId, + expr: &hir::Expr<'_>, span: Span, variant: &'tcx ty::VariantDef, ast_fields: &'tcx [hir::ExprField<'tcx>], base_expr: &'tcx Option<&'tcx hir::Expr<'tcx>>, - expr_span: Span, ) { let tcx = self.tcx; @@ -1646,7 +1709,7 @@ // struct-like enums (yet...), but it's definitely not // a bug to have constructed one. 
if adt_kind != AdtKind::Enum { - tcx.check_stability(v_field.did, Some(expr_id), field.span, None); + tcx.check_stability(v_field.did, Some(expr.hir_id), field.span, None); } self.field_ty(field.span, v_field, args) @@ -1662,10 +1725,10 @@ self.report_unknown_field( adt_ty, variant, + expr, field, ast_fields, adt.variant_descr(), - expr_span, ) }; @@ -1731,7 +1794,7 @@ .iter() .map(|f| { let fru_ty = self - .normalize(expr_span, self.field_ty(base_expr.span, f, fresh_args)); + .normalize(expr.span, self.field_ty(base_expr.span, f, fresh_args)); let ident = self.tcx.adjust_ident(f.ident(self.tcx), variant.def_id); if let Some(_) = remaining_fields.remove(&ident) { let target_ty = self.field_ty(base_expr.span, f, args); @@ -1814,7 +1877,7 @@ ty::Adt(adt, args) if adt.is_struct() => variant .fields .iter() - .map(|f| self.normalize(expr_span, f.ty(self.tcx, args))) + .map(|f| self.normalize(expr.span, f.ty(self.tcx, args))) .collect(), _ => { self.tcx @@ -1824,13 +1887,13 @@ } } }; - self.typeck_results.borrow_mut().fru_field_types_mut().insert(expr_id, fru_tys); + self.typeck_results.borrow_mut().fru_field_types_mut().insert(expr.hir_id, fru_tys); } else if adt_kind != AdtKind::Union && !remaining_fields.is_empty() { debug!(?remaining_fields); let private_fields: Vec<&ty::FieldDef> = variant .fields .iter() - .filter(|field| !field.vis.is_accessible_from(tcx.parent_module(expr_id), tcx)) + .filter(|field| !field.vis.is_accessible_from(tcx.parent_module(expr.hir_id), tcx)) .collect(); if !private_fields.is_empty() { @@ -1934,11 +1997,8 @@ err: &mut Diagnostic, ) { // I don't use 'is_range_literal' because only double-sided, half-open ranges count. - if let ExprKind::Struct( - QPath::LangItem(LangItem::Range, ..), - [range_start, range_end], - _, - ) = last_expr_field.expr.kind + if let ExprKind::Struct(QPath::LangItem(LangItem::Range, ..), [range_start, range_end], _) = + last_expr_field.expr.kind && let variant_field = variant.fields.iter().find(|field| field.ident(self.tcx) == last_expr_field.ident) && let range_def_id = self.tcx.lang_items().range_struct() @@ -1973,13 +2033,11 @@ .sess .source_map() .span_extend_while(range_start.span, |c| c.is_whitespace()) - .unwrap_or(range_start.span).shrink_to_hi().to(range_end.span); + .unwrap_or(range_start.span) + .shrink_to_hi() + .to(range_end.span); - err.subdiagnostic(TypeMismatchFruTypo { - expr_span: range_start.span, - fru_span, - expr, - }); + err.subdiagnostic(TypeMismatchFruTypo { expr_span: range_start.span, fru_span, expr }); } } @@ -2049,16 +2107,16 @@ &self, ty: Ty<'tcx>, variant: &'tcx ty::VariantDef, + expr: &hir::Expr<'_>, field: &hir::ExprField<'_>, skip_fields: &[hir::ExprField<'_>], kind_name: &str, - expr_span: Span, ) -> ErrorGuaranteed { if variant.is_recovered() { let guar = self .tcx .sess - .delay_span_bug(expr_span, "parser recovered but no error was emitted"); + .delay_span_bug(expr.span, "parser recovered but no error was emitted"); self.set_tainted_by_errors(guar); return guar; } @@ -2102,7 +2160,7 @@ ); err.span_label(field.ident.span, "field does not exist"); err.span_suggestion_verbose( - expr_span, + expr.span, format!( "`{adt}::{variant}` is a tuple {kind_name}, use the appropriate syntax", adt = ty, @@ -2120,7 +2178,7 @@ err.span_label(variant_ident_span, format!("`{ty}` defined here")); err.span_label(field.ident.span, "field does not exist"); err.span_suggestion_verbose( - expr_span, + expr.span, format!("`{ty}` is a tuple {kind_name}, use the appropriate syntax",), format!("{ty}(/* fields */)"), 
Applicability::HasPlaceholders, @@ -2129,9 +2187,9 @@ }, _ => { // prevent all specified fields from being suggested - let skip_fields: Vec<_> = skip_fields.iter().map(|x| x.ident.name).collect(); + let available_field_names = self.available_field_names(variant, expr, skip_fields); if let Some(field_name) = - self.suggest_field_name(variant, field.ident.name, &skip_fields, expr_span) + find_best_match_for_name(&available_field_names, field.ident.name, None) { err.span_suggestion( field.ident.span, @@ -2153,10 +2211,6 @@ format!("`{ty}` does not have this field"), ); } - let mut available_field_names = - self.available_field_names(variant, expr_span); - available_field_names - .retain(|name| skip_fields.iter().all(|skip| name != skip)); if available_field_names.is_empty() { err.note("all struct fields are already assigned"); } else { @@ -2174,63 +2228,19 @@ err.emit() } - // Return a hint about the closest match in field names - fn suggest_field_name( - &self, - variant: &'tcx ty::VariantDef, - field: Symbol, - skip: &[Symbol], - // The span where stability will be checked - span: Span, - ) -> Option<Symbol> { - let names = variant - .fields - .iter() - .filter_map(|field| { - // ignore already set fields and private fields from non-local crates - // and unstable fields. - if skip.iter().any(|&x| x == field.name) - || (!variant.def_id.is_local() && !field.vis.is_public()) - || matches!( - self.tcx.eval_stability(field.did, None, span, None), - stability::EvalResult::Deny { .. } - ) - { - None - } else { - Some(field.name) - } - }) - .collect::<Vec<Symbol>>(); - - find_best_match_for_name(&names, field, None) - } - fn available_field_names( &self, variant: &'tcx ty::VariantDef, - access_span: Span, + expr: &hir::Expr<'_>, + skip_fields: &[hir::ExprField<'_>], ) -> Vec<Symbol> { - let body_owner_hir_id = self.tcx.hir().local_def_id_to_hir_id(self.body_id); variant .fields .iter() .filter(|field| { - let def_scope = self - .tcx - .adjust_ident_and_get_scope( - field.ident(self.tcx), - variant.def_id, - body_owner_hir_id, - ) - .1; - field.vis.is_accessible_from(def_scope, self.tcx) - && !matches!( - self.tcx.eval_stability(field.did, None, access_span, None), - stability::EvalResult::Deny { ..
} - ) + skip_fields.iter().all(|&skip| skip.ident.name != field.name) + && self.is_field_suggestable(field, expr.hir_id, expr.span) }) - .filter(|field| !self.tcx.is_doc_hidden(field.did)) .map(|field| field.name) .collect() } @@ -2344,7 +2354,9 @@ Some('e') | Some('E') => { chars.next(); if let Some(c) = chars.peek() - && !c.is_numeric() && *c != '-' && *c != '+' + && !c.is_numeric() + && *c != '-' + && *c != '+' { return false; } @@ -2460,7 +2472,7 @@ self.suggest_first_deref_field(&mut err, expr, base, ident); } ty::Adt(def, _) if !def.is_enum() => { - self.suggest_fields_on_recordish(&mut err, def, ident, expr.span); + self.suggest_fields_on_recordish(&mut err, expr, def, ident); } ty::Param(param_ty) => { self.point_at_param_definition(&mut err, param_ty); @@ -2472,7 +2484,9 @@ } self.suggest_fn_call(&mut err, base, base_ty, |output_ty| { - if let ty::Adt(def, _) = output_ty.kind() && !def.is_enum() { + if let ty::Adt(def, _) = output_ty.kind() + && !def.is_enum() + { def.non_enum_variant().fields.iter().any(|field| { field.ident(self.tcx) == ident && field.vis.is_accessible_from(expr.hir_id.owner.def_id, self.tcx) @@ -2622,12 +2636,13 @@ fn suggest_fields_on_recordish( &self, err: &mut Diagnostic, + expr: &hir::Expr<'_>, def: ty::AdtDef<'tcx>, field: Ident, - access_span: Span, ) { + let available_field_names = self.available_field_names(def.non_enum_variant(), expr, &[]); if let Some(suggested_field_name) = - self.suggest_field_name(def.non_enum_variant(), field.name, &[], access_span) + find_best_match_for_name(&available_field_names, field.name, None) { err.span_suggestion( field.span, @@ -2637,12 +2652,10 @@ ); } else { err.span_label(field.span, "unknown field"); - let struct_variant_def = def.non_enum_variant(); - let field_names = self.available_field_names(struct_variant_def, access_span); - if !field_names.is_empty() { + if !available_field_names.is_empty() { err.note(format!( "available fields are: {}", - self.name_series_display(field_names), + self.name_series_display(available_field_names), )); } } @@ -2894,9 +2907,11 @@ // fixed expression: if let ExprKind::Lit(ref lit) = idx.kind && let ast::LitKind::Int(i, ast::LitIntType::Unsuffixed) = lit.node - && i < types.len().try_into().expect("expected tuple index to be < usize length") + && i < types + .len() + .try_into() + .expect("expected tuple index to be < usize length") { - err.span_suggestion( brackets_span, "to access tuple elements, use", @@ -2905,7 +2920,10 @@ ); needs_note = false; } else if let ExprKind::Path(..) = idx.peel_borrows().kind { - err.span_label(idx.span, "cannot access tuple elements at a variable index"); + err.span_label( + idx.span, + "cannot access tuple elements at a variable index", + ); } if needs_note { err.help( @@ -3010,7 +3028,7 @@ // There should be at least one error reported. If not, we // will still delay a span bug in `report_fulfillment_errors`. Ok::<_, NoSolution>(( - self.err_ctxt().report_fulfillment_errors(&errors), + self.err_ctxt().report_fulfillment_errors(errors), impl_trait_ref.args.type_at(1), element_ty, )) @@ -3055,7 +3073,7 @@ Ty::new_unit(self.tcx) } _ => { - self.tcx.sess.emit_err(YieldExprOutsideOfGenerator { span: expr.span }); + self.tcx.sess.emit_err(YieldExprOutsideOfCoroutine { span: expr.span }); // Avoid expressions without types during writeback (#78653). 
self.check_expr(value); Ty::new_unit(self.tcx) @@ -3138,12 +3156,90 @@ let mut field_indices = Vec::with_capacity(fields.len()); let mut current_container = container; + let mut fields = fields.into_iter(); - for &field in fields { + while let Some(&field) = fields.next() { let container = self.structurally_resolve_type(expr.span, current_container); match container.kind() { - ty::Adt(container_def, args) if !container_def.is_enum() => { + ty::Adt(container_def, args) if container_def.is_enum() => { + let block = self.tcx.hir().local_def_id_to_hir_id(self.body_id); + let (ident, _def_scope) = + self.tcx.adjust_ident_and_get_scope(field, container_def.did(), block); + + if !self.tcx.features().offset_of_enum { + rustc_session::parse::feature_err( + &self.tcx.sess.parse_sess, + sym::offset_of_enum, + ident.span, + "using enums in offset_of is experimental", + ).emit(); + } + + let Some((index, variant)) = container_def.variants() + .iter_enumerated() + .find(|(_, v)| v.ident(self.tcx).normalize_to_macros_2_0() == ident) else { + let mut err = type_error_struct!( + self.tcx().sess, + ident.span, + container, + E0599, + "no variant named `{ident}` found for enum `{container}`", + ); + err.span_label(field.span, "variant not found"); + err.emit(); + break; + }; + let Some(&subfield) = fields.next() else { + let mut err = type_error_struct!( + self.tcx().sess, + ident.span, + container, + E0795, + "`{ident}` is an enum variant; expected field at end of `offset_of`", + ); + err.span_label(field.span, "enum variant"); + err.emit(); + break; + }; + let (subident, sub_def_scope) = + self.tcx.adjust_ident_and_get_scope(subfield, variant.def_id, block); + + let Some((subindex, field)) = variant.fields + .iter_enumerated() + .find(|(_, f)| f.ident(self.tcx).normalize_to_macros_2_0() == subident) else { + let mut err = type_error_struct!( + self.tcx().sess, + ident.span, + container, + E0609, + "no field named `{subfield}` on enum variant `{container}::{ident}`", + ); + err.span_label(field.span, "this enum variant..."); + err.span_label(subident.span, "...does not have this field"); + err.emit(); + break; + }; + + let field_ty = self.field_ty(expr.span, field, args); + + // FIXME: DSTs with static alignment should be allowed + self.require_type_is_sized(field_ty, expr.span, traits::MiscObligation); + + if field.vis.is_accessible_from(sub_def_scope, self.tcx) { + self.tcx.check_stability(field.did, Some(expr.hir_id), expr.span, None); + } else { + self.private_field_err(ident, container_def.did()).emit(); + } + + // Save the index of all fields regardless of their visibility in case + // of error recovery. + field_indices.push((index, subindex)); + current_container = field_ty; + + continue; + } + ty::Adt(container_def, args) => { let block = self.tcx.hir().local_def_id_to_hir_id(self.body_id); let (ident, def_scope) = self.tcx.adjust_ident_and_get_scope(field, container_def.did(), block); @@ -3166,7 +3262,7 @@ // Save the index of all fields regardless of their visibility in case // of error recovery. 
- field_indices.push(index); + field_indices.push((FIRST_VARIANT, index)); current_container = field_ty; continue; @@ -3180,7 +3276,7 @@ self.require_type_is_sized(ty, expr.span, traits::MiscObligation); } if let Some(&field_ty) = tys.get(index) { - field_indices.push(index.into()); + field_indices.push((FIRST_VARIANT, index.into())); current_container = field_ty; continue; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/expr_use_visitor.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/expr_use_visitor.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/expr_use_visitor.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/expr_use_visitor.rs 2023-12-21 16:55:28.000000000 +0000 @@ -779,7 +779,7 @@ let closure_def_id = closure_expr.def_id; let upvars = tcx.upvars_mentioned(self.body_owner); - // For purposes of this function, generator and closures are equivalent. + // For purposes of this function, coroutine and closures are equivalent. let body_owner_is_closure = matches!(tcx.hir().body_owner_kind(self.body_owner), hir::BodyOwnerKind::Closure,); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fallback.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fallback.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fallback.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fallback.rs 2023-12-21 16:55:28.000000000 +0000 @@ -142,7 +142,7 @@ // type, `?T` is not considered unsolved, but `?I` is. The // same is true for float variables.) let fallback = match ty.kind() { - _ if let Some(e) = self.tainted_by_errors() => Ty::new_error(self.tcx,e), + _ if let Some(e) = self.tainted_by_errors() => Ty::new_error(self.tcx, e), ty::Infer(ty::IntVar(_)) => self.tcx.types.i32, ty::Infer(ty::FloatVar(_)) => self.tcx.types.f64, _ => match diverging_fallback.get(&ty) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -26,7 +26,7 @@ use rustc_middle::ty::fold::TypeFoldable; use rustc_middle::ty::visit::{TypeVisitable, TypeVisitableExt}; use rustc_middle::ty::{ - self, AdtKind, CanonicalUserType, GenericParamDefKind, Ty, TyCtxt, UserType, + self, AdtKind, CanonicalUserType, GenericParamDefKind, IsIdentity, Ty, TyCtxt, UserType, }; use rustc_middle::ty::{GenericArgKind, GenericArgsRef, UserArgs, UserSelfTy}; use rustc_session::lint; @@ -207,6 +207,7 @@ ) { debug!("fcx {}", self.tag()); + // FIXME: is_identity being on `UserType` and not `Canonical` is awkward if !canonical_user_type_annotation.is_identity() { self.typeck_results .borrow_mut() @@ -445,7 +446,7 @@ pub fn node_ty(&self, id: hir::HirId) -> Ty<'tcx> { match self.typeck_results.borrow().node_types().get(id) { Some(&t) => t, - None if let Some(e) = self.tainted_by_errors() => Ty::new_error(self.tcx,e), + None if let Some(e) = self.tainted_by_errors() => Ty::new_error(self.tcx, e), None => { bug!( "no type for node {} in fcx {}", @@ -459,7 +460,7 @@ pub fn node_ty_opt(&self, id: hir::HirId) -> Option> { match self.typeck_results.borrow().node_types().get(id) { Some(&t) => Some(t), - 
None if let Some(e) = self.tainted_by_errors() => Some(Ty::new_error(self.tcx,e)), + None if let Some(e) = self.tainted_by_errors() => Some(Ty::new_error(self.tcx, e)), None => None, } } @@ -509,40 +510,40 @@ typeck_results.rvalue_scopes = rvalue_scopes; } - /// Unify the inference variables corresponding to generator witnesses, and save all the + /// Unify the inference variables corresponding to coroutine witnesses, and save all the /// predicates that were stalled on those inference variables. /// - /// This process allows to conservatively save all predicates that do depend on the generator - /// interior types, for later processing by `check_generator_obligations`. + /// This process allows to conservatively save all predicates that do depend on the coroutine + /// interior types, for later processing by `check_coroutine_obligations`. /// /// We must not attempt to select obligations after this method has run, or risk query cycle /// ICE. #[instrument(level = "debug", skip(self))] - pub(in super::super) fn resolve_generator_interiors(&self, def_id: DefId) { + pub(in super::super) fn resolve_coroutine_interiors(&self, def_id: DefId) { // Try selecting all obligations that are not blocked on inference variables. - // Once we start unifying generator witnesses, trying to select obligations on them will + // Once we start unifying coroutine witnesses, trying to select obligations on them will // trigger query cycle ICEs, as doing so requires MIR. self.select_obligations_where_possible(|_| {}); - let generators = std::mem::take(&mut *self.deferred_generator_interiors.borrow_mut()); - debug!(?generators); + let coroutines = std::mem::take(&mut *self.deferred_coroutine_interiors.borrow_mut()); + debug!(?coroutines); - for &(expr_def_id, body_id, interior, _) in generators.iter() { + for &(expr_def_id, body_id, interior, _) in coroutines.iter() { debug!(?expr_def_id); - // Create the `GeneratorWitness` type that we will unify with `interior`. + // Create the `CoroutineWitness` type that we will unify with `interior`. let args = ty::GenericArgs::identity_for_item( self.tcx, self.tcx.typeck_root_def_id(expr_def_id.to_def_id()), ); - let witness = Ty::new_generator_witness(self.tcx, expr_def_id.to_def_id(), args); + let witness = Ty::new_coroutine_witness(self.tcx, expr_def_id.to_def_id(), args); // Unify `interior` with `witness` and collect all the resulting obligations. let span = self.tcx.hir().body(body_id).value.span; let ok = self .at(&self.misc(span), self.param_env) .eq(DefineOpaqueTypes::No, interior, witness) - .expect("Failed to unify generator interior type"); + .expect("Failed to unify coroutine interior type"); let mut obligations = ok.obligations; // Also collect the obligations that were unstalled by this unification. 
@@ -553,7 +554,7 @@ debug!(?obligations); self.typeck_results .borrow_mut() - .generator_interior_predicates + .coroutine_interior_predicates .insert(expr_def_id, obligations); } } @@ -564,7 +565,12 @@ if !errors.is_empty() { self.adjust_fulfillment_errors_for_expr_obligation(&mut errors); - self.err_ctxt().report_fulfillment_errors(&errors); + let errors_causecode = errors + .iter() + .map(|e| (e.obligation.cause.span, e.root_obligation.cause.code().clone())) + .collect::<Vec<_>>(); + self.err_ctxt().report_fulfillment_errors(errors); + self.collect_unused_stmts_for_coerce_return_ty(errors_causecode); } } @@ -577,7 +583,7 @@ if !result.is_empty() { mutate_fulfillment_errors(&mut result); self.adjust_fulfillment_errors_for_expr_obligation(&mut result); - self.err_ctxt().report_fulfillment_errors(&result); + self.err_ctxt().report_fulfillment_errors(result); } } @@ -713,7 +719,8 @@ if let ty::GenericArgKind::Type(ty) = ty.unpack() && let ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }) = *ty.kind() && let Some(def_id) = def_id.as_local() - && self.opaque_type_origin(def_id).is_some() { + && self.opaque_type_origin(def_id).is_some() + { return None; } } @@ -790,6 +797,7 @@ qpath: &'tcx QPath<'tcx>, hir_id: hir::HirId, span: Span, + args: Option<&'tcx [hir::Expr<'tcx>]>, ) -> (Res, Option>, &'tcx [hir::PathSegment<'tcx>]) { debug!( "resolve_ty_and_res_fully_qualified_call: qpath={:?} hir_id={:?} span={:?}", @@ -833,7 +841,13 @@ .resolve_fully_qualified_call(span, item_name, ty.normalized, qself.span, hir_id) .and_then(|r| { // lint bare trait if the method is found in the trait - if span.edition().at_least_rust_2021() && let Some(mut diag) = self.tcx.sess.diagnostic().steal_diagnostic(qself.span, StashKey::TraitMissingMethod) { + if span.edition().at_least_rust_2021() + && let Some(mut diag) = self + .tcx + .sess + .diagnostic() + .steal_diagnostic(qself.span, StashKey::TraitMissingMethod) + { diag.emit(); } Ok(r) @@ -863,7 +877,13 @@ } // emit or cancel the diagnostic for bare traits - if span.edition().at_least_rust_2021() && let Some(mut diag) = self.tcx.sess.diagnostic().steal_diagnostic(qself.span, StashKey::TraitMissingMethod) { + if span.edition().at_least_rust_2021() + && let Some(mut diag) = self + .tcx + .sess + .diagnostic() + .steal_diagnostic(qself.span, StashKey::TraitMissingMethod) + { if trait_missing_method { // cancel the diag for bare traits when meeting `MyTrait::missing_method` diag.cancel(); @@ -879,7 +899,7 @@ item_name, SelfSource::QPath(qself), error, - None, + args, Expectation::NoExpectation, trait_missing_method && span.edition().at_least_rust_2021(), // emits missing method for trait only after edition 2021 ) { @@ -949,12 +969,15 @@ kind: hir::ItemKind::Fn(ref sig, ..), owner_id, ..
- })) = self.tcx.hir().find_parent(hir_id) => Some(( - hir::HirId::make_owner(owner_id.def_id), - &sig.decl, - ident, - ident.name != sym::main, - )), + })) = self.tcx.hir().find_parent(hir_id) => + { + Some(( + hir::HirId::make_owner(owner_id.def_id), + &sig.decl, + ident, + ident.name != sym::main, + )) + } _ => None, } } @@ -1077,11 +1100,10 @@ let mut user_self_ty = None; let mut is_alias_variant_ctor = false; match res { - Res::Def(DefKind::Ctor(CtorOf::Variant, _), _) - if let Some(self_ty) = self_ty => - { + Res::Def(DefKind::Ctor(CtorOf::Variant, _), _) if let Some(self_ty) = self_ty => { let adt_def = self_ty.normalized.ty_adt_def().unwrap(); - user_self_ty = Some(UserSelfTy { impl_def_id: adt_def.did(), self_ty: self_ty.raw }); + user_self_ty = + Some(UserSelfTy { impl_def_id: adt_def.did(), self_ty: self_ty.raw }); is_alias_variant_ctor = true; } Res::Def(DefKind::AssocFn | DefKind::AssocConst, def_id) => { @@ -1090,9 +1112,13 @@ let container_id = assoc_item.container_id(tcx); debug!(?def_id, ?container, ?container_id); match container { - ty::TraitContainer => { - callee::check_legal_trait_for_method_call(tcx, span, None, span, container_id) - } + ty::TraitContainer => callee::check_legal_trait_for_method_call( + tcx, + span, + None, + span, + container_id, + ), ty::ImplContainer => { if segments.len() == 1 { // `::assoc` will end up here, and so @@ -1477,13 +1503,13 @@ { Ok(normalized_ty) => normalized_ty, Err(errors) => { - let guar = self.err_ctxt().report_fulfillment_errors(&errors); - return Ty::new_error(self.tcx,guar); + let guar = self.err_ctxt().report_fulfillment_errors(errors); + return Ty::new_error(self.tcx, guar); } } } else { ty - } + } } /// Resolves `ty` by a single level if `ty` is a type variable. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -129,21 +129,19 @@ return false; } - for param in - [param_to_point_at, fallback_param_to_point_at, self_param_to_point_at] + for param in [param_to_point_at, fallback_param_to_point_at, self_param_to_point_at] .into_iter() .flatten() { if self.blame_specific_arg_if_possible( - error, - def_id, - param, - *call_hir_id, - callee.span, - None, - args, - ) - { + error, + def_id, + param, + *call_hir_id, + callee.span, + None, + args, + ) { return true; } } @@ -346,8 +344,8 @@ && let TypeVariableOriginKind::TypeParameterDefinition(_, def_id) = origin.kind && let generics = self.0.tcx.generics_of(self.1) && let Some(index) = generics.param_def_id_to_index(self.0.tcx, def_id) - && let Some(subst) = ty::GenericArgs::identity_for_item(self.0.tcx, self.1) - .get(index as usize) + && let Some(subst) = + ty::GenericArgs::identity_for_item(self.0.tcx, self.1).get(index as usize) { ControlFlow::Break(*subst) } else { @@ -364,11 +362,12 @@ span: Span, ) -> bool { if let traits::FulfillmentErrorCode::CodeSelectionError( - traits::SelectionError::OutputTypeParameterMismatch(box traits::SelectionOutputTypeParameterMismatch{ - expected_trait_ref, .. - }), + traits::SelectionError::OutputTypeParameterMismatch( + box traits::SelectionOutputTypeParameterMismatch { expected_trait_ref, .. 
}, + ), ) = error.code - && let ty::Closure(def_id, _) | ty::Generator(def_id, ..) = expected_trait_ref.skip_binder().self_ty().kind() + && let ty::Closure(def_id, _) | ty::Coroutine(def_id, ..) = + expected_trait_ref.skip_binder().self_ty().kind() && span.overlaps(self.tcx.def_span(*def_id)) { true @@ -446,10 +445,14 @@ .collect(); // If there's one field that references the given generic, great! if let [(idx, _)] = args_referencing_param.as_slice() - && let Some(arg) = receiver - .map_or(args.get(*idx), |rcvr| if *idx == 0 { Some(rcvr) } else { args.get(*idx - 1) }) { - - error.obligation.cause.span = arg.span.find_ancestor_in_same_ctxt(error.obligation.cause.span).unwrap_or(arg.span); + && let Some(arg) = receiver.map_or(args.get(*idx), |rcvr| { + if *idx == 0 { Some(rcvr) } else { args.get(*idx - 1) } + }) + { + error.obligation.cause.span = arg + .span + .find_ancestor_in_same_ctxt(error.obligation.cause.span) + .unwrap_or(arg.span); if let hir::Node::Expr(arg_expr) = self.tcx.hir().get(arg.hir_id) { // This is more specific than pointing at the entire argument. @@ -934,16 +937,16 @@ return true; } if let ty::GenericArgKind::Type(ty) = arg.unpack() - && let ty::Alias(ty::Projection | ty::Inherent, ..) = ty.kind() - { - // This logic may seem a bit strange, but typically when - // we have a projection type in a function signature, the - // argument that's being passed into that signature is - // not actually constraining that projection's args in - // a meaningful way. So we skip it, and see improvements - // in some UI tests. - walk.skip_current_subtree(); - } + && let ty::Alias(ty::Projection | ty::Inherent, ..) = ty.kind() + { + // This logic may seem a bit strange, but typically when + // we have a projection type in a function signature, the + // argument that's being passed into that signature is + // not actually constraining that projection's args in + // a meaningful way. So we skip it, and see improvements + // in some UI tests. 
+ walk.skip_current_subtree(); + } } false } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,11 +11,12 @@ use rustc_ast as ast; use rustc_data_structures::fx::FxIndexSet; use rustc_errors::{ - pluralize, Applicability, Diagnostic, DiagnosticId, ErrorGuaranteed, MultiSpan, + pluralize, Applicability, Diagnostic, DiagnosticId, ErrorGuaranteed, MultiSpan, StashKey, }; use rustc_hir as hir; use rustc_hir::def::{CtorOf, DefKind, Res}; use rustc_hir::def_id::DefId; +use rustc_hir::intravisit::Visitor; use rustc_hir::{ExprKind, Node, QPath}; use rustc_hir_analysis::astconv::AstConv; use rustc_hir_analysis::check::intrinsicck::InlineAsmCtxt; @@ -26,9 +27,10 @@ use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use rustc_infer::infer::TypeTrace; use rustc_infer::infer::{DefineOpaqueTypes, InferOk}; +use rustc_middle::traits::ObligationCauseCode::ExprBindingObligation; use rustc_middle::ty::adjustment::AllowTwoPhase; use rustc_middle::ty::visit::TypeVisitableExt; -use rustc_middle::ty::{self, IsSuggestable, Ty}; +use rustc_middle::ty::{self, IsSuggestable, Ty, TyCtxt}; use rustc_session::Session; use rustc_span::symbol::{kw, Ident}; use rustc_span::{self, sym, BytePos, Span}; @@ -365,13 +367,13 @@ continue; } - // For this check, we do *not* want to treat async generator closures (async blocks) + // For this check, we do *not* want to treat async coroutine closures (async blocks) // as proper closures. Doing so would regress type inference when feeding // the return value of an argument-position async block to an argument-position // closure wrapped in a block. // See . let is_closure = if let ExprKind::Closure(closure) = arg.kind { - !tcx.generator_is_async(closure.def_id.to_def_id()) + !tcx.coroutine_is_async(closure.def_id.to_def_id()) } else { false }; @@ -651,7 +653,8 @@ && provided_arg_tys.len() == formal_and_expected_inputs.len() - 1 + tys.len() { // Wrap up the N provided arguments starting at this position in a tuple. 
- let provided_as_tuple = Ty::new_tup_from_iter(tcx, + let provided_as_tuple = Ty::new_tup_from_iter( + tcx, provided_arg_tys.iter().map(|(ty, _)| *ty).skip(mismatch_idx).take(tys.len()), ); @@ -722,6 +725,8 @@ &mut err, fn_def_id, callee_ty, + call_expr, + None, Some(mismatch_idx), is_method, ); @@ -826,6 +831,8 @@ &mut err, fn_def_id, callee_ty, + call_expr, + Some(expected_ty), Some(expected_idx.as_usize()), is_method, ); @@ -879,8 +886,8 @@ && self.tcx.def_kind(fn_def_id).is_fn_like() && let self_implicit = matches!(call_expr.kind, hir::ExprKind::MethodCall(..)) as usize - && let Some(arg) = self.tcx.fn_arg_names(fn_def_id) - .get(expected_idx.as_usize() + self_implicit) + && let Some(arg) = + self.tcx.fn_arg_names(fn_def_id).get(expected_idx.as_usize() + self_implicit) && arg.name != kw::SelfLower { format!("/* {} */", arg.name) @@ -941,9 +948,9 @@ && error_span.can_be_used_for_suggestions() { if arg_idx.index() > 0 - && let Some((_, prev)) = provided_arg_tys - .get(ProvidedIdx::from_usize(arg_idx.index() - 1) - ) { + && let Some((_, prev)) = + provided_arg_tys.get(ProvidedIdx::from_usize(arg_idx.index() - 1)) + { // Include previous comma span = prev.shrink_to_hi().to(span); } @@ -1208,7 +1215,7 @@ } // Call out where the function is defined - self.label_fn_like(&mut err, fn_def_id, callee_ty, None, is_method); + self.label_fn_like(&mut err, fn_def_id, callee_ty, call_expr, None, None, is_method); // And add a suggestion block for all of the parameters let suggestion_text = match suggestion_text { @@ -1286,7 +1293,8 @@ err: &mut rustc_errors::DiagnosticBuilder<'tcx, ErrorGuaranteed>, ) { if let ty::RawPtr(ty::TypeAndMut { mutbl: hir::Mutability::Mut, .. }) = expected_ty.kind() - && let ty::RawPtr(ty::TypeAndMut { mutbl: hir::Mutability::Not, .. }) = provided_ty.kind() + && let ty::RawPtr(ty::TypeAndMut { mutbl: hir::Mutability::Not, .. }) = + provided_ty.kind() && let hir::ExprKind::Call(callee, _) = arg.kind && let hir::ExprKind::Path(hir::QPath::Resolved(_, path)) = callee.kind && let Res::Def(_, def_id) = path.res @@ -1294,9 +1302,7 @@ { // The user provided `ptr::null()`, but the function expects // `ptr::null_mut()`. - err.subdiagnostic(SuggestPtrNullMut { - span: arg.span - }); + err.subdiagnostic(SuggestPtrNullMut { span: arg.span }); } } @@ -1370,7 +1376,10 @@ } _ => bug!("unexpected type: {:?}", ty.normalized), }, - Res::Def(DefKind::Struct | DefKind::Union | DefKind::TyAlias | DefKind::AssocTy, _) + Res::Def( + DefKind::Struct | DefKind::Union | DefKind::TyAlias { .. } | DefKind::AssocTy, + _, + ) | Res::SelfTyParam { .. } | Res::SelfTyAlias { .. } => match ty.normalized.ty_adt_def() { Some(adt) if !adt.is_enum() => { @@ -1840,6 +1849,55 @@ } } + pub(super) fn collect_unused_stmts_for_coerce_return_ty( + &self, + errors_causecode: Vec<(Span, ObligationCauseCode<'tcx>)>, + ) { + for (span, code) in errors_causecode { + let Some(mut diag) = + self.tcx.sess.diagnostic().steal_diagnostic(span, StashKey::MaybeForgetReturn) + else { + continue; + }; + + if let Some(fn_sig) = self.body_fn_sig() + && let ExprBindingObligation(_, _, hir_id, ..) 
= code + && !fn_sig.output().is_unit() + { + let mut block_num = 0; + let mut found_semi = false; + for (_, node) in self.tcx.hir().parent_iter(hir_id) { + match node { + hir::Node::Stmt(stmt) => if let hir::StmtKind::Semi(ref expr) = stmt.kind { + let expr_ty = self.typeck_results.borrow().expr_ty(expr); + let return_ty = fn_sig.output(); + if !matches!(expr.kind, hir::ExprKind::Ret(..)) && + self.can_coerce(expr_ty, return_ty) { + found_semi = true; + } + }, + hir::Node::Block(_block) => if found_semi { + block_num += 1; + } + hir::Node::Item(item) => if let hir::ItemKind::Fn(..) = item.kind { + break; + } + _ => {} + } + } + if block_num > 1 && found_semi { + diag.span_suggestion_verbose( + span.shrink_to_lo(), + "you might have meant to return this to infer its type parameters", + "return ", + Applicability::MaybeIncorrect, + ); + } + } + diag.emit(); + } + } + /// Given a vector of fulfillment errors, try to adjust the spans of the /// errors to more accurately point at the cause of the failure. /// @@ -1899,6 +1957,8 @@ err: &mut Diagnostic, callable_def_id: Option<DefId>, callee_ty: Option<Ty<'tcx>>, + call_expr: &'tcx hir::Expr<'tcx>, + expected_ty: Option<Ty<'tcx>>, // A specific argument should be labeled, instead of all of them expected_idx: Option<usize>, is_method: bool, @@ -1921,8 +1981,7 @@ let callee_ty = callee_ty.peel_refs(); match *callee_ty.kind() { ty::Param(param) => { - let param = - self.tcx.generics_of(self.body_id).type_param(&param, self.tcx); + let param = self.tcx.generics_of(self.body_id).type_param(&param, self.tcx); if param.kind.is_synthetic() { // if it's `impl Fn() -> ..` then just fall down to the def-id based logic def_id = param.def_id; @@ -1936,8 +1995,7 @@ // FIXME(compiler-errors): This could be problematic if something has two // fn-like predicates with different args, but callable types really never // do that, so it's OK. - for (predicate, span) in instantiated - { + for (predicate, span) in instantiated { if let ty::ClauseKind::Trait(pred) = predicate.kind().skip_binder() && pred.self_ty().peel_refs() == callee_ty && self.tcx.is_fn_trait(pred.def_id()) @@ -1956,7 +2014,8 @@ _ => { // Look for a user-provided impl of a `Fn` trait, and point to it. let new_def_id = self.probe(|_| { - let trait_ref = ty::TraitRef::new(self.tcx, + let trait_ref = ty::TraitRef::new( + self.tcx, call_kind.to_def_id(self.tcx), [ callee_ty, @@ -1988,7 +2047,9 @@ } } - if let Some(def_span) = self.tcx.def_ident_span(def_id) && !def_span.is_dummy() { + if let Some(def_span) = self.tcx.def_ident_span(def_id) + && !def_span.is_dummy() + { let mut spans: MultiSpan = def_span.into(); let params = self @@ -2015,6 +2076,48 @@ let param = expected_idx .and_then(|expected_idx| self.tcx.hir().body(*body).params.get(expected_idx)); let (kind, span) = if let Some(param) = param { + // Try to find earlier invocations of this closure to find if the type mismatch + // is because of inference. If we find one, point at them.
+ let mut call_finder = FindClosureArg { tcx: self.tcx, calls: vec![] }; + let node = self + .tcx + .opt_local_def_id_to_hir_id(self.tcx.hir().get_parent_item(call_expr.hir_id)) + .and_then(|hir_id| self.tcx.hir().find(hir_id)); + match node { + Some(hir::Node::Item(item)) => call_finder.visit_item(item), + Some(hir::Node::TraitItem(item)) => call_finder.visit_trait_item(item), + Some(hir::Node::ImplItem(item)) => call_finder.visit_impl_item(item), + _ => {} + } + let typeck = self.typeck_results.borrow(); + for (rcvr, args) in call_finder.calls { + if rcvr.hir_id.owner == typeck.hir_owner + && let Some(rcvr_ty) = typeck.node_type_opt(rcvr.hir_id) + && let ty::Closure(call_def_id, _) = rcvr_ty.kind() + && def_id == *call_def_id + && let Some(idx) = expected_idx + && let Some(arg) = args.get(idx) + && let Some(arg_ty) = typeck.node_type_opt(arg.hir_id) + && let Some(expected_ty) = expected_ty + && self.can_eq(self.param_env, arg_ty, expected_ty) + { + let mut sp: MultiSpan = vec![arg.span].into(); + sp.push_span_label( + arg.span, + format!("expected because this argument is of type `{arg_ty}`"), + ); + sp.push_span_label(rcvr.span, "in this closure call"); + err.span_note( + sp, + format!( + "expected because the closure was earlier called with an \ + argument of type `{arg_ty}`", + ), + ); + break; + } + } + ("closure parameter", param.span) } else { ("closure", self.tcx.def_span(def_id)) @@ -2028,3 +2131,23 @@ } } } + +struct FindClosureArg<'tcx> { + tcx: TyCtxt<'tcx>, + calls: Vec<(&'tcx hir::Expr<'tcx>, &'tcx [hir::Expr<'tcx>])>, +} + +impl<'tcx> Visitor<'tcx> for FindClosureArg<'tcx> { + type NestedFilter = rustc_middle::hir::nested_filter::All; + + fn nested_visit_map(&mut self) -> Self::Map { + self.tcx.hir() + } + + fn visit_expr(&mut self, ex: &'tcx hir::Expr<'tcx>) { + if let hir::ExprKind::Call(rcvr, args) = ex.kind { + self.calls.push((rcvr, args)); + } + hir::intravisit::walk_expr(self, ex); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,9 +4,7 @@ mod checks; mod suggestions; -pub use _impl::*; use rustc_errors::ErrorGuaranteed; -pub use suggestions::*; use crate::coercion::DynamicCoerceMany; use crate::{Diverges, EnclosingBreakables, Inherited}; @@ -221,14 +219,14 @@ let item_def_id = tcx.hir().ty_param_owner(def_id); let generics = tcx.generics_of(item_def_id); let index = generics.param_def_id_to_index[&def_id.to_def_id()]; + // HACK(eddyb) should get the original `Span`. + let span = tcx.def_span(def_id); ty::GenericPredicates { parent: None, predicates: tcx.arena.alloc_from_iter( self.param_env.caller_bounds().iter().filter_map(|predicate| { match predicate.kind().skip_binder() { ty::ClauseKind::Trait(data) if data.self_ty().is_param(index) => { - // HACK(eddyb) should get the original `Span`. 
- let span = tcx.def_span(def_id); Some((predicate, span)) } _ => None, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,19 +2,28 @@ use crate::errors; use crate::fluent_generated as fluent; +use crate::fn_ctxt::rustc_span::BytePos; +use crate::hir::is_range_literal; +use crate::method::probe; use crate::method::probe::{IsSuggestion, Mode, ProbeScope}; +use crate::rustc_middle::ty::Article; +use crate::ty::TypeAndMut; +use core::cmp::min; +use core::iter; use rustc_ast::util::parser::{ExprPrecedence, PREC_POSTFIX}; use rustc_errors::{Applicability, Diagnostic, MultiSpan}; use rustc_hir as hir; +use rustc_hir::def::Res; use rustc_hir::def::{CtorKind, CtorOf, DefKind}; use rustc_hir::lang_items::LangItem; use rustc_hir::{ - AsyncGeneratorKind, Expr, ExprKind, GeneratorKind, GenericBound, HirId, Node, Path, QPath, - Stmt, StmtKind, TyKind, WherePredicate, + CoroutineKind, CoroutineSource, Expr, ExprKind, GenericBound, HirId, Node, Path, QPath, Stmt, + StmtKind, TyKind, WherePredicate, }; use rustc_hir_analysis::astconv::AstConv; use rustc_infer::traits::{self, StatementAsExpression}; use rustc_middle::lint::in_external_macro; +use rustc_middle::middle::stability::EvalResult; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::{ self, suggest_constraining_type_params, Binder, IsSuggestable, ToPredicate, Ty, @@ -254,22 +263,24 @@ expr: &hir::Expr<'tcx>, expected: Ty<'tcx>, ) -> bool { - if let hir::ExprKind::MethodCall(hir::PathSegment { ident: method, .. }, recv_expr, &[], _) = expr.kind && - let Some(recv_ty) = self.typeck_results.borrow().expr_ty_opt(recv_expr) && - self.can_coerce(recv_ty, expected) { - let span = if let Some(recv_span) = recv_expr.span.find_ancestor_inside(expr.span) { - expr.span.with_lo(recv_span.hi()) - } else { - expr.span.with_lo(method.span.lo() - rustc_span::BytePos(1)) - }; - err.span_suggestion_verbose( - span, - "try removing the method call", - "", - Applicability::MachineApplicable, - ); - return true; - } + if let hir::ExprKind::MethodCall(hir::PathSegment { ident: method, .. 
}, recv_expr, &[], _) = + expr.kind + && let Some(recv_ty) = self.typeck_results.borrow().expr_ty_opt(recv_expr) + && self.can_coerce(recv_ty, expected) + { + let span = if let Some(recv_span) = recv_expr.span.find_ancestor_inside(expr.span) { + expr.span.with_lo(recv_span.hi()) + } else { + expr.span.with_lo(method.span.lo() - rustc_span::BytePos(1)) + }; + err.span_suggestion_verbose( + span, + "try removing the method call", + "", + Applicability::MachineApplicable, + ); + return true; + } false } @@ -347,10 +358,16 @@ let name = self.tcx.item_name(def_id); let kind = self.tcx.def_kind(def_id); if let DefKind::Ctor(of, CtorKind::Fn) = kind { - err.span_label(sp, format!("`{name}` defines {} constructor here, which should be called", match of { - CtorOf::Struct => "a struct", - CtorOf::Variant => "an enum variant", - })); + err.span_label( + sp, + format!( + "`{name}` defines {} constructor here, which should be called", + match of { + CtorOf::Struct => "a struct", + CtorOf::Variant => "an enum variant", + } + ), + ); } else { let descr = self.tcx.def_kind_descr(kind, def_id); err.span_label(sp, format!("{descr} `{name}` defined here")); @@ -370,25 +387,20 @@ if let Some(method_ident) = receiver_method_ident && method_ident.name == conversion_method.name { - return None // do not suggest code that is already there (#53348) + return None; // do not suggest code that is already there (#53348) } let method_call_list = [sym::to_vec, sym::to_string]; let mut sugg = if let ExprKind::MethodCall(receiver_method, ..) = expr.kind && receiver_method.ident.name == sym::clone && method_call_list.contains(&conversion_method.name) - // If receiver is `.clone()` and found type has one of those methods, - // we guess that the user wants to convert from a slice type (`&[]` or `&str`) - // to an owned type (`Vec` or `String`). These conversions clone internally, - // so we remove the user's `clone` call. - { - vec![( - receiver_method.ident.span, - conversion_method.name.to_string() - )] - } else if expr.precedence().order() - < ExprPrecedence::MethodCall.order() + // If receiver is `.clone()` and found type has one of those methods, + // we guess that the user wants to convert from a slice type (`&[]` or `&str`) + // to an owned type (`Vec` or `String`). These conversions clone internally, + // so we remove the user's `clone` call. { + vec![(receiver_method.ident.span, conversion_method.name.to_string())] + } else if expr.precedence().order() < ExprPrecedence::MethodCall.order() { vec![ (expr.span.shrink_to_lo(), "(".to_string()), (expr.span.shrink_to_hi(), format!(").{}()", conversion_method.name)), @@ -431,7 +443,11 @@ // Given `Result<_, E>`, check our expected ty is `Result<_, &E>` for // `as_ref` and `as_deref` compatibility. let error_tys_equate_as_ref = error_tys.map_or(true, |(found, expected)| { - self.can_eq(self.param_env, Ty::new_imm_ref(self.tcx,self.tcx.lifetimes.re_erased, found), expected) + self.can_eq( + self.param_env, + Ty::new_imm_ref(self.tcx, self.tcx.lifetimes.re_erased, found), + expected, + ) }); // FIXME: This could/should be extended to suggest `as_mut` and `as_deref_mut`, // but those checks need to be a bit more delicate and the benefit is diminishing. @@ -525,10 +541,10 @@ ty::Tuple(tuple) if tuple.is_empty() => { errors::SuggestBoxing::Unit { start: span.shrink_to_lo(), end: span } } - ty::Generator(def_id, ..) + ty::Coroutine(def_id, ..) 
if matches!( - self.tcx.generator_kind(def_id), - Some(GeneratorKind::Async(AsyncGeneratorKind::Closure)) + self.tcx.coroutine_kind(def_id), + Some(CoroutineKind::Async(CoroutineSource::Closure)) ) => { errors::SuggestBoxing::AsyncBody @@ -604,8 +620,12 @@ return false; } let box_found = Ty::new_box(self.tcx, found); - let pin_box_found = Ty::new_lang_item(self.tcx, box_found, LangItem::Pin).unwrap(); - let pin_found = Ty::new_lang_item(self.tcx, found, LangItem::Pin).unwrap(); + let Some(pin_box_found) = Ty::new_lang_item(self.tcx, box_found, LangItem::Pin) else { + return false; + }; + let Some(pin_found) = Ty::new_lang_item(self.tcx, found, LangItem::Pin) else { + return false; + }; match expected.kind() { ty::Adt(def, _) if Some(def.did()) == pin_did => { if self.can_coerce(pin_box_found, expected) { @@ -766,61 +786,75 @@ } &hir::FnRetTy::DefaultReturn(span) if expected.is_unit() => { if let Some(found) = found.make_suggestable(self.tcx, false) { - err.subdiagnostic(errors::AddReturnTypeSuggestion::Add { span, found: found.to_string() }); + err.subdiagnostic(errors::AddReturnTypeSuggestion::Add { + span, + found: found.to_string(), + }); return true; } else if let ty::Closure(_, args) = found.kind() // FIXME(compiler-errors): Get better at printing binders... && let closure = args.as_closure() && closure.sig().is_suggestable(self.tcx, false) { - err.subdiagnostic(errors::AddReturnTypeSuggestion::Add { span, found: closure.print_as_impl_trait().to_string() }); + err.subdiagnostic(errors::AddReturnTypeSuggestion::Add { + span, + found: closure.print_as_impl_trait().to_string(), + }); return true; } else { // FIXME: if `found` could be `impl Iterator` we should suggest that. err.subdiagnostic(errors::AddReturnTypeSuggestion::MissingHere { span }); - return true + return true; } } hir::FnRetTy::Return(hir_ty) => { - let span = hir_ty.span; - if let hir::TyKind::OpaqueDef(item_id, ..) = hir_ty.kind && let hir::Node::Item(hir::Item { - kind: hir::ItemKind::OpaqueTy(op_ty), - .. + kind: hir::ItemKind::OpaqueTy(op_ty), .. }) = self.tcx.hir().get(item_id.hir_id()) - && let [hir::GenericBound::LangItemTrait( - hir::LangItem::Future, _, _, generic_args)] = op_ty.bounds + && let [ + hir::GenericBound::LangItemTrait(hir::LangItem::Future, _, _, generic_args), + ] = op_ty.bounds && let hir::GenericArgs { bindings: [ty_binding], .. } = generic_args - && let hir::TypeBindingKind::Equality { term: hir::Term::Ty(term) } = ty_binding.kind + && let hir::TypeBindingKind::Equality { term: hir::Term::Ty(term) } = + ty_binding.kind { // Check if async function's return type was omitted. // Don't emit suggestions if the found type is `impl Future<...>`. debug!(?found); if found.is_suggestable(self.tcx, false) { if term.span.is_empty() { - err.subdiagnostic(errors::AddReturnTypeSuggestion::Add { span, found: found.to_string() }); + err.subdiagnostic(errors::AddReturnTypeSuggestion::Add { + span: term.span, + found: found.to_string(), + }); return true; } else { - err.subdiagnostic(errors::ExpectedReturnTypeLabel::Other { span, expected }); + err.subdiagnostic(errors::ExpectedReturnTypeLabel::Other { + span: term.span, + expected, + }); } } - } - - // Only point to return type if the expected type is the return type, as if they - // are not, the expectation must have been caused by something else. 
- debug!("return type {:?}", hir_ty); - let ty = self.astconv().ast_ty_to_ty(hir_ty); - debug!("return type {:?}", ty); - debug!("expected type {:?}", expected); - let bound_vars = self.tcx.late_bound_vars(hir_ty.hir_id.owner.into()); - let ty = Binder::bind_with_vars(ty, bound_vars); - let ty = self.normalize(span, ty); - let ty = self.tcx.erase_late_bound_regions(ty); - if self.can_coerce(expected, ty) { - err.subdiagnostic(errors::ExpectedReturnTypeLabel::Other { span, expected }); - self.try_suggest_return_impl_trait(err, expected, ty, fn_id); - return true; + } else { + // Only point to return type if the expected type is the return type, as if they + // are not, the expectation must have been caused by something else. + debug!("return type {:?}", hir_ty); + let ty = self.astconv().ast_ty_to_ty(hir_ty); + debug!("return type {:?}", ty); + debug!("expected type {:?}", expected); + let bound_vars = self.tcx.late_bound_vars(hir_ty.hir_id.owner.into()); + let ty = Binder::bind_with_vars(ty, bound_vars); + let ty = self.normalize(hir_ty.span, ty); + let ty = self.tcx.erase_late_bound_regions(ty); + if self.can_coerce(expected, ty) { + err.subdiagnostic(errors::ExpectedReturnTypeLabel::Other { + span: hir_ty.span, + expected, + }); + self.try_suggest_return_impl_trait(err, expected, ty, fn_id); + return true; + } } } _ => {} @@ -1075,13 +1109,13 @@ .type_implements_trait( clone_trait_def, [self.tcx.erase_regions(expected_ty)], - self.param_env + self.param_env, ) .must_apply_modulo_regions() - { + { let suggestion = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!(": {ident}.clone()"), - None => ".clone()".to_string() + None => ".clone()".to_string(), }; diag.span_suggestion_verbose( @@ -1091,7 +1125,7 @@ Applicability::MachineApplicable, ); return true; - } + } false } @@ -1119,31 +1153,27 @@ let expr_inner_ty = args.type_at(0); let expected_inner_ty = expected_args.type_at(0); if let &ty::Ref(_, ty, _mutability) = expr_inner_ty.kind() - && self.can_eq(self.param_env, ty, expected_inner_ty) + && self.can_eq(self.param_env, ty, expected_inner_ty) + { + let def_path = self.tcx.def_path_str(adt_def.did()); + let span = expr.span.shrink_to_hi(); + let subdiag = if self.type_is_copy_modulo_regions(self.param_env, ty) { + errors::OptionResultRefMismatch::Copied { span, def_path } + } else if let Some(clone_did) = self.tcx.lang_items().clone_trait() + && rustc_trait_selection::traits::type_known_to_meet_bound_modulo_regions( + self, + self.param_env, + ty, + clone_did, + ) { - let def_path = self.tcx.def_path_str(adt_def.did()); - let span = expr.span.shrink_to_hi(); - let subdiag = if self.type_is_copy_modulo_regions(self.param_env, ty) { - errors::OptionResultRefMismatch::Copied { - span, def_path - } - } else if let Some(clone_did) = self.tcx.lang_items().clone_trait() - && rustc_trait_selection::traits::type_known_to_meet_bound_modulo_regions( - self, - self.param_env, - ty, - clone_did, - ) - { - errors::OptionResultRefMismatch::Cloned { - span, def_path - } - } else { - return false; - }; - diag.subdiagnostic(subdiag); - return true; - } + errors::OptionResultRefMismatch::Cloned { span, def_path } + } else { + return false; + }; + diag.subdiagnostic(subdiag); + return true; + } } false @@ -1179,14 +1209,12 @@ self.tcx, self.misc(expr.span), self.param_env, - ty::TraitRef::new(self.tcx, - into_def_id, - [expr_ty, expected_ty] - ), + ty::TraitRef::new(self.tcx, into_def_id, [expr_ty, expected_ty]), )) { let mut span = expr.span; - while 
expr.span.eq_ctxt(span) && let Some(parent_callsite) = span.parent_callsite() + while expr.span.eq_ctxt(span) + && let Some(parent_callsite) = span.parent_callsite() { span = parent_callsite; } @@ -1194,7 +1222,10 @@ let sugg = if expr.precedence().order() >= PREC_POSTFIX { vec![(span.shrink_to_hi(), ".into()".to_owned())] } else { - vec![(span.shrink_to_lo(), "(".to_owned()), (span.shrink_to_hi(), ").into()".to_owned())] + vec![ + (span.shrink_to_lo(), "(".to_owned()), + (span.shrink_to_hi(), ").into()".to_owned()), + ] }; diag.multipart_suggestion( format!("call `Into::into` on this expression to convert `{expr_ty}` into `{expected_ty}`"), @@ -1236,9 +1267,12 @@ // since the user probably just misunderstood how `let else` // and `&&` work together. if let Some((_, hir::Node::Local(local))) = cond_parent - && let hir::PatKind::Path(qpath) | hir::PatKind::TupleStruct(qpath, _, _) = &local.pat.kind + && let hir::PatKind::Path(qpath) | hir::PatKind::TupleStruct(qpath, _, _) = + &local.pat.kind && let hir::QPath::Resolved(None, path) = qpath - && let Some(did) = path.res.opt_def_id() + && let Some(did) = path + .res + .opt_def_id() .and_then(|did| self.tcx.opt_parent(did)) .and_then(|did| self.tcx.opt_parent(did)) && self.tcx.is_diagnostic_item(sym::Option, did) @@ -1605,7 +1639,8 @@ .. }) => { let Some(hir::Node::Local(hir::Local { init: Some(init), .. })) = - self.tcx.hir().find(self.tcx.hir().parent_id(*pat_hir_id)) else { + self.tcx.hir().find(self.tcx.hir().parent_id(*pat_hir_id)) + else { return expr; }; @@ -1632,12 +1667,18 @@ // to worry if it's a call to a typed function or closure as this would ne handled // previously. hir::ExprKind::Call(Expr { kind: call_expr_kind, .. }, _) => { - if let hir::ExprKind::Path(hir::QPath::Resolved(None, call_expr_path)) = call_expr_kind - && let hir::Path { segments: [_], res: crate::Res::Local(binding), .. } = call_expr_path - && let Some(hir::Node::Pat(hir::Pat { hir_id, .. })) = self.tcx.hir().find(*binding) + if let hir::ExprKind::Path(hir::QPath::Resolved(None, call_expr_path)) = + call_expr_kind + && let hir::Path { segments: [_], res: crate::Res::Local(binding), .. } = + call_expr_path + && let Some(hir::Node::Pat(hir::Pat { hir_id, .. })) = + self.tcx.hir().find(*binding) && let Some(closure) = self.tcx.hir().find(self.tcx.hir().parent_id(*hir_id)) && let hir::Node::Local(hir::Local { init: Some(init), .. }) = closure - && let Expr { kind: hir::ExprKind::Closure(hir::Closure { body: body_id, .. }), ..} = init + && let Expr { + kind: hir::ExprKind::Closure(hir::Closure { body: body_id, .. }), + .. + } = init { let hir::Body { value: body_expr, .. } = self.tcx.hir().body(*body_id); self.note_type_is_not_clone_inner_expr(body_expr) @@ -1687,4 +1728,1361 @@ false } } + + pub(crate) fn is_field_suggestable( + &self, + field: &ty::FieldDef, + hir_id: HirId, + span: Span, + ) -> bool { + // The field must be visible in the containing module. + field.vis.is_accessible_from(self.tcx.parent_module(hir_id), self.tcx) + // The field must not be unstable. + && !matches!( + self.tcx.eval_stability(field.did, None, rustc_span::DUMMY_SP, None), + rustc_middle::middle::stability::EvalResult::Deny { .. } + ) + // If the field is from an external crate it must not be `doc(hidden)`. + && (field.did.is_local() || !self.tcx.is_doc_hidden(field.did)) + // If the field is hygienic it must come from the same syntax context. 
+ && self.tcx.def_ident_span(field.did).unwrap().normalize_to_macros_2_0().eq_ctxt(span) + } + + pub(crate) fn suggest_missing_unwrap_expect( + &self, + err: &mut Diagnostic, + expr: &hir::Expr<'tcx>, + expected: Ty<'tcx>, + found: Ty<'tcx>, + ) -> bool { + let ty::Adt(adt, args) = found.kind() else { + return false; + }; + let ret_ty_matches = |diagnostic_item| { + let Some(sig) = self.body_fn_sig() else { + return false; + }; + let ty::Adt(kind, _) = sig.output().kind() else { + return false; + }; + self.tcx.is_diagnostic_item(diagnostic_item, kind.did()) + }; + + // don't suggest anything like `Ok(ok_val).unwrap()` , `Some(some_val).unwrap()`, + // `None.unwrap()` etc. + let is_ctor = matches!( + expr.kind, + hir::ExprKind::Call( + hir::Expr { + kind: hir::ExprKind::Path(hir::QPath::Resolved( + None, + hir::Path { res: Res::Def(hir::def::DefKind::Ctor(_, _), _), .. }, + )), + .. + }, + .., + ) | hir::ExprKind::Path(hir::QPath::Resolved( + None, + hir::Path { res: Res::Def(hir::def::DefKind::Ctor(_, _), _), .. }, + )), + ); + + let (article, kind, variant, sugg_operator) = + if self.tcx.is_diagnostic_item(sym::Result, adt.did()) { + ("a", "Result", "Err", ret_ty_matches(sym::Result)) + } else if self.tcx.is_diagnostic_item(sym::Option, adt.did()) { + ("an", "Option", "None", ret_ty_matches(sym::Option)) + } else { + return false; + }; + if is_ctor || !self.can_coerce(args.type_at(0), expected) { + return false; + } + + let (msg, sugg) = if sugg_operator { + ( + format!( + "use the `?` operator to extract the `{found}` value, propagating \ + {article} `{kind}::{variant}` value to the caller" + ), + "?", + ) + } else { + ( + format!( + "consider using `{kind}::expect` to unwrap the `{found}` value, \ + panicking if the value is {article} `{kind}::{variant}`" + ), + ".expect(\"REASON\")", + ) + }; + err.span_suggestion_verbose( + expr.span.shrink_to_hi(), + msg, + sugg, + Applicability::HasPlaceholders, + ); + return true; + } + + pub(crate) fn suggest_coercing_result_via_try_operator( + &self, + err: &mut Diagnostic, + expr: &hir::Expr<'tcx>, + expected: Ty<'tcx>, + found: Ty<'tcx>, + ) -> bool { + let map = self.tcx.hir(); + let returned = matches!( + map.find_parent(expr.hir_id), + Some(hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Ret(_), .. })) + ) || map.get_return_block(expr.hir_id).is_some(); + if returned + && let ty::Adt(e, args_e) = expected.kind() + && let ty::Adt(f, args_f) = found.kind() + && e.did() == f.did() + && Some(e.did()) == self.tcx.get_diagnostic_item(sym::Result) + && let e_ok = args_e.type_at(0) + && let f_ok = args_f.type_at(0) + && self.infcx.can_eq(self.param_env, f_ok, e_ok) + && let e_err = args_e.type_at(1) + && let f_err = args_f.type_at(1) + && self + .infcx + .type_implements_trait( + self.tcx.get_diagnostic_item(sym::Into).unwrap(), + [f_err, e_err], + self.param_env, + ) + .must_apply_modulo_regions() + { + err.multipart_suggestion( + "use `?` to coerce and return an appropriate `Err`, and wrap the resulting value \ + in `Ok` so the expression remains of type `Result`", + vec![ + (expr.span.shrink_to_lo(), "Ok(".to_string()), + (expr.span.shrink_to_hi(), "?)".to_string()), + ], + Applicability::MaybeIncorrect, + ); + return true; + } + false + } + + /// If the expected type is an enum (Issue #55250) with any variants whose + /// sole field is of the found type, suggest such variants. 
(Issue #42764) + pub(crate) fn suggest_compatible_variants( + &self, + err: &mut Diagnostic, + expr: &hir::Expr<'_>, + expected: Ty<'tcx>, + expr_ty: Ty<'tcx>, + ) -> bool { + if in_external_macro(self.tcx.sess, expr.span) { + return false; + } + if let ty::Adt(expected_adt, args) = expected.kind() { + if let hir::ExprKind::Field(base, ident) = expr.kind { + let base_ty = self.typeck_results.borrow().expr_ty(base); + if self.can_eq(self.param_env, base_ty, expected) + && let Some(base_span) = base.span.find_ancestor_inside(expr.span) + { + err.span_suggestion_verbose( + expr.span.with_lo(base_span.hi()), + format!("consider removing the tuple struct field `{ident}`"), + "", + Applicability::MaybeIncorrect, + ); + return true; + } + } + + // If the expression is of type () and it's the return expression of a block, + // we suggest adding a separate return expression instead. + // (To avoid things like suggesting `Ok(while .. { .. })`.) + if expr_ty.is_unit() { + let mut id = expr.hir_id; + let mut parent; + + // Unroll desugaring, to make sure this works for `for` loops etc. + loop { + parent = self.tcx.hir().parent_id(id); + if let Some(parent_span) = self.tcx.hir().opt_span(parent) { + if parent_span.find_ancestor_inside(expr.span).is_some() { + // The parent node is part of the same span, so is the result of the + // same expansion/desugaring and not the 'real' parent node. + id = parent; + continue; + } + } + break; + } + + if let Some(hir::Node::Block(&hir::Block { + span: block_span, expr: Some(e), .. + })) = self.tcx.hir().find(parent) + { + if e.hir_id == id { + if let Some(span) = expr.span.find_ancestor_inside(block_span) { + let return_suggestions = if self + .tcx + .is_diagnostic_item(sym::Result, expected_adt.did()) + { + vec!["Ok(())"] + } else if self.tcx.is_diagnostic_item(sym::Option, expected_adt.did()) { + vec!["None", "Some(())"] + } else { + return false; + }; + if let Some(indent) = + self.tcx.sess.source_map().indentation_before(span.shrink_to_lo()) + { + // Add a semicolon, except after `}`. 
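// A minimal sketch (assumed user code) of the case handled in this branch: the tail
// of the block is a `()`-typed loop while the function must return `Result<(), _>`,
// so the suggestion is to append a separate tail expression such as `Ok(())` on its
// own line rather than wrapping the whole loop.
fn check_all(paths: &[&str]) -> Result<(), std::io::Error> {
    for p in paths {
        std::fs::metadata(p)?;
    }
    // Without this tail expression the mismatch is reported with
    // "try adding an expression at the end of the block".
    Ok(())
}

fn main() {
    check_all(&[]).unwrap();
}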
+ let semicolon = + match self.tcx.sess.source_map().span_to_snippet(span) { + Ok(s) if s.ends_with('}') => "", + _ => ";", + }; + err.span_suggestions( + span.shrink_to_hi(), + "try adding an expression at the end of the block", + return_suggestions + .into_iter() + .map(|r| format!("{semicolon}\n{indent}{r}")), + Applicability::MaybeIncorrect, + ); + } + return true; + } + } + } + } + + let compatible_variants: Vec<(String, _, _, Option)> = expected_adt + .variants() + .iter() + .filter(|variant| { + variant.fields.len() == 1 + }) + .filter_map(|variant| { + let sole_field = &variant.single_field(); + + let field_is_local = sole_field.did.is_local(); + let field_is_accessible = + sole_field.vis.is_accessible_from(expr.hir_id.owner.def_id, self.tcx) + // Skip suggestions for unstable public fields (for example `Pin::pointer`) + && matches!(self.tcx.eval_stability(sole_field.did, None, expr.span, None), EvalResult::Allow | EvalResult::Unmarked); + + if !field_is_local && !field_is_accessible { + return None; + } + + let note_about_variant_field_privacy = (field_is_local && !field_is_accessible) + .then(|| " (its field is private, but it's local to this crate and its privacy can be changed)".to_string()); + + let sole_field_ty = sole_field.ty(self.tcx, args); + if self.can_coerce(expr_ty, sole_field_ty) { + let variant_path = + with_no_trimmed_paths!(self.tcx.def_path_str(variant.def_id)); + // FIXME #56861: DRYer prelude filtering + if let Some(path) = variant_path.strip_prefix("std::prelude::") + && let Some((_, path)) = path.split_once("::") + { + return Some((path.to_string(), variant.ctor_kind(), sole_field.name, note_about_variant_field_privacy)); + } + Some((variant_path, variant.ctor_kind(), sole_field.name, note_about_variant_field_privacy)) + } else { + None + } + }) + .collect(); + + let suggestions_for = |variant: &_, ctor_kind, field_name| { + let prefix = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { + Some(ident) => format!("{ident}: "), + None => String::new(), + }; + + let (open, close) = match ctor_kind { + Some(CtorKind::Fn) => ("(".to_owned(), ")"), + None => (format!(" {{ {field_name}: "), " }"), + + // unit variants don't have fields + Some(CtorKind::Const) => unreachable!(), + }; + + // Suggest constructor as deep into the block tree as possible. + // This fixes https://github.com/rust-lang/rust/issues/101065, + // and also just helps make the most minimal suggestions. + let mut expr = expr; + while let hir::ExprKind::Block(block, _) = &expr.kind + && let Some(expr_) = &block.expr + { + expr = expr_ + } + + vec![ + (expr.span.shrink_to_lo(), format!("{prefix}{variant}{open}")), + (expr.span.shrink_to_hi(), close.to_owned()), + ] + }; + + match &compatible_variants[..] { + [] => { /* No variants to format */ } + [(variant, ctor_kind, field_name, note)] => { + // Just a single matching variant. + err.multipart_suggestion_verbose( + format!( + "try wrapping the expression in `{variant}`{note}", + note = note.as_deref().unwrap_or("") + ), + suggestions_for(&**variant, *ctor_kind, *field_name), + Applicability::MaybeIncorrect, + ); + return true; + } + _ => { + // More than one matching variant. 
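// A small illustrative example (assumed user code) of the single-variant case handled
// just above: the expression already has the type of the variant's only field, so the
// suggestion is to wrap it, e.g. "try wrapping the expression in `Some`".
fn first_even(xs: &[u32]) -> Option<u32> {
    let found = xs.iter().copied().find(|&x| x % 2 == 0)?;
    // Returning `found` bare would be a `u32`/`Option<u32>` mismatch; the wrapped
    // form below is what the multipart suggestion produces.
    Some(found)
}

fn main() {
    assert_eq!(first_even(&[1, 2, 3]), Some(2));
}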
+ err.multipart_suggestions( + format!( + "try wrapping the expression in a variant of `{}`", + self.tcx.def_path_str(expected_adt.did()) + ), + compatible_variants.into_iter().map( + |(variant, ctor_kind, field_name, _)| { + suggestions_for(&variant, ctor_kind, field_name) + }, + ), + Applicability::MaybeIncorrect, + ); + return true; + } + } + } + + false + } + + pub(crate) fn suggest_non_zero_new_unwrap( + &self, + err: &mut Diagnostic, + expr: &hir::Expr<'_>, + expected: Ty<'tcx>, + expr_ty: Ty<'tcx>, + ) -> bool { + let tcx = self.tcx; + let (adt, unwrap) = match expected.kind() { + // In case Option is wanted, but * is provided, suggest calling new + ty::Adt(adt, args) if tcx.is_diagnostic_item(sym::Option, adt.did()) => { + // Unwrap option + let ty::Adt(adt, _) = args.type_at(0).kind() else { + return false; + }; + + (adt, "") + } + // In case NonZero* is wanted, but * is provided also add `.unwrap()` to satisfy types + ty::Adt(adt, _) => (adt, ".unwrap()"), + _ => return false, + }; + + let map = [ + (sym::NonZeroU8, tcx.types.u8), + (sym::NonZeroU16, tcx.types.u16), + (sym::NonZeroU32, tcx.types.u32), + (sym::NonZeroU64, tcx.types.u64), + (sym::NonZeroU128, tcx.types.u128), + (sym::NonZeroI8, tcx.types.i8), + (sym::NonZeroI16, tcx.types.i16), + (sym::NonZeroI32, tcx.types.i32), + (sym::NonZeroI64, tcx.types.i64), + (sym::NonZeroI128, tcx.types.i128), + ]; + + let Some((s, _)) = map.iter().find(|&&(s, t)| { + self.tcx.is_diagnostic_item(s, adt.did()) && self.can_coerce(expr_ty, t) + }) else { + return false; + }; + + let path = self.tcx.def_path_str(adt.non_enum_variant().def_id); + + err.multipart_suggestion( + format!("consider calling `{s}::new`"), + vec![ + (expr.span.shrink_to_lo(), format!("{path}::new(")), + (expr.span.shrink_to_hi(), format!("){unwrap}")), + ], + Applicability::MaybeIncorrect, + ); + + true + } + + /// Identify some cases where `as_ref()` would be appropriate and suggest it. + /// + /// Given the following code: + /// ```compile_fail,E0308 + /// struct Foo; + /// fn takes_ref(_: &Foo) {} + /// let ref opt = Some(Foo); + /// + /// opt.map(|param| takes_ref(param)); + /// ``` + /// Suggest using `opt.as_ref().map(|param| takes_ref(param));` instead. + /// + /// It only checks for `Option` and `Result` and won't work with + /// ```ignore (illustrative) + /// opt.map(|param| { takes_ref(param) }); + /// ``` + fn can_use_as_ref(&self, expr: &hir::Expr<'_>) -> Option<(Vec<(Span, String)>, &'static str)> { + let hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) = expr.kind else { + return None; + }; + + let hir::def::Res::Local(local_id) = path.res else { + return None; + }; + + let local_parent = self.tcx.hir().parent_id(local_id); + let Some(Node::Param(hir::Param { hir_id: param_hir_id, .. })) = + self.tcx.hir().find(local_parent) + else { + return None; + }; + + let param_parent = self.tcx.hir().parent_id(*param_hir_id); + let Some(Node::Expr(hir::Expr { + hir_id: expr_hir_id, + kind: hir::ExprKind::Closure(hir::Closure { fn_decl: closure_fn_decl, .. }), + .. + })) = self.tcx.hir().find(param_parent) + else { + return None; + }; + + let expr_parent = self.tcx.hir().parent_id(*expr_hir_id); + let hir = self.tcx.hir().find(expr_parent); + let closure_params_len = closure_fn_decl.inputs.len(); + let ( + Some(Node::Expr(hir::Expr { + kind: hir::ExprKind::MethodCall(method_path, receiver, ..), + .. 
+ })), + 1, + ) = (hir, closure_params_len) + else { + return None; + }; + + let self_ty = self.typeck_results.borrow().expr_ty(receiver); + let name = method_path.ident.name; + let is_as_ref_able = match self_ty.peel_refs().kind() { + ty::Adt(def, _) => { + (self.tcx.is_diagnostic_item(sym::Option, def.did()) + || self.tcx.is_diagnostic_item(sym::Result, def.did())) + && (name == sym::map || name == sym::and_then) + } + _ => false, + }; + if is_as_ref_able { + Some(( + vec![(method_path.ident.span.shrink_to_lo(), "as_ref().".to_string())], + "consider using `as_ref` instead", + )) + } else { + None + } + } + + /// This function is used to determine potential "simple" improvements or users' errors and + /// provide them useful help. For example: + /// + /// ```compile_fail,E0308 + /// fn some_fn(s: &str) {} + /// + /// let x = "hey!".to_owned(); + /// some_fn(x); // error + /// ``` + /// + /// No need to find every potential function which could make a coercion to transform a + /// `String` into a `&str` since a `&` would do the trick! + /// + /// In addition of this check, it also checks between references mutability state. If the + /// expected is mutable but the provided isn't, maybe we could just say "Hey, try with + /// `&mut`!". + pub(crate) fn suggest_deref_or_ref( + &self, + expr: &hir::Expr<'tcx>, + checked_ty: Ty<'tcx>, + expected: Ty<'tcx>, + ) -> Option<( + Vec<(Span, String)>, + String, + Applicability, + bool, /* verbose */ + bool, /* suggest `&` or `&mut` type annotation */ + )> { + let sess = self.sess(); + let sp = expr.span; + + // If the span is from an external macro, there's no suggestion we can make. + if in_external_macro(sess, sp) { + return None; + } + + let sm = sess.source_map(); + + let replace_prefix = |s: &str, old: &str, new: &str| { + s.strip_prefix(old).map(|stripped| new.to_string() + stripped) + }; + + // `ExprKind::DropTemps` is semantically irrelevant for these suggestions. + let expr = expr.peel_drop_temps(); + + match (&expr.kind, expected.kind(), checked_ty.kind()) { + (_, &ty::Ref(_, exp, _), &ty::Ref(_, check, _)) => match (exp.kind(), check.kind()) { + (&ty::Str, &ty::Array(arr, _) | &ty::Slice(arr)) if arr == self.tcx.types.u8 => { + if let hir::ExprKind::Lit(_) = expr.kind + && let Ok(src) = sm.span_to_snippet(sp) + && replace_prefix(&src, "b\"", "\"").is_some() + { + let pos = sp.lo() + BytePos(1); + return Some(( + vec![(sp.with_hi(pos), String::new())], + "consider removing the leading `b`".to_string(), + Applicability::MachineApplicable, + true, + false, + )); + } + } + (&ty::Array(arr, _) | &ty::Slice(arr), &ty::Str) if arr == self.tcx.types.u8 => { + if let hir::ExprKind::Lit(_) = expr.kind + && let Ok(src) = sm.span_to_snippet(sp) + && replace_prefix(&src, "\"", "b\"").is_some() + { + return Some(( + vec![(sp.shrink_to_lo(), "b".to_string())], + "consider adding a leading `b`".to_string(), + Applicability::MachineApplicable, + true, + false, + )); + } + } + _ => {} + }, + (_, &ty::Ref(_, _, mutability), _) => { + // Check if it can work when put into a ref. 
For example: + // + // ``` + // fn bar(x: &mut i32) {} + // + // let x = 0u32; + // bar(&x); // error, expected &mut + // ``` + let ref_ty = match mutability { + hir::Mutability::Mut => { + Ty::new_mut_ref(self.tcx, self.tcx.lifetimes.re_static, checked_ty) + } + hir::Mutability::Not => { + Ty::new_imm_ref(self.tcx, self.tcx.lifetimes.re_static, checked_ty) + } + }; + if self.can_coerce(ref_ty, expected) { + let mut sugg_sp = sp; + if let hir::ExprKind::MethodCall(ref segment, receiver, args, _) = expr.kind { + let clone_trait = + self.tcx.require_lang_item(LangItem::Clone, Some(segment.ident.span)); + if args.is_empty() + && self.typeck_results.borrow().type_dependent_def_id(expr.hir_id).map( + |did| { + let ai = self.tcx.associated_item(did); + ai.trait_container(self.tcx) == Some(clone_trait) + }, + ) == Some(true) + && segment.ident.name == sym::clone + { + // If this expression had a clone call when suggesting borrowing + // we want to suggest removing it because it'd now be unnecessary. + sugg_sp = receiver.span; + } + } + + if let hir::ExprKind::Unary(hir::UnOp::Deref, ref inner) = expr.kind + && let Some(1) = self.deref_steps(expected, checked_ty) + { + // We have `*&T`, check if what was expected was `&T`. + // If so, we may want to suggest removing a `*`. + sugg_sp = sugg_sp.with_hi(inner.span.lo()); + return Some(( + vec![(sugg_sp, String::new())], + "consider removing deref here".to_string(), + Applicability::MachineApplicable, + true, + false, + )); + } + + let needs_parens = match expr.kind { + // parenthesize if needed (Issue #46756) + hir::ExprKind::Cast(_, _) | hir::ExprKind::Binary(_, _, _) => true, + // parenthesize borrows of range literals (Issue #54505) + _ if is_range_literal(expr) => true, + _ => false, + }; + + if let Some((sugg, msg)) = self.can_use_as_ref(expr) { + return Some(( + sugg, + msg.to_string(), + Applicability::MachineApplicable, + true, + false, + )); + } + + let prefix = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) + { + Some(ident) => format!("{ident}: "), + None => String::new(), + }; + + if let Some(hir::Node::Expr(hir::Expr { + kind: hir::ExprKind::Assign(..), + .. + })) = self.tcx.hir().find_parent(expr.hir_id) + { + if mutability.is_mut() { + // Suppressing this diagnostic, we'll properly print it in `check_expr_assign` + return None; + } + } + + let sugg = mutability.ref_prefix_str(); + let (sugg, verbose) = if needs_parens { + ( + vec![ + (sp.shrink_to_lo(), format!("{prefix}{sugg}(")), + (sp.shrink_to_hi(), ")".to_string()), + ], + false, + ) + } else { + (vec![(sp.shrink_to_lo(), format!("{prefix}{sugg}"))], true) + }; + return Some(( + sugg, + format!("consider {}borrowing here", mutability.mutably_str()), + Applicability::MachineApplicable, + verbose, + false, + )); + } + } + ( + hir::ExprKind::AddrOf(hir::BorrowKind::Ref, _, ref expr), + _, + &ty::Ref(_, checked, _), + ) if self.can_sub(self.param_env, checked, expected) => { + let make_sugg = |start: Span, end: BytePos| { + // skip `(` for tuples such as `(c) = (&123)`. + // make sure we won't suggest like `(c) = 123)` which is incorrect. + let sp = sm + .span_extend_while(start.shrink_to_lo(), |c| c == '(' || c.is_whitespace()) + .map_or(start, |s| s.shrink_to_hi()); + Some(( + vec![(sp.with_hi(end), String::new())], + "consider removing the borrow".to_string(), + Applicability::MachineApplicable, + true, + true, + )) + }; + + // We have `&T`, check if what was expected was `T`. If so, + // we may want to suggest removing a `&`. 
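// Hedged sketch (assumed user code) of the two directions this helper handles:
// adding a borrow when `&T` is expected, and removing or dereferencing one when a
// plain `T` is expected.
fn double(n: &i32) -> i32 {
    n * 2
}

fn main() {
    let x = 5;
    // Passing `x` by value here would be rejected with "consider borrowing here": `&x`.
    let y = double(&x);
    let r = &y;
    // Using `r` where an `i32` is expected gets "consider dereferencing the borrow": `*r`.
    let z: i32 = *r;
    assert_eq!(z, 10);
}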
+ if sm.is_imported(expr.span) { + // Go through the spans from which this span was expanded, + // and find the one that's pointing inside `sp`. + // + // E.g. for `&format!("")`, where we want the span to the + // `format!()` invocation instead of its expansion. + if let Some(call_span) = + iter::successors(Some(expr.span), |s| s.parent_callsite()) + .find(|&s| sp.contains(s)) + && sm.is_span_accessible(call_span) + { + return make_sugg(sp, call_span.lo()); + } + return None; + } + if sp.contains(expr.span) && sm.is_span_accessible(expr.span) { + return make_sugg(sp, expr.span.lo()); + } + } + ( + _, + &ty::RawPtr(TypeAndMut { ty: ty_b, mutbl: mutbl_b }), + &ty::Ref(_, ty_a, mutbl_a), + ) => { + if let Some(steps) = self.deref_steps(ty_a, ty_b) + // Only suggest valid if dereferencing needed. + && steps > 0 + // The pointer type implements `Copy` trait so the suggestion is always valid. + && let Ok(src) = sm.span_to_snippet(sp) + { + let derefs = "*".repeat(steps); + let old_prefix = mutbl_a.ref_prefix_str(); + let new_prefix = mutbl_b.ref_prefix_str().to_owned() + &derefs; + + let suggestion = replace_prefix(&src, old_prefix, &new_prefix).map(|_| { + // skip `&` or `&mut ` if both mutabilities are mutable + let lo = sp.lo() + + BytePos(min(old_prefix.len(), mutbl_b.ref_prefix_str().len()) as _); + // skip `&` or `&mut ` + let hi = sp.lo() + BytePos(old_prefix.len() as _); + let sp = sp.with_lo(lo).with_hi(hi); + + ( + sp, + format!( + "{}{derefs}", + if mutbl_a != mutbl_b { mutbl_b.prefix_str() } else { "" } + ), + if mutbl_b <= mutbl_a { + Applicability::MachineApplicable + } else { + Applicability::MaybeIncorrect + }, + ) + }); + + if let Some((span, src, applicability)) = suggestion { + return Some(( + vec![(span, src)], + "consider dereferencing".to_string(), + applicability, + true, + false, + )); + } + } + } + _ if sp == expr.span => { + if let Some(mut steps) = self.deref_steps(checked_ty, expected) { + let mut expr = expr.peel_blocks(); + let mut prefix_span = expr.span.shrink_to_lo(); + let mut remove = String::new(); + + // Try peeling off any existing `&` and `&mut` to reach our target type + while steps > 0 { + if let hir::ExprKind::AddrOf(_, mutbl, inner) = expr.kind { + // If the expression has `&`, removing it would fix the error + prefix_span = prefix_span.with_hi(inner.span.lo()); + expr = inner; + remove.push_str(mutbl.ref_prefix_str()); + steps -= 1; + } else { + break; + } + } + // If we've reached our target type with just removing `&`, then just print now. + if steps == 0 && !remove.trim().is_empty() { + return Some(( + vec![(prefix_span, String::new())], + format!("consider removing the `{}`", remove.trim()), + // Do not remove `&&` to get to bool, because it might be something like + // { a } && b, which we have a separate fixup suggestion that is more + // likely correct... + if remove.trim() == "&&" && expected == self.tcx.types.bool { + Applicability::MaybeIncorrect + } else { + Applicability::MachineApplicable + }, + true, + false, + )); + } + + // For this suggestion to make sense, the type would need to be `Copy`, + // or we have to be moving out of a `Box` + if self.type_is_copy_modulo_regions(self.param_env, expected) + // FIXME(compiler-errors): We can actually do this if the checked_ty is + // `steps` layers of boxes, not just one, but this is easier and most likely. + || (checked_ty.is_box() && steps == 1) + // We can always deref a binop that takes its arguments by ref. 
+ || matches!( + self.tcx.hir().get_parent(expr.hir_id), + hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Binary(op, ..), .. }) + if !op.node.is_by_value() + ) + { + let deref_kind = if checked_ty.is_box() { + "unboxing the value" + } else if checked_ty.is_ref() { + "dereferencing the borrow" + } else { + "dereferencing the type" + }; + + // Suggest removing `&` if we have removed any, otherwise suggest just + // dereferencing the remaining number of steps. + let message = if remove.is_empty() { + format!("consider {deref_kind}") + } else { + format!( + "consider removing the `{}` and {} instead", + remove.trim(), + deref_kind + ) + }; + + let prefix = + match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { + Some(ident) => format!("{ident}: "), + None => String::new(), + }; + + let (span, suggestion) = if self.is_else_if_block(expr) { + // Don't suggest nonsense like `else *if` + return None; + } else if let Some(expr) = self.maybe_get_block_expr(expr) { + // prefix should be empty here.. + (expr.span.shrink_to_lo(), "*".to_string()) + } else { + (prefix_span, format!("{}{}", prefix, "*".repeat(steps))) + }; + if suggestion.trim().is_empty() { + return None; + } + + return Some(( + vec![(span, suggestion)], + message, + Applicability::MachineApplicable, + true, + false, + )); + } + } + } + _ => {} + } + None + } + + /// Returns whether the given expression is an `else if`. + fn is_else_if_block(&self, expr: &hir::Expr<'_>) -> bool { + if let hir::ExprKind::If(..) = expr.kind { + let parent_id = self.tcx.hir().parent_id(expr.hir_id); + if let Some(Node::Expr(hir::Expr { + kind: hir::ExprKind::If(_, _, Some(else_expr)), + .. + })) = self.tcx.hir().find(parent_id) + { + return else_expr.hir_id == expr.hir_id; + } + } + false + } + + pub(crate) fn suggest_cast( + &self, + err: &mut Diagnostic, + expr: &hir::Expr<'_>, + checked_ty: Ty<'tcx>, + expected_ty: Ty<'tcx>, + expected_ty_expr: Option<&'tcx hir::Expr<'tcx>>, + ) -> bool { + if self.tcx.sess.source_map().is_imported(expr.span) { + // Ignore if span is from within a macro. + return false; + } + + let Ok(src) = self.tcx.sess.source_map().span_to_snippet(expr.span) else { + return false; + }; + + // If casting this expression to a given numeric type would be appropriate in case of a type + // mismatch. + // + // We want to minimize the amount of casting operations that are suggested, as it can be a + // lossy operation with potentially bad side effects, so we only suggest when encountering + // an expression that indicates that the original type couldn't be directly changed. + // + // For now, don't suggest casting with `as`. + let can_cast = false; + + let mut sugg = vec![]; + + if let Some(hir::Node::ExprField(field)) = self.tcx.hir().find_parent(expr.hir_id) { + // `expr` is a literal field for a struct, only suggest if appropriate + if field.is_shorthand { + // This is a field literal + sugg.push((field.ident.span.shrink_to_lo(), format!("{}: ", field.ident))); + } else { + // Likely a field was meant, but this field wasn't found. Do not suggest anything. + return false; + } + }; + + if let hir::ExprKind::Call(path, args) = &expr.kind + && let (hir::ExprKind::Path(hir::QPath::TypeRelative(base_ty, path_segment)), 1) = + (&path.kind, args.len()) + // `expr` is a conversion like `u32::from(val)`, do not suggest anything (#63697). 
+ && let (hir::TyKind::Path(hir::QPath::Resolved(None, base_ty_path)), sym::from) = + (&base_ty.kind, path_segment.ident.name) + { + if let Some(ident) = &base_ty_path.segments.iter().map(|s| s.ident).next() { + match ident.name { + sym::i128 + | sym::i64 + | sym::i32 + | sym::i16 + | sym::i8 + | sym::u128 + | sym::u64 + | sym::u32 + | sym::u16 + | sym::u8 + | sym::isize + | sym::usize + if base_ty_path.segments.len() == 1 => + { + return false; + } + _ => {} + } + } + } + + let msg = format!( + "you can convert {} `{}` to {} `{}`", + checked_ty.kind().article(), + checked_ty, + expected_ty.kind().article(), + expected_ty, + ); + let cast_msg = format!( + "you can cast {} `{}` to {} `{}`", + checked_ty.kind().article(), + checked_ty, + expected_ty.kind().article(), + expected_ty, + ); + let lit_msg = format!( + "change the type of the numeric literal from `{checked_ty}` to `{expected_ty}`", + ); + + let close_paren = if expr.precedence().order() < PREC_POSTFIX { + sugg.push((expr.span.shrink_to_lo(), "(".to_string())); + ")" + } else { + "" + }; + + let mut cast_suggestion = sugg.clone(); + cast_suggestion.push((expr.span.shrink_to_hi(), format!("{close_paren} as {expected_ty}"))); + let mut into_suggestion = sugg.clone(); + into_suggestion.push((expr.span.shrink_to_hi(), format!("{close_paren}.into()"))); + let mut suffix_suggestion = sugg.clone(); + suffix_suggestion.push(( + if matches!( + (&expected_ty.kind(), &checked_ty.kind()), + (ty::Int(_) | ty::Uint(_), ty::Float(_)) + ) { + // Remove fractional part from literal, for example `42.0f32` into `42` + let src = src.trim_end_matches(&checked_ty.to_string()); + let len = src.split('.').next().unwrap().len(); + expr.span.with_lo(expr.span.lo() + BytePos(len as u32)) + } else { + let len = src.trim_end_matches(&checked_ty.to_string()).len(); + expr.span.with_lo(expr.span.lo() + BytePos(len as u32)) + }, + if expr.precedence().order() < PREC_POSTFIX { + // Readd `)` + format!("{expected_ty})") + } else { + expected_ty.to_string() + }, + )); + let literal_is_ty_suffixed = |expr: &hir::Expr<'_>| { + if let hir::ExprKind::Lit(lit) = &expr.kind { lit.node.is_suffixed() } else { false } + }; + let is_negative_int = + |expr: &hir::Expr<'_>| matches!(expr.kind, hir::ExprKind::Unary(hir::UnOp::Neg, ..)); + let is_uint = |ty: Ty<'_>| matches!(ty.kind(), ty::Uint(..)); + + let in_const_context = self.tcx.hir().is_inside_const_context(expr.hir_id); + + let suggest_fallible_into_or_lhs_from = + |err: &mut Diagnostic, exp_to_found_is_fallible: bool| { + // If we know the expression the expected type is derived from, we might be able + // to suggest a widening conversion rather than a narrowing one (which may + // panic). For example, given x: u8 and y: u32, if we know the span of "x", + // x > y + // can be given the suggestion "u32::from(x) > y" rather than + // "x > y.try_into().unwrap()". 
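// Runnable form of the u8/u32 comparison sketched in the comment above (illustrative):
// widening the left-hand side with `u32::from` keeps the fix infallible, while the
// narrowing alternative `y.try_into().unwrap()` can panic at runtime.
use std::convert::TryInto;

fn main() {
    let x: u8 = 200;
    let y: u32 = 150;
    assert!(u32::from(x) > y);
    // Narrowing variant of the same comparison (panics if `y` exceeds `u8::MAX`):
    let y_small: u8 = y.try_into().unwrap();
    assert!(x > y_small);
}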
+ let lhs_expr_and_src = expected_ty_expr.and_then(|expr| { + self.tcx + .sess + .source_map() + .span_to_snippet(expr.span) + .ok() + .map(|src| (expr, src)) + }); + let (msg, suggestion) = if let (Some((lhs_expr, lhs_src)), false) = + (lhs_expr_and_src, exp_to_found_is_fallible) + { + let msg = format!( + "you can convert `{lhs_src}` from `{expected_ty}` to `{checked_ty}`, matching the type of `{src}`", + ); + let suggestion = vec![ + (lhs_expr.span.shrink_to_lo(), format!("{checked_ty}::from(")), + (lhs_expr.span.shrink_to_hi(), ")".to_string()), + ]; + (msg, suggestion) + } else { + let msg = + format!("{} and panic if the converted value doesn't fit", msg.clone()); + let mut suggestion = sugg.clone(); + suggestion.push(( + expr.span.shrink_to_hi(), + format!("{close_paren}.try_into().unwrap()"), + )); + (msg, suggestion) + }; + err.multipart_suggestion_verbose(msg, suggestion, Applicability::MachineApplicable); + }; + + let suggest_to_change_suffix_or_into = + |err: &mut Diagnostic, + found_to_exp_is_fallible: bool, + exp_to_found_is_fallible: bool| { + let exp_is_lhs = expected_ty_expr.is_some_and(|e| self.tcx.hir().is_lhs(e.hir_id)); + + if exp_is_lhs { + return; + } + + let always_fallible = found_to_exp_is_fallible + && (exp_to_found_is_fallible || expected_ty_expr.is_none()); + let msg = if literal_is_ty_suffixed(expr) { + lit_msg.clone() + } else if always_fallible && (is_negative_int(expr) && is_uint(expected_ty)) { + // We now know that converting either the lhs or rhs is fallible. Before we + // suggest a fallible conversion, check if the value can never fit in the + // expected type. + let msg = format!("`{src}` cannot fit into type `{expected_ty}`"); + err.note(msg); + return; + } else if in_const_context { + // Do not recommend `into` or `try_into` in const contexts. 
+ return; + } else if found_to_exp_is_fallible { + return suggest_fallible_into_or_lhs_from(err, exp_to_found_is_fallible); + } else { + msg.clone() + }; + let suggestion = if literal_is_ty_suffixed(expr) { + suffix_suggestion.clone() + } else { + into_suggestion.clone() + }; + err.multipart_suggestion_verbose(msg, suggestion, Applicability::MachineApplicable); + }; + + match (&expected_ty.kind(), &checked_ty.kind()) { + (ty::Int(exp), ty::Int(found)) => { + let (f2e_is_fallible, e2f_is_fallible) = match (exp.bit_width(), found.bit_width()) + { + (Some(exp), Some(found)) if exp < found => (true, false), + (Some(exp), Some(found)) if exp > found => (false, true), + (None, Some(8 | 16)) => (false, true), + (Some(8 | 16), None) => (true, false), + (None, _) | (_, None) => (true, true), + _ => (false, false), + }; + suggest_to_change_suffix_or_into(err, f2e_is_fallible, e2f_is_fallible); + true + } + (ty::Uint(exp), ty::Uint(found)) => { + let (f2e_is_fallible, e2f_is_fallible) = match (exp.bit_width(), found.bit_width()) + { + (Some(exp), Some(found)) if exp < found => (true, false), + (Some(exp), Some(found)) if exp > found => (false, true), + (None, Some(8 | 16)) => (false, true), + (Some(8 | 16), None) => (true, false), + (None, _) | (_, None) => (true, true), + _ => (false, false), + }; + suggest_to_change_suffix_or_into(err, f2e_is_fallible, e2f_is_fallible); + true + } + (&ty::Int(exp), &ty::Uint(found)) => { + let (f2e_is_fallible, e2f_is_fallible) = match (exp.bit_width(), found.bit_width()) + { + (Some(exp), Some(found)) if found < exp => (false, true), + (None, Some(8)) => (false, true), + _ => (true, true), + }; + suggest_to_change_suffix_or_into(err, f2e_is_fallible, e2f_is_fallible); + true + } + (&ty::Uint(exp), &ty::Int(found)) => { + let (f2e_is_fallible, e2f_is_fallible) = match (exp.bit_width(), found.bit_width()) + { + (Some(exp), Some(found)) if found > exp => (true, false), + (Some(8), None) => (true, false), + _ => (true, true), + }; + suggest_to_change_suffix_or_into(err, f2e_is_fallible, e2f_is_fallible); + true + } + (ty::Float(exp), ty::Float(found)) => { + if found.bit_width() < exp.bit_width() { + suggest_to_change_suffix_or_into(err, false, true); + } else if literal_is_ty_suffixed(expr) { + err.multipart_suggestion_verbose( + lit_msg, + suffix_suggestion, + Applicability::MachineApplicable, + ); + } else if can_cast { + // Missing try_into implementation for `f64` to `f32` + err.multipart_suggestion_verbose( + format!("{cast_msg}, producing the closest possible value"), + cast_suggestion, + Applicability::MaybeIncorrect, // lossy conversion + ); + } + true + } + (&ty::Uint(_) | &ty::Int(_), &ty::Float(_)) => { + if literal_is_ty_suffixed(expr) { + err.multipart_suggestion_verbose( + lit_msg, + suffix_suggestion, + Applicability::MachineApplicable, + ); + } else if can_cast { + // Missing try_into implementation for `{float}` to `{integer}` + err.multipart_suggestion_verbose( + format!("{msg}, rounding the float towards zero"), + cast_suggestion, + Applicability::MaybeIncorrect, // lossy conversion + ); + } + true + } + (ty::Float(exp), ty::Uint(found)) => { + // if `found` is `None` (meaning found is `usize`), don't suggest `.into()` + if exp.bit_width() > found.bit_width().unwrap_or(256) { + err.multipart_suggestion_verbose( + format!( + "{msg}, producing the floating point representation of the integer", + ), + into_suggestion, + Applicability::MachineApplicable, + ); + } else if literal_is_ty_suffixed(expr) { + err.multipart_suggestion_verbose( + lit_msg, + 
suffix_suggestion, + Applicability::MachineApplicable, + ); + } else { + // Missing try_into implementation for `{integer}` to `{float}` + err.multipart_suggestion_verbose( + format!( + "{cast_msg}, producing the floating point representation of the integer, \ + rounded if necessary", + ), + cast_suggestion, + Applicability::MaybeIncorrect, // lossy conversion + ); + } + true + } + (ty::Float(exp), ty::Int(found)) => { + // if `found` is `None` (meaning found is `isize`), don't suggest `.into()` + if exp.bit_width() > found.bit_width().unwrap_or(256) { + err.multipart_suggestion_verbose( + format!( + "{}, producing the floating point representation of the integer", + msg.clone(), + ), + into_suggestion, + Applicability::MachineApplicable, + ); + } else if literal_is_ty_suffixed(expr) { + err.multipart_suggestion_verbose( + lit_msg, + suffix_suggestion, + Applicability::MachineApplicable, + ); + } else { + // Missing try_into implementation for `{integer}` to `{float}` + err.multipart_suggestion_verbose( + format!( + "{}, producing the floating point representation of the integer, \ + rounded if necessary", + &msg, + ), + cast_suggestion, + Applicability::MaybeIncorrect, // lossy conversion + ); + } + true + } + ( + &ty::Uint(ty::UintTy::U32 | ty::UintTy::U64 | ty::UintTy::U128) + | &ty::Int(ty::IntTy::I32 | ty::IntTy::I64 | ty::IntTy::I128), + &ty::Char, + ) => { + err.multipart_suggestion_verbose( + format!("{cast_msg}, since a `char` always occupies 4 bytes"), + cast_suggestion, + Applicability::MachineApplicable, + ); + true + } + _ => false, + } + } + + /// Identify when the user has written `foo..bar()` instead of `foo.bar()`. + pub(crate) fn suggest_method_call_on_range_literal( + &self, + err: &mut Diagnostic, + expr: &hir::Expr<'tcx>, + checked_ty: Ty<'tcx>, + expected_ty: Ty<'tcx>, + ) { + if !hir::is_range_literal(expr) { + return; + } + let hir::ExprKind::Struct(hir::QPath::LangItem(LangItem::Range, ..), [start, end], _) = + expr.kind + else { + return; + }; + let parent = self.tcx.hir().parent_id(expr.hir_id); + if let Some(hir::Node::ExprField(_)) = self.tcx.hir().find(parent) { + // Ignore `Foo { field: a..Default::default() }` + return; + } + let mut expr = end.expr; + let mut expectation = Some(expected_ty); + while let hir::ExprKind::MethodCall(_, rcvr, ..) = expr.kind { + // Getting to the root receiver and asserting it is a fn call let's us ignore cases in + // `tests/ui/methods/issues/issue-90315.stderr`. + expr = rcvr; + // If we have more than one layer of calls, then the expected ty + // cannot guide the method probe. + expectation = None; + } + let hir::ExprKind::Call(method_name, _) = expr.kind else { + return; + }; + let ty::Adt(adt, _) = checked_ty.kind() else { + return; + }; + if self.tcx.lang_items().range_struct() != Some(adt.did()) { + return; + } + if let ty::Adt(adt, _) = expected_ty.kind() + && self.tcx.lang_items().range_struct() == Some(adt.did()) + { + return; + } + // Check if start has method named end. + let hir::ExprKind::Path(hir::QPath::Resolved(None, p)) = method_name.kind else { + return; + }; + let [hir::PathSegment { ident, .. 
}] = p.segments else { + return; + }; + let self_ty = self.typeck_results.borrow().expr_ty(start.expr); + let Ok(_pick) = self.lookup_probe_for_diagnostic( + *ident, + self_ty, + expr, + probe::ProbeScope::AllTraits, + expectation, + ) else { + return; + }; + let mut sugg = "."; + let mut span = start.expr.span.between(end.expr.span); + if span.lo() + BytePos(2) == span.hi() { + // There's no space between the start, the range op and the end, suggest removal which + // will be more noticeable than the replacement of `..` with `.`. + span = span.with_lo(span.lo() + BytePos(1)); + sugg = ""; + } + err.span_suggestion_verbose( + span, + "you likely meant to write a method call instead of a range", + sugg, + Applicability::MachineApplicable, + ); + } + + /// Identify when the type error is because `()` is found in a binding that was assigned a + /// block without a tail expression. + pub(crate) fn suggest_return_binding_for_missing_tail_expr( + &self, + err: &mut Diagnostic, + expr: &hir::Expr<'_>, + checked_ty: Ty<'tcx>, + expected_ty: Ty<'tcx>, + ) { + if !checked_ty.is_unit() { + return; + } + let hir::ExprKind::Path(hir::QPath::Resolved(None, path)) = expr.kind else { + return; + }; + let hir::def::Res::Local(hir_id) = path.res else { + return; + }; + let Some(hir::Node::Pat(pat)) = self.tcx.hir().find(hir_id) else { + return; + }; + let Some(hir::Node::Local(hir::Local { ty: None, init: Some(init), .. })) = + self.tcx.hir().find_parent(pat.hir_id) + else { + return; + }; + let hir::ExprKind::Block(block, None) = init.kind else { + return; + }; + if block.expr.is_some() { + return; + } + let [.., stmt] = block.stmts else { + err.span_label(block.span, "this empty block is missing a tail expression"); + return; + }; + let hir::StmtKind::Semi(tail_expr) = stmt.kind else { + return; + }; + let Some(ty) = self.node_ty_opt(tail_expr.hir_id) else { + return; + }; + if self.can_eq(self.param_env, expected_ty, ty) { + err.span_suggestion_short( + stmt.span.with_lo(tail_expr.span.hi()), + "remove this semicolon", + "", + Applicability::MachineApplicable, + ); + } else { + err.span_label(block.span, "this block is missing a tail expression"); + } + } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/inherited.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/inherited.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/inherited.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/inherited.rs 2023-12-21 16:55:28.000000000 +0000 @@ -55,8 +55,8 @@ pub(super) deferred_asm_checks: RefCell, hir::HirId)>>, - pub(super) deferred_generator_interiors: - RefCell, hir::GeneratorKind)>>, + pub(super) deferred_coroutine_interiors: + RefCell, hir::CoroutineKind)>>, /// Whenever we introduce an adjustment from `!` into a type variable, /// we record that type variable here. 
This is later used to inform @@ -94,7 +94,7 @@ deferred_cast_checks: RefCell::new(Vec::new()), deferred_transmute_checks: RefCell::new(Vec::new()), deferred_asm_checks: RefCell::new(Vec::new()), - deferred_generator_interiors: RefCell::new(Vec::new()), + deferred_coroutine_interiors: RefCell::new(Vec::new()), diverging_type_vars: RefCell::new(Default::default()), infer_var_info: RefCell::new(Default::default()), } @@ -129,25 +129,29 @@ let infer_var_info = &mut self.infer_var_info.borrow_mut(); // (*) binder skipped - if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(tpred)) = obligation.predicate.kind().skip_binder() - && let Some(ty) = self.shallow_resolve(tpred.self_ty()).ty_vid().map(|t| self.root_var(t)) + if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(tpred)) = + obligation.predicate.kind().skip_binder() + && let Some(ty) = + self.shallow_resolve(tpred.self_ty()).ty_vid().map(|t| self.root_var(t)) && self.tcx.lang_items().sized_trait().is_some_and(|st| st != tpred.trait_ref.def_id) { let new_self_ty = self.tcx.types.unit; // Then construct a new obligation with Self = () added // to the ParamEnv, and see if it holds. - let o = obligation.with(self.tcx, - obligation - .predicate - .kind() - .rebind( - // (*) binder moved here - ty::PredicateKind::Clause(ty::ClauseKind::Trait(tpred.with_self_ty(self.tcx, new_self_ty))) - ), + let o = obligation.with( + self.tcx, + obligation.predicate.kind().rebind( + // (*) binder moved here + ty::PredicateKind::Clause(ty::ClauseKind::Trait( + tpred.with_self_ty(self.tcx, new_self_ty), + )), + ), ); // Don't report overflow errors. Otherwise equivalent to may_hold. - if let Ok(result) = self.probe(|_| self.evaluate_obligation(&o)) && result.may_apply() { + if let Ok(result) = self.probe(|_| self.evaluate_obligation(&o)) + && result.may_apply() + { infer_var_info.entry(ty).or_default().self_in_trait = true; } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/intrinsicck.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/intrinsicck.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/intrinsicck.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/intrinsicck.rs 2023-12-21 16:55:28.000000000 +0000 @@ -70,7 +70,9 @@ // Special-case transmuting from `typeof(function)` and // `Option` to present a clearer error. 
let from = unpack_option_like(tcx, from); - if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (from.kind(), sk_to) && size_to == Pointer(dl.instruction_address_space).size(&tcx) { + if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (from.kind(), sk_to) + && size_to == Pointer(dl.instruction_address_space).size(&tcx) + { struct_span_err!(tcx.sess, span, E0591, "can't transmute zero-sized type") .note(format!("source type: {from}")) .note(format!("target type: {to}")) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,7 +5,6 @@ #![feature(box_patterns)] #![feature(min_specialization)] #![feature(control_flow_enum)] -#![feature(option_as_slice)] #![recursion_limit = "256"] #[macro_use] @@ -256,11 +255,11 @@ fcx.check_casts(); fcx.select_obligations_where_possible(|_| {}); - // Closure and generator analysis may run after fallback + // Closure and coroutine analysis may run after fallback // because they don't constrain other type variables. fcx.closure_analyze(body); assert!(fcx.deferred_call_resolutions.borrow().is_empty()); - // Before the generator analysis, temporary scopes shall be marked to provide more + // Before the coroutine analysis, temporary scopes shall be marked to provide more // precise information on types to be captured. fcx.resolve_rvalue_scopes(def_id.to_def_id()); @@ -274,7 +273,7 @@ debug!(pending_obligations = ?fcx.fulfillment_cx.borrow().pending_obligations()); // This must be the last thing before `report_ambiguity_errors`. - fcx.resolve_generator_interiors(def_id.to_def_id()); + fcx.resolve_coroutine_interiors(def_id.to_def_id()); debug!(pending_obligations = ?fcx.fulfillment_cx.borrow().pending_obligations()); @@ -299,20 +298,20 @@ typeck_results } -/// When `check_fn` is invoked on a generator (i.e., a body that +/// When `check_fn` is invoked on a coroutine (i.e., a body that /// includes yield), it returns back some information about the yield /// points. -struct GeneratorTypes<'tcx> { - /// Type of generator argument / values returned by `yield`. +struct CoroutineTypes<'tcx> { + /// Type of coroutine argument / values returned by `yield`. resume_ty: Ty<'tcx>, /// Type of value that is yielded. yield_ty: Ty<'tcx>, - /// Types that are captured (see `GeneratorInterior` for more). + /// Types that are captured (see `CoroutineInterior` for more). interior: Ty<'tcx>, - /// Indicates if the generator is movable or static (immovable). + /// Indicates if the coroutine is movable or static (immovable). 
movability: hir::Movability, } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -7,7 +7,7 @@ pub mod probe; mod suggest; -pub use self::suggest::{MethodCallComponents, SelfSource}; +pub use self::suggest::SelfSource; pub use self::MethodError::*; use crate::errors::OpMethodGenericParams; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/probe.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/probe.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/probe.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/probe.rs 2023-12-21 16:55:28.000000000 +0000 @@ -667,8 +667,7 @@ // will still match the original object type, but it won't pollute our // type variables in any form, so just do that! let (QueryResponse { value: generalized_self_ty, .. }, _ignored_var_values) = - self.fcx - .instantiate_canonical_with_fresh_inference_vars(self.span, self_ty); + self.fcx.instantiate_canonical_with_fresh_inference_vars(self.span, self_ty); self.assemble_inherent_candidates_from_object(generalized_self_ty); self.assemble_inherent_impl_candidates_for_type(p.def_id()); @@ -1690,15 +1689,12 @@ } } - debug!( - "comparing return_ty {:?} with xform ret ty {:?}", - return_ty, xform_ret_ty - ); + debug!("comparing return_ty {:?} with xform ret ty {:?}", return_ty, xform_ret_ty); if let ProbeResult::Match = result && self - .at(&ObligationCause::dummy(), self.param_env) - .sup(DefineOpaqueTypes::No, return_ty, xform_ret_ty) - .is_err() + .at(&ObligationCause::dummy(), self.param_env) + .sup(DefineOpaqueTypes::No, return_ty, xform_ret_ty) + .is_err() { result = ProbeResult::BadReturnType; } @@ -1959,15 +1955,18 @@ if let Some(nested) = v.meta_item_list() { // #[doc(alias("foo", "bar"))] for n in nested { - if let Some(lit) = n.lit() && name.as_str() == lit.symbol.as_str() { + if let Some(lit) = n.lit() + && name.as_str() == lit.symbol.as_str() + { return true; } } } else if let Some(meta) = v.meta_item() && let Some(lit) = meta.name_value_literal() - && name.as_str() == lit.symbol.as_str() { - // #[doc(alias = "foo")] - return true; + && name.as_str() == lit.symbol.as_str() + { + // #[doc(alias = "foo")] + return true; } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/suggest.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/suggest.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/suggest.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/method/suggest.rs 2023-12-21 16:55:28.000000000 +0000 @@ -26,7 +26,6 @@ RegionVariableOrigin, }; use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind}; -use rustc_middle::traits::util::supertraits; use rustc_middle::ty::fast_reject::DeepRejectCtxt; use rustc_middle::ty::fast_reject::{simplify_type, TreatParams}; use rustc_middle::ty::print::{with_crate_prefix, with_forced_trimmed_paths}; @@ -35,12 +34,13 @@ use rustc_span::def_id::DefIdSet; use rustc_span::symbol::{kw, sym, Ident}; use rustc_span::Symbol; -use rustc_span::{edit_distance, 
source_map, ExpnKind, FileName, MacroKind, Span}; +use rustc_span::{edit_distance, ExpnKind, FileName, MacroKind, Span}; +use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::error_reporting::on_unimplemented::OnUnimplementedNote; use rustc_trait_selection::traits::error_reporting::on_unimplemented::TypeErrCtxtExt as _; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _; use rustc_trait_selection::traits::{ - FulfillmentError, Obligation, ObligationCause, ObligationCauseCode, + supertraits, FulfillmentError, Obligation, ObligationCause, ObligationCauseCode, }; use std::borrow::Cow; @@ -50,15 +50,6 @@ use std::cmp::{self, Ordering}; use std::iter; -/// After identifying that `full_expr` is a method call, we use this type to keep the expression's -/// components readily available to us to point at the right place in diagnostics. -#[derive(Debug, Clone, Copy)] -pub struct MethodCallComponents<'tcx> { - pub receiver: &'tcx hir::Expr<'tcx>, - pub args: &'tcx [hir::Expr<'tcx>], - pub full_expr: &'tcx hir::Expr<'tcx>, -} - impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn is_fn_ty(&self, ty: Ty<'tcx>, span: Span) -> bool { let tcx = self.tcx; @@ -124,7 +115,7 @@ item_name: Ident, source: SelfSource<'tcx>, error: MethodError<'tcx>, - args: Option>, + args: Option<&'tcx [hir::Expr<'tcx>]>, expected: Expectation<'tcx>, trait_missing_method: bool, ) -> Option> { @@ -167,6 +158,7 @@ self.note_candidates_on_method_error( rcvr_ty, item_name, + source, args, span, &mut err, @@ -193,7 +185,7 @@ .span_if_local(def_id) .unwrap_or_else(|| self.tcx.def_span(def_id)); err.span_label(sp, format!("private {kind} defined here")); - self.suggest_valid_traits(&mut err, out_of_scope_traits); + self.suggest_valid_traits(&mut err, out_of_scope_traits, true); err.emit(); } @@ -266,23 +258,23 @@ fn suggest_missing_writer( &self, rcvr_ty: Ty<'tcx>, - args: MethodCallComponents<'tcx>, + rcvr_expr: &hir::Expr<'tcx>, ) -> DiagnosticBuilder<'_, ErrorGuaranteed> { let (ty_str, _ty_file) = self.tcx.short_ty_string(rcvr_ty); let mut err = struct_span_err!( self.tcx.sess, - args.receiver.span, + rcvr_expr.span, E0599, "cannot write into `{}`", ty_str ); err.span_note( - args.receiver.span, + rcvr_expr.span, "must implement `io::Write`, `fmt::Write`, or have a `write_fmt` method", ); - if let ExprKind::Lit(_) = args.receiver.kind { + if let ExprKind::Lit(_) = rcvr_expr.kind { err.span_help( - args.receiver.span.shrink_to_lo(), + rcvr_expr.span.shrink_to_lo(), "a writer is needed before this format string", ); }; @@ -296,7 +288,7 @@ rcvr_ty: Ty<'tcx>, item_name: Ident, source: SelfSource<'tcx>, - args: Option>, + args: Option<&'tcx [hir::Expr<'tcx>]>, sugg_span: Span, no_match_data: &mut NoMatchData<'tcx>, expected: Expectation<'tcx>, @@ -305,8 +297,8 @@ let mode = no_match_data.mode; let tcx = self.tcx; let rcvr_ty = self.resolve_vars_if_possible(rcvr_ty); - let ((mut ty_str, ty_file), short_ty_str) = if trait_missing_method - && let ty::Dynamic(predicates, _, _) = rcvr_ty.kind() { + let ((mut ty_str, ty_file), short_ty_str) = + if trait_missing_method && let ty::Dynamic(predicates, _, _) = rcvr_ty.kind() { ((predicates.to_string(), None), with_forced_trimmed_paths!(predicates.to_string())) } else { (tcx.short_ty_string(rcvr_ty), with_forced_trimmed_paths!(rcvr_ty.to_string())) @@ -377,25 +369,25 @@ tcx.is_diagnostic_item(sym::write_macro, def_id) || tcx.is_diagnostic_item(sym::writeln_macro, def_id) }) && item_name.name == Symbol::intern("write_fmt"); - let mut err = if is_write - 
&& let Some(args) = args - { - self.suggest_missing_writer(rcvr_ty, args) - } else { - tcx.sess.create_err(NoAssociatedItem { - span, - item_kind, - item_name, - ty_prefix: if trait_missing_method { - // FIXME(mu001999) E0599 maybe not suitable here because it is for types - Cow::from("trait") - } else { - rcvr_ty.prefix_string(self.tcx) - }, - ty_str: ty_str_reported, - trait_missing_method, - }) - }; + let mut err = + if is_write && let SelfSource::MethodCall(rcvr_expr) = source + { + self.suggest_missing_writer(rcvr_ty, rcvr_expr) + } else { + tcx.sess.create_err(NoAssociatedItem { + span, + item_kind, + item_name, + ty_prefix: if trait_missing_method { + // FIXME(mu001999) E0599 maybe not suitable here because it is for types + Cow::from("trait") + } else { + rcvr_ty.prefix_string(self.tcx) + }, + ty_str: ty_str_reported, + trait_missing_method, + }) + }; if tcx.sess.source_map().is_multiline(sugg_span) { err.span_label(sugg_span.with_hi(span.lo()), ""); } @@ -411,6 +403,10 @@ err.downgrade_to_delayed_bug(); } + if matches!(source, SelfSource::QPath(_)) && args.is_some() { + self.find_builder_fn(&mut err, rcvr_ty); + } + if tcx.ty_is_opaque_future(rcvr_ty) && item_name.name == sym::poll { err.help(format!( "method `poll` found on `Pin<&mut {ty_str}>`, \ @@ -421,9 +417,16 @@ ); } - if let Mode::MethodCall = mode && let SelfSource::MethodCall(cal) = source { + if let Mode::MethodCall = mode + && let SelfSource::MethodCall(cal) = source + { self.suggest_await_before_method( - &mut err, item_name, rcvr_ty, cal, span, expected.only_has_type(self), + &mut err, + item_name, + rcvr_ty, + cal, + span, + expected.only_has_type(self), ); } if let Some(span) = @@ -478,7 +481,6 @@ ); probe.is_ok() }); - self.note_internal_mutation_in_method( &mut err, rcvr_expr, @@ -517,6 +519,7 @@ self.note_candidates_on_method_error( rcvr_ty, item_name, + source, args, span, &mut err, @@ -527,6 +530,7 @@ self.note_candidates_on_method_error( rcvr_ty, item_name, + source, args, span, &mut err, @@ -669,7 +673,7 @@ ); let quiet_projection_ty = - tcx.mk_alias_ty(projection_ty.def_id, args_with_infer_self); + ty::AliasTy::new(tcx, projection_ty.def_id, args_with_infer_self); let term = pred.skip_binder().term; @@ -863,7 +867,9 @@ .filter_map(|(pred, parent_pred, _cause)| { let mut suggested = false; format_pred(*pred).map(|(p, self_ty)| { - if let Some(parent) = parent_pred && suggested_bounds.contains(parent) { + if let Some(parent) = parent_pred + && suggested_bounds.contains(parent) + { // We don't suggest `PartialEq` when we already suggest `Eq`. } else if !suggested_bounds.contains(pred) { if collect_type_param_suggestions(self_ty, *pred, &p) { @@ -967,7 +973,9 @@ unsatisfied_bounds = true; } - } else if let ty::Adt(def, targs) = rcvr_ty.kind() && let Some(args) = args { + } else if let ty::Adt(def, targs) = rcvr_ty.kind() + && let SelfSource::MethodCall(rcvr_expr) = source + { // This is useful for methods on arbitrary self types that might have a simple // mutability difference, like calling a method on `Pin<&mut Self>` that is on // `Pin<&Self>`. 
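// Illustrative user-side view (assumed code) of the `suggest_missing_writer` path
// touched above: `write!`/`writeln!` need a writer as the first argument, e.g. a
// `String` via `fmt::Write`, rather than a bare format string. A call like
// `writeln!("hello")` is the shape reported as "cannot write into ..." with the help
// "a writer is needed before this format string".
use std::fmt::Write as _;

fn main() {
    let mut out = String::new();
    writeln!(out, "hello").unwrap();
    assert_eq!(out, "hello\n");
}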
@@ -975,23 +983,22 @@ let mut item_segment = hir::PathSegment::invalid(); item_segment.ident = item_name; for t in [Ty::new_mut_ref, Ty::new_imm_ref, |_, _, t| t] { - let new_args = tcx.mk_args_from_iter( - targs - .iter() - .map(|arg| match arg.as_type() { - Some(ty) => ty::GenericArg::from( - t(tcx, tcx.lifetimes.re_erased, ty.peel_refs()), - ), - _ => arg, - }) - ); + let new_args = + tcx.mk_args_from_iter(targs.iter().map(|arg| match arg.as_type() { + Some(ty) => ty::GenericArg::from(t( + tcx, + tcx.lifetimes.re_erased, + ty.peel_refs(), + )), + _ => arg, + })); let rcvr_ty = Ty::new_adt(tcx, *def, new_args); if let Ok(method) = self.lookup_method_for_diagnostic( rcvr_ty, &item_segment, span, - args.full_expr, - args.receiver, + tcx.hir().get_parent(rcvr_expr.hir_id).expect_expr(), + rcvr_expr, ) { err.span_note( tcx.def_span(method.def_id), @@ -1088,7 +1095,9 @@ for inherent_method in self.tcx.associated_items(inherent_impl_did).in_definition_order() { - if let Some(attr) = self.tcx.get_attr(inherent_method.def_id, sym::rustc_confusables) + if let Some(attr) = self + .tcx + .get_attr(inherent_method.def_id, sym::rustc_confusables) && let Some(candidates) = parse_confusables(attr) && candidates.contains(&item_name.name) { @@ -1158,7 +1167,7 @@ span, rcvr_ty, item_name, - args.map(|MethodCallComponents { args, .. }| args.len() + 1), + args.map(|args| args.len() + 1), source, no_match_data.out_of_scope_traits.clone(), &unsatisfied_predicates, @@ -1230,7 +1239,40 @@ } } } + // If an appropriate error source is not found, check method chain for possible candiates + if unsatisfied_predicates.is_empty() && let Mode::MethodCall = mode && let SelfSource::MethodCall(mut source_expr) = source { + let mut stack_methods = vec![]; + while let hir::ExprKind::MethodCall(_path_segment, rcvr_expr, _args, method_span) = + source_expr.kind + { + // Pop the matching receiver, to align on it's notional span + if let Some(prev_match) = stack_methods.pop() { + err.span_label(method_span, format!("{item_kind} `{item_name}` is available on `{prev_match}`")); + } + let rcvr_ty = self.resolve_vars_if_possible( + self.typeck_results + .borrow() + .expr_ty_adjusted_opt(rcvr_expr) + .unwrap_or(Ty::new_misc_error(self.tcx)),); + for _matched_method in self.probe_for_name_many( + Mode::MethodCall, + item_name, + None, + IsSuggestion(true), + rcvr_ty, + source_expr.hir_id, + ProbeScope::TraitsInScope,) { + // found a match, push to stack + stack_methods.push(rcvr_ty); + } + source_expr = rcvr_expr; + } + // If there is a match at the start of the chain, add a label for it too! + if let Some(prev_match) = stack_methods.pop() { + err.span_label(source_expr.span, format!("{item_kind} `{item_name}` is available on `{prev_match}`")); + } + } self.note_derefed_ty_has_method(&mut err, source, rcvr_ty, item_name, expected); return Some(err); } @@ -1239,7 +1281,8 @@ &self, rcvr_ty: Ty<'tcx>, item_name: Ident, - args: Option>, + self_source: SelfSource<'tcx>, + args: Option<&'tcx [hir::Expr<'tcx>]>, span: Span, err: &mut Diagnostic, sources: &mut Vec, @@ -1250,6 +1293,7 @@ // Dynamic limit to avoid hiding just one candidate, which is silly. 
let limit = if sources.len() == 5 { 5 } else { 4 }; + let mut suggs = vec![]; for (idx, source) in sources.iter().take(limit).enumerate() { match *source { CandidateSource::Impl(impl_did) => { @@ -1307,35 +1351,22 @@ err.note(note_str); } if let Some(sugg_span) = sugg_span - && let Some(trait_ref) = self.tcx.impl_trait_ref(impl_did) { - let path = self.tcx.def_path_str(trait_ref.skip_binder().def_id); - - let ty = match item.kind { - ty::AssocKind::Const | ty::AssocKind::Type => rcvr_ty, - ty::AssocKind::Fn => self - .tcx - .fn_sig(item.def_id) - .instantiate_identity() - .inputs() - .skip_binder() - .get(0) - .filter(|ty| ty.is_ref() && !rcvr_ty.is_ref()) - .copied() - .unwrap_or(rcvr_ty), - }; - print_disambiguation_help( - item_name, - args, + && let Some(trait_ref) = self.tcx.impl_trait_ref(impl_did) + && let Some(sugg) = print_disambiguation_help( + self.tcx, err, - path, - ty, - item.kind, - self.tcx.def_kind_descr(item.kind.as_def_kind(), item.def_id), - sugg_span, + self_source, + args, + trait_ref.instantiate( + self.tcx, + self.fresh_args_for_item(sugg_span, impl_did) + ).with_self_ty(self.tcx, rcvr_ty), idx, - self.tcx.sess.source_map(), - item.fn_has_self_parameter, - ); + sugg_span, + item, + ) + { + suggs.push(sugg); } } CandidateSource::Trait(trait_did) => { @@ -1357,30 +1388,119 @@ err.span_note(item_span, msg); None }; - if let Some(sugg_span) = sugg_span { - let path = self.tcx.def_path_str(trait_did); - print_disambiguation_help( - item_name, - args, + if let Some(sugg_span) = sugg_span + && let Some(sugg) = print_disambiguation_help( + self.tcx, err, - path, - rcvr_ty, - item.kind, - self.tcx.def_kind_descr(item.kind.as_def_kind(), item.def_id), - sugg_span, + self_source, + args, + ty::TraitRef::new( + self.tcx, + trait_did, + self.fresh_args_for_item(sugg_span, trait_did) + ).with_self_ty(self.tcx, rcvr_ty), idx, - self.tcx.sess.source_map(), - item.fn_has_self_parameter, - ); + sugg_span, + item, + ) + { + suggs.push(sugg); } } } } + if !suggs.is_empty() && let Some(span) = sugg_span { + err.span_suggestions( + span.with_hi(item_name.span.lo()), + "use fully-qualified syntax to disambiguate", + suggs, + Applicability::MachineApplicable, + ); + } if sources.len() > limit { err.note(format!("and {} others", sources.len() - limit)); } } + /// Look at all the associated functions without receivers in the type's inherent impls + /// to look for builders that return `Self`, `Option` or `Result`. + fn find_builder_fn(&self, err: &mut Diagnostic, rcvr_ty: Ty<'tcx>) { + let ty::Adt(adt_def, _) = rcvr_ty.kind() else { + return; + }; + let mut items = self + .tcx + .inherent_impls(adt_def.did()) + .iter() + .flat_map(|i| self.tcx.associated_items(i).in_definition_order()) + // Only assoc fn with no receivers. + .filter(|item| matches!(item.kind, ty::AssocKind::Fn) && !item.fn_has_self_parameter) + .filter_map(|item| { + // Only assoc fns that return `Self`, `Option` or `Result`. 
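// Hedged sketch (assumed user code; names are invented) of the situation the builder
// lookup above serves: `Config::new(..)` does not exist, but an inherent associated
// function that returns `Option<Config>` does, so the new note can point the user at it.
struct Config {
    verbose: bool,
}

impl Config {
    fn from_args(args: &[String]) -> Option<Config> {
        Some(Config { verbose: args.iter().any(|a| a == "-v") })
    }
}

fn main() {
    // Calling a non-existent `Config::new(..)` with arguments would be E0599; the note
    // points at `Config::from_args`, which returns `Option<Config>`.
    let cfg = Config::from_args(&["-v".to_string()]);
    assert!(cfg.map_or(false, |c| c.verbose));
}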
+ let ret_ty = self.tcx.fn_sig(item.def_id).skip_binder().output(); + let ret_ty = self.tcx.erase_late_bound_regions(ret_ty); + let ty::Adt(def, args) = ret_ty.kind() else { + return None; + }; + // Check for `-> Self` + if self.can_eq(self.param_env, ret_ty, rcvr_ty) { + return Some((item.def_id, ret_ty)); + } + // Check for `-> Option` or `-> Result` + if ![self.tcx.lang_items().option_type(), self.tcx.get_diagnostic_item(sym::Result)] + .contains(&Some(def.did())) + { + return None; + } + let arg = args.get(0)?.expect_ty(); + if self.can_eq(self.param_env, rcvr_ty, arg) { + Some((item.def_id, ret_ty)) + } else { + None + } + }) + .collect::>(); + let post = if items.len() > 5 { + let items_len = items.len(); + items.truncate(4); + format!("\nand {} others", items_len - 4) + } else { + String::new() + }; + match &items[..] { + [] => {} + [(def_id, ret_ty)] => { + err.span_note( + self.tcx.def_span(def_id), + format!( + "if you're trying to build a new `{rcvr_ty}`, consider using `{}` which \ + returns `{ret_ty}`", + self.tcx.def_path_str(def_id), + ), + ); + } + _ => { + let span: MultiSpan = items + .iter() + .map(|(def_id, _)| self.tcx.def_span(def_id)) + .collect::>() + .into(); + err.span_note( + span, + format!( + "if you're trying to build a new `{rcvr_ty}` consider using one of the \ + following associated functions:\n{}{post}", + items + .iter() + .map(|(def_id, _ret_ty)| self.tcx.def_path_str(def_id)) + .collect::>() + .join("\n") + ), + ); + } + } + } + /// Suggest calling `Ty::method` if `.method()` isn't found because the method /// doesn't take a `self` receiver. fn suggest_associated_call_syntax( @@ -1390,7 +1510,7 @@ rcvr_ty: Ty<'tcx>, source: SelfSource<'tcx>, item_name: Ident, - args: Option>, + args: Option<&'tcx [hir::Expr<'tcx>]>, sugg_span: Span, ) { let mut has_unsuggestable_args = false; @@ -1453,47 +1573,51 @@ && assoc.kind == ty::AssocKind::Fn { let sig = self.tcx.fn_sig(assoc.def_id).instantiate_identity(); - sig.inputs().skip_binder().get(0).and_then(|first| if first.peel_refs() == rcvr_ty.peel_refs() { - None - } else { - Some(first.ref_mutability().map_or("", |mutbl| mutbl.ref_prefix_str())) + sig.inputs().skip_binder().get(0).and_then(|first| { + if first.peel_refs() == rcvr_ty.peel_refs() { + None + } else { + Some(first.ref_mutability().map_or("", |mutbl| mutbl.ref_prefix_str())) + } }) } else { None }; let mut applicability = Applicability::MachineApplicable; - let args = if let Some(MethodCallComponents { receiver, args, .. 
}) = args { - // The first arg is the same kind as the receiver - let explicit_args = if first_arg.is_some() { - std::iter::once(receiver).chain(args.iter()).collect::>() + let args = if let SelfSource::MethodCall(receiver) = source + && let Some(args) = args + { + // The first arg is the same kind as the receiver + let explicit_args = if first_arg.is_some() { + std::iter::once(receiver).chain(args.iter()).collect::>() + } else { + // There is no `Self` kind to infer the arguments from + if has_unsuggestable_args { + applicability = Applicability::HasPlaceholders; + } + args.iter().collect() + }; + format!( + "({}{})", + first_arg.unwrap_or(""), + explicit_args + .iter() + .map(|arg| self + .tcx + .sess + .source_map() + .span_to_snippet(arg.span) + .unwrap_or_else(|_| { + applicability = Applicability::HasPlaceholders; + "_".to_owned() + })) + .collect::>() + .join(", "), + ) } else { - // There is no `Self` kind to infer the arguments from - if has_unsuggestable_args { - applicability = Applicability::HasPlaceholders; - } - args.iter().collect() + applicability = Applicability::HasPlaceholders; + "(...)".to_owned() }; - format!( - "({}{})", - first_arg.unwrap_or(""), - explicit_args - .iter() - .map(|arg| self - .tcx - .sess - .source_map() - .span_to_snippet(arg.span) - .unwrap_or_else(|_| { - applicability = Applicability::HasPlaceholders; - "_".to_owned() - })) - .collect::>() - .join(", "), - ) - } else { - applicability = Applicability::HasPlaceholders; - "(...)".to_owned() - }; err.span_suggestion( sugg_span, "use associated function syntax instead", @@ -1616,7 +1740,11 @@ continue; } - let range_def_id = self.tcx.require_lang_item(lang_item.unwrap(), None); + let Some(range_def_id) = + lang_item.and_then(|lang_item| self.tcx.lang_items().get(lang_item)) + else { + continue; + }; let range_ty = self.tcx.type_of(range_def_id).instantiate(self.tcx, &[actual.into()]); @@ -1725,8 +1853,7 @@ let span = tcx.hir().span(hir_id); let filename = tcx.sess.source_map().span_to_filename(span); - let parent_node = - self.tcx.hir().get_parent(hir_id); + let parent_node = self.tcx.hir().get_parent(hir_id); let msg = format!( "you must specify a type for this binding, like `{concrete_type}`", ); @@ -1740,7 +1867,9 @@ .. 
}), ) => { - let type_span = ty.map(|ty| ty.span.with_lo(span.hi())).unwrap_or(span.shrink_to_hi()); + let type_span = ty + .map(|ty| ty.span.with_lo(span.hi())) + .unwrap_or(span.shrink_to_hi()); err.span_suggestion( // account for `let x: _ = 42;` // ^^^ @@ -1839,9 +1968,9 @@ return_type: Option>, ) { if let SelfSource::MethodCall(expr) = source - && let mod_id = self.tcx.parent_module(expr.hir_id).to_def_id() - && let Some((fields, args)) = - self.get_field_candidates_considering_privacy(span, actual, mod_id) + && let mod_id = self.tcx.parent_module(expr.hir_id).to_def_id() + && let Some((fields, args)) = + self.get_field_candidates_considering_privacy(span, actual, mod_id) { let call_expr = self.tcx.hir().expect_expr(self.tcx.hir().parent_id(expr.hir_id)); @@ -2320,7 +2449,14 @@ // <&[_]>::len or <&[u32]>::len doesn't need an extra "<>" between // but for Adt type like Vec::function() // we would suggest <[_]>::function(); - _ if self.tcx.sess.source_map().span_wrapped_by_angle_or_parentheses(ty.span) => format!("{deref_ty}"), + _ if self + .tcx + .sess + .source_map() + .span_wrapped_by_angle_or_parentheses(ty.span) => + { + format!("{deref_ty}") + } _ => format!("<{deref_ty}>"), }; err.span_suggestion_verbose( @@ -2425,6 +2561,7 @@ &self, err: &mut Diagnostic, valid_out_of_scope_traits: Vec, + explain: bool, ) -> bool { if !valid_out_of_scope_traits.is_empty() { let mut candidates = valid_out_of_scope_traits; @@ -2437,7 +2574,9 @@ .find(|did| self.tcx.is_diagnostic_item(sym::TryInto, **did)) .copied(); - err.help("items from traits can only be used if the trait is in scope"); + if explain { + err.help("items from traits can only be used if the trait is in scope"); + } let msg = format!( "the following {traits_are} implemented but not in scope; \ perhaps add a `use` for {one_of_them}:", @@ -2494,10 +2633,18 @@ // Try alternative arbitrary self types that could fulfill this call. // FIXME: probe for all types that *could* be arbitrary self-types, not // just this list. - for (rcvr_ty, post) in &[ - (rcvr_ty, ""), - (Ty::new_mut_ref(self.tcx, self.tcx.lifetimes.re_erased, rcvr_ty), "&mut "), - (Ty::new_imm_ref(self.tcx, self.tcx.lifetimes.re_erased, rcvr_ty), "&"), + for (rcvr_ty, post, pin_call) in &[ + (rcvr_ty, "", None), + ( + Ty::new_mut_ref(self.tcx, self.tcx.lifetimes.re_erased, rcvr_ty), + "&mut ", + Some("as_mut"), + ), + ( + Ty::new_imm_ref(self.tcx, self.tcx.lifetimes.re_erased, rcvr_ty), + "&", + Some("as_ref"), + ), ] { match self.lookup_probe_for_diagnostic( item_name, @@ -2531,6 +2678,16 @@ Err(_) => (), } + let Some(unpin_trait) = self.tcx.lang_items().unpin_trait() else { + return; + }; + let pred = ty::TraitRef::new(self.tcx, unpin_trait, [*rcvr_ty]); + let unpin = self.predicate_must_hold_considering_regions(&Obligation::new( + self.tcx, + ObligationCause::misc(rcvr.span, self.body_id), + self.param_env, + pred, + )); for (rcvr_ty, pre) in &[ (Ty::new_lang_item(self.tcx, *rcvr_ty, LangItem::OwnedBox), "Box::new"), (Ty::new_lang_item(self.tcx, *rcvr_ty, LangItem::Pin), "Pin::new"), @@ -2554,8 +2711,19 @@ // Explicitly ignore the `Pin::as_ref()` method as `Pin` does not // implement the `AsRef` trait. 
let skip = skippable.contains(&did) - || (("Pin::new" == *pre) && (sym::as_ref == item_name.name)) - || inputs_len.is_some_and(|inputs_len| pick.item.kind == ty::AssocKind::Fn && self.tcx.fn_sig(pick.item.def_id).skip_binder().skip_binder().inputs().len() != inputs_len); + || (("Pin::new" == *pre) + && ((sym::as_ref == item_name.name) || !unpin)) + || inputs_len.is_some_and(|inputs_len| { + pick.item.kind == ty::AssocKind::Fn + && self + .tcx + .fn_sig(pick.item.def_id) + .skip_binder() + .skip_binder() + .inputs() + .len() + != inputs_len + }); // Make sure the method is defined for the *actual* receiver: we don't // want to treat `Box` as a receiver if it only works because of // an autoderef to `&self` @@ -2566,7 +2734,7 @@ ); err.multipart_suggestion( "consider wrapping the receiver expression with the \ - appropriate type", + appropriate type", vec![ (rcvr.span.shrink_to_lo(), format!("{pre}({post}")), (rcvr.span.shrink_to_hi(), ")".to_string()), @@ -2578,9 +2746,54 @@ } } } + // We special case the situation where `Pin::new` wouldn't work, and instead + // suggest using the `pin!()` macro instead. + if let Some(new_rcvr_t) = Ty::new_lang_item(self.tcx, *rcvr_ty, LangItem::Pin) + // We didn't find an alternative receiver for the method. + && !alt_rcvr_sugg + // `T: !Unpin` + && !unpin + // The method isn't `as_ref`, as it would provide a wrong suggestion for `Pin`. + && sym::as_ref != item_name.name + // Either `Pin::as_ref` or `Pin::as_mut`. + && let Some(pin_call) = pin_call + // Search for `item_name` as a method accessible on `Pin`. + && let Ok(pick) = self.lookup_probe_for_diagnostic( + item_name, + new_rcvr_t, + rcvr, + ProbeScope::AllTraits, + return_type, + ) + // We skip some common traits that we don't want to consider because autoderefs + // would take care of them. + && !skippable.contains(&Some(pick.item.container_id(self.tcx))) + // We don't want to go through derefs. + && pick.autoderefs == 0 + // Check that the method of the same name that was found on the new `Pin` + // receiver has the same number of arguments that appear in the user's code. + && inputs_len.is_some_and(|inputs_len| pick.item.kind == ty::AssocKind::Fn && self.tcx.fn_sig(pick.item.def_id).skip_binder().skip_binder().inputs().len() == inputs_len) + { + let indent = self + .tcx + .sess + .source_map() + .indentation_before(rcvr.span) + .unwrap_or_else(|| " ".to_string()); + err.multipart_suggestion( + "consider pinning the expression", + vec![ + (rcvr.span.shrink_to_lo(), format!("let mut pinned = std::pin::pin!(")), + (rcvr.span.shrink_to_hi(), format!(");\n{indent}pinned.{pin_call}()")), + ], + Applicability::MaybeIncorrect, + ); + // We don't care about the other suggestions. + alt_rcvr_sugg = true; + } } } - if self.suggest_valid_traits(err, valid_out_of_scope_traits) { + if self.suggest_valid_traits(err, valid_out_of_scope_traits, true) { return; } @@ -2857,22 +3070,39 @@ (candidates, Vec::new()) }; + let impls_trait = |def_id: DefId| { + let args = ty::GenericArgs::for_item(self.tcx, def_id, |param, _| { + if param.index == 0 { + rcvr_ty.into() + } else { + self.infcx.var_for_def(span, param) + } + }); + self.infcx + .type_implements_trait(def_id, args, self.param_env) + .must_apply_modulo_regions() + && param_type.is_none() + }; match &potential_candidates[..] 
{ [] => {} [trait_info] if trait_info.def_id.is_local() => { - err.subdiagnostic(CandidateTraitNote { - span: self.tcx.def_span(trait_info.def_id), - trait_name: self.tcx.def_path_str(trait_info.def_id), - item_name, - action_or_ty: if trait_missing_method { - "NONE".to_string() - } else { - param_type.map_or_else( - || "implement".to_string(), // FIXME: it might only need to be imported into scope, not implemented. - ToString::to_string, - ) - }, - }); + if impls_trait(trait_info.def_id) { + self.suggest_valid_traits(err, vec![trait_info.def_id], false); + } else { + err.subdiagnostic(CandidateTraitNote { + span: self.tcx.def_span(trait_info.def_id), + trait_name: self.tcx.def_path_str(trait_info.def_id), + item_name, + action_or_ty: if trait_missing_method { + "NONE".to_string() + } else { + param_type.map_or_else( + || "implement".to_string(), // FIXME: it might only need to be imported into scope, not implemented. + ToString::to_string, + ) + }, + }); + } } trait_infos => { let mut msg = message(param_type.map_or_else( @@ -2880,6 +3110,9 @@ |param| format!("restrict type parameter `{param}` with"), )); for (i, trait_info) in trait_infos.iter().enumerate() { + if impls_trait(trait_info.def_id) { + self.suggest_valid_traits(err, vec![trait_info.def_id], false); + } msg.push_str(&format!( "\ncandidate #{}: `{}`", i + 1, @@ -2930,14 +3163,15 @@ } let parent = self.tcx.hir().parent_id(expr.hir_id); - if let Some(Node::Expr(call_expr)) = self.tcx.hir().find(parent) && - let hir::ExprKind::MethodCall( + if let Some(Node::Expr(call_expr)) = self.tcx.hir().find(parent) + && let hir::ExprKind::MethodCall( hir::PathSegment { ident: method_name, .. }, self_expr, args, .., - ) = call_expr.kind && - let Some(self_ty) = self.typeck_results.borrow().expr_ty_opt(self_expr) { + ) = call_expr.kind + && let Some(self_ty) = self.typeck_results.borrow().expr_ty_opt(self_expr) + { let new_name = Ident { name: Symbol::intern(&format!("{}_else", method_name.as_str())), span: method_name.span, @@ -2951,10 +3185,11 @@ ); // check the method arguments number - if let Ok(pick) = probe && - let fn_sig = self.tcx.fn_sig(pick.item.def_id) && - let fn_args = fn_sig.skip_binder().skip_binder().inputs() && - fn_args.len() == args.len() + 1 { + if let Ok(pick) = probe + && let fn_sig = self.tcx.fn_sig(pick.item.def_id) + && let fn_args = fn_sig.skip_binder().skip_binder().inputs() + && fn_args.len() == args.len() + 1 + { err.span_suggestion_verbose( method_name.span.shrink_to_hi(), format!("try calling `{}` instead", new_name.name.as_str()), @@ -3040,57 +3275,59 @@ } fn print_disambiguation_help<'tcx>( - item_name: Ident, - args: Option>, + tcx: TyCtxt<'tcx>, err: &mut Diagnostic, - trait_name: String, - rcvr_ty: Ty<'_>, - kind: ty::AssocKind, - def_kind_descr: &'static str, + source: SelfSource<'tcx>, + args: Option<&'tcx [hir::Expr<'tcx>]>, + trait_ref: ty::TraitRef<'tcx>, + candidate_idx: Option, span: Span, - candidate: Option, - source_map: &source_map::SourceMap, - fn_has_self_parameter: bool, -) { - let mut applicability = Applicability::MachineApplicable; - let (span, sugg) = if let ( - ty::AssocKind::Fn, - Some(MethodCallComponents { receiver, args, .. 
}), - ) = (kind, args) - { - let args = format!( - "({}{})", - rcvr_ty.ref_mutability().map_or("", |mutbl| mutbl.ref_prefix_str()), - std::iter::once(receiver) - .chain(args.iter()) - .map(|arg| source_map.span_to_snippet(arg.span).unwrap_or_else(|_| { - applicability = Applicability::HasPlaceholders; - "_".to_owned() - })) - .collect::>() - .join(", "), - ); - let trait_name = if !fn_has_self_parameter { - format!("<{rcvr_ty} as {trait_name}>") - } else { - trait_name - }; - (span, format!("{trait_name}::{item_name}{args}")) + item: ty::AssocItem, +) -> Option { + let trait_ref = if item.fn_has_self_parameter { + trait_ref.print_only_trait_name().to_string() } else { - (span.with_hi(item_name.span.lo()), format!("<{rcvr_ty} as {trait_name}>::")) + format!("<{} as {}>", trait_ref.args[0], trait_ref.print_only_trait_name()) }; - err.span_suggestion_verbose( - span, - format!( - "disambiguate the {} for {}", - def_kind_descr, - if let Some(candidate) = candidate { - format!("candidate #{candidate}") - } else { - "the candidate".to_string() - }, - ), - sugg, - applicability, - ); + Some( + if matches!(item.kind, ty::AssocKind::Fn) + && let SelfSource::MethodCall(receiver) = source + && let Some(args) = args + { + let def_kind_descr = tcx.def_kind_descr(item.kind.as_def_kind(), item.def_id); + let item_name = item.ident(tcx); + let rcvr_ref = tcx.fn_sig(item.def_id).skip_binder().skip_binder().inputs()[0] + .ref_mutability() + .map_or("", |mutbl| mutbl.ref_prefix_str()); + let args = format!( + "({}{})", + rcvr_ref, + std::iter::once(receiver) + .chain(args.iter()) + .map(|arg| tcx + .sess + .source_map() + .span_to_snippet(arg.span) + .unwrap_or_else(|_| { "_".to_owned() })) + .collect::>() + .join(", "), + ); + err.span_suggestion_verbose( + span, + format!( + "disambiguate the {def_kind_descr} for {}", + if let Some(candidate) = candidate_idx { + format!("candidate #{candidate}") + } else { + "the candidate".to_string() + }, + ), + format!("{trait_ref}::{item_name}{args}"), + Applicability::HasPlaceholders, + ); + return None; + } else { + format!("{trait_ref}::") + }, + ) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/op.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/op.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/op.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/op.rs 2023-12-21 16:55:28.000000000 +0000 @@ -20,7 +20,7 @@ use rustc_trait_selection::infer::InferCtxtExt; use rustc_trait_selection::traits::error_reporting::suggestions::TypeErrCtxtExt as _; use rustc_trait_selection::traits::{self, FulfillmentError, ObligationCtxt}; -use rustc_type_ir::sty::TyKind::*; +use rustc_type_ir::TyKind::*; impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Checks a `a = b` @@ -379,6 +379,13 @@ (err, output_def_id) } }; + if self.check_for_missing_semi(expr, &mut err) + && let hir::Node::Expr(expr) = self.tcx.hir().get_parent(expr.hir_id) + && let hir::ExprKind::Assign(..) = expr.kind + { + // We defer to the later error produced by `check_lhs_assignable`. 
+ err.delay_as_bug(); + } let suggest_deref_binop = |err: &mut DiagnosticBuilder<'_, _>, lhs_deref_ty: Ty<'tcx>| { @@ -430,33 +437,35 @@ if let Some(lhs_new_mutbl) = lhs_new_mutbl && let Some(rhs_new_mutbl) = rhs_new_mutbl && lhs_new_mutbl.is_not() - && rhs_new_mutbl.is_not() { + && rhs_new_mutbl.is_not() + { err.multipart_suggestion_verbose( "consider reborrowing both sides", vec![ (lhs_expr.span.shrink_to_lo(), "&*".to_string()), - (rhs_expr.span.shrink_to_lo(), "&*".to_string()) + (rhs_expr.span.shrink_to_lo(), "&*".to_string()), ], rustc_errors::Applicability::MachineApplicable, ); } else { - let mut suggest_new_borrow = |new_mutbl: ast::Mutability, sp: Span| { - // Can reborrow (&mut -> &) - if new_mutbl.is_not() { - err.span_suggestion_verbose( - sp.shrink_to_lo(), - "consider reborrowing this side", - "&*", - rustc_errors::Applicability::MachineApplicable, - ); - // Works on &mut but have & - } else { - err.span_help( - sp, - "consider making this expression a mutable borrow", - ); - } - }; + let mut suggest_new_borrow = + |new_mutbl: ast::Mutability, sp: Span| { + // Can reborrow (&mut -> &) + if new_mutbl.is_not() { + err.span_suggestion_verbose( + sp.shrink_to_lo(), + "consider reborrowing this side", + "&*", + rustc_errors::Applicability::MachineApplicable, + ); + // Works on &mut but have & + } else { + err.span_help( + sp, + "consider making this expression a mutable borrow", + ); + } + }; if let Some(lhs_new_mutbl) = lhs_new_mutbl { suggest_new_borrow(lhs_new_mutbl, lhs_expr.span); @@ -493,20 +502,14 @@ } else if is_assign == IsAssign::No && let Ref(region, lhs_deref_ty, mutbl) = lhs_ty.kind() { - if self.type_is_copy_modulo_regions( - self.param_env, - *lhs_deref_ty, - ) { + if self.type_is_copy_modulo_regions(self.param_env, *lhs_deref_ty) { suggest_deref_binop(&mut err, *lhs_deref_ty); } else { let lhs_inv_mutbl = mutbl.invert(); let lhs_inv_mutbl_ty = Ty::new_ref( self.tcx, *region, - ty::TypeAndMut { - ty: *lhs_deref_ty, - mutbl: lhs_inv_mutbl, - }, + ty::TypeAndMut { ty: *lhs_deref_ty, mutbl: lhs_inv_mutbl }, ); suggest_different_borrow( @@ -522,10 +525,7 @@ let rhs_inv_mutbl_ty = Ty::new_ref( self.tcx, *region, - ty::TypeAndMut { - ty: *rhs_deref_ty, - mutbl: rhs_inv_mutbl, - }, + ty::TypeAndMut { ty: *rhs_deref_ty, mutbl: rhs_inv_mutbl }, ); suggest_different_borrow( @@ -599,7 +599,8 @@ if let Some(output_def_id) = output_def_id && let Some(trait_def_id) = trait_def_id && self.tcx.parent(output_def_id) == trait_def_id - && let Some(output_ty) = output_ty.make_suggestable(self.tcx, false) + && let Some(output_ty) = + output_ty.make_suggestable(self.tcx, false) { Some(("Output", output_ty)) } else { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/pat.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/pat.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/pat.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/pat.rs 2023-12-21 16:55:28.000000000 +0000 @@ -12,13 +12,14 @@ use rustc_hir::{HirId, Pat, PatKind}; use rustc_infer::infer; use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; -use rustc_middle::middle::stability::EvalResult; +use rustc_middle::mir::interpret::ErrorHandled; use rustc_middle::ty::{self, Adt, BindingMode, Ty, TypeVisitableExt}; use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS; use rustc_span::edit_distance::find_best_match_for_name; use rustc_span::hygiene::DesugaringKind; -use 
rustc_span::source_map::{Span, Spanned}; +use rustc_span::source_map::Spanned; use rustc_span::symbol::{kw, sym, Ident}; +use rustc_span::Span; use rustc_span::{BytePos, DUMMY_SP}; use rustc_target::abi::FieldIdx; use rustc_trait_selection::traits::{ObligationCause, Pattern}; @@ -165,9 +166,9 @@ fn check_pat(&self, pat: &'tcx Pat<'tcx>, expected: Ty<'tcx>, pat_info: PatInfo<'tcx, '_>) { let PatInfo { binding_mode: def_bm, top_info: ti, .. } = pat_info; let path_res = match &pat.kind { - PatKind::Path(qpath) => { - Some(self.resolve_ty_and_res_fully_qualified_call(qpath, pat.hir_id, pat.span)) - } + PatKind::Path(qpath) => Some( + self.resolve_ty_and_res_fully_qualified_call(qpath, pat.hir_id, pat.span, None), + ), _ => None, }; let adjust_mode = self.calc_adjust_mode(pat, path_res.map(|(res, ..)| res)); @@ -407,16 +408,19 @@ .borrow_mut() .treat_byte_string_as_slice .insert(lt.hir_id.local_id); - pat_ty = Ty::new_imm_ref(tcx,tcx.lifetimes.re_static, Ty::new_slice(tcx,tcx.types.u8)); + pat_ty = + Ty::new_imm_ref(tcx, tcx.lifetimes.re_static, Ty::new_slice(tcx, tcx.types.u8)); } } - if self.tcx.features().string_deref_patterns && let hir::ExprKind::Lit(Spanned { node: ast::LitKind::Str(..), .. }) = lt.kind { + if self.tcx.features().string_deref_patterns + && let hir::ExprKind::Lit(Spanned { node: ast::LitKind::Str(..), .. }) = lt.kind + { let tcx = self.tcx; let expected = self.resolve_vars_if_possible(expected); pat_ty = match expected.kind() { ty::Adt(def, _) if Some(def.did()) == tcx.lang_items().string() => expected, - ty::Str => Ty::new_static_str(tcx,), + ty::Str => Ty::new_static_str(tcx), _ => pat_ty, }; } @@ -708,7 +712,8 @@ fn borrow_pat_suggestion(&self, err: &mut Diagnostic, pat: &Pat<'_>) { let tcx = self.tcx; if let PatKind::Ref(inner, mutbl) = pat.kind - && let PatKind::Binding(_, _, binding, ..) = inner.kind { + && let PatKind::Binding(_, _, binding, ..) = inner.kind + { let binding_parent_id = tcx.hir().parent_id(pat.hir_id); let binding_parent = tcx.hir().get(binding_parent_id); debug!(?inner, ?pat, ?binding_parent); @@ -755,7 +760,6 @@ format!("to declare a mutable {ident_kind} use"), format!("mut {binding}"), )) - }; match binding_parent { @@ -778,7 +782,8 @@ hir::Node::Pat(pt) if let PatKind::TupleStruct(_, pat_arr, _) = pt.kind => { for i in pat_arr.iter() { if let PatKind::Ref(the_ref, _) = i.kind - && let PatKind::Binding(mt, _, ident, _) = the_ref.kind { + && let PatKind::Binding(mt, _, ident, _) = the_ref.kind + { let hir::BindingAnnotation(_, mtblty) = mt; err.span_suggestion_verbose( i.span, @@ -1055,7 +1060,7 @@ // Resolve the path and check the definition for errors. let (res, opt_ty, segments) = - self.resolve_ty_and_res_fully_qualified_call(qpath, pat.hir_id, pat.span); + self.resolve_ty_and_res_fully_qualified_call(qpath, pat.hir_id, pat.span, None); if res == Res::Err { let e = tcx.sess.delay_span_bug(pat.span, "`Res::Err` but no error emitted"); self.set_tainted_by_errors(e); @@ -1408,6 +1413,7 @@ adt.variant_descr(), &inexistent_fields, &mut unmentioned_fields, + pat, variant, args, )) @@ -1434,15 +1440,7 @@ let accessible_unmentioned_fields: Vec<_> = unmentioned_fields .iter() .copied() - .filter(|(field, _)| { - field.vis.is_accessible_from(tcx.parent_module(pat.hir_id), tcx) - && !matches!( - tcx.eval_stability(field.did, None, DUMMY_SP, None), - EvalResult::Deny { .. 
} - ) - // We only want to report the error if it is hidden and not local - && !(tcx.is_doc_hidden(field.did) && !field.did.is_local()) - }) + .filter(|(field, _)| self.is_field_suggestable(field, pat.hir_id, pat.span)) .collect(); if !has_rest_pat { @@ -1488,7 +1486,8 @@ (Some(mut err), None) => { err.emit(); } - (None, None) if let Some(mut err) = + (None, None) + if let Some(mut err) = self.error_tuple_variant_index_shorthand(variant, pat, fields) => { err.emit(); @@ -1512,9 +1511,7 @@ { let has_shorthand_field_name = field_patterns.iter().any(|field| field.is_shorthand); if has_shorthand_field_name { - let path = rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| { - s.print_qpath(qpath, false) - }); + let path = rustc_hir_pretty::qpath_to_string(qpath); let mut err = struct_span_err!( self.tcx.sess, pat.span, @@ -1578,12 +1575,13 @@ kind_name: &str, inexistent_fields: &[&hir::PatField<'tcx>], unmentioned_fields: &mut Vec<(&'tcx ty::FieldDef, Ident)>, + pat: &'tcx Pat<'tcx>, variant: &ty::VariantDef, args: &'tcx ty::List>, ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { let tcx = self.tcx; - let (field_names, t, plural) = if inexistent_fields.len() == 1 { - (format!("a field named `{}`", inexistent_fields[0].ident), "this", "") + let (field_names, t, plural) = if let [field] = inexistent_fields { + (format!("a field named `{}`", field.ident), "this", "") } else { ( format!( @@ -1620,10 +1618,11 @@ ), ); - if unmentioned_fields.len() == 1 { - let input = - unmentioned_fields.iter().map(|(_, field)| field.name).collect::>(); - let suggested_name = find_best_match_for_name(&input, pat_field.ident.name, None); + if let [(field_def, field)] = unmentioned_fields.as_slice() + && self.is_field_suggestable(field_def, pat.hir_id, pat.span) + { + let suggested_name = + find_best_match_for_name(&[field.name], pat_field.ident.name, None); if let Some(suggested_name) = suggested_name { err.span_suggestion( pat_field.ident.span, @@ -1646,22 +1645,17 @@ PatKind::Lit(expr) if !self.can_coerce( self.typeck_results.borrow().expr_ty(expr), - self.field_ty( - unmentioned_fields[0].1.span, - unmentioned_fields[0].0, - args, - ), + self.field_ty(field.span, field_def, args), ) => {} _ => { - let unmentioned_field = unmentioned_fields[0].1.name; err.span_suggestion_short( pat_field.ident.span, format!( "`{}` has a field named `{}`", tcx.def_path_str(variant.def_id), - unmentioned_field + field.name, ), - unmentioned_field.to_string(), + field.name, Applicability::MaybeIncorrect, ); } @@ -1699,9 +1693,7 @@ return None; } - let path = rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| { - s.print_qpath(qpath, false) - }); + let path = rustc_hir_pretty::qpath_to_string(qpath); let mut err = struct_span_err!( self.tcx.sess, pat.span, @@ -1751,9 +1743,7 @@ f } } - Err(_) => rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| { - s.print_pat(field.pat) - }), + Err(_) => rustc_hir_pretty::pat_to_string(field.pat), } }) .collect::>() @@ -1871,8 +1861,8 @@ fields: &'tcx [hir::PatField<'tcx>], ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { let inaccessible = if have_inaccessible_fields { " and inaccessible fields" } else { "" }; - let field_names = if unmentioned_fields.len() == 1 { - format!("field `{}`{}", unmentioned_fields[0].1, inaccessible) + let field_names = if let [(_, field)] = unmentioned_fields { + format!("field `{field}`{inaccessible}") } else { let fields = unmentioned_fields .iter() @@ -2176,7 +2166,19 @@ len: ty::Const<'tcx>, min_len: u64, ) -> (Option>, Ty<'tcx>) { - let guar = if 
let Some(len) = len.try_eval_target_usize(self.tcx, self.param_env) { + let len = match len.eval(self.tcx, self.param_env, None) { + Ok(val) => val + .try_to_scalar() + .and_then(|scalar| scalar.try_to_int().ok()) + .and_then(|int| int.try_to_target_usize(self.tcx).ok()), + Err(ErrorHandled::Reported(..)) => { + let guar = self.error_scrutinee_unfixed_length(span); + return (Some(Ty::new_error(self.tcx, guar)), arr_ty); + } + Err(ErrorHandled::TooGeneric(..)) => None, + }; + + let guar = if let Some(len) = len { // Now we know the length... if slice.is_none() { // ...and since there is no variable-length pattern, @@ -2280,7 +2282,8 @@ && let ty::Array(..) | ty::Slice(..) = ty.kind() { err.help("the semantics of slice patterns changed recently; see issue #62254"); - } else if self.autoderef(span, expected_ty) + } else if self + .autoderef(span, expected_ty) .any(|(ty, _)| matches!(ty.kind(), ty::Slice(..) | ty::Array(..))) && let Some(span) = ti.span && let Some(_) = ti.origin_expr @@ -2301,7 +2304,7 @@ Applicability::MaybeIncorrect, ); } - _ => () + _ => (), } if is_slice_or_array_or_vector.0 { err.span_suggestion( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/upvar.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/upvar.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/upvar.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/upvar.rs 2023-12-21 16:55:28.000000000 +0000 @@ -172,7 +172,7 @@ let ty = self.node_ty(closure_hir_id); let (closure_def_id, args) = match *ty.kind() { ty::Closure(def_id, args) => (def_id, UpvarArgs::Closure(args)), - ty::Generator(def_id, args, _) => (def_id, UpvarArgs::Generator(args)), + ty::Coroutine(def_id, args, _) => (def_id, UpvarArgs::Coroutine(args)), ty::Error(_) => { // #51714: skip analysis when we have already encountered type errors return; @@ -366,7 +366,7 @@ /// Note that we *always* infer a minimal kind, even if /// we don't always *use* that in the final result (i.e., sometimes /// we've taken the closure kind from the expectations instead, and - /// for generators we don't even implement the closure traits + /// for coroutines we don't even implement the closure traits /// really). /// /// If we inferred that the closure needs to be FnMut/FnOnce, last element of the returned tuple @@ -424,7 +424,7 @@ origin = updated.1; let (place, capture_kind) = match capture_clause { - hir::CaptureBy::Value => adjust_for_move_closure(place, capture_kind), + hir::CaptureBy::Value { .. } => adjust_for_move_closure(place, capture_kind), hir::CaptureBy::Ref => adjust_for_non_move_closure(place, capture_kind), }; @@ -958,7 +958,7 @@ let ty = self.resolve_vars_if_possible(self.node_ty(var_hir_id)); let ty = match closure_clause { - hir::CaptureBy::Value => ty, // For move closure the capture kind should be by value + hir::CaptureBy::Value { .. } => ty, // For move closure the capture kind should be by value hir::CaptureBy::Ref => { // For non move closure the capture kind is the max capture kind of all captures // according to the ordering ImmBorrow < UniqueImmBorrow < MutBorrow < ByValue @@ -1073,7 +1073,7 @@ match closure_clause { // Only migrate if closure is a move closure - hir::CaptureBy::Value => { + hir::CaptureBy::Value { .. 
} => { let mut diagnostics_info = FxIndexSet::default(); let upvars = self.tcx.upvars_mentioned(closure_def_id).expect("must be an upvar"); @@ -1479,10 +1479,12 @@ // If the data will be moved out of this place, then the place will be truncated // at the first Deref in `adjust_upvar_borrow_kind_for_consume` and then moved into // the closure. - hir::CaptureBy::Value if !place.deref_tys().any(Ty::is_ref) => { + hir::CaptureBy::Value { .. } if !place.deref_tys().any(Ty::is_ref) => { ty::UpvarCapture::ByValue } - hir::CaptureBy::Value | hir::CaptureBy::Ref => ty::UpvarCapture::ByRef(ty::ImmBorrow), + hir::CaptureBy::Value { .. } | hir::CaptureBy::Ref => { + ty::UpvarCapture::ByRef(ty::ImmBorrow) + } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/writeback.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/writeback.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/writeback.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_hir_typeck/src/writeback.rs 2023-12-21 16:55:28.000000000 +0000 @@ -63,7 +63,7 @@ wbcx.visit_coercion_casts(); wbcx.visit_user_provided_tys(); wbcx.visit_user_provided_sigs(); - wbcx.visit_generator_interior(); + wbcx.visit_coroutine_interior(); wbcx.visit_offset_of_container_types(); wbcx.typeck_results.rvalue_scopes = @@ -174,7 +174,8 @@ } } hir::ExprKind::AssignOp(..) - if let Some(a) = self.typeck_results.adjustments_mut().get_mut(lhs.hir_id) => + if let Some(a) = + self.typeck_results.adjustments_mut().get_mut(lhs.hir_id) => { a.pop(); } @@ -247,7 +248,8 @@ // Since this is "after" the other adjustment to be // discarded, we do an extra `pop()` if let Some(Adjustment { - kind: Adjust::Pointer(PointerCoercion::Unsize), .. + kind: Adjust::Pointer(PointerCoercion::Unsize), + .. }) = a.pop() { // So the borrow discard actually happens here @@ -538,16 +540,16 @@ ); } - fn visit_generator_interior(&mut self) { + fn visit_coroutine_interior(&mut self) { let fcx_typeck_results = self.fcx.typeck_results.borrow(); assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner); self.tcx().with_stable_hashing_context(move |ref hcx| { for (&expr_def_id, predicates) in - fcx_typeck_results.generator_interior_predicates.to_sorted(hcx, false).into_iter() + fcx_typeck_results.coroutine_interior_predicates.to_sorted(hcx, false).into_iter() { let predicates = self.resolve(predicates.clone(), &self.fcx.tcx.def_span(expr_def_id)); - self.typeck_results.generator_interior_predicates.insert(expr_def_id, predicates); + self.typeck_results.coroutine_interior_predicates.insert(expr_def_id, predicates); } }) } @@ -568,10 +570,8 @@ // Here we only detect impl trait definition conflicts when they // are equal modulo regions. 
- if let Some(last_opaque_ty) = self - .typeck_results - .concrete_opaque_types - .insert(opaque_type_key, hidden_type) + if let Some(last_opaque_ty) = + self.typeck_results.concrete_opaque_types.insert(opaque_type_key, hidden_type) && last_opaque_ty.ty != hidden_type.ty { assert!(!self.fcx.next_trait_solver()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,15 +3,14 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start rand = "0.8.4" rustc_ast = { path = "../rustc_ast" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } -rustc_fs_util = { path = "../rustc_fs_util" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_fs_util = { path = "../rustc_fs_util" } rustc_graphviz = { path = "../rustc_graphviz" } rustc_hir = { path = "../rustc_hir" } rustc_macros = { path = "../rustc_macros" } @@ -21,3 +20,4 @@ rustc_span = { path = "../rustc_span" } thin-vec = "0.2.12" tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -30,8 +30,6 @@ incremental compilation: could not create session directory lock file: {$lock_err} incremental_create_new = failed to create {$name} at `{$path}`: {$err} -incremental_decode_incr_cache = could not decode incremental cache: {$err} - incremental_delete_full = error deleting incremental compilation session directory `{$path}`: {$err} incremental_delete_incompatible = @@ -46,8 +44,6 @@ incremental_delete_workproduct = file-system error deleting outdated file `{$path}`: {$err} -incremental_field_associated_value_expected = associated value expected for `{$name}` - incremental_finalize = error finalizing incremental compilation session directory `{$path}`: {$err} incremental_finalized_gc_failed = @@ -63,25 +59,15 @@ incremental_lock_unsupported = the filesystem for the incremental path at {$session_dir} does not appear to support locking, consider changing the incremental path to a filesystem that supports locking or disable incremental compilation -incremental_malformed_cgu_name = - found malformed codegen unit name `{$user_path}`. codegen units names must always start with the name of the crate (`{$crate_name}` in this case). incremental_missing_depnode = missing `DepNode` variant incremental_missing_if_this_changed = no `#[rustc_if_this_changed]` annotation detected -incremental_missing_query_depgraph = - found CGU-reuse attribute but `-Zquery-dep-graph` was not specified - incremental_move_dep_graph = failed to move dependency graph from `{$from}` to `{$to}`: {$err} incremental_no_cfg = no cfg attribute -incremental_no_field = no field `{$name}` - -incremental_no_module_named = - no module named `{$user_path}` (mangled: {$cgu_name}). 
available modules: {$cgu_names} - incremental_no_path = no path from `{$source}` to `{$target}` incremental_not_clean = `{$dep_node_str}` should be clean but is not @@ -107,8 +93,6 @@ incremental_unknown_item = unknown item `{$name}` -incremental_unknown_reuse_kind = unknown cgu-reuse-kind `{$kind}` specified - incremental_unrecognized_depnode = unrecognized `DepNode` variant: {$name} incremental_unrecognized_depnode_label = dep-node label `{$label}` not recognized diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/assert_module_sources.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/assert_module_sources.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/assert_module_sources.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/assert_module_sources.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,171 +0,0 @@ -//! This pass is only used for UNIT TESTS related to incremental -//! compilation. It tests whether a particular `.o` file will be re-used -//! from a previous compilation or whether it must be regenerated. -//! -//! The user adds annotations to the crate of the following form: -//! -//! ``` -//! # #![feature(rustc_attrs)] -//! # #![allow(internal_features)] -//! #![rustc_partition_reused(module="spike", cfg="rpass2")] -//! #![rustc_partition_codegened(module="spike-x", cfg="rpass2")] -//! ``` -//! -//! The first indicates (in the cfg `rpass2`) that `spike.o` will be -//! reused, the second that `spike-x.o` will be recreated. If these -//! annotations are inaccurate, errors are reported. -//! -//! The reason that we use `cfg=...` and not `#[cfg_attr]` is so that -//! the HIR doesn't change as a result of the annotations, which might -//! perturb the reuse results. -//! -//! `#![rustc_expected_cgu_reuse(module="spike", cfg="rpass2", kind="post-lto")]` -//! allows for doing a more fine-grained check to see if pre- or post-lto data -//! was re-used. 
- -use crate::errors; -use rustc_ast as ast; -use rustc_data_structures::unord::UnordSet; -use rustc_hir::def_id::LOCAL_CRATE; -use rustc_middle::mir::mono::CodegenUnitNameBuilder; -use rustc_middle::ty::TyCtxt; -use rustc_session::cgu_reuse_tracker::*; -use rustc_span::symbol::{sym, Symbol}; -use thin_vec::ThinVec; - -#[allow(missing_docs)] -pub fn assert_module_sources(tcx: TyCtxt<'_>) { - tcx.dep_graph.with_ignore(|| { - if tcx.sess.opts.incremental.is_none() { - return; - } - - let available_cgus = - tcx.collect_and_partition_mono_items(()).1.iter().map(|cgu| cgu.name()).collect(); - - let ams = AssertModuleSource { tcx, available_cgus }; - - for attr in tcx.hir().attrs(rustc_hir::CRATE_HIR_ID) { - ams.check_attr(attr); - } - }) -} - -struct AssertModuleSource<'tcx> { - tcx: TyCtxt<'tcx>, - available_cgus: UnordSet, -} - -impl<'tcx> AssertModuleSource<'tcx> { - fn check_attr(&self, attr: &ast::Attribute) { - let (expected_reuse, comp_kind) = if attr.has_name(sym::rustc_partition_reused) { - (CguReuse::PreLto, ComparisonKind::AtLeast) - } else if attr.has_name(sym::rustc_partition_codegened) { - (CguReuse::No, ComparisonKind::Exact) - } else if attr.has_name(sym::rustc_expected_cgu_reuse) { - match self.field(attr, sym::kind) { - sym::no => (CguReuse::No, ComparisonKind::Exact), - sym::pre_dash_lto => (CguReuse::PreLto, ComparisonKind::Exact), - sym::post_dash_lto => (CguReuse::PostLto, ComparisonKind::Exact), - sym::any => (CguReuse::PreLto, ComparisonKind::AtLeast), - other => { - self.tcx - .sess - .emit_fatal(errors::UnknownReuseKind { span: attr.span, kind: other }); - } - } - } else { - return; - }; - - if !self.tcx.sess.opts.unstable_opts.query_dep_graph { - self.tcx.sess.emit_fatal(errors::MissingQueryDepGraph { span: attr.span }); - } - - if !self.check_config(attr) { - debug!("check_attr: config does not match, ignoring attr"); - return; - } - - let user_path = self.field(attr, sym::module).to_string(); - let crate_name = self.tcx.crate_name(LOCAL_CRATE).to_string(); - - if !user_path.starts_with(&crate_name) { - self.tcx.sess.emit_fatal(errors::MalformedCguName { - span: attr.span, - user_path, - crate_name, - }); - } - - // Split of the "special suffix" if there is one. 
- let (user_path, cgu_special_suffix) = if let Some(index) = user_path.rfind('.') { - (&user_path[..index], Some(&user_path[index + 1..])) - } else { - (&user_path[..], None) - }; - - let mut iter = user_path.split('-'); - - // Remove the crate name - assert_eq!(iter.next().unwrap(), crate_name); - - let cgu_path_components = iter.collect::>(); - - let cgu_name_builder = &mut CodegenUnitNameBuilder::new(self.tcx); - let cgu_name = - cgu_name_builder.build_cgu_name(LOCAL_CRATE, cgu_path_components, cgu_special_suffix); - - debug!("mapping '{}' to cgu name '{}'", self.field(attr, sym::module), cgu_name); - - if !self.available_cgus.contains(&cgu_name) { - let cgu_names: Vec<&str> = - self.available_cgus.items().map(|cgu| cgu.as_str()).into_sorted_stable_ord(); - self.tcx.sess.emit_err(errors::NoModuleNamed { - span: attr.span, - user_path, - cgu_name, - cgu_names: cgu_names.join(", "), - }); - } - - self.tcx.sess.cgu_reuse_tracker.set_expectation( - cgu_name, - &user_path, - attr.span, - expected_reuse, - comp_kind, - ); - } - - fn field(&self, attr: &ast::Attribute, name: Symbol) -> Symbol { - for item in attr.meta_item_list().unwrap_or_else(ThinVec::new) { - if item.has_name(name) { - if let Some(value) = item.value_str() { - return value; - } else { - self.tcx.sess.emit_fatal(errors::FieldAssociatedValueExpected { - span: item.span(), - name, - }); - } - } - } - - self.tcx.sess.emit_fatal(errors::NoField { span: attr.span, name }); - } - - /// Scan for a `cfg="foo"` attribute and check whether we have a - /// cfg flag called `foo`. - fn check_config(&self, attr: &ast::Attribute) -> bool { - let config = &self.tcx.sess.parse_sess.config; - let value = self.field(attr, sym::cfg); - debug!("check_config(config={:?}, value={:?})", config, value); - if config.iter().any(|&(name, _)| name == value) { - debug!("check_config: matched"); - return true; - } - debug!("check_config: no match found"); - false - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -41,56 +41,6 @@ } #[derive(Diagnostic)] -#[diag(incremental_unknown_reuse_kind)] -pub struct UnknownReuseKind { - #[primary_span] - pub span: Span, - pub kind: Symbol, -} - -#[derive(Diagnostic)] -#[diag(incremental_missing_query_depgraph)] -pub struct MissingQueryDepGraph { - #[primary_span] - pub span: Span, -} - -#[derive(Diagnostic)] -#[diag(incremental_malformed_cgu_name)] -pub struct MalformedCguName { - #[primary_span] - pub span: Span, - pub user_path: String, - pub crate_name: String, -} - -#[derive(Diagnostic)] -#[diag(incremental_no_module_named)] -pub struct NoModuleNamed<'a> { - #[primary_span] - pub span: Span, - pub user_path: &'a str, - pub cgu_name: Symbol, - pub cgu_names: String, -} - -#[derive(Diagnostic)] -#[diag(incremental_field_associated_value_expected)] -pub struct FieldAssociatedValueExpected { - #[primary_span] - pub span: Span, - pub name: Symbol, -} - -#[derive(Diagnostic)] -#[diag(incremental_no_field)] -pub struct NoField { - #[primary_span] - pub span: Span, - pub name: Symbol, -} - -#[derive(Diagnostic)] #[diag(incremental_assertion_auto)] pub struct AssertionAuto<'a> { #[primary_span] @@ -321,12 +271,6 @@ } #[derive(Diagnostic)] -#[diag(incremental_decode_incr_cache)] -pub 
struct DecodeIncrCache { - pub err: String, -} - -#[derive(Diagnostic)] #[diag(incremental_write_dep_graph)] pub struct WriteDepGraph<'a> { pub path: &'a Path, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,7 +2,9 @@ #![deny(missing_docs)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] -#![feature(never_type)] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![recursion_limit = "256"] #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] @@ -13,19 +15,14 @@ extern crate tracing; mod assert_dep_graph; -pub mod assert_module_sources; mod errors; mod persist; -use assert_dep_graph::assert_dep_graph; pub use persist::copy_cgu_workproduct_to_incr_comp_cache_dir; -pub use persist::delete_workproduct_files; pub use persist::finalize_session_directory; -pub use persist::garbage_collect_session_directories; pub use persist::in_incr_comp_dir; pub use persist::in_incr_comp_dir_sess; pub use persist::load_query_result_cache; -pub use persist::prepare_session_directory; pub use persist::save_dep_graph; pub use persist::save_work_product_index; pub use persist::setup_dep_graph; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/fs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/fs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/fs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/fs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -53,7 +53,7 @@ //! ## Synchronization //! //! There is some synchronization needed in order for the compiler to be able to -//! determine whether a given private session directory is not in used any more. +//! determine whether a given private session directory is not in use any more. //! This is done by creating a lock file for each session directory and //! locking it while the directory is still being used. Since file locks have //! operating system support, we can rely on the lock being released if the @@ -136,26 +136,29 @@ const INT_ENCODE_BASE: usize = base_n::CASE_INSENSITIVE; /// Returns the path to a session's dependency graph. -pub fn dep_graph_path(sess: &Session) -> PathBuf { +pub(crate) fn dep_graph_path(sess: &Session) -> PathBuf { in_incr_comp_dir_sess(sess, DEP_GRAPH_FILENAME) } + /// Returns the path to a session's staging dependency graph. /// /// On the difference between dep-graph and staging dep-graph, /// see `build_dep_graph`. -pub fn staging_dep_graph_path(sess: &Session) -> PathBuf { +pub(crate) fn staging_dep_graph_path(sess: &Session) -> PathBuf { in_incr_comp_dir_sess(sess, STAGING_DEP_GRAPH_FILENAME) } -pub fn work_products_path(sess: &Session) -> PathBuf { + +pub(crate) fn work_products_path(sess: &Session) -> PathBuf { in_incr_comp_dir_sess(sess, WORK_PRODUCTS_FILENAME) } + /// Returns the path to a session's query cache. pub fn query_cache_path(sess: &Session) -> PathBuf { in_incr_comp_dir_sess(sess, QUERY_CACHE_FILENAME) } /// Locks a given session directory. 
-pub fn lock_file_path(session_dir: &Path) -> PathBuf { +fn lock_file_path(session_dir: &Path) -> PathBuf { let crate_dir = session_dir.parent().unwrap(); let directory_name = session_dir.file_name().unwrap().to_string_lossy(); @@ -202,7 +205,7 @@ /// The garbage collection will take care of it. /// /// [`rustc_interface::queries::dep_graph`]: ../../rustc_interface/struct.Queries.html#structfield.dep_graph -pub fn prepare_session_directory( +pub(crate) fn prepare_session_directory( sess: &Session, crate_name: Symbol, stable_crate_id: StableCrateId, @@ -373,7 +376,7 @@ let _ = garbage_collect_session_directories(sess); } -pub fn delete_all_session_dir_contents(sess: &Session) -> io::Result<()> { +pub(crate) fn delete_all_session_dir_contents(sess: &Session) -> io::Result<()> { let sess_dir_iterator = sess.incr_comp_session_dir().read_dir()?; for entry in sess_dir_iterator { let entry = entry?; @@ -621,7 +624,7 @@ } /// Runs garbage collection for the current session. -pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { +pub(crate) fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { debug!("garbage_collect_session_directories() - begin"); let session_directory = sess.incr_comp_session_dir(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/load.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/load.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/load.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/load.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,4 @@ -//! Code to save/load the dep-graph from files. +//! Code to load the dep-graph from files. use crate::errors; use rustc_data_structures::memmap::Mmap; @@ -30,8 +30,6 @@ DataOutOfDate, /// Loading the dep graph failed. LoadDepGraph(PathBuf, std::io::Error), - /// Decoding loaded incremental cache failed. - DecodeIncrCache(Box), } impl LoadResult { @@ -44,9 +42,7 @@ } ( Some(IncrementalStateAssertion::Loaded), - LoadResult::LoadDepGraph(..) - | LoadResult::DecodeIncrCache(..) - | LoadResult::DataOutOfDate, + LoadResult::LoadDepGraph(..) 
| LoadResult::DataOutOfDate, ) => { sess.emit_fatal(errors::AssertLoaded); } @@ -58,10 +54,6 @@ sess.emit_warning(errors::LoadDepGraph { path, err }); Default::default() } - LoadResult::DecodeIncrCache(err) => { - sess.emit_warning(errors::DecodeIncrCache { err: format!("{err:?}") }); - Default::default() - } LoadResult::DataOutOfDate => { if let Err(err) = delete_all_session_dir_contents(sess) { sess.emit_err(errors::DeleteIncompatible { path: dep_graph_path(sess), err }); @@ -150,7 +142,6 @@ match load_data(&path, sess) { LoadResult::DataOutOfDate => LoadResult::DataOutOfDate, LoadResult::LoadDepGraph(path, err) => LoadResult::LoadDepGraph(path, err), - LoadResult::DecodeIncrCache(err) => LoadResult::DecodeIncrCache(err), LoadResult::Ok { data: (bytes, start_pos) } => { let mut decoder = MemDecoder::new(&bytes, start_pos); let prev_commandline_args_hash = u64::decode(&mut decoder); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,14 +11,11 @@ mod work_product; pub use fs::finalize_session_directory; -pub use fs::garbage_collect_session_directories; pub use fs::in_incr_comp_dir; pub use fs::in_incr_comp_dir_sess; -pub use fs::prepare_session_directory; pub use load::load_query_result_cache; pub use load::setup_dep_graph; pub use load::LoadResult; pub use save::save_dep_graph; pub use save::save_work_product_index; pub use work_product::copy_cgu_workproduct_to_incr_comp_cache_dir; -pub use work_product::delete_workproduct_files; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/save.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/save.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/save.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/save.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,4 @@ +use crate::assert_dep_graph::assert_dep_graph; use crate::errors; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::sync::join; @@ -39,7 +40,7 @@ let dep_graph_path = dep_graph_path(sess); let staging_dep_graph_path = staging_dep_graph_path(sess); - sess.time("assert_dep_graph", || crate::assert_dep_graph(tcx)); + sess.time("assert_dep_graph", || assert_dep_graph(tcx)); sess.time("check_dirty_clean", || dirty_clean::check_dirty_clean_annotations(tcx)); if sess.opts.unstable_opts.incremental_info { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/work_product.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/work_product.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/work_product.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_incremental/src/persist/work_product.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,7 +11,8 @@ use std::fs as std_fs; use std::path::Path; -/// Copies a CGU work product to the incremental compilation directory, so next compilation can find and reuse it. +/// Copies a CGU work product to the incremental compilation directory, so next compilation can +/// find and reuse it. 
pub fn copy_cgu_workproduct_to_incr_comp_cache_dir( sess: &Session, cgu_name: &str, @@ -45,7 +46,7 @@ } /// Removes files for a given work product. -pub fn delete_workproduct_files(sess: &Session, work_product: &WorkProduct) { +pub(crate) fn delete_workproduct_files(sess: &Session, work_product: &WorkProduct) { for (_, path) in work_product.saved_files.items().into_sorted_stable_ord() { let path = in_incr_comp_dir_sess(sess, path); if let Err(err) = std_fs::remove_file(&path) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_index/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_index/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_index/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_index/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,14 +3,16 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start arrayvec = { version = "0.7", default-features = false } -rustc_serialize = { path = "../rustc_serialize", optional = true } rustc_macros = { path = "../rustc_macros", optional = true } +rustc_serialize = { path = "../rustc_serialize", optional = true } smallvec = "1.8.1" +# tidy-alphabetical-end [features] +# tidy-alphabetical-start default = ["nightly"] nightly = ["rustc_serialize", "rustc_macros"] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_index/src/bit_set.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_index/src/bit_set.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_index/src/bit_set.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_index/src/bit_set.rs 2023-12-21 16:55:28.000000000 +0000 @@ -365,7 +365,7 @@ /// All operations that involve an element will panic if the element is equal /// to or greater than the domain size. All operations that involve two bitsets /// will panic if the bitsets have differing domain sizes. 
-#[derive(Debug, PartialEq, Eq)] +#[derive(PartialEq, Eq)] pub struct ChunkedBitSet { domain_size: usize, @@ -1072,6 +1072,12 @@ fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result { w.debug_list().entries(self.iter()).finish() } +} + +impl fmt::Debug for ChunkedBitSet { + fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result { + w.debug_list().entries(self.iter()).finish() + } } impl ToString for BitSet { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -7,15 +7,17 @@ doctest = false [dependencies] -tracing = "0.1" -rustc_middle = { path = "../rustc_middle" } +# tidy-alphabetical-start rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } -rustc_hir = { path = "../rustc_hir" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } +rustc_middle = { path = "../rustc_middle" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -66,7 +66,6 @@ infer_await_future = consider `await`ing on the `Future` infer_await_note = calling an async function returns a future -infer_borrowed_too_long = a value of type `{$ty}` is borrowed for too long infer_but_calling_introduces = {$has_param_name -> [true] `{$param_name}` *[false] `fn` parameter @@ -181,22 +180,20 @@ } but calling `{$ident}` introduces an implicit `'static` lifetime requirement infer_msl_introduces_static = introduces a `'static` lifetime requirement -infer_msl_trait_note = this has an implicit `'static` lifetime requirement -infer_msl_trait_sugg = consider relaxing the implicit `'static` requirement infer_msl_unmet_req = because this has an unmet lifetime requirement -infer_need_type_info_in_generator = - type inside {$generator_kind -> +infer_need_type_info_in_coroutine = + type inside {$coroutine_kind -> [async_block] `async` block [async_closure] `async` closure [async_fn] `async fn` body - *[generator] generator + *[coroutine] coroutine } must be known in this context infer_nothing = {""} infer_oc_cant_coerce = cannot coerce intrinsics to function pointers -infer_oc_closure_selfref = closure/generator type that references itself +infer_oc_closure_selfref = closure/coroutine type that references itself infer_oc_const_compat = const not compatible with trait infer_oc_fn_lang_correct_type = {$lang_item_name -> [panic_impl] `#[panic_handler]` @@ -233,7 +230,6 @@ infer_prlf_must_outlive_with_sup = ...must outlive the lifetime `{$sup_symbol}` defined here infer_prlf_must_outlive_without_sup = ...must outlive the lifetime defined here infer_reborrow = ...so that reference does not outlive borrowed content 
-infer_reborrow_upvar = ...so that closure can access `{$name}` infer_ref_longer_than_data = in type `{$ty}`, reference has a longer lifetime than the data it references infer_reference_outlives_referent = ...so that the reference type `{$name}` does not outlive the data it points at @@ -288,7 +284,7 @@ infer_source_kind_closure_return = try giving this closure an explicit return type -# generator_kind may need to be translated +# coroutine_kind may need to be translated infer_source_kind_fully_qualified = try using a fully qualified path to specify the expected types diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/errors/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/errors/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/errors/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/errors/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -194,13 +194,13 @@ data: &'a FnRetTy<'a>, should_wrap_expr: Option, ) -> Self { - let (arrow, post) = match data { - FnRetTy::DefaultReturn(_) => ("-> ", " "), - _ => ("", ""), + let arrow = match data { + FnRetTy::DefaultReturn(_) => " -> ", + _ => "", }; let (start_span, start_span_code, end_span) = match should_wrap_expr { - Some(end_span) => (data.span(), format!("{arrow}{ty_info}{post}{{ "), Some(end_span)), - None => (data.span(), format!("{arrow}{ty_info}{post}"), None), + Some(end_span) => (data.span(), format!("{arrow}{ty_info} {{"), Some(end_span)), + None => (data.span(), format!("{arrow}{ty_info}"), None), }; Self::ClosureReturn { start_span, start_span_code, end_span } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/errors/note_and_explain.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/errors/note_and_explain.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/errors/note_and_explain.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/errors/note_and_explain.rs 2023-12-21 16:55:28.000000000 +0000 @@ -60,9 +60,7 @@ let span = Some(tcx.def_span(scope)); (span, "defined_here", String::new()) } - _ => { - (Some(tcx.def_span(scope)), "defined_here_reg", region.to_string()) - } + _ => (Some(tcx.def_span(scope)), "defined_here_reg", region.to_string()), } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs 2023-12-21 16:55:28.000000000 +0000 @@ -457,8 +457,8 @@ } ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Bool | ty::Char | ty::Int(..) 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/canonical/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/canonical/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/canonical/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/canonical/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -27,7 +27,7 @@ use rustc_middle::ty::fold::TypeFoldable; use rustc_middle::ty::GenericArg; use rustc_middle::ty::{self, List, Ty, TyCtxt}; -use rustc_span::source_map::Span; +use rustc_span::Span; pub use rustc_middle::infer::canonical::*; pub use substitute::CanonicalExt; @@ -152,7 +152,7 @@ ) .into(), CanonicalVarKind::Effect => { - let vid = self.inner.borrow_mut().effect_unification_table().new_key(None); + let vid = self.inner.borrow_mut().effect_unification_table().new_key(None).vid; ty::Const::new_infer(self.tcx, ty::InferConst::EffectVar(vid), self.tcx.types.bool) .into() } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/combine.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/combine.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/combine.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/combine.rs 2023-12-21 16:55:28.000000000 +0000 @@ -320,7 +320,7 @@ #[instrument(level = "debug", skip(self))] fn unify_const_variable( &self, - target_vid: ty::ConstVid<'tcx>, + target_vid: ty::ConstVid, ct: ty::Const<'tcx>, param_env: ty::ParamEnv<'tcx>, ) -> RelateResult<'tcx, ty::Const<'tcx>> { @@ -381,7 +381,7 @@ fn unify_effect_variable( &self, vid_is_expected: bool, - vid: ty::EffectVid<'tcx>, + vid: ty::EffectVid, val: EffectVarValue<'tcx>, ) -> RelateResult<'tcx, ty::Const<'tcx>> { self.inner diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/equate.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/equate.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/equate.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/equate.rs 2023-12-21 16:55:28.000000000 +0000 @@ -56,7 +56,7 @@ // performing trait matching (which then performs equality // unification). 
- relate::relate_args(self, a_arg, b_arg) + relate::relate_args_invariantly(self, a_arg, b_arg) } fn relate_with_variance>( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -59,20 +59,19 @@ }; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; +use rustc_errors::{error_code, Applicability, DiagnosticBuilder, DiagnosticStyledString}; use rustc_errors::{pluralize, struct_span_err, Diagnostic, ErrorGuaranteed, IntoDiagnosticArg}; -use rustc_errors::{Applicability, DiagnosticBuilder, DiagnosticStyledString}; use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::intravisit::Visitor; use rustc_hir::lang_items::LangItem; -use rustc_hir::Node; use rustc_middle::dep_graph::DepContext; -use rustc_middle::ty::print::with_forced_trimmed_paths; +use rustc_middle::ty::print::{with_forced_trimmed_paths, PrintError}; use rustc_middle::ty::relate::{self, RelateResult, TypeRelation}; use rustc_middle::ty::{ - self, error::TypeError, List, Region, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable, - TypeVisitable, TypeVisitableExt, + self, error::TypeError, IsSuggestable, List, Region, Ty, TyCtxt, TypeFoldable, + TypeSuperVisitable, TypeVisitable, TypeVisitableExt, }; use rustc_span::{sym, symbol::kw, BytePos, DesugaringKind, Pos, Span}; use rustc_target::spec::abi; @@ -228,8 +227,10 @@ let scope = region.free_region_binding_scope(tcx).expect_local(); match fr.bound_region { ty::BoundRegionKind::BrNamed(_, name) => { - let span = if let Some(param) = - tcx.hir().get_generics(scope).and_then(|generics| generics.get_named(name)) + let span = if let Some(param) = tcx + .hir() + .get_generics(scope) + .and_then(|generics| generics.get_named(name)) { param.span } else { @@ -244,7 +245,7 @@ } ty::BrAnon => ( "the anonymous lifetime as defined here".to_string(), - Some(tcx.def_span(scope)) + Some(tcx.def_span(scope)), ), _ => ( format!("the lifetime `{region}` as defined here"), @@ -579,76 +580,68 @@ struct AbsolutePathPrinter<'tcx> { tcx: TyCtxt<'tcx>, + segments: Vec, } - struct NonTrivialPath; - impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { - type Error = NonTrivialPath; - - type Path = Vec; - type Region = !; - type Type = !; - type DynExistential = !; - type Const = !; - fn tcx<'a>(&'a self) -> TyCtxt<'tcx> { self.tcx } - fn print_region(self, _region: ty::Region<'_>) -> Result { - Err(NonTrivialPath) + fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { + Err(fmt::Error) } - fn print_type(self, _ty: Ty<'tcx>) -> Result { - Err(NonTrivialPath) + fn print_type(&mut self, _ty: Ty<'tcx>) -> Result<(), PrintError> { + Err(fmt::Error) } fn print_dyn_existential( - self, + &mut self, _predicates: &'tcx ty::List>, - ) -> Result { - Err(NonTrivialPath) + ) -> Result<(), PrintError> { + Err(fmt::Error) } - fn print_const(self, _ct: ty::Const<'tcx>) -> Result { - Err(NonTrivialPath) + fn print_const(&mut self, _ct: ty::Const<'tcx>) -> Result<(), PrintError> { + Err(fmt::Error) } - fn path_crate(self, cnum: CrateNum) -> Result { - Ok(vec![self.tcx.crate_name(cnum).to_string()]) + fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> 
{ + self.segments = vec![self.tcx.crate_name(cnum).to_string()]; + Ok(()) } fn path_qualified( - self, + &mut self, _self_ty: Ty<'tcx>, _trait_ref: Option>, - ) -> Result { - Err(NonTrivialPath) + ) -> Result<(), PrintError> { + Err(fmt::Error) } fn path_append_impl( - self, - _print_prefix: impl FnOnce(Self) -> Result, + &mut self, + _print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, _disambiguated_data: &DisambiguatedDefPathData, _self_ty: Ty<'tcx>, _trait_ref: Option>, - ) -> Result { - Err(NonTrivialPath) + ) -> Result<(), PrintError> { + Err(fmt::Error) } fn path_append( - self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, - ) -> Result { - let mut path = print_prefix(self)?; - path.push(disambiguated_data.to_string()); - Ok(path) + ) -> Result<(), PrintError> { + print_prefix(self)?; + self.segments.push(disambiguated_data.to_string()); + Ok(()) } fn path_generic_args( - self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, _args: &[GenericArg<'tcx>], - ) -> Result { + ) -> Result<(), PrintError> { print_prefix(self) } } @@ -658,12 +651,14 @@ // are from a local module we could have false positives, e.g. // let _ = [{struct Foo; Foo}, {struct Foo; Foo}]; if did1.krate != did2.krate { - let abs_path = - |def_id| AbsolutePathPrinter { tcx: self.tcx }.print_def_path(def_id, &[]); + let abs_path = |def_id| { + let mut printer = AbsolutePathPrinter { tcx: self.tcx, segments: vec![] }; + printer.print_def_path(def_id, &[]).map(|_| printer.segments) + }; // We compare strings because DefPath can be different // for imported and non-imported crates - let same_path = || -> Result<_, NonTrivialPath> { + let same_path = || -> Result<_, PrintError> { Ok(self.tcx.def_path_str(did1) == self.tcx.def_path_str(did2) || abs_path(did1)? == abs_path(did2)?) }; @@ -716,13 +711,17 @@ && let ty::Adt(def, args) = ty.kind() && Some(def.did()) == self.tcx.get_diagnostic_item(sym::Option) { - err.span_label(span, format!("this is an iterator with items of type `{}`", args.type_at(0))); + err.span_label( + span, + format!("this is an iterator with items of type `{}`", args.type_at(0)), + ); } else { - err.span_label(span, format!("this expression has type `{ty}`")); - } + err.span_label(span, format!("this expression has type `{ty}`")); + } } if let Some(ty::error::ExpectedFound { found, .. }) = exp_found - && ty.is_box() && ty.boxed_ty() == found + && ty.is_box() + && ty.boxed_ty() == found && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { err.span_suggestion( @@ -744,9 +743,9 @@ let scrut_expr = self.tcx.hir().expect_expr(scrut_hir_id); let scrut_ty = if let hir::ExprKind::Call(_, args) = &scrut_expr.kind { let arg_expr = args.first().expect("try desugaring call w/out arg"); - self.typeck_results.as_ref().and_then(|typeck_results| { - typeck_results.expr_ty_opt(arg_expr) - }) + self.typeck_results + .as_ref() + .and_then(|typeck_results| typeck_results.expr_ty_opt(arg_expr)) } else { bug!("try desugaring w/out call expr as scrutinee"); }; @@ -764,7 +763,7 @@ _ => {} } } - }, + } ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause { arm_block_id, arm_span, @@ -776,6 +775,7 @@ ref prior_arms, opt_suggest_box_span, scrut_span, + scrut_hir_id, .. 
}) => match source { hir::MatchSource::TryDesugar(scrut_hir_id) => { @@ -783,9 +783,9 @@ let scrut_expr = self.tcx.hir().expect_expr(scrut_hir_id); let scrut_ty = if let hir::ExprKind::Call(_, args) = &scrut_expr.kind { let arg_expr = args.first().expect("try desugaring call w/out arg"); - self.typeck_results.as_ref().and_then(|typeck_results| { - typeck_results.expr_ty_opt(arg_expr) - }) + self.typeck_results + .as_ref() + .and_then(|typeck_results| typeck_results.expr_ty_opt(arg_expr)) } else { bug!("try desugaring w/out call expr as scrutinee"); }; @@ -843,6 +843,18 @@ ) { err.subdiagnostic(subdiag); } + if let Some(hir::Node::Expr(m)) = self.tcx.hir().find_parent(scrut_hir_id) + && let Some(hir::Node::Stmt(stmt)) = self.tcx.hir().find_parent(m.hir_id) + && let hir::StmtKind::Expr(_) = stmt.kind + { + err.span_suggestion_verbose( + stmt.span.shrink_to_hi(), + "consider using a semicolon here, but this will discard any values \ + in the match arms", + ";", + Applicability::MaybeIncorrect, + ); + } if let Some(ret_sp) = opt_suggest_box_span { // Get return type span and point to it. self.suggest_boxing_for_return_impl_trait( @@ -879,8 +891,7 @@ } // don't suggest wrapping either blocks in `if .. {} else {}` let is_empty_arm = |id| { - let hir::Node::Block(blk) = self.tcx.hir().get(id) - else { + let hir::Node::Block(blk) = self.tcx.hir().get(id) else { return false; }; if blk.expr.is_some() || !blk.stmts.is_empty() { @@ -909,12 +920,11 @@ } _ => { if let ObligationCauseCode::BindingObligation(_, span) - | ObligationCauseCode::ExprBindingObligation(_, span, ..) - = cause.code().peel_derives() + | ObligationCauseCode::ExprBindingObligation(_, span, ..) = + cause.code().peel_derives() && let TypeError::RegionsPlaceholderMismatch = terr { - err.span_note( * span, - "the lifetime requirement is introduced here"); + err.span_note(*span, "the lifetime requirement is introduced here"); } } } @@ -1060,7 +1070,7 @@ let get_lifetimes = |sig| { use rustc_hir::def::Namespace; - let (_, sig, reg) = ty::print::FmtPrinter::new(self.tcx, Namespace::TypeNS) + let (sig, reg) = ty::print::FmtPrinter::new(self.tcx, Namespace::TypeNS) .name_all_regions(sig) .unwrap(); let lts: Vec = reg.into_values().map(|kind| kind.to_string()).collect(); @@ -1574,14 +1584,13 @@ target: &str, types: &FxIndexMap>, ) { - for (key, values) in types.iter() { + for (kind, values) in types.iter() { let count = values.len(); - let kind = key.descr(); for &sp in values { err.span_label( sp, format!( - "{}{} {}{}", + "{}{} {:#}{}", if count == 1 { "the " } else { "one of the " }, target, kind, @@ -1743,19 +1752,25 @@ } let similarity = |ExpectedFound { expected, found }: ExpectedFound>| { - if let ty::Adt(expected, _) = expected.kind() && let Some(primitive) = found.primitive_symbol() { + if let ty::Adt(expected, _) = expected.kind() + && let Some(primitive) = found.primitive_symbol() + { let path = self.tcx.def_path(expected.did()).data; let name = path.last().unwrap().data.get_opt_name(); if name == Some(primitive) { return Some(Similar::PrimitiveFound { expected: *expected, found }); } - } else if let Some(primitive) = expected.primitive_symbol() && let ty::Adt(found, _) = found.kind() { + } else if let Some(primitive) = expected.primitive_symbol() + && let ty::Adt(found, _) = found.kind() + { let path = self.tcx.def_path(found.did()).data; let name = path.last().unwrap().data.get_opt_name(); if name == Some(primitive) { return Some(Similar::PrimitiveExpected { expected, found: *found }); } - } else if let ty::Adt(expected, _) = 
expected.kind() && let ty::Adt(found, _) = found.kind() { + } else if let ty::Adt(expected, _) = expected.kind() + && let ty::Adt(found, _) = found.kind() + { if !expected.did().is_local() && expected.did().krate == found.did().krate { // Most likely types from different versions of the same crate // are in play, in which case this message isn't so helpful. @@ -1765,8 +1780,10 @@ let f_path = self.tcx.def_path(found.did()).data; let e_path = self.tcx.def_path(expected.did()).data; - if let (Some(e_last), Some(f_last)) = (e_path.last(), f_path.last()) && e_last == f_last { - return Some(Similar::Adts{expected: *expected, found: *found}); + if let (Some(e_last), Some(f_last)) = (e_path.last(), f_path.last()) + && e_last == f_last + { + return Some(Similar::Adts { expected: *expected, found: *found }); } } None @@ -1797,7 +1814,7 @@ }; let diagnose_adts = - |expected_adt : ty::AdtDef<'tcx>, + |expected_adt: ty::AdtDef<'tcx>, found_adt: ty::AdtDef<'tcx>, diagnostic: &mut Diagnostic| { let found_name = values.found.sort_string(self.tcx); @@ -1817,8 +1834,11 @@ .tcx .parent_module_from_def_id(defid.expect_local()) .to_def_id(); - let module_name = self.tcx.def_path(module).to_string_no_crate_verbose(); - format!("{name} is defined in module `crate{module_name}` of the current crate") + let module_name = + self.tcx.def_path(module).to_string_no_crate_verbose(); + format!( + "{name} is defined in module `crate{module_name}` of the current crate" + ) } else if defid.is_local() { format!("{name} is defined in the current crate") } else { @@ -1830,13 +1850,11 @@ }; match s { - Similar::Adts{expected, found} => { - diagnose_adts(expected, found, diag) - } - Similar::PrimitiveFound{expected, found: prim} => { + Similar::Adts { expected, found } => diagnose_adts(expected, found, diag), + Similar::PrimitiveFound { expected, found: prim } => { diagnose_primitive(prim, values.expected, expected.did(), diag) } - Similar::PrimitiveExpected{expected: prim, found} => { + Similar::PrimitiveExpected { expected: prim, found } => { diagnose_primitive(prim, values.found, found.did(), diag) } } @@ -1878,7 +1896,8 @@ } s }; - if !(values.expected.is_simple_text(self.tcx) && values.found.is_simple_text(self.tcx)) + if !(values.expected.is_simple_text(self.tcx) + && values.found.is_simple_text(self.tcx)) || (exp_found.is_some_and(|ef| { // This happens when the type error is a subset of the expectation, // like when you have two references but one is `usize` and the other @@ -1968,13 +1987,7 @@ && let exp_found = TypeError::Sorts(exp_found) && exp_found != terr { - self.note_and_explain_type_err( - diag, - exp_found, - cause, - span, - cause.body_id.to_def_id(), - ); + self.note_and_explain_type_err(diag, exp_found, cause, span, cause.body_id.to_def_id()); } if let Some(ValuePairs::PolyTraitRefs(exp_found)) = values @@ -1984,7 +1997,12 @@ { let span = self.tcx.def_span(def_id); diag.span_note(span, "this closure does not fulfill the lifetime requirements"); - self.suggest_for_all_lifetime_closure(span, self.tcx.hir().get_by_def_id(def_id), &exp_found, diag); + self.suggest_for_all_lifetime_closure( + span, + self.tcx.hir().get_by_def_id(def_id), + &exp_found, + diag, + ); } // It reads better to have the error origin as the final @@ -2010,7 +2028,7 @@ // parentheses around it, perhaps the user meant to write `(expr,)` to // build a tuple (issue #86100) (ty::Tuple(fields), _) => { - suggestions.extend(self.suggest_wrap_to_build_a_tuple( span, found, fields)) + suggestions.extend(self.suggest_wrap_to_build_a_tuple(span, 
found, fields)) } // If a byte was expected and the found expression is a char literal // containing a single ASCII character, perhaps the user meant to write `b'c'` to @@ -2060,8 +2078,10 @@ } // For code `if Some(..) = expr `, the type mismatch may be expected `bool` but found `()`, // we try to suggest to add the missing `let` for `if let Some(..) = expr` - (ty::Bool, ty::Tuple(list)) => if list.len() == 0 { - suggestions.extend(self.suggest_let_for_letchains(&trace.cause, span)); + (ty::Bool, ty::Tuple(list)) => { + if list.len() == 0 { + suggestions.extend(self.suggest_let_for_letchains(&trace.cause, span)); + } } (ty::Array(_, _), ty::Array(_, _)) => { suggestions.extend(self.suggest_specify_actual_length(terr, trace, span)) @@ -2071,8 +2091,7 @@ } let code = trace.cause.code(); if let &(MatchExpressionArm(box MatchExpressionArmCause { source, .. }) - | BlockTailExpression(.., source) - ) = code + | BlockTailExpression(.., source)) = code && let hir::MatchSource::TryDesugar(_) = source && let Some((expected_ty, found_ty, _, _)) = self.values_str(trace.values) { @@ -2109,17 +2128,16 @@ // Find a local statement where the initializer has // the same span as the error and the type is specified. if let hir::Stmt { - kind: hir::StmtKind::Local(hir::Local { - init: Some(hir::Expr { - span: init_span, + kind: + hir::StmtKind::Local(hir::Local { + init: Some(hir::Expr { span: init_span, .. }), + ty: Some(array_ty), .. }), - ty: Some(array_ty), - .. - }), .. } = s - && init_span == &self.span { + && init_span == &self.span + { self.result = Some(*array_ty); } } @@ -2317,113 +2335,18 @@ bound_kind: GenericKind<'tcx>, sub: Region<'tcx>, ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { - // Attempt to obtain the span of the parameter so we can - // suggest adding an explicit lifetime bound to it. - let generics = self.tcx.generics_of(generic_param_scope); - // type_param_span is (span, has_bounds) - let mut is_synthetic = false; - let mut ast_generics = None; - let type_param_span = match bound_kind { - GenericKind::Param(ref param) => { - // Account for the case where `param` corresponds to `Self`, - // which doesn't have the expected type argument. - if !(generics.has_self && param.index == 0) { - let type_param = generics.type_param(param, self.tcx); - is_synthetic = type_param.kind.is_synthetic(); - type_param.def_id.as_local().map(|def_id| { - // Get the `hir::Param` to verify whether it already has any bounds. - // We do this to avoid suggesting code that ends up as `T: 'a'b`, - // instead we suggest `T: 'a + 'b` in that case. 
- let hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id); - ast_generics = self.tcx.hir().get_generics(hir_id.owner.def_id); - let bounds = - ast_generics.and_then(|g| g.bounds_span_for_suggestions(def_id)); - // `sp` only covers `T`, change it so that it covers - // `T:` when appropriate - if let Some(span) = bounds { - (span, true) - } else { - let sp = self.tcx.def_span(def_id); - (sp.shrink_to_hi(), false) - } - }) - } else { - None - } - } - _ => None, - }; - - let new_lt = { - let mut possible = (b'a'..=b'z').map(|c| format!("'{}", c as char)); - let lts_names = - iter::successors(Some(generics), |g| g.parent.map(|p| self.tcx.generics_of(p))) - .flat_map(|g| &g.params) - .filter(|p| matches!(p.kind, ty::GenericParamDefKind::Lifetime)) - .map(|p| p.name.as_str()) - .collect::>(); - possible - .find(|candidate| !lts_names.contains(&&candidate[..])) - .unwrap_or("'lt".to_string()) - }; - - let mut add_lt_suggs: Vec> = vec![]; - if is_synthetic { - if let Some(ast_generics) = ast_generics { - let named_lifetime_param_exist = ast_generics.params.iter().any(|p| { - matches!( - p.kind, - hir::GenericParamKind::Lifetime { kind: hir::LifetimeParamKind::Explicit } - ) - }); - if named_lifetime_param_exist && let [param, ..] = ast_generics.params - { - add_lt_suggs.push(Some(( - self.tcx.def_span(param.def_id).shrink_to_lo(), - format!("{new_lt}, "), - ))); - } else { - add_lt_suggs - .push(Some((ast_generics.span.shrink_to_hi(), format!("<{new_lt}>")))); - } - } - } else { - if let [param, ..] = &generics.params[..] && let Some(def_id) = param.def_id.as_local() - { - add_lt_suggs - .push(Some((self.tcx.def_span(def_id).shrink_to_lo(), format!("{new_lt}, ")))); - } - } - - if let Some(ast_generics) = ast_generics { - for p in ast_generics.params { - if p.is_elided_lifetime() { - if self - .tcx - .sess - .source_map() - .span_to_prev_source(p.span.shrink_to_hi()) - .ok() - .is_some_and(|s| *s.as_bytes().last().unwrap() == b'&') - { - add_lt_suggs - .push(Some( - ( - p.span.shrink_to_hi(), - if let Ok(snip) = self.tcx.sess.source_map().span_to_next_source(p.span) - && snip.starts_with(' ') - { - new_lt.to_string() - } else { - format!("{new_lt} ") - } - ) - )); - } else { - add_lt_suggs.push(Some((p.span.shrink_to_hi(), format!("<{new_lt}>")))); - } - } - } + if let Some(SubregionOrigin::CompareImplItemObligation { + span, + impl_item_def_id, + trait_item_def_id, + }) = origin + { + return self.report_extra_impl_obligation( + span, + impl_item_def_id, + trait_item_def_id, + &format!("`{bound_kind}: {sub}`"), + ); } let labeled_user_string = match bound_kind { @@ -2437,223 +2360,211 @@ }, }; - if let Some(SubregionOrigin::CompareImplItemObligation { + let mut err = self.tcx.sess.struct_span_err_with_code( span, - impl_item_def_id, - trait_item_def_id, - }) = origin - { - return self.report_extra_impl_obligation( - span, - impl_item_def_id, - trait_item_def_id, - &format!("`{bound_kind}: {sub}`"), - ); + format!("{labeled_user_string} may not live long enough"), + match sub.kind() { + ty::ReEarlyBound(_) | ty::ReFree(_) if sub.has_name() => error_code!(E0309), + ty::ReStatic => error_code!(E0310), + _ => error_code!(E0311), + }, + ); + + '_explain: { + let (description, span) = match sub.kind() { + ty::ReEarlyBound(_) | ty::ReFree(_) | ty::ReStatic => { + msg_span_from_named_region(self.tcx, sub, Some(span)) + } + _ => (format!("lifetime `{sub}`"), Some(span)), + }; + let prefix = format!("{labeled_user_string} must be valid for "); + label_msg_span(&mut err, &prefix, description, span, "..."); 
+ if let Some(origin) = origin { + self.note_region_origin(&mut err, &origin); + } } - fn binding_suggestion<'tcx, S: fmt::Display>( - err: &mut Diagnostic, - type_param_span: Option<(Span, bool)>, - bound_kind: GenericKind<'tcx>, - sub: S, - add_lt_suggs: Vec>, - ) { + 'suggestion: { let msg = "consider adding an explicit lifetime bound"; - if let Some((sp, has_lifetimes)) = type_param_span { - let suggestion = - if has_lifetimes { format!(" + {sub}") } else { format!(": {sub}") }; - let mut suggestions = vec![(sp, suggestion)]; - for add_lt_sugg in add_lt_suggs.into_iter().flatten() { - suggestions.push(add_lt_sugg); + + if (bound_kind, sub).has_infer_regions() + || (bound_kind, sub).has_placeholders() + || !bound_kind.is_suggestable(self.tcx, false) + { + let lt_name = sub.get_name_or_anon().to_string(); + err.help(format!("{msg} `{bound_kind}: {lt_name}`...")); + break 'suggestion; + } + + let mut generic_param_scope = generic_param_scope; + while self.tcx.def_kind(generic_param_scope) == DefKind::OpaqueTy { + generic_param_scope = self.tcx.local_parent(generic_param_scope); + } + + // type_param_sugg_span is (span, has_bounds) + let (type_scope, type_param_sugg_span) = match bound_kind { + GenericKind::Param(ref param) => { + let generics = self.tcx.generics_of(generic_param_scope); + let def_id = generics.type_param(param, self.tcx).def_id.expect_local(); + let scope = self.tcx.local_def_id_to_hir_id(def_id).owner.def_id; + // Get the `hir::Param` to verify whether it already has any bounds. + // We do this to avoid suggesting code that ends up as `T: 'a'b`, + // instead we suggest `T: 'a + 'b` in that case. + let hir_generics = self.tcx.hir().get_generics(scope).unwrap(); + let sugg_span = match hir_generics.bounds_span_for_suggestions(def_id) { + Some(span) => Some((span, true)), + // If `param` corresponds to `Self`, no usable suggestion span. 
+ None if generics.has_self && param.index == 0 => None, + None => Some((self.tcx.def_span(def_id).shrink_to_hi(), false)), + }; + (scope, sugg_span) } - err.multipart_suggestion_verbose( - format!("{msg}..."), - suggestions, - Applicability::MaybeIncorrect, // Issue #41966 - ); + _ => (generic_param_scope, None), + }; + let suggestion_scope = { + let lifetime_scope = match sub.kind() { + ty::ReStatic => hir::def_id::CRATE_DEF_ID, + _ => match self.tcx.is_suitable_region(sub) { + Some(info) => info.def_id, + None => generic_param_scope, + }, + }; + match self.tcx.is_descendant_of(type_scope.into(), lifetime_scope.into()) { + true => type_scope, + false => lifetime_scope, + } + }; + + let mut suggs = vec![]; + let lt_name = self.suggest_name_region(sub, &mut suggs); + + if let Some((sp, has_lifetimes)) = type_param_sugg_span + && suggestion_scope == type_scope + { + let suggestion = + if has_lifetimes { format!(" + {lt_name}") } else { format!(": {lt_name}") }; + suggs.push((sp, suggestion)) + } else if let Some(generics) = self.tcx.hir().get_generics(suggestion_scope) { + let pred = format!("{bound_kind}: {lt_name}"); + let suggestion = format!("{} {}", generics.add_where_or_trailing_comma(), pred); + suggs.push((generics.tail_span_for_predicate_suggestion(), suggestion)) } else { let consider = format!("{msg} `{bound_kind}: {sub}`..."); err.help(consider); } + + if !suggs.is_empty() { + err.multipart_suggestion_verbose( + format!("{msg}"), + suggs, + Applicability::MaybeIncorrect, // Issue #41966 + ); + } } - let new_binding_suggestion = - |err: &mut Diagnostic, type_param_span: Option<(Span, bool)>| { - let msg = "consider introducing an explicit lifetime bound"; - if let Some((sp, has_lifetimes)) = type_param_span { - let suggestion = - if has_lifetimes { format!(" + {new_lt}") } else { format!(": {new_lt}") }; - let mut sugg = - vec![(sp, suggestion), (span.shrink_to_hi(), format!(" + {new_lt}"))]; - for lt in add_lt_suggs.clone().into_iter().flatten() { - sugg.push(lt); - sugg.rotate_right(1); - } - // `MaybeIncorrect` due to issue #41966. - err.multipart_suggestion(msg, sugg, Applicability::MaybeIncorrect); - } - }; + err + } - #[derive(Debug)] - enum SubOrigin<'hir> { - GAT(&'hir hir::Generics<'hir>), - Impl, - Trait, - Fn, - Unknown, - } - let sub_origin = 'origin: { - match *sub { - ty::ReEarlyBound(ty::EarlyBoundRegion { def_id, .. }) => { - let node = self.tcx.hir().get_if_local(def_id).unwrap(); - match node { - Node::GenericParam(param) => { - for h in self.tcx.hir().parent_iter(param.hir_id) { - break 'origin match h.1 { - Node::ImplItem(hir::ImplItem { - kind: hir::ImplItemKind::Type(..), - generics, - .. - }) - | Node::TraitItem(hir::TraitItem { - kind: hir::TraitItemKind::Type(..), - generics, - .. - }) => SubOrigin::GAT(generics), - Node::ImplItem(hir::ImplItem { - kind: hir::ImplItemKind::Fn(..), - .. - }) - | Node::TraitItem(hir::TraitItem { - kind: hir::TraitItemKind::Fn(..), - .. - }) - | Node::Item(hir::Item { - kind: hir::ItemKind::Fn(..), .. - }) => SubOrigin::Fn, - Node::Item(hir::Item { - kind: hir::ItemKind::Trait(..), - .. - }) => SubOrigin::Trait, - Node::Item(hir::Item { - kind: hir::ItemKind::Impl(..), .. 
- }) => SubOrigin::Impl, - _ => continue, - }; - } + pub fn suggest_name_region( + &self, + lifetime: Region<'tcx>, + add_lt_suggs: &mut Vec<(Span, String)>, + ) -> String { + struct LifetimeReplaceVisitor<'tcx, 'a> { + tcx: TyCtxt<'tcx>, + needle: hir::LifetimeName, + new_lt: &'a str, + add_lt_suggs: &'a mut Vec<(Span, String)>, + } + + impl<'hir, 'tcx> hir::intravisit::Visitor<'hir> for LifetimeReplaceVisitor<'tcx, '_> { + fn visit_lifetime(&mut self, lt: &'hir hir::Lifetime) { + if lt.res == self.needle { + let (pos, span) = lt.suggestion_position(); + let new_lt = &self.new_lt; + let sugg = match pos { + hir::LifetimeSuggestionPosition::Normal => format!("{new_lt}"), + hir::LifetimeSuggestionPosition::Ampersand => format!("{new_lt} "), + hir::LifetimeSuggestionPosition::ElidedPath => format!("<{new_lt}>"), + hir::LifetimeSuggestionPosition::ElidedPathArgument => { + format!("{new_lt}, ") } - _ => {} - } + hir::LifetimeSuggestionPosition::ObjectDefault => format!("+ {new_lt}"), + }; + self.add_lt_suggs.push((span, sugg)); } - _ => {} } - SubOrigin::Unknown - }; - debug!(?sub_origin); - let mut err = match (*sub, sub_origin) { - // In the case of GATs, we have to be careful. If we a type parameter `T` on an impl, - // but a lifetime `'a` on an associated type, then we might need to suggest adding - // `where T: 'a`. Importantly, this is on the GAT span, not on the `T` declaration. - (ty::ReEarlyBound(ty::EarlyBoundRegion { name: _, .. }), SubOrigin::GAT(generics)) => { - // Does the required lifetime have a nice name we can print? - let mut err = struct_span_err!( - self.tcx.sess, - span, - E0309, - "{} may not live long enough", - labeled_user_string - ); - let pred = format!("{bound_kind}: {sub}"); - let suggestion = format!("{} {}", generics.add_where_or_trailing_comma(), pred,); - err.span_suggestion( - generics.tail_span_for_predicate_suggestion(), - "consider adding a where clause", - suggestion, - Applicability::MaybeIncorrect, - ); - err + fn visit_ty(&mut self, ty: &'hir hir::Ty<'hir>) { + let hir::TyKind::OpaqueDef(item_id, _, _) = ty.kind else { + return hir::intravisit::walk_ty(self, ty); + }; + let opaque_ty = self.tcx.hir().item(item_id).expect_opaque_ty(); + if let Some(&(_, b)) = + opaque_ty.lifetime_mapping.iter().find(|&(a, _)| a.res == self.needle) + { + let prev_needle = + std::mem::replace(&mut self.needle, hir::LifetimeName::Param(b)); + for bound in opaque_ty.bounds { + self.visit_param_bound(bound); + } + self.needle = prev_needle; + } } - ( - ty::ReEarlyBound(ty::EarlyBoundRegion { name, .. }) - | ty::ReFree(ty::FreeRegion { bound_region: ty::BrNamed(_, name), .. }), - _, - ) if name != kw::UnderscoreLifetime => { - // Does the required lifetime have a nice name we can print? - let mut err = struct_span_err!( - self.tcx.sess, - span, - E0309, - "{} may not live long enough", - labeled_user_string - ); - // Explicitly use the name instead of `sub`'s `Display` impl. The `Display` impl - // for the bound is not suitable for suggestions when `-Zverbose` is set because it - // uses `Debug` output, so we handle it specially here so that suggestions are - // always correct. - binding_suggestion(&mut err, type_param_span, bound_kind, name, vec![]); - err - } - - (ty::ReStatic, _) => { - // Does the required lifetime have a nice name we can print? 
- let mut err = struct_span_err!( - self.tcx.sess, - span, - E0310, - "{} may not live long enough", - labeled_user_string - ); - binding_suggestion(&mut err, type_param_span, bound_kind, "'static", vec![]); - err + } + + let (lifetime_def_id, lifetime_scope) = match self.tcx.is_suitable_region(lifetime) { + Some(info) if !lifetime.has_name() => { + (info.boundregion.get_id().unwrap().expect_local(), info.def_id) } + _ => return lifetime.get_name_or_anon().to_string(), + }; - _ => { - // If not, be less specific. - let mut err = struct_span_err!( - self.tcx.sess, - span, - E0311, - "{} may not live long enough", - labeled_user_string - ); - note_and_explain_region( - self.tcx, - &mut err, - &format!("{labeled_user_string} must be valid for "), - sub, - "...", - None, - ); - if let Some(infer::RelateParamBound(_, t, _)) = origin { - let t = self.resolve_vars_if_possible(t); - match t.kind() { - // We've got: - // fn get_later(g: G, dest: &mut T) -> impl FnOnce() + '_ - // suggest: - // fn get_later<'a, G: 'a, T>(g: G, dest: &mut T) -> impl FnOnce() + '_ + 'a - ty::Closure(..) | ty::Alias(ty::Opaque, ..) => { - new_binding_suggestion(&mut err, type_param_span); - } - _ => { - binding_suggestion( - &mut err, - type_param_span, - bound_kind, - new_lt, - add_lt_suggs, - ); - } + let new_lt = { + let generics = self.tcx.generics_of(lifetime_scope); + let mut used_names = + iter::successors(Some(generics), |g| g.parent.map(|p| self.tcx.generics_of(p))) + .flat_map(|g| &g.params) + .filter(|p| matches!(p.kind, ty::GenericParamDefKind::Lifetime)) + .map(|p| p.name) + .collect::>(); + if let Some(hir_id) = self.tcx.opt_local_def_id_to_hir_id(lifetime_scope) { + // consider late-bound lifetimes ... + used_names.extend(self.tcx.late_bound_vars(hir_id).into_iter().filter_map(|p| { + match p { + ty::BoundVariableKind::Region(lt) => lt.get_name(), + _ => None, } - } - err + })) } + (b'a'..=b'z') + .map(|c| format!("'{}", c as char)) + .find(|candidate| !used_names.iter().any(|e| e.as_str() == candidate)) + .unwrap_or("'lt".to_string()) }; - if let Some(origin) = origin { - self.note_region_origin(&mut err, &origin); - } - err + let mut visitor = LifetimeReplaceVisitor { + tcx: self.tcx, + needle: hir::LifetimeName::Param(lifetime_def_id), + add_lt_suggs, + new_lt: &new_lt, + }; + match self.tcx.hir().expect_owner(lifetime_scope) { + hir::OwnerNode::Item(i) => visitor.visit_item(i), + hir::OwnerNode::ForeignItem(i) => visitor.visit_foreign_item(i), + hir::OwnerNode::ImplItem(i) => visitor.visit_impl_item(i), + hir::OwnerNode::TraitItem(i) => visitor.visit_trait_item(i), + hir::OwnerNode::Crate(_) => bug!("OwnerNode::Crate doesn't not have generics"), + } + + let ast_generics = self.tcx.hir().get_generics(lifetime_scope).unwrap(); + let sugg = ast_generics + .span_for_lifetime_suggestion() + .map(|span| (span, format!("{new_lt}, "))) + .unwrap_or_else(|| (ast_generics.span, format!("<{new_lt}>"))); + add_lt_suggs.push(sugg); + + new_lt } fn report_sub_sup_conflict( @@ -2925,7 +2836,7 @@ // say, also take a look at the error code, maybe we can // tailor to that. _ => match terr { - TypeError::CyclicTy(ty) if ty.is_closure() || ty.is_generator() => Error0644, + TypeError::CyclicTy(ty) if ty.is_closure() || ty.is_coroutine() => Error0644, TypeError::IntrinsicCast => Error0308, _ => Error0308, }, @@ -2972,7 +2883,7 @@ // say, also take a look at the error code, maybe we can // tailor to that. 
_ => match terr { - TypeError::CyclicTy(ty) if ty.is_closure() || ty.is_generator() => { + TypeError::CyclicTy(ty) if ty.is_closure() || ty.is_coroutine() => { ObligationCauseFailureCode::ClosureSelfref { span } } TypeError::IntrinsicCast => { @@ -3040,21 +2951,23 @@ Closure, Opaque, OpaqueFuture, - Generator(hir::GeneratorKind), + Coroutine(hir::CoroutineKind), Foreign, } -impl TyCategory { - fn descr(&self) -> &'static str { +impl fmt::Display for TyCategory { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Self::Closure => "closure", - Self::Opaque => "opaque type", - Self::OpaqueFuture => "future", - Self::Generator(gk) => gk.descr(), - Self::Foreign => "foreign type", + Self::Closure => "closure".fmt(f), + Self::Opaque => "opaque type".fmt(f), + Self::OpaqueFuture => "future".fmt(f), + Self::Coroutine(gk) => gk.fmt(f), + Self::Foreign => "foreign type".fmt(f), } } +} +impl TyCategory { pub fn from_ty(tcx: TyCtxt<'_>, ty: Ty<'_>) -> Option<(Self, DefId)> { match *ty.kind() { ty::Closure(def_id, _) => Some((Self::Closure, def_id)), @@ -3063,8 +2976,8 @@ if tcx.ty_is_opaque_future(ty) { Self::OpaqueFuture } else { Self::Opaque }; Some((kind, def_id)) } - ty::Generator(def_id, ..) => { - Some((Self::Generator(tcx.generator_kind(def_id).unwrap()), def_id)) + ty::Coroutine(def_id, ..) => { + Some((Self::Coroutine(tcx.coroutine_kind(def_id).unwrap()), def_id)) } ty::Foreign(def_id) => Some((Self::Foreign, def_id)), _ => None, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs 2023-12-21 16:55:28.000000000 +0000 @@ -27,7 +27,7 @@ pub enum TypeAnnotationNeeded { /// ```compile_fail,E0282 - /// let x = "hello".chars().rev().collect(); + /// let x; /// ``` E0282, /// An implementation cannot be chosen unambiguously because of lack of information. @@ -163,13 +163,13 @@ let ty_vars = infcx_inner.type_variables(); let var_origin = ty_vars.var_origin(ty_vid); if let TypeVariableOriginKind::TypeParameterDefinition(name, def_id) = var_origin.kind - && name != kw::SelfUpper && !var_origin.span.from_expansion() + && name != kw::SelfUpper + && !var_origin.span.from_expansion() { let generics = infcx.tcx.generics_of(infcx.tcx.parent(def_id)); let idx = generics.param_def_id_to_index(infcx.tcx, def_id).unwrap(); let generic_param_def = generics.param_at(idx as usize, infcx.tcx); - if let ty::GenericParamDefKind::Type { synthetic: true, .. } = generic_param_def.kind - { + if let ty::GenericParamDefKind::Type { synthetic: true, .. } = generic_param_def.kind { None } else { Some(name) @@ -200,12 +200,15 @@ ty: Ty<'tcx>, called_method_def_id: Option, ) -> String { - let printer = fmt_printer(infcx, Namespace::TypeNS); + let mut printer = fmt_printer(infcx, Namespace::TypeNS); let ty = infcx.resolve_vars_if_possible(ty); match (ty.kind(), called_method_def_id) { // We don't want the regular output for `fn`s because it includes its path in // invalid pseudo-syntax, we want the `fn`-pointer output instead. 
- (ty::FnDef(..), _) => ty.fn_sig(infcx.tcx).print(printer).unwrap().into_buffer(), + (ty::FnDef(..), _) => { + ty.fn_sig(infcx.tcx).print(&mut printer).unwrap(); + printer.into_buffer() + } (_, Some(def_id)) if ty.is_ty_or_numeric_infer() && infcx.tcx.get_diagnostic_item(sym::iterator_collect_fn) == Some(def_id) => @@ -218,7 +221,10 @@ // // We do have to hide the `extern "rust-call"` ABI in that case though, // which is too much of a bother for now. - _ => ty.print(printer).unwrap().into_buffer(), + _ => { + ty.print(&mut printer).unwrap(); + printer.into_buffer() + } } } @@ -285,8 +291,9 @@ if let Some(highlight) = highlight { printer.region_highlight_mode = highlight; } + ty.print(&mut printer).unwrap(); InferenceDiagnosticsData { - name: ty.print(printer).unwrap().into_buffer(), + name: printer.into_buffer(), span: None, kind: UnderspecifiedArgKind::Type { prefix: ty.prefix_string(self.tcx) }, parent: None, @@ -312,8 +319,9 @@ if let Some(highlight) = highlight { printer.region_highlight_mode = highlight; } + ct.print(&mut printer).unwrap(); InferenceDiagnosticsData { - name: ct.print(printer).unwrap().into_buffer(), + name: printer.into_buffer(), span: Some(origin.span), kind: UnderspecifiedArgKind::Const { is_parameter: false }, parent: None, @@ -329,8 +337,9 @@ if let Some(highlight) = highlight { printer.region_highlight_mode = highlight; } + ct.print(&mut printer).unwrap(); InferenceDiagnosticsData { - name: ct.print(printer).unwrap().into_buffer(), + name: printer.into_buffer(), span: None, kind: UnderspecifiedArgKind::Const { is_parameter: false }, parent: None, @@ -487,7 +496,8 @@ { "Vec<_>".to_string() } else { - fmt_printer(self, Namespace::TypeNS) + let mut printer = fmt_printer(self, Namespace::TypeNS); + printer .comma_sep(generic_args.iter().copied().map(|arg| { if arg.is_suggestable(self.tcx, true) { return arg; @@ -512,8 +522,8 @@ .into(), } })) - .unwrap() - .into_buffer() + .unwrap(); + printer.into_buffer() }; if !have_turbofish { @@ -525,8 +535,9 @@ } } InferSourceKind::FullyQualifiedMethodCall { receiver, successor, args, def_id } => { - let printer = fmt_printer(self, Namespace::ValueNS); - let def_path = printer.print_def_path(def_id, args).unwrap().into_buffer(); + let mut printer = fmt_printer(self, Namespace::ValueNS); + printer.print_def_path(def_id, args).unwrap(); + let def_path = printer.into_buffer(); // We only care about whether we have to add `&` or `&mut ` for now. // This is the case if the last adjustment is a borrow and the @@ -792,8 +803,9 @@ let cost = self.source_cost(&new_source) + self.attempt; debug!(?cost); self.attempt += 1; - if let Some(InferSource { kind: InferSourceKind::GenericArg { def_id: did, ..}, .. }) = self.infer_source - && let InferSourceKind::LetBinding { ref ty, ref mut def_id, ..} = new_source.kind + if let Some(InferSource { kind: InferSourceKind::GenericArg { def_id: did, .. }, .. }) = + self.infer_source + && let InferSourceKind::LetBinding { ref ty, ref mut def_id, .. } = new_source.kind && ty.is_ty_or_numeric_infer() { // Customize the output so we talk about `let x: Vec<_> = iter.collect();` instead of @@ -863,11 +875,11 @@ GenericArgKind::Type(ty) => { if matches!( ty.kind(), - ty::Alias(ty::Opaque, ..) | ty::Closure(..) | ty::Generator(..) + ty::Alias(ty::Opaque, ..) | ty::Closure(..) | ty::Coroutine(..) ) { // Opaque types can't be named by the user right now. // - // Both the generic arguments of closures and generators can + // Both the generic arguments of closures and coroutines can // also not be named. 
We may want to only look into the closure // signature in case it has no captures, as that can be represented // using `fn(T) -> R`. @@ -1242,7 +1254,7 @@ successor, args, def_id, - } + }, }) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,7 +3,7 @@ use crate::infer::lexical_region_resolve::RegionResolutionError::*; use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed}; use rustc_middle::ty::{self, TyCtxt}; -use rustc_span::source_map::Span; +use rustc_span::Span; mod different_lifetimes; pub mod find_anon_type; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_error.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_error.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_error.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_error.rs 2023-12-21 16:55:28.000000000 +0000 @@ -28,7 +28,7 @@ impl<'tcx, T> IntoDiagnosticArg for Highlighted<'tcx, T> where - T: for<'a> Print<'tcx, FmtPrinter<'a, 'tcx>, Error = fmt::Error, Output = FmtPrinter<'a, 'tcx>>, + T: for<'a> Print<'tcx, FmtPrinter<'a, 'tcx>>, { fn into_diagnostic_arg(self) -> rustc_errors::DiagnosticArgValue<'static> { rustc_errors::DiagnosticArgValue::Str(self.to_string().into()) @@ -43,14 +43,14 @@ impl<'tcx, T> fmt::Display for Highlighted<'tcx, T> where - T: for<'a> Print<'tcx, FmtPrinter<'a, 'tcx>, Error = fmt::Error, Output = FmtPrinter<'a, 'tcx>>, + T: for<'a> Print<'tcx, FmtPrinter<'a, 'tcx>>, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut printer = ty::print::FmtPrinter::new(self.tcx, Namespace::TypeNS); printer.region_highlight_mode = self.highlight; - let s = self.value.print(printer)?.into_buffer(); - f.write_str(&s) + self.value.print(&mut printer)?; + f.write_str(&printer.into_buffer()) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs 2023-12-21 16:55:28.000000000 +0000 @@ -214,7 +214,11 @@ ObligationCauseCode::MatchImpl(parent, ..) 
=> parent.code(), _ => cause.code(), } - && let (&ObligationCauseCode::ItemObligation(item_def_id) | &ObligationCauseCode::ExprItemObligation(item_def_id, ..), None) = (code, override_error_code) + && let ( + &ObligationCauseCode::ItemObligation(item_def_id) + | &ObligationCauseCode::ExprItemObligation(item_def_id, ..), + None, + ) = (code, override_error_code) { // Same case of `impl Foo for dyn Bar { fn qux(&self) {} }` introducing a `'static` // lifetime as above, but called using a fully-qualified path to the method: @@ -322,13 +326,27 @@ let existing_lt_name = if let Some(id) = scope_def_id && let Some(generics) = tcx.hir().get_generics(id) && let named_lifetimes = generics - .params - .iter() - .filter(|p| matches!(p.kind, GenericParamKind::Lifetime { kind: hir::LifetimeParamKind::Explicit })) - .map(|p| { if let hir::ParamName::Plain(name) = p.name {Some(name.to_string())} else {None}}) - .filter(|n| ! matches!(n, None)) - .collect::>() - && named_lifetimes.len() > 0 { + .params + .iter() + .filter(|p| { + matches!( + p.kind, + GenericParamKind::Lifetime { + kind: hir::LifetimeParamKind::Explicit + } + ) + }) + .map(|p| { + if let hir::ParamName::Plain(name) = p.name { + Some(name.to_string()) + } else { + None + } + }) + .filter(|n| !matches!(n, None)) + .collect::>() + && named_lifetimes.len() > 0 + { named_lifetimes[0].clone() } else { None @@ -342,30 +360,28 @@ .params .iter() .filter(|p| p.is_elided_lifetime()) - .map(|p| - if p.span.hi() - p.span.lo() == rustc_span::BytePos(1) { // Ampersand (elided without '_) - (p.span.shrink_to_hi(),format!("{name} ")) - } else { // Underscore (elided with '_) - (p.span, name.to_string()) - } - ) + .map(|p| { + if p.span.hi() - p.span.lo() == rustc_span::BytePos(1) { + // Ampersand (elided without '_) + (p.span.shrink_to_hi(), format!("{name} ")) + } else { + // Underscore (elided with '_) + (p.span, name.to_string()) + } + }) .collect::>() && spans_suggs.len() > 1 { - let use_lt = - if existing_lt_name == None { + let use_lt = if existing_lt_name == None { spans_suggs.push((generics.span.shrink_to_hi(), format!("<{name}>"))); format!("you can introduce a named lifetime parameter `{name}`") } else { // make use the existing named lifetime format!("you can use the named lifetime parameter `{name}`") }; - spans_suggs - .push((fn_return.span.shrink_to_hi(), format!(" + {name} "))); + spans_suggs.push((fn_return.span.shrink_to_hi(), format!(" + {name} "))); err.multipart_suggestion_verbose( - format!( - "{declare} `{ty}` {captures}, {use_lt}", - ), + format!("{declare} `{ty}` {captures}, {use_lt}",), spans_suggs, Applicability::MaybeIncorrect, ); @@ -443,8 +459,7 @@ let trait_did = trait_id.to_def_id(); tcx.hir().trait_impls(trait_did).iter().find_map(|&impl_did| { if let Node::Item(Item { - kind: ItemKind::Impl(hir::Impl { self_ty, .. }), - .. + kind: ItemKind::Impl(hir::Impl { self_ty, .. }), .. }) = tcx.hir().find_by_def_id(impl_did)? 
&& trait_objects.iter().all(|did| { // FIXME: we should check `self_ty` against the receiver diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/note_and_explain.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/note_and_explain.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/note_and_explain.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/note_and_explain.rs 2023-12-21 16:55:28.000000000 +0000 @@ -54,13 +54,17 @@ } (ty::Param(expected), ty::Param(found)) => { let generics = tcx.generics_of(body_owner_def_id); - let e_span = tcx.def_span(generics.type_param(expected, tcx).def_id); - if !sp.contains(e_span) { - diag.span_label(e_span, "expected type parameter"); - } - let f_span = tcx.def_span(generics.type_param(found, tcx).def_id); - if !sp.contains(f_span) { - diag.span_label(f_span, "found type parameter"); + if let Some(param) = generics.opt_type_param(expected, tcx) { + let e_span = tcx.def_span(param.def_id); + if !sp.contains(e_span) { + diag.span_label(e_span, "expected type parameter"); + } + } + if let Some(param) = generics.opt_type_param(found, tcx) { + let f_span = tcx.def_span(param.def_id); + if !sp.contains(f_span) { + diag.span_label(f_span, "found type parameter"); + } } diag.note( "a type parameter was expected, but a different one was found; \ @@ -72,32 +76,41 @@ #traits-as-parameters", ); } - (ty::Alias(ty::Projection | ty::Inherent, _), ty::Alias(ty::Projection | ty::Inherent, _)) => { + ( + ty::Alias(ty::Projection | ty::Inherent, _), + ty::Alias(ty::Projection | ty::Inherent, _), + ) => { diag.note("an associated type was expected, but a different one was found"); } // FIXME(inherent_associated_types): Extend this to support `ty::Inherent`, too. - (ty::Param(p), ty::Alias(ty::Projection, proj)) | (ty::Alias(ty::Projection, proj), ty::Param(p)) + (ty::Param(p), ty::Alias(ty::Projection, proj)) + | (ty::Alias(ty::Projection, proj), ty::Param(p)) if !tcx.is_impl_trait_in_trait(proj.def_id) => { - let p_def_id = tcx - .generics_of(body_owner_def_id) - .type_param(p, tcx) - .def_id; - let p_span = tcx.def_span(p_def_id); - if !sp.contains(p_span) { - diag.span_label(p_span, "this type parameter"); - } - let hir = tcx.hir(); - let mut note = true; - let parent = p_def_id - .as_local() - .and_then(|id| { - let local_id = hir.local_def_id_to_hir_id(id); - let generics = tcx.hir().find_parent(local_id)?.generics()?; - Some((id, generics)) + let parent = tcx.generics_of(body_owner_def_id) + .opt_type_param(p, tcx) + .and_then(|param| { + let p_def_id = param.def_id; + let p_span = tcx.def_span(p_def_id); + let expected = match (values.expected.kind(), values.found.kind()) { + (ty::Param(_), _) => "expected ", + (_, ty::Param(_)) => "found ", + _ => "", + }; + if !sp.contains(p_span) { + diag.span_label( + p_span, + format!("{expected}this type parameter"), + ); + } + p_def_id.as_local().and_then(|id| { + let local_id = tcx.hir().local_def_id_to_hir_id(id); + let generics = tcx.hir().find_parent(local_id)?.generics()?; + Some((id, generics)) + }) }); - if let Some((local_id, generics)) = parent - { + let mut note = true; + if let Some((local_id, generics)) = parent { // Synthesize the associated type restriction `Add`. // FIXME: extract this logic for use in other diagnostics. 
let (trait_ref, assoc_args) = proj.trait_ref_and_own_args(tcx); @@ -112,15 +125,17 @@ let mut matching_span = None; let mut matched_end_of_args = false; for bound in generics.bounds_for_param(local_id) { - let potential_spans = bound - .bounds - .iter() - .find_map(|bound| { - let bound_trait_path = bound.trait_ref()?.path; - let def_id = bound_trait_path.res.opt_def_id()?; - let generic_args = bound_trait_path.segments.iter().last().map(|path| path.args()); - (def_id == trait_ref.def_id).then_some((bound_trait_path.span, generic_args)) - }); + let potential_spans = bound.bounds.iter().find_map(|bound| { + let bound_trait_path = bound.trait_ref()?.path; + let def_id = bound_trait_path.res.opt_def_id()?; + let generic_args = bound_trait_path + .segments + .iter() + .last() + .map(|path| path.args()); + (def_id == trait_ref.def_id) + .then_some((bound_trait_path.span, generic_args)) + }); if let Some((end_of_trait, end_of_args)) = potential_spans { let args_span = end_of_args.and_then(|args| args.span()); @@ -167,9 +182,16 @@ (ty::Param(p), ty::Dynamic(..) | ty::Alias(ty::Opaque, ..)) | (ty::Dynamic(..) | ty::Alias(ty::Opaque, ..), ty::Param(p)) => { let generics = tcx.generics_of(body_owner_def_id); - let p_span = tcx.def_span(generics.type_param(p, tcx).def_id); - if !sp.contains(p_span) { - diag.span_label(p_span, "this type parameter"); + if let Some(param) = generics.opt_type_param(p, tcx) { + let p_span = tcx.def_span(param.def_id); + let expected = match (values.expected.kind(), values.found.kind()) { + (ty::Param(_), _) => "expected ", + (_, ty::Param(_)) => "found ", + _ => "", + }; + if !sp.contains(p_span) { + diag.span_label(p_span, format!("{expected}this type parameter")); + } } diag.help("type parameters must be constrained to match other types"); if tcx.sess.teach(&diag.get_code().unwrap()) { @@ -205,11 +227,13 @@ #traits-as-parameters", ); } - (ty::Param(p), ty::Closure(..) | ty::Generator(..)) => { + (ty::Param(p), ty::Closure(..) 
| ty::Coroutine(..)) => { let generics = tcx.generics_of(body_owner_def_id); - let p_span = tcx.def_span(generics.type_param(p, tcx).def_id); - if !sp.contains(p_span) { - diag.span_label(p_span, "this type parameter"); + if let Some(param) = generics.opt_type_param(p, tcx) { + let p_span = tcx.def_span(param.def_id); + if !sp.contains(p_span) { + diag.span_label(p_span, "expected this type parameter"); + } } diag.help(format!( "every closure has a distinct type and so could not always match the \ @@ -218,12 +242,21 @@ } (ty::Param(p), _) | (_, ty::Param(p)) => { let generics = tcx.generics_of(body_owner_def_id); - let p_span = tcx.def_span(generics.type_param(p, tcx).def_id); - if !sp.contains(p_span) { - diag.span_label(p_span, "this type parameter"); + if let Some(param) = generics.opt_type_param(p, tcx) { + let p_span = tcx.def_span(param.def_id); + let expected = match (values.expected.kind(), values.found.kind()) { + (ty::Param(_), _) => "expected ", + (_, ty::Param(_)) => "found ", + _ => "", + }; + if !sp.contains(p_span) { + diag.span_label(p_span, format!("{expected}this type parameter")); + } } } - (ty::Alias(ty::Projection | ty::Inherent, proj_ty), _) if !tcx.is_impl_trait_in_trait(proj_ty.def_id) => { + (ty::Alias(ty::Projection | ty::Inherent, proj_ty), _) + if !tcx.is_impl_trait_in_trait(proj_ty.def_id) => + { self.expected_projection( diag, proj_ty, @@ -232,11 +265,15 @@ cause.code(), ); } - (_, ty::Alias(ty::Projection | ty::Inherent, proj_ty)) if !tcx.is_impl_trait_in_trait(proj_ty.def_id) => { - let msg = || format!( - "consider constraining the associated type `{}` to `{}`", - values.found, values.expected, - ); + (_, ty::Alias(ty::Projection | ty::Inherent, proj_ty)) + if !tcx.is_impl_trait_in_trait(proj_ty.def_id) => + { + let msg = || { + format!( + "consider constraining the associated type `{}` to `{}`", + values.found, values.expected, + ) + }; if !(self.suggest_constraining_opaque_associated_type( diag, msg, @@ -256,19 +293,40 @@ ); } } - (ty::Alias(ty::Opaque, alias), _) | (_, ty::Alias(ty::Opaque, alias)) if alias.def_id.is_local() && matches!(tcx.def_kind(body_owner_def_id), DefKind::Fn | DefKind::Static(_) | DefKind::Const | DefKind::AssocFn | DefKind::AssocConst) => { + (ty::Alias(ty::Opaque, alias), _) | (_, ty::Alias(ty::Opaque, alias)) + if alias.def_id.is_local() + && matches!( + tcx.def_kind(body_owner_def_id), + DefKind::Fn + | DefKind::Static(_) + | DefKind::Const + | DefKind::AssocFn + | DefKind::AssocConst + ) => + { if tcx.is_type_alias_impl_trait(alias.def_id) { - if !tcx.opaque_types_defined_by(body_owner_def_id.expect_local()).contains(&alias.def_id.expect_local()) { - let sp = tcx.def_ident_span(body_owner_def_id).unwrap_or_else(|| tcx.def_span(body_owner_def_id)); - diag.span_note(sp, "\ + if !tcx + .opaque_types_defined_by(body_owner_def_id.expect_local()) + .contains(&alias.def_id.expect_local()) + { + let sp = tcx + .def_ident_span(body_owner_def_id) + .unwrap_or_else(|| tcx.def_span(body_owner_def_id)); + diag.span_note( + sp, + "\ this item must have the opaque type in its signature \ - in order to be able to register hidden types"); + in order to be able to register hidden types", + ); } } } - (ty::FnPtr(sig), ty::FnDef(def_id, _)) | (ty::FnDef(def_id, _), ty::FnPtr(sig)) => { + (ty::FnPtr(sig), ty::FnDef(def_id, _)) + | (ty::FnDef(def_id, _), ty::FnPtr(sig)) => { if tcx.fn_sig(*def_id).skip_binder().unsafety() < sig.unsafety() { - diag.note("unsafe functions cannot be coerced into safe function pointers"); + diag.note( + "unsafe 
functions cannot be coerced into safe function pointers", + ); } } _ => {} @@ -283,7 +341,7 @@ } CyclicTy(ty) => { // Watch out for various cases of cyclic types and try to explain. - if ty.is_closure() || ty.is_generator() { + if ty.is_closure() || ty.is_coroutine() { diag.note( "closures cannot capture themselves or take themselves as argument;\n\ this error may be the result of a recent compiler bug-fix,\n\ @@ -314,39 +372,53 @@ let tcx = self.tcx; let assoc = tcx.associated_item(proj_ty.def_id); let (trait_ref, assoc_args) = proj_ty.trait_ref_and_own_args(tcx); - if let Some(item) = tcx.hir().get_if_local(body_owner_def_id) { - if let Some(hir_generics) = item.generics() { - // Get the `DefId` for the type parameter corresponding to `A` in `::Foo`. - // This will also work for `impl Trait`. - let def_id = if let ty::Param(param_ty) = proj_ty.self_ty().kind() { - let generics = tcx.generics_of(body_owner_def_id); - generics.type_param(param_ty, tcx).def_id - } else { - return false; - }; - let Some(def_id) = def_id.as_local() else { - return false; - }; - - // First look in the `where` clause, as this might be - // `fn foo(x: T) where T: Trait`. - for pred in hir_generics.bounds_for_param(def_id) { - if self.constrain_generic_bound_associated_type_structured_suggestion( - diag, - &trait_ref, - pred.bounds, - assoc, - assoc_args, - ty, - &msg, - false, - ) { - return true; - } - } + let Some(item) = tcx.hir().get_if_local(body_owner_def_id) else { + return false; + }; + let Some(hir_generics) = item.generics() else { + return false; + }; + // Get the `DefId` for the type parameter corresponding to `A` in `::Foo`. + // This will also work for `impl Trait`. + let ty::Param(param_ty) = proj_ty.self_ty().kind() else { + return false; + }; + let generics = tcx.generics_of(body_owner_def_id); + let Some(param) = generics.opt_type_param(param_ty, tcx) else { + return false; + }; + let Some(def_id) = param.def_id.as_local() else { + return false; + }; + + // First look in the `where` clause, as this might be + // `fn foo(x: T) where T: Trait`. + for pred in hir_generics.bounds_for_param(def_id) { + if self.constrain_generic_bound_associated_type_structured_suggestion( + diag, + &trait_ref, + pred.bounds, + assoc, + assoc_args, + ty, + &msg, + false, + ) { + return true; } } - false + if (param_ty.index as usize) >= generics.parent_count { + // The param comes from the current item, do not look at the parent. (#117209) + return false; + } + // If associated item, look to constrain the params of the trait/impl. + let hir_id = match item { + hir::Node::ImplItem(item) => item.hir_id(), + hir::Node::TraitItem(item) => item.hir_id(), + _ => return false, + }; + let parent = tcx.hir().get_parent_item(hir_id).def_id; + self.suggest_constraint(diag, msg, parent.into(), proj_ty, ty) } /// An associated type was expected and a different type was found. @@ -399,21 +471,26 @@ let impl_comparison = matches!(cause_code, ObligationCauseCode::CompareImplItemObligation { .. }); let assoc = tcx.associated_item(proj_ty.def_id); - if !callable_scope || impl_comparison { + if impl_comparison { // We do not want to suggest calling functions when the reason of the - // type error is a comparison of an `impl` with its `trait` or when the - // scope is outside of a `Body`. + // type error is a comparison of an `impl` with its `trait`. 
} else { - // If we find a suitable associated function that returns the expected type, we don't - // want the more general suggestion later in this method about "consider constraining - // the associated type or calling a method that returns the associated type". - let point_at_assoc_fn = self.point_at_methods_that_satisfy_associated_type( - diag, - assoc.container_id(tcx), - current_method_ident, - proj_ty.def_id, - values.expected, - ); + let point_at_assoc_fn = if callable_scope + && self.point_at_methods_that_satisfy_associated_type( + diag, + assoc.container_id(tcx), + current_method_ident, + proj_ty.def_id, + values.expected, + ) { + // If we find a suitable associated function that returns the expected type, we + // don't want the more general suggestion later in this method about "consider + // constraining the associated type or calling a method that returns the associated + // type". + true + } else { + false + }; // Possibly suggest constraining the associated type to conform to the // found type. if self.suggest_constraint(diag, &msg, body_owner_def_id, proj_ty, values.found) @@ -616,7 +693,8 @@ for item in &items[..] { if let hir::AssocItemKind::Type = item.kind { let assoc_ty = tcx.type_of(item.id.owner_id).instantiate_identity(); - if let hir::Defaultness::Default { has_value: true } = tcx.defaultness(item.id.owner_id) + if let hir::Defaultness::Default { has_value: true } = + tcx.defaultness(item.id.owner_id) && self.infcx.can_eq(param_env, assoc_ty, found) { diag.span_label( @@ -706,9 +784,9 @@ } pub fn format_generic_args(&self, args: &[ty::GenericArg<'tcx>]) -> String { - FmtPrinter::new(self.tcx, hir::def::Namespace::TypeNS) - .path_generic_args(Ok, args) - .expect("could not write to `String`.") - .into_buffer() + FmtPrinter::print_string(self.tcx, hir::def::Namespace::TypeNS, |cx| { + cx.path_generic_args(|_| Ok(()), args) + }) + .expect("could not write to `String`.") } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/suggest.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/suggest.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/suggest.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/error_reporting/suggest.rs 2023-12-21 16:55:28.000000000 +0000 @@ -491,12 +491,17 @@ fn visit_stmt(&mut self, ex: &'v hir::Stmt<'v>) { if let hir::StmtKind::Local(hir::Local { - span, pat: hir::Pat{..}, ty: None, init: Some(_), .. - }) = &ex.kind - && self.found_if - && span.eq(&self.err_span) { - self.result = true; - } + span, + pat: hir::Pat { .. }, + ty: None, + init: Some(_), + .. + }) = &ex.kind + && self.found_if + && span.eq(&self.err_span) + { + self.result = true; + } walk_stmt(self, ex); } @@ -546,45 +551,59 @@ let expected = expected.unpack(); let found = found.unpack(); // 3. Extract the tuple type from Fn trait and suggest the change. 
- if let GenericArgKind::Type(expected) = expected && - let GenericArgKind::Type(found) = found && - let ty::Tuple(expected) = expected.kind() && - let ty::Tuple(found)= found.kind() && - expected.len() == found.len() { + if let GenericArgKind::Type(expected) = expected + && let GenericArgKind::Type(found) = found + && let ty::Tuple(expected) = expected.kind() + && let ty::Tuple(found) = found.kind() + && expected.len() == found.len() + { let mut suggestion = "|".to_string(); let mut is_first = true; let mut has_suggestion = false; - for (((expected, found), param_hir), arg_hir) in expected.iter() - .zip(found.iter()) - .zip(params.iter()) - .zip(fn_decl.inputs.iter()) { + for (((expected, found), param_hir), arg_hir) in + expected.iter().zip(found.iter()).zip(params.iter()).zip(fn_decl.inputs.iter()) + { if is_first { is_first = false; } else { suggestion += ", "; } - if let ty::Ref(expected_region, _, _) = expected.kind() && - let ty::Ref(found_region, _, _) = found.kind() && - expected_region.is_late_bound() && - !found_region.is_late_bound() && - let hir::TyKind::Infer = arg_hir.kind { + if let ty::Ref(expected_region, _, _) = expected.kind() + && let ty::Ref(found_region, _, _) = found.kind() + && expected_region.is_late_bound() + && !found_region.is_late_bound() + && let hir::TyKind::Infer = arg_hir.kind + { // If the expected region is late bound, the found region is not, and users are asking compiler // to infer the type, we can suggest adding `: &_`. if param_hir.pat.span == param_hir.ty_span { // for `|x|`, `|_|`, `|x: impl Foo|` - let Ok(pat) = self.tcx.sess.source_map().span_to_snippet(param_hir.pat.span) else { return; }; + let Ok(pat) = + self.tcx.sess.source_map().span_to_snippet(param_hir.pat.span) + else { + return; + }; suggestion += &format!("{pat}: &_"); } else { // for `|x: ty|`, `|_: ty|` - let Ok(pat) = self.tcx.sess.source_map().span_to_snippet(param_hir.pat.span) else { return; }; - let Ok(ty) = self.tcx.sess.source_map().span_to_snippet(param_hir.ty_span) else { return; }; + let Ok(pat) = + self.tcx.sess.source_map().span_to_snippet(param_hir.pat.span) + else { + return; + }; + let Ok(ty) = self.tcx.sess.source_map().span_to_snippet(param_hir.ty_span) + else { + return; + }; suggestion += &format!("{pat}: &{ty}"); } has_suggestion = true; } else { - let Ok(arg) = self.tcx.sess.source_map().span_to_snippet(param_hir.span) else { return; }; + let Ok(arg) = self.tcx.sess.source_map().span_to_snippet(param_hir.span) else { + return; + }; // Otherwise, keep it as-is. 
suggestion += &arg; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/freshen.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/freshen.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/freshen.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/freshen.rs 2023-12-21 16:55:28.000000000 +0000 @@ -42,7 +42,7 @@ ty_freshen_count: u32, const_freshen_count: u32, ty_freshen_map: FxHashMap>, - const_freshen_map: FxHashMap, ty::Const<'tcx>>, + const_freshen_map: FxHashMap>, } impl<'a, 'tcx> TypeFreshener<'a, 'tcx> { @@ -79,12 +79,12 @@ fn freshen_const( &mut self, opt_ct: Option>, - key: ty::InferConst<'tcx>, + key: ty::InferConst, freshener: F, ty: Ty<'tcx>, ) -> ty::Const<'tcx> where - F: FnOnce(u32) -> ty::InferConst<'tcx>, + F: FnOnce(u32) -> ty::InferConst, { if let Some(ct) = opt_ct { return ct.fold_with(self); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/fudge.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/fudge.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/fudge.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/fudge.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,4 @@ +use rustc_middle::infer::unify_key::ConstVidKey; use rustc_middle::ty::fold::{TypeFoldable, TypeFolder, TypeSuperFoldable}; use rustc_middle::ty::{self, ConstVid, FloatVid, IntVid, RegionVid, Ty, TyCtxt, TyVid}; @@ -23,14 +24,14 @@ } fn const_vars_since_snapshot<'tcx>( - table: &mut UnificationTable<'_, 'tcx, ConstVid<'tcx>>, + table: &mut UnificationTable<'_, 'tcx, ConstVidKey<'tcx>>, snapshot_var_len: usize, -) -> (Range>, Vec) { +) -> (Range, Vec) { let range = vars_since_snapshot(table, snapshot_var_len); ( - range.start..range.end, - (range.start.index..range.end.index) - .map(|index| table.probe_value(ConstVid::from_index(index)).origin) + range.start.vid..range.end.vid, + (range.start.index()..range.end.index()) + .map(|index| table.probe_value(ConstVid::from_u32(index)).origin) .collect(), ) } @@ -172,7 +173,7 @@ int_vars: Range, float_vars: Range, region_vars: (Range, Vec), - const_vars: (Range>, Vec), + const_vars: (Range, Vec), } impl<'a, 'tcx> TypeFolder> for InferenceFudger<'a, 'tcx> { @@ -220,7 +221,9 @@ } fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { - if let ty::ReVar(vid) = *r && self.region_vars.0.contains(&vid) { + if let ty::ReVar(vid) = *r + && self.region_vars.0.contains(&vid) + { let idx = vid.index() - self.region_vars.0.start.index(); let origin = self.region_vars.1[idx]; return self.infcx.next_region_var(origin); @@ -233,7 +236,7 @@ if self.const_vars.0.contains(&vid) { // This variable was created during the fudging. // Recreate it with a fresh variable here. 
- let idx = (vid.index - self.const_vars.0.start.index) as usize; + let idx = (vid.index() - self.const_vars.0.start.index()) as usize; let origin = self.const_vars.1[idx]; self.infcx.next_const_var(ct.ty(), origin) } else { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/generalize.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/generalize.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/generalize.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/generalize.rs 2023-12-21 16:55:28.000000000 +0000 @@ -17,7 +17,7 @@ infcx: &InferCtxt<'tcx>, delegate: &mut D, term: T, - for_vid: impl Into>, + for_vid: impl Into, ambient_variance: ty::Variance, ) -> RelateResult<'tcx, Generalization> { let (for_universe, root_vid) = match for_vid.into() { @@ -27,7 +27,7 @@ ), ty::TermVid::Const(ct_vid) => ( infcx.probe_const_var(ct_vid).unwrap_err(), - ty::TermVid::Const(infcx.inner.borrow_mut().const_unification_table().find(ct_vid)), + ty::TermVid::Const(infcx.inner.borrow_mut().const_unification_table().find(ct_vid).vid), ), }; @@ -127,7 +127,7 @@ /// The vid of the type variable that is in the process of being /// instantiated. If we find this within the value we are folding, /// that means we would have created a cyclic value. - root_vid: ty::TermVid<'tcx>, + root_vid: ty::TermVid, /// The universe of the type variable that is in the process of being /// instantiated. If we find anything that this universe cannot name, @@ -183,7 +183,7 @@ // Avoid fetching the variance if we are in an invariant // context; no need, and it can induce dependency cycles // (e.g., #41849). - relate::relate_args(self, a_subst, b_subst) + relate::relate_args_invariantly(self, a_subst, b_subst) } else { let tcx = self.tcx(); let opt_variances = tcx.variances_of(item_def_id); @@ -376,7 +376,7 @@ // `vid` are related and we'd be inferring an infinitely // deep const. if ty::TermVid::Const( - self.infcx.inner.borrow_mut().const_unification_table().find(vid), + self.infcx.inner.borrow_mut().const_unification_table().find(vid).vid, ) == self.root_vid { return Err(self.cyclic_term_error()); @@ -394,10 +394,14 @@ if self.for_universe.can_name(universe) { Ok(c) } else { - let new_var_id = variable_table.new_key(ConstVarValue { - origin: var_value.origin, - val: ConstVariableValue::Unknown { universe: self.for_universe }, - }); + let new_var_id = variable_table + .new_key(ConstVarValue { + origin: var_value.origin, + val: ConstVariableValue::Unknown { + universe: self.for_universe, + }, + }) + .vid; Ok(ty::Const::new_var(self.tcx(), new_var_id, c.ty())) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -346,7 +346,9 @@ // tighter bound than `'static`. // // (This might e.g. arise from being asked to prove `for<'a> { 'b: 'a }`.) 
- if let ty::RePlaceholder(p) = *lub && b_universe.cannot_name(p.universe) { + if let ty::RePlaceholder(p) = *lub + && b_universe.cannot_name(p.universe) + { lub = self.tcx().lifetimes.re_static; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,7 +6,9 @@ pub use self::SubregionOrigin::*; pub use self::ValuePairs::*; pub use combine::ObligationEmittingRelation; +use rustc_data_structures::captures::Captures; use rustc_data_structures::undo_log::UndoLogs; +use rustc_middle::infer::unify_key::{ConstVidKey, EffectVidKey}; use self::opaque_types::OpaqueTypeStorage; pub(crate) use self::undo_log::{InferCtxtUndoLogs, Snapshot, UndoLog}; @@ -36,11 +38,10 @@ use rustc_middle::ty::{ConstVid, EffectVid, FloatVid, IntVid, TyVid}; use rustc_middle::ty::{GenericArg, GenericArgKind, GenericArgs, GenericArgsRef}; use rustc_span::symbol::Symbol; -use rustc_span::Span; +use rustc_span::{Span, DUMMY_SP}; use std::cell::{Cell, RefCell}; use std::fmt; -use std::marker::PhantomData; use self::combine::CombineFields; use self::error_reporting::TypeErrCtxt; @@ -85,7 +86,7 @@ pub type InferResult<'tcx, T> = Result, TypeError<'tcx>>; pub type UnitResult<'tcx> = RelateResult<'tcx, ()>; // "unify result" -pub type FixupResult<'tcx, T> = Result>; // "fixup result" +pub type FixupResult = Result; // "fixup result" pub(crate) type UnificationTable<'a, 'tcx, T> = ut::UnificationTable< ut::InPlace, &'a mut InferCtxtUndoLogs<'tcx>>, @@ -108,7 +109,7 @@ type_variable_storage: type_variable::TypeVariableStorage<'tcx>, /// Map from const parameter variable to the kind of const it represents. - const_unification_storage: ut::UnificationTableStorage>, + const_unification_storage: ut::UnificationTableStorage>, /// Map from integral variable to the kind of integer it represents. int_unification_storage: ut::UnificationTableStorage, @@ -117,7 +118,7 @@ float_unification_storage: ut::UnificationTableStorage, /// Map from effect variable to the effect param it represents. - effect_unification_storage: ut::UnificationTableStorage>, + effect_unification_storage: ut::UnificationTableStorage>, /// Tracks the set of region variables and the constraints between them. 
/// @@ -224,11 +225,11 @@ } #[inline] - fn const_unification_table(&mut self) -> UnificationTable<'_, 'tcx, ty::ConstVid<'tcx>> { + fn const_unification_table(&mut self) -> UnificationTable<'_, 'tcx, ConstVidKey<'tcx>> { self.const_unification_storage.with_log(&mut self.undo_log) } - fn effect_unification_table(&mut self) -> UnificationTable<'_, 'tcx, ty::EffectVid<'tcx>> { + fn effect_unification_table(&mut self) -> UnificationTable<'_, 'tcx, EffectVidKey<'tcx>> { self.effect_unification_storage.with_log(&mut self.undo_log) } @@ -341,7 +342,9 @@ next_trait_solver: bool, } -impl<'tcx> ty::InferCtxtLike> for InferCtxt<'tcx> { +impl<'tcx> ty::InferCtxtLike for InferCtxt<'tcx> { + type Interner = TyCtxt<'tcx>; + fn universe_of_ty(&self, ty: ty::InferTy) -> Option { use InferTy::*; match ty { @@ -357,7 +360,7 @@ } } - fn universe_of_ct(&self, ct: ty::InferConst<'tcx>) -> Option { + fn universe_of_ct(&self, ct: ty::InferConst) -> Option { use ty::InferConst::*; match ct { // Same issue as with `universe_of_ty` @@ -546,11 +549,11 @@ // FIXME(eddyb) investigate overlap between this and `TyOrConstInferVar`. #[derive(Copy, Clone, Debug)] -pub enum FixupError<'tcx> { +pub enum FixupError { UnresolvedIntTy(IntVid), UnresolvedFloatTy(FloatVid), UnresolvedTy(TyVid), - UnresolvedConst(ConstVid<'tcx>), + UnresolvedConst(ConstVid), } /// See the `region_obligations` field for more information. @@ -561,7 +564,7 @@ pub origin: SubregionOrigin<'tcx>, } -impl<'tcx> fmt::Display for FixupError<'tcx> { +impl fmt::Display for FixupError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use self::FixupError::*; @@ -792,7 +795,7 @@ let mut table = inner.effect_unification_table(); (0..table.len()) - .map(|i| ty::EffectVid { index: i as u32, phantom: PhantomData }) + .map(|i| ty::EffectVid::from_usize(i)) .filter(|&vid| table.probe_value(vid).is_none()) .map(|v| { ty::Const::new_infer(self.tcx, ty::InferConst::EffectVar(v), self.tcx.types.bool) @@ -1070,15 +1073,20 @@ .inner .borrow_mut() .const_unification_table() - .new_key(ConstVarValue { origin, val: ConstVariableValue::Unknown { universe } }); + .new_key(ConstVarValue { origin, val: ConstVariableValue::Unknown { universe } }) + .vid; ty::Const::new_var(self.tcx, vid, ty) } - pub fn next_const_var_id(&self, origin: ConstVariableOrigin) -> ConstVid<'tcx> { - self.inner.borrow_mut().const_unification_table().new_key(ConstVarValue { - origin, - val: ConstVariableValue::Unknown { universe: self.universe() }, - }) + pub fn next_const_var_id(&self, origin: ConstVariableOrigin) -> ConstVid { + self.inner + .borrow_mut() + .const_unification_table() + .new_key(ConstVarValue { + origin, + val: ConstVariableValue::Unknown { universe: self.universe() }, + }) + .vid } fn next_int_var_id(&self) -> IntVid { @@ -1192,11 +1200,15 @@ ), span, }; - let const_var_id = - self.inner.borrow_mut().const_unification_table().new_key(ConstVarValue { + let const_var_id = self + .inner + .borrow_mut() + .const_unification_table() + .new_key(ConstVarValue { origin, val: ConstVariableValue::Unknown { universe: self.universe() }, - }); + }) + .vid; ty::Const::new_var( self.tcx, const_var_id, @@ -1211,7 +1223,7 @@ } pub fn var_for_effect(&self, param: &ty::GenericParamDef) -> GenericArg<'tcx> { - let effect_vid = self.inner.borrow_mut().effect_unification_table().new_key(None); + let effect_vid = self.inner.borrow_mut().effect_unification_table().new_key(None).vid; let ty = self .tcx .type_of(param.def_id) @@ -1331,12 +1343,12 @@ 
self.inner.borrow_mut().type_variables().root_var(var) } - pub fn root_const_var(&self, var: ty::ConstVid<'tcx>) -> ty::ConstVid<'tcx> { - self.inner.borrow_mut().const_unification_table().find(var) + pub fn root_const_var(&self, var: ty::ConstVid) -> ty::ConstVid { + self.inner.borrow_mut().const_unification_table().find(var).vid } - pub fn root_effect_var(&self, var: ty::EffectVid<'tcx>) -> ty::EffectVid<'tcx> { - self.inner.borrow_mut().effect_unification_table().find(var) + pub fn root_effect_var(&self, var: ty::EffectVid) -> ty::EffectVid { + self.inner.borrow_mut().effect_unification_table().find(var).vid } /// Resolves an int var to a rigid int type, if it was constrained to one, @@ -1400,17 +1412,14 @@ value.visit_with(&mut resolve::UnresolvedTypeOrConstFinder::new(self)).break_value() } - pub fn probe_const_var( - &self, - vid: ty::ConstVid<'tcx>, - ) -> Result, ty::UniverseIndex> { + pub fn probe_const_var(&self, vid: ty::ConstVid) -> Result, ty::UniverseIndex> { match self.inner.borrow_mut().const_unification_table().probe_value(vid).val { ConstVariableValue::Known { value } => Ok(value), ConstVariableValue::Unknown { universe } => Err(universe), } } - pub fn probe_effect_var(&self, vid: EffectVid<'tcx>) -> Option> { + pub fn probe_effect_var(&self, vid: EffectVid) -> Option> { self.inner.borrow_mut().effect_unification_table().probe_value(vid) } @@ -1421,13 +1430,26 @@ /// /// This method is idempotent, but it not typically not invoked /// except during the writeback phase. - pub fn fully_resolve>>(&self, value: T) -> FixupResult<'tcx, T> { - let value = resolve::fully_resolve(self, value); - assert!( - value.as_ref().map_or(true, |value| !value.has_infer()), - "`{value:?}` is not fully resolved" - ); - value + pub fn fully_resolve>>(&self, value: T) -> FixupResult { + match resolve::fully_resolve(self, value) { + Ok(value) => { + if value.has_non_region_infer() { + bug!("`{value:?}` is not fully resolved"); + } + if value.has_infer_regions() { + let guar = self + .tcx + .sess + .delay_span_bug(DUMMY_SP, format!("`{value:?}` is not fully resolved")); + Ok(self.tcx.fold_regions(value, |re, _| { + if re.is_var() { ty::Region::new_error(self.tcx, guar) } else { re } + })) + } else { + Ok(value) + } + } + Err(e) => Err(e), + } } // Instantiates the bound variables in a given binder with fresh inference @@ -1632,11 +1654,11 @@ #[inline] pub fn is_ty_infer_var_definitely_unchanged<'a>( &'a self, - ) -> (impl Fn(TyOrConstInferVar<'tcx>) -> bool + 'a) { + ) -> (impl Fn(TyOrConstInferVar) -> bool + Captures<'tcx> + 'a) { // This hoists the borrow/release out of the loop body. let inner = self.inner.try_borrow(); - return move |infer_var: TyOrConstInferVar<'tcx>| match (infer_var, &inner) { + return move |infer_var: TyOrConstInferVar| match (infer_var, &inner) { (TyOrConstInferVar::Ty(ty_var), Ok(inner)) => { use self::type_variable::TypeVariableValue; @@ -1659,7 +1681,7 @@ /// inference variables), and it handles both `Ty` and `ty::Const` without /// having to resort to storing full `GenericArg`s in `stalled_on`. #[inline(always)] - pub fn ty_or_const_infer_var_changed(&self, infer_var: TyOrConstInferVar<'tcx>) -> bool { + pub fn ty_or_const_infer_var_changed(&self, infer_var: TyOrConstInferVar) -> bool { match infer_var { TyOrConstInferVar::Ty(v) => { use self::type_variable::TypeVariableValue; @@ -1766,7 +1788,7 @@ /// Helper for [InferCtxt::ty_or_const_infer_var_changed] (see comment on that), currently /// used only for `traits::fulfill`'s list of `stalled_on` inference variables. 
#[derive(Copy, Clone, Debug)] -pub enum TyOrConstInferVar<'tcx> { +pub enum TyOrConstInferVar { /// Equivalent to `ty::Infer(ty::TyVar(_))`. Ty(TyVid), /// Equivalent to `ty::Infer(ty::IntVar(_))`. @@ -1775,12 +1797,12 @@ TyFloat(FloatVid), /// Equivalent to `ty::ConstKind::Infer(ty::InferConst::Var(_))`. - Const(ConstVid<'tcx>), + Const(ConstVid), /// Equivalent to `ty::ConstKind::Infer(ty::InferConst::EffectVar(_))`. - Effect(EffectVid<'tcx>), + Effect(EffectVid), } -impl<'tcx> TyOrConstInferVar<'tcx> { +impl<'tcx> TyOrConstInferVar { /// Tries to extract an inference variable from a type or a constant, returns `None` /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`) and /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/opaque_types.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/opaque_types.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/opaque_types.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/opaque_types.rs 2023-12-21 16:55:28.000000000 +0000 @@ -362,6 +362,8 @@ .collect(), ); + // FIXME(#42940): This should use the `FreeRegionsVisitor`, but that's + // not currently sound until we have existential regions. concrete_ty.visit_with(&mut ConstrainOpaqueTypeRegionVisitor { tcx: self.tcx, op: |r| self.member_constraint(opaque_type_key, span, concrete_ty, r, &choice_regions), @@ -454,16 +456,16 @@ args.as_closure().sig_as_fn_ptr_ty().visit_with(self); } - ty::Generator(_, ref args, _) => { + ty::Coroutine(_, ref args, _) => { // Skip lifetime parameters of the enclosing item(s) // Also skip the witness type, because that has no free regions. - for upvar in args.as_generator().upvar_tys() { + for upvar in args.as_coroutine().upvar_tys() { upvar.visit_with(self); } - args.as_generator().return_ty().visit_with(self); - args.as_generator().yield_ty().visit_with(self); - args.as_generator().resume_ty().visit_with(self); + args.as_coroutine().return_ty().visit_with(self); + args.as_coroutine().yield_ty().visit_with(self); + args.as_coroutine().resume_ty().visit_with(self); } ty::Alias(ty::Opaque, ty::AliasTy { def_id, ref args, .. }) => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/components.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/components.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/components.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/components.rs 2023-12-21 16:55:28.000000000 +0000 @@ -102,17 +102,17 @@ compute_components(tcx, tupled_ty, out, visited); } - ty::Generator(_, ref args, _) => { + ty::Coroutine(_, ref args, _) => { // Same as the closure case - let tupled_ty = args.as_generator().tupled_upvars_ty(); + let tupled_ty = args.as_coroutine().tupled_upvars_ty(); compute_components(tcx, tupled_ty, out, visited); - // We ignore regions in the generator interior as we don't + // We ignore regions in the coroutine interior as we don't // want these to affect region inference } // All regions are bound inside a witness - ty::GeneratorWitness(..) => (), + ty::CoroutineWitness(..) => (), // OutlivesTypeParameterEnv -- the actual checking that `X:'a` // is implied by the environment is done in regionck. 
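// Editor's sketch (not part of the patch): the components.rs and opaque_types.rs hunks
// above apply the 1.75 rename of the generator types, so downstream visitors now match
// on `ty::Coroutine` / `ty::CoroutineWitness` and reach upvars through `as_coroutine()`.
// The fragment below only restates the new spelling shown in those hunks; `tcx`, `ty`,
// `out`, `visited`, and `compute_components` are assumed from the surrounding
// compiler/rustc_infer/src/infer/outlives/components.rs code and are not defined here.
match *ty.kind() {
    // rustc 1.74 spelled this `ty::Generator(..)` and used `args.as_generator()`.
    ty::Coroutine(_, ref args, _) => {
        let tupled_ty = args.as_coroutine().tupled_upvars_ty();
        compute_components(tcx, tupled_ty, out, visited);
    }
    // rustc 1.74 spelled this `ty::GeneratorWitness(..)`.
    ty::CoroutineWitness(..) => (),
    _ => {}
}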
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/for_liveness.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/for_liveness.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/for_liveness.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/for_liveness.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,129 @@ +use rustc_middle::ty::{ + self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, +}; + +use std::ops::ControlFlow; + +use crate::infer::outlives::test_type_match; +use crate::infer::region_constraints::VerifyIfEq; + +/// Visits free regions in the type that are relevant for liveness computation. +/// These regions are passed to `OP`. +/// +/// Specifically, we visit all of the regions of types recursively, except if +/// the type is an alias, we look at the outlives bounds in the param-env +/// and alias's item bounds. If there is a unique outlives bound, then visit +/// that instead. If there is not a unique but there is a `'static` outlives +/// bound, then don't visit anything. Otherwise, walk through the opaque's +/// regions structurally. +pub struct FreeRegionsVisitor<'tcx, OP: FnMut(ty::Region<'tcx>)> { + pub tcx: TyCtxt<'tcx>, + pub param_env: ty::ParamEnv<'tcx>, + pub op: OP, +} + +impl<'tcx, OP> TypeVisitor> for FreeRegionsVisitor<'tcx, OP> +where + OP: FnMut(ty::Region<'tcx>), +{ + fn visit_binder>>( + &mut self, + t: &ty::Binder<'tcx, T>, + ) -> ControlFlow { + t.super_visit_with(self); + ControlFlow::Continue(()) + } + + fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow { + match *r { + // ignore bound regions, keep visiting + ty::ReLateBound(_, _) => ControlFlow::Continue(()), + _ => { + (self.op)(r); + ControlFlow::Continue(()) + } + } + } + + fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow { + // We're only interested in types involving regions + if !ty.flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS) { + return ControlFlow::Continue(()); + } + + // FIXME: Don't consider alias bounds on types that have escaping bound + // vars. See #117455. + if ty.has_escaping_bound_vars() { + return ty.super_visit_with(self); + } + + match ty.kind() { + // We can prove that an alias is live two ways: + // 1. All the components are live. + // + // 2. There is a known outlives bound or where-clause, and that + // region is live. + // + // We search through the item bounds and where clauses for + // either `'static` or a unique outlives region, and if one is + // found, we just need to prove that that region is still live. + // If one is not found, then we continue to walk through the alias. + ty::Alias(kind, ty::AliasTy { def_id, args, .. }) => { + let tcx = self.tcx; + let param_env = self.param_env; + let outlives_bounds: Vec<_> = tcx + .item_bounds(def_id) + .iter_instantiated(tcx, args) + .chain(param_env.caller_bounds()) + .filter_map(|clause| { + let outlives = clause.as_type_outlives_clause()?; + if let Some(outlives) = outlives.no_bound_vars() + && outlives.0 == ty + { + Some(outlives.1) + } else { + test_type_match::extract_verify_if_eq( + tcx, + param_env, + &outlives.map_bound(|ty::OutlivesPredicate(ty, bound)| { + VerifyIfEq { ty, bound } + }), + ty, + ) + } + }) + .collect(); + // If we find `'static`, then we know the alias doesn't capture *any* regions. 
+ // Otherwise, all of the outlives regions should be equal -- if they're not, + // we don't really know how to proceed, so we continue recursing through the + // alias. + if outlives_bounds.contains(&tcx.lifetimes.re_static) { + // no + } else if let Some(r) = outlives_bounds.first() + && outlives_bounds[1..].iter().all(|other_r| other_r == r) + { + assert!(r.type_flags().intersects(ty::TypeFlags::HAS_FREE_REGIONS)); + r.visit_with(self)?; + } else { + // Skip lifetime parameters that are not captures. + let variances = match kind { + ty::Opaque => Some(self.tcx.variances_of(*def_id)), + _ => None, + }; + + for (idx, s) in args.iter().enumerate() { + if variances.map(|variances| variances[idx]) != Some(ty::Variance::Bivariant) { + s.visit_with(self)?; + } + } + } + } + + _ => { + ty.super_visit_with(self)?; + } + } + + ControlFlow::Continue(()) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,6 +9,7 @@ pub mod components; pub mod env; +pub mod for_liveness; pub mod obligations; pub mod test_type_match; pub mod verify; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/test_type_match.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/test_type_match.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/test_type_match.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/test_type_match.rs 2023-12-21 16:55:28.000000000 +0000 @@ -44,7 +44,7 @@ test_ty: Ty<'tcx>, ) -> Option> { assert!(!verify_if_eq_b.has_escaping_bound_vars()); - let mut m = Match::new(tcx, param_env); + let mut m = MatchAgainstHigherRankedOutlives::new(tcx, param_env); let verify_if_eq = verify_if_eq_b.skip_binder(); m.relate(verify_if_eq.ty, test_ty).ok()?; @@ -87,24 +87,32 @@ // pointless micro-optimization true } else { - Match::new(tcx, param_env).relate(outlives_ty, erased_ty).is_ok() + MatchAgainstHigherRankedOutlives::new(tcx, param_env).relate(outlives_ty, erased_ty).is_ok() } } -struct Match<'tcx> { +struct MatchAgainstHigherRankedOutlives<'tcx> { tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, pattern_depth: ty::DebruijnIndex, map: FxHashMap>, } -impl<'tcx> Match<'tcx> { - fn new(tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Match<'tcx> { - Match { tcx, param_env, pattern_depth: ty::INNERMOST, map: FxHashMap::default() } +impl<'tcx> MatchAgainstHigherRankedOutlives<'tcx> { + fn new( + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + ) -> MatchAgainstHigherRankedOutlives<'tcx> { + MatchAgainstHigherRankedOutlives { + tcx, + param_env, + pattern_depth: ty::INNERMOST, + map: FxHashMap::default(), + } } } -impl<'tcx> Match<'tcx> { +impl<'tcx> MatchAgainstHigherRankedOutlives<'tcx> { /// Creates the "Error" variant that signals "no match". 
fn no_match(&self) -> RelateResult<'tcx, T> { Err(TypeError::Mismatch) @@ -134,7 +142,7 @@ } } -impl<'tcx> TypeRelation<'tcx> for Match<'tcx> { +impl<'tcx> TypeRelation<'tcx> for MatchAgainstHigherRankedOutlives<'tcx> { fn tag(&self) -> &'static str { "Match" } @@ -169,7 +177,9 @@ value: ty::Region<'tcx>, ) -> RelateResult<'tcx, ty::Region<'tcx>> { debug!("self.pattern_depth = {:?}", self.pattern_depth); - if let ty::RegionKind::ReLateBound(depth, br) = pattern.kind() && depth == self.pattern_depth { + if let ty::RegionKind::ReLateBound(depth, br) = pattern.kind() + && depth == self.pattern_depth + { self.bind(br, value) } else if pattern == value { Ok(pattern) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/verify.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/verify.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/verify.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/outlives/verify.rs 2023-12-21 16:55:28.000000000 +0000 @@ -108,20 +108,20 @@ let alias_ty_as_ty = alias_ty.to_ty(self.tcx); // Search the env for where clauses like `P: 'a`. - let env_bounds = self - .approx_declared_bounds_from_env(alias_ty) - .into_iter() - .map(|binder| { - if let Some(ty::OutlivesPredicate(ty, r)) = binder.no_bound_vars() && ty == alias_ty_as_ty { - // Micro-optimize if this is an exact match (this - // occurs often when there are no region variables - // involved). - VerifyBound::OutlivedBy(r) - } else { - let verify_if_eq_b = binder.map_bound(|ty::OutlivesPredicate(ty, bound)| VerifyIfEq { ty, bound }); - VerifyBound::IfEq(verify_if_eq_b) - } - }); + let env_bounds = self.approx_declared_bounds_from_env(alias_ty).into_iter().map(|binder| { + if let Some(ty::OutlivesPredicate(ty, r)) = binder.no_bound_vars() + && ty == alias_ty_as_ty + { + // Micro-optimize if this is an exact match (this + // occurs often when there are no region variables + // involved). + VerifyBound::OutlivedBy(r) + } else { + let verify_if_eq_b = + binder.map_bound(|ty::OutlivesPredicate(ty, bound)| VerifyIfEq { ty, bound }); + VerifyBound::IfEq(verify_if_eq_b) + } + }); // Extend with bounds that we can find from the definition. let definition_bounds = diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/region_constraints/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/region_constraints/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/region_constraints/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/region_constraints/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -457,7 +457,9 @@ debug!("RegionConstraintCollector: add_verify({:?})", verify); // skip no-op cases known to be satisfied - if let VerifyBound::AllBounds(ref bs) = verify.bound && bs.is_empty() { + if let VerifyBound::AllBounds(ref bs) = verify.bound + && bs.is_empty() + { return; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/resolve.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/resolve.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/resolve.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/resolve.rs 2023-12-21 16:55:28.000000000 +0000 @@ -192,7 +192,7 @@ /// Full type resolution replaces all type and region variables with /// their concrete results. 
If any variable cannot be replaced (never unified, etc) /// then an `Err` result is returned. -pub fn fully_resolve<'tcx, T>(infcx: &InferCtxt<'tcx>, value: T) -> FixupResult<'tcx, T> +pub fn fully_resolve<'tcx, T>(infcx: &InferCtxt<'tcx>, value: T) -> FixupResult where T: TypeFoldable>, { @@ -206,7 +206,7 @@ } impl<'a, 'tcx> FallibleTypeFolder> for FullTypeResolver<'a, 'tcx> { - type Error = FixupError<'tcx>; + type Error = FixupError; fn interner(&self) -> TyCtxt<'tcx> { self.infcx.tcx diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/undo_log.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/undo_log.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/undo_log.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/infer/undo_log.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,7 +3,7 @@ use rustc_data_structures::snapshot_vec as sv; use rustc_data_structures::undo_log::{Rollback, UndoLogs}; use rustc_data_structures::unify as ut; -use rustc_middle::infer::unify_key::RegionVidKey; +use rustc_middle::infer::unify_key::{ConstVidKey, EffectVidKey, RegionVidKey}; use rustc_middle::ty::{self, OpaqueHiddenType, OpaqueTypeKey}; use crate::{ @@ -21,10 +21,10 @@ pub(crate) enum UndoLog<'tcx> { OpaqueTypes(OpaqueTypeKey<'tcx>, Option>), TypeVariables(type_variable::UndoLog<'tcx>), - ConstUnificationTable(sv::UndoLog>>), + ConstUnificationTable(sv::UndoLog>>), IntUnificationTable(sv::UndoLog>), FloatUnificationTable(sv::UndoLog>), - EffectUnificationTable(sv::UndoLog>>), + EffectUnificationTable(sv::UndoLog>>), RegionConstraintCollector(region_constraints::UndoLog<'tcx>), RegionUnificationTable(sv::UndoLog>>), ProjectionCache(traits::UndoLog<'tcx>), @@ -56,9 +56,9 @@ IntUnificationTable(sv::UndoLog>), FloatUnificationTable(sv::UndoLog>), - EffectUnificationTable(sv::UndoLog>>), + EffectUnificationTable(sv::UndoLog>>), - ConstUnificationTable(sv::UndoLog>>), + ConstUnificationTable(sv::UndoLog>>), RegionUnificationTable(sv::UndoLog>>), ProjectionCache(traits::UndoLog<'tcx>), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -13,6 +13,9 @@ //! This API is completely unstable and subject to change. 
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![feature(associated_type_bounds)] #![feature(box_patterns)] #![feature(control_flow_enum)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/error_reporting/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/error_reporting/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/error_reporting/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/error_reporting/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,7 +5,8 @@ use rustc_errors::{struct_span_err, DiagnosticBuilder, ErrorGuaranteed, MultiSpan}; use rustc_hir as hir; use rustc_hir::def_id::{DefId, LocalDefId}; -use rustc_middle::ty::TyCtxt; +use rustc_middle::ty::print::with_no_trimmed_paths; +use rustc_middle::ty::{self, TyCtxt}; use rustc_span::Span; use std::fmt; use std::iter; @@ -62,7 +63,9 @@ let mut multi_span = vec![]; let mut messages = vec![]; for violation in violations { - if let ObjectSafetyViolation::SizedSelf(sp) = &violation && !sp.is_empty() { + if let ObjectSafetyViolation::SizedSelf(sp) = &violation + && !sp.is_empty() + { // Do not report `SizedSelf` without spans pointing at `SizedSelf` obligations // with a `Span`. reported_violations.insert(ObjectSafetyViolation::SizedSelf(vec![].into())); @@ -106,5 +109,66 @@ violation.solution(&mut err); } } + + let impls_of = tcx.trait_impls_of(trait_def_id); + let impls = if impls_of.blanket_impls().is_empty() { + impls_of + .non_blanket_impls() + .values() + .flatten() + .filter(|def_id| { + !matches!(tcx.type_of(*def_id).instantiate_identity().kind(), ty::Dynamic(..)) + }) + .collect::>() + } else { + vec![] + }; + let externally_visible = if !impls.is_empty() + && let Some(def_id) = trait_def_id.as_local() + && tcx.effective_visibilities(()).is_exported(def_id) + { + true + } else { + false + }; + match &impls[..] 
{ + [] => {} + _ if impls.len() > 9 => {} + [only] if externally_visible => { + err.help(with_no_trimmed_paths!(format!( + "only type `{}` is seen to implement the trait in this crate, consider using it \ + directly instead", + tcx.type_of(*only).instantiate_identity(), + ))); + } + [only] => { + err.help(with_no_trimmed_paths!(format!( + "only type `{}` implements the trait, consider using it directly instead", + tcx.type_of(*only).instantiate_identity(), + ))); + } + impls => { + let types = impls + .iter() + .map(|t| { + with_no_trimmed_paths!(format!(" {}", tcx.type_of(*t).instantiate_identity(),)) + }) + .collect::>(); + err.help(format!( + "the following types implement the trait, consider defining an enum where each \ + variant holds one of these types, implementing `{}` for this new enum and using \ + it instead:\n{}", + trait_str, + types.join("\n"), + )); + } + } + if externally_visible { + err.note(format!( + "`{trait_str}` can be implemented in other crates; if you want to support your users \ + passing their own types here, you can't refer to a specific type", + )); + } + err } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -19,7 +19,6 @@ pub use self::FulfillmentErrorCode::*; pub use self::ImplSource::*; -pub use self::ObligationCauseCode::*; pub use self::SelectionError::*; pub use self::engine::{TraitEngine, TraitEngineExt}; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/util.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/util.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/util.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_infer/src/traits/util.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,7 +2,7 @@ use crate::infer::outlives::components::{push_outlives_components, Component}; use crate::traits::{self, Obligation, PredicateObligation}; -use rustc_data_structures::fx::{FxHashSet, FxIndexSet}; +use rustc_data_structures::fx::FxHashSet; use rustc_middle::ty::{self, ToPredicate, Ty, TyCtxt}; use rustc_span::symbol::Ident; use rustc_span::Span; @@ -76,7 +76,13 @@ pub struct Elaborator<'tcx, O> { stack: Vec, visited: PredicateSet<'tcx>, - only_self: bool, + mode: Filter, +} + +enum Filter { + All, + OnlySelf, + OnlySelfThatDefines(Ident), } /// Describes how to elaborate an obligation into a sub-obligation. @@ -224,7 +230,7 @@ obligations: impl IntoIterator, ) -> Elaborator<'tcx, O> { let mut elaborator = - Elaborator { stack: Vec::new(), visited: PredicateSet::new(tcx), only_self: false }; + Elaborator { stack: Vec::new(), visited: PredicateSet::new(tcx), mode: Filter::All }; elaborator.extend_deduped(obligations); elaborator } @@ -242,7 +248,13 @@ /// Filter to only the supertraits of trait predicates, i.e. only the predicates /// that have `Self` as their self type, instead of all implied predicates. pub fn filter_only_self(mut self) -> Self { - self.only_self = true; + self.mode = Filter::OnlySelf; + self + } + + /// Filter to only the supertraits of trait predicates that define the assoc_ty. 
+ pub fn filter_only_self_that_defines(mut self, assoc_ty: Ident) -> Self { + self.mode = Filter::OnlySelfThatDefines(assoc_ty); self } @@ -257,10 +269,12 @@ return; } // Get predicates implied by the trait, or only super predicates if we only care about self predicates. - let predicates = if self.only_self { - tcx.super_predicates_of(data.def_id()) - } else { - tcx.implied_predicates_of(data.def_id()) + let predicates = match self.mode { + Filter::All => tcx.implied_predicates_of(data.def_id()), + Filter::OnlySelf => tcx.super_predicates_of(data.def_id()), + Filter::OnlySelfThatDefines(ident) => { + tcx.super_predicates_that_define_assoc_item((data.def_id(), ident)) + } }; let obligations = @@ -409,14 +423,14 @@ pub fn supertraits<'tcx>( tcx: TyCtxt<'tcx>, trait_ref: ty::PolyTraitRef<'tcx>, -) -> impl Iterator> { +) -> FilterToTraits>> { elaborate(tcx, [trait_ref.to_predicate(tcx)]).filter_only_self().filter_to_traits() } pub fn transitive_bounds<'tcx>( tcx: TyCtxt<'tcx>, trait_refs: impl Iterator>, -) -> impl Iterator> { +) -> FilterToTraits>> { elaborate(tcx, trait_refs.map(|trait_ref| trait_ref.to_predicate(tcx))) .filter_only_self() .filter_to_traits() @@ -429,31 +443,12 @@ /// `T::Item` and helps to avoid cycle errors (see e.g. #35237). pub fn transitive_bounds_that_define_assoc_item<'tcx>( tcx: TyCtxt<'tcx>, - bounds: impl Iterator>, + trait_refs: impl Iterator>, assoc_name: Ident, -) -> impl Iterator> { - let mut stack: Vec<_> = bounds.collect(); - let mut visited = FxIndexSet::default(); - - std::iter::from_fn(move || { - while let Some(trait_ref) = stack.pop() { - let anon_trait_ref = tcx.anonymize_bound_vars(trait_ref); - if visited.insert(anon_trait_ref) { - let super_predicates = - tcx.super_predicates_that_define_assoc_item((trait_ref.def_id(), assoc_name)); - for (super_predicate, _) in super_predicates.predicates { - let subst_predicate = super_predicate.subst_supertrait(tcx, &trait_ref); - if let Some(binder) = subst_predicate.as_trait_clause() { - stack.push(binder.map_bound(|t| t.trait_ref)); - } - } - - return Some(trait_ref); - } - } - - return None; - }) +) -> FilterToTraits>> { + elaborate(tcx, trait_refs.map(|trait_ref| trait_ref.to_predicate(tcx))) + .filter_only_self_that_defines(assoc_name) + .filter_to_traits() } /////////////////////////////////////////////////////////////////////////// diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,55 +3,55 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start libloading = "0.7.1" -tracing = "0.1" -rustc-rayon-core = { version = "0.5.0", optional = true } rustc-rayon = { version = "0.5.0", optional = true } +rustc-rayon-core = { version = "0.5.0", optional = true } rustc_ast = { path = "../rustc_ast" } +rustc_ast_lowering = { path = "../rustc_ast_lowering" } +rustc_ast_passes = { path = "../rustc_ast_passes" } rustc_attr = { path = "../rustc_attr" } rustc_borrowck = { path = "../rustc_borrowck" } rustc_builtin_macros = { path = "../rustc_builtin_macros" } +rustc_codegen_llvm = { path = "../rustc_codegen_llvm", optional = true } +rustc_codegen_ssa = { path = "../rustc_codegen_ssa" } +rustc_const_eval = { path = "../rustc_const_eval" } +rustc_data_structures = { path = 
"../rustc_data_structures" } +rustc_errors = { path = "../rustc_errors" } rustc_expand = { path = "../rustc_expand" } rustc_feature = { path = "../rustc_feature" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_fs_util = { path = "../rustc_fs_util" } -rustc_macros = { path = "../rustc_macros" } -rustc_parse = { path = "../rustc_parse" } -rustc_session = { path = "../rustc_session" } -rustc_span = { path = "../rustc_span" } -rustc_middle = { path = "../rustc_middle" } -rustc_ast_lowering = { path = "../rustc_ast_lowering" } -rustc_ast_passes = { path = "../rustc_ast_passes" } -rustc_incremental = { path = "../rustc_incremental" } -rustc_index = { path = "../rustc_index" } -rustc_traits = { path = "../rustc_traits" } -rustc_data_structures = { path = "../rustc_data_structures" } -rustc_codegen_ssa = { path = "../rustc_codegen_ssa" } -rustc_symbol_mangling = { path = "../rustc_symbol_mangling" } -rustc_codegen_llvm = { path = "../rustc_codegen_llvm", optional = true } rustc_hir = { path = "../rustc_hir" } +rustc_hir_analysis = { path = "../rustc_hir_analysis" } +rustc_hir_typeck = { path = "../rustc_hir_typeck" } +rustc_incremental = { path = "../rustc_incremental" } +rustc_lint = { path = "../rustc_lint" } +rustc_macros = { path = "../rustc_macros" } rustc_metadata = { path = "../rustc_metadata" } -rustc_const_eval = { path = "../rustc_const_eval" } +rustc_middle = { path = "../rustc_middle" } rustc_mir_build = { path = "../rustc_mir_build" } rustc_mir_transform = { path = "../rustc_mir_transform" } rustc_monomorphize = { path = "../rustc_monomorphize" } +rustc_parse = { path = "../rustc_parse" } rustc_passes = { path = "../rustc_passes" } -rustc_hir_analysis = { path = "../rustc_hir_analysis" } -rustc_hir_typeck = { path = "../rustc_hir_typeck" } -rustc_lint = { path = "../rustc_lint" } -rustc_errors = { path = "../rustc_errors" } -rustc_plugin_impl = { path = "../rustc_plugin_impl" } rustc_privacy = { path = "../rustc_privacy" } -rustc_query_system = { path = "../rustc_query_system" } rustc_query_impl = { path = "../rustc_query_impl" } +rustc_query_system = { path = "../rustc_query_system" } rustc_resolve = { path = "../rustc_resolve" } +rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } +rustc_symbol_mangling = { path = "../rustc_symbol_mangling" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } +rustc_traits = { path = "../rustc_traits" } rustc_ty_utils = { path = "../rustc_ty_utils" } +tracing = "0.1" +# tidy-alphabetical-end [features] +# tidy-alphabetical-start llvm = ['rustc_codegen_llvm'] rustc_use_parallel_compiler = ['rustc-rayon', 'rustc-rayon-core', 'rustc_query_impl/rustc_use_parallel_compiler', 'rustc_errors/rustc_use_parallel_compiler'] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/callbacks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/callbacks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/callbacks.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/callbacks.rs 2023-12-21 16:55:28.000000000 +0000 @@ -26,16 +26,14 @@ }) } -/// This is a callback from `rustc_ast` as it cannot access the implicit state +/// This is a callback from `rustc_errors` as it cannot access the implicit state /// in `rustc_middle` otherwise. It is used when diagnostic messages are /// emitted and stores them in the current query, if there is one. 
fn track_diagnostic(diagnostic: &mut Diagnostic, f: &mut dyn FnMut(&mut Diagnostic)) { tls::with_context_opt(|icx| { if let Some(icx) = icx { if let Some(diagnostics) = icx.diagnostics { - let mut diagnostics = diagnostics.lock(); - diagnostics.extend(Some(diagnostic.clone())); - std::mem::drop(diagnostics); + diagnostics.lock().extend(Some(diagnostic.clone())); } // Diagnostics are tracked, we can ignore the dependency. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/interface.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/interface.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/interface.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/interface.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,6 +5,7 @@ use rustc_codegen_ssa::traits::CodegenBackend; use rustc_data_structures::defer; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_data_structures::stable_hasher::StableHasher; use rustc_data_structures::sync::Lrc; use rustc_errors::registry::Registry; use rustc_errors::{ErrorGuaranteed, Handler}; @@ -14,15 +15,18 @@ use rustc_parse::maybe_new_parser_from_source_str; use rustc_query_impl::QueryCtxt; use rustc_query_system::query::print_query_stack; -use rustc_session::config::{self, CheckCfg, ExpectedValues, Input, OutFileName, OutputFilenames}; -use rustc_session::parse::{CrateConfig, ParseSess}; -use rustc_session::CompilerIO; -use rustc_session::Session; -use rustc_session::{lint, EarlyErrorHandler}; -use rustc_span::source_map::{FileLoader, FileName}; +use rustc_session::config::{ + self, Cfg, CheckCfg, ExpectedValues, Input, OutFileName, OutputFilenames, +}; +use rustc_session::filesearch::sysroot_candidates; +use rustc_session::parse::ParseSess; +use rustc_session::{lint, CompilerIO, EarlyErrorHandler, Session}; +use rustc_span::source_map::FileLoader; use rustc_span::symbol::sym; +use rustc_span::FileName; use std::path::PathBuf; use std::result; +use std::sync::Arc; pub type Result = result::Result; @@ -59,184 +63,267 @@ } } -/// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`. -pub fn parse_cfgspecs( - handler: &EarlyErrorHandler, - cfgspecs: Vec, -) -> FxHashSet<(String, Option)> { - rustc_span::create_default_session_if_not_set_then(move |_| { - let cfg = cfgspecs - .into_iter() - .map(|s| { - let sess = ParseSess::with_silent_emitter(Some(format!( - "this error occurred on the command line: `--cfg={s}`" - ))); - let filename = FileName::cfg_spec_source_code(&s); - - macro_rules! error { - ($reason: expr) => { - handler.early_error(format!( - concat!("invalid `--cfg` argument: `{}` (", $reason, ")"), - s - )); - }; - } - - match maybe_new_parser_from_source_str(&sess, filename, s.to_string()) { - Ok(mut parser) => match parser.parse_meta_item() { - Ok(meta_item) if parser.token == token::Eof => { - if meta_item.path.segments.len() != 1 { - error!("argument key must be an identifier"); - } - match &meta_item.kind { - MetaItemKind::List(..) => {} - MetaItemKind::NameValue(lit) if !lit.kind.is_str() => { - error!("argument value must be a string"); - } - MetaItemKind::NameValue(..) | MetaItemKind::Word => { - let ident = meta_item.ident().expect("multi-segment cfg key"); - return (ident.name, meta_item.value_str()); - } - } - } - Ok(..) => {} - Err(err) => err.cancel(), - }, - Err(errs) => drop(errs), - } - - // If the user tried to use a key="value" flag, but is missing the quotes, provide - // a hint about how to resolve this. 
- if s.contains('=') && !s.contains("=\"") && !s.ends_with('"') { - error!(concat!( - r#"expected `key` or `key="value"`, ensure escaping is appropriate"#, - r#" for your shell, try 'key="value"' or key=\"value\""# - )); - } else { - error!(r#"expected `key` or `key="value"`"#); - } - }) - .collect::(); - cfg.into_iter().map(|(a, b)| (a.to_string(), b.map(|b| b.to_string()))).collect() - }) -} - -/// Converts strings provided as `--check-cfg [specs]` into a `CheckCfg`. -pub fn parse_check_cfg(handler: &EarlyErrorHandler, specs: Vec) -> CheckCfg { - rustc_span::create_default_session_if_not_set_then(move |_| { - let mut check_cfg = CheckCfg::default(); - - for s in specs { +/// Converts strings provided as `--cfg [cfgspec]` into a `Cfg`. +pub(crate) fn parse_cfg(handler: &EarlyErrorHandler, cfgs: Vec) -> Cfg { + cfgs.into_iter() + .map(|s| { let sess = ParseSess::with_silent_emitter(Some(format!( - "this error occurred on the command line: `--check-cfg={s}`" + "this error occurred on the command line: `--cfg={s}`" ))); let filename = FileName::cfg_spec_source_code(&s); macro_rules! error { ($reason: expr) => { handler.early_error(format!( - concat!("invalid `--check-cfg` argument: `{}` (", $reason, ")"), + concat!("invalid `--cfg` argument: `{}` (", $reason, ")"), s - )) + )); }; } - let expected_error = || { - error!( - "expected `names(name1, name2, ... nameN)` or \ - `values(name, \"value1\", \"value2\", ... \"valueN\")`" - ) - }; - match maybe_new_parser_from_source_str(&sess, filename, s.to_string()) { Ok(mut parser) => match parser.parse_meta_item() { Ok(meta_item) if parser.token == token::Eof => { - if let Some(args) = meta_item.meta_item_list() { - if meta_item.has_name(sym::names) { - check_cfg.exhaustive_names = true; - for arg in args { - if arg.is_word() && arg.ident().is_some() { - let ident = arg.ident().expect("multi-segment cfg key"); - check_cfg - .expecteds - .entry(ident.name.to_string()) - .or_insert(ExpectedValues::Any); - } else { - error!("`names()` arguments must be simple identifiers"); - } - } - } else if meta_item.has_name(sym::values) { - if let Some((name, values)) = args.split_first() { - if name.is_word() && name.ident().is_some() { - let ident = name.ident().expect("multi-segment cfg key"); - let expected_values = check_cfg - .expecteds - .entry(ident.name.to_string()) - .and_modify(|expected_values| match expected_values { - ExpectedValues::Some(_) => {} - ExpectedValues::Any => { - // handle the case where names(...) was done - // before values by changing to a list - *expected_values = - ExpectedValues::Some(FxHashSet::default()); - } - }) - .or_insert_with(|| { - ExpectedValues::Some(FxHashSet::default()) - }); - - let ExpectedValues::Some(expected_values) = expected_values - else { - bug!("`expected_values` should be a list a values") - }; - - for val in values { - if let Some(LitKind::Str(s, _)) = - val.lit().map(|lit| &lit.kind) - { - expected_values.insert(Some(s.to_string())); - } else { - error!( - "`values()` arguments must be string literals" - ); - } - } - - if values.is_empty() { - expected_values.insert(None); - } - } else { - error!( - "`values()` first argument must be a simple identifier" - ); - } - } else if args.is_empty() { - check_cfg.exhaustive_values = true; - } else { - expected_error(); - } - } else { - expected_error(); + if meta_item.path.segments.len() != 1 { + error!("argument key must be an identifier"); + } + match &meta_item.kind { + MetaItemKind::List(..) 
=> {} + MetaItemKind::NameValue(lit) if !lit.kind.is_str() => { + error!("argument value must be a string"); } + MetaItemKind::NameValue(..) | MetaItemKind::Word => { + let ident = meta_item.ident().expect("multi-segment cfg key"); + return (ident.name, meta_item.value_str()); + } + } + } + Ok(..) => {} + Err(err) => err.cancel(), + }, + Err(errs) => drop(errs), + } + + // If the user tried to use a key="value" flag, but is missing the quotes, provide + // a hint about how to resolve this. + if s.contains('=') && !s.contains("=\"") && !s.ends_with('"') { + error!(concat!( + r#"expected `key` or `key="value"`, ensure escaping is appropriate"#, + r#" for your shell, try 'key="value"' or key=\"value\""# + )); + } else { + error!(r#"expected `key` or `key="value"`"#); + } + }) + .collect::() +} + +/// Converts strings provided as `--check-cfg [specs]` into a `CheckCfg`. +pub(crate) fn parse_check_cfg(handler: &EarlyErrorHandler, specs: Vec) -> CheckCfg { + // If any --check-cfg is passed then exhaustive_values and exhaustive_names + // are enabled by default. + let exhaustive_names = !specs.is_empty(); + let exhaustive_values = !specs.is_empty(); + let mut check_cfg = CheckCfg { exhaustive_names, exhaustive_values, ..CheckCfg::default() }; + + let mut old_syntax = None; + for s in specs { + let sess = ParseSess::with_silent_emitter(Some(format!( + "this error occurred on the command line: `--check-cfg={s}`" + ))); + let filename = FileName::cfg_spec_source_code(&s); + + macro_rules! error { + ($reason:expr) => { + handler.early_error(format!( + concat!("invalid `--check-cfg` argument: `{}` (", $reason, ")"), + s + )) + }; + } + + let expected_error = || -> ! { + error!("expected `cfg(name, values(\"value1\", \"value2\", ... \"valueN\"))`") + }; + + let Ok(mut parser) = maybe_new_parser_from_source_str(&sess, filename, s.to_string()) + else { + expected_error(); + }; + + let meta_item = match parser.parse_meta_item() { + Ok(meta_item) if parser.token == token::Eof => meta_item, + Ok(..) => expected_error(), + Err(err) => { + err.cancel(); + expected_error(); + } + }; + + let Some(args) = meta_item.meta_item_list() else { + expected_error(); + }; + + let mut set_old_syntax = || { + // defaults are flipped for the old syntax + if old_syntax == None { + check_cfg.exhaustive_names = false; + check_cfg.exhaustive_values = false; + } + old_syntax = Some(true); + }; + + if meta_item.has_name(sym::names) { + set_old_syntax(); + + check_cfg.exhaustive_names = true; + for arg in args { + if arg.is_word() && let Some(ident) = arg.ident() { + check_cfg.expecteds.entry(ident.name).or_insert(ExpectedValues::Any); + } else { + error!("`names()` arguments must be simple identifiers"); + } + } + } else if meta_item.has_name(sym::values) { + set_old_syntax(); + + if let Some((name, values)) = args.split_first() { + if name.is_word() && let Some(ident) = name.ident() { + let expected_values = check_cfg + .expecteds + .entry(ident.name) + .and_modify(|expected_values| match expected_values { + ExpectedValues::Some(_) => {} + ExpectedValues::Any => { + // handle the case where names(...) 
was done + // before values by changing to a list + *expected_values = ExpectedValues::Some(FxHashSet::default()); + } + }) + .or_insert_with(|| ExpectedValues::Some(FxHashSet::default())); + + let ExpectedValues::Some(expected_values) = expected_values else { + bug!("`expected_values` should be a list a values") + }; + + for val in values { + if let Some(LitKind::Str(s, _)) = val.lit().map(|lit| &lit.kind) { + expected_values.insert(Some(*s)); } else { - expected_error(); + error!("`values()` arguments must be string literals"); } } - Ok(..) => expected_error(), - Err(err) => { - err.cancel(); - expected_error(); + + if values.is_empty() { + expected_values.insert(None); + } + } else { + error!("`values()` first argument must be a simple identifier"); + } + } else if args.is_empty() { + check_cfg.exhaustive_values = true; + } else { + expected_error(); + } + } else if meta_item.has_name(sym::cfg) { + old_syntax = Some(false); + + let mut names = Vec::new(); + let mut values: FxHashSet<_> = Default::default(); + + let mut any_specified = false; + let mut values_specified = false; + let mut values_any_specified = false; + + for arg in args { + if arg.is_word() && let Some(ident) = arg.ident() { + if values_specified { + error!("`cfg()` names cannot be after values"); } - }, - Err(errs) => { - drop(errs); - expected_error(); + names.push(ident); + } else if arg.has_name(sym::any) && let Some(args) = arg.meta_item_list() { + if any_specified { + error!("`any()` cannot be specified multiple times"); + } + any_specified = true; + if !args.is_empty() { + error!("`any()` must be empty"); + } + } else if arg.has_name(sym::values) && let Some(args) = arg.meta_item_list() { + if names.is_empty() { + error!("`values()` cannot be specified before the names"); + } else if values_specified { + error!("`values()` cannot be specified multiple times"); + } + values_specified = true; + + for arg in args { + if let Some(LitKind::Str(s, _)) = arg.lit().map(|lit| &lit.kind) { + values.insert(Some(*s)); + } else if arg.has_name(sym::any) && let Some(args) = arg.meta_item_list() { + if values_any_specified { + error!("`any()` in `values()` cannot be specified multiple times"); + } + values_any_specified = true; + if !args.is_empty() { + error!("`any()` must be empty"); + } + } else { + error!("`values()` arguments must be string literals or `any()`"); + } + } + } else { + error!( + "`cfg()` arguments must be simple identifiers, `any()` or `values(...)`" + ); } } + + if values.is_empty() && !values_any_specified && !any_specified { + values.insert(None); + } else if !values.is_empty() && values_any_specified { + error!( + "`values()` arguments cannot specify string literals and `any()` at the same time" + ); + } + + if any_specified { + if names.is_empty() + && values.is_empty() + && !values_specified + && !values_any_specified + { + check_cfg.exhaustive_names = false; + } else { + error!("`cfg(any())` can only be provided in isolation"); + } + } else { + for name in names { + check_cfg + .expecteds + .entry(name.name) + .and_modify(|v| match v { + ExpectedValues::Some(v) if !values_any_specified => { + v.extend(values.clone()) + } + ExpectedValues::Some(_) => *v = ExpectedValues::Any, + ExpectedValues::Any => {} + }) + .or_insert_with(|| { + if values_any_specified { + ExpectedValues::Any + } else { + ExpectedValues::Some(values.clone()) + } + }); + } + } + } else { + expected_error(); } + } - check_cfg - }) + check_cfg } /// The compiler configuration @@ -244,9 +331,9 @@ /// Command line options pub opts: 
config::Options, - /// cfg! configuration in addition to the default ones - pub crate_cfg: FxHashSet<(String, Option)>, - pub crate_check_cfg: CheckCfg, + /// Unparsed cfg! configuration in addition to the default ones. + pub crate_cfg: Vec, + pub crate_check_cfg: Vec, pub input: Input, pub output_dir: Option, @@ -260,8 +347,14 @@ /// This is a callback from the driver that is called when [`ParseSess`] is created. pub parse_sess_created: Option>, + /// This is a callback to hash otherwise untracked state used by the caller, if the + /// hash changes between runs the incremental cache will be cleared. + /// + /// e.g. used by Clippy to hash its config file + pub hash_untracked_state: Option>, + /// This is a callback from the driver that is called when we're registering lints; - /// it is called during plugin registration when we have the LintStore in a non-shared state. + /// it is called during lint loading when we have the LintStore in a non-shared state. /// /// Note that if you find a Some here you probably want to call that function in the new /// function being registered. @@ -269,8 +362,6 @@ /// This is a callback from the driver that is called just after we have populated /// the list of queries. - /// - /// The second parameter is local providers and the third parameter is external providers. pub override_queries: Option, /// This is a callback from the driver that is called to create a codegen backend. @@ -280,6 +371,12 @@ /// Registry of diagnostics codes. pub registry: Registry, + /// The inner atomic value is set to true when a feature marked as `internal` is + /// enabled. Makes it so that "please report a bug" is hidden, as ICEs with + /// internal features are wontfix, and they are usually the cause of the ICEs. + /// None signifies that this is not tracked. + pub using_internal_features: Arc, + /// All commandline args used to invoke the compiler, with @file args fully expanded. /// This will only be used within debug info, e.g. 
in the pdb file on windows /// This is mainly useful for other tools that reads that debuginfo to figure out @@ -301,35 +398,81 @@ || { crate::callbacks::setup_callbacks(); - let registry = &config.registry; - let handler = EarlyErrorHandler::new(config.opts.error_format); + let codegen_backend = if let Some(make_codegen_backend) = config.make_codegen_backend { + make_codegen_backend(&config.opts) + } else { + util::get_codegen_backend( + &handler, + &config.opts.maybe_sysroot, + config.opts.unstable_opts.codegen_backend.as_deref(), + ) + }; + let temps_dir = config.opts.unstable_opts.temps_dir.as_deref().map(PathBuf::from); - let (mut sess, codegen_backend) = util::create_session( + + let bundle = match rustc_errors::fluent_bundle( + config.opts.maybe_sysroot.clone(), + sysroot_candidates().to_vec(), + config.opts.unstable_opts.translate_lang.clone(), + config.opts.unstable_opts.translate_additional_ftl.as_deref(), + config.opts.unstable_opts.translate_directionality_markers, + ) { + Ok(bundle) => bundle, + Err(e) => { + handler.early_error(format!("failed to load fluent bundle: {e}")); + } + }; + + let mut locale_resources = Vec::from(config.locale_resources); + locale_resources.push(codegen_backend.locale_resource()); + + // target_override is documented to be called before init(), so this is okay + let target_override = codegen_backend.target_override(&config.opts); + + let mut sess = rustc_session::build_session( &handler, config.opts, - config.crate_cfg, - config.crate_check_cfg, - config.locale_resources, - config.file_loader, CompilerIO { input: config.input, output_dir: config.output_dir, output_file: config.output_file, temps_dir, }, + bundle, + config.registry.clone(), + locale_resources, config.lint_caps, - config.make_codegen_backend, - registry.clone(), + config.file_loader, + target_override, + util::rustc_version_str().unwrap_or("unknown"), config.ice_file, + config.using_internal_features, config.expanded_args, ); + codegen_backend.init(&sess); + + let cfg = parse_cfg(&handler, config.crate_cfg); + let mut cfg = config::build_configuration(&sess, cfg); + util::add_configuration(&mut cfg, &mut sess, &*codegen_backend); + sess.parse_sess.config = cfg; + + let mut check_cfg = parse_check_cfg(&handler, config.crate_check_cfg); + check_cfg.fill_well_known(&sess.target); + sess.parse_sess.check_config = check_cfg; + if let Some(parse_sess_created) = config.parse_sess_created { parse_sess_created(&mut sess.parse_sess); } + if let Some(hash_untracked_state) = config.hash_untracked_state { + let mut hasher = StableHasher::new(); + hash_untracked_state(&sess, &mut hasher); + sess.opts.untracked_state_hash = hasher.finish() + } + let compiler = Compiler { sess: Lrc::new(sess), codegen_backend: Lrc::from(codegen_backend), @@ -340,7 +483,7 @@ rustc_span::set_source_map(compiler.sess.parse_sess.clone_source_map(), move || { let r = { let _sess_abort_error = defer(|| { - compiler.sess.finish_diagnostics(registry); + compiler.sess.finish_diagnostics(&config.registry); }); f(&compiler) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,6 +3,7 @@ #![feature(internal_output_capture)] #![feature(thread_spawn_unchecked)] #![feature(lazy_cell)] +#![feature(let_chains)] 
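The `hash_untracked_state` callback added to `Config` above is invoked right before the `Compiler` is built: whatever the driver hashes there ends up in `sess.opts.untracked_state_hash`, so a change in that state (the field doc cites Clippy's config file) invalidates the incremental cache. A sketch of what a driver might register; the closure arguments follow the call site above, but feeding the `StableHasher` through `std::hash::Hash` is an assumption of this sketch, and `clippy.toml` is only an example input:

// Somewhere in the driver's setup of `rustc_interface::interface::Config`:
let untracked_text = std::fs::read_to_string("clippy.toml").unwrap_or_default();
config.hash_untracked_state = Some(Box::new(move |_session, hasher| {
    use std::hash::Hash;
    untracked_text.hash(hasher); // assumes StableHasher implements std::hash::Hasher
}));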
#![feature(try_blocks)] #![recursion_limit = "256"] #![allow(rustc::potential_query_instability)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/passes.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/passes.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/passes.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/passes.rs 2023-12-21 16:55:28.000000000 +0000 @@ -23,11 +23,10 @@ use rustc_mir_build as mir_build; use rustc_parse::{parse_crate_from_file, parse_crate_from_source_str, validate_attr}; use rustc_passes::{self, abi_test, hir_stats, layout_test}; -use rustc_plugin_impl as plugin; use rustc_resolve::Resolver; use rustc_session::code_stats::VTableSizeInfo; use rustc_session::config::{CrateType, Input, OutFileName, OutputFilenames, OutputType}; -use rustc_session::cstore::{MetadataLoader, Untracked}; +use rustc_session::cstore::Untracked; use rustc_session::output::filename_for_input; use rustc_session::search_paths::PathKind; use rustc_session::{Limit, Session}; @@ -75,25 +74,12 @@ pub(crate) fn create_lint_store( sess: &Session, - metadata_loader: &dyn MetadataLoader, register_lints: Option, - pre_configured_attrs: &[ast::Attribute], ) -> LintStore { let mut lint_store = rustc_lint::new_lint_store(sess.enable_internal_lints()); if let Some(register_lints) = register_lints { register_lints(sess, &mut lint_store); } - - let registrars = sess.time("plugin_loading", || { - plugin::load::load_plugins(sess, metadata_loader, pre_configured_attrs) - }); - sess.time("plugin_registration", || { - let mut registry = plugin::Registry { lint_store: &mut lint_store }; - for registrar in registrars { - registrar(&mut registry); - } - }); - lint_store } @@ -392,34 +378,16 @@ out_filenames } -// Runs `f` on every output file path and returns the first non-None result, or None if `f` -// returns None for every file path. 
-fn check_output(output_paths: &[PathBuf], f: F) -> Option -where - F: Fn(&PathBuf) -> Option, -{ - for output_path in output_paths { - if let Some(result) = f(output_path) { - return Some(result); - } - } - None -} - fn output_contains_path(output_paths: &[PathBuf], input_path: &Path) -> bool { let input_path = try_canonicalize(input_path).ok(); if input_path.is_none() { return false; } - let check = |output_path: &PathBuf| { - if try_canonicalize(output_path).ok() == input_path { Some(()) } else { None } - }; - check_output(output_paths, check).is_some() + output_paths.iter().any(|output_path| try_canonicalize(output_path).ok() == input_path) } -fn output_conflicts_with_dir(output_paths: &[PathBuf]) -> Option { - let check = |output_path: &PathBuf| output_path.is_dir().then(|| output_path.clone()); - check_output(output_paths, check) +fn output_conflicts_with_dir(output_paths: &[PathBuf]) -> Option<&PathBuf> { + output_paths.iter().find(|output_path| output_path.is_dir()) } fn escape_dep_filename(filename: &str) -> String { @@ -602,9 +570,7 @@ let (_, krate) = &*tcx.resolver_for_lowering(()).borrow(); let crate_name = tcx.crate_name(LOCAL_CRATE); - // FIXME: rustdoc passes &[] instead of &krate.attrs here let outputs = util::build_output_filenames(&krate.attrs, sess); - let output_paths = generated_output_paths(tcx, &outputs, sess.io.output_file.is_some(), crate_name); @@ -775,12 +741,16 @@ rustc_hir_analysis::check_crate(tcx)?; sess.time("MIR_borrow_checking", || { - tcx.hir().par_body_owners(|def_id| tcx.ensure().mir_borrowck(def_id)); + tcx.hir().par_body_owners(|def_id| { + // Run THIR unsafety check because it's responsible for stealing + // and deallocating THIR when enabled. + tcx.ensure().thir_check_unsafety(def_id); + tcx.ensure().mir_borrowck(def_id) + }); }); sess.time("MIR_effect_checking", || { for def_id in tcx.hir().body_owners() { - tcx.ensure().thir_check_unsafety(def_id); if !tcx.sess.opts.unstable_opts.thir_unsafeck { rustc_mir_transform::check_unsafety::check_unsafety(tcx, def_id); } @@ -799,9 +769,9 @@ }); tcx.hir().par_body_owners(|def_id| { - if let rustc_hir::def::DefKind::Generator = tcx.def_kind(def_id) { - tcx.ensure().mir_generator_witnesses(def_id); - tcx.ensure().check_generator_obligations(def_id); + if let rustc_hir::def::DefKind::Coroutine = tcx.def_kind(def_id) { + tcx.ensure().mir_coroutine_witnesses(def_id); + tcx.ensure().check_coroutine_obligations(def_id); } }); @@ -852,6 +822,11 @@ // This check has to be run after all lints are done processing. We don't // define a lint filter, as all lint checks should have finished at this point. sess.time("check_lint_expectations", || tcx.ensure().check_expectations(None)); + + // This query is only invoked normally if a diagnostic is emitted that needs any + // diagnostic item. If the crate compiles without checking any diagnostic items, + // we will fail to emit overlap diagnostics. Thus we invoke it here unconditionally. + let _ = tcx.all_diagnostic_items(()); }); if sess.opts.unstable_opts.print_vtable_sizes { @@ -873,16 +848,18 @@ let trait_ref = ty::Binder::dummy(ty::TraitRef::identity(tcx, tr)); - // A slightly edited version of the code in `rustc_trait_selection::traits::vtable::vtable_entries`, - // that works without self type and just counts number of entries. + // A slightly edited version of the code in + // `rustc_trait_selection::traits::vtable::vtable_entries`, that works without self + // type and just counts number of entries. 
// - // Note that this is technically wrong, for traits which have associated types in supertraits: + // Note that this is technically wrong, for traits which have associated types in + // supertraits: // // trait A: AsRef + AsRef<()> { type T; } // - // Without self type we can't normalize `Self::T`, so we can't know if `AsRef` and - // `AsRef<()>` are the same trait, thus we assume that those are different, and potentially - // over-estimate how many vtable entries there are. + // Without self type we can't normalize `Self::T`, so we can't know if `AsRef` + // and `AsRef<()>` are the same trait, thus we assume that those are different, and + // potentially over-estimate how many vtable entries there are. // // Similarly this is wrong for traits that have methods with possibly-impossible bounds. // For example: @@ -909,10 +886,10 @@ let own_existential_entries = tcx.own_existential_vtable_entries(trait_ref.def_id()); - // The original code here ignores the method if its predicates are impossible. - // We can't really do that as, for example, all not trivial bounds on generic - // parameters are impossible (since we don't know the parameters...), - // see the comment above. + // The original code here ignores the method if its predicates are + // impossible. We can't really do that as, for example, all not trivial + // bounds on generic parameters are impossible (since we don't know the + // parameters...), see the comment above. entries_ignoring_upcasting += own_existential_entries.len(); if emit_vptr { @@ -957,10 +934,9 @@ codegen_backend.codegen_crate(tcx, metadata, need_metadata_module) }); - // Don't run these test assertions when not doing codegen. Compiletest tries to build + // Don't run this test assertions when not doing codegen. Compiletest tries to build // build-fail tests in check mode first and expects it to not give an error in that case. 
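The `check_output` helper removed from passes.rs above existed only to serve the two checks that follow it; the iterator adapters that replace it behave like this stand-alone sketch (simplified: the real `output_contains_path` canonicalizes both sides before comparing):

use std::path::PathBuf;

fn contains(output_paths: &[PathBuf], input: &PathBuf) -> bool {
    output_paths.iter().any(|p| p == input)
}

fn first_dir_conflict(output_paths: &[PathBuf]) -> Option<&PathBuf> {
    // `find` hands back a reference into the slice, which is why the new
    // `output_conflicts_with_dir` can return `Option<&PathBuf>` instead of cloning.
    output_paths.iter().find(|p| p.is_dir())
}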
if tcx.sess.opts.output_types.should_codegen() { - rustc_incremental::assert_module_sources::assert_module_sources(tcx); rustc_symbol_mangling::test::report_symbol_names(tcx); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/queries.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/queries.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/queries.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/queries.rs 2023-12-21 16:55:28.000000000 +0000 @@ -148,12 +148,8 @@ ); let dep_graph = setup_dep_graph(sess, crate_name, stable_crate_id)?; - let lint_store = Lrc::new(passes::create_lint_store( - sess, - &*self.codegen_backend().metadata_loader(), - self.compiler.register_lints.as_deref(), - &pre_configured_attrs, - )); + let lint_store = + Lrc::new(passes::create_lint_store(sess, self.compiler.register_lints.as_deref())); let cstore = FreezeLock::new(Box::new(CStore::new( self.codegen_backend().metadata_loader(), stable_crate_id, @@ -181,9 +177,11 @@ feed.crate_name(crate_name); let feed = tcx.feed_unit_query(); - feed.features_query( - tcx.arena.alloc(rustc_expand::config::features(sess, &pre_configured_attrs)), - ); + feed.features_query(tcx.arena.alloc(rustc_expand::config::features( + sess, + &pre_configured_attrs, + crate_name, + ))); feed.crate_for_resolver(tcx.arena.alloc(Steal::new((krate, pre_configured_attrs)))); }); Ok(qcx) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,54 +1,33 @@ #![allow(rustc::bad_opt_access)] -use crate::interface::parse_cfgspecs; - -use rustc_data_structures::fx::FxHashSet; +use crate::interface::parse_cfg; use rustc_data_structures::profiling::TimePassesFormat; use rustc_errors::{emitter::HumanReadableErrorType, registry, ColorConfig}; -use rustc_session::config::rustc_optgroups; -use rustc_session::config::DebugInfo; -use rustc_session::config::Input; -use rustc_session::config::InstrumentXRay; -use rustc_session::config::LinkSelfContained; -use rustc_session::config::TraitSolver; -use rustc_session::config::{build_configuration, build_session_options, to_crate_config}; use rustc_session::config::{ - BranchProtection, Externs, OomStrategy, OutFileName, OutputType, OutputTypes, PAuthKey, PacRet, - ProcMacroExecutionStrategy, SymbolManglingVersion, WasiExecModel, + build_configuration, build_session_options, rustc_optgroups, BranchProtection, CFGuard, Cfg, + DebugInfo, DumpMonoStatsFormat, ErrorOutputType, ExternEntry, ExternLocation, Externs, + InliningThreshold, Input, InstrumentCoverage, InstrumentXRay, LinkSelfContained, + LinkerPluginLto, LocationDetail, LtoCli, MirSpanview, OomStrategy, Options, OutFileName, + OutputType, OutputTypes, PAuthKey, PacRet, Passes, Polonius, ProcMacroExecutionStrategy, Strip, + SwitchWithOptPath, SymbolManglingVersion, TraitSolver, WasiExecModel, }; -use rustc_session::config::{CFGuard, ExternEntry, LinkerPluginLto, LtoCli, SwitchWithOptPath}; -use rustc_session::config::{DumpMonoStatsFormat, MirSpanview}; -use rustc_session::config::{ErrorOutputType, ExternLocation, LocationDetail, Options, Strip}; -use rustc_session::config::{InstrumentCoverage, Passes}; use rustc_session::lint::Level; use 
rustc_session::search_paths::SearchPath; use rustc_session::utils::{CanonicalizedPath, NativeLib, NativeLibKind}; -use rustc_session::{build_session, getopts, Session}; -use rustc_session::{CompilerIO, EarlyErrorHandler}; +use rustc_session::{build_session, getopts, CompilerIO, EarlyErrorHandler, Session}; use rustc_span::edition::{Edition, DEFAULT_EDITION}; use rustc_span::symbol::sym; -use rustc_span::FileName; -use rustc_span::SourceFileHashAlgorithm; +use rustc_span::{FileName, SourceFileHashAlgorithm}; use rustc_target::spec::{CodeModel, LinkerFlavorCli, MergeFunctions, PanicStrategy, RelocModel}; use rustc_target::spec::{RelroLevel, SanitizerSet, SplitDebuginfo, StackProtector, TlsModel}; - use std::collections::{BTreeMap, BTreeSet}; use std::num::NonZeroUsize; use std::path::{Path, PathBuf}; +use std::sync::Arc; -type CfgSpecs = FxHashSet<(String, Option)>; - -fn build_session_options_and_crate_config( - handler: &mut EarlyErrorHandler, - matches: getopts::Matches, -) -> (Options, CfgSpecs) { - let sessopts = build_session_options(handler, &matches); - let cfg = parse_cfgspecs(handler, matches.opt_strs("cfg")); - (sessopts, cfg) -} - -fn mk_session(handler: &mut EarlyErrorHandler, matches: getopts::Matches) -> (Session, CfgSpecs) { +fn mk_session(handler: &mut EarlyErrorHandler, matches: getopts::Matches) -> (Session, Cfg) { let registry = registry::Registry::new(&[]); - let (sessopts, cfg) = build_session_options_and_crate_config(handler, matches); + let sessopts = build_session_options(handler, &matches); + let cfg = parse_cfg(handler, matches.opt_strs("cfg")); let temps_dir = sessopts.unstable_opts.temps_dir.as_deref().map(PathBuf::from); let io = CompilerIO { input: Input::Str { name: FileName::Custom(String::new()), input: String::new() }, @@ -68,6 +47,7 @@ None, "", None, + Arc::default(), Default::default(), ); (sess, cfg) @@ -138,7 +118,7 @@ let matches = optgroups().parse(&["--test".to_string()]).unwrap(); let mut handler = EarlyErrorHandler::new(ErrorOutputType::default()); let (sess, cfg) = mk_session(&mut handler, matches); - let cfg = build_configuration(&sess, to_crate_config(cfg)); + let cfg = build_configuration(&sess, cfg); assert!(cfg.contains(&(sym::test, None))); }); } @@ -150,7 +130,7 @@ let matches = optgroups().parse(&["--test".to_string(), "--cfg=test".to_string()]).unwrap(); let mut handler = EarlyErrorHandler::new(ErrorOutputType::default()); let (sess, cfg) = mk_session(&mut handler, matches); - let cfg = build_configuration(&sess, to_crate_config(cfg)); + let cfg = build_configuration(&sess, cfg); let mut test_items = cfg.iter().filter(|&&(name, _)| name == sym::test); assert!(test_items.next().is_some()); assert!(test_items.next().is_none()); @@ -611,7 +591,7 @@ tracked!(force_frame_pointers, Some(false)); tracked!(force_unwind_tables, Some(true)); tracked!(inline_threshold, Some(0xf007ba11)); - tracked!(instrument_coverage, Some(InstrumentCoverage::All)); + tracked!(instrument_coverage, InstrumentCoverage::All); tracked!(link_dead_code, Some(true)); tracked!(linker_plugin_lto, LinkerPluginLto::LinkerPluginAuto); tracked!(llvm_args, vec![String::from("1"), String::from("2")]); @@ -681,7 +661,6 @@ // tidy-alphabetical-start untracked!(assert_incr_state, Some(String::from("loaded"))); untracked!(deduplicate_diagnostics, false); - untracked!(dep_tasks, true); untracked!(dont_buffer_diagnostics, true); untracked!(dump_dep_graph, true); untracked!(dump_mir, Some(String::from("abc"))); @@ -769,6 +748,7 @@ ); tracked!(codegen_backend, Some("abc".to_string())); 
tracked!(crate_attr, vec!["abc".to_string()]); + tracked!(cross_crate_inline_threshold, InliningThreshold::Always); tracked!(debug_info_for_profiling, true); tracked!(debug_macros, true); tracked!(dep_info_omit_d_target, true); @@ -787,7 +767,6 @@ tracked!(inline_mir, Some(true)); tracked!(inline_mir_hint_threshold, Some(123)); tracked!(inline_mir_threshold, Some(123)); - tracked!(instrument_coverage, Some(InstrumentCoverage::All)); tracked!(instrument_mcount, true); tracked!(instrument_xray, Some(InstrumentXRay::default())); tracked!(link_directives, false); @@ -814,7 +793,7 @@ tracked!(panic_abort_tests, true); tracked!(panic_in_drop, PanicStrategy::Abort); tracked!(plt, Some(true)); - tracked!(polonius, true); + tracked!(polonius, Polonius::Legacy); tracked!(precise_enum_drop_elaboration, false); tracked!(print_fuel, Some("abc".to_string())); tracked!(profile, true); @@ -838,7 +817,6 @@ tracked!(split_lto_unit, Some(true)); tracked!(src_hash_algorithm, Some(SourceFileHashAlgorithm::Sha1)); tracked!(stack_protector, StackProtector::All); - tracked!(symbol_mangling_version, Some(SymbolManglingVersion::V0)); tracked!(teach, true); tracked!(thinlto, Some(true)); tracked!(thir_unsafeck, true); @@ -877,6 +855,6 @@ let mut handler = EarlyErrorHandler::new(ErrorOutputType::default()); let matches = optgroups().parse(&["--edition=2018".to_string()]).unwrap(); - let (sessopts, _) = build_session_options_and_crate_config(&mut handler, matches); + let sessopts = build_session_options(&mut handler, &matches); assert!(sessopts.edition == Edition::Edition2018) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/util.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/util.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/util.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_interface/src/util.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,24 +3,18 @@ use libloading::Library; use rustc_ast as ast; use rustc_codegen_ssa::traits::CodegenBackend; -use rustc_data_structures::fx::{FxHashMap, FxHashSet}; #[cfg(parallel_compiler)] use rustc_data_structures::sync; -use rustc_errors::registry::Registry; use rustc_parse::validate_attr; use rustc_session as session; -use rustc_session::config::CheckCfg; -use rustc_session::config::{self, CrateType}; -use rustc_session::config::{OutFileName, OutputFilenames, OutputTypes}; +use rustc_session::config::{self, Cfg, CrateType, OutFileName, OutputFilenames, OutputTypes}; use rustc_session::filesearch::sysroot_candidates; use rustc_session::lint::{self, BuiltinLintDiagnostics, LintBuffer}; -use rustc_session::parse::CrateConfig; use rustc_session::{filesearch, output, Session}; use rustc_span::edit_distance::find_best_match_for_name; use rustc_span::edition::Edition; -use rustc_span::source_map::FileLoader; use rustc_span::symbol::{sym, Symbol}; -use session::{CompilerIO, EarlyErrorHandler}; +use session::EarlyErrorHandler; use std::env; use std::env::consts::{DLL_PREFIX, DLL_SUFFIX}; use std::mem; @@ -37,11 +31,7 @@ /// /// This is performed by checking whether a set of permitted features /// is available on the target machine, by querying the codegen backend. 
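The doc comment above belongs to `add_configuration`, whose signature follows just below: the `target_feature` cfgs it injects from the backend's answer are what let ordinary feature-gated code pick the right implementation at compile time, e.g. (user code, not part of this diff):

#[cfg(target_feature = "avx2")]
fn fast_path() {
    // compiled only when the codegen backend reports `avx2` for the current target
}

#[cfg(not(target_feature = "avx2"))]
fn fast_path() {
    // portable fallback otherwise
}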
-pub fn add_configuration( - cfg: &mut CrateConfig, - sess: &mut Session, - codegen_backend: &dyn CodegenBackend, -) { +pub fn add_configuration(cfg: &mut Cfg, sess: &mut Session, codegen_backend: &dyn CodegenBackend) { let tf = sym::target_feature; let unstable_target_features = codegen_backend.target_features(sess, true); @@ -57,80 +47,6 @@ } } -pub fn create_session( - handler: &EarlyErrorHandler, - sopts: config::Options, - cfg: FxHashSet<(String, Option)>, - check_cfg: CheckCfg, - locale_resources: &'static [&'static str], - file_loader: Option>, - io: CompilerIO, - lint_caps: FxHashMap, - make_codegen_backend: Option< - Box Box + Send>, - >, - descriptions: Registry, - ice_file: Option, - expanded_args: Vec, -) -> (Session, Box) { - let codegen_backend = if let Some(make_codegen_backend) = make_codegen_backend { - make_codegen_backend(&sopts) - } else { - get_codegen_backend( - handler, - &sopts.maybe_sysroot, - sopts.unstable_opts.codegen_backend.as_deref(), - ) - }; - - // target_override is documented to be called before init(), so this is okay - let target_override = codegen_backend.target_override(&sopts); - - let bundle = match rustc_errors::fluent_bundle( - sopts.maybe_sysroot.clone(), - sysroot_candidates().to_vec(), - sopts.unstable_opts.translate_lang.clone(), - sopts.unstable_opts.translate_additional_ftl.as_deref(), - sopts.unstable_opts.translate_directionality_markers, - ) { - Ok(bundle) => bundle, - Err(e) => { - handler.early_error(format!("failed to load fluent bundle: {e}")); - } - }; - - let mut locale_resources = Vec::from(locale_resources); - locale_resources.push(codegen_backend.locale_resource()); - - let mut sess = session::build_session( - handler, - sopts, - io, - bundle, - descriptions, - locale_resources, - lint_caps, - file_loader, - target_override, - rustc_version_str().unwrap_or("unknown"), - ice_file, - expanded_args, - ); - - codegen_backend.init(&sess); - - let mut cfg = config::build_configuration(&sess, config::to_crate_config(cfg)); - add_configuration(&mut cfg, &mut sess, &*codegen_backend); - - let mut check_cfg = config::to_crate_check_config(check_cfg); - check_cfg.fill_well_known(&sess.target); - - sess.parse_sess.config = cfg; - sess.parse_sess.check_config = check_cfg; - - (sess, codegen_backend) -} - const STACK_SIZE: usize = 8 * 1024 * 1024; fn get_stack_size() -> Option { @@ -485,21 +401,6 @@ } pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec { - // Unconditionally collect crate types from attributes to make them used - let attr_types: Vec = attrs - .iter() - .filter_map(|a| { - if a.has_name(sym::crate_type) { - match a.value_str() { - Some(s) => categorize_crate_type(s), - _ => None, - } - } else { - None - } - }) - .collect(); - // If we're generating a test executable, then ignore all other output // styles at all other locations if session.opts.test { @@ -513,6 +414,13 @@ #[allow(rustc::bad_opt_access)] let mut base = session.opts.crate_types.clone(); if base.is_empty() { + let attr_types = attrs.iter().filter_map(|a| { + if a.has_name(sym::crate_type) && let Some(s) = a.value_str() { + categorize_crate_type(s) + } else { + None + } + }); base.extend(attr_types); if base.is_empty() { base.push(output::default_output_for_target(session)); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lexer/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lexer/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lexer/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lexer/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -1,9 +1,8 @@ [package] name = "rustc_lexer" -version = "0.1.0" +version = "0.0.0" license = "MIT OR Apache-2.0" edition = "2021" - repository = "https://github.com/rust-lang/rust/" description = """ Rust lexer used by rustc. No stability guarantees are provided. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,23 +4,25 @@ edition = "2021" [dependencies] -tracing = "0.1" -unicode-security = "0.1.0" -rustc_middle = { path = "../rustc_middle" } +# tidy-alphabetical-start +rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_attr = { path = "../rustc_attr" } -rustc_errors = { path = "../rustc_errors" } -rustc_hir = { path = "../rustc_hir" } -rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_target = { path = "../rustc_target" } -rustc_ast = { path = "../rustc_ast" } -rustc_span = { path = "../rustc_span" } rustc_data_structures = { path = "../rustc_data_structures" } +rustc_errors = { path = "../rustc_errors" } rustc_feature = { path = "../rustc_feature" } +rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } +rustc_infer = { path = "../rustc_infer" } +rustc_macros = { path = "../rustc_macros" } +rustc_middle = { path = "../rustc_middle" } +rustc_parse_format = { path = "../rustc_parse_format" } rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } +rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } -rustc_parse_format = { path = "../rustc_parse_format" } -rustc_infer = { path = "../rustc_infer" } rustc_type_ir = { path = "../rustc_type_ir" } -rustc_macros = { path = "../rustc_macros" } +tracing = "0.1" +unicode-security = "0.1.0" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -5,6 +5,10 @@ .use_explicit_into_iter_suggestion = or use `IntoIterator::into_iter(..)` instead of `.into_iter()` to explicitly iterate by value +lint_async_fn_in_trait = use of `async fn` in public traits is discouraged as auto trait bounds cannot be specified + .note = you can suppress this lint if you plan to use the trait only in your own code, or do not care about auto traits like `Send` on the `Future` + .suggestion = you can alternatively desugar to a normal `fn` that returns `impl Future` and add any desired bounds such as `Send`, but these cannot be relaxed without a breaking API change + lint_atomic_ordering_fence = memory fences cannot have `Relaxed` ordering .help = consider using ordering modes `Acquire`, `Release`, `AcqRel` or `SeqCst` @@ -319,6 +323,8 @@ lint_invalid_reference_casting_note_book = for more information, visit +lint_invalid_reference_casting_note_ty_has_interior_mutability = even for types with interior mutability, the only legal way to obtain a 
mutable pointer from a shared reference is through `UnsafeCell::get` + lint_lintpass_by_hand = implementing `LintPass` by hand .help = try using `declare_lint_pass!` or `impl_lint_pass!` instead @@ -488,6 +494,8 @@ lint_requested_level = requested on the command line with `{$level} {$lint_name}` +lint_span_use_eq_ctxt = use `.eq_ctxt()` instead of `.ctxt() == .ctxt()` + lint_supertrait_as_deref_target = `{$t}` implements `Deref` with supertrait `{$target_principal}` as target .label = target type is set here @@ -543,19 +551,19 @@ lint_unused_comparisons = comparison is useless due to type limits +lint_unused_coroutine = + unused {$pre}{$count -> + [one] coroutine + *[other] coroutine + }{$post} that must be used + .note = coroutines are lazy and do nothing unless resumed + lint_unused_def = unused {$pre}`{$def}`{$post} that must be used .suggestion = use `let _ = ...` to ignore the resulting value lint_unused_delim = unnecessary {$delim} around {$item} .suggestion = remove these {$delim} -lint_unused_generator = - unused {$pre}{$count -> - [one] generator - *[other] generator - }{$post} that must be used - .note = generators are lazy and do nothing unless resumed - lint_unused_import_braces = braces around {$node} is unnecessary lint_unused_op = unused {$op} that must be used diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/async_fn_in_trait.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/async_fn_in_trait.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/async_fn_in_trait.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/async_fn_in_trait.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,127 @@ +use crate::lints::AsyncFnInTraitDiag; +use crate::LateContext; +use crate::LateLintPass; +use rustc_hir as hir; +use rustc_trait_selection::traits::error_reporting::suggestions::suggest_desugaring_async_fn_to_impl_future_in_trait; + +declare_lint! { + /// The `async_fn_in_trait` lint detects use of `async fn` in the + /// definition of a publicly-reachable trait. + /// + /// ### Example + /// + /// ```rust + /// pub trait Trait { + /// async fn method(&self); + /// } + /// # fn main() {} + /// ``` + /// + /// {{produces}} + /// + /// ### Explanation + /// + /// When `async fn` is used in a trait definition, the trait does not + /// promise that the opaque [`Future`] returned by the associated function + /// or method will implement any [auto traits] such as [`Send`]. This may + /// be surprising and may make the associated functions or methods on the + /// trait less useful than intended. On traits exposed publicly from a + /// crate, this may affect downstream crates whose authors cannot alter + /// the trait definition. + /// + /// For example, this code is invalid: + /// + /// ```rust,compile_fail + /// pub trait Trait { + /// async fn method(&self) {} + /// } + /// + /// fn test(x: T) { + /// fn spawn(_: T) {} + /// spawn(x.method()); // Not OK. + /// } + /// ``` + /// + /// This lint exists to warn authors of publicly-reachable traits that + /// they may want to consider desugaring the `async fn` to a normal `fn` + /// that returns an opaque `impl Future<..> + Send` type. 
+ /// + /// For example, instead of: + /// + /// ```rust + /// pub trait Trait { + /// async fn method(&self) {} + /// } + /// ``` + /// + /// The author of the trait may want to write: + /// + /// + /// ```rust + /// use core::future::Future; + /// pub trait Trait { + /// fn method(&self) -> impl Future + Send { async {} } + /// } + /// ``` + /// + /// This still allows the use of `async fn` within impls of the trait. + /// However, it also means that the trait will never be compatible with + /// impls where the returned [`Future`] of the method does not implement + /// `Send`. + /// + /// Conversely, if the trait is used only locally, if it is never used in + /// generic functions, or if it is only used in single-threaded contexts + /// that do not care whether the returned [`Future`] implements [`Send`], + /// then the lint may be suppressed. + /// + /// [`Future`]: https://doc.rust-lang.org/core/future/trait.Future.html + /// [`Send`]: https://doc.rust-lang.org/core/marker/trait.Send.html + /// [auto traits]: https://doc.rust-lang.org/reference/special-types-and-traits.html#auto-traits + pub ASYNC_FN_IN_TRAIT, + Warn, + "use of `async fn` in definition of a publicly-reachable trait" +} + +declare_lint_pass!( + /// Lint for use of `async fn` in the definition of a publicly-reachable + /// trait. + AsyncFnInTrait => [ASYNC_FN_IN_TRAIT] +); + +impl<'tcx> LateLintPass<'tcx> for AsyncFnInTrait { + fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'tcx>) { + if let hir::TraitItemKind::Fn(sig, body) = item.kind + && let hir::IsAsync::Async(async_span) = sig.header.asyncness + { + // RTN can be used to bound `async fn` in traits in a better way than "always" + if cx.tcx.features().return_type_notation { + return; + } + + // Only need to think about library implications of reachable traits + if !cx.tcx.effective_visibilities(()).is_reachable(item.owner_id.def_id) { + return; + } + + let hir::FnRetTy::Return(hir::Ty { kind: hir::TyKind::OpaqueDef(def, ..), .. }) = + sig.decl.output + else { + // This should never happen, but let's not ICE. 
+ return; + }; + let sugg = suggest_desugaring_async_fn_to_impl_future_in_trait( + cx.tcx, + sig, + body, + def.owner_id.def_id, + " + Send", + ); + cx.tcx.emit_spanned_lint( + ASYNC_FN_IN_TRAIT, + item.hir_id(), + async_span, + AsyncFnInTraitDiag { sugg }, + ); + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/builtin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/builtin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/builtin.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/builtin.rs 2023-12-21 16:55:28.000000000 +0000 @@ -121,16 +121,14 @@ { let condition_span = e.span.with_hi(cond.span.hi()); let replace = format!( - "{}loop", - label.map_or_else(String::new, |label| format!( - "{}: ", - label.ident, - )) - ); - cx.emit_spanned_lint(WHILE_TRUE, condition_span, BuiltinWhileTrue { - suggestion: condition_span, - replace, - }); + "{}loop", + label.map_or_else(String::new, |label| format!("{}: ", label.ident,)) + ); + cx.emit_spanned_lint( + WHILE_TRUE, + condition_span, + BuiltinWhileTrue { suggestion: condition_span, replace }, + ); } } } @@ -164,7 +162,9 @@ impl BoxPointers { fn check_heap_type(&self, cx: &LateContext<'_>, span: Span, ty: Ty<'_>) { for leaf in ty.walk() { - if let GenericArgKind::Type(leaf_ty) = leaf.unpack() && leaf_ty.is_box() { + if let GenericArgKind::Type(leaf_ty) = leaf.unpack() + && leaf_ty.is_box() + { cx.emit_spanned_lint(BOX_POINTERS, span, BuiltinBoxPointers { ty }); } } @@ -677,11 +677,17 @@ if type_implements_negative_copy_modulo_regions(cx.tcx, ty, param_env) { return; } + if def.is_variant_list_non_exhaustive() + || def.variants().iter().any(|variant| variant.is_field_list_non_exhaustive()) + { + return; + } // We shouldn't recommend implementing `Copy` on stateful things, // such as iterators. if let Some(iter_trait) = cx.tcx.get_diagnostic_item(sym::Iterator) - && cx.tcx + && cx + .tcx .infer_ctxt() .build() .type_implements_trait(iter_trait, [ty], param_env) @@ -1298,10 +1304,14 @@ // Now, check if the function has the `#[track_caller]` attribute && let Some(attr) = cx.tcx.get_attr(def_id, sym::track_caller) { - cx.emit_spanned_lint(UNGATED_ASYNC_FN_TRACK_CALLER, attr.span, BuiltinUngatedAsyncFnTrackCaller { - label: span, - parse_sess: &cx.tcx.sess.parse_sess, - }); + cx.emit_spanned_lint( + UNGATED_ASYNC_FN_TRACK_CALLER, + attr.span, + BuiltinUngatedAsyncFnTrackCaller { + label: span, + parse_sess: &cx.tcx.sess.parse_sess, + }, + ); } } } @@ -2244,7 +2254,7 @@ } declare_lint_pass!( - /// Check for used feature gates in `INCOMPLETE_FEATURES` in `rustc_feature/src/active.rs`. + /// Check for used feature gates in `INCOMPLETE_FEATURES` in `rustc_feature/src/unstable.rs`. 
IncompleteInternalFeatures => [INCOMPLETE_FEATURES, INTERNAL_FEATURES] ); @@ -2258,23 +2268,19 @@ .chain(features.declared_lib_features.iter().map(|(name, span)| (name, span))) .filter(|(&name, _)| features.incomplete(name) || features.internal(name)) .for_each(|(&name, &span)| { - let note = rustc_feature::find_feature_issue(name, GateIssue::Language) - .map(|n| BuiltinFeatureIssueNote { n }); - if features.incomplete(name) { + let note = rustc_feature::find_feature_issue(name, GateIssue::Language) + .map(|n| BuiltinFeatureIssueNote { n }); let help = HAS_MIN_FEATURES.contains(&name).then_some(BuiltinIncompleteFeaturesHelp); + cx.emit_spanned_lint( INCOMPLETE_FEATURES, span, BuiltinIncompleteFeatures { name, note, help }, ); } else { - cx.emit_spanned_lint( - INTERNAL_FEATURES, - span, - BuiltinInternalFeatures { name, note }, - ); + cx.emit_spanned_lint(INTERNAL_FEATURES, span, BuiltinInternalFeatures { name }); } }); } @@ -2469,7 +2475,7 @@ ty: Ty<'tcx>, init: InitKind, ) -> Option { - use rustc_type_ir::sty::TyKind::*; + use rustc_type_ir::TyKind::*; match ty.kind() { // Primitive types that don't like 0 as a value. Ref(..) => Some("references must be non-null".into()), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/context.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/context.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/context.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/context.rs 2023-12-21 16:55:28.000000000 +0000 @@ -31,7 +31,7 @@ use rustc_middle::middle::privacy::EffectiveVisibilities; use rustc_middle::middle::stability; use rustc_middle::ty::layout::{LayoutError, LayoutOfHelpers, TyAndLayout}; -use rustc_middle::ty::print::with_no_trimmed_paths; +use rustc_middle::ty::print::{with_no_trimmed_paths, PrintError}; use rustc_middle::ty::{self, print::Printer, GenericArg, RegisteredTools, Ty, TyCtxt}; use rustc_session::config::ExpectedValues; use rustc_session::lint::{BuiltinLintDiagnostics, LintExpectationId}; @@ -109,7 +109,7 @@ struct LintGroup { lint_ids: Vec, - from_plugin: bool, + is_loaded: bool, depr: Option, } @@ -160,9 +160,7 @@ // Don't display deprecated lint groups. depr.is_none() }) - .map(|(k, LintGroup { lint_ids, from_plugin, .. })| { - (*k, lint_ids.clone(), *from_plugin) - }) + .map(|(k, LintGroup { lint_ids, is_loaded, .. 
})| (*k, lint_ids.clone(), *is_loaded)) } pub fn register_early_pass( @@ -221,7 +219,7 @@ .entry(edition.lint_name()) .or_insert(LintGroup { lint_ids: vec![], - from_plugin: lint.is_plugin, + is_loaded: lint.is_loaded, depr: None, }) .lint_ids @@ -234,7 +232,7 @@ .entry("future_incompatible") .or_insert(LintGroup { lint_ids: vec![], - from_plugin: lint.is_plugin, + is_loaded: lint.is_loaded, depr: None, }) .lint_ids @@ -249,7 +247,7 @@ alias, LintGroup { lint_ids: vec![], - from_plugin: false, + is_loaded: false, depr: Some(LintAlias { name: lint_name, silent: true }), }, ); @@ -257,21 +255,21 @@ pub fn register_group( &mut self, - from_plugin: bool, + is_loaded: bool, name: &'static str, deprecated_name: Option<&'static str>, to: Vec, ) { let new = self .lint_groups - .insert(name, LintGroup { lint_ids: to, from_plugin, depr: None }) + .insert(name, LintGroup { lint_ids: to, is_loaded, depr: None }) .is_none(); if let Some(deprecated) = deprecated_name { self.lint_groups.insert( deprecated, LintGroup { lint_ids: vec![], - from_plugin, + is_loaded, depr: Some(LintAlias { name, silent: false }), }, ); @@ -727,11 +725,14 @@ .collect::>(); possibilities.sort(); + let mut should_print_possibilities = true; if let Some((value, value_span)) = value { if best_match_values.contains(&Some(value)) { db.span_suggestion(name_span, "there is a config with a similar name and value", best_match, Applicability::MaybeIncorrect); + should_print_possibilities = false; } else if best_match_values.contains(&None) { db.span_suggestion(name_span.to(value_span), "there is a config with a similar name and no value", best_match, Applicability::MaybeIncorrect); + should_print_possibilities = false; } else if let Some(first_value) = possibilities.first() { db.span_suggestion(name_span.to(value_span), "there is a config with a similar name and different values", format!("{best_match} = \"{first_value}\""), Applicability::MaybeIncorrect); } else { @@ -741,13 +742,25 @@ db.span_suggestion(name_span, "there is a config with a similar name", best_match, Applicability::MaybeIncorrect); } - if !possibilities.is_empty() { + if !possibilities.is_empty() && should_print_possibilities { let possibilities = possibilities.join("`, `"); db.help(format!("expected values for `{best_match}` are: `{possibilities}`")); } } else { db.span_suggestion(name_span, "there is a config with a similar name", best_match, Applicability::MaybeIncorrect); } + } else if !possibilities.is_empty() { + let mut possibilities = possibilities.iter() + .map(Symbol::as_str) + .collect::>(); + possibilities.sort(); + let possibilities = possibilities.join("`, `"); + + // The list of expected names can be long (even by default) and + // so the diagnostic produced can take a lot of space. To avoid + // cloging the user output we only want to print that diagnostic + // once. 
+ db.help_once(format!("expected names are: `{possibilities}`")); } }, BuiltinLintDiagnostics::UnexpectedCfgValue((name, name_span), value) => { @@ -1185,51 +1198,45 @@ /// } /// ``` pub fn get_def_path(&self, def_id: DefId) -> Vec { - pub struct AbsolutePathPrinter<'tcx> { - pub tcx: TyCtxt<'tcx>, + struct AbsolutePathPrinter<'tcx> { + tcx: TyCtxt<'tcx>, + path: Vec, } impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { - type Error = !; - - type Path = Vec; - type Region = (); - type Type = (); - type DynExistential = (); - type Const = (); - fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } - fn print_region(self, _region: ty::Region<'_>) -> Result { + fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { Ok(()) } - fn print_type(self, _ty: Ty<'tcx>) -> Result { + fn print_type(&mut self, _ty: Ty<'tcx>) -> Result<(), PrintError> { Ok(()) } fn print_dyn_existential( - self, + &mut self, _predicates: &'tcx ty::List>, - ) -> Result { + ) -> Result<(), PrintError> { Ok(()) } - fn print_const(self, _ct: ty::Const<'tcx>) -> Result { + fn print_const(&mut self, _ct: ty::Const<'tcx>) -> Result<(), PrintError> { Ok(()) } - fn path_crate(self, cnum: CrateNum) -> Result { - Ok(vec![self.tcx.crate_name(cnum)]) + fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { + self.path = vec![self.tcx.crate_name(cnum)]; + Ok(()) } fn path_qualified( - self, + &mut self, self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result { + ) -> Result<(), PrintError> { if trait_ref.is_none() { if let ty::Adt(def, args) = self_ty.kind() { return self.print_def_path(def.did(), args); @@ -1238,24 +1245,25 @@ // This shouldn't ever be needed, but just in case: with_no_trimmed_paths!({ - Ok(vec![match trait_ref { + self.path = vec![match trait_ref { Some(trait_ref) => Symbol::intern(&format!("{trait_ref:?}")), None => Symbol::intern(&format!("<{self_ty}>")), - }]) + }]; + Ok(()) }) } fn path_append_impl( - self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, _disambiguated_data: &DisambiguatedDefPathData, self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result { - let mut path = print_prefix(self)?; + ) -> Result<(), PrintError> { + print_prefix(self)?; // This shouldn't ever be needed, but just in case: - path.push(match trait_ref { + self.path.push(match trait_ref { Some(trait_ref) => { with_no_trimmed_paths!(Symbol::intern(&format!( "", @@ -1268,35 +1276,37 @@ } }); - Ok(path) + Ok(()) } fn path_append( - self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, - ) -> Result { - let mut path = print_prefix(self)?; + ) -> Result<(), PrintError> { + print_prefix(self)?; // Skip `::{{extern}}` blocks and `::{{constructor}}` on tuple/unit structs. 
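The `AbsolutePathPrinter` rework in this hunk drops the by-value `Printer` plumbing (associated `Path`/`Error` types, every method consuming and returning `self`) in favour of accumulating segments behind `&mut self` and reading the result out of a field afterwards. The shape of that pattern, reduced to a toy (names invented, not rustc's API):

struct PathBuilder {
    path: Vec<String>,
}

impl PathBuilder {
    fn path_crate(&mut self, krate: &str) {
        // start a fresh path, as the new `path_crate` does with `self.path = vec![..]`
        self.path = vec![krate.to_string()];
    }
    fn path_append(&mut self, segment: &str) {
        self.path.push(segment.to_string());
    }
}

fn main() {
    let mut printer = PathBuilder { path: Vec::new() };
    printer.path_crate("core");
    printer.path_append("option");
    printer.path_append("Option");
    assert_eq!(printer.path.join("::"), "core::option::Option");
}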
if let DefPathData::ForeignMod | DefPathData::Ctor = disambiguated_data.data { - return Ok(path); + return Ok(()); } - path.push(Symbol::intern(&disambiguated_data.data.to_string())); - Ok(path) + self.path.push(Symbol::intern(&disambiguated_data.data.to_string())); + Ok(()) } fn path_generic_args( - self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, _args: &[GenericArg<'tcx>], - ) -> Result { + ) -> Result<(), PrintError> { print_prefix(self) } } - AbsolutePathPrinter { tcx: self.tcx }.print_def_path(def_id, &[]).unwrap() + let mut printer = AbsolutePathPrinter { tcx: self.tcx, path: vec![] }; + printer.print_def_path(def_id, &[]).unwrap(); + printer.path } /// Returns the associated type `name` for `self_ty` as an implementation of `trait_id`. @@ -1342,7 +1352,7 @@ && let Some(init) = match parent_node { hir::Node::Expr(expr) => Some(expr), hir::Node::Local(hir::Local { init, .. }) => *init, - _ => None + _ => None, } { expr = init.peel_blocks(); @@ -1391,9 +1401,9 @@ hir::ItemKind::Const(.., body_id) | hir::ItemKind::Static(.., body_id) => { Some(self.tcx.hir().body(body_id).value) } - _ => None - } - _ => None + _ => None, + }, + _ => None, } { expr = init.peel_blocks(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/deref_into_dyn_supertrait.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/deref_into_dyn_supertrait.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/deref_into_dyn_supertrait.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/deref_into_dyn_supertrait.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,9 +4,10 @@ }; use rustc_hir as hir; -use rustc_middle::{traits::util::supertraits, ty}; +use rustc_middle::ty; use rustc_session::lint::FutureIncompatibilityReason; use rustc_span::sym; +use rustc_trait_selection::traits::supertraits; declare_lint! 
{ /// The `deref_into_dyn_supertrait` lint is output whenever there is a use of the @@ -75,14 +76,16 @@ && supertraits(cx.tcx, t_principal.with_self_ty(cx.tcx, cx.tcx.types.trait_object_dummy_self)) .any(|sup| sup.map_bound(|x| ty::ExistentialTraitRef::erase_self_ty(cx.tcx, x)) == target_principal) { - let label = impl_.items.iter().find_map(|i| (i.ident.name == sym::Target).then_some(i.span)).map(|label| SupertraitAsDerefTargetLabel { - label, - }); - cx.emit_spanned_lint(DEREF_INTO_DYN_SUPERTRAIT, cx.tcx.def_span(item.owner_id.def_id), SupertraitAsDerefTarget { - t, - target_principal, - label, - }); + let label = impl_ + .items + .iter() + .find_map(|i| (i.ident.name == sym::Target).then_some(i.span)) + .map(|label| SupertraitAsDerefTargetLabel { label }); + cx.emit_spanned_lint( + DEREF_INTO_DYN_SUPERTRAIT, + cx.tcx.def_span(item.owner_id.def_id), + SupertraitAsDerefTarget { t, target_principal, label }, + ); } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/drop_forget_useless.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/drop_forget_useless.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/drop_forget_useless.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/drop_forget_useless.rs 2023-12-21 16:55:28.000000000 +0000 @@ -149,18 +149,37 @@ let drop_is_single_call_in_arm = is_single_call_in_arm(cx, arg, expr); match fn_name { sym::mem_drop if arg_ty.is_ref() && !drop_is_single_call_in_arm => { - cx.emit_spanned_lint(DROPPING_REFERENCES, expr.span, DropRefDiag { arg_ty, label: arg.span }); - }, + cx.emit_spanned_lint( + DROPPING_REFERENCES, + expr.span, + DropRefDiag { arg_ty, label: arg.span }, + ); + } sym::mem_forget if arg_ty.is_ref() => { - cx.emit_spanned_lint(FORGETTING_REFERENCES, expr.span, ForgetRefDiag { arg_ty, label: arg.span }); - }, + cx.emit_spanned_lint( + FORGETTING_REFERENCES, + expr.span, + ForgetRefDiag { arg_ty, label: arg.span }, + ); + } sym::mem_drop if is_copy && !drop_is_single_call_in_arm => { - cx.emit_spanned_lint(DROPPING_COPY_TYPES, expr.span, DropCopyDiag { arg_ty, label: arg.span }); + cx.emit_spanned_lint( + DROPPING_COPY_TYPES, + expr.span, + DropCopyDiag { arg_ty, label: arg.span }, + ); } sym::mem_forget if is_copy => { - cx.emit_spanned_lint(FORGETTING_COPY_TYPES, expr.span, ForgetCopyDiag { arg_ty, label: arg.span }); + cx.emit_spanned_lint( + FORGETTING_COPY_TYPES, + expr.span, + ForgetCopyDiag { arg_ty, label: arg.span }, + ); } - sym::mem_drop if let ty::Adt(adt, _) = arg_ty.kind() && adt.is_manually_drop() => { + sym::mem_drop + if let ty::Adt(adt, _) = arg_ty.kind() + && adt.is_manually_drop() => + { cx.emit_spanned_lint( UNDROPPED_MANUALLY_DROPS, expr.span, @@ -169,9 +188,9 @@ label: arg.span, suggestion: UndroppedManuallyDropsSuggestion { start_span: arg.span.shrink_to_lo(), - end_span: arg.span.shrink_to_hi() - } - } + end_span: arg.span.shrink_to_hi(), + }, + }, ); } _ => return, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/expect.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/expect.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/expect.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/expect.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,7 +11,7 @@ } fn check_expectations(tcx: TyCtxt<'_>, tool_filter: Option) { - if !tcx.features().enabled(sym::lint_reasons) { + if !tcx.features().active(sym::lint_reasons) { return; } diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/for_loops_over_fallibles.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/for_loops_over_fallibles.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/for_loops_over_fallibles.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/for_loops_over_fallibles.rs 2023-12-21 16:55:28.000000000 +0000 @@ -59,13 +59,20 @@ _ => return, }; - let sub = if let Some(recv) = extract_iterator_next_call(cx, arg) + let sub = if let Some(recv) = extract_iterator_next_call(cx, arg) && let Ok(recv_snip) = cx.sess().source_map().span_to_snippet(recv.span) - { - ForLoopsOverFalliblesLoopSub::RemoveNext { suggestion: recv.span.between(arg.span.shrink_to_hi()), recv_snip } - } else { - ForLoopsOverFalliblesLoopSub::UseWhileLet { start_span: expr.span.with_hi(pat.span.lo()), end_span: pat.span.between(arg.span), var } - } ; + { + ForLoopsOverFalliblesLoopSub::RemoveNext { + suggestion: recv.span.between(arg.span.shrink_to_hi()), + recv_snip, + } + } else { + ForLoopsOverFalliblesLoopSub::UseWhileLet { + start_span: expr.span.with_hi(pat.span.lo()), + end_span: pat.span.between(arg.span), + var, + } + }; let question_mark = suggest_question_mark(cx, adt, args, expr.span) .then(|| ForLoopsOverFalliblesQuestionMark { suggestion: arg.span.shrink_to_hi() }); let suggestion = ForLoopsOverFalliblesSuggestion { @@ -84,13 +91,13 @@ fn extract_for_loop<'tcx>(expr: &Expr<'tcx>) -> Option<(&'tcx Pat<'tcx>, &'tcx Expr<'tcx>)> { if let hir::ExprKind::DropTemps(e) = expr.kind - && let hir::ExprKind::Match(iterexpr, [arm], hir::MatchSource::ForLoopDesugar) = e.kind - && let hir::ExprKind::Call(_, [arg]) = iterexpr.kind - && let hir::ExprKind::Loop(block, ..) = arm.body.kind - && let [stmt] = block.stmts - && let hir::StmtKind::Expr(e) = stmt.kind - && let hir::ExprKind::Match(_, [_, some_arm], _) = e.kind - && let hir::PatKind::Struct(_, [field], _) = some_arm.pat.kind + && let hir::ExprKind::Match(iterexpr, [arm], hir::MatchSource::ForLoopDesugar) = e.kind + && let hir::ExprKind::Call(_, [arg]) = iterexpr.kind + && let hir::ExprKind::Loop(block, ..) = arm.body.kind + && let [stmt] = block.stmts + && let hir::StmtKind::Expr(e) = stmt.kind + && let hir::ExprKind::Match(_, [_, some_arm], _) = e.kind + && let hir::PatKind::Struct(_, [field], _) = some_arm.pat.kind { Some((field.pat, arg)) } else { @@ -104,11 +111,11 @@ ) -> Option<&'tcx Expr<'tcx>> { // This won't work for `Iterator::next(iter)`, is this an issue? if let hir::ExprKind::MethodCall(_, recv, _, _) = expr.kind - && cx.typeck_results().type_dependent_def_id(expr.hir_id) == cx.tcx.lang_items().next_fn() + && cx.typeck_results().type_dependent_def_id(expr.hir_id) == cx.tcx.lang_items().next_fn() { Some(recv) } else { - return None + return None; } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/foreign_modules.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/foreign_modules.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/foreign_modules.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/foreign_modules.rs 2023-12-21 16:55:28.000000000 +0000 @@ -262,7 +262,7 @@ true } else { // Do a full, depth-first comparison between the two. 
- use rustc_type_ir::sty::TyKind::*; + use rustc_type_ir::TyKind::*; let a_kind = a.kind(); let b_kind = b.kind(); @@ -369,8 +369,8 @@ (Dynamic(..), Dynamic(..)) | (Error(..), Error(..)) | (Closure(..), Closure(..)) - | (Generator(..), Generator(..)) - | (GeneratorWitness(..), GeneratorWitness(..)) + | (Coroutine(..), Coroutine(..)) + | (CoroutineWitness(..), CoroutineWitness(..)) | (Alias(ty::Projection, ..), Alias(ty::Projection, ..)) | (Alias(ty::Inherent, ..), Alias(ty::Inherent, ..)) | (Alias(ty::Opaque, ..), Alias(ty::Opaque, ..)) => false, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/internal.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/internal.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/internal.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/internal.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,14 +3,14 @@ use crate::lints::{ BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand, NonExistentDocKeyword, - QueryInstability, TyQualified, TykindDiag, TykindKind, UntranslatableDiag, + QueryInstability, SpanUseEqCtxtDiag, TyQualified, TykindDiag, TykindKind, UntranslatableDiag, UntranslatableDiagnosticTrivial, }; use crate::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext}; use rustc_ast as ast; use rustc_hir::def::Res; use rustc_hir::{def_id::DefId, Expr, ExprKind, GenericArg, PatKind, Path, PathSegment, QPath}; -use rustc_hir::{HirId, Impl, Item, ItemKind, Node, Pat, Ty, TyKind}; +use rustc_hir::{BinOp, BinOpKind, HirId, Impl, Item, ItemKind, Node, Pat, Ty, TyKind}; use rustc_middle::ty; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::hygiene::{ExpnKind, MacroKind}; @@ -62,13 +62,11 @@ if let Some(def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) => { Some((segment.ident.span, def_id, cx.typeck_results().node_args(expr.hir_id))) - }, - _ => { - match cx.typeck_results().node_type(expr.hir_id).kind() { - &ty::FnDef(def_id, args) => Some((expr.span, def_id, args)), - _ => None, - } } + _ => match cx.typeck_results().node_type(expr.hir_id).kind() { + &ty::FnDef(def_id, args) => Some((expr.span, def_id, args)), + _ => None, + }, } } @@ -134,14 +132,11 @@ _: rustc_hir::HirId, ) { if let Some(segment) = path.segments.iter().nth_back(1) - && lint_ty_kind_usage(cx, &segment.res) + && lint_ty_kind_usage(cx, &segment.res) { - let span = path.span.with_hi( - segment.args.map_or(segment.ident.span, |a| a.span_ext).hi() - ); - cx.emit_spanned_lint(USAGE_OF_TY_TYKIND, path.span, TykindKind { - suggestion: span, - }); + let span = + path.span.with_hi(segment.args.map_or(segment.ident.span, |a| a.span_ext).hi()); + cx.emit_spanned_lint(USAGE_OF_TY_TYKIND, path.span, TykindKind { suggestion: span }); } } @@ -166,10 +161,7 @@ None } } - Some(Node::Expr(Expr { - kind: ExprKind::Path(qpath), - .. - })) => { + Some(Node::Expr(Expr { kind: ExprKind::Path(qpath), .. })) => { if let QPath::TypeRelative(qpath_ty, ..) = qpath && qpath_ty.hir_id == ty.hir_id { @@ -180,10 +172,7 @@ } // Can't unify these two branches because qpath below is `&&` and above is `&` // and `A | B` paths don't play well together with adjustments, apparently. - Some(Node::Expr(Expr { - kind: ExprKind::Struct(qpath, ..), - .. - })) => { + Some(Node::Expr(Expr { kind: ExprKind::Struct(qpath, ..), .. })) => { if let QPath::TypeRelative(qpath_ty, ..) 
= qpath && qpath_ty.hir_id == ty.hir_id { @@ -192,22 +181,28 @@ None } } - _ => None + _ => None, }; match span { Some(span) => { - cx.emit_spanned_lint(USAGE_OF_TY_TYKIND, path.span, TykindKind { - suggestion: span, - }); - }, + cx.emit_spanned_lint( + USAGE_OF_TY_TYKIND, + path.span, + TykindKind { suggestion: span }, + ); + } None => cx.emit_spanned_lint(USAGE_OF_TY_TYKIND, path.span, TykindDiag), } - } else if !ty.span.from_expansion() && path.segments.len() > 1 && let Some(ty) = is_ty_or_ty_ctxt(cx, &path) { - cx.emit_spanned_lint(USAGE_OF_QUALIFIED_TY, path.span, TyQualified { - ty, - suggestion: path.span, - }); + } else if !ty.span.from_expansion() + && path.segments.len() > 1 + && let Some(ty) = is_ty_or_ty_ctxt(cx, &path) + { + cx.emit_spanned_lint( + USAGE_OF_QUALIFIED_TY, + path.span, + TyQualified { ty, suggestion: path.span }, + ); } } _ => {} @@ -398,11 +393,11 @@ } debug!(?parent); - if let Node::Item(Item { kind: ItemKind::Impl(impl_), .. }) = parent && - let Impl { of_trait: Some(of_trait), .. } = impl_ && - let Some(def_id) = of_trait.trait_def_id() && - let Some(name) = cx.tcx.get_diagnostic_name(def_id) && - matches!(name, sym::IntoDiagnostic | sym::AddToDiagnostic | sym::DecorateLint) + if let Node::Item(Item { kind: ItemKind::Impl(impl_), .. }) = parent + && let Impl { of_trait: Some(of_trait), .. } = impl_ + && let Some(def_id) = of_trait.trait_def_id() + && let Some(name) = cx.tcx.get_diagnostic_name(def_id) + && matches!(name, sym::IntoDiagnostic | sym::AddToDiagnostic | sym::DecorateLint) { found_impl = true; break; @@ -416,9 +411,9 @@ let mut found_diagnostic_message = false; for ty in args.types() { debug!(?ty); - if let Some(adt_def) = ty.ty_adt_def() && - let Some(name) = cx.tcx.get_diagnostic_name(adt_def.did()) && - matches!(name, sym::DiagnosticMessage | sym::SubdiagnosticMessage) + if let Some(adt_def) = ty.ty_adt_def() + && let Some(name) = cx.tcx.get_diagnostic_name(adt_def.did()) + && matches!(name, sym::DiagnosticMessage | sym::SubdiagnosticMessage) { found_diagnostic_message = true; break; @@ -486,8 +481,9 @@ } }; if let ast::ExprKind::Lit(lit) = arg.kind - && let ast::token::LitKind::Str = lit.kind { - true + && let ast::token::LitKind::Str = lit.kind + { + true } else { false } @@ -524,17 +520,50 @@ } for field in adt_def.all_fields() { - if field.name == target.name && - let Some(attr) = cx.tcx.get_attr(field.did, sym::rustc_lint_opt_deny_field_access) && - let Some(items) = attr.meta_item_list() && - let Some(item) = items.first() && - let Some(lit) = item.lit() && - let ast::LitKind::Str(val, _) = lit.kind + if field.name == target.name + && let Some(attr) = + cx.tcx.get_attr(field.did, sym::rustc_lint_opt_deny_field_access) + && let Some(items) = attr.meta_item_list() + && let Some(item) = items.first() + && let Some(lit) = item.lit() + && let ast::LitKind::Str(val, _) = lit.kind { - cx.emit_spanned_lint(BAD_OPT_ACCESS, expr.span, BadOptAccessDiag { - msg: val.as_str(), - }); + cx.emit_spanned_lint( + BAD_OPT_ACCESS, + expr.span, + BadOptAccessDiag { msg: val.as_str() }, + ); } } } } + +declare_tool_lint! { + pub rustc::SPAN_USE_EQ_CTXT, + Allow, + "forbid uses of `==` with `Span::ctxt`, suggest `Span::eq_ctxt` instead", + report_in_external_macro: true +} + +declare_lint_pass!(SpanUseEqCtxt => [SPAN_USE_EQ_CTXT]); + +impl<'tcx> LateLintPass<'tcx> for SpanUseEqCtxt { + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'_>) { + if let ExprKind::Binary(BinOp { node: BinOpKind::Eq, .. 
}, lhs, rhs) = expr.kind { + if is_span_ctxt_call(cx, lhs) && is_span_ctxt_call(cx, rhs) { + cx.emit_spanned_lint(SPAN_USE_EQ_CTXT, expr.span, SpanUseEqCtxtDiag); + } + } + } +} + +fn is_span_ctxt_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { + match &expr.kind { + ExprKind::MethodCall(..) => cx + .typeck_results() + .type_dependent_def_id(expr.hir_id) + .is_some_and(|call_did| cx.tcx.is_diagnostic_item(sym::SpanCtxt, call_did)), + + _ => false, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/invalid_from_utf8.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/invalid_from_utf8.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/invalid_from_utf8.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/invalid_from_utf8.rs 2023-12-21 16:55:28.000000000 +0000 @@ -64,8 +64,13 @@ && let ExprKind::Path(ref qpath) = path.kind && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id() && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id) - && [sym::str_from_utf8, sym::str_from_utf8_mut, - sym::str_from_utf8_unchecked, sym::str_from_utf8_unchecked_mut].contains(&diag_item) + && [ + sym::str_from_utf8, + sym::str_from_utf8_mut, + sym::str_from_utf8_unchecked, + sym::str_from_utf8_unchecked_mut, + ] + .contains(&diag_item) { let lint = |label, utf8_error: Utf8Error| { let method = diag_item.as_str().strip_prefix("str_").unwrap(); @@ -74,13 +79,17 @@ let is_unchecked_variant = diag_item.as_str().contains("unchecked"); cx.emit_spanned_lint( - if is_unchecked_variant { INVALID_FROM_UTF8_UNCHECKED } else { INVALID_FROM_UTF8 }, + if is_unchecked_variant { + INVALID_FROM_UTF8_UNCHECKED + } else { + INVALID_FROM_UTF8 + }, expr.span, if is_unchecked_variant { InvalidFromUtf8Diag::Unchecked { method, valid_up_to, label } } else { InvalidFromUtf8Diag::Checked { method, valid_up_to, label } - } + }, ) }; @@ -95,18 +104,19 @@ { lint(init.span, utf8_error); } - }, + } ExprKind::Array(args) => { - let elements = args.iter().map(|e|{ - match &e.kind { + let elements = args + .iter() + .map(|e| match &e.kind { ExprKind::Lit(Spanned { node: lit, .. }) => match lit { LitKind::Byte(b) => Some(*b), LitKind::Int(b, _) => Some(*b as u8), - _ => None - } - _ => None - } - }).collect::>>(); + _ => None, + }, + _ => None, + }) + .collect::>>(); if let Some(elements) = elements && let Err(utf8_error) = std::str::from_utf8(&elements) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/levels.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/levels.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/levels.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/levels.rs 2023-12-21 16:55:28.000000000 +0000 @@ -634,7 +634,9 @@ /// diagnostic with no change to `specs`. fn insert_spec(&mut self, id: LintId, (mut level, src): LevelAndSource) { let (old_level, old_src) = self.provider.get_lint_level(id.lint, &self.sess); - if let Level::Expect(id) = &mut level && let LintExpectationId::Stable { .. } = id { + if let Level::Expect(id) = &mut level + && let LintExpectationId::Stable { .. } = id + { *id = id.normalize(); } // Setting to a non-forbid level is an error if the lint previously had @@ -706,7 +708,9 @@ // The lint `unfulfilled_lint_expectations` can't be expected, as it would suppress itself. // Handling expectations of this lint would add additional complexity with little to no // benefit. 
The expect level for this lint will therefore be ignored. - if let Level::Expect(_) = level && id == LintId::of(UNFULFILLED_LINT_EXPECTATIONS) { + if let Level::Expect(_) = level + && id == LintId::of(UNFULFILLED_LINT_EXPECTATIONS) + { return; } @@ -747,8 +751,9 @@ None => continue, // This is the only lint level with a `LintExpectationId` that can be created from an attribute Some(Level::Expect(unstable_id)) if let Some(hir_id) = source_hir_id => { - let LintExpectationId::Unstable { attr_id, lint_index } = unstable_id - else { bug!("stable id Level::from_attr") }; + let LintExpectationId::Unstable { attr_id, lint_index } = unstable_id else { + bug!("stable id Level::from_attr") + }; let stable_id = LintExpectationId::Stable { hir_id, @@ -1057,7 +1062,7 @@ #[track_caller] fn check_gated_lint(&self, lint_id: LintId, span: Span, lint_from_cli: bool) -> bool { if let Some(feature) = lint_id.lint.feature_gate { - if !self.features.enabled(feature) { + if !self.features.active(feature) { let lint = builtin::UNKNOWN_LINTS; let (level, src) = self.lint_level(builtin::UNKNOWN_LINTS); struct_lint_level( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,7 +10,7 @@ //! all other analyses. The `LintPass`es built into rustc are defined //! within [rustc_session::lint::builtin], //! which has further comments on how to add such a lint. -//! rustc can also load user-defined lint plugins via the plugin mechanism. +//! rustc can also load external lint plugins, as is done for Clippy. //! //! Some of rustc's lints are defined elsewhere in the compiler and work by //! calling `add_lint()` on the overall `Session` object. 
This works when @@ -27,6 +27,8 @@ #![allow(rustc::potential_query_instability)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature(array_windows)] #![feature(box_patterns)] #![feature(control_flow_enum)] @@ -50,6 +52,7 @@ extern crate tracing; mod array_into_iter; +mod async_fn_in_trait; pub mod builtin; mod context; mod deref_into_dyn_supertrait; @@ -96,6 +99,7 @@ }; use array_into_iter::ArrayIntoIter; +use async_fn_in_trait::AsyncFnInTrait; use builtin::*; use deref_into_dyn_supertrait::*; use drop_forget_useless::*; @@ -234,6 +238,7 @@ MapUnitFn: MapUnitFn, MissingDebugImplementations: MissingDebugImplementations, MissingDoc: MissingDoc, + AsyncFnInTrait: AsyncFnInTrait, ] ] ); @@ -501,6 +506,11 @@ "replaced with another group of lints, see RFC \ for more information", ); + store.register_removed( + "invalid_alignment", + "converted into hard error, see PR #104616 \ + for more information", + ); } fn register_internals(store: &mut LintStore) { @@ -521,6 +531,8 @@ store.register_late_mod_pass(|_| Box::new(BadOptAccess)); store.register_lints(&PassByValue::get_lints()); store.register_late_mod_pass(|_| Box::new(PassByValue)); + store.register_lints(&SpanUseEqCtxt::get_lints()); + store.register_late_mod_pass(|_| Box::new(SpanUseEqCtxt)); // FIXME(davidtwco): deliberately do not include `UNTRANSLATABLE_DIAGNOSTIC` and // `DIAGNOSTIC_OUTSIDE_OF_IMPL` here because `-Wrustc::internal` is provided to every crate and // these lints will trigger all of the time - change this once migration to diagnostic structs @@ -538,6 +550,7 @@ LintId::of(USAGE_OF_QUALIFIED_TY), LintId::of(EXISTING_DOC_KEYWORD), LintId::of(BAD_OPT_ACCESS), + LintId::of(SPAN_USE_EQ_CTXT), ], ); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/lints.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/lints.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/lints.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/lints.rs 2023-12-21 16:55:28.000000000 +0000 @@ -412,8 +412,6 @@ #[note] pub struct BuiltinInternalFeatures { pub name: Symbol, - #[subdiagnostic] - pub note: Option, } #[derive(Subdiagnostic)] @@ -771,12 +769,16 @@ BorrowAsMut { #[label] orig_cast: Option, + #[note(lint_invalid_reference_casting_note_ty_has_interior_mutability)] + ty_has_interior_mutability: Option<()>, }, #[diag(lint_invalid_reference_casting_assign_to_ref)] #[note(lint_invalid_reference_casting_note_book)] AssignToRef { #[label] orig_cast: Option, + #[note(lint_invalid_reference_casting_note_ty_has_interior_mutability)] + ty_has_interior_mutability: Option<()>, }, } @@ -899,6 +901,10 @@ } #[derive(LintDiagnostic)] +#[diag(lint_span_use_eq_ctxt)] +pub struct SpanUseEqCtxtDiag; + +#[derive(LintDiagnostic)] #[diag(lint_tykind_kind)] pub struct TykindKind { #[suggestion(code = "ty", applicability = "maybe-incorrect")] @@ -1694,9 +1700,9 @@ // FIXME(davidtwco): this isn't properly translatable because of the // pre/post strings #[derive(LintDiagnostic)] -#[diag(lint_unused_generator)] +#[diag(lint_unused_coroutine)] #[note] -pub struct UnusedGenerator<'a> { +pub struct UnusedCoroutine<'a> { pub count: usize, pub pre: &'a str, pub post: &'a str, @@ -1818,3 +1824,24 @@ #[derive(LintDiagnostic)] #[diag(lint_unused_allocation_mut)] pub struct UnusedAllocationMutDiag; + +pub struct AsyncFnInTraitDiag { + pub sugg: Option>, +} + +impl<'a> 
DecorateLint<'a, ()> for AsyncFnInTraitDiag { + fn decorate_lint<'b>( + self, + diag: &'b mut rustc_errors::DiagnosticBuilder<'a, ()>, + ) -> &'b mut rustc_errors::DiagnosticBuilder<'a, ()> { + diag.note(fluent::lint_note); + if let Some(sugg) = self.sugg { + diag.multipart_suggestion(fluent::lint_suggestion, sugg, Applicability::MaybeIncorrect); + } + diag + } + + fn msg(&self) -> rustc_errors::DiagnosticMessage { + fluent::lint_async_fn_in_trait + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/multiple_supertrait_upcastable.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/multiple_supertrait_upcastable.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/multiple_supertrait_upcastable.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/multiple_supertrait_upcastable.rs 2023-12-21 16:55:28.000000000 +0000 @@ -42,18 +42,17 @@ if let hir::ItemKind::Trait(_, _, _, _, _) = item.kind && cx.tcx.object_safety_violations(def_id).is_empty() { - let direct_super_traits_iter = cx.tcx - .super_predicates_of(def_id) - .predicates - .into_iter() - .filter_map(|(pred, _)| pred.as_trait_clause()); + let direct_super_traits_iter = cx + .tcx + .super_predicates_of(def_id) + .predicates + .into_iter() + .filter_map(|(pred, _)| pred.as_trait_clause()); if direct_super_traits_iter.count() > 1 { cx.emit_spanned_lint( MULTIPLE_SUPERTRAIT_UPCASTABLE, cx.tcx.def_span(def_id), - crate::lints::MultipleSupertraitUpcastable { - ident: item.ident - }, + crate::lints::MultipleSupertraitUpcastable { ident: item.ident }, ); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/nonstandard_style.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/nonstandard_style.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/nonstandard_style.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/nonstandard_style.rs 2023-12-21 16:55:28.000000000 +0000 @@ -511,7 +511,9 @@ } fn check_impl_item(&mut self, cx: &LateContext<'_>, ii: &hir::ImplItem<'_>) { - if let hir::ImplItemKind::Const(..) = ii.kind && !assoc_item_in_trait_impl(cx, ii) { + if let hir::ImplItemKind::Const(..) = ii.kind + && !assoc_item_in_trait_impl(cx, ii) + { NonUpperCaseGlobals::check_upper_case(cx, "associated constant", &ii.ident); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs 2023-12-21 16:55:28.000000000 +0000 @@ -37,8 +37,6 @@ /// type Assoc: Duh; /// } /// - /// struct Struct; - /// /// impl Trait for F { /// type Assoc = F; /// } @@ -53,12 +51,12 @@ /// {{produces}} /// /// In this example, `test` declares that the associated type `Assoc` for - /// `impl Trait` is `impl Sized`, which does not satisfy the `Send` bound + /// `impl Trait` is `impl Sized`, which does not satisfy the bound `Duh` /// on the associated type. /// /// Although the hidden type, `i32` does satisfy this bound, we do not /// consider the return type to be well-formed with this lint. It can be - /// fixed by changing `Tait = impl Sized` into `Tait = impl Sized + Send`. + /// fixed by changing `Tait = impl Sized` into `Tait = impl Sized + Duh`. 
pub OPAQUE_HIDDEN_INFERRED_BOUND, Warn, "detects the use of nested `impl Trait` types in associated type bounds that are not general enough" @@ -79,9 +77,7 @@ for (pred, pred_span) in cx.tcx.explicit_item_bounds(def_id).instantiate_identity_iter_copied() { - // Liberate bound regions in the predicate since we - // don't actually care about lifetimes in this check. - let predicate = cx.tcx.liberate_late_bound_regions(def_id, pred.kind()); + let predicate = infcx.instantiate_binder_with_placeholders(pred.kind()); let ty::ClauseKind::Projection(proj) = predicate else { continue; }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/ptr_nulls.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/ptr_nulls.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/ptr_nulls.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/ptr_nulls.rs 2023-12-21 16:55:28.000000000 +0000 @@ -46,22 +46,26 @@ if let ExprKind::MethodCall(_, _expr, [], _) = e.kind && let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id) && cx.tcx.has_attr(def_id, sym::rustc_never_returns_null_ptr) - && let Some(fn_name) = cx.tcx.opt_item_ident(def_id) { + && let Some(fn_name) = cx.tcx.opt_item_ident(def_id) + { return Some(PtrNullChecksDiag::FnRet { fn_name }); } else if let ExprKind::Call(path, _args) = e.kind && let ExprKind::Path(ref qpath) = path.kind && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id() && cx.tcx.has_attr(def_id, sym::rustc_never_returns_null_ptr) - && let Some(fn_name) = cx.tcx.opt_item_ident(def_id) { + && let Some(fn_name) = cx.tcx.opt_item_ident(def_id) + { return Some(PtrNullChecksDiag::FnRet { fn_name }); } e = if let ExprKind::Cast(expr, t) = e.kind - && let TyKind::Ptr(_) = t.kind { + && let TyKind::Ptr(_) = t.kind + { had_at_least_one_cast = true; expr } else if let ExprKind::MethodCall(_, expr, [], _) = e.kind && let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id) - && matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::ptr_cast | sym::ptr_cast_mut)) { + && matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::ptr_cast | sym::ptr_cast_mut)) + { had_at_least_one_cast = true; expr } else if had_at_least_one_cast { @@ -127,10 +131,11 @@ // (fn_ptr as * ) == (0 as ) ExprKind::Cast(cast_expr, _) if let ExprKind::Lit(spanned) = cast_expr.kind - && let LitKind::Int(v, _) = spanned.node && v == 0 => + && let LitKind::Int(v, _) = spanned.node + && v == 0 => { cx.emit_spanned_lint(USELESS_PTR_NULL_CHECKS, expr.span, diag) - }, + } // Catching: // (fn_ptr as * ) == std::ptr::null() @@ -141,9 +146,9 @@ && (diag_item == sym::ptr_null || diag_item == sym::ptr_null_mut) => { cx.emit_spanned_lint(USELESS_PTR_NULL_CHECKS, expr.span, diag) - }, + } - _ => {}, + _ => {} } } _ => {} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/reference_casting.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/reference_casting.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/reference_casting.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/reference_casting.rs 2023-12-21 16:55:28.000000000 +0000 @@ -43,19 +43,19 @@ let init = cx.expr_or_init(e); - let orig_cast = if is_cast_from_const_to_mut(cx, init) { - if init.span != e.span { Some(init.span) } else { None } - } else { + let Some(ty_has_interior_mutability) = is_cast_from_const_to_mut(cx, init) else { return; }; + let orig_cast = if init.span != e.span { 
Some(init.span) } else { None }; + let ty_has_interior_mutability = ty_has_interior_mutability.then_some(()); cx.emit_spanned_lint( INVALID_REFERENCE_CASTING, expr.span, if is_assignment { - InvalidReferenceCastingDiag::AssignToRef { orig_cast } + InvalidReferenceCastingDiag::AssignToRef { orig_cast, ty_has_interior_mutability } } else { - InvalidReferenceCastingDiag::BorrowAsMut { orig_cast } + InvalidReferenceCastingDiag::BorrowAsMut { orig_cast, ty_has_interior_mutability } }, ); } @@ -93,7 +93,10 @@ if let ExprKind::Call(path, [arg_ptr, _arg_val]) = e.kind && let ExprKind::Path(ref qpath) = path.kind && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id() - && matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::ptr_write | sym::ptr_write_volatile | sym::ptr_write_unaligned)) + && matches!( + cx.tcx.get_diagnostic_name(def_id), + Some(sym::ptr_write | sym::ptr_write_volatile | sym::ptr_write_unaligned) + ) { Some((true, arg_ptr)) } else { @@ -104,7 +107,10 @@ deref_assign_or_addr_of(e).or_else(|| ptr_write(cx, e)) } -fn is_cast_from_const_to_mut<'tcx>(cx: &LateContext<'tcx>, orig_expr: &'tcx Expr<'tcx>) -> bool { +fn is_cast_from_const_to_mut<'tcx>( + cx: &LateContext<'tcx>, + orig_expr: &'tcx Expr<'tcx>, +) -> Option { let mut need_check_freeze = false; let mut e = orig_expr; @@ -112,7 +118,7 @@ // Bail out early if the end type is **not** a mutable pointer. if !matches!(end_ty.kind(), ty::RawPtr(TypeAndMut { ty: _, mutbl: Mutability::Mut })) { - return false; + return None; } loop { @@ -155,10 +161,11 @@ // // We also consider non concrete skeleton types (ie generics) // to be an issue since there is no way to make it safe for abitrary types. - !need_check_freeze - || inner_ty.is_freeze(cx.tcx, cx.param_env) - || !inner_ty.has_concrete_skeleton() + let inner_ty_has_interior_mutability = + !inner_ty.is_freeze(cx.tcx, cx.param_env) && inner_ty.has_concrete_skeleton(); + (!need_check_freeze || !inner_ty_has_interior_mutability) + .then_some(inner_ty_has_interior_mutability) } else { - false + None } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/types.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/types.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/types.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/types.rs 2023-12-21 16:55:28.000000000 +0000 @@ -140,13 +140,15 @@ pub struct TypeLimits { /// Id of the last visited negated expression negated_expr_id: Option, + /// Span of the last visited negated expression + negated_expr_span: Option, } impl_lint_pass!(TypeLimits => [UNUSED_COMPARISONS, OVERFLOWING_LITERALS, INVALID_NAN_COMPARISONS]); impl TypeLimits { pub fn new() -> TypeLimits { - TypeLimits { negated_expr_id: None } + TypeLimits { negated_expr_id: None, negated_expr_span: None } } } @@ -161,8 +163,10 @@ ty: &str, ) -> bool { // Look past casts to support cases like `0..256 as u8` - let (expr, lit_span) = if let Node::Expr(par_expr) = cx.tcx.hir().get(cx.tcx.hir().parent_id(expr.hir_id)) - && let ExprKind::Cast(_, _) = par_expr.kind { + let (expr, lit_span) = if let Node::Expr(par_expr) = + cx.tcx.hir().get(cx.tcx.hir().parent_id(expr.hir_id)) + && let ExprKind::Cast(_, _) = par_expr.kind + { (par_expr, expr.span) } else { (expr, expr.span) @@ -426,17 +430,15 @@ return; } - let lit = cx - .sess() - .source_map() - .span_to_snippet(lit.span) - .expect("must get snippet from literal"); + let span = if negative { type_limits.negated_expr_span.unwrap() } else { e.span }; + 
let lit = + cx.sess().source_map().span_to_snippet(span).expect("must get snippet from literal"); let help = get_type_suggestion(cx.typeck_results().node_type(e.hir_id), v, negative) .map(|suggestion_ty| OverflowingIntHelp { suggestion_ty }); cx.emit_spanned_lint( OVERFLOWING_LITERALS, - e.span, + span, OverflowingInt { ty: t.name_str(), lit, min, max, help }, ); } @@ -580,8 +582,8 @@ ) -> InvalidNanComparisons { // FIXME(#72505): This suggestion can be restored if `f{32,64}::is_nan` is made const. let suggestion = (!cx.tcx.hir().is_inside_const_context(e.hir_id)).then(|| { - if let Some(l_span) = l.span.find_ancestor_inside(e.span) && - let Some(r_span) = r.span.find_ancestor_inside(e.span) + if let Some(l_span) = l.span.find_ancestor_inside(e.span) + && let Some(r_span) = r.span.find_ancestor_inside(e.span) { f(l_span, r_span) } else { @@ -622,9 +624,10 @@ fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx hir::Expr<'tcx>) { match e.kind { hir::ExprKind::Unary(hir::UnOp::Neg, ref expr) => { - // propagate negation, if the negation itself isn't negated + // Propagate negation, if the negation itself isn't negated if self.negated_expr_id != Some(e.hir_id) { self.negated_expr_id = Some(expr.hir_id); + self.negated_expr_span = Some(e.span); } } hir::ExprKind::Binary(binop, ref l, ref r) => { @@ -1269,8 +1272,8 @@ | ty::Bound(..) | ty::Error(_) | ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Placeholder(..) | ty::FnDef(..) => bug!("unexpected type in foreign function: {:?}", ty), } @@ -1292,11 +1295,12 @@ CItemKind::Definition => "fn", }; let span_note = if let ty::Adt(def, _) = ty.kind() - && let Some(sp) = self.cx.tcx.hir().span_if_local(def.did()) { - Some(sp) - } else { - None - }; + && let Some(sp) = self.cx.tcx.hir().span_if_local(def.did()) + { + Some(sp) + } else { + None + }; self.cx.emit_spanned_lint( lint, sp, @@ -1459,7 +1463,9 @@ type BreakTy = Ty<'tcx>; fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow { - if let ty::FnPtr(sig) = ty.kind() && !self.visitor.is_internal_abi(sig.abi()) { + if let ty::FnPtr(sig) = ty.kind() + && !self.visitor.is_internal_abi(sig.abi()) + { self.tys.push(ty); } @@ -1733,7 +1739,8 @@ } fn check_atomic_load_store(cx: &LateContext<'_>, expr: &Expr<'_>) { - if let Some((method, args)) = Self::inherent_atomic_method_call(cx, expr, &[sym::load, sym::store]) + if let Some((method, args)) = + Self::inherent_atomic_method_call(cx, expr, &[sym::load, sym::store]) && let Some((ordering_arg, invalid_ordering)) = match method { sym::load => Some((&args[0], sym::Release)), sym::store => Some((&args[1], sym::Acquire)), @@ -1743,9 +1750,17 @@ && (ordering == invalid_ordering || ordering == sym::AcqRel) { if method == sym::load { - cx.emit_spanned_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingLoad); + cx.emit_spanned_lint( + INVALID_ATOMIC_ORDERING, + ordering_arg.span, + AtomicOrderingLoad, + ); } else { - cx.emit_spanned_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingStore); + cx.emit_spanned_lint( + INVALID_ATOMIC_ORDERING, + ordering_arg.span, + AtomicOrderingStore, + ); }; } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/unused.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/unused.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/unused.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint/src/unused.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,7 @@ use 
crate::lints::{ PathStatementDrop, PathStatementDropSub, PathStatementNoEffect, UnusedAllocationDiag, - UnusedAllocationMutDiag, UnusedClosure, UnusedDef, UnusedDefSuggestion, UnusedDelim, - UnusedDelimSuggestion, UnusedGenerator, UnusedImportBracesDiag, UnusedOp, UnusedOpSuggestion, + UnusedAllocationMutDiag, UnusedClosure, UnusedCoroutine, UnusedDef, UnusedDefSuggestion, + UnusedDelim, UnusedDelimSuggestion, UnusedImportBracesDiag, UnusedOp, UnusedOpSuggestion, UnusedResult, }; use crate::Lint; @@ -257,8 +257,8 @@ Array(Box, u64), /// The root of the unused_closures lint. Closure(Span), - /// The root of the unused_generators lint. - Generator(Span), + /// The root of the unused_coroutines lint. + Coroutine(Span), } #[instrument(skip(cx, expr), level = "debug", ret)] @@ -350,16 +350,16 @@ .map(|inner| MustUsePath::Array(Box::new(inner), len)), }, ty::Closure(..) => Some(MustUsePath::Closure(span)), - ty::Generator(def_id, ..) => { + ty::Coroutine(def_id, ..) => { // async fn should be treated as "implementor of `Future`" - let must_use = if cx.tcx.generator_is_async(def_id) { - let def_id = cx.tcx.lang_items().future_trait().unwrap(); + let must_use = if cx.tcx.coroutine_is_async(def_id) { + let def_id = cx.tcx.lang_items().future_trait()?; is_def_must_use(cx, def_id, span) .map(|inner| MustUsePath::Opaque(Box::new(inner))) } else { None }; - must_use.or(Some(MustUsePath::Generator(span))) + must_use.or(Some(MustUsePath::Coroutine(span))) } _ => None, } @@ -482,11 +482,11 @@ UnusedClosure { count: plural_len, pre: descr_pre, post: descr_post }, ); } - MustUsePath::Generator(span) => { + MustUsePath::Coroutine(span) => { cx.emit_spanned_lint( UNUSED_MUST_USE, *span, - UnusedGenerator { count: plural_len, pre: descr_pre, post: descr_post }, + UnusedCoroutine { count: plural_len, pre: descr_pre, post: descr_post }, ); } MustUsePath::Def(span, def_id, reason) => { @@ -782,21 +782,23 @@ }; let suggestion = spans.map(|(lo, hi)| { let sm = cx.sess().source_map(); - let lo_replace = - if (keep_space.0 || is_kw) && - let Ok(snip) = sm.span_to_prev_source(lo) && !snip.ends_with(' ') { - " " - } else { - "" - }; + let lo_replace = if (keep_space.0 || is_kw) + && let Ok(snip) = sm.span_to_prev_source(lo) + && !snip.ends_with(' ') + { + " " + } else { + "" + }; - let hi_replace = - if keep_space.1 && - let Ok(snip) = sm.span_to_next_source(hi) && !snip.starts_with(' ') { - " " - } else { - "" - }; + let hi_replace = if keep_space.1 + && let Ok(snip) = sm.span_to_next_source(hi) + && !snip.starts_with(' ') + { + " " + } else { + "" + }; UnusedDelimSuggestion { start_span: lo, start_replace: lo_replace, @@ -1056,10 +1058,10 @@ impl EarlyLintPass for UnusedParens { #[inline] fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) { - if let ExprKind::Binary(op, lhs, _rhs) = &e.kind && - (op.node == ast::BinOpKind::Lt || op.node == ast::BinOpKind::Shl) && - let ExprKind::Cast(_expr, ty) = &lhs.kind && - let ast::TyKind::Paren(_) = &ty.kind + if let ExprKind::Binary(op, lhs, _rhs) = &e.kind + && (op.node == ast::BinOpKind::Lt || op.node == ast::BinOpKind::Shl) + && let ExprKind::Cast(_expr, ty) = &lhs.kind + && let ast::TyKind::Paren(_) = &ty.kind { self.parens_in_cast_in_lt.push(ty.id); } @@ -1111,13 +1113,19 @@ } fn check_expr_post(&mut self, _cx: &EarlyContext<'_>, e: &ast::Expr) { - if let ExprKind::Binary(op, lhs, _rhs) = &e.kind && - (op.node == ast::BinOpKind::Lt || op.node == ast::BinOpKind::Shl) && - let ExprKind::Cast(_expr, ty) = &lhs.kind && - let ast::TyKind::Paren(_) = &ty.kind + if 
let ExprKind::Binary(op, lhs, _rhs) = &e.kind + && (op.node == ast::BinOpKind::Lt || op.node == ast::BinOpKind::Shl) + && let ExprKind::Cast(_expr, ty) = &lhs.kind + && let ast::TyKind::Paren(_) = &ty.kind { - let id = self.parens_in_cast_in_lt.pop().expect("check_expr and check_expr_post must balance"); - assert_eq!(id, ty.id, "check_expr, check_ty, and check_expr_post are called, in that order, by the visitor"); + let id = self + .parens_in_cast_in_lt + .pop() + .expect("check_expr and check_expr_post must balance"); + assert_eq!( + id, ty.id, + "check_expr, check_ty, and check_expr_post are called, in that order, by the visitor" + ); } } @@ -1146,7 +1154,7 @@ fn check_stmt(&mut self, cx: &EarlyContext<'_>, s: &ast::Stmt) { if let StmtKind::Local(ref local) = s.kind { - self.check_unused_parens_pat(cx, &local.pat, true, false, (false, false)); + self.check_unused_parens_pat(cx, &local.pat, true, false, (true, false)); } ::check_stmt(self, cx, s) @@ -1161,8 +1169,8 @@ } fn check_ty(&mut self, cx: &EarlyContext<'_>, ty: &ast::Ty) { - if let ast::TyKind::Paren(_) = ty.kind && - Some(&ty.id) == self.parens_in_cast_in_lt.last() + if let ast::TyKind::Paren(_) = ty.kind + && Some(&ty.id) == self.parens_in_cast_in_lt.last() { return; } @@ -1206,13 +1214,14 @@ fn enter_where_predicate(&mut self, _: &EarlyContext<'_>, pred: &ast::WherePredicate) { use rustc_ast::{WhereBoundPredicate, WherePredicate}; if let WherePredicate::BoundPredicate(WhereBoundPredicate { - bounded_ty, - bound_generic_params, - .. - }) = pred && - let ast::TyKind::Paren(_) = &bounded_ty.kind && - bound_generic_params.is_empty() { - self.with_self_ty_parens = true; + bounded_ty, + bound_generic_params, + .. + }) = pred + && let ast::TyKind::Paren(_) = &bounded_ty.kind + && bound_generic_params.is_empty() + { + self.with_self_ty_parens = true; } } @@ -1516,9 +1525,8 @@ match e.kind { hir::ExprKind::Call(path_expr, [_]) if let hir::ExprKind::Path(qpath) = &path_expr.kind - && let Some(did) = cx.qpath_res(qpath, path_expr.hir_id).opt_def_id() - && cx.tcx.is_diagnostic_item(sym::box_new, did) - => {} + && let Some(did) = cx.qpath_res(qpath, path_expr.hir_id).opt_def_id() + && cx.tcx.is_diagnostic_item(sym::box_new, did) => {} _ => return, } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,12 +4,14 @@ edition = "2021" [dependencies] -serde = { version = "1.0.125", features = ["derive"] } +# tidy-alphabetical-start rustc_ast = { path = "../rustc_ast" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_error_messages = { path = "../rustc_error_messages" } -rustc_span = { path = "../rustc_span" } -rustc_serialize = { path = "../rustc_serialize" } +rustc_hir = { path = "../rustc_hir" } rustc_macros = { path = "../rustc_macros" } +rustc_serialize = { path = "../rustc_serialize" } +rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } -rustc_hir = { path = "../rustc_hir" } +serde = { version = "1.0.125", features = ["derive"] } +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/src/builtin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/src/builtin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/src/builtin.rs 
2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/src/builtin.rs 2023-12-21 16:55:28.000000000 +0000 @@ -987,45 +987,6 @@ } declare_lint! { - /// The `invalid_alignment` lint detects dereferences of misaligned pointers during - /// constant evaluation. - /// - /// ### Example - /// - /// ```rust,compile_fail - /// #![feature(const_mut_refs)] - /// const FOO: () = unsafe { - /// let x = &[0_u8; 4]; - /// let y = x.as_ptr().cast::(); - /// let mut z = 123; - /// y.copy_to_nonoverlapping(&mut z, 1); // the address of a `u8` array is unknown - /// // and thus we don't know if it is aligned enough for copying a `u32`. - /// }; - /// ``` - /// - /// {{produces}} - /// - /// ### Explanation - /// - /// The compiler allowed dereferencing raw pointers irrespective of alignment - /// during const eval due to the const evaluator at the time not making it easy - /// or cheap to check. Now that it is both, this is not accepted anymore. - /// - /// Since it was undefined behaviour to begin with, this breakage does not violate - /// Rust's stability guarantees. Using undefined behaviour can cause arbitrary - /// behaviour, including failure to build. - /// - /// [future-incompatible]: ../index.md#future-incompatible-lints - pub INVALID_ALIGNMENT, - Deny, - "raw pointers must be aligned before dereferencing", - @future_incompatible = FutureIncompatibleInfo { - reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps, - reference: "issue #68585 ", - }; -} - -declare_lint! { /// The `exported_private_dependencies` lint detects private dependencies /// that are exposed in a public interface. /// @@ -2256,15 +2217,16 @@ /// /// ### Explanation /// - /// Previous versions of Rust allowed function pointers and wide raw pointers in patterns. + /// Previous versions of Rust allowed function pointers and all raw pointers in patterns. /// While these work in many cases as expected by users, it is possible that due to /// optimizations pointers are "not equal to themselves" or pointers to different functions /// compare as equal during runtime. This is because LLVM optimizations can deduplicate /// functions if their bodies are the same, thus also making pointers to these functions point /// to the same location. Additionally functions may get duplicated if they are instantiated - /// in different crates and not deduplicated again via LTO. + /// in different crates and not deduplicated again via LTO. Pointer identity for memory + /// created by `const` is similarly unreliable. pub POINTER_STRUCTURAL_MATCH, - Allow, + Warn, "pointers are not structural-match", @future_incompatible = FutureIncompatibleInfo { reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps, @@ -3430,7 +3392,6 @@ INDIRECT_STRUCTURAL_MATCH, INEFFECTIVE_UNSTABLE_TRAIT_IMPL, INLINE_NO_SANITIZE, - INVALID_ALIGNMENT, INVALID_DOC_ATTRIBUTES, INVALID_MACRO_EXPORT_ARGUMENTS, INVALID_TYPE_PARAM_DEFAULT, @@ -3993,8 +3954,13 @@ } declare_lint! { - /// The `non_exhaustive_omitted_patterns` lint detects when a wildcard (`_` or `..`) in a - /// pattern for a `#[non_exhaustive]` struct or enum is reachable. + /// The `non_exhaustive_omitted_patterns` lint aims to help consumers of a `#[non_exhaustive]` + /// struct or enum who want to match all of its fields/variants explicitly. + /// + /// The `#[non_exhaustive]` annotation forces matches to use wildcards, so exhaustiveness + /// checking cannot be used to ensure that all fields/variants are matched explicitly. 
To remedy + /// this, this allow-by-default lint warns the user when a match mentions some but not all of + /// the fields/variants of a `#[non_exhaustive]` struct or enum. /// /// ### Example /// @@ -4008,9 +3974,9 @@ /// /// // in crate B /// #![feature(non_exhaustive_omitted_patterns_lint)] + /// #[warn(non_exhaustive_omitted_patterns)] /// match Bar::A { /// Bar::A => {}, - /// #[warn(non_exhaustive_omitted_patterns)] /// _ => {}, /// } /// ``` @@ -4018,29 +3984,32 @@ /// This will produce: /// /// ```text - /// warning: reachable patterns not covered of non exhaustive enum + /// warning: some variants are not matched explicitly /// --> $DIR/reachable-patterns.rs:70:9 /// | - /// LL | _ => {} - /// | ^ pattern `B` not covered + /// LL | match Bar::A { + /// | ^ pattern `Bar::B` not covered /// | /// note: the lint level is defined here /// --> $DIR/reachable-patterns.rs:69:16 /// | /// LL | #[warn(non_exhaustive_omitted_patterns)] /// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - /// = help: ensure that all possible cases are being handled by adding the suggested match arms + /// = help: ensure that all variants are matched explicitly by adding the suggested match arms /// = note: the matched value is of type `Bar` and the `non_exhaustive_omitted_patterns` attribute was found /// ``` /// + /// Warning: setting this to `deny` will make upstream non-breaking changes (adding fields or + /// variants to a `#[non_exhaustive]` struct or enum) break your crate. This goes against + /// expected semver behavior. + /// /// ### Explanation /// - /// Structs and enums tagged with `#[non_exhaustive]` force the user to add a - /// (potentially redundant) wildcard when pattern-matching, to allow for future - /// addition of fields or variants. The `non_exhaustive_omitted_patterns` lint - /// detects when such a wildcard happens to actually catch some fields/variants. - /// In other words, when the match without the wildcard would not be exhaustive. - /// This lets the user be informed if new fields/variants were added. + /// Structs and enums tagged with `#[non_exhaustive]` force the user to add a (potentially + /// redundant) wildcard when pattern-matching, to allow for future addition of fields or + /// variants. The `non_exhaustive_omitted_patterns` lint detects when such a wildcard happens to + /// actually catch some fields/variants. In other words, when the match without the wildcard + /// would not be exhaustive. This lets the user be informed if new fields/variants were added. pub NON_EXHAUSTIVE_OMITTED_PATTERNS, Allow, "detect when patterns of types marked `non_exhaustive` are missed", @@ -4489,11 +4458,11 @@ /// on itself), the blanket impl is not considered to hold for `u8`. This will /// change in a future release. 
pub COINDUCTIVE_OVERLAP_IN_COHERENCE, - Warn, + Deny, "impls that are not considered to overlap may be considered to \ overlap in the future", @future_incompatible = FutureIncompatibleInfo { - reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps, + reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps, reference: "issue #114040 ", }; } @@ -4574,7 +4543,6 @@ /// ### Example /// /// ```rust,compile_fail - /// #![feature(return_position_impl_trait_in_trait)] /// #![deny(refining_impl_trait)] /// /// use std::fmt::Display; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_lint_defs/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -321,7 +321,7 @@ pub future_incompatible: Option, - pub is_plugin: bool, + pub is_loaded: bool, /// `Some` if this lint is feature gated, otherwise `None`. pub feature_gate: Option, @@ -399,7 +399,7 @@ default_level: Level::Forbid, desc: "", edition_lint_opts: None, - is_plugin: false, + is_loaded: false, report_in_external_macro: false, future_incompatible: None, feature_gate: None, @@ -735,7 +735,7 @@ name: stringify!($NAME), default_level: $crate::$Level, desc: $desc, - is_plugin: false, + is_loaded: false, $($v: true,)* $(feature_gate: Some($gate),)? $(future_incompatible: Some($crate::FutureIncompatibleInfo { @@ -777,7 +777,7 @@ edition_lint_opts: None, report_in_external_macro: $external, future_incompatible: None, - is_plugin: true, + is_loaded: true, $(feature_gate: Some($gate),)? crate_level_only: false, ..$crate::Lint::default_fields_for_macro() diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,12 +3,12 @@ version = "0.0.0" edition = "2021" -[features] -static-libstdcpp = [] -emscripten = [] - [dependencies] +# tidy-alphabetical-start libc = "0.2.73" +# tidy-alphabetical-end [build-dependencies] +# tidy-alphabetical-start cc = "1.0.69" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/build.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/build.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/build.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/build.rs 2023-12-21 16:55:28.000000000 +0000 @@ -258,6 +258,12 @@ { println!("cargo:rustc-link-lib=z"); } else if target.contains("netbsd") { + // On NetBSD/i386, gcc and g++ is built for i486 (to maximize backward compat) + // However, LLVM insists on using 64-bit atomics. 
+ // This gives rise to a need to link rust itself with -latomic for these targets + if target.starts_with("i586") || target.starts_with("i686") { + println!("cargo:rustc-link-lib=atomic"); + } println!("cargo:rustc-link-lib=z"); println!("cargo:rustc-link-lib=execinfo"); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h 2023-12-21 16:55:28.000000000 +0000 @@ -25,7 +25,6 @@ #include "llvm/Transforms/IPO.h" #include "llvm/Transforms/Instrumentation.h" #include "llvm/Transforms/Scalar.h" -#include "llvm/Transforms/Vectorize.h" #define LLVM_VERSION_GE(major, minor) \ (LLVM_VERSION_MAJOR > (major) || \ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp 2023-12-21 16:55:28.000000000 +0000 @@ -795,6 +795,20 @@ CGSCCAnalysisManager CGAM; ModuleAnalysisManager MAM; + if (LLVMPluginsLen) { + auto PluginsStr = StringRef(LLVMPlugins, LLVMPluginsLen); + SmallVector Plugins; + PluginsStr.split(Plugins, ',', -1, false); + for (auto PluginPath: Plugins) { + auto Plugin = PassPlugin::Load(PluginPath.str()); + if (!Plugin) { + LLVMRustSetLastError(("Failed to load pass plugin" + PluginPath.str()).c_str()); + return LLVMRustResult::Failure; + } + Plugin->registerPassBuilderCallbacks(PB); + } + } + FAM.registerPass([&] { return PB.buildDefaultAAPipeline(); }); Triple TargetTriple(TheModule->getTargetTriple()); @@ -918,20 +932,6 @@ } } - if (LLVMPluginsLen) { - auto PluginsStr = StringRef(LLVMPlugins, LLVMPluginsLen); - SmallVector Plugins; - PluginsStr.split(Plugins, ',', -1, false); - for (auto PluginPath: Plugins) { - auto Plugin = PassPlugin::Load(PluginPath.str()); - if (!Plugin) { - LLVMRustSetLastError(("Failed to load pass plugin" + PluginPath.str()).c_str()); - return LLVMRustResult::Failure; - } - Plugin->registerPassBuilderCallbacks(PB); - } - } - ModulePassManager MPM; bool NeedThinLTOBufferPasses = UseThinLTOBuffers; if (!NoPrepopulatePasses) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_llvm/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,9 @@ #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] // NOTE: This crate only exists to allow linking on mingw targets. 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_log/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_log/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_log/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_log/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,13 +4,19 @@ edition = "2021" [dependencies] +# tidy-alphabetical-start tracing = "0.1.28" +tracing-core = "=0.1.30" # FIXME(Nilstrieb) tracing has a deadlock: https://github.com/tokio-rs/tracing/issues/2635 tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] } tracing-tree = "0.2.0" -tracing-core = "=0.1.30" # FIXME(Nilstrieb) tracing has a deadlock: https://github.com/tokio-rs/tracing/issues/2635 +# tidy-alphabetical-end [dev-dependencies] +# tidy-alphabetical-start rustc_span = { path = "../rustc_span" } +# tidy-alphabetical-end [features] +# tidy-alphabetical-start max_level_info = ['tracing/max_level_info'] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_log/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_log/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_log/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_log/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -23,10 +23,10 @@ //! } //! ``` //! -//! Now `LOG=debug cargo run` will run your minimal main.rs and show +//! Now `LOG=debug cargo +nightly run` will run your minimal main.rs and show //! rustc's debug logging. In a workflow like this, one might also add //! `std::env::set_var("LOG", "debug")` to the top of main so that `cargo -//! run` by itself is sufficient to get logs. +//! +nightly run` by itself is sufficient to get logs. //! //! The reason rustc_log is a tiny separate crate, as opposed to exposing the //! same things in rustc_driver only, is to enable the above workflow. 
If you @@ -74,6 +74,11 @@ Some(v) => &v != "0", }; + let verbose_thread_ids = match env::var_os(String::from(env) + "_THREAD_IDS") { + None => false, + Some(v) => &v == "1", + }; + let layer = tracing_tree::HierarchicalLayer::default() .with_writer(io::stderr) .with_indent_lines(true) @@ -81,9 +86,9 @@ .with_targets(true) .with_verbose_exit(verbose_entry_exit) .with_verbose_entry(verbose_entry_exit) - .with_indent_amount(2); - #[cfg(all(parallel_compiler, debug_assertions))] - let layer = layer.with_thread_ids(true).with_thread_names(true); + .with_indent_amount(2) + .with_thread_ids(verbose_thread_ids) + .with_thread_names(verbose_thread_ids); let subscriber = tracing_subscriber::Registry::default().with(filter).with(layer); match env::var(format!("{env}_BACKTRACE")) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -1,13 +1,15 @@ [package] name = "rustc_macros" -version = "0.1.0" +version = "0.0.0" edition = "2021" [lib] proc-macro = true [dependencies] -synstructure = "0.13.0" -syn = { version = "2.0.9", features = ["full"] } +# tidy-alphabetical-start proc-macro2 = "1" quote = "1" +syn = { version = "2.0.9", features = ["full"] } +synstructure = "0.13.0" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/current_version.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/current_version.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/current_version.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/current_version.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,59 @@ +use proc_macro::TokenStream; +use proc_macro2::Span; +use quote::quote; +use syn::parse::{Parse, ParseStream}; +use syn::{parenthesized, parse_macro_input, LitStr, Token}; + +pub struct Input { + variable: LitStr, +} + +mod kw { + syn::custom_keyword!(env); +} + +impl Parse for Input { + // Input syntax is `env!("CFG_RELEASE")` to facilitate grepping. + fn parse(input: ParseStream<'_>) -> syn::Result<Self> { + let paren; + input.parse::<kw::env>()?; + input.parse::<Token![!]>()?; + parenthesized!(paren in input); + let variable: LitStr = paren.parse()?; + Ok(Input { variable }) + } +} + +pub(crate) fn current_version(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as Input); + + TokenStream::from(match RustcVersion::parse_env_var(&input.variable) { + Ok(RustcVersion { major, minor, patch }) => quote!( + Self { major: #major, minor: #minor, patch: #patch } + ), + Err(err) => syn::Error::new(Span::call_site(), err).into_compile_error(), + }) +} + +struct RustcVersion { + major: u16, + minor: u16, + patch: u16, +} + +impl RustcVersion { + fn parse_env_var(env_var: &LitStr) -> Result<Self, Box<dyn std::error::Error>> { + let value = proc_macro::tracked_env::var(env_var.value())?; + Self::parse_str(&value) + .ok_or_else(|| format!("failed to parse rustc version: {:?}", value).into()) + } + + fn parse_str(value: &str) -> Option<Self> { + // Ignore any suffixes such as "-dev" or "-nightly".
+ let mut components = value.split('-').next().unwrap().splitn(3, '.'); + let major = components.next()?.parse().ok()?; + let minor = components.next()?.parse().ok()?; + let patch = components.next().unwrap_or("0").parse().ok()?; + Some(RustcVersion { major, minor, patch }) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/diagnostics/diagnostic.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/diagnostics/diagnostic.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/diagnostics/diagnostic.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/diagnostics/diagnostic.rs 2023-12-21 16:55:28.000000000 +0000 @@ -42,19 +42,20 @@ let init = match builder.slug.value_ref() { None => { span_err(builder.span, "diagnostic slug not specified") - .help("specify the slug as the first argument to the `#[diag(...)]` \ - attribute, such as `#[diag(hir_analysis_example_error)]`") + .help( + "specify the slug as the first argument to the `#[diag(...)]` \ + attribute, such as `#[diag(hir_analysis_example_error)]`", + ) .emit(); return DiagnosticDeriveError::ErrorHandled.to_compile_error(); } - Some(slug) if let Some( Mismatch { slug_name, crate_name, slug_prefix }) = Mismatch::check(slug) => { + Some(slug) + if let Some(Mismatch { slug_name, crate_name, slug_prefix }) = + Mismatch::check(slug) => + { span_err(slug.span().unwrap(), "diagnostic slug and crate name do not match") - .note(format!( - "slug is `{slug_name}` but the crate name is `{crate_name}`" - )) - .help(format!( - "expected a slug starting with `{slug_prefix}_...`" - )) + .note(format!("slug is `{slug_name}` but the crate name is `{crate_name}`")) + .help(format!("expected a slug starting with `{slug_prefix}_...`")) .emit(); return DiagnosticDeriveError::ErrorHandled.to_compile_error(); } @@ -141,19 +142,20 @@ match builder.slug.value_ref() { None => { span_err(builder.span, "diagnostic slug not specified") - .help("specify the slug as the first argument to the attribute, such as \ - `#[diag(compiletest_example)]`") + .help( + "specify the slug as the first argument to the attribute, such as \ + `#[diag(compiletest_example)]`", + ) .emit(); DiagnosticDeriveError::ErrorHandled.to_compile_error() } - Some(slug) if let Some( Mismatch { slug_name, crate_name, slug_prefix }) = Mismatch::check(slug) => { + Some(slug) + if let Some(Mismatch { slug_name, crate_name, slug_prefix }) = + Mismatch::check(slug) => + { span_err(slug.span().unwrap(), "diagnostic slug and crate name do not match") - .note(format!( - "slug is `{slug_name}` but the crate name is `{crate_name}`" - )) - .help(format!( - "expected a slug starting with `{slug_prefix}_...`" - )) + .note(format!("slug is `{slug_name}` but the crate name is `{crate_name}`")) + .help(format!("expected a slug starting with `{slug_prefix}_...`")) .emit(); DiagnosticDeriveError::ErrorHandled.to_compile_error() } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs 2023-12-21 16:55:28.000000000 +0000 @@ -577,7 +577,9 @@ } } _ => { - if let Some(span) = span_field && !no_span { + if let Some(span) = span_field + && !no_span + { quote! 
{ #diag.#name(#span, #message); } } else { quote! { #diag.#name(#message); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,6 +4,7 @@ #![feature(never_type)] #![feature(proc_macro_diagnostic)] #![feature(proc_macro_span)] +#![feature(proc_macro_tracked_env)] #![allow(rustc::default_hash_types)] #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] @@ -14,6 +15,7 @@ use proc_macro::TokenStream; +mod current_version; mod diagnostics; mod hash_stable; mod lift; @@ -25,6 +27,11 @@ mod type_visitable; #[proc_macro] +pub fn current_rustc_version(input: TokenStream) -> TokenStream { + current_version::current_version(input) +} + +#[proc_macro] pub fn rustc_queries(input: TokenStream) -> TokenStream { query::rustc_queries(input) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/query.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/query.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/query.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/query.rs 2023-12-21 16:55:28.000000000 +0000 @@ -97,6 +97,9 @@ /// A cycle error results in a delay_bug call cycle_delay_bug: Option, + /// A cycle error results in a stashed cycle error that can be unstashed and canceled later + cycle_stash: Option, + /// Don't hash the result, instead just mark a query red if it runs no_hash: Option, @@ -114,6 +117,11 @@ /// Generate a `feed` method to set the query's value from another query. feedable: Option, + + /// Forward the result on ensure if the query gets recomputed, and + /// return `Ok(())` otherwise. 
Only applicable to queries returning + /// `Result<(), ErrorGuaranteed>` + ensure_forwards_result_if_red: Option, } fn parse_query_modifiers(input: ParseStream<'_>) -> Result { @@ -122,12 +130,14 @@ let mut desc = None; let mut fatal_cycle = None; let mut cycle_delay_bug = None; + let mut cycle_stash = None; let mut no_hash = None; let mut anon = None; let mut eval_always = None; let mut depth_limit = None; let mut separate_provide_extern = None; let mut feedable = None; + let mut ensure_forwards_result_if_red = None; while !input.is_empty() { let modifier: Ident = input.parse()?; @@ -175,6 +185,8 @@ try_insert!(fatal_cycle = modifier); } else if modifier == "cycle_delay_bug" { try_insert!(cycle_delay_bug = modifier); + } else if modifier == "cycle_stash" { + try_insert!(cycle_stash = modifier); } else if modifier == "no_hash" { try_insert!(no_hash = modifier); } else if modifier == "anon" { @@ -187,6 +199,8 @@ try_insert!(separate_provide_extern = modifier); } else if modifier == "feedable" { try_insert!(feedable = modifier); + } else if modifier == "ensure_forwards_result_if_red" { + try_insert!(ensure_forwards_result_if_red = modifier); } else { return Err(Error::new(modifier.span(), "unknown query modifier")); } @@ -200,12 +214,14 @@ desc, fatal_cycle, cycle_delay_bug, + cycle_stash, no_hash, anon, eval_always, depth_limit, separate_provide_extern, feedable, + ensure_forwards_result_if_red, }) } @@ -320,11 +336,13 @@ fatal_cycle, arena_cache, cycle_delay_bug, + cycle_stash, no_hash, anon, eval_always, depth_limit, separate_provide_extern, + ensure_forwards_result_if_red, ); if modifiers.cache.is_some() { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/serialize.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/serialize.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/serialize.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/serialize.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,11 +5,16 @@ pub fn type_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { let decoder_ty = quote! { __D }; - if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") { - s.add_impl_generic(parse_quote! { 'tcx }); - } - s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_type_ir::codec::TyDecoder>}); - s.add_bounds(synstructure::AddBounds::Generics); + let bound = if s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") { + quote! { > } + } else if s.ast().generics.type_params().any(|ty| ty.ident == "I") { + quote! { } + } else { + quote! {} + }; + + s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_type_ir::codec::TyDecoder #bound }); + s.add_bounds(synstructure::AddBounds::Fields); decodable_body(s, decoder_ty) } @@ -97,12 +102,17 @@ } pub fn type_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { - if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") { - s.add_impl_generic(parse_quote! {'tcx}); - } + let bound = if s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") { + quote! { > } + } else if s.ast().generics.type_params().any(|ty| ty.ident == "I") { + quote! { } + } else { + quote! {} + }; + let encoder_ty = quote! { __E }; - s.add_impl_generic(parse_quote! {#encoder_ty: ::rustc_type_ir::codec::TyEncoder>}); - s.add_bounds(synstructure::AddBounds::Generics); + s.add_impl_generic(parse_quote! 
{#encoder_ty: ::rustc_type_ir::codec::TyEncoder #bound }); + s.add_bounds(synstructure::AddBounds::Fields); encodable_body(s, encoder_ty, false) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/symbols/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/symbols/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/symbols/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/symbols/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -27,7 +27,7 @@ let body_tokens = m.mac.tokens.clone(); - test_symbols_macro(body_tokens, &[]); + test_symbols_macro(body_tokens, &["proc_macro::tracked_env is not available in unit test"]); } fn test_symbols_macro(input: TokenStream, expected_errors: &[&str]) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/symbols.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/symbols.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/symbols.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_macros/src/symbols.rs 2023-12-21 16:55:28.000000000 +0000 @@ -26,7 +26,7 @@ use quote::quote; use std::collections::HashMap; use syn::parse::{Parse, ParseStream, Result}; -use syn::{braced, punctuated::Punctuated, Ident, LitStr, Token}; +use syn::{braced, punctuated::Punctuated, Expr, Ident, Lit, LitStr, Macro, Token}; #[cfg(test)] mod tests; @@ -53,21 +53,46 @@ struct Symbol { name: Ident, - value: Option, + value: Value, +} + +enum Value { + SameAsName, + String(LitStr), + Env(LitStr, Macro), + Unsupported(Expr), } impl Parse for Symbol { fn parse(input: ParseStream<'_>) -> Result { let name = input.parse()?; - let value = match input.parse::() { - Ok(_) => Some(input.parse()?), - Err(_) => None, - }; + let colon_token: Option = input.parse()?; + let value = if colon_token.is_some() { input.parse()? } else { Value::SameAsName }; Ok(Symbol { name, value }) } } +impl Parse for Value { + fn parse(input: ParseStream<'_>) -> Result { + let expr: Expr = input.parse()?; + match &expr { + Expr::Lit(expr) => { + if let Lit::Str(lit) = &expr.lit { + return Ok(Value::String(lit.clone())); + } + } + Expr::Macro(expr) => { + if expr.mac.path.is_ident("env") && let Ok(lit) = expr.mac.parse_body() { + return Ok(Value::Env(lit, expr.mac.clone())); + } + } + _ => {} + } + Ok(Value::Unsupported(expr)) + } +} + struct Input { keywords: Punctuated, symbols: Punctuated, @@ -111,6 +136,37 @@ output } +struct Preinterned { + idx: u32, + span_of_name: Span, +} + +struct Entries { + map: HashMap, +} + +impl Entries { + fn with_capacity(capacity: usize) -> Self { + Entries { map: HashMap::with_capacity(capacity) } + } + + fn insert(&mut self, span: Span, str: &str, errors: &mut Errors) -> u32 { + if let Some(prev) = self.map.get(str) { + errors.error(span, format!("Symbol `{str}` is duplicated")); + errors.error(prev.span_of_name, "location of previous definition".to_string()); + prev.idx + } else { + let idx = self.len(); + self.map.insert(str.to_string(), Preinterned { idx, span_of_name: span }); + idx + } + } + + fn len(&self) -> u32 { + u32::try_from(self.map.len()).expect("way too many symbols") + } +} + fn symbols_with_errors(input: TokenStream) -> (TokenStream, Vec) { let mut errors = Errors::default(); @@ -127,20 +183,9 @@ let mut keyword_stream = quote! {}; let mut symbols_stream = quote! {}; let mut prefill_stream = quote! 
{}; - let mut counter = 0u32; - let mut keys = - HashMap::::with_capacity(input.keywords.len() + input.symbols.len() + 10); + let mut entries = Entries::with_capacity(input.keywords.len() + input.symbols.len() + 10); let mut prev_key: Option<(Span, String)> = None; - let mut check_dup = |span: Span, str: &str, errors: &mut Errors| { - if let Some(prev_span) = keys.get(str) { - errors.error(span, format!("Symbol `{str}` is duplicated")); - errors.error(*prev_span, "location of previous definition".to_string()); - } else { - keys.insert(str.to_string(), span); - } - }; - let mut check_order = |span: Span, str: &str, errors: &mut Errors| { if let Some((prev_span, ref prev_str)) = prev_key { if str < prev_str { @@ -156,49 +201,98 @@ let name = &keyword.name; let value = &keyword.value; let value_string = value.value(); - check_dup(keyword.name.span(), &value_string, &mut errors); + let idx = entries.insert(keyword.name.span(), &value_string, &mut errors); prefill_stream.extend(quote! { #value, }); keyword_stream.extend(quote! { - pub const #name: Symbol = Symbol::new(#counter); + pub const #name: Symbol = Symbol::new(#idx); }); - counter += 1; } // Generate the listed symbols. for symbol in input.symbols.iter() { let name = &symbol.name; + check_order(symbol.name.span(), &name.to_string(), &mut errors); + let value = match &symbol.value { - Some(value) => value.value(), - None => name.to_string(), + Value::SameAsName => name.to_string(), + Value::String(lit) => lit.value(), + Value::Env(..) => continue, // in another loop below + Value::Unsupported(expr) => { + errors.list.push(syn::Error::new_spanned( + expr, + concat!( + "unsupported expression for symbol value; implement support for this in ", + file!(), + ), + )); + continue; + } }; - check_dup(symbol.name.span(), &value, &mut errors); - check_order(symbol.name.span(), &name.to_string(), &mut errors); + let idx = entries.insert(symbol.name.span(), &value, &mut errors); prefill_stream.extend(quote! { #value, }); symbols_stream.extend(quote! { - pub const #name: Symbol = Symbol::new(#counter); + pub const #name: Symbol = Symbol::new(#idx); }); - counter += 1; } // Generate symbols for the strings "0", "1", ..., "9". - let digits_base = counter; - counter += 10; for n in 0..10 { let n = n.to_string(); - check_dup(Span::call_site(), &n, &mut errors); + entries.insert(Span::call_site(), &n, &mut errors); prefill_stream.extend(quote! { #n, }); } + // Symbols whose value comes from an environment variable. It's allowed for + // these to have the same value as another symbol. + for symbol in &input.symbols { + let (env_var, expr) = match &symbol.value { + Value::Env(lit, expr) => (lit, expr), + Value::SameAsName | Value::String(_) | Value::Unsupported(_) => continue, + }; + + if !proc_macro::is_available() { + errors.error( + Span::call_site(), + "proc_macro::tracked_env is not available in unit test".to_owned(), + ); + break; + } + + let value = match proc_macro::tracked_env::var(env_var.value()) { + Ok(value) => value, + Err(err) => { + errors.list.push(syn::Error::new_spanned(expr, err)); + continue; + } + }; + + let idx = if let Some(prev) = entries.map.get(&value) { + prev.idx + } else { + prefill_stream.extend(quote! { + #value, + }); + entries.insert(symbol.name.span(), &value, &mut errors) + }; + + let name = &symbol.name; + symbols_stream.extend(quote! { + pub const #name: Symbol = Symbol::new(#idx); + }); + } + + let symbol_digits_base = entries.map["0"].idx; + let preinterned_symbols_count = entries.len(); let output = quote! 
{ - const SYMBOL_DIGITS_BASE: u32 = #digits_base; - const PREINTERNED_SYMBOLS_COUNT: u32 = #counter; + const SYMBOL_DIGITS_BASE: u32 = #symbol_digits_base; + const PREINTERNED_SYMBOLS_COUNT: u32 = #preinterned_symbols_count; #[doc(hidden)] #[allow(non_upper_case_globals)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,30 +3,30 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start bitflags = "1.2.1" libloading = "0.7.1" odht = { version = "0.3.1", features = ["nightly"] } -snap = "1" -tracing = "0.1" -tempfile = "3.2" -rustc_middle = { path = "../rustc_middle" } +rustc_ast = { path = "../rustc_ast" } rustc_attr = { path = "../rustc_attr" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } +rustc_expand = { path = "../rustc_expand" } rustc_feature = { path = "../rustc_feature" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_fs_util = { path = "../rustc_fs_util" } rustc_hir = { path = "../rustc_hir" } rustc_hir_pretty = { path = "../rustc_hir_pretty" } -rustc_target = { path = "../rustc_target" } rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } +rustc_middle = { path = "../rustc_middle" } rustc_serialize = { path = "../rustc_serialize" } -rustc_ast = { path = "../rustc_ast" } -rustc_expand = { path = "../rustc_expand" } -rustc_span = { path = "../rustc_span" } rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } +rustc_target = { path = "../rustc_target" } rustc_type_ir = { path = "../rustc_type_ir" } +snap = "1" +tempfile = "3.2" +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -196,9 +196,6 @@ metadata_no_crate_with_triple = couldn't find crate `{$crate_name}` with expected target triple {$locator_triple}{$add_info} -metadata_no_dylib_plugin = - plugin `{$crate_name}` only found in rlib format, but must be available in dylib format - metadata_no_link_mod_override = overriding linking modifiers from command line is not supported diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/creader.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/creader.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/creader.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/creader.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,6 +10,7 @@ use rustc_data_structures::svh::Svh; use rustc_data_structures::sync::{FreezeReadGuard, FreezeWriteGuard}; use rustc_expand::base::SyntaxExtension; +use rustc_fs_util::try_canonicalize; use rustc_hir::def_id::{CrateNum, LocalDefId, StableCrateId, StableCrateIdMap, LOCAL_CRATE}; use rustc_hir::definitions::Definitions; use rustc_index::IndexVec; @@ -31,7 +32,7 @@ use std::ops::Fn; use std::path::Path; use std::time::Duration; -use std::{cmp, 
env, iter}; +use std::{cmp, iter}; pub struct CStore { metadata_loader: Box, @@ -677,7 +678,7 @@ stable_crate_id: StableCrateId, ) -> Result<&'static [ProcMacro], CrateError> { // Make sure the path contains a / or the linker will search for it. - let path = env::current_dir().unwrap().join(path); + let path = try_canonicalize(path).unwrap(); let lib = load_dylib(&path, 5).map_err(|err| CrateError::DlOpen(err))?; let sym_name = self.sess.generate_proc_macro_decls_symbol(stable_crate_id); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/dependency_format.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/dependency_format.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/dependency_format.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/dependency_format.rs 2023-12-21 16:55:28.000000000 +0000 @@ -396,11 +396,14 @@ continue; } - if let Some(found_strategy) = tcx.required_panic_strategy(cnum) && desired_strategy != found_strategy { + if let Some(found_strategy) = tcx.required_panic_strategy(cnum) + && desired_strategy != found_strategy + { sess.emit_err(RequiredPanicStrategy { crate_name: tcx.crate_name(cnum), found_strategy, - desired_strategy}); + desired_strategy, + }); } let found_drop_strategy = tcx.panic_in_drop_strategy(cnum); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -419,7 +419,9 @@ // if it looks like the user has provided a complete filename rather just the bare lib name, // then provide a note that they might want to try trimming the name let suggested_name = if !verbatim { - if let Some(libname) = libname.strip_prefix("lib") && let Some(libname) = libname.strip_suffix(".a") { + if let Some(libname) = libname.strip_prefix("lib") + && let Some(libname) = libname.strip_suffix(".a") + { // this is a unix style filename so trim prefix & suffix Some(libname) } else if let Some(libname) = libname.strip_suffix(".lib") { @@ -681,14 +683,6 @@ } #[derive(Diagnostic)] -#[diag(metadata_no_dylib_plugin, code = "E0457")] -pub struct NoDylibPlugin { - #[primary_span] - pub span: Span, - pub crate_name: Symbol, -} - -#[derive(Diagnostic)] #[diag(metadata_crate_location_unknown_type)] pub struct CrateLocationUnknownType<'a> { #[primary_span] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,9 +1,14 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![feature(decl_macro)] #![feature(extract_if)] -#![feature(generators)] -#![feature(iter_from_generator)] +#![cfg_attr(bootstrap, feature(generators))] +#![cfg_attr(not(bootstrap), feature(coroutines))] +#![feature(iter_from_coroutine)] #![feature(let_chains)] +#![feature(if_let_guard)] #![feature(proc_macro_internals)] 
#![feature(macro_metavar_expr)] #![feature(min_specialization)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/locator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/locator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/locator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/locator.rs 2023-12-21 16:55:28.000000000 +0000 @@ -220,7 +220,7 @@ use rustc_data_structures::memmap::Mmap; use rustc_data_structures::owned_slice::slice_owned; use rustc_data_structures::svh::Svh; -use rustc_errors::{DiagnosticArgValue, FatalError, IntoDiagnosticArg}; +use rustc_errors::{DiagnosticArgValue, IntoDiagnosticArg}; use rustc_fs_util::try_canonicalize; use rustc_session::config; use rustc_session::cstore::{CrateSource, MetadataLoader}; @@ -857,46 +857,6 @@ } } -/// Look for a plugin registrar. Returns its library path and crate disambiguator. -pub fn find_plugin_registrar( - sess: &Session, - metadata_loader: &dyn MetadataLoader, - span: Span, - name: Symbol, -) -> PathBuf { - find_plugin_registrar_impl(sess, metadata_loader, name).unwrap_or_else(|err| { - // `core` is always available if we got as far as loading plugins. - err.report(sess, span, false); - FatalError.raise() - }) -} - -fn find_plugin_registrar_impl<'a>( - sess: &'a Session, - metadata_loader: &dyn MetadataLoader, - name: Symbol, -) -> Result { - info!("find plugin registrar `{}`", name); - let mut locator = CrateLocator::new( - sess, - metadata_loader, - name, - false, // is_rlib - None, // hash - None, // extra_filename - true, // is_host - PathKind::Crate, - ); - - match locator.maybe_load_library_crate()? { - Some(library) => match library.source.dylib { - Some(dylib) => Ok(dylib.0), - None => Err(CrateError::NonDylibPlugin(name)), - }, - None => Err(locator.into_error(None)), - } -} - /// A diagnostic function for dumping crate metadata to an output stream. pub fn list_file_metadata( target: &Target, @@ -964,7 +924,6 @@ DlOpen(String), DlSym(String), LocatorCombined(Box), - NonDylibPlugin(Symbol), NotFound(Symbol), } @@ -1134,9 +1093,6 @@ }); } } - CrateError::NonDylibPlugin(crate_name) => { - sess.emit_err(errors::NoDylibPlugin { span, crate_name }); - } CrateError::NotFound(crate_name) => { sess.emit_err(errors::CannotFindCrate { span, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/native_libs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/native_libs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/native_libs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/native_libs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -383,7 +383,9 @@ // First, check for errors let mut renames = FxHashSet::default(); for lib in &self.tcx.sess.opts.libs { - if let NativeLibKind::Framework { .. } = lib.kind && !self.tcx.sess.target.is_like_osx { + if let NativeLibKind::Framework { .. } = lib.kind + && !self.tcx.sess.target.is_like_osx + { // Cannot check this when parsing options because the target is not yet available. 
self.tcx.sess.emit_err(errors::LibFrameworkApple); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -221,7 +221,7 @@ optimized_mir => { table } mir_for_ctfe => { table } closure_saved_names_of_captured_variables => { table } - mir_generator_witnesses => { table } + mir_coroutine_witnesses => { table } promoted_mir => { table } def_span => { table } def_ident_span => { table } @@ -241,7 +241,7 @@ rendered_const => { table } asyncness => { table_direct } fn_arg_names => { table } - generator_kind => { table } + coroutine_kind => { table } trait_def => { table } deduced_param_attrs => { table } is_type_alias_impl_trait => { @@ -287,6 +287,7 @@ item_attrs => { tcx.arena.alloc_from_iter(cdata.get_item_attrs(def_id.index, tcx.sess)) } is_mir_available => { cdata.is_item_mir_available(def_id.index) } is_ctfe_mir_available => { cdata.is_ctfe_mir_available(def_id.index) } + cross_crate_inlinable => { cdata.cross_crate_inlinable(def_id.index) } dylib_dependency_formats => { cdata.get_dylib_dependency_formats(tcx) } is_private_dep => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/decoder.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/decoder.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/decoder.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/decoder.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1239,7 +1239,7 @@ id: DefIndex, sess: &'a Session, ) -> impl Iterator + 'a { - iter::from_generator(move || { + iter::from_coroutine(move || { if let Some(data) = &self.root.proc_macro_data { // If we are loading as a proc macro, we want to return // the view of this crate as a proc macro crate. @@ -1273,6 +1273,10 @@ self.root.tables.optimized_mir.get(self, id).is_some() } + fn cross_crate_inlinable(self, id: DefIndex) -> bool { + self.root.tables.cross_crate_inlinable.get(self, id).unwrap_or(false) + } + fn get_fn_has_self_parameter(self, id: DefIndex, sess: &'a Session) -> bool { self.root .tables @@ -1692,17 +1696,22 @@ // `try_to_translate_virtual_to_real` don't have to worry about how the // compiler is bootstrapped. if let Some(virtual_dir) = &sess.opts.unstable_opts.simulate_remapped_rust_src_base - && let Some(real_dir) = &sess.opts.real_rust_source_base_dir - && let rustc_span::FileName::Real(ref mut old_name) = name { + && let Some(real_dir) = &sess.opts.real_rust_source_base_dir + && let rustc_span::FileName::Real(ref mut old_name) = name + { let relative_path = match old_name { - rustc_span::RealFileName::LocalPath(local) => local.strip_prefix(real_dir).ok(), + rustc_span::RealFileName::LocalPath(local) => { + local.strip_prefix(real_dir).ok() + } rustc_span::RealFileName::Remapped { virtual_name, .. 
} => { - option_env!("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR").and_then(|virtual_dir| virtual_name.strip_prefix(virtual_dir).ok()) + option_env!("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR") + .and_then(|virtual_dir| virtual_name.strip_prefix(virtual_dir).ok()) } }; debug!(?relative_path, ?virtual_dir, "simulate_remapped_rust_src_base"); for subdir in ["library", "compiler"] { - if let Some(rest) = relative_path.and_then(|p| p.strip_prefix(subdir).ok()) { + if let Some(rest) = relative_path.and_then(|p| p.strip_prefix(subdir).ok()) + { *old_name = rustc_span::RealFileName::Remapped { local_path: None, // FIXME: maybe we should preserve this? virtual_name: virtual_dir.join(subdir).join(rest), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/encoder.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/encoder.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/encoder.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/encoder.rs 2023-12-21 16:55:28.000000000 +0000 @@ -525,9 +525,17 @@ // the remapped version -- as is necessary for reproducible builds. let mut source_file = match source_file.name { FileName::Real(ref original_file_name) => { - let adapted_file_name = source_map - .path_mapping() - .to_embeddable_absolute_path(original_file_name.clone(), working_directory); + let adapted_file_name = if self.tcx.sess.should_prefer_remapped_for_codegen() { + source_map.path_mapping().to_embeddable_absolute_path( + original_file_name.clone(), + working_directory, + ) + } else { + source_map.path_mapping().to_local_embeddable_absolute_path( + original_file_name.clone(), + working_directory, + ) + }; if adapted_file_name != *original_file_name { let mut adapted: SourceFile = (**source_file).clone(); @@ -848,7 +856,7 @@ | DefKind::Field | DefKind::Impl { .. } | DefKind::Closure - | DefKind::Generator => true, + | DefKind::Coroutine => true, DefKind::ForeignMod | DefKind::GlobalAsm => false, } } @@ -889,7 +897,7 @@ | DefKind::OpaqueTy | DefKind::LifetimeParam | DefKind::GlobalAsm - | DefKind::Generator => false, + | DefKind::Coroutine => false, } } @@ -925,7 +933,7 @@ | DefKind::LifetimeParam | DefKind::GlobalAsm | DefKind::Closure - | DefKind::Generator => false, + | DefKind::Coroutine => false, } } @@ -960,7 +968,7 @@ | DefKind::GlobalAsm | DefKind::Impl { .. } | DefKind::Closure - | DefKind::Generator + | DefKind::Coroutine | DefKind::ExternCrate => false, } } @@ -996,7 +1004,7 @@ | DefKind::InlineConst | DefKind::GlobalAsm | DefKind::Closure - | DefKind::Generator + | DefKind::Coroutine | DefKind::ExternCrate => false, } } @@ -1046,14 +1054,14 @@ || (tcx.sess.opts.output_types.should_codegen() && reachable_set.contains(&def_id) && (generics.requires_monomorphization(tcx) - || tcx.codegen_fn_attrs(def_id).requests_inline())); + || tcx.cross_crate_inlinable(def_id))); // The function has a `const` modifier or is in a `#[const_trait]`. let is_const_fn = tcx.is_const_fn_raw(def_id.to_def_id()) || tcx.is_const_default_method(def_id.to_def_id()); (is_const_fn, opt) } - // Generators require optimized MIR to compute layout. - DefKind::Generator => (false, true), + // Coroutines require optimized MIR to compute layout. + DefKind::Coroutine => (false, true), // The others don't have MIR. 
_ => (false, false), } @@ -1089,7 +1097,7 @@ | DefKind::InlineConst | DefKind::GlobalAsm | DefKind::Closure - | DefKind::Generator + | DefKind::Coroutine | DefKind::ExternCrate => false, DefKind::TyAlias => tcx.type_alias_is_lazy(def_id), } @@ -1119,7 +1127,7 @@ | DefKind::Field | DefKind::TyParam | DefKind::Closure - | DefKind::Generator => true, + | DefKind::Coroutine => true, DefKind::Mod | DefKind::ForeignMod | DefKind::ConstParam @@ -1148,14 +1156,15 @@ | DefKind::AssocFn | DefKind::AssocConst | DefKind::Closure - | DefKind::Generator + | DefKind::Coroutine | DefKind::ConstParam | DefKind::AnonConst | DefKind::InlineConst => true, DefKind::OpaqueTy => { let origin = tcx.opaque_type_origin(def_id); - if let hir::OpaqueTyOrigin::FnReturn(fn_def_id) | hir::OpaqueTyOrigin::AsyncFn(fn_def_id) = origin + if let hir::OpaqueTyOrigin::FnReturn(fn_def_id) + | hir::OpaqueTyOrigin::AsyncFn(fn_def_id) = origin && let hir::Node::TraitItem(trait_item) = tcx.hir().get_by_def_id(fn_def_id) && let (_, hir::TraitFn::Required(..)) = trait_item.expect_fn() { @@ -1208,7 +1217,7 @@ | DefKind::Impl { .. } | DefKind::AssocConst | DefKind::Closure - | DefKind::Generator + | DefKind::Coroutine | DefKind::ConstParam | DefKind::AnonConst | DefKind::InlineConst @@ -1247,7 +1256,7 @@ | DefKind::OpaqueTy | DefKind::Impl { of_trait: false } | DefKind::ForeignTy - | DefKind::Generator + | DefKind::Coroutine | DefKind::ConstParam | DefKind::InlineConst | DefKind::AssocTy @@ -1282,7 +1291,7 @@ | DefKind::Impl { .. } | DefKind::AssocFn | DefKind::Closure - | DefKind::Generator + | DefKind::Coroutine | DefKind::ConstParam | DefKind::AssocTy | DefKind::TyParam @@ -1357,7 +1366,9 @@ if should_encode_expn_that_defined(def_kind) { record!(self.tables.expn_that_defined[def_id] <- self.tcx.expn_that_defined(def_id)); } - if should_encode_span(def_kind) && let Some(ident_span) = tcx.def_ident_span(def_id) { + if should_encode_span(def_kind) + && let Some(ident_span) = tcx.def_ident_span(def_id) + { record!(self.tables.def_ident_span[def_id] <- ident_span); } if def_kind.has_codegen_attrs() { @@ -1435,9 +1446,9 @@ self.encode_info_for_assoc_item(def_id); } } - if let DefKind::Generator = def_kind { - let data = self.tcx.generator_kind(def_id).unwrap(); - record!(self.tables.generator_kind[def_id] <- data); + if let DefKind::Coroutine = def_kind { + let data = self.tcx.coroutine_kind(def_id).unwrap(); + record!(self.tables.coroutine_kind[def_id] <- data); } if let DefKind::Enum | DefKind::Struct | DefKind::Union = def_kind { self.encode_info_for_adt(local_id); @@ -1612,13 +1623,16 @@ debug!("EntryBuilder::encode_mir({:?})", def_id); if encode_opt { record!(self.tables.optimized_mir[def_id.to_def_id()] <- tcx.optimized_mir(def_id)); + self.tables + .cross_crate_inlinable + .set(def_id.to_def_id().index, Some(self.tcx.cross_crate_inlinable(def_id))); record!(self.tables.closure_saved_names_of_captured_variables[def_id.to_def_id()] <- tcx.closure_saved_names_of_captured_variables(def_id)); - if let DefKind::Generator = self.tcx.def_kind(def_id) - && let Some(witnesses) = tcx.mir_generator_witnesses(def_id) + if let DefKind::Coroutine = self.tcx.def_kind(def_id) + && let Some(witnesses) = tcx.mir_coroutine_witnesses(def_id) { - record!(self.tables.mir_generator_witnesses[def_id.to_def_id()] <- witnesses); + record!(self.tables.mir_coroutine_witnesses[def_id.to_def_id()] <- witnesses); } } if encode_const { @@ -1642,10 +1656,10 @@ } record!(self.tables.promoted_mir[def_id.to_def_id()] <- tcx.promoted_mir(def_id)); - if let 
DefKind::Generator = self.tcx.def_kind(def_id) - && let Some(witnesses) = tcx.mir_generator_witnesses(def_id) + if let DefKind::Coroutine = self.tcx.def_kind(def_id) + && let Some(witnesses) = tcx.mir_coroutine_witnesses(def_id) { - record!(self.tables.mir_generator_witnesses[def_id.to_def_id()] <- witnesses); + record!(self.tables.mir_coroutine_witnesses[def_id.to_def_id()] <- witnesses); } let instance = ty::InstanceDef::Item(def_id.to_def_id()); @@ -1958,8 +1972,11 @@ record!(self.tables.impl_trait_ref[def_id] <- trait_ref); let trait_ref = trait_ref.instantiate_identity(); - let simplified_self_ty = - fast_reject::simplify_type(self.tcx, trait_ref.self_ty(), TreatParams::AsCandidateKey); + let simplified_self_ty = fast_reject::simplify_type( + self.tcx, + trait_ref.self_ty(), + TreatParams::AsCandidateKey, + ); fx_hash_map .entry(trait_ref.def_id) .or_default() @@ -2369,30 +2386,32 @@ } } - let classification = classify(value); - - if classification == Literal - && !value.span.from_expansion() - && let Ok(snippet) = tcx.sess.source_map().span_to_snippet(value.span) { - // For literals, we avoid invoking the pretty-printer and use the source snippet instead to - // preserve certain stylistic choices the user likely made for the sake legibility like + match classify(value) { + // For non-macro literals, we avoid invoking the pretty-printer and use the source snippet + // instead to preserve certain stylistic choices the user likely made for the sake of + // legibility, like: // // * hexadecimal notation // * underscores // * character escapes // // FIXME: This passes through `-/*spacer*/0` verbatim. - snippet - } else if classification == Simple { + Literal if !value.span.from_expansion() + && let Ok(snippet) = tcx.sess.source_map().span_to_snippet(value.span) => { + snippet + } + // Otherwise we prefer pretty-printing to get rid of extraneous whitespace, comments and // other formatting artifacts. - id_to_string(&hir, body.hir_id) - } else if tcx.def_kind(hir.body_owner_def_id(body).to_def_id()) == DefKind::AnonConst { + Literal | Simple => id_to_string(&hir, body.hir_id), + // FIXME: Omit the curly braces if the enclosing expression is an array literal // with a repeated element (an `ExprKind::Repeat`) as in such case it // would not actually need any disambiguation. 
- "{ _ }".to_owned() - } else { - "_".to_owned() + Complex => if tcx.def_kind(hir.body_owner_def_id(body).to_def_id()) == DefKind::AnonConst { + "{ _ }".to_owned() + } else { + "_".to_owned() + } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -427,8 +427,9 @@ object_lifetime_default: Table>, optimized_mir: Table>>, mir_for_ctfe: Table>>, + cross_crate_inlinable: Table, closure_saved_names_of_captured_variables: Table>>, - mir_generator_witnesses: Table>>, + mir_coroutine_witnesses: Table>>, promoted_mir: Table>>>, thir_abstract_const: Table>>>, impl_parent: Table, @@ -441,7 +442,7 @@ rendered_const: Table>, asyncness: Table, fn_arg_names: Table>, - generator_kind: Table>, + coroutine_kind: Table>, trait_def: Table>, trait_item_def_id: Table, expn_that_defined: Table>, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/table.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/table.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/table.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_metadata/src/rmeta/table.rs 2023-12-21 16:55:28.000000000 +0000 @@ -167,7 +167,7 @@ ( Impl { of_trait: false } ) ( Impl { of_trait: true } ) ( Closure ) - ( Generator ) + ( Coroutine ) ( Static(ast::Mutability::Not) ) ( Static(ast::Mutability::Mut) ) ( Ctor(CtorOf::Struct, CtorKind::Fn) ) @@ -299,6 +299,30 @@ } } +impl FixedSizeEncoding for Option { + type ByteArray = [u8; 1]; + + #[inline] + fn from_bytes(b: &[u8; 1]) -> Self { + match b[0] { + 0 => Some(false), + 1 => Some(true), + 2 => None, + _ => unreachable!(), + } + } + + #[inline] + fn write_to_bytes(self, b: &mut [u8; 1]) { + debug_assert!(!self.is_default()); + b[0] = match self { + Some(false) => 0, + Some(true) => 1, + None => 2, + }; + } +} + impl FixedSizeEncoding for UnusedGenericParams { type ByteArray = [u8; 4]; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,24 +3,24 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start bitflags = "1.2.1" derive_more = "0.99.17" either = "1.5.0" -gsgdt = "0.1.2" field-offset = "0.3.5" +gsgdt = "0.1.2" measureme = "10.0.0" polonius-engine = "0.13.0" +rustc-rayon = { version = "0.5.0", optional = true } +rustc-rayon-core = { version = "0.5.0", optional = true } rustc_apfloat = "0.2.0" rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } rustc_attr = { path = "../rustc_attr" } rustc_data_structures = { path = "../rustc_data_structures" } +rustc_error_messages = { path = "../rustc_error_messages" } # Used for intra-doc links rustc_errors = { path = "../rustc_errors" } -# Used for intra-doc links -rustc_error_messages = { path = "../rustc_error_messages" } rustc_feature = { path = "../rustc_feature" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_graphviz = { path = "../rustc_graphviz" } @@ -28,8 
+28,6 @@ rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } rustc_query_system = { path = "../rustc_query_system" } -rustc-rayon-core = { version = "0.5.0", optional = true } -rustc-rayon = { version = "0.5.0", optional = true } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } @@ -38,6 +36,9 @@ smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } thin-vec = "0.2.12" tracing = "0.1" +# tidy-alphabetical-end [features] +# tidy-alphabetical-start rustc_use_parallel_compiler = ["rustc-rayon", "rustc-rayon-core"] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -5,12 +5,14 @@ middle_assert_async_resume_after_return = `async fn` resumed after completion +middle_assert_coroutine_resume_after_panic = coroutine resumed after panicking + +middle_assert_coroutine_resume_after_return = coroutine resumed after completion + middle_assert_divide_by_zero = attempt to divide `{$val}` by zero -middle_assert_generator_resume_after_panic = generator resumed after panicking - -middle_assert_generator_resume_after_return = generator resumed after completion +middle_assert_gen_resume_after_panic = `gen` fn or block cannot be further iterated on after it panicked middle_assert_misaligned_ptr_deref = misaligned pointer dereference: address must be a multiple of {$required} but is {$found} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/arena.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/arena.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/arena.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/arena.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,7 +8,7 @@ macro_rules! 
arena_types { ($macro:path) => ( $macro!([ - [] layout: rustc_target::abi::LayoutS, + [] layout: rustc_target::abi::LayoutS, [] fn_abi: rustc_target::abi::call::FnAbi<'tcx, rustc_middle::ty::Ty<'tcx>>, // AdtDef are interned and compared by address [decode] adt_def: rustc_middle::ty::AdtDefData, @@ -69,6 +69,7 @@ [] dtorck_constraint: rustc_middle::traits::query::DropckConstraint<'tcx>, [] candidate_step: rustc_middle::traits::query::CandidateStep<'tcx>, [] autoderef_bad_ty: rustc_middle::traits::query::MethodAutoderefBadTy<'tcx>, + [] canonical_goal_evaluation: rustc_middle::traits::solve::inspect::GoalEvaluationStep<'tcx>, [] query_region_constraints: rustc_middle::infer::canonical::QueryRegionConstraints<'tcx>, [] type_op_subtype: rustc_middle::infer::canonical::Canonical<'tcx, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/map/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/map/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/map/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/map/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,7 +6,7 @@ use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::svh::Svh; -use rustc_data_structures::sync::{par_for_each_in, DynSend, DynSync}; +use rustc_data_structures::sync::{par_for_each_in, try_par_for_each_in, DynSend, DynSync}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId, LOCAL_CRATE}; use rustc_hir::definitions::{DefKey, DefPath, DefPathData, DefPathHash}; @@ -16,7 +16,7 @@ use rustc_middle::hir::nested_filter; use rustc_span::def_id::StableCrateId; use rustc_span::symbol::{kw, sym, Ident, Symbol}; -use rustc_span::Span; +use rustc_span::{ErrorGuaranteed, Span}; use rustc_target::spec::abi::Abi; #[inline] @@ -240,7 +240,7 @@ Node::Field(_) => DefKind::Field, Node::Expr(expr) => match expr.kind { ExprKind::Closure(Closure { movability: None, .. }) => DefKind::Closure, - ExprKind::Closure(Closure { movability: Some(_), .. }) => DefKind::Generator, + ExprKind::Closure(Closure { movability: Some(_), .. }) => DefKind::Coroutine, _ => bug!("def_kind: unsupported node: {}", self.node_to_string(hir_id)), }, Node::GenericParam(param) => match param.kind { @@ -445,7 +445,7 @@ } DefKind::InlineConst => BodyOwnerKind::Const { inline: true }, DefKind::Ctor(..) | DefKind::Fn | DefKind::AssocFn => BodyOwnerKind::Fn, - DefKind::Closure | DefKind::Generator => BodyOwnerKind::Closure, + DefKind::Closure | DefKind::Coroutine => BodyOwnerKind::Closure, DefKind::Static(mt) => BodyOwnerKind::Static(mt), dk => bug!("{:?} is not a body node: {:?}", def_id, dk), } @@ -632,6 +632,17 @@ }) } + #[inline] + pub fn try_par_for_each_module( + self, + f: impl Fn(LocalModDefId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync, + ) -> Result<(), ErrorGuaranteed> { + let crate_items = self.tcx.hir_crate_items(()); + try_par_for_each_in(&crate_items.submodules[..], |module| { + f(LocalModDefId::new_unchecked(module.def_id)) + }) + } + /// Returns an iterator for the nodes in the ancestor tree of the `current_id` /// until the crate root is reached. Prefer this over your own loop using `parent_id`. #[inline] @@ -970,12 +981,15 @@ // SyntaxContext of the visibility. sig.span.find_ancestor_in_same_ctxt(*outer_span).unwrap_or(*outer_span) } + // Impls, including their where clauses. 
+ Node::Item(Item { + kind: ItemKind::Impl(Impl { generics, .. }), + span: outer_span, + .. + }) => until_within(*outer_span, generics.where_clause_span), // Constants and Statics. Node::Item(Item { - kind: - ItemKind::Const(ty, ..) - | ItemKind::Static(ty, ..) - | ItemKind::Impl(Impl { self_ty: ty, .. }), + kind: ItemKind::Const(ty, ..) | ItemKind::Static(ty, ..), span: outer_span, .. }) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,12 +9,12 @@ use crate::query::Providers; use crate::ty::{EarlyBinder, ImplSubject, TyCtxt}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; -use rustc_data_structures::sync::{par_for_each_in, DynSend, DynSync}; +use rustc_data_structures::sync::{try_par_for_each_in, DynSend, DynSync}; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId}; use rustc_hir::*; use rustc_query_system::ich::StableHashingContext; -use rustc_span::{ExpnId, DUMMY_SP}; +use rustc_span::{ErrorGuaranteed, ExpnId, DUMMY_SP}; /// Top-level HIR node for current owner. This only contains the node for which /// `HirId::local_id == 0`, and excludes bodies. @@ -78,20 +78,32 @@ self.owners().map(|id| id.def_id) } - pub fn par_items(&self, f: impl Fn(ItemId) + DynSend + DynSync) { - par_for_each_in(&self.items[..], |&id| f(id)) - } - - pub fn par_trait_items(&self, f: impl Fn(TraitItemId) + DynSend + DynSync) { - par_for_each_in(&self.trait_items[..], |&id| f(id)) - } - - pub fn par_impl_items(&self, f: impl Fn(ImplItemId) + DynSend + DynSync) { - par_for_each_in(&self.impl_items[..], |&id| f(id)) - } - - pub fn par_foreign_items(&self, f: impl Fn(ForeignItemId) + DynSend + DynSync) { - par_for_each_in(&self.foreign_items[..], |&id| f(id)) + pub fn par_items( + &self, + f: impl Fn(ItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync, + ) -> Result<(), ErrorGuaranteed> { + try_par_for_each_in(&self.items[..], |&id| f(id)) + } + + pub fn par_trait_items( + &self, + f: impl Fn(TraitItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync, + ) -> Result<(), ErrorGuaranteed> { + try_par_for_each_in(&self.trait_items[..], |&id| f(id)) + } + + pub fn par_impl_items( + &self, + f: impl Fn(ImplItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync, + ) -> Result<(), ErrorGuaranteed> { + try_par_for_each_in(&self.impl_items[..], |&id| f(id)) + } + + pub fn par_foreign_items( + &self, + f: impl Fn(ForeignItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync, + ) -> Result<(), ErrorGuaranteed> { + try_par_for_each_in(&self.foreign_items[..], |&id| f(id)) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/nested_filter.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/nested_filter.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/nested_filter.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hir/nested_filter.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,7 +4,7 @@ /// that are inside of an item-like. 
/// /// Notably, possible occurrences of bodies in non-item-like things -/// include: closures/generators, inline `const {}` blocks, and +/// include: closures/coroutines, inline `const {}` blocks, and /// constant arguments of types, e.g. in `let _: [(); /* HERE */];`. /// /// **This is the most common choice.** A very common pattern is diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hooks/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hooks/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hooks/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/hooks/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,8 @@ +//! "Hooks" provide a way for `tcx` functionality to be provided by some downstream crate without +//! everything in rustc having to depend on that crate. This is somewhat similar to queries, but +//! queries come with a lot of machinery for caching and incremental compilation, whereas hooks are +//! just plain function pointers without any of the query magic. + use crate::mir; use crate::query::TyCtxtAt; use crate::ty::{Ty, TyCtxt}; @@ -61,5 +66,8 @@ declare_hooks! { /// Tries to destructure an `mir::Const` ADT or array into its variant index /// and its field values. This should only be used for pretty printing. - hook try_destructure_mir_constant_for_diagnostics(val: mir::ConstValue<'tcx>, ty: Ty<'tcx>) -> Option>; + hook try_destructure_mir_constant_for_user_output(val: mir::ConstValue<'tcx>, ty: Ty<'tcx>) -> Option>; + + /// Getting a &core::panic::Location referring to a span. + hook const_caller_location(file: rustc_span::Symbol, line: u32, col: u32) -> mir::ConstValue<'tcx>; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/infer/canonical.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/infer/canonical.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/infer/canonical.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/infer/canonical.rs 2023-12-21 16:55:28.000000000 +0000 @@ -21,35 +21,17 @@ //! //! [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html -use crate::infer::MemberConstraint; -use crate::mir::ConstraintCategory; -use crate::ty::GenericArg; -use crate::ty::{self, BoundVar, List, Region, Ty, TyCtxt}; use rustc_macros::HashStable; +use rustc_type_ir::Canonical as IrCanonical; use smallvec::SmallVec; -use std::fmt::Display; use std::ops::Index; -/// A "canonicalized" type `V` is one where all free inference -/// variables have been rewritten to "canonical vars". These are -/// numbered starting from 0 in order of first appearance. 
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, TyDecodable, TyEncodable)] -#[derive(HashStable, TypeFoldable, TypeVisitable)] -pub struct Canonical<'tcx, V> { - pub value: V, - pub max_universe: ty::UniverseIndex, - pub variables: CanonicalVarInfos<'tcx>, -} +use crate::infer::MemberConstraint; +use crate::mir::ConstraintCategory; +use crate::ty::GenericArg; +use crate::ty::{self, BoundVar, List, Region, Ty, TyCtxt}; -impl<'tcx, V: Display> std::fmt::Display for Canonical<'tcx, V> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "Canonical {{ value: {}, max_universe: {:?}, variables: {:?} }}", - self.value, self.max_universe, self.variables - ) - } -} +pub type Canonical<'tcx, V> = IrCanonical, V>; pub type CanonicalVarInfos<'tcx> = &'tcx List>; @@ -104,7 +86,7 @@ } else { // It's ok if this region var isn't unique } - }, + } ty::GenericArgKind::Type(ty) => { if let ty::Bound(ty::INNERMOST, bt) = *ty.kind() && var == bt.var @@ -240,7 +222,7 @@ Effect, /// A "placeholder" that represents "any const". - PlaceholderConst(ty::PlaceholderConst<'tcx>, Ty<'tcx>), + PlaceholderConst(ty::PlaceholderConst, Ty<'tcx>), } impl<'tcx> CanonicalVarKind<'tcx> { @@ -379,56 +361,6 @@ } } -impl<'tcx, R> Canonical<'tcx, QueryResponse<'tcx, R>> { - pub fn is_proven(&self) -> bool { - self.value.is_proven() - } - - pub fn is_ambiguous(&self) -> bool { - !self.is_proven() - } -} - -impl<'tcx, V> Canonical<'tcx, V> { - /// Allows you to map the `value` of a canonical while keeping the - /// same set of bound variables. - /// - /// **WARNING:** This function is very easy to mis-use, hence the - /// name! In particular, the new value `W` must use all **the - /// same type/region variables** in **precisely the same order** - /// as the original! (The ordering is defined by the - /// `TypeFoldable` implementation of the type in question.) - /// - /// An example of a **correct** use of this: - /// - /// ```rust,ignore (not real code) - /// let a: Canonical<'_, T> = ...; - /// let b: Canonical<'_, (T,)> = a.unchecked_map(|v| (v, )); - /// ``` - /// - /// An example of an **incorrect** use of this: - /// - /// ```rust,ignore (not real code) - /// let a: Canonical<'tcx, T> = ...; - /// let ty: Ty<'tcx> = ...; - /// let b: Canonical<'tcx, (T, Ty<'tcx>)> = a.unchecked_map(|v| (v, ty)); - /// ``` - pub fn unchecked_map(self, map_op: impl FnOnce(V) -> W) -> Canonical<'tcx, W> { - let Canonical { max_universe, variables, value } = self; - Canonical { max_universe, variables, value: map_op(value) } - } - - /// Allows you to map the `value` of a canonical while keeping the same set of - /// bound variables. - /// - /// **WARNING:** This function is very easy to mis-use, hence the name! See - /// the comment of [Canonical::unchecked_map] for more details. 
- pub fn unchecked_rebind(self, value: W) -> Canonical<'tcx, W> { - let Canonical { max_universe, variables, value: _ } = self; - Canonical { max_universe, variables, value } - } -} - pub type QueryOutlivesConstraint<'tcx> = (ty::OutlivesPredicate, Region<'tcx>>, ConstraintCategory<'tcx>); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/infer/unify_key.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/infer/unify_key.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/infer/unify_key.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/infer/unify_key.rs 2023-12-21 16:55:28.000000000 +0000 @@ -141,18 +141,30 @@ pub val: ConstVariableValue<'tcx>, } -impl<'tcx> UnifyKey for ty::ConstVid<'tcx> { +#[derive(PartialEq, Copy, Clone, Debug)] +pub struct ConstVidKey<'tcx> { + pub vid: ty::ConstVid, + pub phantom: PhantomData>, +} + +impl<'tcx> From for ConstVidKey<'tcx> { + fn from(vid: ty::ConstVid) -> Self { + ConstVidKey { vid, phantom: PhantomData } + } +} + +impl<'tcx> UnifyKey for ConstVidKey<'tcx> { type Value = ConstVarValue<'tcx>; #[inline] fn index(&self) -> u32 { - self.index + self.vid.as_u32() } #[inline] fn from_index(i: u32) -> Self { - ty::ConstVid { index: i, phantom: PhantomData } + ConstVidKey::from(ty::ConstVid::from_u32(i)) } fn tag() -> &'static str { - "ConstVid" + "ConstVidKey" } } @@ -224,17 +236,29 @@ } } -impl<'tcx> UnifyKey for ty::EffectVid<'tcx> { +#[derive(PartialEq, Copy, Clone, Debug)] +pub struct EffectVidKey<'tcx> { + pub vid: ty::EffectVid, + pub phantom: PhantomData>, +} + +impl<'tcx> From for EffectVidKey<'tcx> { + fn from(vid: ty::EffectVid) -> Self { + EffectVidKey { vid, phantom: PhantomData } + } +} + +impl<'tcx> UnifyKey for EffectVidKey<'tcx> { type Value = Option>; #[inline] fn index(&self) -> u32 { - self.index + self.vid.as_u32() } #[inline] fn from_index(i: u32) -> Self { - ty::EffectVid { index: i, phantom: PhantomData } + EffectVidKey::from(ty::EffectVid::from_u32(i)) } fn tag() -> &'static str { - "EffectVid" + "EffectVidKey" } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -23,6 +23,8 @@ //! This API is completely unstable and subject to change. 
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature(allocator_api)] #![feature(array_windows)] #![feature(assert_matches)] @@ -30,11 +32,12 @@ #![feature(core_intrinsics)] #![feature(discriminant_kind)] #![feature(exhaustive_patterns)] -#![feature(generators)] +#![cfg_attr(bootstrap, feature(generators))] +#![cfg_attr(not(bootstrap), feature(coroutines))] #![feature(get_mut_unchecked)] #![feature(if_let_guard)] #![feature(inline_const)] -#![feature(iter_from_generator)] +#![feature(iter_from_coroutine)] #![feature(negative_impls)] #![feature(never_type)] #![feature(extern_types)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/lint.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/lint.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/lint.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/lint.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,17 +9,15 @@ FutureIncompatibilityReason, Level, Lint, LintId, }; use rustc_session::Session; -use rustc_span::hygiene::MacroKind; -use rustc_span::source_map::{DesugaringKind, ExpnKind}; -use rustc_span::{symbol, Span, Symbol, DUMMY_SP}; +use rustc_span::hygiene::{ExpnKind, MacroKind}; +use rustc_span::{symbol, DesugaringKind, Span, Symbol, DUMMY_SP}; use crate::ty::TyCtxt; /// How a lint level was set. #[derive(Clone, Copy, PartialEq, Eq, HashStable, Debug)] pub enum LintLevelSource { - /// Lint is at the default level as declared - /// in rustc or a plugin. + /// Lint is at the default level as declared in rustc. Default, /// Lint level was set by an attribute. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -126,14 +126,6 @@ } } - /// Returns `true` if `#[inline]` or `#[inline(always)]` is present. 
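A rough stand-in sketch of the predicate documented just above and removed below (`requests_inline`): `#[inline]` and `#[inline(always)]` count as a request, while the attribute's absence and `#[inline(never)]` do not. The enum here mirrors the variants matched in the removed body but is not the real `rustc_attr` type.

```rust
#[derive(Copy, Clone)]
enum InlineAttr {
    None,
    Hint,
    Always,
    Never,
}

fn requests_inline(inline: InlineAttr) -> bool {
    match inline {
        InlineAttr::Hint | InlineAttr::Always => true,
        InlineAttr::None | InlineAttr::Never => false,
    }
}

fn main() {
    assert!(requests_inline(InlineAttr::Hint));
    assert!(requests_inline(InlineAttr::Always));
    assert!(!requests_inline(InlineAttr::None));
    assert!(!requests_inline(InlineAttr::Never));
}
```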
- pub fn requests_inline(&self) -> bool { - match self.inline { - InlineAttr::Hint | InlineAttr::Always => true, - InlineAttr::None | InlineAttr::Never => false, - } - } - /// Returns `true` if it looks like this symbol needs to be exported, for example: /// /// * `#[no_mangle]` is present diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/privacy.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/privacy.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/privacy.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/privacy.rs 2023-12-21 16:55:28.000000000 +0000 @@ -244,7 +244,9 @@ if !(inherited_effective_vis_at_prev_level == inherited_effective_vis_at_level && level != l) { - calculated_effective_vis = if let Some(max_vis) = max_vis && !max_vis.is_at_least(inherited_effective_vis_at_level, tcx) { + calculated_effective_vis = if let Some(max_vis) = max_vis + && !max_vis.is_at_least(inherited_effective_vis_at_level, tcx) + { max_vis } else { inherited_effective_vis_at_level diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/region.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/region.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/region.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/region.rs 2023-12-21 16:55:28.000000000 +0000 @@ -308,7 +308,7 @@ /// The number of visit_expr and visit_pat calls done in the body. /// Used to sanity check visit_expr/visit_pat call count when - /// calculating generator interiors. + /// calculating coroutine interiors. pub body_expr_count: FxHashMap, } @@ -413,7 +413,7 @@ /// Gives the number of expressions visited in a body. /// Used to sanity check visit_expr call count when - /// calculating generator interiors. + /// calculating coroutine interiors. pub fn body_expr_count(&self, body_id: hir::BodyId) -> Option { self.body_expr_count.get(&body_id).copied() } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/stability.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/stability.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/stability.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/middle/stability.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,7 +5,9 @@ use crate::ty::{self, TyCtxt}; use rustc_ast::NodeId; -use rustc_attr::{self as attr, ConstStability, DefaultBodyStability, Deprecation, Stability}; +use rustc_attr::{ + self as attr, ConstStability, DefaultBodyStability, DeprecatedSince, Deprecation, Stability, +}; use rustc_data_structures::fx::FxHashMap; use rustc_errors::{Applicability, Diagnostic}; use rustc_feature::GateIssue; @@ -123,44 +125,6 @@ } } -/// Checks whether an item marked with `deprecated(since="X")` is currently -/// deprecated (i.e., whether X is not greater than the current rustc version). -pub fn deprecation_in_effect(depr: &Deprecation) -> bool { - let is_since_rustc_version = depr.is_since_rustc_version; - let since = depr.since.as_ref().map(Symbol::as_str); - - fn parse_version(ver: &str) -> Vec { - // We ignore non-integer components of the version (e.g., "nightly"). - ver.split(|c| c == '.' || c == '-').flat_map(|s| s.parse()).collect() - } - - if !is_since_rustc_version { - // The `since` field doesn't have semantic purpose without `#![staged_api]`. 
- return true; - } - - if let Some(since) = since { - if since == "TBD" { - return false; - } - - if let Some(rustc) = option_env!("CFG_RELEASE") { - let since: Vec = parse_version(&since); - let rustc: Vec = parse_version(rustc); - // We simply treat invalid `since` attributes as relating to a previous - // Rust version, thus always displaying the warning. - if since.len() != 3 { - return true; - } - return since <= rustc; - } - }; - - // Assume deprecation is in effect if "since" field is missing - // or if we can't determine the current Rust version. - true -} - pub fn deprecation_suggestion( diag: &mut Diagnostic, kind: &str, @@ -183,7 +147,7 @@ fn deprecation_message( is_in_effect: bool, - since: Option, + since: DeprecatedSince, note: Option, kind: &str, path: &str, @@ -191,17 +155,18 @@ let message = if is_in_effect { format!("use of deprecated {kind} `{path}`") } else { - let since = since.as_ref().map(Symbol::as_str); - - if since == Some("TBD") { - format!("use of {kind} `{path}` that will be deprecated in a future Rust version") - } else { - format!( - "use of {} `{}` that will be deprecated in future version {}", - kind, - path, - since.unwrap() - ) + match since { + DeprecatedSince::RustcVersion(version) => format!( + "use of {kind} `{path}` that will be deprecated in future version {version}" + ), + DeprecatedSince::Future => { + format!("use of {kind} `{path}` that will be deprecated in a future Rust version") + } + DeprecatedSince::NonStandard(_) + | DeprecatedSince::Unspecified + | DeprecatedSince::Err => { + unreachable!("this deprecation is always in effect; {since:?}") + } } }; @@ -216,7 +181,7 @@ kind: &str, path: &str, ) -> (String, &'static Lint) { - let is_in_effect = deprecation_in_effect(depr); + let is_in_effect = depr.is_in_effect(); ( deprecation_message(is_in_effect, depr.since, depr.note, kind, path), deprecation_lint(is_in_effect), @@ -384,11 +349,11 @@ // With #![staged_api], we want to emit down the whole // hierarchy. let depr_attr = &depr_entry.attr; - if !skip || depr_attr.is_since_rustc_version { + if !skip || depr_attr.is_since_rustc_version() { // Calculating message for lint involves calling `self.def_path_str`. // Which by default to calculate visible path will invoke expensive `visible_parent_map` query. // So we skip message calculation altogether, if lint is allowed. - let is_in_effect = deprecation_in_effect(depr_attr); + let is_in_effect = depr_attr.is_in_effect(); let lint = deprecation_lint(is_in_effect); if self.lint_level_at_node(lint, id).0 != Level::Allow { let def_path = with_no_trimmed_paths!(self.def_path_str(def_id)); @@ -448,14 +413,16 @@ debug!("stability: skipping span={:?} since it is internal", span); return EvalResult::Allow; } - if self.features().active(feature) { + if self.features().declared(feature) { return EvalResult::Allow; } // If this item was previously part of a now-stabilized feature which is still // active (i.e. the user hasn't removed the attribute for the stabilized feature // yet) then allow use of this item. 
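Earlier in this hunk the free function `deprecation_in_effect` is removed in favor of `Deprecation::is_in_effect`. A hedged, standalone reconstruction of the logic the removed helper performed, detached from the rustc types: version strings are split into integer components (ignoring non-integer parts such as "nightly") and compared lexicographically against the current compiler version, with "TBD" and malformed `since` values handled as the removed comments describe.

```rust
fn parse_version(ver: &str) -> Vec<u32> {
    // Non-integer components (e.g. "nightly") are ignored.
    ver.split(|c| c == '.' || c == '-').flat_map(|s| s.parse()).collect()
}

fn deprecation_in_effect(since: &str, current_rustc: &str) -> bool {
    if since == "TBD" {
        // Deprecation announced, but not yet scheduled for a concrete version.
        return false;
    }
    let since = parse_version(since);
    if since.len() != 3 {
        // Malformed `since` values are treated as already in effect, so the warning shows.
        return true;
    }
    since <= parse_version(current_rustc)
}

fn main() {
    assert!(deprecation_in_effect("1.70.0", "1.75.0"));
    assert!(!deprecation_in_effect("1.80.0", "1.75.0"));
    assert!(!deprecation_in_effect("TBD", "1.75.0"));
}
```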
- if let Some(implied_by) = implied_by && self.features().active(implied_by) { + if let Some(implied_by) = implied_by + && self.features().declared(implied_by) + { return EvalResult::Allow; } @@ -532,7 +499,7 @@ debug!("body stability: skipping span={:?} since it is internal", span); return EvalResult::Allow; } - if self.features().active(feature) { + if self.features().declared(feature) { return EvalResult::Allow; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/consts.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/consts.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/consts.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/consts.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,6 +3,7 @@ use rustc_hir; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::{self as hir}; +use rustc_session::RemapFileNameExt; use rustc_span::Span; use rustc_target::abi::{HasDataLayout, Size}; @@ -172,6 +173,24 @@ let end = end.try_into().unwrap(); Some(data.inner().inspect_with_uninit_and_ptr_outside_interpreter(start..end)) } + + /// Check if a constant may contain provenance information. This is used by MIR opts. + /// Can return `true` even if there is no provenance. + pub fn may_have_provenance(&self, tcx: TyCtxt<'tcx>, size: Size) -> bool { + match *self { + ConstValue::ZeroSized | ConstValue::Scalar(Scalar::Int(_)) => return false, + ConstValue::Scalar(Scalar::Ptr(..)) => return true, + // It's hard to find out the part of the allocation we point to; + // just conservatively check everything. + ConstValue::Slice { data, meta: _ } => !data.inner().provenance().ptrs().is_empty(), + ConstValue::Indirect { alloc_id, offset } => !tcx + .global_alloc(alloc_id) + .unwrap_memory() + .inner() + .provenance() + .range_empty(super::AllocRange::from(offset..offset + size), &tcx), + } + } } /////////////////////////////////////////////////////////////////////////// @@ -213,10 +232,10 @@ pub fn try_to_scalar(self) -> Option { match self { Const::Ty(c) => match c.kind() { - ty::ConstKind::Value(valtree) => match valtree { - ty::ValTree::Leaf(scalar_int) => Some(Scalar::Int(scalar_int)), - ty::ValTree::Branch(_) => None, - }, + ty::ConstKind::Value(valtree) if c.ty().is_primitive() => { + // A valtree of a type where leaves directly represent the scalar const value. + Some(valtree.unwrap_leaf().into()) + } _ => None, }, Const::Val(val, _) => val.try_to_scalar(), @@ -279,7 +298,16 @@ tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, ) -> Option { - self.eval(tcx, param_env, None).ok()?.try_to_scalar() + match self { + Const::Ty(c) if c.ty().is_primitive() => { + // Avoid the `valtree_to_const_val` query. Can only be done on primitive types that + // are valtree leaves, and *not* on references. (References should return the + // pointer here, which valtrees don't represent.) + let val = c.eval(tcx, param_env, None).ok()?; + Some(val.unwrap_leaf().into()) + } + _ => self.eval(tcx, param_env, None).ok()?.try_to_scalar(), + } } #[inline] @@ -476,6 +504,40 @@ _ => Self::Ty(c), } } + + /// Return true if any evaluation of this constant always returns the same value, + /// taking into account even pointer identity tests. 
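The `is_deterministic` documentation just above is about pointer identity: constants containing references may materialize a fresh allocation at each use, so two evaluations of "the same" constant are not guaranteed to compare equal by address. A surface-Rust illustration of the same idea (the compiler does not promise address identity across uses of a `const`; whether the addresses coincide here is left to the implementation):

```rust
const GREETING: &str = "hello";

fn main() {
    // Each use of GREETING is conceptually a fresh value; the string *contents*
    // are deterministic...
    assert_eq!(GREETING, "hello");

    // ...but address identity between two uses is not something to rely on.
    let a: *const u8 = GREETING.as_ptr();
    let b: *const u8 = GREETING.as_ptr();
    println!("same backing address this time? {}", a == b);
}
```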
+ pub fn is_deterministic(&self) -> bool { + // Some constants may generate fresh allocations for pointers they contain, + // so using the same constant twice can yield two different results: + // - valtrees purposefully generate new allocations + // - ConstValue::Slice also generate new allocations + match self { + Const::Ty(c) => match c.kind() { + ty::ConstKind::Param(..) => true, + // A valtree may be a reference. Valtree references correspond to a + // different allocation each time they are evaluated. Valtrees for primitive + // types are fine though. + ty::ConstKind::Value(_) => c.ty().is_primitive(), + ty::ConstKind::Unevaluated(..) | ty::ConstKind::Expr(..) => false, + // This can happen if evaluation of a constant failed. The result does not matter + // much since compilation is doomed. + ty::ConstKind::Error(..) => false, + // Should not appear in runtime MIR. + ty::ConstKind::Infer(..) + | ty::ConstKind::Bound(..) + | ty::ConstKind::Placeholder(..) => bug!(), + }, + Const::Unevaluated(..) => false, + // If the same slice appears twice in the MIR, we cannot guarantee that we will + // give the same `AllocId` to the data. + Const::Val(ConstValue::Slice { .. }, _) => false, + Const::Val( + ConstValue::ZeroSized | ConstValue::Scalar(_) | ConstValue::Indirect { .. }, + _, + ) => true, + } + } } /// An unevaluated (potentially generic) constant used in MIR. @@ -520,3 +582,20 @@ } } } + +/////////////////////////////////////////////////////////////////////////// +/// Const-related utilities + +impl<'tcx> TyCtxt<'tcx> { + pub fn span_as_caller_location(self, span: Span) -> ConstValue<'tcx> { + let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span); + let caller = self.sess.source_map().lookup_char_pos(topmost.lo()); + self.const_caller_location( + rustc_span::symbol::Symbol::intern( + &caller.file.name.for_codegen(&self.sess).to_string_lossy(), + ), + caller.line as u32, + caller.col_display as u32 + 1, + ) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/coverage.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/coverage.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/coverage.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/coverage.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,5 +1,6 @@ //! Metadata from source code coverage analysis and instrumentation. +use rustc_index::IndexVec; use rustc_macros::HashStable; use rustc_span::Symbol; @@ -8,6 +9,11 @@ rustc_index::newtype_index! { /// ID of a coverage counter. Values ascend from 0. /// + /// Before MIR inlining, counter IDs are local to their enclosing function. + /// After MIR inlining, coverage statements may have been inlined into + /// another function, so use the statement's source-scope to find which + /// function/instance its IDs are meaningful for. + /// /// Note that LLVM handles counter IDs as `uint32_t`, so there is no need /// to use a larger representation on the Rust side. #[derive(HashStable)] @@ -18,16 +24,16 @@ impl CounterId { pub const START: Self = Self::from_u32(0); - - #[inline(always)] - pub fn next_id(self) -> Self { - Self::from_u32(self.as_u32() + 1) - } } rustc_index::newtype_index! { /// ID of a coverage-counter expression. Values ascend from 0. /// + /// Before MIR inlining, expression IDs are local to their enclosing function. 
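The new `span_as_caller_location` helper above computes, at compile time, the same kind of "caller location" value that `#[track_caller]` exposes at run time. A plain-Rust illustration of that notion using only stable `std` APIs:

```rust
// With #[track_caller], Location::caller() reports the *call site* of this
// function rather than a location inside its body.
#[track_caller]
fn where_am_i() -> &'static std::panic::Location<'static> {
    std::panic::Location::caller()
}

fn main() {
    let loc = where_am_i();
    // Prints the file, line and column of the `where_am_i()` call above.
    println!("{}:{}:{}", loc.file(), loc.line(), loc.column());
}
```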
+ /// After MIR inlining, coverage statements may have been inlined into + /// another function, so use the statement's source-scope to find which + /// function/instance its IDs are meaningful for. + /// /// Note that LLVM handles expression IDs as `uint32_t`, so there is no need /// to use a larger representation on the Rust side. #[derive(HashStable)] @@ -38,26 +44,23 @@ impl ExpressionId { pub const START: Self = Self::from_u32(0); - - #[inline(always)] - pub fn next_id(self) -> Self { - Self::from_u32(self.as_u32() + 1) - } } -/// Operand of a coverage-counter expression. +/// Enum that can hold a constant zero value, the ID of an physical coverage +/// counter, or the ID of a coverage-counter expression. /// -/// Operands can be a constant zero value, an actual coverage counter, or another -/// expression. Counter/expression operands are referred to by ID. +/// This was originally only used for expression operands (and named `Operand`), +/// but the zero/counter/expression distinction is also useful for representing +/// the value of code/gap mappings, and the true/false arms of branch mappings. #[derive(Copy, Clone, PartialEq, Eq)] #[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)] -pub enum Operand { +pub enum CovTerm { Zero, Counter(CounterId), Expression(ExpressionId), } -impl Debug for Operand { +impl Debug for CovTerm { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Self::Zero => write!(f, "Zero"), @@ -69,40 +72,31 @@ #[derive(Clone, PartialEq, TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)] pub enum CoverageKind { - Counter { - function_source_hash: u64, - /// ID of this counter within its enclosing function. - /// Expressions in the same function can refer to it as an operand. - id: CounterId, - }, - Expression { - /// ID of this coverage-counter expression within its enclosing function. - /// Other expressions in the same function can refer to it as an operand. - id: ExpressionId, - lhs: Operand, - op: Op, - rhs: Operand, - }, - Unreachable, + /// Marks the point in MIR control flow represented by a coverage counter. + /// + /// This is eventually lowered to `llvm.instrprof.increment` in LLVM IR. + /// + /// If this statement does not survive MIR optimizations, any mappings that + /// refer to this counter can have those references simplified to zero. + CounterIncrement { id: CounterId }, + + /// Marks the point in MIR control-flow represented by a coverage expression. + /// + /// If this statement does not survive MIR optimizations, any mappings that + /// refer to this expression can have those references simplified to zero. + /// + /// (This is only inserted for expression IDs that are directly used by + /// mappings. Intermediate expressions with no direct mappings are + /// retained/zeroed based on whether they are transitively used.) + ExpressionUsed { id: ExpressionId }, } impl Debug for CoverageKind { fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result { use CoverageKind::*; match self { - Counter { id, .. 
} => write!(fmt, "Counter({:?})", id.index()), - Expression { id, lhs, op, rhs } => write!( - fmt, - "Expression({:?}) = {:?} {} {:?}", - id.index(), - lhs, - match op { - Op::Add => "+", - Op::Subtract => "-", - }, - rhs, - ), - Unreachable => write!(fmt, "Unreachable"), + CounterIncrement { id } => write!(fmt, "CounterIncrement({:?})", id.index()), + ExpressionUsed { id } => write!(fmt, "ExpressionUsed({:?})", id.index()), } } } @@ -143,3 +137,38 @@ matches!(self, Self::Subtract) } } + +#[derive(Clone, Debug)] +#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)] +pub struct Expression { + pub lhs: CovTerm, + pub op: Op, + pub rhs: CovTerm, +} + +#[derive(Clone, Debug)] +#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)] +pub struct Mapping { + pub code_region: CodeRegion, + + /// Indicates whether this mapping uses a counter value, expression value, + /// or zero value. + /// + /// FIXME: When we add support for mapping kinds other than `Code` + /// (e.g. branch regions, expansion regions), replace this with a dedicated + /// mapping-kind enum. + pub term: CovTerm, +} + +/// Stores per-function coverage information attached to a `mir::Body`, +/// to be used in conjunction with the individual coverage statements injected +/// into the function's basic blocks. +#[derive(Clone, Debug)] +#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)] +pub struct FunctionCoverageInfo { + pub function_source_hash: u64, + pub num_counters: usize, + + pub expressions: IndexVec, + pub mappings: Vec, +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs 2023-12-21 16:55:28.000000000 +0000 @@ -315,7 +315,9 @@ self.ptrs.insert_presorted(dest_ptrs.into()); } if Prov::OFFSET_IS_ADDR { - if let Some(dest_bytes) = copy.dest_bytes && !dest_bytes.is_empty() { + if let Some(dest_bytes) = copy.dest_bytes + && !dest_bytes.is_empty() + { self.bytes.get_or_insert_with(Box::default).insert_presorted(dest_bytes.into()); } } else { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/allocation.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/allocation.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/allocation.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/allocation.rs 2023-12-21 16:55:28.000000000 +0000 @@ -32,23 +32,16 @@ pub trait AllocBytes: Clone + fmt::Debug + Eq + PartialEq + Hash + Deref + DerefMut { - /// Adjust the bytes to the specified alignment -- by default, this is a no-op. - fn adjust_to_align(self, _align: Align) -> Self; - /// Create an `AllocBytes` from a slice of `u8`. fn from_bytes<'a>(slice: impl Into>, _align: Align) -> Self; - /// Create a zeroed `AllocBytes` of the specified size and alignment; - /// call the callback error handler if there is an error in allocating the memory. + /// Create a zeroed `AllocBytes` of the specified size and alignment. + /// Returns `None` if we ran out of memory on the host. 
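The `CovTerm`/`Expression` pair introduced in this hunk forms a small expression tree over physical counters. A simplified sketch of how such a tree is evaluated once counter values are known (stand-in index types, and saturating subtraction used purely to keep the toy example total):

```rust
#[derive(Copy, Clone)]
enum CovTerm {
    Zero,
    Counter(usize),
    Expression(usize),
}

#[derive(Copy, Clone)]
enum Op {
    Add,
    Subtract,
}

struct Expression {
    lhs: CovTerm,
    op: Op,
    rhs: CovTerm,
}

fn eval(term: CovTerm, counters: &[u64], expressions: &[Expression]) -> u64 {
    match term {
        CovTerm::Zero => 0,
        CovTerm::Counter(id) => counters[id],
        CovTerm::Expression(id) => {
            let e = &expressions[id];
            let lhs = eval(e.lhs, counters, expressions);
            let rhs = eval(e.rhs, counters, expressions);
            match e.op {
                Op::Add => lhs + rhs,
                Op::Subtract => lhs.saturating_sub(rhs),
            }
        }
    }
}

fn main() {
    // counter 0: the `then` block ran 7 times; counter 1: the `else` block ran 3 times.
    let counters = [7, 3];
    // expression 0 = Counter(0) + Counter(1): executions of the whole `if`.
    let expressions =
        [Expression { lhs: CovTerm::Counter(0), op: Op::Add, rhs: CovTerm::Counter(1) }];
    assert_eq!(eval(CovTerm::Expression(0), &counters, &expressions), 10);
}
```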
fn zeroed(size: Size, _align: Align) -> Option; } // Default `bytes` for `Allocation` is a `Box<[u8]>`. impl AllocBytes for Box<[u8]> { - fn adjust_to_align(self, _align: Align) -> Self { - self - } - fn from_bytes<'a>(slice: impl Into>, _align: Align) -> Self { Box::<[u8]>::from(slice.into()) } @@ -299,6 +292,7 @@ } fn uninit_inner(size: Size, align: Align, fail: impl FnOnce() -> R) -> Result { + // We raise an error if we cannot create the allocation on the host. // This results in an error that can happen non-deterministically, since the memory // available to the compiler can change between runs. Normally queries are always // deterministic. However, we can be non-deterministic here because all uses of const @@ -351,10 +345,8 @@ extra: Extra, mut adjust_ptr: impl FnMut(Pointer) -> Result, Err>, ) -> Result, Err> { - // Compute new pointer provenance, which also adjusts the bytes, and realign the pointer if - // necessary. - let mut bytes = self.bytes.adjust_to_align(self.align); - + let mut bytes = self.bytes; + // Adjust provenance of pointers stored in this allocation. let mut new_provenance = Vec::with_capacity(self.provenance.ptrs().len()); let ptr_size = cx.data_layout().pointer_size.bytes_usize(); let endian = cx.data_layout().endian; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/error.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/error.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/error.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/error.rs 2023-12-21 16:55:28.000000000 +0000 @@ -43,21 +43,6 @@ } } - pub fn emit_err(&self, tcx: TyCtxt<'_>) -> ErrorGuaranteed { - match self { - &ErrorHandled::Reported(err, span) => { - if !err.is_tainted_by_errors && !span.is_dummy() { - tcx.sess.emit_err(error::ErroneousConstant { span }); - } - err.error - } - &ErrorHandled::TooGeneric(span) => tcx.sess.delay_span_bug( - span, - "encountered TooGeneric error when monomorphic data was expected", - ), - } - } - pub fn emit_note(&self, tcx: TyCtxt<'_>) { match self { &ErrorHandled::Reported(err, span) => { @@ -231,10 +216,8 @@ } /// Details of why a pointer had to be in-bounds. -#[derive(Debug, Copy, Clone, TyEncodable, TyDecodable, HashStable)] +#[derive(Debug, Copy, Clone)] pub enum CheckInAllocMsg { - /// We are dereferencing a pointer (i.e., creating a place). - DerefTest, /// We are access memory. MemoryAccessTest, /// We are doing pointer arithmetic. @@ -245,7 +228,16 @@ InboundsTest, } -#[derive(Debug, Copy, Clone, TyEncodable, TyDecodable, HashStable)] +/// Details of which pointer is not aligned. +#[derive(Debug, Copy, Clone)] +pub enum CheckAlignMsg { + /// The accessed pointer did not have proper alignment. + AccessedPtr, + /// The access ocurred with a place that was based on a misaligned pointer. + BasedOn, +} + +#[derive(Debug, Copy, Clone)] pub enum InvalidMetaKind { /// Size of a `[T]` is too big SliceTooBig, @@ -278,6 +270,13 @@ pub data_size: u64, } +/// Information about a misaligned pointer. +#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)] +pub struct Misalignment { + pub has: Align, + pub required: Align, +} + macro_rules! impl_into_diagnostic_arg_through_debug { ($($ty:ty),*$(,)?) => {$( impl IntoDiagnosticArg for $ty { @@ -339,7 +338,7 @@ /// Using an integer as a pointer in the wrong way. DanglingIntPointer(u64, CheckInAllocMsg), /// Used a pointer with bad alignment. 
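The new `Misalignment { has, required }` payload above reports both the alignment an access needed and the alignment the address actually had. A small sketch of that check with plain integers standing in for `Align` (the real type stores a power-of-two alignment; the "has" value is the largest power of two dividing the address):

```rust
#[derive(Debug, PartialEq)]
struct Misalignment {
    has: u64,      // largest power-of-two alignment the address actually satisfies
    required: u64, // alignment the access required
}

fn check_align(addr: u64, required: u64) -> Result<(), Misalignment> {
    debug_assert!(required.is_power_of_two());
    if addr % required == 0 {
        Ok(())
    } else {
        // Only reached for nonzero addresses, so trailing_zeros() < 64 here.
        let has = 1u64 << addr.trailing_zeros();
        Err(Misalignment { has, required })
    }
}

fn main() {
    assert_eq!(check_align(64, 8), Ok(()));
    assert_eq!(check_align(6, 4), Err(Misalignment { has: 2, required: 4 }));
}
```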
- AlignmentCheckFailed { required: Align, has: Align }, + AlignmentCheckFailed(Misalignment, CheckAlignMsg), /// Writing to read-only memory. WriteToReadOnly(AllocId), /// Trying to access the data behind a function pointer. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/interpret/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -142,11 +142,12 @@ use crate::ty::{self, Instance, Ty, TyCtxt}; pub use self::error::{ - struct_error, BadBytesAccess, CheckInAllocMsg, ErrorHandled, EvalToAllocationRawResult, - EvalToConstValueResult, EvalToValTreeResult, ExpectedKind, InterpError, InterpErrorInfo, - InterpResult, InvalidMetaKind, InvalidProgramInfo, MachineStopType, PointerKind, - ReportedErrorInfo, ResourceExhaustionInfo, ScalarSizeMismatch, UndefinedBehaviorInfo, - UnsupportedOpInfo, ValidationErrorInfo, ValidationErrorKind, + struct_error, BadBytesAccess, CheckAlignMsg, CheckInAllocMsg, ErrorHandled, + EvalToAllocationRawResult, EvalToConstValueResult, EvalToValTreeResult, ExpectedKind, + InterpError, InterpErrorInfo, InterpResult, InvalidMetaKind, InvalidProgramInfo, + MachineStopType, Misalignment, PointerKind, ReportedErrorInfo, ResourceExhaustionInfo, + ScalarSizeMismatch, UndefinedBehaviorInfo, UnsupportedOpInfo, ValidationErrorInfo, + ValidationErrorKind, }; pub use self::value::Scalar; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,7 +2,7 @@ //! //! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html -use crate::mir::interpret::{AllocRange, ConstAllocation, ErrorHandled, Scalar}; +use crate::mir::interpret::{AllocRange, ConstAllocation, Scalar}; use crate::mir::visit::MirVisitable; use crate::ty::codec::{TyDecoder, TyEncoder}; use crate::ty::fold::{FallibleTypeFolder, TypeFoldable}; @@ -17,7 +17,7 @@ use rustc_errors::{DiagnosticArgValue, DiagnosticMessage, ErrorGuaranteed, IntoDiagnosticArg}; use rustc_hir::def::{CtorKind, Namespace}; use rustc_hir::def_id::{DefId, CRATE_DEF_ID}; -use rustc_hir::{self, GeneratorKind, ImplicitSelfKind}; +use rustc_hir::{self, CoroutineKind, ImplicitSelfKind}; use rustc_hir::{self as hir, HirId}; use rustc_session::Session; use rustc_target::abi::{FieldIdx, VariantIdx}; @@ -246,19 +246,19 @@ } #[derive(Clone, TyEncodable, TyDecodable, Debug, HashStable, TypeFoldable, TypeVisitable)] -pub struct GeneratorInfo<'tcx> { - /// The yield type of the function, if it is a generator. +pub struct CoroutineInfo<'tcx> { + /// The yield type of the function, if it is a coroutine. pub yield_ty: Option>, - /// Generator drop glue. - pub generator_drop: Option>, + /// Coroutine drop glue. + pub coroutine_drop: Option>, - /// The layout of a generator. Produced by the state transformation. - pub generator_layout: Option>, + /// The layout of a coroutine. Produced by the state transformation. 
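The "coroutine layout" described just above records which locals live across suspension points and in which state variant they are stored. A hand-written conceptual sketch of the end result of that transformation (this is illustrative only, not compiler output): the coroutine becomes an enum of states, and resuming is a match over the current state.

```rust
// Locals live across the yield (`limit`, `current`) become fields of a state variant.
enum SquareState {
    Start { limit: u32 },
    Suspended { limit: u32, current: u32 },
    Done,
}

fn resume(state: &mut SquareState) -> Option<u32> {
    loop {
        match *state {
            SquareState::Start { limit } => {
                *state = SquareState::Suspended { limit, current: 0 };
            }
            SquareState::Suspended { limit, current } if current < limit => {
                *state = SquareState::Suspended { limit, current: current + 1 };
                return Some(current * current);
            }
            _ => {
                *state = SquareState::Done;
                return None;
            }
        }
    }
}

fn main() {
    let mut st = SquareState::Start { limit: 3 };
    let mut out = Vec::new();
    while let Some(v) = resume(&mut st) {
        out.push(v);
    }
    assert_eq!(out, vec![0, 1, 4]);
}
```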
+ pub coroutine_layout: Option>, - /// If this is a generator then record the type of source expression that caused this generator + /// If this is a coroutine then record the type of source expression that caused this coroutine /// to be created. - pub generator_kind: GeneratorKind, + pub coroutine_kind: CoroutineKind, } /// The lowered representation of a single function. @@ -284,7 +284,7 @@ /// and used for debuginfo. Indexed by a `SourceScope`. pub source_scopes: IndexVec>, - pub generator: Option>>, + pub coroutine: Option>>, /// Declarations of locals. /// @@ -345,6 +345,14 @@ pub injection_phase: Option, pub tainted_by_errors: Option, + + /// Per-function coverage information added by the `InstrumentCoverage` + /// pass, to be used in conjunction with the coverage statements injected + /// into this body's blocks. + /// + /// If `-Cinstrument-coverage` is not active, or if an individual function + /// is not eligible for coverage, then this should always be `None`. + pub function_coverage_info: Option>, } impl<'tcx> Body<'tcx> { @@ -357,7 +365,7 @@ arg_count: usize, var_debug_info: Vec>, span: Span, - generator_kind: Option, + coroutine_kind: Option, tainted_by_errors: Option, ) -> Self { // We need `arg_count` locals, and one for the return place. @@ -374,12 +382,12 @@ source, basic_blocks: BasicBlocks::new(basic_blocks), source_scopes, - generator: generator_kind.map(|generator_kind| { - Box::new(GeneratorInfo { + coroutine: coroutine_kind.map(|coroutine_kind| { + Box::new(CoroutineInfo { yield_ty: None, - generator_drop: None, - generator_layout: None, - generator_kind, + coroutine_drop: None, + coroutine_layout: None, + coroutine_kind, }) }), local_decls, @@ -392,6 +400,7 @@ is_polymorphic: false, injection_phase: None, tainted_by_errors, + function_coverage_info: None, }; body.is_polymorphic = body.has_non_region_param(); body @@ -409,7 +418,7 @@ source: MirSource::item(CRATE_DEF_ID.to_def_id()), basic_blocks: BasicBlocks::new(basic_blocks), source_scopes: IndexVec::new(), - generator: None, + coroutine: None, local_decls: IndexVec::new(), user_type_annotations: IndexVec::new(), arg_count: 0, @@ -420,6 +429,7 @@ is_polymorphic: false, injection_phase: None, tainted_by_errors: None, + function_coverage_info: None, }; body.is_polymorphic = body.has_non_region_param(); body @@ -538,22 +548,22 @@ #[inline] pub fn yield_ty(&self) -> Option> { - self.generator.as_ref().and_then(|generator| generator.yield_ty) + self.coroutine.as_ref().and_then(|coroutine| coroutine.yield_ty) } #[inline] - pub fn generator_layout(&self) -> Option<&GeneratorLayout<'tcx>> { - self.generator.as_ref().and_then(|generator| generator.generator_layout.as_ref()) + pub fn coroutine_layout(&self) -> Option<&CoroutineLayout<'tcx>> { + self.coroutine.as_ref().and_then(|coroutine| coroutine.coroutine_layout.as_ref()) } #[inline] - pub fn generator_drop(&self) -> Option<&Body<'tcx>> { - self.generator.as_ref().and_then(|generator| generator.generator_drop.as_ref()) + pub fn coroutine_drop(&self) -> Option<&Body<'tcx>> { + self.coroutine.as_ref().and_then(|coroutine| coroutine.coroutine_drop.as_ref()) } #[inline] - pub fn generator_kind(&self) -> Option { - self.generator.as_ref().map(|generator| generator.generator_kind) + pub fn coroutine_kind(&self) -> Option { + self.coroutine.as_ref().map(|coroutine| coroutine.coroutine_kind) } #[inline] @@ -569,32 +579,38 @@ self.injection_phase.is_some() } - /// *Must* be called once the full substitution for this body is known, to ensure that the body - /// is indeed fit for 
code generation or consumption more generally. - /// - /// Sadly there's no nice way to represent an "arbitrary normalizer", so we take one for - /// constants specifically. (`Option` could be used for that, but the fact - /// that `Instance::args_for_mir_body` is private and instead instance exposes normalization - /// functions makes it seem like exposing the generic args is not the intended strategy.) - /// - /// Also sadly, CTFE doesn't even know whether it runs on MIR that is already polymorphic or still monomorphic, - /// so we cannot just immediately ICE on TooGeneric. - /// - /// Returns Ok(()) if everything went fine, and `Err` if a problem occurred and got reported. - pub fn post_mono_checks( + /// For a `Location` in this scope, determine what the "caller location" at that point is. This + /// is interesting because of inlining: the `#[track_caller]` attribute of inlined functions + /// must be honored. Falls back to the `tracked_caller` value for `#[track_caller]` functions, + /// or the function's scope. + pub fn caller_location_span( &self, + mut source_info: SourceInfo, + caller_location: Option, tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, - normalize_const: impl Fn(Const<'tcx>) -> Result, ErrorHandled>, - ) -> Result<(), ErrorHandled> { - // For now, the only thing we have to check is is to ensure that all the constants used in - // the body successfully evaluate. - for &const_ in &self.required_consts { - let c = normalize_const(const_.const_)?; - c.eval(tcx, param_env, Some(const_.span))?; + from_span: impl FnOnce(Span) -> T, + ) -> T { + loop { + let scope_data = &self.source_scopes[source_info.scope]; + + if let Some((callee, callsite_span)) = scope_data.inlined { + // Stop inside the most nested non-`#[track_caller]` function, + // before ever reaching its caller (which is irrelevant). + if !callee.def.requires_caller_location(tcx) { + return from_span(source_info.span); + } + source_info.span = callsite_span; + } + + // Skip past all of the parents with `inlined: None`. + match scope_data.inlined_parent_scope { + Some(parent) => source_info.scope = parent, + None => break, + } } - Ok(()) + // No inlined `SourceScope`s, or all of them were `#[track_caller]`. + caller_location.unwrap_or_else(|| from_span(source_info.span)) } } @@ -830,22 +846,6 @@ // FIXME(matthewjasper) Don't store in this in `Body` pub local_info: ClearCrossCrate>>, - /// `true` if this is an internal local. - /// - /// These locals are not based on types in the source code and are only used - /// for a few desugarings at the moment. - /// - /// The generator transformation will sanity check the locals which are live - /// across a suspension point against the type components of the generator - /// which type checking knows are live across a suspension point. We need to - /// flag drop flags to avoid triggering this check as they are introduced - /// outside of type inference. - /// - /// This should be sound because the drop flags are fully algebraic, and - /// therefore don't affect the auto-trait or outlives properties of the - /// generator. - pub internal: bool, - /// The type of this local. pub ty: Ty<'tcx>, @@ -1058,7 +1058,7 @@ self.source_info.span.desugaring_kind().is_some() } - /// Creates a new `LocalDecl` for a temporary: mutable, non-internal. + /// Creates a new `LocalDecl` for a temporary, mutable. 
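The new `caller_location_span` above walks outward through inlined source scopes, stopping at the innermost inlined callee that is not `#[track_caller]`. A simplified model of that walk with string spans and integer scope indices (it omits the final `caller_location` fallback of the real method):

```rust
#[derive(Clone, Copy)]
struct ScopeData {
    // Some((callee_requires_caller_location, callsite_span)) if this scope came from inlining.
    inlined: Option<(bool, &'static str)>,
    inlined_parent_scope: Option<usize>,
}

fn caller_location_span(
    scopes: &[ScopeData],
    mut scope: usize,
    mut span: &'static str,
) -> &'static str {
    loop {
        let data = scopes[scope];
        if let Some((requires_caller_location, callsite_span)) = data.inlined {
            // Stop inside the most nested non-#[track_caller] callee.
            if !requires_caller_location {
                return span;
            }
            // Otherwise attribute the location to the call site and keep walking outward.
            span = callsite_span;
        }
        match data.inlined_parent_scope {
            Some(parent) => scope = parent,
            None => break,
        }
    }
    span
}

fn main() {
    // A #[track_caller] helper inlined into an outer function at "outer.rs:10".
    let scopes = [
        ScopeData { inlined: None, inlined_parent_scope: None },
        ScopeData { inlined: Some((true, "outer.rs:10")), inlined_parent_scope: Some(0) },
    ];
    assert_eq!(caller_location_span(&scopes, 1, "helper.rs:3"), "outer.rs:10");
}
```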
#[inline] pub fn new(ty: Ty<'tcx>, span: Span) -> Self { Self::with_source_info(ty, SourceInfo::outermost(span)) @@ -1070,20 +1070,12 @@ LocalDecl { mutability: Mutability::Mut, local_info: ClearCrossCrate::Set(Box::new(LocalInfo::Boring)), - internal: false, ty, user_ty: None, source_info, } } - /// Converts `self` into same `LocalDecl` except tagged as internal. - #[inline] - pub fn internal(mut self) -> Self { - self.internal = true; - self - } - /// Converts `self` into same `LocalDecl` except tagged as immutable. #[inline] pub fn immutable(mut self) -> Self { @@ -1613,6 +1605,23 @@ } } } + +/// `DefLocation` represents the location of a definition - either an argument or an assignment +/// within MIR body. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum DefLocation { + Argument, + Body(Location), +} + +impl DefLocation { + pub fn dominates(self, location: Location, dominators: &Dominators) -> bool { + match self { + DefLocation::Argument => true, + DefLocation::Body(def) => def.successor_within_block().dominates(location, dominators), + } + } +} // Some nodes are used a lot. Make sure they don't unintentionally get bigger. #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/mono.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/mono.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/mono.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/mono.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,8 +10,8 @@ use rustc_index::Idx; use rustc_query_system::ich::StableHashingContext; use rustc_session::config::OptLevel; -use rustc_span::source_map::Span; use rustc_span::symbol::Symbol; +use rustc_span::Span; use std::fmt; use std::hash::Hash; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/patch.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/patch.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/patch.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/patch.rs 2023-12-21 16:55:28.000000000 +0000 @@ -99,7 +99,9 @@ } pub fn terminate_block(&mut self, reason: UnwindTerminateReason) -> BasicBlock { - if let Some((cached_bb, cached_reason)) = self.terminate_block && reason == cached_reason { + if let Some((cached_bb, cached_reason)) = self.terminate_block + && reason == cached_reason + { return cached_bb; } @@ -127,7 +129,7 @@ Location { block: bb, statement_index: offset } } - pub fn new_internal_with_info( + pub fn new_local_with_info( &mut self, ty: Ty<'tcx>, span: Span, @@ -135,7 +137,7 @@ ) -> Local { let index = self.next_local; self.next_local += 1; - let mut new_decl = LocalDecl::new(ty, span).internal(); + let mut new_decl = LocalDecl::new(ty, span); **new_decl.local_info.as_mut().assert_crate_local() = local_info; self.new_locals.push(new_decl); Local::new(index) @@ -148,13 +150,6 @@ Local::new(index) } - pub fn new_internal(&mut self, ty: Ty<'tcx>, span: Span) -> Local { - let index = self.next_local; - self.next_local += 1; - self.new_locals.push(LocalDecl::new(ty, span).internal()); - Local::new(index) - } - pub fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock { let block = BasicBlock::new(self.patch_map.len()); debug!("MirPatch: new_block: {:?}: {:?}", block, data); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/pretty.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/pretty.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/pretty.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/pretty.rs 2023-12-21 16:55:28.000000000 +0000 @@ -16,7 +16,7 @@ Pointer, Provenance, }; use rustc_middle::mir::visit::Visitor; -use rustc_middle::mir::*; +use rustc_middle::mir::{self, *}; use rustc_middle::ty::{self, TyCtxt}; use rustc_target::abi::Size; @@ -130,8 +130,8 @@ Some(promoted) => write!(file, "::{promoted:?}`")?, } writeln!(file, " {disambiguator} {pass_name}")?; - if let Some(ref layout) = body.generator_layout() { - writeln!(file, "/* generator_layout = {layout:#?} */")?; + if let Some(ref layout) = body.coroutine_layout() { + writeln!(file, "/* coroutine_layout = {layout:#?} */")?; } writeln!(file)?; extra_data(PassWhere::BeforeCFG, &mut file)?; @@ -493,6 +493,27 @@ // Add an empty line before the first block is printed. writeln!(w)?; + if let Some(function_coverage_info) = &body.function_coverage_info { + write_function_coverage_info(function_coverage_info, w)?; + } + + Ok(()) +} + +fn write_function_coverage_info( + function_coverage_info: &coverage::FunctionCoverageInfo, + w: &mut dyn io::Write, +) -> io::Result<()> { + let coverage::FunctionCoverageInfo { expressions, mappings, .. } = function_coverage_info; + + for (id, expression) in expressions.iter_enumerated() { + writeln!(w, "{INDENT}coverage {id:?} => {expression:?};")?; + } + for coverage::Mapping { term, code_region } in mappings { + writeln!(w, "{INDENT}coverage {term:?} => {code_region:?};")?; + } + writeln!(w)?; + Ok(()) } @@ -685,10 +706,7 @@ AscribeUserType(box (ref place, ref c_ty), ref variance) => { write!(fmt, "AscribeUserType({place:?}, {variance:?}, {c_ty:?})") } - Coverage(box self::Coverage { ref kind, code_region: Some(ref rgn) }) => { - write!(fmt, "Coverage::{kind:?} for {rgn:?}") - } - Coverage(box ref coverage) => write!(fmt, "Coverage::{:?}", coverage.kind), + Coverage(box mir::Coverage { ref kind }) => write!(fmt, "Coverage::{kind:?}"), Intrinsic(box ref intrinsic) => write!(fmt, "{intrinsic}"), ConstEvalCounter => write!(fmt, "ConstEvalCounter"), Nop => write!(fmt, "nop"), @@ -764,10 +782,10 @@ Goto { .. } => write!(fmt, "goto"), SwitchInt { discr, .. } => write!(fmt, "switchInt({discr:?})"), Return => write!(fmt, "return"), - GeneratorDrop => write!(fmt, "generator_drop"), + CoroutineDrop => write!(fmt, "coroutine_drop"), UnwindResume => write!(fmt, "resume"), UnwindTerminate(reason) => { - write!(fmt, "abort({})", reason.as_short_str()) + write!(fmt, "terminate({})", reason.as_short_str()) } Yield { value, resume_arg, .. } => write!(fmt, "{resume_arg:?} = yield({value:?})"), Unreachable => write!(fmt, "unreachable"), @@ -847,7 +865,7 @@ pub fn fmt_successor_labels(&self) -> Vec> { use self::TerminatorKind::*; match *self { - Return | UnwindResume | UnwindTerminate(_) | Unreachable | GeneratorDrop => vec![], + Return | UnwindResume | UnwindTerminate(_) | Unreachable | CoroutineDrop => vec![], Goto { .. } => vec!["".into()], SwitchInt { ref targets, .. } => targets .values @@ -980,9 +998,9 @@ ty::tls::with(|tcx| { let variant_def = &tcx.adt_def(adt_did).variant(variant); let args = tcx.lift(args).expect("could not lift for printing"); - let name = FmtPrinter::new(tcx, Namespace::ValueNS) - .print_def_path(variant_def.def_id, args)? 
- .into_buffer(); + let name = FmtPrinter::print_string(tcx, Namespace::ValueNS, |cx| { + cx.print_def_path(variant_def.def_id, args) + })?; match variant_def.ctor_kind() { Some(CtorKind::Const) => fmt.write_str(&name), @@ -1028,8 +1046,8 @@ struct_fmt.finish() }), - AggregateKind::Generator(def_id, _, _) => ty::tls::with(|tcx| { - let name = format!("{{generator@{:?}}}", tcx.def_span(def_id)); + AggregateKind::Coroutine(def_id, _, _) => ty::tls::with(|tcx| { + let name = format!("{{coroutine@{:?}}}", tcx.def_span(def_id)); let mut struct_fmt = fmt.debug_struct(&name); // FIXME(project-rfc-2229#48): This should be a list of capture names/places @@ -1283,8 +1301,8 @@ self.push(&format!("+ args: {args:#?}")); } - AggregateKind::Generator(def_id, args, movability) => { - self.push("generator"); + AggregateKind::Coroutine(def_id, args, movability) => { + self.push("coroutine"); self.push(&format!("+ def_id: {def_id:?}")); self.push(&format!("+ args: {args:#?}")); self.push(&format!("+ movability: {movability:?}")); @@ -1695,7 +1713,7 @@ (_, ty::Array(..) | ty::Tuple(..) | ty::Adt(..)) if !ty.has_non_region_param() => { let ct = tcx.lift(ct).unwrap(); let ty = tcx.lift(ty).unwrap(); - if let Some(contents) = tcx.try_destructure_mir_constant_for_diagnostics(ct, ty) { + if let Some(contents) = tcx.try_destructure_mir_constant_for_user_output(ct, ty) { let fields: Vec<(ConstValue<'_>, Ty<'_>)> = contents.fields.to_vec(); match *ty.kind() { ty::Array(..) => { @@ -1722,7 +1740,7 @@ let args = tcx.lift(args).unwrap(); let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS); cx.print_alloc_ids = true; - let cx = cx.print_value_path(variant_def.def_id, args)?; + cx.print_value_path(variant_def.def_id, args)?; fmt.write_str(&cx.into_buffer())?; match variant_def.ctor_kind() { @@ -1757,14 +1775,14 @@ let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS); cx.print_alloc_ids = true; let ty = tcx.lift(ty).unwrap(); - cx = cx.pretty_print_const_scalar(scalar, ty)?; + cx.pretty_print_const_scalar(scalar, ty)?; fmt.write_str(&cx.into_buffer())?; return Ok(()); } (ConstValue::ZeroSized, ty::FnDef(d, s)) => { let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS); cx.print_alloc_ids = true; - let cx = cx.print_value_path(*d, s)?; + cx.print_value_path(*d, s)?; fmt.write_str(&cx.into_buffer())?; return Ok(()); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/query.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/query.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/query.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/query.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,5 +1,6 @@ //! Values computed by queries that use MIR. +use crate::mir; use crate::ty::{self, OpaqueHiddenType, Ty, TyCtxt}; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::unord::UnordSet; @@ -132,11 +133,11 @@ rustc_index::newtype_index! { #[derive(HashStable)] #[debug_format = "_{}"] - pub struct GeneratorSavedLocal {} + pub struct CoroutineSavedLocal {} } #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)] -pub struct GeneratorSavedTy<'tcx> { +pub struct CoroutineSavedTy<'tcx> { pub ty: Ty<'tcx>, /// Source info corresponding to the local in the original MIR body. pub source_info: SourceInfo, @@ -144,18 +145,18 @@ pub ignore_for_traits: bool, } -/// The layout of generator state. +/// The layout of coroutine state. 
#[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)] -pub struct GeneratorLayout<'tcx> { - /// The type of every local stored inside the generator. - pub field_tys: IndexVec>, +pub struct CoroutineLayout<'tcx> { + /// The type of every local stored inside the coroutine. + pub field_tys: IndexVec>, /// The name for debuginfo. - pub field_names: IndexVec>, + pub field_names: IndexVec>, /// Which of the above fields are in each variant. Note that one field may /// be stored in multiple variants. - pub variant_fields: IndexVec>, + pub variant_fields: IndexVec>, /// The source that led to each variant being created (usually, a yield or /// await). @@ -166,10 +167,10 @@ /// layout. #[type_foldable(identity)] #[type_visitable(ignore)] - pub storage_conflicts: BitMatrix, + pub storage_conflicts: BitMatrix, } -impl Debug for GeneratorLayout<'_> { +impl Debug for CoroutineLayout<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { /// Prints an iterator of (key, value) tuples as a map. struct MapPrinter<'a, K, V>(Cell + 'a>>>); @@ -184,7 +185,7 @@ } } - /// Prints the generator variant name. + /// Prints the coroutine variant name. struct GenVariantPrinter(VariantIdx); impl From for GenVariantPrinter { fn from(idx: VariantIdx) -> Self { @@ -193,7 +194,7 @@ } impl Debug for GenVariantPrinter { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - let variant_name = ty::GeneratorArgs::variant_name(self.0); + let variant_name = ty::CoroutineArgs::variant_name(self.0); if fmt.alternate() { write!(fmt, "{:9}({:?})", variant_name, self.0) } else { @@ -210,7 +211,7 @@ } } - fmt.debug_struct("GeneratorLayout") + fmt.debug_struct("CoroutineLayout") .field("field_tys", &MapPrinter::new(self.field_tys.iter_enumerated())) .field( "variant_fields", @@ -258,7 +259,7 @@ /// /// The requirements are listed as being between various `RegionVid`. The 0th /// region refers to `'static`; subsequent region vids refer to the free -/// regions that appear in the closure (or generator's) type, in order of +/// regions that appear in the closure (or coroutine's) type, in order of /// appearance. (This numbering is actually defined by the `UniversalRegions` /// struct in the NLL region checker. See for example /// `UniversalRegions::closure_mapping`.) Note the free regions in the @@ -445,14 +446,19 @@ pub fields: &'tcx [(ConstValue<'tcx>, Ty<'tcx>)], } -/// Coverage information summarized from a MIR if instrumented for source code coverage (see -/// compiler option `-Cinstrument-coverage`). This information is generated by the -/// `InstrumentCoverage` MIR pass and can be retrieved via the `coverageinfo` query. +/// Summarizes coverage IDs inserted by the `InstrumentCoverage` MIR pass +/// (for compiler option `-Cinstrument-coverage`), after MIR optimizations +/// have had a chance to potentially remove some of them. +/// +/// Used by the `coverage_ids_info` query. #[derive(Clone, TyEncodable, TyDecodable, Debug, HashStable)] -pub struct CoverageInfo { - /// The total number of coverage region counters added to the MIR `Body`. - pub num_counters: u32, - - /// The total number of coverage region counter expressions added to the MIR `Body`. - pub num_expressions: u32, +pub struct CoverageIdsInfo { + /// Coverage codegen needs to know the highest counter ID that is ever + /// incremented within a function, so that it can set the `num-counters` + /// argument of the `llvm.instrprof.increment` intrinsic. 
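The `storage_conflicts` field of `CoroutineLayout` above is a symmetric bit matrix over saved locals: two locals that are never live at the same time do not conflict and may share a slot in the layout. A toy model of that relation with plain vectors instead of the real `BitMatrix`:

```rust
struct ConflictMatrix {
    n: usize,
    bits: Vec<bool>, // row-major n x n
}

impl ConflictMatrix {
    fn new(n: usize) -> Self {
        ConflictMatrix { n, bits: vec![false; n * n] }
    }

    fn insert(&mut self, a: usize, b: usize) {
        // Conflicts are symmetric: both saved locals are live at the same point.
        self.bits[a * self.n + b] = true;
        self.bits[b * self.n + a] = true;
    }

    fn conflicts(&self, a: usize, b: usize) -> bool {
        self.bits[a * self.n + b]
    }
}

fn main() {
    let mut m = ConflictMatrix::new(3);
    m.insert(0, 1);
    assert!(m.conflicts(1, 0));
    // Saved locals 0 and 2 never overlap, so a layout pass may give them the same field.
    assert!(!m.conflicts(0, 2));
}
```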
+ /// + /// This may be less than the highest counter ID emitted by the + /// InstrumentCoverage MIR pass, if the highest-numbered counter increments + /// were removed by MIR optimizations. + pub max_counter_id: mir::coverage::CounterId, } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/syntax.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/syntax.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/syntax.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/syntax.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,7 +5,7 @@ use super::{BasicBlock, Const, Local, UserTypeProjection}; -use crate::mir::coverage::{CodeRegion, CoverageKind}; +use crate::mir::coverage::CoverageKind; use crate::traits::Reveal; use crate::ty::adjustment::PointerCoercion; use crate::ty::GenericArgsRef; @@ -15,7 +15,7 @@ use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_hir::def_id::DefId; use rustc_hir::{self as hir}; -use rustc_hir::{self, GeneratorKind}; +use rustc_hir::{self, CoroutineKind}; use rustc_index::IndexVec; use rustc_target::abi::{FieldIdx, VariantIdx}; @@ -82,10 +82,10 @@ /// that Rust itself has them. Where exactly these are is generally subject to change, and so we /// don't document this here. Runtime MIR has most retags explicit (though implicit retags /// can still occur at `Rvalue::{Ref,AddrOf}`). - /// - Generator bodies: In analysis MIR, locals may actually be behind a pointer that user code has - /// access to. This occurs in generator bodies. Such locals do not behave like other locals, + /// - Coroutine bodies: In analysis MIR, locals may actually be behind a pointer that user code has + /// access to. This occurs in coroutine bodies. Such locals do not behave like other locals, /// because they eg may be aliased in surprising ways. Runtime MIR has no such special locals - - /// all generator bodies are lowered and so all places that look like locals really are locals. + /// all coroutine bodies are lowered and so all places that look like locals really are locals. /// /// Also note that the lint pass which reports eg `200_u8 + 200_u8` as an error is run as a part /// of analysis to runtime MIR lowering. To ensure lints are reported reliably, this means that @@ -137,7 +137,7 @@ /// In addition to the semantic changes, beginning with this phase, the following variants are /// disallowed: /// * [`TerminatorKind::Yield`] - /// * [`TerminatorKind::GeneratorDrop`] + /// * [`TerminatorKind::CoroutineDrop`] /// * [`Rvalue::Aggregate`] for any `AggregateKind` except `Array` /// * [`PlaceElem::OpaqueCast`] /// @@ -292,7 +292,7 @@ /// Write the discriminant for a variant to the enum Place. /// - /// This is permitted for both generators and ADTs. This does not necessarily write to the + /// This is permitted for both coroutines and ADTs. This does not necessarily write to the /// entire place; instead, it writes to the minimum set of bytes as required by the layout for /// the type. SetDiscriminant { place: Box>, variant_index: VariantIdx }, @@ -361,11 +361,16 @@ /// Disallowed after drop elaboration. AscribeUserType(Box<(Place<'tcx>, UserTypeProjection)>, ty::Variance), - /// Marks the start of a "coverage region", injected with '-Cinstrument-coverage'. A - /// `Coverage` statement carries metadata about the coverage region, used to inject a coverage - /// map into the binary. 
If `Coverage::kind` is a `Counter`, the statement also generates - /// executable code, to increment a counter variable at runtime, each time the code region is - /// executed. + /// Carries control-flow-sensitive information injected by `-Cinstrument-coverage`, + /// such as where to generate physical coverage-counter-increments during codegen. + /// + /// Coverage statements are used in conjunction with the coverage mappings and other + /// information stored in the function's + /// [`mir::Body::function_coverage_info`](crate::mir::Body::function_coverage_info). + /// (For inlined MIR, take care to look up the *original function's* coverage info.) + /// + /// Interpreters and codegen backends that don't support coverage instrumentation + /// can usually treat this as a no-op. Coverage(Box), /// Denotes a call to an intrinsic that does not require an unwind path and always returns. @@ -514,7 +519,6 @@ #[derive(TypeFoldable, TypeVisitable)] pub struct Coverage { pub kind: CoverageKind, - pub code_region: Option, } #[derive(Clone, Debug, PartialEq, TyEncodable, TyDecodable, Hash, HashStable)] @@ -622,8 +626,8 @@ /// `dest = move _0`. It might additionally do other things, like have side-effects in the /// aliasing model. /// - /// If the body is a generator body, this has slightly different semantics; it instead causes a - /// `GeneratorState::Returned(_0)` to be created (as if by an `Aggregate` rvalue) and assigned + /// If the body is a coroutine body, this has slightly different semantics; it instead causes a + /// `CoroutineState::Returned(_0)` to be created (as if by an `Aggregate` rvalue) and assigned /// to the return place. Return, @@ -705,14 +709,14 @@ /// Marks a suspend point. /// - /// Like `Return` terminators in generator bodies, this computes `value` and then a - /// `GeneratorState::Yielded(value)` as if by `Aggregate` rvalue. That value is then assigned to + /// Like `Return` terminators in coroutine bodies, this computes `value` and then a + /// `CoroutineState::Yielded(value)` as if by `Aggregate` rvalue. That value is then assigned to /// the return place of the function calling this one, and execution continues in the calling /// function. When next invoked with the same first argument, execution of this function /// continues at the `resume` basic block, with the second argument written to the `resume_arg` - /// place. If the generator is dropped before then, the `drop` basic block is invoked. + /// place. If the coroutine is dropped before then, the `drop` basic block is invoked. /// - /// Not permitted in bodies that are not generator bodies, or after generator lowering. + /// Not permitted in bodies that are not coroutine bodies, or after coroutine lowering. /// /// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`? Yield { @@ -722,21 +726,21 @@ resume: BasicBlock, /// The place to store the resume argument in. resume_arg: Place<'tcx>, - /// Cleanup to be done if the generator is dropped at this suspend point. + /// Cleanup to be done if the coroutine is dropped at this suspend point. drop: Option, }, - /// Indicates the end of dropping a generator. + /// Indicates the end of dropping a coroutine. /// - /// Semantically just a `return` (from the generators drop glue). Only permitted in the same situations + /// Semantically just a `return` (from the coroutines drop glue). Only permitted in the same situations /// as `yield`. /// - /// **Needs clarification**: Is that even correct? 
The generator drop code is always confusing + /// **Needs clarification**: Is that even correct? The coroutine drop code is always confusing /// to me, because it's not even really in the current body. /// /// **Needs clarification**: Are there type system constraints on these terminators? Should /// there be a "block type" like `cleanup` blocks for them? - GeneratorDrop, + CoroutineDrop, /// A block where control flow only ever takes one real path, but borrowck needs to be more /// conservative. @@ -811,7 +815,7 @@ TerminatorKind::Call { .. } => "Call", TerminatorKind::Assert { .. } => "Assert", TerminatorKind::Yield { .. } => "Yield", - TerminatorKind::GeneratorDrop => "GeneratorDrop", + TerminatorKind::CoroutineDrop => "CoroutineDrop", TerminatorKind::FalseEdge { .. } => "FalseEdge", TerminatorKind::FalseUnwind { .. } => "FalseUnwind", TerminatorKind::InlineAsm { .. } => "InlineAsm", @@ -879,8 +883,8 @@ OverflowNeg(O), DivisionByZero(O), RemainderByZero(O), - ResumedAfterReturn(GeneratorKind), - ResumedAfterPanic(GeneratorKind), + ResumedAfterReturn(CoroutineKind), + ResumedAfterPanic(CoroutineKind), MisalignedPointerDereference { required: O, found: O }, } @@ -957,8 +961,8 @@ /// was unsized and so had metadata associated with it, then the metadata is retained if the /// field is unsized and thrown out if it is sized. /// -/// These projections are only legal for tuples, ADTs, closures, and generators. If the ADT or -/// generator has more than one variant, the parent place's variant index must be set, indicating +/// These projections are only legal for tuples, ADTs, closures, and coroutines. If the ADT or +/// coroutine has more than one variant, the parent place's variant index must be set, indicating /// which variant is being used. If it has just one variant, the variant index may or may not be /// included - the single possible variant is inferred if it is not included. /// - [`OpaqueCast`](ProjectionElem::OpaqueCast): This projection changes the place's type to the @@ -986,18 +990,15 @@ /// pointee's type. The resulting address is the address that was stored in the pointer. If the /// pointee type is unsized, the pointer additionally stored the value of the metadata. /// -/// Computing a place may cause UB. One possibility is that the pointer used for a `Deref` may not -/// be suitably aligned. Another possibility is that the place is not in bounds, meaning it does not -/// point to an actual allocation. -/// -/// However, if this is actually UB and when the UB kicks in is undecided. This is being discussed -/// in [UCG#319]. The options include that every place must obey those rules, that only some places -/// must obey them, or that places impose no rules of their own. -/// -/// [UCG#319]: https://github.com/rust-lang/unsafe-code-guidelines/issues/319 -/// -/// Rust currently requires that every place obey those two rules. This is checked by MIRI and taken -/// advantage of by codegen (via `gep inbounds`). That is possibly subject to change. +/// The "validity invariant" of places is the same as that of raw pointers, meaning that e.g. +/// `*ptr` on a dangling or unaligned pointer is never UB. (Later doing a load/store on that place +/// or turning it into a reference can be UB though!) 
The only ways for a place computation can +/// cause UB are: +/// - On a `Deref` projection, we do an actual load of the inner place, with all the usual +/// consequences (the inner place must be based on an aligned pointer, it must point to allocated +/// memory, the aliasig model must allow reads, this must not be a data race). +/// - For the projections that perform pointer arithmetic, the offset must in-bounds of an +/// allocation (i.e., the preconditions of `ptr::offset` must be met). #[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, HashStable, TypeFoldable, TypeVisitable)] pub struct Place<'tcx> { pub local: Local, @@ -1067,7 +1068,7 @@ from_end: bool, }, - /// "Downcast" to a variant of an enum or a generator. + /// "Downcast" to a variant of an enum or a coroutine. /// /// The included Symbol is the name of the variant, used for printing MIR. Downcast(Option, VariantIdx), @@ -1277,8 +1278,8 @@ /// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo` /// has a destructor. /// - /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Generator`. After - /// generator lowering, `Generator` aggregate kinds are disallowed too. + /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After + /// coroutine lowering, `Coroutine` aggregate kinds are disallowed too. Aggregate(Box>, IndexVec>), /// Transmutes a `*mut u8` into shallow-initialized `Box`. @@ -1343,7 +1344,7 @@ Adt(DefId, VariantIdx, GenericArgsRef<'tcx>, Option, Option), Closure(DefId, GenericArgsRef<'tcx>), - Generator(DefId, GenericArgsRef<'tcx>, hir::Movability), + Coroutine(DefId, GenericArgsRef<'tcx>, hir::Movability), } #[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)] @@ -1353,7 +1354,7 @@ /// Returns the minimum alignment of a type AlignOf, /// Returns the offset of a field - OffsetOf(&'tcx List), + OffsetOf(&'tcx List<(VariantIdx, FieldIdx)>), } #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/tcx.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/tcx.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/tcx.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/tcx.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,7 +11,7 @@ #[derive(Copy, Clone, Debug, TypeFoldable, TypeVisitable)] pub struct PlaceTy<'tcx> { pub ty: Ty<'tcx>, - /// Downcast to a particular variant of an enum or a generator, if included. + /// Downcast to a particular variant of an enum or a coroutine, if included. 
pub variant_index: Option, } @@ -205,8 +205,8 @@ } AggregateKind::Adt(did, _, args, _, _) => tcx.type_of(did).instantiate(tcx, args), AggregateKind::Closure(did, args) => Ty::new_closure(tcx, did, args), - AggregateKind::Generator(did, args, movability) => { - Ty::new_generator(tcx, did, args, movability) + AggregateKind::Coroutine(did, args, movability) => { + Ty::new_coroutine(tcx, did, args, movability) } }, Rvalue::ShallowInitBox(_, ty) => Ty::new_box(tcx, ty), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/terminator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/terminator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/terminator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/terminator.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,12 +3,10 @@ use smallvec::SmallVec; use super::{BasicBlock, InlineAsmOperand, Operand, SourceInfo, TerminatorKind, UnwindAction}; -pub use rustc_ast::Mutability; use rustc_macros::HashStable; use std::iter; use std::slice; -pub use super::query::*; use super::*; impl SwitchTargets { @@ -28,6 +26,15 @@ Self { values: smallvec![value], targets: smallvec![then, else_] } } + /// Inverse of `SwitchTargets::static_if`. + pub fn as_static_if(&self) -> Option<(u128, BasicBlock, BasicBlock)> { + if let &[value] = &self.values[..] && let &[then, else_] = &self.targets[..] { + Some((value, then, else_)) + } else { + None + } + } + /// Returns the fallback target that is jumped to when none of the values match the operand. pub fn otherwise(&self) -> BasicBlock { *self.targets.last().unwrap() @@ -139,10 +146,17 @@ Overflow(op, _, _) => bug!("{:?} cannot overflow", op), DivisionByZero(_) => "attempt to divide by zero", RemainderByZero(_) => "attempt to calculate the remainder with a divisor of zero", - ResumedAfterReturn(GeneratorKind::Gen) => "generator resumed after completion", - ResumedAfterReturn(GeneratorKind::Async(_)) => "`async fn` resumed after completion", - ResumedAfterPanic(GeneratorKind::Gen) => "generator resumed after panicking", - ResumedAfterPanic(GeneratorKind::Async(_)) => "`async fn` resumed after panicking", + ResumedAfterReturn(CoroutineKind::Coroutine) => "coroutine resumed after completion", + ResumedAfterReturn(CoroutineKind::Async(_)) => "`async fn` resumed after completion", + ResumedAfterReturn(CoroutineKind::Gen(_)) => { + "`gen fn` should just keep returning `None` after completion" + } + ResumedAfterPanic(CoroutineKind::Coroutine) => "coroutine resumed after panicking", + ResumedAfterPanic(CoroutineKind::Async(_)) => "`async fn` resumed after panicking", + ResumedAfterPanic(CoroutineKind::Gen(_)) => { + "`gen fn` should just keep returning `None` after panicking" + } + BoundsCheck { .. } | MisalignedPointerDereference { .. 
} => { bug!("Unexpected AssertKind") } @@ -228,10 +242,18 @@ OverflowNeg(_) => middle_assert_overflow_neg, DivisionByZero(_) => middle_assert_divide_by_zero, RemainderByZero(_) => middle_assert_remainder_by_zero, - ResumedAfterReturn(GeneratorKind::Async(_)) => middle_assert_async_resume_after_return, - ResumedAfterReturn(GeneratorKind::Gen) => middle_assert_generator_resume_after_return, - ResumedAfterPanic(GeneratorKind::Async(_)) => middle_assert_async_resume_after_panic, - ResumedAfterPanic(GeneratorKind::Gen) => middle_assert_generator_resume_after_panic, + ResumedAfterReturn(CoroutineKind::Async(_)) => middle_assert_async_resume_after_return, + ResumedAfterReturn(CoroutineKind::Gen(_)) => { + bug!("gen blocks can be resumed after they return and will keep returning `None`") + } + ResumedAfterReturn(CoroutineKind::Coroutine) => { + middle_assert_coroutine_resume_after_return + } + ResumedAfterPanic(CoroutineKind::Async(_)) => middle_assert_async_resume_after_panic, + ResumedAfterPanic(CoroutineKind::Gen(_)) => middle_assert_gen_resume_after_panic, + ResumedAfterPanic(CoroutineKind::Coroutine) => { + middle_assert_coroutine_resume_after_panic + } MisalignedPointerDereference { .. } => middle_assert_misaligned_ptr_deref, } @@ -331,7 +353,7 @@ } UnwindResume | UnwindTerminate(_) - | GeneratorDrop + | CoroutineDrop | Return | Unreachable | Call { target: None, unwind: _, .. } @@ -373,7 +395,7 @@ } UnwindResume | UnwindTerminate(_) - | GeneratorDrop + | CoroutineDrop | Return | Unreachable | Call { target: None, unwind: _, .. } @@ -392,7 +414,7 @@ | TerminatorKind::UnwindTerminate(_) | TerminatorKind::Return | TerminatorKind::Unreachable - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::Yield { .. } | TerminatorKind::SwitchInt { .. } | TerminatorKind::FalseEdge { .. } => None, @@ -411,7 +433,7 @@ | TerminatorKind::UnwindTerminate(_) | TerminatorKind::Return | TerminatorKind::Unreachable - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::Yield { .. } | TerminatorKind::SwitchInt { .. } | TerminatorKind::FalseEdge { .. } => None, @@ -493,7 +515,7 @@ pub fn edges(&self) -> TerminatorEdges<'_, 'tcx> { use TerminatorKind::*; match *self { - Return | UnwindResume | UnwindTerminate(_) | GeneratorDrop | Unreachable => { + Return | UnwindResume | UnwindTerminate(_) | CoroutineDrop | Unreachable => { TerminatorEdges::None } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/type_foldable.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/type_foldable.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/type_foldable.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/type_foldable.rs 2023-12-21 16:55:28.000000000 +0000 @@ -19,8 +19,8 @@ hir::Movability, BasicBlock, SwitchTargets, - GeneratorKind, - GeneratorSavedLocal, + CoroutineKind, + CoroutineSavedLocal, } TrivialTypeTraversalImpls! 
{ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/visit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/visit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/visit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/mir/visit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -184,6 +184,8 @@ visit_place_fns!($($mutability)?); + /// This is called for every constant in the MIR body and every `required_consts` + /// (i.e., including consts that have been dead-code-eliminated). fn visit_constant( &mut self, constant: & $($mutability)? ConstOperand<'tcx>, @@ -471,7 +473,7 @@ TerminatorKind::Goto { .. } | TerminatorKind::UnwindResume | TerminatorKind::UnwindTerminate(_) | - TerminatorKind::GeneratorDrop | + TerminatorKind::CoroutineDrop | TerminatorKind::Unreachable | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } => {} @@ -733,12 +735,12 @@ ) => { self.visit_args(closure_args, location); } - AggregateKind::Generator( + AggregateKind::Coroutine( _, - generator_args, + coroutine_args, _movability, ) => { - self.visit_args(generator_args, location); + self.visit_args(coroutine_args, location); } } @@ -815,7 +817,6 @@ ty, user_ty, source_info, - internal: _, local_info: _, } = local_decl; @@ -991,7 +992,7 @@ macro_rules! super_body { ($self:ident, $body:ident, $($mutability:ident, $invalidate:tt)?) => { let span = $body.span; - if let Some(gen) = &$($mutability)? $body.generator { + if let Some(gen) = &$($mutability)? $body.coroutine { if let Some(yield_ty) = $(& $mutability)? gen.yield_ty { $self.visit_ty( yield_ty, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/erase.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/erase.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/erase.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/erase.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,7 +2,8 @@ use crate::query::CyclePlaceholder; use crate::traits; use crate::ty::{self, Ty}; -use std::mem::{size_of, transmute_copy, MaybeUninit}; +use std::intrinsics::transmute_unchecked; +use std::mem::{size_of, MaybeUninit}; #[derive(Copy, Clone)] pub struct Erased { @@ -29,8 +30,15 @@ }; Erased::<::Result> { + // `transmute_unchecked` is needed here because it does not have `transmute`'s size check + // (and thus allows to transmute between `T` and `MaybeUninit`) (we do the size + // check ourselves in the `const` block above). + // + // `transmute_copy` is also commonly used for this (and it would work here since + // `EraseType: Copy`), but `transmute_unchecked` better explains the intent. + // // SAFETY: It is safe to transmute to MaybeUninit for types with the same sizes. - data: unsafe { transmute_copy(&src) }, + data: unsafe { transmute_unchecked::>(src) }, } } @@ -38,22 +46,24 @@ #[inline(always)] pub fn restore(value: Erase) -> T { let value: Erased<::Result> = value; + // See comment in `erase` for why we use `transmute_unchecked`. + // // SAFETY: Due to the use of impl Trait in `Erase` the only way to safely create an instance // of `Erase` is to call `erase`, so we know that `value.data` is a valid instance of `T` of // the right size. 
- unsafe { transmute_copy(&value.data) } + unsafe { transmute_unchecked::, T>(value.data) } } impl EraseType for &'_ T { - type Result = [u8; size_of::<*const ()>()]; + type Result = [u8; size_of::<&'static ()>()]; } impl EraseType for &'_ [T] { - type Result = [u8; size_of::<*const [()]>()]; + type Result = [u8; size_of::<&'static [()]>()]; } impl EraseType for &'_ ty::List { - type Result = [u8; size_of::<*const ()>()]; + type Result = [u8; size_of::<&'static ty::List<()>>()]; } impl EraseType for &'_ rustc_index::IndexSlice { @@ -210,7 +220,7 @@ Option, Option, Option, - Option, + Option, Option, Option, Option, @@ -239,7 +249,7 @@ rustc_hir::def::DefKind, rustc_hir::Defaultness, rustc_hir::definitions::DefKey, - rustc_hir::GeneratorKind, + rustc_hir::CoroutineKind, rustc_hir::HirId, rustc_hir::IsAsync, rustc_hir::ItemLocalId, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/keys.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/keys.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/keys.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/keys.rs 2023-12-21 16:55:28.000000000 +0000 @@ -12,7 +12,6 @@ use rustc_query_system::query::{DefaultCacheSelector, SingleCacheSelector, VecCacheSelector}; use rustc_span::symbol::{Ident, Symbol}; use rustc_span::{Span, DUMMY_SP}; -use rustc_target::abi::FieldIdx; /// Placeholder for `CrateNum`'s "local" counterpart #[derive(Copy, Clone, Debug)] @@ -360,30 +359,6 @@ } } -impl<'tcx> Key for (ty::Const<'tcx>, FieldIdx) { - type CacheSelector = DefaultCacheSelector; - - fn default_span(&self, _: TyCtxt<'_>) -> Span { - DUMMY_SP - } -} - -impl<'tcx> Key for (mir::ConstValue<'tcx>, Ty<'tcx>) { - type CacheSelector = DefaultCacheSelector; - - fn default_span(&self, _: TyCtxt<'_>) -> Span { - DUMMY_SP - } -} - -impl<'tcx> Key for mir::ConstAlloc<'tcx> { - type CacheSelector = DefaultCacheSelector; - - fn default_span(&self, _: TyCtxt<'_>) -> Span { - DUMMY_SP - } -} - impl<'tcx> Key for ty::PolyTraitRef<'tcx> { type CacheSelector = DefaultCacheSelector; @@ -412,14 +387,6 @@ type CacheSelector = DefaultCacheSelector; fn default_span(&self, _: TyCtxt<'_>) -> Span { - DUMMY_SP - } -} - -impl<'tcx> Key for mir::Const<'tcx> { - type CacheSelector = DefaultCacheSelector; - - fn default_span(&self, _: TyCtxt<'_>) -> Span { DUMMY_SP } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -25,7 +25,9 @@ use crate::mir::interpret::{LitToConstError, LitToConstInput}; use crate::mir::mono::CodegenUnit; use crate::query::erase::{erase, restore, Erase}; -use crate::query::plumbing::{query_ensure, query_get_at, CyclePlaceholder, DynamicQuery}; +use crate::query::plumbing::{ + query_ensure, query_ensure_error_guaranteed, query_get_at, CyclePlaceholder, DynamicQuery, +}; use crate::thir; use crate::traits::query::{ CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal, @@ -249,6 +251,7 @@ "computing type of opaque `{path}`", path = tcx.def_path_str(key), } + cycle_stash } query type_alias_is_lazy(key: DefId) -> bool { @@ -339,7 +342,7 @@ query opaque_types_defined_by( key: LocalDefId - ) -> &'tcx [LocalDefId] { + ) -> &'tcx 
ty::List { desc { |tcx| "computing the opaque types defined by `{}`", tcx.def_path_str(key.to_def_id()) @@ -541,28 +544,28 @@ } } - /// Returns names of captured upvars for closures and generators. + /// Returns names of captured upvars for closures and coroutines. /// /// Here are some examples: /// - `name__field1__field2` when the upvar is captured by value. /// - `_ref__name__field` when the upvar is captured by reference. /// - /// For generators this only contains upvars that are shared by all states. + /// For coroutines this only contains upvars that are shared by all states. query closure_saved_names_of_captured_variables(def_id: DefId) -> &'tcx IndexVec { arena_cache desc { |tcx| "computing debuginfo for closure `{}`", tcx.def_path_str(def_id) } separate_provide_extern } - query mir_generator_witnesses(key: DefId) -> &'tcx Option> { + query mir_coroutine_witnesses(key: DefId) -> &'tcx Option> { arena_cache - desc { |tcx| "generator witness types for `{}`", tcx.def_path_str(key) } + desc { |tcx| "coroutine witness types for `{}`", tcx.def_path_str(key) } cache_on_disk_if { key.is_local() } separate_provide_extern } - query check_generator_obligations(key: LocalDefId) { - desc { |tcx| "verify auto trait bounds for generator interior type `{}`", tcx.def_path_str(key) } + query check_coroutine_obligations(key: LocalDefId) { + desc { |tcx| "verify auto trait bounds for coroutine interior type `{}`", tcx.def_path_str(key) } } /// MIR after our optimization passes have run. This is MIR that is ready @@ -573,22 +576,12 @@ separate_provide_extern } - /// Returns coverage summary info for a function, after executing the `InstrumentCoverage` - /// MIR pass (assuming the -Cinstrument-coverage option is enabled). - query coverageinfo(key: ty::InstanceDef<'tcx>) -> &'tcx mir::CoverageInfo { - desc { |tcx| "retrieving coverage info from MIR for `{}`", tcx.def_path_str(key.def_id()) } - arena_cache - } - - /// Returns the `CodeRegions` for a function that has instrumented coverage, in case the - /// function was optimized out before codegen, and before being added to the Coverage Map. - query covered_code_regions(key: DefId) -> &'tcx Vec<&'tcx mir::coverage::CodeRegion> { - desc { - |tcx| "retrieving the covered `CodeRegion`s, if instrumented, for `{}`", - tcx.def_path_str(key) - } + /// Summarizes coverage IDs inserted by the `InstrumentCoverage` MIR pass + /// (for compiler option `-Cinstrument-coverage`), after MIR optimizations + /// have had a chance to potentially remove some of them. + query coverage_ids_info(key: ty::InstanceDef<'tcx>) -> &'tcx mir::CoverageIdsInfo { + desc { |tcx| "retrieving coverage IDs info from MIR for `{}`", tcx.def_path_str(key.def_id()) } arena_cache - cache_on_disk_if { key.is_local() } } /// The `DefId` is the `DefId` of the containing MIR body. Promoteds do not have their own @@ -753,9 +746,9 @@ desc { |tcx| "checking if item is promotable: `{}`", tcx.def_path_str(key) } } - /// Returns `Some(generator_kind)` if the node pointed to by `def_id` is a generator. - query generator_kind(def_id: DefId) -> Option { - desc { |tcx| "looking up generator kind of `{}`", tcx.def_path_str(def_id) } + /// Returns `Some(coroutine_kind)` if the node pointed to by `def_id` is a coroutine. 
+ query coroutine_kind(def_id: DefId) -> Option { + desc { |tcx| "looking up coroutine kind of `{}`", tcx.def_path_str(def_id) } separate_provide_extern } @@ -975,8 +968,9 @@ desc { |tcx| "checking that impls are well-formed in {}", describe_as_module(key, tcx) } } - query check_mod_type_wf(key: LocalModDefId) -> () { + query check_mod_type_wf(key: LocalModDefId) -> Result<(), ErrorGuaranteed> { desc { |tcx| "checking that types are well-formed in {}", describe_as_module(key, tcx) } + ensure_forwards_result_if_red } query collect_mod_item_types(key: LocalModDefId) -> () { @@ -1107,10 +1101,6 @@ desc { "destructuring type level constant"} } - query const_caller_location(key: (rustc_span::Symbol, u32, u32)) -> mir::ConstValue<'tcx> { - desc { "getting a &core::panic::Location referring to a span" } - } - // FIXME get rid of this with valtrees query lit_to_const( key: LitToConstInput<'tcx> @@ -1509,8 +1499,9 @@ feedable } - query check_well_formed(key: hir::OwnerId) -> () { + query check_well_formed(key: hir::OwnerId) -> Result<(), ErrorGuaranteed> { desc { |tcx| "checking that `{}` is well-formed", tcx.def_path_str(key) } + ensure_forwards_result_if_red } // The `DefId`s of all non-generic functions and statics in the given crate @@ -1892,12 +1883,6 @@ desc { |tcx| "determining whether `{}` needs codegen", tcx.def_path_str(def_id) } } - /// All items participating in code generation together with items inlined into them. - query codegened_and_inlined_items(_: ()) -> &'tcx DefIdSet { - eval_always - desc { "collecting codegened and inlined items" } - } - query codegen_unit(sym: Symbol) -> &'tcx CodegenUnit<'tcx> { desc { "getting codegen unit `{sym}`" } } @@ -2202,6 +2187,11 @@ query generics_require_sized_self(def_id: DefId) -> bool { desc { "check whether the item has a `where Self: Sized` bound" } } + + query cross_crate_inlinable(def_id: DefId) -> bool { + desc { "whether the item should be made inlinable across crates" } + separate_provide_extern + } } rustc_query_append! { define_callbacks! } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/plumbing.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/plumbing.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/plumbing.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/query/plumbing.rs 2023-12-21 16:55:28.000000000 +0000 @@ -173,6 +173,45 @@ } } +#[inline] +pub fn query_ensure_error_guaranteed<'tcx, Cache>( + tcx: TyCtxt<'tcx>, + execute_query: fn(TyCtxt<'tcx>, Span, Cache::Key, QueryMode) -> Option, + query_cache: &Cache, + key: Cache::Key, + check_cache: bool, +) -> Result<(), ErrorGuaranteed> +where + Cache: QueryCache>>, +{ + let key = key.into_query_param(); + if let Some(res) = try_get_cached(tcx, query_cache, &key) { + super::erase::restore(res) + } else { + execute_query(tcx, DUMMY_SP, key, QueryMode::Ensure { check_cache }) + .map(super::erase::restore) + // Either we actually executed the query, which means we got a full `Result`, + // or we can just assume the query succeeded, because it was green in the + // incremental cache. If it is green, that means that the previous compilation + // that wrote to the incremental cache compiles successfully. That is only + // possible if the cache entry was `Ok(())`, so we emit that here, without + // actually encoding the `Result` in the cache or loading it from there. + .unwrap_or(Ok(())) + } +} + +macro_rules! 
query_ensure { + ([]$($args:tt)*) => { + query_ensure($($args)*) + }; + ([(ensure_forwards_result_if_red) $($rest:tt)*]$($args:tt)*) => { + query_ensure_error_guaranteed($($args)*) + }; + ([$other:tt $($modifiers:tt)*]$($args:tt)*) => { + query_ensure!([$($modifiers)*]$($args)*) + }; +} + macro_rules! query_helper_param_ty { (DefId) => { impl IntoQueryParam }; (LocalDefId) => { impl IntoQueryParam }; @@ -220,6 +259,18 @@ }; } +macro_rules! ensure_result { + ([][$ty:ty]) => { + () + }; + ([(ensure_forwards_result_if_red) $($rest:tt)*][$ty:ty]) => { + Result<(), ErrorGuaranteed> + }; + ([$other:tt $($modifiers:tt)*][$($args:tt)*]) => { + ensure_result!([$($modifiers)*][$($args)*]) + }; +} + macro_rules! separate_provide_extern_default { ([][$name:ident]) => { () @@ -343,14 +394,15 @@ impl<'tcx> TyCtxtEnsure<'tcx> { $($(#[$attr])* #[inline(always)] - pub fn $name(self, key: query_helper_param_ty!($($K)*)) { - query_ensure( + pub fn $name(self, key: query_helper_param_ty!($($K)*)) -> ensure_result!([$($modifiers)*][$V]) { + query_ensure!( + [$($modifiers)*] self.tcx, self.tcx.query_system.fns.engine.$name, &self.tcx.query_system.caches.$name, key.into_query_param(), false, - ); + ) })* } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/thir/visit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/thir/visit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/thir/visit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/thir/visit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -66,8 +66,9 @@ Use { source } => visitor.visit_expr(&visitor.thir()[source]), NeverToAny { source } => visitor.visit_expr(&visitor.thir()[source]), PointerCoercion { source, cast: _ } => visitor.visit_expr(&visitor.thir()[source]), - Let { expr, .. } => { + Let { expr, ref pat } => { visitor.visit_expr(&visitor.thir()[expr]); + visitor.visit_pat(pat); } Loop { body } => visitor.visit_expr(&visitor.thir()[body]), Match { scrutinee, ref arms, .. } => { @@ -226,23 +227,24 @@ is_primary: _, name: _, } => visitor.visit_pat(&subpattern), - Binding { .. } | Wild => {} + Binding { .. 
} | Wild | Error(_) => {} Variant { subpatterns, adt_def: _, args: _, variant_index: _ } | Leaf { subpatterns } => { for subpattern in subpatterns { visitor.visit_pat(&subpattern.pattern); } } Constant { value: _ } => {} + InlineConstant { def: _, subpattern } => visitor.visit_pat(subpattern), Range(_) => {} Slice { prefix, slice, suffix } | Array { prefix, slice, suffix } => { for subpattern in prefix.iter() { - visitor.visit_pat(&subpattern); + visitor.visit_pat(subpattern); } if let Some(pat) = slice { - visitor.visit_pat(&pat); + visitor.visit_pat(pat); } for subpattern in suffix.iter() { - visitor.visit_pat(&subpattern); + visitor.visit_pat(subpattern); } } Or { pats } => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/thir.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/thir.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/thir.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/thir.rs 2023-12-21 16:55:28.000000000 +0000 @@ -16,16 +16,19 @@ use rustc_index::newtype_index; use rustc_index::IndexVec; use rustc_middle::middle::region; -use rustc_middle::mir::interpret::AllocId; +use rustc_middle::mir::interpret::{AllocId, Scalar}; use rustc_middle::mir::{self, BinOp, BorrowKind, FakeReadCause, Mutability, UnOp}; use rustc_middle::ty::adjustment::PointerCoercion; -use rustc_middle::ty::GenericArgsRef; -use rustc_middle::ty::{self, AdtDef, FnSig, List, Ty, UpvarArgs}; -use rustc_middle::ty::{CanonicalUserType, CanonicalUserTypeAnnotation}; +use rustc_middle::ty::layout::IntegerExt; +use rustc_middle::ty::{ + self, AdtDef, CanonicalUserType, CanonicalUserTypeAnnotation, FnSig, GenericArgsRef, List, Ty, + TyCtxt, UpvarArgs, +}; use rustc_span::def_id::LocalDefId; -use rustc_span::{sym, Span, Symbol, DUMMY_SP}; -use rustc_target::abi::{FieldIdx, VariantIdx}; +use rustc_span::{sym, ErrorGuaranteed, Span, Symbol, DUMMY_SP}; +use rustc_target::abi::{FieldIdx, Integer, Size, VariantIdx}; use rustc_target::asm::InlineAsmRegOrRegClass; +use std::cmp::Ordering; use std::fmt; use std::ops::Index; @@ -381,9 +384,9 @@ VarRef { id: LocalVarId, }, - /// Used to represent upvars mentioned in a closure/generator + /// Used to represent upvars mentioned in a closure/coroutine UpvarRef { - /// DefId of the closure/generator + /// DefId of the closure/coroutine closure_def_id: DefId, /// HirId of the root variable @@ -489,7 +492,7 @@ /// Field offset (`offset_of!`) OffsetOf { container: Ty<'tcx>, - fields: &'tcx List, + fields: &'tcx List<(VariantIdx, FieldIdx)>, }, /// An expression taking a reference to a thread local. ThreadLocalRef(DefId), @@ -581,13 +584,13 @@ ByRef(BorrowKind), } -#[derive(Clone, Debug, HashStable)] +#[derive(Clone, Debug, HashStable, TypeVisitable)] pub struct FieldPat<'tcx> { pub field: FieldIdx, pub pattern: Box>, } -#[derive(Clone, Debug, HashStable)] +#[derive(Clone, Debug, HashStable, TypeVisitable)] pub struct Pat<'tcx> { pub ty: Ty<'tcx>, pub span: Span, @@ -632,10 +635,11 @@ use PatKind::*; match &self.kind { - Wild | Range(..) | Binding { subpattern: None, .. } | Constant { .. } => {} + Wild | Range(..) | Binding { subpattern: None, .. } | Constant { .. } | Error(_) => {} AscribeUserType { subpattern, .. } | Binding { subpattern: Some(subpattern), .. } - | Deref { subpattern } => subpattern.walk_(it), + | Deref { subpattern } + | InlineConstant { subpattern, .. } => subpattern.walk_(it), Leaf { subpatterns } | Variant { subpatterns, .. 
} => { subpatterns.iter().for_each(|field| field.pattern.walk_(it)) } @@ -647,6 +651,21 @@ } } + /// Whether the pattern has a `PatKind::Error` nested within. + pub fn pat_error_reported(&self) -> Result<(), ErrorGuaranteed> { + let mut error = None; + self.walk(|pat| { + if let PatKind::Error(e) = pat.kind && error.is_none() { + error = Some(e); + } + error.is_none() + }); + match error { + None => Ok(()), + Some(e) => Err(e), + } + } + /// Walk the pattern in left-to-right order. /// /// If you always want to recurse, prefer this method over `walk`. @@ -664,7 +683,7 @@ } } -#[derive(Clone, Debug, HashStable)] +#[derive(Clone, Debug, HashStable, TypeVisitable)] pub struct Ascription<'tcx> { pub annotation: CanonicalUserTypeAnnotation<'tcx>, /// Variance to use when relating the `user_ty` to the **type of the value being @@ -688,7 +707,7 @@ pub variance: ty::Variance, } -#[derive(Clone, Debug, HashStable)] +#[derive(Clone, Debug, HashStable, TypeVisitable)] pub enum PatKind<'tcx> { /// A wildcard pattern: `_`. Wild, @@ -702,7 +721,9 @@ Binding { mutability: Mutability, name: Symbol, + #[type_visitable(ignore)] mode: BindingMode, + #[type_visitable(ignore)] var: LocalVarId, ty: Ty<'tcx>, subpattern: Option>>, @@ -746,6 +767,22 @@ value: mir::Const<'tcx>, }, + /// Inline constant found while lowering a pattern. + InlineConstant { + /// [LocalDefId] of the constant, we need this so that we have a + /// reference that can be used by unsafety checking to visit nested + /// unevaluated constants. + def: LocalDefId, + /// If the inline constant is used in a range pattern, this subpattern + /// represents the range (if both ends are inline constants, there will + /// be multiple InlineConstant wrappers). + /// + /// Otherwise, the actual pattern that the constant lowered to. As with + /// other constants, inline constants are matched structurally where + /// possible. + subpattern: Box>, + }, + Range(Box>), /// Matches against a slice, checking the length and extracting elements. @@ -769,13 +806,249 @@ Or { pats: Box<[Box>]>, }, + + /// An error has been encountered during lowering. We probably shouldn't report more lints + /// related to this pattern. + Error(ErrorGuaranteed), } -#[derive(Clone, Debug, PartialEq, HashStable)] +/// A range pattern. +/// The boundaries must be of the same type and that type must be numeric. +#[derive(Clone, Debug, PartialEq, HashStable, TypeVisitable)] pub struct PatRange<'tcx> { - pub lo: mir::Const<'tcx>, - pub hi: mir::Const<'tcx>, + pub lo: PatRangeBoundary<'tcx>, + pub hi: PatRangeBoundary<'tcx>, + #[type_visitable(ignore)] pub end: RangeEnd, + pub ty: Ty<'tcx>, +} + +impl<'tcx> PatRange<'tcx> { + /// Whether this range covers the full extent of possible values (best-effort, we ignore floats). + #[inline] + pub fn is_full_range(&self, tcx: TyCtxt<'tcx>) -> Option { + let (min, max, size, bias) = match *self.ty.kind() { + ty::Char => (0, std::char::MAX as u128, Size::from_bits(32), 0), + ty::Int(ity) => { + let size = Integer::from_int_ty(&tcx, ity).size(); + let max = size.truncate(u128::MAX); + let bias = 1u128 << (size.bits() - 1); + (0, max, size, bias) + } + ty::Uint(uty) => { + let size = Integer::from_uint_ty(&tcx, uty).size(); + let max = size.unsigned_int_max(); + (0, max, size, 0) + } + _ => return None, + }; + + // We want to compare ranges numerically, but the order of the bitwise representation of + // signed integers does not match their numeric order. 
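For context, the bias correction applied in the code below can be demonstrated in isolation: XOR-ing with the sign bit maps signed values onto unsigned bit patterns whose ordering matches the numeric ordering, so range endpoints can be compared as raw bits. A minimal stand-alone sketch for `i8`:

    fn main() {
        // For i8 the bias is 1 << 7: XOR-ing it flips the sign bit, mapping
        // -128..=127 monotonically onto 0..=255.
        let bias: u8 = 1 << 7;
        let as_biased_bits = |v: i8| (v as u8) ^ bias;

        let mut vals: Vec<i8> = vec![-128, -1, 0, 1, 127];
        let numeric = vals.clone();
        // Sorting by the biased bit pattern reproduces the numeric order.
        vals.sort_by_key(|&v| as_biased_bits(v));
        assert_eq!(vals, numeric);
        assert_eq!(as_biased_bits(i8::MIN), 0);    // minimum maps to all-zero bits
        assert_eq!(as_biased_bits(i8::MAX), 255);  // maximum maps to all-one bits
    }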
Thus, to correct the ordering, we + // need to shift the range of signed integers to correct the comparison. This is achieved by + // XORing with a bias (see pattern/deconstruct_pat.rs for another pertinent example of this + // pattern). + // + // Also, for performance, it's important to only do the second `try_to_bits` if necessary. + let lo_is_min = match self.lo { + PatRangeBoundary::NegInfinity => true, + PatRangeBoundary::Finite(value) => { + let lo = value.try_to_bits(size).unwrap() ^ bias; + lo <= min + } + PatRangeBoundary::PosInfinity => false, + }; + if lo_is_min { + let hi_is_max = match self.hi { + PatRangeBoundary::NegInfinity => false, + PatRangeBoundary::Finite(value) => { + let hi = value.try_to_bits(size).unwrap() ^ bias; + hi > max || hi == max && self.end == RangeEnd::Included + } + PatRangeBoundary::PosInfinity => true, + }; + if hi_is_max { + return Some(true); + } + } + Some(false) + } + + #[inline] + pub fn contains( + &self, + value: mir::Const<'tcx>, + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + ) -> Option { + use Ordering::*; + debug_assert_eq!(self.ty, value.ty()); + let ty = self.ty; + let value = PatRangeBoundary::Finite(value); + // For performance, it's important to only do the second comparison if necessary. + Some( + match self.lo.compare_with(value, ty, tcx, param_env)? { + Less | Equal => true, + Greater => false, + } && match value.compare_with(self.hi, ty, tcx, param_env)? { + Less => true, + Equal => self.end == RangeEnd::Included, + Greater => false, + }, + ) + } + + #[inline] + pub fn overlaps( + &self, + other: &Self, + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + ) -> Option { + use Ordering::*; + debug_assert_eq!(self.ty, other.ty); + // For performance, it's important to only do the second comparison if necessary. + Some( + match other.lo.compare_with(self.hi, self.ty, tcx, param_env)? { + Less => true, + Equal => self.end == RangeEnd::Included, + Greater => false, + } && match self.lo.compare_with(other.hi, self.ty, tcx, param_env)? { + Less => true, + Equal => other.end == RangeEnd::Included, + Greater => false, + }, + ) + } +} + +impl<'tcx> fmt::Display for PatRange<'tcx> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let PatRangeBoundary::Finite(value) = &self.lo { + write!(f, "{value}")?; + } + if let PatRangeBoundary::Finite(value) = &self.hi { + write!(f, "{}", self.end)?; + write!(f, "{value}")?; + } else { + // `0..` is parsed as an inclusive range, we must display it correctly. + write!(f, "..")?; + } + Ok(()) + } +} + +/// A (possibly open) boundary of a range pattern. +/// If present, the const must be of a numeric type. +#[derive(Copy, Clone, Debug, PartialEq, HashStable, TypeVisitable)] +pub enum PatRangeBoundary<'tcx> { + Finite(mir::Const<'tcx>), + NegInfinity, + PosInfinity, +} + +impl<'tcx> PatRangeBoundary<'tcx> { + #[inline] + pub fn is_finite(self) -> bool { + matches!(self, Self::Finite(..)) + } + #[inline] + pub fn as_finite(self) -> Option> { + match self { + Self::Finite(value) => Some(value), + Self::NegInfinity | Self::PosInfinity => None, + } + } + #[inline] + pub fn to_const(self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> mir::Const<'tcx> { + match self { + Self::Finite(value) => value, + Self::NegInfinity => { + // Unwrap is ok because the type is known to be numeric. + let c = ty.numeric_min_val(tcx).unwrap(); + mir::Const::from_ty_const(c, tcx) + } + Self::PosInfinity => { + // Unwrap is ok because the type is known to be numeric. 
+ let c = ty.numeric_max_val(tcx).unwrap(); + mir::Const::from_ty_const(c, tcx) + } + } + } + pub fn eval_bits(self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> u128 { + match self { + Self::Finite(value) => value.eval_bits(tcx, param_env), + Self::NegInfinity => { + // Unwrap is ok because the type is known to be numeric. + ty.numeric_min_and_max_as_bits(tcx).unwrap().0 + } + Self::PosInfinity => { + // Unwrap is ok because the type is known to be numeric. + ty.numeric_min_and_max_as_bits(tcx).unwrap().1 + } + } + } + + #[instrument(skip(tcx, param_env), level = "debug", ret)] + pub fn compare_with( + self, + other: Self, + ty: Ty<'tcx>, + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + ) -> Option { + use PatRangeBoundary::*; + match (self, other) { + // When comparing with infinities, we must remember that `0u8..` and `0u8..=255` + // describe the same range. These two shortcuts are ok, but for the rest we must check + // bit values. + (PosInfinity, PosInfinity) => return Some(Ordering::Equal), + (NegInfinity, NegInfinity) => return Some(Ordering::Equal), + + // This code is hot when compiling matches with many ranges. So we + // special-case extraction of evaluated scalars for speed, for types where + // raw data comparisons are appropriate. E.g. `unicode-normalization` has + // many ranges such as '\u{037A}'..='\u{037F}', and chars can be compared + // in this way. + (Finite(mir::Const::Ty(a)), Finite(mir::Const::Ty(b))) + if matches!(ty.kind(), ty::Uint(_) | ty::Char) => + { + return Some(a.kind().cmp(&b.kind())); + } + ( + Finite(mir::Const::Val(mir::ConstValue::Scalar(Scalar::Int(a)), _)), + Finite(mir::Const::Val(mir::ConstValue::Scalar(Scalar::Int(b)), _)), + ) if matches!(ty.kind(), ty::Uint(_) | ty::Char) => return Some(a.cmp(&b)), + _ => {} + } + + let a = self.eval_bits(ty, tcx, param_env); + let b = other.eval_bits(ty, tcx, param_env); + + match ty.kind() { + ty::Float(ty::FloatTy::F32) => { + use rustc_apfloat::Float; + let a = rustc_apfloat::ieee::Single::from_bits(a); + let b = rustc_apfloat::ieee::Single::from_bits(b); + a.partial_cmp(&b) + } + ty::Float(ty::FloatTy::F64) => { + use rustc_apfloat::Float; + let a = rustc_apfloat::ieee::Double::from_bits(a); + let b = rustc_apfloat::ieee::Double::from_bits(b); + a.partial_cmp(&b) + } + ty::Int(ity) => { + use rustc_middle::ty::layout::IntegerExt; + let size = rustc_target::abi::Integer::from_int_ty(&tcx, *ity).size(); + let a = size.sign_extend(a) as i128; + let b = size.sign_extend(b) as i128; + Some(a.cmp(&b)) + } + ty::Uint(_) | ty::Char => Some(a.cmp(&b)), + _ => bug!(), + } + } } impl<'tcx> fmt::Display for Pat<'tcx> { @@ -901,11 +1174,10 @@ write!(f, "{subpattern}") } PatKind::Constant { value } => write!(f, "{value}"), - PatKind::Range(box PatRange { lo, hi, end }) => { - write!(f, "{lo}")?; - write!(f, "{end}")?; - write!(f, "{hi}") + PatKind::InlineConstant { def: _, ref subpattern } => { + write!(f, "{} (from inline const)", subpattern) } + PatKind::Range(ref range) => write!(f, "{range}"), PatKind::Slice { ref prefix, ref slice, ref suffix } | PatKind::Array { ref prefix, ref slice, ref suffix } => { write!(f, "[")?; @@ -931,6 +1203,7 @@ } Ok(()) } + PatKind::Error(_) => write!(f, ""), } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -301,13 +301,18 @@ InlineAsmSized, /// Captured closure type must be `Sized`. SizedClosureCapture(LocalDefId), - /// Types live across generator yields must be `Sized`. - SizedGeneratorInterior(LocalDefId), + /// Types live across coroutine yields must be `Sized`. + SizedCoroutineInterior(LocalDefId), /// `[expr; N]` requires `type_of(expr): Copy`. RepeatElementCopy { - /// If element is a `const fn` we display a help message suggesting to move the - /// function call to a new `const` item while saying that `T` doesn't implement `Copy`. - is_const_fn: bool, + /// If element is a `const fn` or const ctor we display a help message suggesting + /// to move it to a new `const` item while saying that `T` doesn't implement `Copy`. + is_constable: IsConstable, + elt_type: Ty<'tcx>, + elt_span: Span, + /// Span of the statement/item in which the repeat expression occurs. We can use this to + /// place a `const` declaration before it + elt_stmt_span: Span, }, /// Types of fields (other than the last, except for packed structs) in a struct must be sized. @@ -455,6 +460,21 @@ TypeAlias(InternedObligationCauseCode<'tcx>, Span, DefId), } +/// Whether a value can be extracted into a const. +/// Used for diagnostics around array repeat expressions. +#[derive(Copy, Clone, Debug, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)] +pub enum IsConstable { + No, + /// Call to a const fn + Fn, + /// Use of a const ctor + Ctor, +} + +crate::TrivialTypeTraversalAndLiftImpls! { + IsConstable, +} + /// The 'location' at which we try to perform HIR-based wf checking. /// This information is used to obtain an `hir::Ty`, which /// we can walk in order to obtain precise spans for any @@ -541,6 +561,7 @@ pub prior_arm_ty: Ty<'tcx>, pub prior_arm_span: Span, pub scrut_span: Span, + pub scrut_hir_id: hir::HirId, pub source: hir::MatchSource, pub prior_arms: Vec, pub opt_suggest_box_span: Option, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/query.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/query.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/query.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/query.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,7 +10,7 @@ use crate::ty::error::TypeError; use crate::ty::GenericArg; use crate::ty::{self, Ty, TyCtxt}; -use rustc_span::source_map::Span; +use rustc_span::Span; pub mod type_op { use crate::ty::fold::TypeFoldable; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/select.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/select.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/select.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/select.rs 2023-12-21 16:55:28.000000000 +0000 @@ -136,14 +136,18 @@ is_const: bool, }, - /// Implementation of a `Generator` trait by one of the anonymous types - /// generated for a generator. - GeneratorCandidate, + /// Implementation of a `Coroutine` trait by one of the anonymous types + /// generated for a coroutine. + CoroutineCandidate, - /// Implementation of a `Future` trait by one of the generator types + /// Implementation of a `Future` trait by one of the coroutine types /// generated for an async construct. 
FutureCandidate, + /// Implementation of an `Iterator` trait by one of the generator types + /// generated for a gen construct. + IteratorCandidate, + /// Implementation of a `Fn`-family trait by one of the anonymous /// types generated for a fn pointer type (e.g., `fn(int) -> int`) FnPointerCandidate { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/cache.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/cache.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/cache.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/cache.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,4 @@ -use super::{CanonicalInput, QueryResult}; +use super::{inspect, CanonicalInput, QueryResult}; use crate::ty::TyCtxt; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::sync::Lock; @@ -14,8 +14,10 @@ map: Lock, CacheEntry<'tcx>>>, } +#[derive(PartialEq, Eq)] pub struct CacheData<'tcx> { pub result: QueryResult<'tcx>, + pub proof_tree: Option<&'tcx [inspect::GoalEvaluationStep<'tcx>]>, pub reached_depth: usize, pub encountered_overflow: bool, } @@ -24,22 +26,33 @@ /// Insert a final result into the global cache. pub fn insert( &self, + tcx: TyCtxt<'tcx>, key: CanonicalInput<'tcx>, + proof_tree: Option<&'tcx [inspect::GoalEvaluationStep<'tcx>]>, reached_depth: usize, - did_overflow: bool, + encountered_overflow: bool, cycle_participants: FxHashSet>, dep_node: DepNodeIndex, result: QueryResult<'tcx>, ) { let mut map = self.map.borrow_mut(); let entry = map.entry(key).or_default(); - let data = WithDepNode::new(dep_node, result); + let data = WithDepNode::new(dep_node, QueryData { result, proof_tree }); entry.cycle_participants.extend(cycle_participants); - if did_overflow { + if encountered_overflow { entry.with_overflow.insert(reached_depth, data); } else { entry.success = Some(Success { data, reached_depth }); } + + if cfg!(debug_assertions) { + drop(map); + if Some(CacheData { result, proof_tree, reached_depth, encountered_overflow }) + != self.get(tcx, key, |_| false, Limit(reached_depth)) + { + bug!("unable to retrieve inserted element from cache: {key:?}"); + } + } } /// Try to fetch a cached result, checking the recursion limit @@ -62,27 +75,39 @@ if let Some(ref success) = entry.success { if available_depth.value_within_limit(success.reached_depth) { + let QueryData { result, proof_tree } = success.data.get(tcx); return Some(CacheData { - result: success.data.get(tcx), + result, + proof_tree, reached_depth: success.reached_depth, encountered_overflow: false, }); } } - entry.with_overflow.get(&available_depth.0).map(|e| CacheData { - result: e.get(tcx), - reached_depth: available_depth.0, - encountered_overflow: true, + entry.with_overflow.get(&available_depth.0).map(|e| { + let QueryData { result, proof_tree } = e.get(tcx); + CacheData { + result, + proof_tree, + reached_depth: available_depth.0, + encountered_overflow: true, + } }) } } struct Success<'tcx> { - data: WithDepNode>, + data: WithDepNode>, reached_depth: usize, } +#[derive(Clone, Copy)] +pub struct QueryData<'tcx> { + pub result: QueryResult<'tcx>, + pub proof_tree: Option<&'tcx [inspect::GoalEvaluationStep<'tcx>]>, +} + /// The cache entry for a goal `CanonicalInput`. /// /// This contains results whose computation never hit the @@ -96,5 +121,5 @@ /// See the doc comment of `StackEntry::cycle_participants` for more /// details. 
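For context, the `cfg!(debug_assertions)` read-back performed in `insert` above is a general defensive pattern: re-fetch what was just cached and compare it, so key- or layout-mismatch bugs surface at insertion time rather than as silently wrong cache hits later. A minimal stand-alone sketch of the same idea (the `Cache` type below is invented for illustration):

    use std::collections::HashMap;

    struct Cache<K, V> {
        map: HashMap<K, V>,
    }

    impl<K: std::hash::Hash + Eq + Clone, V: PartialEq + Clone + std::fmt::Debug> Cache<K, V> {
        fn new() -> Self {
            Cache { map: HashMap::new() }
        }

        fn insert(&mut self, key: K, value: V) {
            self.map.insert(key.clone(), value.clone());
            if cfg!(debug_assertions) {
                // Verify the freshly inserted entry can actually be read back.
                assert_eq!(
                    self.get(&key).as_ref(),
                    Some(&value),
                    "unable to retrieve inserted element from cache"
                );
            }
        }

        fn get(&self, key: &K) -> Option<V> {
            self.map.get(key).cloned()
        }
    }

    fn main() {
        let mut cache = Cache::new();
        cache.insert("goal", 42);
        assert_eq!(cache.get(&"goal"), Some(42));
    }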
cycle_participants: FxHashSet>, - with_overflow: FxHashMap>>, + with_overflow: FxHashMap>>, } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/inspect/format.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/inspect/format.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/inspect/format.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/inspect/format.rs 2023-12-21 16:55:28.000000000 +0000 @@ -74,13 +74,10 @@ CanonicalGoalEvaluationKind::Overflow => { writeln!(self.f, "OVERFLOW: {:?}", eval.result) } - CanonicalGoalEvaluationKind::CacheHit(CacheHit::Global) => { - writeln!(self.f, "GLOBAL CACHE HIT: {:?}", eval.result) + CanonicalGoalEvaluationKind::CycleInStack => { + writeln!(self.f, "CYCLE IN STACK: {:?}", eval.result) } - CanonicalGoalEvaluationKind::CacheHit(CacheHit::Provisional) => { - writeln!(self.f, "PROVISIONAL CACHE HIT: {:?}", eval.result) - } - CanonicalGoalEvaluationKind::Uncached { revisions } => { + CanonicalGoalEvaluationKind::Evaluation { revisions } => { for (n, step) in revisions.iter().enumerate() { writeln!(self.f, "REVISION {n}")?; self.nested(|this| this.format_evaluation_step(step))?; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/inspect.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/inspect.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/inspect.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/solve/inspect.rs 2023-12-21 16:55:28.000000000 +0000 @@ -42,12 +42,6 @@ pub type CanonicalState<'tcx, T> = Canonical<'tcx, State<'tcx, T>>; -#[derive(Debug, Eq, PartialEq)] -pub enum CacheHit { - Provisional, - Global, -} - /// When evaluating the root goals we also store the /// original values for the `CanonicalVarValues` of the /// canonicalized goal. We use this to map any [CanonicalState] @@ -78,8 +72,8 @@ #[derive(Eq, PartialEq)] pub enum CanonicalGoalEvaluationKind<'tcx> { Overflow, - CacheHit(CacheHit), - Uncached { revisions: Vec> }, + CycleInStack, + Evaluation { revisions: &'tcx [GoalEvaluationStep<'tcx>] }, } impl Debug for GoalEvaluation<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/util.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/util.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/util.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/traits/util.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,9 +3,10 @@ use crate::ty::{PolyTraitRef, TyCtxt}; /// Given a PolyTraitRef, get the PolyTraitRefs of the trait's (transitive) supertraits. -/// -/// A simplified version of the same function at `rustc_infer::traits::util::supertraits`. -pub fn supertraits<'tcx>( +/// This only exists in `rustc_middle` because the more powerful elaborator depends on +/// `rustc_infer` for elaborating outlives bounds -- this should only be used for pretty +/// printing. 
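For context, collecting the "(transitive) supertraits" of a trait is essentially a reachability walk over the direct supertrait edges with de-duplication. A minimal stand-alone sketch over a toy trait graph (the `supertraits` helper below is invented for illustration and is not the rustc implementation):

    use std::collections::{HashMap, HashSet, VecDeque};

    /// Breadth-first collection of a trait and all of its transitive supertraits.
    fn supertraits(
        direct: &HashMap<&str, Vec<&'static str>>,
        root: &'static str,
    ) -> Vec<&'static str> {
        let mut seen = HashSet::from([root]);
        let mut queue = VecDeque::from([root]);
        let mut out = Vec::new();
        while let Some(tr) = queue.pop_front() {
            out.push(tr);
            for &sup in direct.get(tr).into_iter().flatten() {
                if seen.insert(sup) {
                    queue.push_back(sup);
                }
            }
        }
        out
    }

    fn main() {
        // Toy graph: trait Ord: Eq + PartialOrd, trait Eq: PartialEq,
        // trait PartialOrd: PartialEq.
        let direct = HashMap::from([
            ("Ord", vec!["Eq", "PartialOrd"]),
            ("Eq", vec!["PartialEq"]),
            ("PartialOrd", vec!["PartialEq"]),
        ]);
        // `PartialEq` is reported once even though it is reachable twice.
        assert_eq!(supertraits(&direct, "Ord"), ["Ord", "Eq", "PartialOrd", "PartialEq"]);
    }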
+pub fn supertraits_for_pretty_printing<'tcx>( tcx: TyCtxt<'tcx>, trait_ref: PolyTraitRef<'tcx>, ) -> impl Iterator> { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/_match.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/_match.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/_match.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/_match.rs 2023-12-21 16:55:28.000000000 +0000 @@ -18,20 +18,20 @@ /// Like subtyping, matching is really a binary relation, so the only /// important thing about the result is Ok/Err. Also, matching never /// affects any type variables or unification state. -pub struct Match<'tcx> { +pub struct MatchAgainstFreshVars<'tcx> { tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, } -impl<'tcx> Match<'tcx> { - pub fn new(tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Match<'tcx> { - Match { tcx, param_env } +impl<'tcx> MatchAgainstFreshVars<'tcx> { + pub fn new(tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> MatchAgainstFreshVars<'tcx> { + MatchAgainstFreshVars { tcx, param_env } } } -impl<'tcx> TypeRelation<'tcx> for Match<'tcx> { +impl<'tcx> TypeRelation<'tcx> for MatchAgainstFreshVars<'tcx> { fn tag(&self) -> &'static str { - "Match" + "MatchAgainstFreshVars" } fn tcx(&self) -> TyCtxt<'tcx> { self.tcx diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/assoc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/assoc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/assoc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/assoc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,5 +1,3 @@ -pub use self::AssocItemContainer::*; - use crate::ty; use rustc_data_structures::sorted_map::SortedIndexMultiMap; use rustc_hir as hir; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/codec.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/codec.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/codec.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/codec.rs 2023-12-21 16:55:28.000000000 +0000 @@ -19,7 +19,7 @@ use rustc_middle::ty::TyCtxt; use rustc_serialize::{Decodable, Encodable}; use rustc_span::Span; -use rustc_target::abi::FieldIdx; +use rustc_target::abi::{FieldIdx, VariantIdx}; pub use rustc_type_ir::{TyDecoder, TyEncoder}; use std::hash::Hash; use std::intrinsics; @@ -230,9 +230,9 @@ assert!(pos >= SHORTHAND_OFFSET); let shorthand = pos - SHORTHAND_OFFSET; - decoder.with_position(shorthand, ty::PredicateKind::decode) + decoder.with_position(shorthand, as Decodable>::decode) } else { - ty::PredicateKind::decode(decoder) + as Decodable>::decode(decoder) }, bound_vars, ) @@ -348,9 +348,10 @@ impl<'tcx, D: TyDecoder>> RefDecodable<'tcx, D> for [ty::ValTree<'tcx>] { fn decode(decoder: &mut D) -> &'tcx Self { - decoder.interner().arena.alloc_from_iter( - (0..decoder.read_usize()).map(|_| Decodable::decode(decoder)).collect::>(), - ) + decoder + .interner() + .arena + .alloc_from_iter((0..decoder.read_usize()).map(|_| Decodable::decode(decoder))) } } @@ -368,9 +369,10 @@ impl<'tcx, D: TyDecoder>> RefDecodable<'tcx, D> for [(ty::Clause<'tcx>, Span)] { fn decode(decoder: &mut D) -> &'tcx Self { - decoder.interner().arena.alloc_from_iter( - (0..decoder.read_usize()).map(|_| Decodable::decode(decoder)).collect::>(), - ) + decoder + .interner() + .arena + 
.alloc_from_iter((0..decoder.read_usize()).map(|_| Decodable::decode(decoder))) } } @@ -412,6 +414,17 @@ } } +impl<'tcx, D: TyDecoder>> RefDecodable<'tcx, D> + for ty::List<(VariantIdx, FieldIdx)> +{ + fn decode(decoder: &mut D) -> &'tcx Self { + let len = decoder.read_usize(); + decoder.interner().mk_offset_of_from_iter( + (0..len).map::<(VariantIdx, FieldIdx), _>(|_| Decodable::decode(decoder)), + ) + } +} + impl_decodable_via_ref! { &'tcx ty::TypeckResults<'tcx>, &'tcx ty::List>, @@ -424,6 +437,7 @@ &'tcx ty::List, &'tcx ty::List>, &'tcx ty::List, + &'tcx ty::List<(VariantIdx, FieldIdx)>, } #[macro_export] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/consts/kind.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/consts/kind.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/consts/kind.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/consts/kind.rs 2023-12-21 16:55:28.000000000 +0000 @@ -80,19 +80,19 @@ /// An inference variable for a const, for use in const generics. #[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable, Hash)] -pub enum InferConst<'tcx> { +pub enum InferConst { /// Infer the value of the const. - Var(ty::ConstVid<'tcx>), + Var(ty::ConstVid), /// Infer the value of the effect. /// /// For why this is separate from the `Var` variant above, see the /// documentation on `EffectVid`. - EffectVar(ty::EffectVid<'tcx>), + EffectVar(ty::EffectVid), /// A fresh const variable. See `infer::freshen` for more details. Fresh(u32), } -impl HashStable for InferConst<'_> { +impl HashStable for InferConst { fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { match self { InferConst::Var(_) | InferConst::EffectVar(_) => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/consts.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/consts.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/consts.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/consts.rs 2023-12-21 16:55:28.000000000 +0000 @@ -57,7 +57,7 @@ } #[inline] - pub fn new_var(tcx: TyCtxt<'tcx>, infer: ty::ConstVid<'tcx>, ty: Ty<'tcx>) -> Const<'tcx> { + pub fn new_var(tcx: TyCtxt<'tcx>, infer: ty::ConstVid, ty: Ty<'tcx>) -> Const<'tcx> { Const::new(tcx, ty::ConstKind::Infer(ty::InferConst::Var(infer)), ty) } @@ -67,7 +67,7 @@ } #[inline] - pub fn new_infer(tcx: TyCtxt<'tcx>, infer: ty::InferConst<'tcx>, ty: Ty<'tcx>) -> Const<'tcx> { + pub fn new_infer(tcx: TyCtxt<'tcx>, infer: ty::InferConst, ty: Ty<'tcx>) -> Const<'tcx> { Const::new(tcx, ty::ConstKind::Infer(infer), ty) } @@ -84,7 +84,7 @@ #[inline] pub fn new_placeholder( tcx: TyCtxt<'tcx>, - placeholder: ty::PlaceholderConst<'tcx>, + placeholder: ty::PlaceholderConst, ty: Ty<'tcx>, ) -> Const<'tcx> { Const::new(tcx, ty::ConstKind::Placeholder(placeholder), ty) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/context.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/context.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/context.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/context.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,7 +6,7 @@ use crate::arena::Arena; use crate::dep_graph::{DepGraph, DepKindStruct}; -use crate::infer::canonical::CanonicalVarInfo; +use crate::infer::canonical::{CanonicalVarInfo, 
CanonicalVarInfos}; use crate::lint::struct_lint_level; use crate::metadata::ModChild; use crate::middle::codegen_fn_attrs::CodegenFnAttrs; @@ -65,7 +65,7 @@ use rustc_span::{Span, DUMMY_SP}; use rustc_target::abi::{FieldIdx, Layout, LayoutS, TargetDataLayout, VariantIdx}; use rustc_target::spec::abi; -use rustc_type_ir::sty::TyKind::*; +use rustc_type_ir::TyKind::*; use rustc_type_ir::WithCachedTypeInfo; use rustc_type_ir::{CollectAndApply, Interner, TypeFlags}; @@ -80,54 +80,59 @@ #[allow(rustc::usage_of_ty_tykind)] impl<'tcx> Interner for TyCtxt<'tcx> { + type DefId = DefId; type AdtDef = ty::AdtDef<'tcx>; - type GenericArgsRef = ty::GenericArgsRef<'tcx>; + type GenericArgs = ty::GenericArgsRef<'tcx>; type GenericArg = ty::GenericArg<'tcx>; - type DefId = DefId; + type Term = ty::Term<'tcx>; + type Binder = Binder<'tcx, T>; - type Ty = Ty<'tcx>; - type Const = ty::Const<'tcx>; - type Region = Region<'tcx>; - type Predicate = Predicate<'tcx>; type TypeAndMut = TypeAndMut<'tcx>; - type Mutability = hir::Mutability; - type Movability = hir::Movability; - type PolyFnSig = PolyFnSig<'tcx>; - type ListBinderExistentialPredicate = &'tcx List>; - type BinderListTy = Binder<'tcx, &'tcx List>>; - type ListTy = &'tcx List>; + type CanonicalVars = CanonicalVarInfos<'tcx>; + + type Ty = Ty<'tcx>; + type Tys = &'tcx List>; type AliasTy = ty::AliasTy<'tcx>; type ParamTy = ParamTy; type BoundTy = ty::BoundTy; - type PlaceholderType = ty::PlaceholderType; + type PlaceholderTy = ty::PlaceholderType; type InferTy = InferTy; + type ErrorGuaranteed = ErrorGuaranteed; - type PredicateKind = ty::PredicateKind<'tcx>; + type BoundExistentialPredicates = &'tcx List>; + type PolyFnSig = PolyFnSig<'tcx>; type AllocId = crate::mir::interpret::AllocId; - type InferConst = ty::InferConst<'tcx>; + type Const = ty::Const<'tcx>; + type InferConst = ty::InferConst; type AliasConst = ty::UnevaluatedConst<'tcx>; + type PlaceholderConst = ty::PlaceholderConst; type ParamConst = ty::ParamConst; type BoundConst = ty::BoundVar; - type PlaceholderConst = ty::PlaceholderConst<'tcx>; type ValueConst = ty::ValTree<'tcx>; type ExprConst = ty::Expr<'tcx>; + type Region = Region<'tcx>; type EarlyBoundRegion = ty::EarlyBoundRegion; type BoundRegion = ty::BoundRegion; type FreeRegion = ty::FreeRegion; - type RegionVid = ty::RegionVid; + type InferRegion = ty::RegionVid; type PlaceholderRegion = ty::PlaceholderRegion; + type Predicate = Predicate<'tcx>; + type TraitPredicate = ty::TraitPredicate<'tcx>; + type RegionOutlivesPredicate = ty::RegionOutlivesPredicate<'tcx>; + type TypeOutlivesPredicate = ty::TypeOutlivesPredicate<'tcx>; + type ProjectionPredicate = ty::ProjectionPredicate<'tcx>; + type SubtypePredicate = ty::SubtypePredicate<'tcx>; + type CoercePredicate = ty::CoercePredicate<'tcx>; + type ClosureKind = ty::ClosureKind; + fn ty_and_mut_to_parts( TypeAndMut { ty, mutbl }: TypeAndMut<'tcx>, - ) -> (Self::Ty, Self::Mutability) { + ) -> (Self::Ty, ty::Mutability) { (ty, mutbl) } - - fn mutability_is_mut(mutbl: Self::Mutability) -> bool { - mutbl.is_mut() - } } type InternedSet<'tcx, T> = ShardedHashMap, ()>; @@ -152,11 +157,13 @@ const_: InternedSet<'tcx, ConstData<'tcx>>, const_allocation: InternedSet<'tcx, Allocation>, bound_variable_kinds: InternedSet<'tcx, List>, - layout: InternedSet<'tcx, LayoutS>, + layout: InternedSet<'tcx, LayoutS>, adt_def: InternedSet<'tcx, AdtDefData>, external_constraints: InternedSet<'tcx, ExternalConstraintsData<'tcx>>, predefined_opaques_in_body: InternedSet<'tcx, PredefinedOpaquesData<'tcx>>, 
fields: InternedSet<'tcx, List>, + local_def_ids: InternedSet<'tcx, List>, + offset_of: InternedSet<'tcx, List<(VariantIdx, FieldIdx)>>, } impl<'tcx> CtxtInterners<'tcx> { @@ -182,6 +189,8 @@ external_constraints: Default::default(), predefined_opaques_in_body: Default::default(), fields: Default::default(), + local_def_ids: Default::default(), + offset_of: Default::default(), } } @@ -770,9 +779,20 @@ self.diagnostic_items(did.krate).name_to_id.get(&name) == Some(&did) } - /// Returns `true` if the node pointed to by `def_id` is a generator for an async construct. - pub fn generator_is_async(self, def_id: DefId) -> bool { - matches!(self.generator_kind(def_id), Some(hir::GeneratorKind::Async(_))) + /// Returns `true` if the node pointed to by `def_id` is a coroutine for an async construct. + pub fn coroutine_is_async(self, def_id: DefId) -> bool { + matches!(self.coroutine_kind(def_id), Some(hir::CoroutineKind::Async(_))) + } + + /// Returns `true` if the node pointed to by `def_id` is a general coroutine that implements `Coroutine`. + /// This means it is neither an `async` or `gen` construct. + pub fn is_general_coroutine(self, def_id: DefId) -> bool { + matches!(self.coroutine_kind(def_id), Some(hir::CoroutineKind::Coroutine)) + } + + /// Returns `true` if the node pointed to by `def_id` is a coroutine for a gen construct. + pub fn coroutine_is_gen(self, def_id: DefId) -> bool { + matches!(self.coroutine_kind(def_id), Some(hir::CoroutineKind::Gen(_))) } pub fn stability(self) -> &'tcx stability::Index { @@ -960,7 +980,7 @@ self.dep_graph.read_index(DepNodeIndex::FOREVER_RED_NODE); let definitions = &self.untracked.definitions; - std::iter::from_generator(|| { + std::iter::from_coroutine(|| { let mut i = 0; // Recompute the number of definitions each time, because our caller may be creating @@ -1057,16 +1077,21 @@ } /// Returns the `DefId` and the `BoundRegionKind` corresponding to the given region. - pub fn is_suitable_region(self, region: Region<'tcx>) -> Option { - let (suitable_region_binding_scope, bound_region) = match *region { - ty::ReFree(ref free_region) => { - (free_region.scope.expect_local(), free_region.bound_region) + pub fn is_suitable_region(self, mut region: Region<'tcx>) -> Option { + let (suitable_region_binding_scope, bound_region) = loop { + let def_id = match region.kind() { + ty::ReFree(fr) => fr.bound_region.get_id()?.as_local()?, + ty::ReEarlyBound(ebr) => ebr.def_id.expect_local(), + _ => return None, // not a free region + }; + let scope = self.local_parent(def_id); + if self.def_kind(scope) == DefKind::OpaqueTy { + // Lifetime params of opaque types are synthetic and thus irrelevant to + // diagnostics. Map them back to their origin! 
+ region = self.map_rpit_lifetime_to_fn_lifetime(def_id); + continue; } - ty::ReEarlyBound(ref ebr) => ( - self.local_parent(ebr.def_id.expect_local()), - ty::BoundRegionKind::BrNamed(ebr.def_id, ebr.name), - ), - _ => return None, // not a free region + break (scope, ty::BrNamed(def_id.into(), self.item_name(def_id.into()))); }; let is_impl_item = match self.hir().find_by_def_id(suitable_region_binding_scope) { @@ -1074,7 +1099,7 @@ Some(Node::ImplItem(..)) => { self.is_bound_region_in_impl_item(suitable_region_binding_scope) } - _ => return None, + _ => false, }; Some(FreeRegionInfo { @@ -1121,7 +1146,11 @@ { v.visit_ty(alias_ty); if !v.0.is_empty() { - return Some((v.0, alias_generics.span, alias_generics.span_for_lifetime_suggestion())); + return Some(( + v.0, + alias_generics.span, + alias_generics.span_for_lifetime_suggestion(), + )); } } return None; @@ -1382,8 +1411,8 @@ FnDef, FnPtr, Placeholder, - Generator, - GeneratorWitness, + Coroutine, + CoroutineWitness, Dynamic, Closure, Tuple, @@ -1521,7 +1550,7 @@ region: pub(crate) intern_region(RegionKind<'tcx>): Region -> Region<'tcx>, const_: intern_const(ConstData<'tcx>): Const -> Const<'tcx>, const_allocation: pub mk_const_alloc(Allocation): ConstAllocation -> ConstAllocation<'tcx>, - layout: pub mk_layout(LayoutS): Layout -> Layout<'tcx>, + layout: pub mk_layout(LayoutS): Layout -> Layout<'tcx>, adt_def: pub mk_adt_def_from_data(AdtDefData): AdtDef -> AdtDef<'tcx>, external_constraints: pub mk_external_constraints(ExternalConstraintsData<'tcx>): ExternalConstraints -> ExternalConstraints<'tcx>, @@ -1559,6 +1588,8 @@ place_elems: pub mk_place_elems(PlaceElem<'tcx>), bound_variable_kinds: pub mk_bound_variable_kinds(ty::BoundVariableKind), fields: pub mk_fields(FieldIdx), + local_def_ids: intern_local_def_ids(LocalDefId), + offset_of: pub mk_offset_of((VariantIdx, FieldIdx)), ); impl<'tcx> TyCtxt<'tcx> { @@ -1678,7 +1709,6 @@ && let DefKind::Impl { of_trait: false } = self.def_kind(self.parent(_def_id)) { // If this is an inherent projection. - generics.params.len() + 1 } else { generics.count() @@ -1789,6 +1819,13 @@ self.intern_clauses(clauses) } + pub fn mk_local_def_ids(self, clauses: &[LocalDefId]) -> &'tcx List { + // FIXME consider asking the input slice to be sorted to avoid + // re-interning permutations, in which case that would be asserted + // here. 
+ self.intern_local_def_ids(clauses) + } + pub fn mk_const_list_from_iter(self, iter: I) -> T::Output where I: Iterator, @@ -1880,6 +1917,14 @@ T::collect_and_apply(iter, |xs| self.mk_fields(xs)) } + pub fn mk_offset_of_from_iter(self, iter: I) -> T::Output + where + I: Iterator, + T: CollectAndApply<(VariantIdx, FieldIdx), &'tcx List<(VariantIdx, FieldIdx)>>, + { + T::collect_and_apply(iter, |xs| self.mk_offset_of(xs)) + } + pub fn mk_args_trait( self, self_ty: Ty<'tcx>, @@ -1888,15 +1933,6 @@ self.mk_args_from_iter(iter::once(self_ty.into()).chain(rest)) } - pub fn mk_alias_ty( - self, - def_id: DefId, - args: impl IntoIterator>>, - ) -> ty::AliasTy<'tcx> { - let args = self.check_and_mk_args(def_id, args); - ty::AliasTy { def_id, args, _use_mk_alias_ty_instead: () } - } - pub fn mk_bound_variable_kinds_from_iter(self, iter: I) -> T::Output where I: Iterator, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/diagnostics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/diagnostics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/diagnostics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/diagnostics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -17,7 +17,7 @@ use rustc_hir::def_id::DefId; use rustc_hir::{PredicateOrigin, WherePredicate}; use rustc_span::{BytePos, Span}; -use rustc_type_ir::sty::TyKind::*; +use rustc_type_ir::TyKind::*; impl<'tcx> IntoDiagnosticArg for Ty<'tcx> { fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> { @@ -274,6 +274,8 @@ span, if span_to_replace.is_some() { constraint.clone() + } else if constraint.starts_with('<') { + constraint.to_string() } else if bound_list_non_empty { format!(" + {constraint}") } else { @@ -482,8 +484,8 @@ FnDef(..) | Closure(..) | Infer(..) - | Generator(..) - | GeneratorWitness(..) + | Coroutine(..) + | CoroutineWitness(..) | Bound(_, _) | Placeholder(_) | Error(_) => { @@ -494,7 +496,8 @@ let parent = self.tcx.parent(def_id); let parent_ty = self.tcx.type_of(parent).instantiate_identity(); if let DefKind::TyAlias | DefKind::AssocTy = self.tcx.def_kind(parent) - && let Alias(Opaque, AliasTy { def_id: parent_opaque_def_id, .. }) = *parent_ty.kind() + && let Alias(Opaque, AliasTy { def_id: parent_opaque_def_id, .. }) = + *parent_ty.kind() && parent_opaque_def_id == def_id { // Okay @@ -566,8 +569,8 @@ // FIXME(compiler-errors): We could replace these with infer, I guess. Closure(..) | Infer(..) - | Generator(..) - | GeneratorWitness(..) + | Coroutine(..) + | CoroutineWitness(..) | Bound(_, _) | Placeholder(_) | Error(_) => { @@ -577,8 +580,10 @@ Alias(Opaque, AliasTy { def_id, .. }) => { let parent = self.tcx.parent(def_id); let parent_ty = self.tcx.type_of(parent).instantiate_identity(); - if let hir::def::DefKind::TyAlias | hir::def::DefKind::AssocTy = self.tcx.def_kind(parent) - && let Alias(Opaque, AliasTy { def_id: parent_opaque_def_id, .. }) = *parent_ty.kind() + if let hir::def::DefKind::TyAlias | hir::def::DefKind::AssocTy = + self.tcx.def_kind(parent) + && let Alias(Opaque, AliasTy { def_id: parent_opaque_def_id, .. 
}) = + *parent_ty.kind() && parent_opaque_def_id == def_id { t diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/error.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/error.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/error.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/error.rs 2023-12-21 16:55:28.000000000 +0000 @@ -241,8 +241,10 @@ } ty::Dynamic(..) => "trait object".into(), ty::Closure(..) => "closure".into(), - ty::Generator(def_id, ..) => tcx.generator_kind(def_id).unwrap().descr().into(), - ty::GeneratorWitness(..) => "generator witness".into(), + ty::Coroutine(def_id, ..) => { + format!("{:#}", tcx.coroutine_kind(def_id).unwrap()).into() + } + ty::CoroutineWitness(..) => "coroutine witness".into(), ty::Infer(ty::TyVar(_)) => "inferred type".into(), ty::Infer(ty::IntVar(_)) => "integer".into(), ty::Infer(ty::FloatVar(_)) => "floating-point number".into(), @@ -253,7 +255,13 @@ ty::Infer(ty::FreshFloatTy(_)) => "fresh floating-point type".into(), ty::Alias(ty::Projection | ty::Inherent, _) => "associated type".into(), ty::Param(p) => format!("type parameter `{p}`").into(), - ty::Alias(ty::Opaque, ..) => if tcx.ty_is_opaque_future(self) { "future".into() } else { "opaque type".into() }, + ty::Alias(ty::Opaque, ..) => { + if tcx.ty_is_opaque_future(self) { + "future".into() + } else { + "opaque type".into() + } + } ty::Error(_) => "type error".into(), _ => { let width = tcx.sess.diagnostic_width(); @@ -293,8 +301,10 @@ ty::FnPtr(_) => "fn pointer".into(), ty::Dynamic(..) => "trait object".into(), ty::Closure(..) => "closure".into(), - ty::Generator(def_id, ..) => tcx.generator_kind(def_id).unwrap().descr().into(), - ty::GeneratorWitness(..) => "generator witness".into(), + ty::Coroutine(def_id, ..) => { + format!("{:#}", tcx.coroutine_kind(def_id).unwrap()).into() + } + ty::CoroutineWitness(..) => "coroutine witness".into(), ty::Tuple(..) => "tuple".into(), ty::Placeholder(..) => "higher-ranked type".into(), ty::Bound(..) => "bound type variable".into(), @@ -309,26 +319,25 @@ impl<'tcx> TyCtxt<'tcx> { pub fn ty_string_with_limit(self, ty: Ty<'tcx>, length_limit: usize) -> String { let mut type_limit = 50; - let regular = FmtPrinter::new(self, hir::def::Namespace::TypeNS) - .pretty_print_type(ty) - .expect("could not write to `String`") - .into_buffer(); + let regular = FmtPrinter::print_string(self, hir::def::Namespace::TypeNS, |cx| { + cx.pretty_print_type(ty) + }) + .expect("could not write to `String`"); if regular.len() <= length_limit { return regular; } let mut short; loop { // Look for the longest properly trimmed path that still fits in length_limit. 
- short = with_forced_trimmed_paths!( - FmtPrinter::new_with_limit( + short = with_forced_trimmed_paths!({ + let mut cx = FmtPrinter::new_with_limit( self, hir::def::Namespace::TypeNS, rustc_session::Limit(type_limit), - ) - .pretty_print_type(ty) - .expect("could not write to `String`") - .into_buffer() - ); + ); + cx.pretty_print_type(ty).expect("could not write to `String`"); + cx.into_buffer() + }); if short.len() <= length_limit || type_limit == 0 { break; } @@ -338,10 +347,10 @@ } pub fn short_ty_string(self, ty: Ty<'tcx>) -> (String, Option) { - let regular = FmtPrinter::new(self, hir::def::Namespace::TypeNS) - .pretty_print_type(ty) - .expect("could not write to `String`") - .into_buffer(); + let regular = FmtPrinter::print_string(self, hir::def::Namespace::TypeNS, |cx| { + cx.pretty_print_type(ty) + }) + .expect("could not write to `String`"); if !self.sess.opts.unstable_opts.write_long_types_to_disk { return (regular, None); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/fast_reject.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/fast_reject.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/fast_reject.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/fast_reject.rs 2023-12-21 16:55:28.000000000 +0000 @@ -28,8 +28,8 @@ MarkerTraitObject, Trait(DefId), Closure(DefId), - Generator(DefId), - GeneratorWitness(DefId), + Coroutine(DefId), + CoroutineWitness(DefId), Function(usize), Placeholder, } @@ -128,8 +128,8 @@ }, ty::Ref(_, _, mutbl) => Some(SimplifiedType::Ref(mutbl)), ty::FnDef(def_id, _) | ty::Closure(def_id, _) => Some(SimplifiedType::Closure(def_id)), - ty::Generator(def_id, _, _) => Some(SimplifiedType::Generator(def_id)), - ty::GeneratorWitness(def_id, _) => Some(SimplifiedType::GeneratorWitness(def_id)), + ty::Coroutine(def_id, _, _) => Some(SimplifiedType::Coroutine(def_id)), + ty::CoroutineWitness(def_id, _) => Some(SimplifiedType::CoroutineWitness(def_id)), ty::Never => Some(SimplifiedType::Never), ty::Tuple(tys) => Some(SimplifiedType::Tuple(tys.len())), ty::FnPtr(f) => Some(SimplifiedType::Function(f.skip_binder().inputs().len())), @@ -164,8 +164,8 @@ | SimplifiedType::Foreign(d) | SimplifiedType::Trait(d) | SimplifiedType::Closure(d) - | SimplifiedType::Generator(d) - | SimplifiedType::GeneratorWitness(d) => Some(d), + | SimplifiedType::Coroutine(d) + | SimplifiedType::CoroutineWitness(d) => Some(d), _ => None, } } @@ -234,8 +234,8 @@ | ty::Foreign(..) => {} ty::FnDef(..) | ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Placeholder(..) | ty::Bound(..) | ty::Infer(_) => bug!("unexpected impl_ty: {impl_ty}"), @@ -310,7 +310,7 @@ }, // Impls cannot contain these types as these cannot be named directly. - ty::FnDef(..) | ty::Closure(..) | ty::Generator(..) => false, + ty::FnDef(..) | ty::Closure(..) | ty::Coroutine(..) => false, // Placeholder types don't unify with anything on their own ty::Placeholder(..) | ty::Bound(..) => false, @@ -337,7 +337,7 @@ ty::Error(_) => true, - ty::GeneratorWitness(..) => { + ty::CoroutineWitness(..) 
=> { bug!("unexpected obligation type: {:?}", obligation_ty) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/flags.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/flags.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/flags.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/flags.rs 2023-12-21 16:55:28.000000000 +0000 @@ -95,8 +95,8 @@ self.add_flags(TypeFlags::STILL_FURTHER_SPECIALIZABLE); } - ty::Generator(_, args, _) => { - let args = args.as_generator(); + ty::Coroutine(_, args, _) => { + let args = args.as_coroutine(); let should_remove_further_specializable = !self.flags.contains(TypeFlags::STILL_FURTHER_SPECIALIZABLE); self.add_args(args.parent_args()); @@ -111,14 +111,14 @@ self.add_ty(args.tupled_upvars_ty()); } - ty::GeneratorWitness(_, args) => { + ty::CoroutineWitness(_, args) => { let should_remove_further_specializable = !self.flags.contains(TypeFlags::STILL_FURTHER_SPECIALIZABLE); self.add_args(args); if should_remove_further_specializable { self.flags -= TypeFlags::STILL_FURTHER_SPECIALIZABLE; } - self.add_flags(TypeFlags::HAS_TY_GENERATOR); + self.add_flags(TypeFlags::HAS_TY_COROUTINE); } &ty::Closure(_, args) => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/generic_args.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/generic_args.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/generic_args.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/generic_args.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,7 +2,7 @@ use crate::ty::codec::{TyDecoder, TyEncoder}; use crate::ty::fold::{FallibleTypeFolder, TypeFoldable, TypeFolder, TypeSuperFoldable}; -use crate::ty::sty::{ClosureArgs, GeneratorArgs, InlineConstArgs}; +use crate::ty::sty::{ClosureArgs, CoroutineArgs, InlineConstArgs}; use crate::ty::visit::{TypeVisitable, TypeVisitableExt, TypeVisitor}; use crate::ty::{self, Lift, List, ParamConst, Ty, TyCtxt}; @@ -11,7 +11,6 @@ use rustc_hir::def_id::DefId; use rustc_macros::HashStable; use rustc_serialize::{self, Decodable, Encodable}; -use rustc_span::sym; use rustc_type_ir::WithCachedTypeInfo; use smallvec::SmallVec; @@ -267,12 +266,12 @@ ClosureArgs { args: self } } - /// Interpret these generic args as the args of a generator type. - /// Generator args have a particular structure controlled by the - /// compiler that encodes information like the signature and generator kind; - /// see `ty::GeneratorArgs` struct for more comments. - pub fn as_generator(&'tcx self) -> GeneratorArgs<'tcx> { - GeneratorArgs { args: self } + /// Interpret these generic args as the args of a coroutine type. + /// Coroutine args have a particular structure controlled by the + /// compiler that encodes information like the signature and coroutine kind; + /// see `ty::CoroutineArgs` struct for more comments. + pub fn as_coroutine(&'tcx self) -> CoroutineArgs<'tcx> { + CoroutineArgs { args: self } } /// Interpret these generic args as the args of an inline const. 
@@ -452,10 +451,6 @@ tcx.mk_args_from_iter(self.iter().take(generics.count())) } - pub fn host_effect_param(&'tcx self) -> Option> { - self.consts().rfind(|x| matches!(x.kind(), ty::ConstKind::Param(p) if p.name == sym::host)) - } - pub fn print_as_list(&self) -> String { let v = self.iter().map(|arg| arg.to_string()).collect::>(); format!("[{}]", v.join(", ")) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/generics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/generics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/generics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/generics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -79,6 +79,10 @@ } } + pub fn is_host_effect(&self) -> bool { + matches!(self.kind, GenericParamDefKind::Const { is_host_effect: true, .. }) + } + pub fn default_value<'tcx>( &self, tcx: TyCtxt<'tcx>, @@ -233,6 +237,20 @@ } } + /// Returns the `GenericParamDef` with the given index if available. + pub fn opt_param_at( + &'tcx self, + param_index: usize, + tcx: TyCtxt<'tcx>, + ) -> Option<&'tcx GenericParamDef> { + if let Some(index) = param_index.checked_sub(self.parent_count) { + self.params.get(index) + } else { + tcx.generics_of(self.parent.expect("parent_count > 0 but no parent?")) + .opt_param_at(param_index, tcx) + } + } + pub fn params_to(&'tcx self, param_index: usize, tcx: TyCtxt<'tcx>) -> &'tcx [GenericParamDef] { if let Some(index) = param_index.checked_sub(self.parent_count) { &self.params[..index] @@ -264,6 +282,20 @@ } } + /// Returns the `GenericParamDef` associated with this `ParamTy` if it belongs to this + /// `Generics`. + pub fn opt_type_param( + &'tcx self, + param: &ParamTy, + tcx: TyCtxt<'tcx>, + ) -> Option<&'tcx GenericParamDef> { + let param = self.opt_param_at(param.index as usize, tcx)?; + match param.kind { + GenericParamDefKind::Type { .. } => Some(param), + _ => None, + } + } + /// Returns the `GenericParamDef` associated with this `ParamConst`. pub fn const_param(&'tcx self, param: &ParamConst, tcx: TyCtxt<'tcx>) -> &GenericParamDef { let param = self.param_at(param.index as usize, tcx); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/inhabitedness/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/inhabitedness/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/inhabitedness/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/inhabitedness/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -47,7 +47,7 @@ use crate::ty::context::TyCtxt; use crate::ty::{self, DefId, Ty, VariantDef, Visibility}; -use rustc_type_ir::sty::TyKind::*; +use rustc_type_ir::TyKind::*; pub mod inhabited_predicate; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/instance.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/instance.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/instance.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/instance.rs 2023-12-21 16:55:28.000000000 +0000 @@ -36,7 +36,7 @@ /// This includes: /// - `fn` items /// - closures - /// - generators + /// - coroutines Item(DefId), /// An intrinsic `fn` item (with `"rust-intrinsic"` or `"platform-intrinsic"` ABI). @@ -245,16 +245,15 @@ // drops of `Option::None` before LTO. We also respect the intent of // `#[inline]` on `Drop::drop` implementations. 
return ty.ty_adt_def().map_or(true, |adt_def| { - adt_def.destructor(tcx).map_or_else( - || adt_def.is_enum(), - |dtor| tcx.codegen_fn_attrs(dtor.did).requests_inline(), - ) + adt_def + .destructor(tcx) + .map_or_else(|| adt_def.is_enum(), |dtor| tcx.cross_crate_inlinable(dtor.did)) }); } if let ty::InstanceDef::ThreadLocalShim(..) = *self { return false; } - tcx.codegen_fn_attrs(self.def_id()).requests_inline() + tcx.cross_crate_inlinable(self.def_id()) } pub fn requires_caller_location(&self, tcx: TyCtxt<'_>) -> bool { @@ -299,9 +298,9 @@ ty::tls::with(|tcx| { let args = tcx.lift(instance.args).expect("could not lift for printing"); - let s = FmtPrinter::new_with_limit(tcx, Namespace::ValueNS, type_length) - .print_def_path(instance.def_id(), args)? - .into_buffer(); + let mut cx = FmtPrinter::new_with_limit(tcx, Namespace::ValueNS, type_length); + cx.print_def_path(instance.def_id(), args)?; + let s = cx.into_buffer(); f.write_str(&s) })?; @@ -617,12 +616,17 @@ v: EarlyBinder, ) -> Result> where - T: TypeFoldable> + Clone, + T: TypeFoldable>, { if let Some(args) = self.args_for_mir_body() { tcx.try_instantiate_and_normalize_erasing_regions(args, param_env, v) } else { - tcx.try_normalize_erasing_regions(param_env, v.skip_binder()) + // We're using `instantiate_identity` as e.g. + // `FnPtrShim` is separately generated for every + // instantiation of the `FnDef`, so the MIR body + // is already instantiated. Any generic parameters it + // contains are generic parameters from the caller. + tcx.try_normalize_erasing_regions(param_env, v.instantiate_identity()) } } @@ -649,15 +653,15 @@ let unused = tcx.unused_generic_params(instance); debug!("polymorphize: unused={:?}", unused); - // If this is a closure or generator then we need to handle the case where another closure + // If this is a closure or coroutine then we need to handle the case where another closure // from the function is captured as an upvar and hasn't been polymorphized. In this case, // the unpolymorphized upvar closure would result in a polymorphized closure producing // multiple mono items (and eventually symbol clashes). let def_id = instance.def_id(); let upvars_ty = if tcx.is_closure(def_id) { Some(args.as_closure().tupled_upvars_ty()) - } else if tcx.type_of(def_id).skip_binder().is_generator() { - Some(args.as_generator().tupled_upvars_ty()) + } else if tcx.type_of(def_id).skip_binder().is_coroutine() { + Some(args.as_coroutine().tupled_upvars_ty()) } else { None }; @@ -685,13 +689,13 @@ Ty::new_closure(self.tcx, def_id, polymorphized_args) } } - ty::Generator(def_id, args, movability) => { + ty::Coroutine(def_id, args, movability) => { let polymorphized_args = polymorphize(self.tcx, ty::InstanceDef::Item(def_id), args); if args == polymorphized_args { ty } else { - Ty::new_generator(self.tcx, def_id, polymorphized_args, movability) + Ty::new_coroutine(self.tcx, def_id, polymorphized_args, movability) } } _ => ty.super_fold_with(self), @@ -711,7 +715,7 @@ upvars_ty == Some(args[param.index as usize].expect_ty()) => { // ..then double-check that polymorphization marked it used.. debug_assert!(!is_unused); - // ..and polymorphize any closures/generators captured as upvars. + // ..and polymorphize any closures/coroutines captured as upvars. 
let upvars_ty = upvars_ty.unwrap(); let polymorphized_upvars_ty = upvars_ty.fold_with( &mut PolymorphizationFolder { tcx }); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/layout.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/layout.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/layout.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/layout.rs 2023-12-21 16:55:28.000000000 +0000 @@ -809,7 +809,7 @@ | ty::FnPtr(_) | ty::Never | ty::FnDef(..) - | ty::GeneratorWitness(..) + | ty::CoroutineWitness(..) | ty::Foreign(..) | ty::Dynamic(_, _, ty::Dyn) => { bug!("TyAndLayout::field({:?}): not applicable", this) @@ -868,7 +868,7 @@ { let metadata = tcx.normalize_erasing_regions( cx.param_env(), - Ty::new_projection(tcx,metadata_def_id, [pointee]), + Ty::new_projection(tcx, metadata_def_id, [pointee]), ); // Map `Metadata = DynMetadata` back to a vtable, since it @@ -898,16 +898,16 @@ ty::Array(element, _) | ty::Slice(element) => TyMaybeWithLayout::Ty(element), ty::Str => TyMaybeWithLayout::Ty(tcx.types.u8), - // Tuples, generators and closures. + // Tuples, coroutines and closures. ty::Closure(_, ref args) => field_ty_or_layout( TyAndLayout { ty: args.as_closure().tupled_upvars_ty(), ..this }, cx, i, ), - ty::Generator(def_id, ref args, _) => match this.variants { + ty::Coroutine(def_id, ref args, _) => match this.variants { Variants::Single { index } => TyMaybeWithLayout::Ty( - args.as_generator() + args.as_coroutine() .state_tys(def_id, tcx) .nth(index.as_usize()) .unwrap() @@ -918,7 +918,7 @@ if i == tag_field { return TyMaybeWithLayout::TyAndLayout(tag_layout(tag)); } - TyMaybeWithLayout::Ty(args.as_generator().prefix_tys()[i]) + TyMaybeWithLayout::Ty(args.as_coroutine().prefix_tys()[i]) } }, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/list.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/list.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/list.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/list.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,7 @@ use crate::arena::Arena; use rustc_data_structures::aligned::{align_of, Aligned}; use rustc_serialize::{Encodable, Encoder}; -use rustc_type_ir::{InferCtxtLike, OptWithInfcx}; +use rustc_type_ir::{InferCtxtLike, WithInfcx}; use std::alloc::Layout; use std::cmp::Ordering; use std::fmt; @@ -121,8 +121,8 @@ } } impl<'tcx, T: super::DebugWithInfcx>> super::DebugWithInfcx> for List { - fn fmt>>( - this: OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { fmt::Debug::fmt(&this.map(|this| this.as_slice()), f) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -20,7 +20,7 @@ use crate::error::{OpaqueHiddenTypeMismatch, TypeMismatchReason}; use crate::metadata::ModChild; use crate::middle::privacy::EffectiveVisibilities; -use crate::mir::{Body, GeneratorLayout}; +use crate::mir::{Body, CoroutineLayout}; use crate::query::Providers; use crate::traits::{self, Reveal}; use crate::ty; @@ -54,7 +54,7 @@ use 
rustc_span::{ExpnId, ExpnKind, Span}; use rustc_target::abi::{Align, FieldIdx, Integer, IntegerType, VariantIdx}; pub use rustc_target::abi::{ReprFlags, ReprOptions}; -pub use rustc_type_ir::{DebugWithInfcx, InferCtxtLike, OptWithInfcx}; +pub use rustc_type_ir::{DebugWithInfcx, InferCtxtLike, WithInfcx}; pub use vtable::*; use std::fmt::Debug; @@ -97,17 +97,17 @@ pub use self::sty::BoundRegionKind::*; pub use self::sty::{ AliasTy, Article, Binder, BoundRegion, BoundRegionKind, BoundTy, BoundTyKind, BoundVar, - BoundVariableKind, CanonicalPolyFnSig, ClosureArgs, ClosureArgsParts, ConstKind, ConstVid, - EarlyBoundRegion, EffectVid, ExistentialPredicate, ExistentialProjection, ExistentialTraitRef, - FnSig, FreeRegion, GenSig, GeneratorArgs, GeneratorArgsParts, InlineConstArgs, + BoundVariableKind, CanonicalPolyFnSig, ClauseKind, ClosureArgs, ClosureArgsParts, ConstKind, + ConstVid, CoroutineArgs, CoroutineArgsParts, EarlyBoundRegion, EffectVid, ExistentialPredicate, + ExistentialProjection, ExistentialTraitRef, FnSig, FreeRegion, GenSig, InlineConstArgs, InlineConstArgsParts, ParamConst, ParamTy, PolyExistentialPredicate, PolyExistentialProjection, - PolyExistentialTraitRef, PolyFnSig, PolyGenSig, PolyTraitRef, Region, RegionKind, RegionVid, - TraitRef, TyKind, TypeAndMut, UpvarArgs, VarianceDiagInfo, + PolyExistentialTraitRef, PolyFnSig, PolyGenSig, PolyTraitRef, PredicateKind, Region, + RegionKind, RegionVid, TraitRef, TyKind, TypeAndMut, UpvarArgs, VarianceDiagInfo, }; pub use self::trait_def::TraitDef; pub use self::typeck_results::{ - CanonicalUserType, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations, TypeckResults, - UserType, UserTypeAnnotationIndex, + CanonicalUserType, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations, IsIdentity, + TypeckResults, UserType, UserTypeAnnotationIndex, }; pub mod _match; @@ -233,6 +233,7 @@ #[derive(Clone, Debug, TypeFoldable, TypeVisitable)] pub struct ImplHeader<'tcx> { pub impl_def_id: DefId, + pub impl_args: ty::GenericArgsRef<'tcx>, pub self_ty: Ty<'tcx>, pub trait_ref: Option>, pub predicates: Vec>, @@ -578,11 +579,6 @@ pub struct Clause<'tcx>(Interned<'tcx, WithCachedTypeInfo>>>); impl<'tcx> Clause<'tcx> { - pub fn from_projection_clause(tcx: TyCtxt<'tcx>, pred: PolyProjectionPredicate<'tcx>) -> Self { - let pred: Predicate<'tcx> = pred.to_predicate(tcx); - pred.expect_clause() - } - pub fn as_predicate(self) -> Predicate<'tcx> { Predicate(self.0) } @@ -631,98 +627,6 @@ } } -#[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)] -#[derive(HashStable, TypeFoldable, TypeVisitable, Lift)] -/// A clause is something that can appear in where bounds or be inferred -/// by implied bounds. -pub enum ClauseKind<'tcx> { - /// Corresponds to `where Foo: Bar`. `Foo` here would be - /// the `Self` type of the trait reference and `A`, `B`, and `C` - /// would be the type parameters. - Trait(TraitPredicate<'tcx>), - - /// `where 'a: 'b` - RegionOutlives(RegionOutlivesPredicate<'tcx>), - - /// `where T: 'a` - TypeOutlives(TypeOutlivesPredicate<'tcx>), - - /// `where ::Name == X`, approximately. - /// See the `ProjectionPredicate` struct for details. - Projection(ProjectionPredicate<'tcx>), - - /// Ensures that a const generic argument to a parameter `const N: u8` - /// is of type `u8`. - ConstArgHasType(Const<'tcx>, Ty<'tcx>), - - /// No syntax: `T` well-formed. - WellFormed(GenericArg<'tcx>), - - /// Constant initializer must evaluate successfully. 
- ConstEvaluatable(ty::Const<'tcx>), -} - -#[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)] -#[derive(HashStable, TypeFoldable, TypeVisitable, Lift)] -pub enum PredicateKind<'tcx> { - /// Prove a clause - Clause(ClauseKind<'tcx>), - - /// Trait must be object-safe. - ObjectSafe(DefId), - - /// No direct syntax. May be thought of as `where T: FnFoo<...>` - /// for some generic args `...` and `T` being a closure type. - /// Satisfied (or refuted) once we know the closure's kind. - ClosureKind(DefId, GenericArgsRef<'tcx>, ClosureKind), - - /// `T1 <: T2` - /// - /// This obligation is created most often when we have two - /// unresolved type variables and hence don't have enough - /// information to process the subtyping obligation yet. - Subtype(SubtypePredicate<'tcx>), - - /// `T1` coerced to `T2` - /// - /// Like a subtyping obligation, this is created most often - /// when we have two unresolved type variables and hence - /// don't have enough information to process the coercion - /// obligation yet. At the moment, we actually process coercions - /// very much like subtyping and don't handle the full coercion - /// logic. - Coerce(CoercePredicate<'tcx>), - - /// Constants must be equal. The first component is the const that is expected. - ConstEquate(Const<'tcx>, Const<'tcx>), - - /// A marker predicate that is always ambiguous. - /// Used for coherence to mark opaque types as possibly equal to each other but ambiguous. - Ambiguous, - - /// Separate from `ClauseKind::Projection` which is used for normalization in new solver. - /// This predicate requires two terms to be equal to eachother. - /// - /// Only used for new solver - AliasRelate(Term<'tcx>, Term<'tcx>, AliasRelationDirection), -} - -#[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)] -#[derive(HashStable, Debug)] -pub enum AliasRelationDirection { - Equate, - Subtype, -} - -impl std::fmt::Display for AliasRelationDirection { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - AliasRelationDirection::Equate => write!(f, "=="), - AliasRelationDirection::Subtype => write!(f, "<:"), - } - } -} - /// The crate outlives map is computed during typeck and contains the /// outlives of every item in the local crate. 
You should not use it /// directly, because to do so will make your pass dependent on the @@ -1028,7 +932,7 @@ _ => None, }, TermKind::Const(ct) => match ct.kind() { - ConstKind::Unevaluated(uv) => Some(tcx.mk_alias_ty(uv.def, uv.args)), + ConstKind::Unevaluated(uv) => Some(AliasTy::new(tcx, uv.def, uv.args)), _ => None, }, } @@ -1089,19 +993,19 @@ } #[derive(Copy, Clone, Eq, PartialEq, Debug)] -pub enum TermVid<'tcx> { +pub enum TermVid { Ty(ty::TyVid), - Const(ty::ConstVid<'tcx>), + Const(ty::ConstVid), } -impl From for TermVid<'_> { +impl From for TermVid { fn from(value: ty::TyVid) -> Self { TermVid::Ty(value) } } -impl<'tcx> From> for TermVid<'tcx> { - fn from(value: ty::ConstVid<'tcx>) -> Self { +impl From for TermVid { + fn from(value: ty::ConstVid) -> Self { TermVid::Const(value) } } @@ -1296,12 +1200,25 @@ } } +impl<'tcx> ToPredicate<'tcx> for TraitPredicate<'tcx> { + fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { + PredicateKind::Clause(ClauseKind::Trait(self)).to_predicate(tcx) + } +} + impl<'tcx> ToPredicate<'tcx> for PolyTraitPredicate<'tcx> { fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { self.map_bound(|p| PredicateKind::Clause(ClauseKind::Trait(p))).to_predicate(tcx) } } +impl<'tcx> ToPredicate<'tcx, Clause<'tcx>> for TraitPredicate<'tcx> { + fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Clause<'tcx> { + let p: Predicate<'tcx> = self.to_predicate(tcx); + p.expect_clause() + } +} + impl<'tcx> ToPredicate<'tcx, Clause<'tcx>> for PolyTraitPredicate<'tcx> { fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Clause<'tcx> { let p: Predicate<'tcx> = self.to_predicate(tcx); @@ -1340,9 +1257,10 @@ } } -impl<'tcx> ToPredicate<'tcx> for TraitPredicate<'tcx> { - fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { - PredicateKind::Clause(ClauseKind::Trait(self)).to_predicate(tcx) +impl<'tcx> ToPredicate<'tcx, Clause<'tcx>> for PolyProjectionPredicate<'tcx> { + fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Clause<'tcx> { + let p: Predicate<'tcx> = self.to_predicate(tcx); + p.expect_clause() } } @@ -1609,7 +1527,7 @@ pub ty: Ty<'tcx>, } -pub type PlaceholderConst<'tcx> = Placeholder; +pub type PlaceholderConst = Placeholder; /// When type checking, we use the `ParamEnv` to track /// details about the set of where-clauses that are in scope at this @@ -1743,30 +1661,9 @@ Self::new(List::empty(), self.reveal()) } - /// Creates a suitable environment in which to perform trait - /// queries on the given value. When type-checking, this is simply - /// the pair of the environment plus value. But when reveal is set to - /// All, then if `value` does not reference any type parameters, we will - /// pair it with the empty environment. This improves caching and is generally - /// invisible. - /// - /// N.B., we preserve the environment when type-checking because it - /// is possible for the user to have wacky where-clauses like - /// `where Box: Copy`, which are clearly never - /// satisfiable. We generally want to behave as if they were true, - /// although the surrounding function is never reachable. + /// Creates a pair of param-env and value for use in queries. 
pub fn and>>(self, value: T) -> ParamEnvAnd<'tcx, T> { - match self.reveal() { - Reveal::UserFacing => ParamEnvAnd { param_env: self, value }, - - Reveal::All => { - if value.is_global() { - ParamEnvAnd { param_env: self.without_caller_bounds(), value } - } else { - ParamEnvAnd { param_env: self, value } - } - } - } + ParamEnvAnd { param_env: self, value } } } @@ -2114,7 +2011,7 @@ // Generate a deterministically-derived seed from the item's path hash // to allow for cross-crate compilation to actually work - let mut field_shuffle_seed = self.def_path_hash(did).0.to_smaller_hash(); + let mut field_shuffle_seed = self.def_path_hash(did).0.to_smaller_hash().as_u64(); // If the user defined a custom seed for layout randomization, xor the item's // path hash with the user defined seed, this will allowing determinism while @@ -2433,10 +2330,10 @@ self.def_kind(trait_def_id) == DefKind::TraitAlias } - /// Returns layout of a generator. Layout might be unavailable if the - /// generator is tainted by errors. - pub fn generator_layout(self, def_id: DefId) -> Option<&'tcx GeneratorLayout<'tcx>> { - self.optimized_mir(def_id).generator_layout() + /// Returns layout of a coroutine. Layout might be unavailable if the + /// coroutine is tainted by errors. + pub fn coroutine_layout(self, def_id: DefId) -> Option<&'tcx CoroutineLayout<'tcx>> { + self.optimized_mir(def_id).coroutine_layout() } /// Given the `DefId` of an impl, returns the `DefId` of the trait it implements. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/opaque_types.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/opaque_types.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/opaque_types.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/opaque_types.rs 2023-12-21 16:55:28.000000000 +0000 @@ -152,14 +152,14 @@ Ty::new_closure(self.tcx, def_id, args) } - ty::Generator(def_id, args, movability) => { + ty::Coroutine(def_id, args, movability) => { let args = self.fold_closure_args(def_id, args); - Ty::new_generator(self.tcx, def_id, args, movability) + Ty::new_coroutine(self.tcx, def_id, args, movability) } - ty::GeneratorWitness(def_id, args) => { + ty::CoroutineWitness(def_id, args) => { let args = self.fold_closure_args(def_id, args); - Ty::new_generator_witness(self.tcx, def_id, args) + Ty::new_coroutine_witness(self.tcx, def_id, args) } ty::Param(param) => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/parameterized.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/parameterized.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/parameterized.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/parameterized.rs 2023-12-21 16:55:28.000000000 +0000 @@ -82,7 +82,7 @@ rustc_attr::Stability, rustc_hir::Constness, rustc_hir::Defaultness, - rustc_hir::GeneratorKind, + rustc_hir::CoroutineKind, rustc_hir::IsAsync, rustc_hir::LangItem, rustc_hir::def::DefKind, @@ -123,7 +123,7 @@ parameterized_over_tcx! 
{ crate::middle::exported_symbols::ExportedSymbol, crate::mir::Body, - crate::mir::GeneratorLayout, + crate::mir::CoroutineLayout, ty::Ty, ty::FnSig, ty::GenericPredicates, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/print/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/print/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/print/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/print/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,13 +10,10 @@ mod pretty; pub use self::pretty::*; -// FIXME(eddyb) false positive, the lifetime parameters are used with `P: Printer<...>`. -#[allow(unused_lifetimes)] -pub trait Print<'tcx, P> { - type Output; - type Error; +pub type PrintError = std::fmt::Error; - fn print(&self, cx: P) -> Result; +pub trait Print<'tcx, P> { + fn print(&self, cx: &mut P) -> Result<(), PrintError>; } /// Interface for outputting user-facing "type-system entities" @@ -29,81 +26,73 @@ // // FIXME(eddyb) find a better name; this is more general than "printing". pub trait Printer<'tcx>: Sized { - type Error; - - type Path; - type Region; - type Type; - type DynExistential; - type Const; - fn tcx<'a>(&'a self) -> TyCtxt<'tcx>; fn print_def_path( - self, + &mut self, def_id: DefId, args: &'tcx [GenericArg<'tcx>], - ) -> Result { + ) -> Result<(), PrintError> { self.default_print_def_path(def_id, args) } fn print_impl_path( - self, + &mut self, impl_def_id: DefId, args: &'tcx [GenericArg<'tcx>], self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result { + ) -> Result<(), PrintError> { self.default_print_impl_path(impl_def_id, args, self_ty, trait_ref) } - fn print_region(self, region: ty::Region<'tcx>) -> Result; + fn print_region(&mut self, region: ty::Region<'tcx>) -> Result<(), PrintError>; - fn print_type(self, ty: Ty<'tcx>) -> Result; + fn print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError>; fn print_dyn_existential( - self, + &mut self, predicates: &'tcx ty::List>, - ) -> Result; + ) -> Result<(), PrintError>; - fn print_const(self, ct: ty::Const<'tcx>) -> Result; + fn print_const(&mut self, ct: ty::Const<'tcx>) -> Result<(), PrintError>; - fn path_crate(self, cnum: CrateNum) -> Result; + fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError>; fn path_qualified( - self, + &mut self, self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result; + ) -> Result<(), PrintError>; fn path_append_impl( - self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result; + ) -> Result<(), PrintError>; fn path_append( - self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, - ) -> Result; + ) -> Result<(), PrintError>; fn path_generic_args( - self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, args: &[GenericArg<'tcx>], - ) -> Result; + ) -> Result<(), PrintError>; // Defaults (should not be overridden): #[instrument(skip(self), level = "debug")] fn default_print_def_path( - self, + &mut self, def_id: DefId, args: &'tcx [GenericArg<'tcx>], - ) -> Result { + ) -> Result<(), PrintError> { let key = self.tcx().def_key(def_id); debug!(?key); @@ -170,7 +159,7 @@ } self.path_append( - |cx: Self| { + 
|cx: &mut Self| { if trait_qualify_parent { let trait_ref = ty::TraitRef::new( cx.tcx(), @@ -189,12 +178,12 @@ } fn default_print_impl_path( - self, + &mut self, impl_def_id: DefId, _args: &'tcx [GenericArg<'tcx>], self_ty: Ty<'tcx>, impl_trait_ref: Option>, - ) -> Result { + ) -> Result<(), PrintError> { debug!( "default_print_impl_path: impl_def_id={:?}, self_ty={}, impl_trait_ref={:?}", impl_def_id, self_ty, impl_trait_ref @@ -270,8 +259,8 @@ ty::FnDef(def_id, _) | ty::Closure(def_id, _) - | ty::Generator(def_id, _, _) - | ty::GeneratorWitness(def_id, _) + | ty::Coroutine(def_id, _, _) + | ty::CoroutineWitness(def_id, _) | ty::Foreign(def_id) => Some(def_id), ty::Bool @@ -295,34 +284,25 @@ } impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for ty::Region<'tcx> { - type Output = P::Region; - type Error = P::Error; - fn print(&self, cx: P) -> Result { + fn print(&self, cx: &mut P) -> Result<(), PrintError> { cx.print_region(*self) } } impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for Ty<'tcx> { - type Output = P::Type; - type Error = P::Error; - - fn print(&self, cx: P) -> Result { + fn print(&self, cx: &mut P) -> Result<(), PrintError> { cx.print_type(*self) } } impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for &'tcx ty::List> { - type Output = P::DynExistential; - type Error = P::Error; - fn print(&self, cx: P) -> Result { + fn print(&self, cx: &mut P) -> Result<(), PrintError> { cx.print_dyn_existential(self) } } impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for ty::Const<'tcx> { - type Output = P::Const; - type Error = P::Error; - fn print(&self, cx: P) -> Result { + fn print(&self, cx: &mut P) -> Result<(), PrintError> { cx.print_const(*self) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/print/pretty.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/print/pretty.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/print/pretty.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/print/pretty.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,7 @@ use crate::mir::interpret::{AllocRange, GlobalAlloc, Pointer, Provenance, Scalar}; use crate::query::IntoQueryParam; use crate::query::Providers; +use crate::traits::util::supertraits_for_pretty_printing; use crate::ty::{ self, ConstInt, ParamConst, ScalarInt, Term, TermKind, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, @@ -41,10 +42,10 @@ write!(scoped_cx!(), $($data),+)? }; (@print($x:expr)) => { - scoped_cx!() = $x.print(scoped_cx!())? + $x.print(scoped_cx!())? }; (@$method:ident($($arg:expr),*)) => { - scoped_cx!() = scoped_cx!().$method($($arg),*)? + scoped_cx!().$method($($arg),*)? }; ($($elem:tt $(($($args:tt)*))?),+) => {{ $(p!(@ $elem $(($($args)*))?);)+ @@ -52,7 +53,6 @@ } macro_rules! define_scoped_cx { ($cx:ident) => { - #[allow(unused_macros)] macro_rules! scoped_cx { () => { $cx @@ -205,79 +205,69 @@ } /// Trait for printers that pretty-print using `fmt::Write` to the printer. -pub trait PrettyPrinter<'tcx>: - Printer< - 'tcx, - Error = fmt::Error, - Path = Self, - Region = Self, - Type = Self, - DynExistential = Self, - Const = Self, - > + fmt::Write -{ +pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { /// Like `print_def_path` but for value paths. 
fn print_value_path( - self, + &mut self, def_id: DefId, args: &'tcx [GenericArg<'tcx>], - ) -> Result { + ) -> Result<(), PrintError> { self.print_def_path(def_id, args) } - fn in_binder(self, value: &ty::Binder<'tcx, T>) -> Result + fn in_binder(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), PrintError> where - T: Print<'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable>, + T: Print<'tcx, Self> + TypeFoldable>, { value.as_ref().skip_binder().print(self) } - fn wrap_binder Result>( - self, + fn wrap_binder Result<(), fmt::Error>>( + &mut self, value: &ty::Binder<'tcx, T>, f: F, - ) -> Result + ) -> Result<(), PrintError> where - T: Print<'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable>, + T: Print<'tcx, Self> + TypeFoldable>, { f(value.as_ref().skip_binder(), self) } /// Prints comma-separated elements. - fn comma_sep(mut self, mut elems: impl Iterator) -> Result + fn comma_sep(&mut self, mut elems: impl Iterator) -> Result<(), PrintError> where - T: Print<'tcx, Self, Output = Self, Error = Self::Error>, + T: Print<'tcx, Self>, { if let Some(first) = elems.next() { - self = first.print(self)?; + first.print(self)?; for elem in elems { self.write_str(", ")?; - self = elem.print(self)?; + elem.print(self)?; } } - Ok(self) + Ok(()) } /// Prints `{f: t}` or `{f as t}` depending on the `cast` argument fn typed_value( - mut self, - f: impl FnOnce(Self) -> Result, - t: impl FnOnce(Self) -> Result, + &mut self, + f: impl FnOnce(&mut Self) -> Result<(), PrintError>, + t: impl FnOnce(&mut Self) -> Result<(), PrintError>, conversion: &str, - ) -> Result { + ) -> Result<(), PrintError> { self.write_str("{")?; - self = f(self)?; + f(self)?; self.write_str(conversion)?; - self = t(self)?; + t(self)?; self.write_str("}")?; - Ok(self) + Ok(()) } /// Prints `<...>` around what `f` prints. fn generic_delimiters( - self, - f: impl FnOnce(Self) -> Result, - ) -> Result; + &mut self, + f: impl FnOnce(&mut Self) -> Result<(), PrintError>, + ) -> Result<(), PrintError>; /// Returns `true` if the region should be printed in /// optional positions, e.g., `&'a T` or `dyn Tr + 'b`. @@ -291,9 +281,9 @@ /// If possible, this returns a global path resolving to `def_id` that is visible /// from at least one local module, and returns `true`. If the crate defining `def_id` is /// declared with an `extern crate`, the path is guaranteed to use the `extern crate`. - fn try_print_visible_def_path(self, def_id: DefId) -> Result<(Self, bool), Self::Error> { + fn try_print_visible_def_path(&mut self, def_id: DefId) -> Result { if NO_VISIBLE_PATH.with(|flag| flag.get()) { - return Ok((self, false)); + return Ok(false); } let mut callers = Vec::new(); @@ -305,10 +295,7 @@ // For enum variants, if they have an unique name, then we only print the name, otherwise we // print the enum name and the variant name. Otherwise, we do not print anything and let the // caller use the `print_def_path` fallback. 
- fn force_print_trimmed_def_path( - mut self, - def_id: DefId, - ) -> Result<(Self::Path, bool), Self::Error> { + fn force_print_trimmed_def_path(&mut self, def_id: DefId) -> Result { let key = self.tcx().def_key(def_id); let visible_parent_map = self.tcx().visible_parent_map(()); let kind = self.tcx().def_kind(def_id); @@ -319,8 +306,7 @@ && let DefPathData::TypeNs(_) = key.disambiguated_data.data && Some(*visible_parent) != actual_parent { - this - .tcx() + this.tcx() // FIXME(typed_def_id): Further propagate ModDefId .module_children(ModDefId::new_unchecked(*visible_parent)) .iter() @@ -337,7 +323,7 @@ { // If `Assoc` is unique, we don't want to talk about `Trait::Assoc`. self.write_str(get_local_name(&self, *symbol, def_id, key).as_str())?; - return Ok((self, true)); + return Ok(true); } if let Some(symbol) = key.get_opt_name() { if let DefKind::AssocConst | DefKind::AssocFn | DefKind::AssocTy = kind @@ -359,44 +345,46 @@ // the parent type in the path. For example, `Iterator::Item`. self.write_str(get_local_name(&self, symbol, parent, parent_key).as_str())?; self.write_str("::")?; - } else if let DefKind::Struct | DefKind::Union | DefKind::Enum | DefKind::Trait - | DefKind::TyAlias | DefKind::Fn | DefKind::Const | DefKind::Static(_) = kind + } else if let DefKind::Struct + | DefKind::Union + | DefKind::Enum + | DefKind::Trait + | DefKind::TyAlias + | DefKind::Fn + | DefKind::Const + | DefKind::Static(_) = kind { } else { // If not covered above, like for example items out of `impl` blocks, fallback. - return Ok((self, false)); + return Ok(false); } self.write_str(get_local_name(&self, symbol, def_id, key).as_str())?; - return Ok((self, true)); + return Ok(true); } - Ok((self, false)) + Ok(false) } /// Try to see if this path can be trimmed to a unique symbol name. - fn try_print_trimmed_def_path( - mut self, - def_id: DefId, - ) -> Result<(Self::Path, bool), Self::Error> { + fn try_print_trimmed_def_path(&mut self, def_id: DefId) -> Result { if FORCE_TRIMMED_PATH.with(|flag| flag.get()) { - let (s, trimmed) = self.force_print_trimmed_def_path(def_id)?; + let trimmed = self.force_print_trimmed_def_path(def_id)?; if trimmed { - return Ok((s, true)); + return Ok(true); } - self = s; } if !self.tcx().sess.opts.unstable_opts.trim_diagnostic_paths || matches!(self.tcx().sess.opts.trimmed_def_paths, TrimmedDefPaths::Never) || NO_TRIMMED_PATH.with(|flag| flag.get()) || SHOULD_PREFIX_WITH_CRATE.with(|flag| flag.get()) { - return Ok((self, false)); + return Ok(false); } match self.tcx().trimmed_def_paths(()).get(&def_id) { - None => Ok((self, false)), + None => Ok(false), Some(symbol) => { write!(self, "{}", Ident::with_dummy_span(*symbol))?; - Ok((self, true)) + Ok(true) } } } @@ -415,19 +403,18 @@ /// This method returns false if we can't print the visible path, so /// `print_def_path` can fall back on the item's real definition path. fn try_print_visible_def_path_recur( - mut self, + &mut self, def_id: DefId, callers: &mut Vec, - ) -> Result<(Self, bool), Self::Error> { - define_scoped_cx!(self); - + ) -> Result { debug!("try_print_visible_def_path: def_id={:?}", def_id); // If `def_id` is a direct or injected extern crate, return the // path to the crate followed by the path to the item within the crate. 
if let Some(cnum) = def_id.as_crate_root() { if cnum == LOCAL_CRATE { - return Ok((self.path_crate(cnum)?, true)); + self.path_crate(cnum)?; + return Ok(true); } // In local mode, when we encounter a crate other than @@ -450,7 +437,8 @@ // or avoid ending up with `ExternCrateSource::Extern`, // for the injected `std`/`core`. if span.is_dummy() { - return Ok((self.path_crate(cnum)?, true)); + self.path_crate(cnum)?; + return Ok(true); } // Disable `try_print_trimmed_def_path` behavior within @@ -458,23 +446,25 @@ // in cases where the `extern crate foo` has non-trivial // parents, e.g. it's nested in `impl foo::Trait for Bar` // (see also issues #55779 and #87932). - self = with_no_visible_paths!(self.print_def_path(def_id, &[])?); + with_no_visible_paths!(self.print_def_path(def_id, &[])?); - return Ok((self, true)); + return Ok(true); } (ExternCrateSource::Path, LOCAL_CRATE) => { - return Ok((self.path_crate(cnum)?, true)); + self.path_crate(cnum)?; + return Ok(true); } _ => {} }, None => { - return Ok((self.path_crate(cnum)?, true)); + self.path_crate(cnum)?; + return Ok(true); } } } if def_id.is_local() { - return Ok((self, false)); + return Ok(false); } let visible_parent_map = self.tcx().visible_parent_map(()); @@ -495,7 +485,7 @@ } let Some(visible_parent) = visible_parent_map.get(&def_id).cloned() else { - return Ok((self, false)); + return Ok(false); }; let actual_parent = self.tcx().opt_parent(def_id); @@ -558,7 +548,7 @@ *name = new_name; } else { // There is no name that is public and isn't `_`, so bail. - return Ok((self, false)); + return Ok(false); } } // Re-exported `extern crate` (#43189). @@ -570,7 +560,7 @@ debug!("try_print_visible_def_path: data={:?}", data); if callers.contains(&visible_parent) { - return Ok((self, false)); + return Ok(false); } callers.push(visible_parent); // HACK(eddyb) this bypasses `path_append`'s prefix printing to avoid @@ -578,19 +568,19 @@ // To support printers that do not implement `PrettyPrinter`, a `Vec` or // linked list on the stack would need to be built, before any printing. match self.try_print_visible_def_path_recur(visible_parent, callers)? { - (cx, false) => return Ok((cx, false)), - (cx, true) => self = cx, + false => return Ok(false), + true => {} } callers.pop(); - - Ok((self.path_append(Ok, &DisambiguatedDefPathData { data, disambiguator: 0 })?, true)) + self.path_append(|_| Ok(()), &DisambiguatedDefPathData { data, disambiguator: 0 })?; + Ok(true) } fn pretty_path_qualified( - self, + &mut self, self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result { + ) -> Result<(), PrintError> { if trait_ref.is_none() { // Inherent impls. 
Try to print `Foo::bar` for an inherent // impl on `Foo`, but fallback to `::bar` if self-type is @@ -611,26 +601,26 @@ } } - self.generic_delimiters(|mut cx| { + self.generic_delimiters(|cx| { define_scoped_cx!(cx); p!(print(self_ty)); if let Some(trait_ref) = trait_ref { p!(" as ", print(trait_ref.print_only_trait_path())); } - Ok(cx) + Ok(()) }) } fn pretty_path_append_impl( - mut self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result { - self = print_prefix(self)?; + ) -> Result<(), PrintError> { + print_prefix(self)?; - self.generic_delimiters(|mut cx| { + self.generic_delimiters(|cx| { define_scoped_cx!(cx); p!("impl "); @@ -639,11 +629,11 @@ } p!(print(self_ty)); - Ok(cx) + Ok(()) }) } - fn pretty_print_type(mut self, ty: Ty<'tcx>) -> Result { + fn pretty_print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError> { define_scoped_cx!(self); match *ty.kind() { @@ -689,7 +679,7 @@ ty::Infer(infer_ty) => { if self.should_print_verbose() { p!(write("{:?}", ty.kind())); - return Ok(self); + return Ok(()); } if let ty::TyVar(ty_vid) = infer_ty { @@ -706,7 +696,7 @@ ty::Param(ref param_ty) => p!(print(param_ty)), ty::Bound(debruijn, bound_ty) => match bound_ty.kind { ty::BoundTyKind::Anon => { - rustc_type_ir::debug_bound_var(&mut self, debruijn, bound_ty.var)? + rustc_type_ir::debug_bound_var(self, debruijn, bound_ty.var)? } ty::BoundTyKind::Param(_, s) => match self.should_print_verbose() { true => p!(write("{:?}", ty.kind())), @@ -761,7 +751,7 @@ if self.should_print_verbose() { // FIXME(eddyb) print this with `print_def_path`. p!(write("Opaque({:?}, {})", def_id, args.print_as_list())); - return Ok(self); + return Ok(()); } let parent = self.tcx().parent(def_id); @@ -776,17 +766,17 @@ // If the type alias directly starts with the `impl` of the // opaque type we're printing, then skip the `::{opaque#1}`. p!(print_def_path(parent, args)); - return Ok(self); + return Ok(()); } } // Complex opaque type, e.g. `type Foo = (i32, impl Debug);` p!(print_def_path(def_id, args)); - return Ok(self); + return Ok(()); } _ => { if with_no_queries() { p!(print_def_path(def_id, &[])); - return Ok(self); + return Ok(()); } else { return self.pretty_print_opaque_impl_type(def_id, args); } @@ -794,11 +784,11 @@ } } ty::Str => p!("str"), - ty::Generator(did, args, movability) => { + ty::Coroutine(did, args, movability) => { p!(write("{{")); - let generator_kind = self.tcx().generator_kind(did).unwrap(); + let coroutine_kind = self.tcx().coroutine_kind(did).unwrap(); let should_print_movability = - self.should_print_verbose() || generator_kind == hir::GeneratorKind::Gen; + self.should_print_verbose() || coroutine_kind == hir::CoroutineKind::Coroutine; if should_print_movability { match movability { @@ -808,7 +798,7 @@ } if !self.should_print_verbose() { - p!(write("{}", generator_kind)); + p!(write("{}", coroutine_kind)); // FIXME(eddyb) should use `def_span`. 
if let Some(did) = did.as_local() { let span = self.tcx().def_span(did); @@ -824,24 +814,24 @@ } else { p!(print_def_path(did, args)); p!(" upvar_tys=("); - if !args.as_generator().is_valid() { + if !args.as_coroutine().is_valid() { p!("unavailable"); } else { - self = self.comma_sep(args.as_generator().upvar_tys().iter())?; + self.comma_sep(args.as_coroutine().upvar_tys().iter())?; } p!(")"); - if args.as_generator().is_valid() { - p!(" ", print(args.as_generator().witness())); + if args.as_coroutine().is_valid() { + p!(" ", print(args.as_coroutine().witness())); } } p!("}}") } - ty::GeneratorWitness(did, args) => { + ty::CoroutineWitness(did, args) => { p!(write("{{")); if !self.tcx().sess.verbose() { - p!("generator witness"); + p!("coroutine witness"); // FIXME(eddyb) should use `def_span`. if let Some(did) = did.as_local() { let span = self.tcx().def_span(did); @@ -897,7 +887,7 @@ print(args.as_closure().sig_as_fn_ptr_ty()) ); p!(" upvar_tys=("); - self = self.comma_sep(args.as_closure().upvar_tys().iter())?; + self.comma_sep(args.as_closure().upvar_tys().iter())?; p!(")"); } } @@ -907,14 +897,14 @@ ty::Slice(ty) => p!("[", print(ty), "]"), } - Ok(self) + Ok(()) } fn pretty_print_opaque_impl_type( - mut self, + &mut self, def_id: DefId, args: &'tcx ty::List>, - ) -> Result { + ) -> Result<(), PrintError> { let tcx = self.tcx(); // Grab the "TraitA + TraitB" from `impl TraitA + TraitB`, @@ -972,7 +962,7 @@ write!(self, "{}", if first { "" } else { " + " })?; write!(self, "{}", if paren_needed { "(" } else { "" })?; - self = self.wrap_binder(&fn_once_trait_ref, |trait_ref, mut cx| { + self.wrap_binder(&fn_once_trait_ref, |trait_ref, cx| { define_scoped_cx!(cx); // Get the (single) generic ty (the args) of this FnOnce trait ref. let generics = tcx.generics_of(trait_ref.def_id); @@ -1029,7 +1019,7 @@ } } - Ok(cx) + Ok(()) })?; } @@ -1037,7 +1027,7 @@ for (trait_ref, assoc_items) in traits { write!(self, "{}", if first { "" } else { " + " })?; - self = self.wrap_binder(&trait_ref, |trait_ref, mut cx| { + self.wrap_binder(&trait_ref, |trait_ref, cx| { define_scoped_cx!(cx); p!(print(trait_ref.print_only_trait_name())); @@ -1058,16 +1048,16 @@ } for (assoc_item_def_id, term) in assoc_items { - // Skip printing `<{generator@} as Generator<_>>::Return` from async blocks, - // unless we can find out what generator return type it comes from. + // Skip printing `<{coroutine@} as Coroutine<_>>::Return` from async blocks, + // unless we can find out what coroutine return type it comes from. let term = if let Some(ty) = term.skip_binder().ty() && let ty::Alias(ty::Projection, proj) = ty.kind() && let Some(assoc) = tcx.opt_associated_item(proj.def_id) - && assoc.trait_container(tcx) == tcx.lang_items().gen_trait() + && assoc.trait_container(tcx) == tcx.lang_items().coroutine_trait() && assoc.name == rustc_span::sym::Return { - if let ty::Generator(_, args, _) = args.type_at(0).kind() { - let return_ty = args.as_generator().return_ty(); + if let ty::Coroutine(_, args, _) = args.type_at(0).kind() { + let return_ty = args.as_coroutine().return_ty(); if !return_ty.is_ty_var() { return_ty.into() } else { @@ -1101,7 +1091,7 @@ } first = false; - Ok(cx) + Ok(()) })?; } @@ -1114,22 +1104,24 @@ if !FORCE_TRIMMED_PATH.with(|flag| flag.get()) { for re in lifetimes { write!(self, " + ")?; - self = self.print_region(re)?; + self.print_region(re)?; } } if self.tcx().features().return_type_notation - && let Some(ty::ImplTraitInTraitData::Trait { fn_def_id, .. 
}) = self.tcx().opt_rpitit_info(def_id) - && let ty::Alias(_, alias_ty) = self.tcx().fn_sig(fn_def_id).skip_binder().output().skip_binder().kind() + && let Some(ty::ImplTraitInTraitData::Trait { fn_def_id, .. }) = + self.tcx().opt_rpitit_info(def_id) + && let ty::Alias(_, alias_ty) = + self.tcx().fn_sig(fn_def_id).skip_binder().output().skip_binder().kind() && alias_ty.def_id == def_id { let num_args = self.tcx().generics_of(fn_def_id).count(); write!(self, " {{ ")?; - self = self.print_def_path(fn_def_id, &args[..num_args])?; + self.print_def_path(fn_def_id, &args[..num_args])?; write!(self, "() }}")?; } - Ok(self) + Ok(()) } /// Insert the trait ref and optionally a projection type associated with it into either the @@ -1159,14 +1151,14 @@ entry.has_fn_once = true; return; } else if Some(trait_def_id) == self.tcx().lang_items().fn_mut_trait() { - let super_trait_ref = crate::traits::util::supertraits(self.tcx(), trait_ref) + let super_trait_ref = supertraits_for_pretty_printing(self.tcx(), trait_ref) .find(|super_trait_ref| super_trait_ref.def_id() == fn_once_trait) .unwrap(); fn_traits.entry(super_trait_ref).or_default().fn_mut_trait_ref = Some(trait_ref); return; } else if Some(trait_def_id) == self.tcx().lang_items().fn_trait() { - let super_trait_ref = crate::traits::util::supertraits(self.tcx(), trait_ref) + let super_trait_ref = supertraits_for_pretty_printing(self.tcx(), trait_ref) .find(|super_trait_ref| super_trait_ref.def_id() == fn_once_trait) .unwrap(); @@ -1180,9 +1172,9 @@ } fn pretty_print_inherent_projection( - self, + &mut self, alias_ty: &ty::AliasTy<'tcx>, - ) -> Result { + ) -> Result<(), PrintError> { let def_key = self.tcx().def_key(alias_ty.def_id); self.path_generic_args( |cx| { @@ -1199,19 +1191,19 @@ None } - fn const_infer_name(&self, _: ty::ConstVid<'tcx>) -> Option { + fn const_infer_name(&self, _: ty::ConstVid) -> Option { None } fn pretty_print_dyn_existential( - mut self, + &mut self, predicates: &'tcx ty::List>, - ) -> Result { + ) -> Result<(), PrintError> { // Generate the main trait ref, including associated types. let mut first = true; if let Some(principal) = predicates.principal() { - self = self.wrap_binder(&principal, |principal, mut cx| { + self.wrap_binder(&principal, |principal, cx| { define_scoped_cx!(cx); p!(print_def_path(principal.def_id, &[])); @@ -1251,8 +1243,8 @@ }); if !args.is_empty() || !projections.is_empty() { - p!(generic_delimiters(|mut cx| { - cx = cx.comma_sep(args.iter().copied())?; + p!(generic_delimiters(|cx| { + cx.comma_sep(args.iter().copied())?; if !args.is_empty() && !projections.is_empty() { write!(cx, ", ")?; } @@ -1260,7 +1252,7 @@ })); } } - Ok(cx) + Ok(()) })?; first = false; @@ -1291,15 +1283,15 @@ p!(print_def_path(def_id, &[])); } - Ok(self) + Ok(()) } fn pretty_fn_sig( - mut self, + &mut self, inputs: &[Ty<'tcx>], c_variadic: bool, output: Ty<'tcx>, - ) -> Result { + ) -> Result<(), PrintError> { define_scoped_cx!(self); p!("(", comma_sep(inputs.iter().copied())); @@ -1314,28 +1306,28 @@ p!(" -> ", print(output)); } - Ok(self) + Ok(()) } fn pretty_print_const( - mut self, + &mut self, ct: ty::Const<'tcx>, print_ty: bool, - ) -> Result { + ) -> Result<(), PrintError> { define_scoped_cx!(self); if self.should_print_verbose() { p!(write("{:?}", ct)); - return Ok(self); + return Ok(()); } macro_rules! 
print_underscore { () => {{ if print_ty { - self = self.typed_value( - |mut this| { + self.typed_value( + |this| { write!(this, "_")?; - Ok(this) + Ok(()) }, |this| this.print_type(ct.ty()), ": ", @@ -1364,27 +1356,29 @@ // cause printing to enter an infinite recursion if the anon const is in the self type i.e. // `impl Default for [T; 32 - 1 - 1 - 1] {` // where we would try to print `<[T; /* print `constant#0` again */] as Default>::{constant#0}` - p!(write("{}::{}", self.tcx().crate_name(def.krate), self.tcx().def_path(def).to_string_no_crate_verbose())) + p!(write( + "{}::{}", + self.tcx().crate_name(def.krate), + self.tcx().def_path(def).to_string_no_crate_verbose() + )) } } defkind => bug!("`{:?}` has unexpected defkind {:?}", ct, defkind), } } - ty::ConstKind::Infer(infer_ct) => { - match infer_ct { - ty::InferConst::Var(ct_vid) - if let Some(name) = self.const_infer_name(ct_vid) => - p!(write("{}", name)), - _ => print_underscore!(), + ty::ConstKind::Infer(infer_ct) => match infer_ct { + ty::InferConst::Var(ct_vid) if let Some(name) = self.const_infer_name(ct_vid) => { + p!(write("{}", name)) } - } + _ => print_underscore!(), + }, ty::ConstKind::Param(ParamConst { name, .. }) => p!(write("{}", name)), ty::ConstKind::Value(value) => { return self.pretty_print_const_valtree(value, ct.ty(), print_ty); } ty::ConstKind::Bound(debruijn, bound_var) => { - rustc_type_ir::debug_bound_var(&mut self, debruijn, bound_var)? + rustc_type_ir::debug_bound_var(self, debruijn, bound_var)? } ty::ConstKind::Placeholder(placeholder) => p!(write("{placeholder:?}")), // FIXME(generic_const_exprs): @@ -1392,14 +1386,14 @@ ty::ConstKind::Expr(_) => p!("{{const expr}}"), ty::ConstKind::Error(_) => p!("{{const error}}"), }; - Ok(self) + Ok(()) } fn pretty_print_const_scalar( - self, + &mut self, scalar: Scalar, ty: Ty<'tcx>, - ) -> Result { + ) -> Result<(), PrintError> { match scalar { Scalar::Ptr(ptr, _size) => self.pretty_print_const_scalar_ptr(ptr, ty), Scalar::Int(int) => { @@ -1409,10 +1403,10 @@ } fn pretty_print_const_scalar_ptr( - mut self, + &mut self, ptr: Pointer, ty: Ty<'tcx>, - ) -> Result { + ) -> Result<(), PrintError> { define_scoped_cx!(self); let (alloc_id, offset) = ptr.into_parts(); @@ -1443,7 +1437,7 @@ Some(GlobalAlloc::VTable(..)) => p!(""), None => p!(""), } - return Ok(self); + return Ok(()); } } } @@ -1454,27 +1448,27 @@ if let Some(GlobalAlloc::Function(instance)) = self.tcx().try_get_global_alloc(alloc_id) { - self = self.typed_value( + self.typed_value( |this| this.print_value_path(instance.def_id(), instance.args), |this| this.print_type(ty), " as ", )?; - return Ok(self); + return Ok(()); } } _ => {} } // Any pointer values not covered by a branch above - self = self.pretty_print_const_pointer(ptr, ty)?; - Ok(self) + self.pretty_print_const_pointer(ptr, ty)?; + Ok(()) } fn pretty_print_const_scalar_int( - mut self, + &mut self, int: ScalarInt, ty: Ty<'tcx>, print_ty: bool, - ) -> Result { + ) -> Result<(), PrintError> { define_scoped_cx!(self); match ty.kind() { @@ -1501,10 +1495,10 @@ // Pointer types ty::Ref(..) 
| ty::RawPtr(_) | ty::FnPtr(_) => { let data = int.assert_bits(self.tcx().data_layout.pointer_size); - self = self.typed_value( - |mut this| { + self.typed_value( + |this| { write!(this, "0x{data:x}")?; - Ok(this) + Ok(()) }, |this| this.print_type(ty), " as ", @@ -1512,57 +1506,57 @@ } // Nontrivial types with scalar bit representation _ => { - let print = |mut this: Self| { + let print = |this: &mut Self| { if int.size() == Size::ZERO { write!(this, "transmute(())")?; } else { write!(this, "transmute(0x{int:x})")?; } - Ok(this) + Ok(()) }; - self = if print_ty { + if print_ty { self.typed_value(print, |this| this.print_type(ty), ": ")? } else { print(self)? }; } } - Ok(self) + Ok(()) } /// This is overridden for MIR printing because we only want to hide alloc ids from users, not /// from MIR where it is actually useful. fn pretty_print_const_pointer( - self, + &mut self, _: Pointer, ty: Ty<'tcx>, - ) -> Result { + ) -> Result<(), PrintError> { self.typed_value( - |mut this| { + |this| { this.write_str("&_")?; - Ok(this) + Ok(()) }, |this| this.print_type(ty), ": ", ) } - fn pretty_print_byte_str(mut self, byte_str: &'tcx [u8]) -> Result { + fn pretty_print_byte_str(&mut self, byte_str: &'tcx [u8]) -> Result<(), PrintError> { write!(self, "b\"{}\"", byte_str.escape_ascii())?; - Ok(self) + Ok(()) } fn pretty_print_const_valtree( - mut self, + &mut self, valtree: ty::ValTree<'tcx>, ty: Ty<'tcx>, print_ty: bool, - ) -> Result { + ) -> Result<(), PrintError> { define_scoped_cx!(self); if self.should_print_verbose() { p!(write("ValTree({:?}: ", valtree), print(ty), ")"); - return Ok(self); + return Ok(()); } let u8_type = self.tcx().types.u8; @@ -1583,12 +1577,12 @@ bug!("expected to convert valtree to raw bytes for type {:?}", ty) }); p!(write("{:?}", String::from_utf8_lossy(bytes))); - return Ok(self); + return Ok(()); } _ => { p!("&"); p!(pretty_print_const_valtree(valtree, *inner_ty, print_ty)); - return Ok(self); + return Ok(()); } }, (ty::ValTree::Branch(_), ty::Array(t, _)) if *t == u8_type => { @@ -1597,7 +1591,7 @@ }); p!("*"); p!(pretty_print_byte_str(bytes)); - return Ok(self); + return Ok(()); } // Aggregates, printed as array/tuple/struct/variant construction syntax. (ty::ValTree::Branch(_), ty::Array(..) | ty::Tuple(..) 
| ty::Adt(..)) => { @@ -1616,10 +1610,10 @@ p!(")"); } ty::Adt(def, _) if def.variants().is_empty() => { - self = self.typed_value( - |mut this| { + self.typed_value( + |this| { write!(this, "unreachable()")?; - Ok(this) + Ok(()) }, |this| this.print_type(ty), ": ", @@ -1651,7 +1645,7 @@ } _ => unreachable!(), } - return Ok(self); + return Ok(()); } (ty::ValTree::Leaf(leaf), ty::Ref(_, inner_ty, _)) => { p!(write("&")); @@ -1674,18 +1668,15 @@ if print_ty { p!(": ", print(ty)); } - Ok(self) + Ok(()) } - fn pretty_closure_as_impl( - mut self, - closure: ty::ClosureArgs<'tcx>, - ) -> Result { + fn pretty_closure_as_impl(&mut self, closure: ty::ClosureArgs<'tcx>) -> Result<(), PrintError> { let sig = closure.sig(); let kind = closure.kind_ty().to_opt_closure_kind().unwrap_or(ty::ClosureKind::Fn); write!(self, "impl ")?; - self.wrap_binder(&sig, |sig, mut cx| { + self.wrap_binder(&sig, |sig, cx| { define_scoped_cx!(cx); p!(print(kind), "("); @@ -1701,7 +1692,7 @@ p!(" -> ", print(sig.output())); } - Ok(cx) + Ok(()) }) } @@ -1719,7 +1710,7 @@ let literal = tcx.lift(c).unwrap(); let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS); cx.print_alloc_ids = true; - let cx = cx.pretty_print_const(literal, print_types)?; + cx.pretty_print_const(literal, print_types)?; fmt.write_str(&cx.into_buffer())?; Ok(()) }) @@ -1748,7 +1739,7 @@ pub region_highlight_mode: RegionHighlightMode<'tcx>, pub ty_infer_name_resolver: Option Option + 'a>>, - pub const_infer_name_resolver: Option) -> Option + 'a>>, + pub const_infer_name_resolver: Option Option + 'a>>, } impl<'a, 'tcx> Deref for FmtPrinter<'a, 'tcx> { @@ -1770,6 +1761,16 @@ Self::new_with_limit(tcx, ns, limit) } + pub fn print_string( + tcx: TyCtxt<'tcx>, + ns: Namespace, + f: impl FnOnce(&mut Self) -> Result<(), PrintError>, + ) -> Result { + let mut c = FmtPrinter::new(tcx, ns); + f(&mut c)?; + Ok(c.into_buffer()) + } + pub fn new_with_limit(tcx: TyCtxt<'tcx>, ns: Namespace, type_length_limit: Limit) -> Self { FmtPrinter(Box::new(FmtPrinterData { tcx, @@ -1830,7 +1831,8 @@ let def_id = def_id.into_query_param(); let ns = guess_def_namespace(self, def_id); debug!("def_path_str: def_id={:?}, ns={:?}", def_id, ns); - FmtPrinter::new(self, ns).print_def_path(def_id, args).unwrap().into_buffer() + + FmtPrinter::print_string(self, ns, |cx| cx.print_def_path(def_id, args)).unwrap() } pub fn value_path_str_with_args( @@ -1841,7 +1843,8 @@ let def_id = def_id.into_query_param(); let ns = guess_def_namespace(self, def_id); debug!("value_path_str: def_id={:?}, ns={:?}", def_id, ns); - FmtPrinter::new(self, ns).print_value_path(def_id, args).unwrap().into_buffer() + + FmtPrinter::print_string(self, ns, |cx| cx.print_value_path(def_id, args)).unwrap() } } @@ -1853,34 +1856,24 @@ } impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> { - type Error = fmt::Error; - - type Path = Self; - type Region = Self; - type Type = Self; - type DynExistential = Self; - type Const = Self; - fn tcx<'a>(&'a self) -> TyCtxt<'tcx> { self.tcx } fn print_def_path( - mut self, + &mut self, def_id: DefId, args: &'tcx [GenericArg<'tcx>], - ) -> Result { - define_scoped_cx!(self); - + ) -> Result<(), PrintError> { if args.is_empty() { match self.try_print_trimmed_def_path(def_id)? { - (cx, true) => return Ok(cx), - (cx, false) => self = cx, + true => return Ok(()), + false => {} } match self.try_print_visible_def_path(def_id)? 
{ - (cx, true) => return Ok(cx), - (cx, false) => self = cx, + true => return Ok(()), + false => {} } } @@ -1901,7 +1894,7 @@ let parent_def_id = DefId { index: key.parent.unwrap(), ..def_id }; let span = self.tcx.def_span(def_id); - self = self.print_def_path(parent_def_id, &[])?; + self.print_def_path(parent_def_id, &[])?; // HACK(eddyb) copy of `path_append` to avoid // constructing a `DisambiguatedDefPathData`. @@ -1917,40 +1910,40 @@ )?; self.empty_path = false; - return Ok(self); + return Ok(()); } } self.default_print_def_path(def_id, args) } - fn print_region(self, region: ty::Region<'tcx>) -> Result { + fn print_region(&mut self, region: ty::Region<'tcx>) -> Result<(), PrintError> { self.pretty_print_region(region) } - fn print_type(mut self, ty: Ty<'tcx>) -> Result { + fn print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError> { if self.type_length_limit.value_within_limit(self.printed_type_count) { self.printed_type_count += 1; self.pretty_print_type(ty) } else { self.truncated = true; write!(self, "...")?; - Ok(self) + Ok(()) } } fn print_dyn_existential( - self, + &mut self, predicates: &'tcx ty::List>, - ) -> Result { + ) -> Result<(), PrintError> { self.pretty_print_dyn_existential(predicates) } - fn print_const(self, ct: ty::Const<'tcx>) -> Result { + fn print_const(&mut self, ct: ty::Const<'tcx>) -> Result<(), PrintError> { self.pretty_print_const(ct, false) } - fn path_crate(mut self, cnum: CrateNum) -> Result { + fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.empty_path = true; if cnum == LOCAL_CRATE { if self.tcx.sess.at_least_rust_2018() { @@ -1964,52 +1957,52 @@ write!(self, "{}", self.tcx.crate_name(cnum))?; self.empty_path = false; } - Ok(self) + Ok(()) } fn path_qualified( - mut self, + &mut self, self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result { - self = self.pretty_path_qualified(self_ty, trait_ref)?; + ) -> Result<(), PrintError> { + self.pretty_path_qualified(self_ty, trait_ref)?; self.empty_path = false; - Ok(self) + Ok(()) } fn path_append_impl( - mut self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, _disambiguated_data: &DisambiguatedDefPathData, self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result { - self = self.pretty_path_append_impl( - |mut cx| { - cx = print_prefix(cx)?; + ) -> Result<(), PrintError> { + self.pretty_path_append_impl( + |cx| { + print_prefix(cx)?; if !cx.empty_path { write!(cx, "::")?; } - Ok(cx) + Ok(()) }, self_ty, trait_ref, )?; self.empty_path = false; - Ok(self) + Ok(()) } fn path_append( - mut self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, - ) -> Result { - self = print_prefix(self)?; + ) -> Result<(), PrintError> { + print_prefix(self)?; // Skip `::{{extern}}` blocks and `::{{constructor}}` on tuple/unit structs. 
if let DefPathData::ForeignMod | DefPathData::Ctor = disambiguated_data.data { - return Ok(self); + return Ok(()); } let name = disambiguated_data.data.name(); @@ -2024,19 +2017,19 @@ } let verbose = self.should_print_verbose(); - disambiguated_data.fmt_maybe_verbose(&mut self, verbose)?; + disambiguated_data.fmt_maybe_verbose(self, verbose)?; self.empty_path = false; - Ok(self) + Ok(()) } fn path_generic_args( - mut self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, args: &[GenericArg<'tcx>], - ) -> Result { - self = print_prefix(self)?; + ) -> Result<(), PrintError> { + print_prefix(self)?; let tcx = self.tcx; @@ -2070,7 +2063,7 @@ } self.generic_delimiters(|cx| cx.comma_sep(args.into_iter())) } else { - Ok(self) + Ok(()) } } } @@ -2084,68 +2077,68 @@ self.printed_type_count = 0; } - fn const_infer_name(&self, id: ty::ConstVid<'tcx>) -> Option { + fn const_infer_name(&self, id: ty::ConstVid) -> Option { self.0.const_infer_name_resolver.as_ref().and_then(|func| func(id)) } fn print_value_path( - mut self, + &mut self, def_id: DefId, args: &'tcx [GenericArg<'tcx>], - ) -> Result { + ) -> Result<(), PrintError> { let was_in_value = std::mem::replace(&mut self.in_value, true); - self = self.print_def_path(def_id, args)?; + self.print_def_path(def_id, args)?; self.in_value = was_in_value; - Ok(self) + Ok(()) } - fn in_binder(self, value: &ty::Binder<'tcx, T>) -> Result + fn in_binder(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), PrintError> where - T: Print<'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable>, + T: Print<'tcx, Self> + TypeFoldable>, { self.pretty_in_binder(value) } - fn wrap_binder Result>( - self, + fn wrap_binder Result<(), PrintError>>( + &mut self, value: &ty::Binder<'tcx, T>, f: C, - ) -> Result + ) -> Result<(), PrintError> where - T: Print<'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable>, + T: Print<'tcx, Self> + TypeFoldable>, { self.pretty_wrap_binder(value, f) } fn typed_value( - mut self, - f: impl FnOnce(Self) -> Result, - t: impl FnOnce(Self) -> Result, + &mut self, + f: impl FnOnce(&mut Self) -> Result<(), PrintError>, + t: impl FnOnce(&mut Self) -> Result<(), PrintError>, conversion: &str, - ) -> Result { + ) -> Result<(), PrintError> { self.write_str("{")?; - self = f(self)?; + f(self)?; self.write_str(conversion)?; let was_in_value = std::mem::replace(&mut self.in_value, false); - self = t(self)?; + t(self)?; self.in_value = was_in_value; self.write_str("}")?; - Ok(self) + Ok(()) } fn generic_delimiters( - mut self, - f: impl FnOnce(Self) -> Result, - ) -> Result { + &mut self, + f: impl FnOnce(&mut Self) -> Result<(), PrintError>, + ) -> Result<(), PrintError> { write!(self, "<")?; let was_in_value = std::mem::replace(&mut self.in_value, false); - let mut inner = f(self)?; - inner.in_value = was_in_value; + f(self)?; + self.in_value = was_in_value; - write!(inner, ">")?; - Ok(inner) + write!(self, ">")?; + Ok(()) } fn should_print_region(&self, region: ty::Region<'tcx>) -> bool { @@ -2194,18 +2187,18 @@ } fn pretty_print_const_pointer( - self, + &mut self, p: Pointer, ty: Ty<'tcx>, - ) -> Result { - let print = |mut this: Self| { + ) -> Result<(), PrintError> { + let print = |this: &mut Self| { define_scoped_cx!(this); if this.print_alloc_ids { p!(write("{:?}", p)); } else { p!("&_"); } - Ok(this) + Ok(()) }; self.typed_value(print, |this| this.print_type(ty), ": ") } @@ -2213,19 +2206,19 @@ // HACK(eddyb) limited to `FmtPrinter` because of 
`region_highlight_mode`. impl<'tcx> FmtPrinter<'_, 'tcx> { - pub fn pretty_print_region(mut self, region: ty::Region<'tcx>) -> Result { + pub fn pretty_print_region(&mut self, region: ty::Region<'tcx>) -> Result<(), fmt::Error> { define_scoped_cx!(self); // Watch out for region highlights. let highlight = self.region_highlight_mode; if let Some(n) = highlight.region_highlighted(region) { p!(write("'{}", n)); - return Ok(self); + return Ok(()); } if self.should_print_verbose() { p!(write("{:?}", region)); - return Ok(self); + return Ok(()); } let identify_regions = self.tcx.sess.opts.unstable_opts.identify_regions; @@ -2238,7 +2231,7 @@ ty::ReEarlyBound(ref data) => { if data.name != kw::Empty { p!(write("{}", data.name)); - return Ok(self); + return Ok(()); } } ty::ReLateBound(_, ty::BoundRegion { kind: br, .. }) @@ -2246,34 +2239,36 @@ | ty::RePlaceholder(ty::Placeholder { bound: ty::BoundRegion { kind: br, .. }, .. }) => { - if let ty::BrNamed(_, name) = br && br.is_named() { + if let ty::BrNamed(_, name) = br + && br.is_named() + { p!(write("{}", name)); - return Ok(self); + return Ok(()); } if let Some((region, counter)) = highlight.highlight_bound_region { if br == region { p!(write("'{}", counter)); - return Ok(self); + return Ok(()); } } } ty::ReVar(region_vid) if identify_regions => { p!(write("{:?}", region_vid)); - return Ok(self); + return Ok(()); } ty::ReVar(_) => {} ty::ReErased => {} ty::ReError(_) => {} ty::ReStatic => { p!("'static"); - return Ok(self); + return Ok(()); } } p!("'_"); - Ok(self) + Ok(()) } } @@ -2356,11 +2351,11 @@ // `region_index` and `used_region_names`. impl<'tcx> FmtPrinter<'_, 'tcx> { pub fn name_all_regions( - mut self, + &mut self, value: &ty::Binder<'tcx, T>, - ) -> Result<(Self, T, BTreeMap>), fmt::Error> + ) -> Result<(T, BTreeMap>), fmt::Error> where - T: Print<'tcx, Self, Output = Self, Error = fmt::Error> + TypeFoldable>, + T: Print<'tcx, Self> + TypeFoldable>, { fn name_by_region_index( index: usize, @@ -2401,8 +2396,6 @@ let _ = write!(cx, "{cont}"); }; - define_scoped_cx!(self); - let possible_names = ('a'..='z').rev().map(|s| Symbol::intern(&format!("'{s}"))); let mut available_names = possible_names @@ -2433,10 +2426,10 @@ // anyways. 
let (new_value, map) = if self.should_print_verbose() { for var in value.bound_vars().iter() { - start_or_continue(&mut self, "for<", ", "); + start_or_continue(self, "for<", ", "); write!(self, "{var:?}")?; } - start_or_continue(&mut self, "", "> "); + start_or_continue(self, "", "> "); (value.clone().skip_binder(), BTreeMap::default()) } else { let tcx = self.tcx; @@ -2500,8 +2493,8 @@ }; if !trim_path { - start_or_continue(&mut self, "for<", ", "); - do_continue(&mut self, name); + start_or_continue(self, "for<", ", "); + do_continue(self, name); } ty::Region::new_late_bound( tcx, @@ -2518,42 +2511,42 @@ let new_value = value.clone().skip_binder().fold_with(&mut folder); let region_map = folder.region_map; if !trim_path { - start_or_continue(&mut self, "", "> "); + start_or_continue(self, "", "> "); } (new_value, region_map) }; self.binder_depth += 1; self.region_index = region_index; - Ok((self, new_value, map)) + Ok((new_value, map)) } - pub fn pretty_in_binder(self, value: &ty::Binder<'tcx, T>) -> Result + pub fn pretty_in_binder(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), fmt::Error> where - T: Print<'tcx, Self, Output = Self, Error = fmt::Error> + TypeFoldable>, + T: Print<'tcx, Self> + TypeFoldable>, { let old_region_index = self.region_index; - let (new, new_value, _) = self.name_all_regions(value)?; - let mut inner = new_value.print(new)?; - inner.region_index = old_region_index; - inner.binder_depth -= 1; - Ok(inner) + let (new_value, _) = self.name_all_regions(value)?; + new_value.print(self)?; + self.region_index = old_region_index; + self.binder_depth -= 1; + Ok(()) } - pub fn pretty_wrap_binder Result>( - self, + pub fn pretty_wrap_binder Result<(), fmt::Error>>( + &mut self, value: &ty::Binder<'tcx, T>, f: C, - ) -> Result + ) -> Result<(), fmt::Error> where - T: Print<'tcx, Self, Output = Self, Error = fmt::Error> + TypeFoldable>, + T: Print<'tcx, Self> + TypeFoldable>, { let old_region_index = self.region_index; - let (new, new_value, _) = self.name_all_regions(value)?; - let mut inner = f(&new_value, new)?; - inner.region_index = old_region_index; - inner.binder_depth -= 1; - Ok(inner) + let (new_value, _) = self.name_all_regions(value)?; + f(&new_value, self)?; + self.region_index = old_region_index; + self.binder_depth -= 1; + Ok(()) } fn prepare_region_info(&mut self, value: &ty::Binder<'tcx, T>) @@ -2611,66 +2604,25 @@ impl<'tcx, T, P: PrettyPrinter<'tcx>> Print<'tcx, P> for ty::Binder<'tcx, T> where - T: Print<'tcx, P, Output = P, Error = P::Error> + TypeFoldable>, + T: Print<'tcx, P> + TypeFoldable>, { - type Output = P; - type Error = P::Error; - - fn print(&self, cx: P) -> Result { + fn print(&self, cx: &mut P) -> Result<(), PrintError> { cx.in_binder(self) } } impl<'tcx, T, U, P: PrettyPrinter<'tcx>> Print<'tcx, P> for ty::OutlivesPredicate where - T: Print<'tcx, P, Output = P, Error = P::Error>, - U: Print<'tcx, P, Output = P, Error = P::Error>, + T: Print<'tcx, P>, + U: Print<'tcx, P>, { - type Output = P; - type Error = P::Error; - fn print(&self, mut cx: P) -> Result { + fn print(&self, cx: &mut P) -> Result<(), PrintError> { define_scoped_cx!(cx); p!(print(self.0), ": ", print(self.1)); - Ok(cx) + Ok(()) } } -macro_rules! 
forward_display_to_print { - ($($ty:ty),+) => { - // Some of the $ty arguments may not actually use 'tcx - $(#[allow(unused_lifetimes)] impl<'tcx> fmt::Display for $ty { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - ty::tls::with(|tcx| { - let cx = tcx.lift(*self) - .expect("could not lift for printing") - .print(FmtPrinter::new(tcx, Namespace::TypeNS))?; - f.write_str(&cx.into_buffer())?; - Ok(()) - }) - } - })+ - }; -} - -macro_rules! define_print_and_forward_display { - (($self:ident, $cx:ident): $($ty:ty $print:block)+) => { - $(impl<'tcx, P: PrettyPrinter<'tcx>> Print<'tcx, P> for $ty { - type Output = P; - type Error = fmt::Error; - fn print(&$self, $cx: P) -> Result { - #[allow(unused_mut)] - let mut $cx = $cx; - define_scoped_cx!($cx); - let _: () = $print; - #[allow(unreachable_code)] - Ok($cx) - } - })+ - - forward_display_to_print!($($ty),+); - }; -} - /// Wrapper type for `ty::TraitRef` which opts-in to pretty printing only /// the trait path. That is, it will print `Trait` instead of /// `>`. @@ -2745,6 +2697,43 @@ pub closure: ty::ClosureArgs<'tcx>, } +macro_rules! forward_display_to_print { + ($($ty:ty),+) => { + // Some of the $ty arguments may not actually use 'tcx + $(#[allow(unused_lifetimes)] impl<'tcx> fmt::Display for $ty { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + ty::tls::with(|tcx| { + let mut cx = FmtPrinter::new(tcx, Namespace::TypeNS); + tcx.lift(*self) + .expect("could not lift for printing") + .print(&mut cx)?; + f.write_str(&cx.into_buffer())?; + Ok(()) + }) + } + })+ + }; +} + +macro_rules! define_print { + (($self:ident, $cx:ident): $($ty:ty $print:block)+) => { + $(impl<'tcx, P: PrettyPrinter<'tcx>> Print<'tcx, P> for $ty { + fn print(&$self, $cx: &mut P) -> Result<(), PrintError> { + define_scoped_cx!($cx); + let _: () = $print; + Ok(()) + } + })+ + }; +} + +macro_rules! define_print_and_forward_display { + (($self:ident, $cx:ident): $($ty:ty $print:block)+) => { + define_print!(($self, $cx): $($ty $print)*); + forward_display_to_print!($($ty),+); + }; +} + forward_display_to_print! { ty::Region<'tcx>, Ty<'tcx>, @@ -2765,6 +2754,51 @@ ty::OutlivesPredicate, ty::Region<'tcx>> } +define_print! 
{ + (self, cx): + + ty::ClauseKind<'tcx> { + match *self { + ty::ClauseKind::Trait(ref data) => { + p!(print(data)) + } + ty::ClauseKind::RegionOutlives(predicate) => p!(print(predicate)), + ty::ClauseKind::TypeOutlives(predicate) => p!(print(predicate)), + ty::ClauseKind::Projection(predicate) => p!(print(predicate)), + ty::ClauseKind::ConstArgHasType(ct, ty) => { + p!("the constant `", print(ct), "` has type `", print(ty), "`") + }, + ty::ClauseKind::WellFormed(arg) => p!(print(arg), " well-formed"), + ty::ClauseKind::ConstEvaluatable(ct) => { + p!("the constant `", print(ct), "` can be evaluated") + } + } + } + + ty::PredicateKind<'tcx> { + match *self { + ty::PredicateKind::Clause(data) => { + p!(print(data)) + } + ty::PredicateKind::Subtype(predicate) => p!(print(predicate)), + ty::PredicateKind::Coerce(predicate) => p!(print(predicate)), + ty::PredicateKind::ObjectSafe(trait_def_id) => { + p!("the trait `", print_def_path(trait_def_id, &[]), "` is object-safe") + } + ty::PredicateKind::ClosureKind(closure_def_id, _closure_args, kind) => p!( + "the closure `", + print_value_path(closure_def_id, &[]), + write("` implements the trait `{}`", kind) + ), + ty::PredicateKind::ConstEquate(c1, c2) => { + p!("the constant `", print(c1), "` equals `", print(c2), "`") + } + ty::PredicateKind::Ambiguous => p!("ambiguous"), + ty::PredicateKind::AliasRelate(t1, t2, dir) => p!(print(t1), write(" {} ", dir), print(t2)), + } + } +} + define_print_and_forward_display! { (self, cx): @@ -2893,55 +2927,13 @@ } ty::Predicate<'tcx> { - let binder = self.kind(); - p!(print(binder)) + p!(print(self.kind())) } ty::Clause<'tcx> { p!(print(self.kind())) } - ty::ClauseKind<'tcx> { - match *self { - ty::ClauseKind::Trait(ref data) => { - p!(print(data)) - } - ty::ClauseKind::RegionOutlives(predicate) => p!(print(predicate)), - ty::ClauseKind::TypeOutlives(predicate) => p!(print(predicate)), - ty::ClauseKind::Projection(predicate) => p!(print(predicate)), - ty::ClauseKind::ConstArgHasType(ct, ty) => { - p!("the constant `", print(ct), "` has type `", print(ty), "`") - }, - ty::ClauseKind::WellFormed(arg) => p!(print(arg), " well-formed"), - ty::ClauseKind::ConstEvaluatable(ct) => { - p!("the constant `", print(ct), "` can be evaluated") - } - } - } - - ty::PredicateKind<'tcx> { - match *self { - ty::PredicateKind::Clause(data) => { - p!(print(data)) - } - ty::PredicateKind::Subtype(predicate) => p!(print(predicate)), - ty::PredicateKind::Coerce(predicate) => p!(print(predicate)), - ty::PredicateKind::ObjectSafe(trait_def_id) => { - p!("the trait `", print_def_path(trait_def_id, &[]), "` is object-safe") - } - ty::PredicateKind::ClosureKind(closure_def_id, _closure_args, kind) => p!( - "the closure `", - print_value_path(closure_def_id, &[]), - write("` implements the trait `{}`", kind) - ), - ty::PredicateKind::ConstEquate(c1, c2) => { - p!("the constant `", print(c1), "` equals `", print(c2), "`") - } - ty::PredicateKind::Ambiguous => p!("ambiguous"), - ty::PredicateKind::AliasRelate(t1, t2, dir) => p!(print(t1), write(" {} ", dir), print(t2)), - } - } - GenericArg<'tcx> { match self.unpack() { GenericArgKind::Lifetime(lt) => p!(print(lt)), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/relate.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/relate.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/relate.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/relate.rs 2023-12-21 16:55:28.000000000 +0000 @@ 
-8,6 +8,7 @@ use crate::ty::{self, Expr, ImplSubject, Term, TermKind, Ty, TyCtxt, TypeFoldable}; use crate::ty::{GenericArg, GenericArgKind, GenericArgsRef}; use rustc_hir as hir; +use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; use rustc_target::spec::abi; use std::iter; @@ -134,7 +135,7 @@ } #[inline] -pub fn relate_args<'tcx, R: TypeRelation<'tcx>>( +pub fn relate_args_invariantly<'tcx, R: TypeRelation<'tcx>>( relation: &mut R, a_arg: GenericArgsRef<'tcx>, b_arg: GenericArgsRef<'tcx>, @@ -273,8 +274,21 @@ if a.def_id != b.def_id { Err(TypeError::ProjectionMismatched(expected_found(relation, a.def_id, b.def_id))) } else { - let args = relation.relate(a.args, b.args)?; - Ok(relation.tcx().mk_alias_ty(a.def_id, args)) + let args = match relation.tcx().def_kind(a.def_id) { + DefKind::OpaqueTy => relate_args_with_variances( + relation, + a.def_id, + relation.tcx().variances_of(a.def_id), + a.args, + b.args, + false, // do not fetch `type_of(a_def_id)`, as it will cause a cycle + )?, + DefKind::AssocTy | DefKind::AssocConst | DefKind::TyAlias => { + relate_args_invariantly(relation, a.args, b.args)? + } + def => bug!("unknown alias DefKind: {def:?}"), + }; + Ok(ty::AliasTy::new(relation.tcx(), a.def_id, args)) } } } @@ -315,7 +329,7 @@ if a.def_id != b.def_id { Err(TypeError::Traits(expected_found(relation, a.def_id, b.def_id))) } else { - let args = relate_args(relation, a.args, b.args)?; + let args = relate_args_invariantly(relation, a.args, b.args)?; Ok(ty::TraitRef::new(relation.tcx(), a.def_id, args)) } } @@ -331,26 +345,26 @@ if a.def_id != b.def_id { Err(TypeError::Traits(expected_found(relation, a.def_id, b.def_id))) } else { - let args = relate_args(relation, a.args, b.args)?; + let args = relate_args_invariantly(relation, a.args, b.args)?; Ok(ty::ExistentialTraitRef { def_id: a.def_id, args }) } } } #[derive(PartialEq, Copy, Debug, Clone, TypeFoldable, TypeVisitable)] -struct GeneratorWitness<'tcx>(&'tcx ty::List>); +struct CoroutineWitness<'tcx>(&'tcx ty::List>); -impl<'tcx> Relate<'tcx> for GeneratorWitness<'tcx> { +impl<'tcx> Relate<'tcx> for CoroutineWitness<'tcx> { fn relate>( relation: &mut R, - a: GeneratorWitness<'tcx>, - b: GeneratorWitness<'tcx>, - ) -> RelateResult<'tcx, GeneratorWitness<'tcx>> { + a: CoroutineWitness<'tcx>, + b: CoroutineWitness<'tcx>, + ) -> RelateResult<'tcx, CoroutineWitness<'tcx>> { assert_eq!(a.0.len(), b.0.len()); let tcx = relation.tcx(); let types = tcx.mk_type_list_from_iter(iter::zip(a.0, b.0).map(|(a, b)| relation.relate(a, b)))?; - Ok(GeneratorWitness(types)) + Ok(CoroutineWitness(types)) } } @@ -443,31 +457,31 @@ Ok(Ty::new_dynamic(tcx, relation.relate(a_obj, b_obj)?, region_bound, a_repr)) } - (&ty::Generator(a_id, a_args, movability), &ty::Generator(b_id, b_args, _)) + (&ty::Coroutine(a_id, a_args, movability), &ty::Coroutine(b_id, b_args, _)) if a_id == b_id => { - // All Generator types with the same id represent - // the (anonymous) type of the same generator expression. So + // All Coroutine types with the same id represent + // the (anonymous) type of the same coroutine expression. So // all of their regions should be equated. 
- let args = relation.relate(a_args, b_args)?; - Ok(Ty::new_generator(tcx, a_id, args, movability)) + let args = relate_args_invariantly(relation, a_args, b_args)?; + Ok(Ty::new_coroutine(tcx, a_id, args, movability)) } - (&ty::GeneratorWitness(a_id, a_args), &ty::GeneratorWitness(b_id, b_args)) + (&ty::CoroutineWitness(a_id, a_args), &ty::CoroutineWitness(b_id, b_args)) if a_id == b_id => { - // All GeneratorWitness types with the same id represent - // the (anonymous) type of the same generator expression. So + // All CoroutineWitness types with the same id represent + // the (anonymous) type of the same coroutine expression. So // all of their regions should be equated. - let args = relation.relate(a_args, b_args)?; - Ok(Ty::new_generator_witness(tcx, a_id, args)) + let args = relate_args_invariantly(relation, a_args, b_args)?; + Ok(Ty::new_coroutine_witness(tcx, a_id, args)) } (&ty::Closure(a_id, a_args), &ty::Closure(b_id, b_args)) if a_id == b_id => { // All Closure types with the same id represent // the (anonymous) type of the same closure expression. So // all of their regions should be equated. - let args = relation.relate(a_args, b_args)?; + let args = relate_args_invariantly(relation, a_args, b_args)?; Ok(Ty::new_closure(tcx, a_id, &args)) } @@ -536,24 +550,6 @@ Ok(Ty::new_fn_ptr(tcx, fty)) } - // The args of opaque types may not all be invariant, so we have - // to treat them separately from other aliases. - ( - &ty::Alias(ty::Opaque, ty::AliasTy { def_id: a_def_id, args: a_args, .. }), - &ty::Alias(ty::Opaque, ty::AliasTy { def_id: b_def_id, args: b_args, .. }), - ) if a_def_id == b_def_id => { - let opt_variances = tcx.variances_of(a_def_id); - let args = relate_args_with_variances( - relation, - a_def_id, - opt_variances, - a_args, - b_args, - false, // do not fetch `type_of(a_def_id)`, as it will cause a cycle - )?; - Ok(Ty::new_opaque(tcx, a_def_id, args)) - } - // Alias tend to mostly already be handled downstream due to normalization. 
(&ty::Alias(a_kind, a_data), &ty::Alias(b_kind, b_data)) => { let alias_ty = relation.relate(a_data, b_data)?; @@ -709,19 +705,19 @@ a: ty::ClosureArgs<'tcx>, b: ty::ClosureArgs<'tcx>, ) -> RelateResult<'tcx, ty::ClosureArgs<'tcx>> { - let args = relate_args(relation, a.args, b.args)?; + let args = relate_args_invariantly(relation, a.args, b.args)?; Ok(ty::ClosureArgs { args }) } } -impl<'tcx> Relate<'tcx> for ty::GeneratorArgs<'tcx> { +impl<'tcx> Relate<'tcx> for ty::CoroutineArgs<'tcx> { fn relate>( relation: &mut R, - a: ty::GeneratorArgs<'tcx>, - b: ty::GeneratorArgs<'tcx>, - ) -> RelateResult<'tcx, ty::GeneratorArgs<'tcx>> { - let args = relate_args(relation, a.args, b.args)?; - Ok(ty::GeneratorArgs { args }) + a: ty::CoroutineArgs<'tcx>, + b: ty::CoroutineArgs<'tcx>, + ) -> RelateResult<'tcx, ty::CoroutineArgs<'tcx>> { + let args = relate_args_invariantly(relation, a.args, b.args)?; + Ok(ty::CoroutineArgs { args }) } } @@ -731,7 +727,7 @@ a: GenericArgsRef<'tcx>, b: GenericArgsRef<'tcx>, ) -> RelateResult<'tcx, GenericArgsRef<'tcx>> { - relate_args(relation, a, b) + relate_args_invariantly(relation, a, b) } } @@ -834,19 +830,6 @@ }) } } - -impl<'tcx> Relate<'tcx> for ty::ProjectionPredicate<'tcx> { - fn relate>( - relation: &mut R, - a: ty::ProjectionPredicate<'tcx>, - b: ty::ProjectionPredicate<'tcx>, - ) -> RelateResult<'tcx, ty::ProjectionPredicate<'tcx>> { - Ok(ty::ProjectionPredicate { - projection_ty: relation.relate(a.projection_ty, b.projection_ty)?, - term: relation.relate(a.term, b.term)?, - }) - } -} /////////////////////////////////////////////////////////////////////////// // Error handling diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/structural_impls.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/structural_impls.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/structural_impls.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/structural_impls.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,7 +10,7 @@ use crate::ty::{self, AliasTy, InferConst, Lift, Term, TermKind, Ty, TyCtxt}; use rustc_hir::def::Namespace; use rustc_target::abi::TyAndLayout; -use rustc_type_ir::{ConstKind, DebugWithInfcx, InferCtxtLike, OptWithInfcx}; +use rustc_type_ir::{ConstKind, DebugWithInfcx, InferCtxtLike, WithInfcx}; use std::fmt::{self, Debug}; use std::ops::ControlFlow; @@ -22,11 +22,10 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { ty::tls::with(|tcx| { with_no_trimmed_paths!({ - f.write_str( - &FmtPrinter::new(tcx, Namespace::TypeNS) - .print_def_path(self.def_id, &[])? - .into_buffer(), - ) + let s = FmtPrinter::print_string(tcx, Namespace::TypeNS, |cx| { + cx.print_def_path(self.def_id, &[]) + })?; + f.write_str(&s) }) }) } @@ -36,11 +35,10 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { ty::tls::with(|tcx| { with_no_trimmed_paths!({ - f.write_str( - &FmtPrinter::new(tcx, Namespace::TypeNS) - .print_def_path(self.did(), &[])? 
- .into_buffer(), - ) + let s = FmtPrinter::print_string(tcx, Namespace::TypeNS, |cx| { + cx.print_def_path(self.did(), &[]) + })?; + f.write_str(&s) }) }) } @@ -89,12 +87,12 @@ impl<'tcx> fmt::Debug for ty::FnSig<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - OptWithInfcx::new_no_ctx(self).fmt(f) + WithInfcx::with_no_infcx(self).fmt(f) } } impl<'tcx> DebugWithInfcx> for ty::FnSig<'tcx> { - fn fmt>>( - this: OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { let sig = this.data; @@ -130,18 +128,6 @@ } } -impl<'tcx> fmt::Debug for ty::ConstVid<'tcx> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "?{}c", self.index) - } -} - -impl fmt::Debug for ty::EffectVid<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "?{}e", self.index) - } -} - impl<'tcx> fmt::Debug for ty::TraitRef<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { with_no_trimmed_paths!(fmt::Display::fmt(self, f)) @@ -149,8 +135,8 @@ } impl<'tcx> ty::DebugWithInfcx> for Ty<'tcx> { - fn fmt>>( - this: OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { this.data.fmt(f) @@ -199,51 +185,14 @@ } } -impl<'tcx> fmt::Debug for ty::ClauseKind<'tcx> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - ty::ClauseKind::ConstArgHasType(ct, ty) => write!(f, "ConstArgHasType({ct:?}, {ty:?})"), - ty::ClauseKind::Trait(ref a) => a.fmt(f), - ty::ClauseKind::RegionOutlives(ref pair) => pair.fmt(f), - ty::ClauseKind::TypeOutlives(ref pair) => pair.fmt(f), - ty::ClauseKind::Projection(ref pair) => pair.fmt(f), - ty::ClauseKind::WellFormed(ref data) => write!(f, "WellFormed({data:?})"), - ty::ClauseKind::ConstEvaluatable(ct) => { - write!(f, "ConstEvaluatable({ct:?})") - } - } - } -} - -impl<'tcx> fmt::Debug for ty::PredicateKind<'tcx> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - ty::PredicateKind::Clause(ref a) => a.fmt(f), - ty::PredicateKind::Subtype(ref pair) => pair.fmt(f), - ty::PredicateKind::Coerce(ref pair) => pair.fmt(f), - ty::PredicateKind::ObjectSafe(trait_def_id) => { - write!(f, "ObjectSafe({trait_def_id:?})") - } - ty::PredicateKind::ClosureKind(closure_def_id, closure_args, kind) => { - write!(f, "ClosureKind({closure_def_id:?}, {closure_args:?}, {kind:?})") - } - ty::PredicateKind::ConstEquate(c1, c2) => write!(f, "ConstEquate({c1:?}, {c2:?})"), - ty::PredicateKind::Ambiguous => write!(f, "Ambiguous"), - ty::PredicateKind::AliasRelate(t1, t2, dir) => { - write!(f, "AliasRelate({t1:?}, {dir:?}, {t2:?})") - } - } - } -} - impl<'tcx> fmt::Debug for AliasTy<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - OptWithInfcx::new_no_ctx(self).fmt(f) + WithInfcx::with_no_infcx(self).fmt(f) } } impl<'tcx> DebugWithInfcx> for AliasTy<'tcx> { - fn fmt>>( - this: OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { f.debug_struct("AliasTy") @@ -253,7 +202,7 @@ } } -impl<'tcx> fmt::Debug for ty::InferConst<'tcx> { +impl fmt::Debug for ty::InferConst { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { InferConst::Var(var) => write!(f, "{var:?}"), @@ -262,17 +211,17 @@ } } } -impl<'tcx> DebugWithInfcx> for ty::InferConst<'tcx> { - fn fmt>>( - this: 
OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, +impl<'tcx> DebugWithInfcx> for ty::InferConst { + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { use ty::InferConst::*; - match this.infcx.and_then(|infcx| infcx.universe_of_ct(*this.data)) { + match this.infcx.universe_of_ct(*this.data) { None => write!(f, "{:?}", this.data), Some(universe) => match *this.data { - Var(vid) => write!(f, "?{}_{}c", vid.index, universe.index()), - EffectVar(vid) => write!(f, "?{}_{}e", vid.index, universe.index()), + Var(vid) => write!(f, "?{}_{}c", vid.index(), universe.index()), + EffectVar(vid) => write!(f, "?{}_{}e", vid.index(), universe.index()), Fresh(_) => { unreachable!() } @@ -283,12 +232,12 @@ impl<'tcx> fmt::Debug for ty::consts::Expr<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - OptWithInfcx::new_no_ctx(self).fmt(f) + WithInfcx::with_no_infcx(self).fmt(f) } } impl<'tcx> DebugWithInfcx> for ty::consts::Expr<'tcx> { - fn fmt>>( - this: OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { match this.data { @@ -316,12 +265,12 @@ impl<'tcx> fmt::Debug for ty::UnevaluatedConst<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - OptWithInfcx::new_no_ctx(self).fmt(f) + WithInfcx::with_no_infcx(self).fmt(f) } } impl<'tcx> DebugWithInfcx> for ty::UnevaluatedConst<'tcx> { - fn fmt>>( - this: OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { f.debug_struct("UnevaluatedConst") @@ -333,12 +282,12 @@ impl<'tcx> fmt::Debug for ty::Const<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - OptWithInfcx::new_no_ctx(self).fmt(f) + WithInfcx::with_no_infcx(self).fmt(f) } } impl<'tcx> DebugWithInfcx> for ty::Const<'tcx> { - fn fmt>>( - this: OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { // If this is a value, we spend some effort to make it look nice. 
@@ -350,9 +299,8 @@ let ConstKind::Value(valtree) = lifted.kind() else { bug!("we checked that this is a valtree") }; - let cx = FmtPrinter::new(tcx, Namespace::ValueNS); - let cx = - cx.pretty_print_const_valtree(valtree, lifted.ty(), /*print_ty*/ true)?; + let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS); + cx.pretty_print_const_valtree(valtree, lifted.ty(), /*print_ty*/ true)?; f.write_str(&cx.into_buffer()) }); } @@ -395,8 +343,8 @@ } } impl<'tcx> DebugWithInfcx> for GenericArg<'tcx> { - fn fmt>>( - this: OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { match this.data.unpack() { @@ -413,8 +361,8 @@ } } impl<'tcx> DebugWithInfcx> for Region<'tcx> { - fn fmt>>( - this: OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { write!(f, "{:?}", &this.map(|data| data.kind())) @@ -422,11 +370,11 @@ } impl<'tcx> DebugWithInfcx> for ty::RegionVid { - fn fmt>>( - this: OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { - match this.infcx.and_then(|infcx| infcx.universe_of_lt(*this.data)) { + match this.infcx.universe_of_lt(*this.data) { Some(universe) => write!(f, "'?{}_{}", this.data.index(), universe.index()), None => write!(f, "{:?}", this.data), } @@ -434,8 +382,8 @@ } impl<'tcx, T: DebugWithInfcx>> DebugWithInfcx> for ty::Binder<'tcx, T> { - fn fmt>>( - this: OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { f.debug_tuple("Binder") @@ -501,7 +449,6 @@ crate::ty::IntVarValue, crate::ty::adjustment::PointerCoercion, crate::ty::RegionVid, - crate::ty::UniverseIndex, crate::ty::Variance, ::rustc_span::Span, ::rustc_span::symbol::Ident, @@ -518,7 +465,6 @@ ::rustc_hir::Mutability, ::rustc_hir::Unsafety, ::rustc_target::spec::abi::Abi, - crate::ty::AliasRelationDirection, crate::ty::ClosureKind, crate::ty::ParamConst, crate::ty::ParamTy, @@ -654,11 +600,11 @@ ty::Ref(r, ty, mutbl) => { ty::Ref(r.try_fold_with(folder)?, ty.try_fold_with(folder)?, mutbl) } - ty::Generator(did, args, movability) => { - ty::Generator(did, args.try_fold_with(folder)?, movability) + ty::Coroutine(did, args, movability) => { + ty::Coroutine(did, args.try_fold_with(folder)?, movability) } - ty::GeneratorWitness(did, args) => { - ty::GeneratorWitness(did, args.try_fold_with(folder)?) + ty::CoroutineWitness(did, args) => { + ty::CoroutineWitness(did, args.try_fold_with(folder)?) 
} ty::Closure(did, args) => ty::Closure(did, args.try_fold_with(folder)?), ty::Alias(kind, data) => ty::Alias(kind, data.try_fold_with(folder)?), @@ -706,8 +652,8 @@ r.visit_with(visitor)?; ty.visit_with(visitor) } - ty::Generator(_did, ref args, _) => args.visit_with(visitor), - ty::GeneratorWitness(_did, ref args) => args.visit_with(visitor), + ty::Coroutine(_did, ref args, _) => args.visit_with(visitor), + ty::CoroutineWitness(_did, ref args) => args.visit_with(visitor), ty::Closure(_did, ref args) => args.visit_with(visitor), ty::Alias(_, ref data) => data.visit_with(visitor), @@ -865,7 +811,7 @@ } } -impl<'tcx> TypeFoldable> for InferConst<'tcx> { +impl<'tcx> TypeFoldable> for InferConst { fn try_fold_with>>( self, _folder: &mut F, @@ -874,7 +820,7 @@ } } -impl<'tcx> TypeVisitable> for InferConst<'tcx> { +impl<'tcx> TypeVisitable> for InferConst { fn visit_with>>( &self, _visitor: &mut V, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/sty.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/sty.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/sty.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/sty.rs 2023-12-21 16:55:28.000000000 +0000 @@ -29,17 +29,18 @@ use std::borrow::Cow; use std::cmp::Ordering; use std::fmt; -use std::marker::PhantomData; use std::ops::{ControlFlow, Deref, Range}; use ty::util::IntTypeExt; -use rustc_type_ir::sty::TyKind::*; +use rustc_type_ir::ClauseKind as IrClauseKind; use rustc_type_ir::CollectAndApply; use rustc_type_ir::ConstKind as IrConstKind; use rustc_type_ir::DebugWithInfcx; use rustc_type_ir::DynKind; +use rustc_type_ir::PredicateKind as IrPredicateKind; use rustc_type_ir::RegionKind as IrRegionKind; use rustc_type_ir::TyKind as IrTyKind; +use rustc_type_ir::TyKind::*; use super::GenericParamDefKind; @@ -48,6 +49,8 @@ pub type TyKind<'tcx> = IrTyKind>; pub type RegionKind<'tcx> = IrRegionKind>; pub type ConstKind<'tcx> = IrConstKind>; +pub type PredicateKind<'tcx> = IrPredicateKind>; +pub type ClauseKind<'tcx> = IrClauseKind>; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] #[derive(HashStable, TypeFoldable, TypeVisitable, Lift)] @@ -215,20 +218,20 @@ /// closure C (which would then require fixed point iteration to /// handle). Plus it fixes an ICE. :P /// -/// ## Generators +/// ## Coroutines /// -/// Generators are handled similarly in `GeneratorArgs`. The set of +/// Coroutines are handled similarly in `CoroutineArgs`. The set of /// type parameters is similar, but `CK` and `CS` are replaced by the /// following type parameters: /// -/// * `GS`: The generator's "resume type", which is the type of the +/// * `GS`: The coroutine's "resume type", which is the type of the /// argument passed to `resume`, and the type of `yield` expressions -/// inside the generator. +/// inside the coroutine. /// * `GY`: The "yield type", which is the type of values passed to -/// `yield` inside the generator. +/// `yield` inside the coroutine. /// * `GR`: The "return type", which is the type of value returned upon -/// completion of the generator. -/// * `GW`: The "generator witness". +/// completion of the coroutine. +/// * `GW`: The "coroutine witness". #[derive(Copy, Clone, PartialEq, Eq, Debug, TypeFoldable, TypeVisitable, Lift)] pub struct ClosureArgs<'tcx> { /// Lifetime and type parameters from the enclosing function, @@ -352,11 +355,11 @@ /// Similar to `ClosureArgs`; see the above documentation for more. 
#[derive(Copy, Clone, PartialEq, Eq, Debug, TypeFoldable, TypeVisitable)] -pub struct GeneratorArgs<'tcx> { +pub struct CoroutineArgs<'tcx> { pub args: GenericArgsRef<'tcx>, } -pub struct GeneratorArgsParts<'tcx, T> { +pub struct CoroutineArgsParts<'tcx, T> { pub parent_args: &'tcx [GenericArg<'tcx>], pub resume_ty: T, pub yield_ty: T, @@ -365,14 +368,14 @@ pub tupled_upvars_ty: T, } -impl<'tcx> GeneratorArgs<'tcx> { - /// Construct `GeneratorArgs` from `GeneratorArgsParts`, containing `Args` - /// for the generator parent, alongside additional generator-specific components. +impl<'tcx> CoroutineArgs<'tcx> { + /// Construct `CoroutineArgs` from `CoroutineArgsParts`, containing `Args` + /// for the coroutine parent, alongside additional coroutine-specific components. pub fn new( tcx: TyCtxt<'tcx>, - parts: GeneratorArgsParts<'tcx, Ty<'tcx>>, - ) -> GeneratorArgs<'tcx> { - GeneratorArgs { + parts: CoroutineArgsParts<'tcx, Ty<'tcx>>, + ) -> CoroutineArgs<'tcx> { + CoroutineArgs { args: tcx.mk_args_from_iter( parts.parent_args.iter().copied().chain( [ @@ -389,12 +392,12 @@ } } - /// Divides the generator args into their respective components. - /// The ordering assumed here must match that used by `GeneratorArgs::new` above. - fn split(self) -> GeneratorArgsParts<'tcx, GenericArg<'tcx>> { + /// Divides the coroutine args into their respective components. + /// The ordering assumed here must match that used by `CoroutineArgs::new` above. + fn split(self) -> CoroutineArgsParts<'tcx, GenericArg<'tcx>> { match self.args[..] { [ref parent_args @ .., resume_ty, yield_ty, return_ty, witness, tupled_upvars_ty] => { - GeneratorArgsParts { + CoroutineArgsParts { parent_args, resume_ty, yield_ty, @@ -403,34 +406,34 @@ tupled_upvars_ty, } } - _ => bug!("generator args missing synthetics"), + _ => bug!("coroutine args missing synthetics"), } } /// Returns `true` only if enough of the synthetic types are known to - /// allow using all of the methods on `GeneratorArgs` without panicking. + /// allow using all of the methods on `CoroutineArgs` without panicking. /// - /// Used primarily by `ty::print::pretty` to be able to handle generator + /// Used primarily by `ty::print::pretty` to be able to handle coroutine /// types that haven't had their synthetic types substituted in. pub fn is_valid(self) -> bool { self.args.len() >= 5 && matches!(self.split().tupled_upvars_ty.expect_ty().kind(), Tuple(_)) } - /// Returns the substitutions of the generator's parent. + /// Returns the substitutions of the coroutine's parent. pub fn parent_args(self) -> &'tcx [GenericArg<'tcx>] { self.split().parent_args } - /// This describes the types that can be contained in a generator. + /// This describes the types that can be contained in a coroutine. /// It will be a type variable initially and unified in the last stages of typeck of a body. - /// It contains a tuple of all the types that could end up on a generator frame. + /// It contains a tuple of all the types that could end up on a coroutine frame. /// The state transformation MIR pass may only produce layouts which mention types /// in this tuple. Upvars are not counted here. pub fn witness(self) -> Ty<'tcx> { self.split().witness.expect_ty() } - /// Returns an iterator over the list of types of captured paths by the generator. + /// Returns an iterator over the list of types of captured paths by the coroutine. /// In case there was a type error in figuring out the types of the captured path, an /// empty iterator is returned. 
#[inline] @@ -443,28 +446,28 @@ } } - /// Returns the tuple type representing the upvars for this generator. + /// Returns the tuple type representing the upvars for this coroutine. #[inline] pub fn tupled_upvars_ty(self) -> Ty<'tcx> { self.split().tupled_upvars_ty.expect_ty() } - /// Returns the type representing the resume type of the generator. + /// Returns the type representing the resume type of the coroutine. pub fn resume_ty(self) -> Ty<'tcx> { self.split().resume_ty.expect_ty() } - /// Returns the type representing the yield type of the generator. + /// Returns the type representing the yield type of the coroutine. pub fn yield_ty(self) -> Ty<'tcx> { self.split().yield_ty.expect_ty() } - /// Returns the type representing the return type of the generator. + /// Returns the type representing the return type of the coroutine. pub fn return_ty(self) -> Ty<'tcx> { self.split().return_ty.expect_ty() } - /// Returns the "generator signature", which consists of its yield + /// Returns the "coroutine signature", which consists of its yield /// and return types. /// /// N.B., some bits of the code prefers to see this wrapped in a @@ -474,7 +477,7 @@ ty::Binder::dummy(self.sig()) } - /// Returns the "generator signature", which consists of its resume, yield + /// Returns the "coroutine signature", which consists of its resume, yield /// and return types. pub fn sig(self) -> GenSig<'tcx> { ty::GenSig { @@ -485,23 +488,23 @@ } } -impl<'tcx> GeneratorArgs<'tcx> { - /// Generator has not been resumed yet. +impl<'tcx> CoroutineArgs<'tcx> { + /// Coroutine has not been resumed yet. pub const UNRESUMED: usize = 0; - /// Generator has returned or is completed. + /// Coroutine has returned or is completed. pub const RETURNED: usize = 1; - /// Generator has been poisoned. + /// Coroutine has been poisoned. pub const POISONED: usize = 2; const UNRESUMED_NAME: &'static str = "Unresumed"; const RETURNED_NAME: &'static str = "Returned"; const POISONED_NAME: &'static str = "Panicked"; - /// The valid variant indices of this generator. + /// The valid variant indices of this coroutine. #[inline] pub fn variant_range(&self, def_id: DefId, tcx: TyCtxt<'tcx>) -> Range { // FIXME requires optimized MIR - FIRST_VARIANT..tcx.generator_layout(def_id).unwrap().variant_fields.next_index() + FIRST_VARIANT..tcx.coroutine_layout(def_id).unwrap().variant_fields.next_index() } /// The discriminant for the given variant. Panics if the `variant_index` is @@ -513,13 +516,13 @@ tcx: TyCtxt<'tcx>, variant_index: VariantIdx, ) -> Discr<'tcx> { - // Generators don't support explicit discriminant values, so they are + // Coroutines don't support explicit discriminant values, so they are // the same as the variant index. assert!(self.variant_range(def_id, tcx).contains(&variant_index)); Discr { val: variant_index.as_usize() as u128, ty: self.discr_ty(tcx) } } - /// The set of all discriminants for the generator, enumerated with their + /// The set of all discriminants for the coroutine, enumerated with their /// variant indices. #[inline] pub fn discriminants( @@ -543,15 +546,15 @@ } } - /// The type of the state discriminant used in the generator type. + /// The type of the state discriminant used in the coroutine type. #[inline] pub fn discr_ty(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { tcx.types.u32 } /// This returns the types of the MIR locals which had to be stored across suspension points. - /// It is calculated in rustc_mir_transform::generator::StateTransform. 
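
The constants and discriminant helpers above encode a simple rule: a coroutine's state discriminant is just its variant index, held in a plain `u32`, with three reserved states ahead of the per-suspend-point variants. A tiny illustration of that rule, independent of the compiler's types:

// Reserved coroutine state indices, as documented in the hunk above.
const UNRESUMED: usize = 0; // never resumed yet
const RETURNED: usize = 1;  // ran to completion
const POISONED: usize = 2;  // panicked during a resume

// Coroutines have no user-written discriminants, so the discriminant of a
// variant is simply its index widened to the discriminant's representation.
fn discriminant_for_variant(variant_index: usize) -> u128 {
    variant_index as u128
}

fn main() {
    assert_eq!(discriminant_for_variant(UNRESUMED), 0);
    assert_eq!(discriminant_for_variant(RETURNED), 1);
    assert_eq!(discriminant_for_variant(POISONED), 2);
    // Suspend-point variants follow, starting at index 3.
    assert_eq!(discriminant_for_variant(3), 3);
}
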
- /// All the types here must be in the tuple in GeneratorInterior. + /// It is calculated in rustc_mir_transform::coroutine::StateTransform. + /// All the types here must be in the tuple in CoroutineInterior. /// /// The locals are grouped by their variant number. Note that some locals may /// be repeated in multiple variants. @@ -561,7 +564,7 @@ def_id: DefId, tcx: TyCtxt<'tcx>, ) -> impl Iterator> + Captures<'tcx>> { - let layout = tcx.generator_layout(def_id).unwrap(); + let layout = tcx.coroutine_layout(def_id).unwrap(); layout.variant_fields.iter().map(move |variant| { variant.iter().map(move |field| { ty::EarlyBinder::bind(layout.field_tys[*field].ty).instantiate(tcx, self.args) @@ -569,7 +572,7 @@ }) } - /// This is the types of the fields of a generator which are not stored in a + /// This is the types of the fields of a coroutine which are not stored in a /// variant. #[inline] pub fn prefix_tys(self) -> &'tcx List> { @@ -580,18 +583,18 @@ #[derive(Debug, Copy, Clone, HashStable)] pub enum UpvarArgs<'tcx> { Closure(GenericArgsRef<'tcx>), - Generator(GenericArgsRef<'tcx>), + Coroutine(GenericArgsRef<'tcx>), } impl<'tcx> UpvarArgs<'tcx> { - /// Returns an iterator over the list of types of captured paths by the closure/generator. + /// Returns an iterator over the list of types of captured paths by the closure/coroutine. /// In case there was a type error in figuring out the types of the captured path, an /// empty iterator is returned. #[inline] pub fn upvar_tys(self) -> &'tcx List> { let tupled_tys = match self { UpvarArgs::Closure(args) => args.as_closure().tupled_upvars_ty(), - UpvarArgs::Generator(args) => args.as_generator().tupled_upvars_ty(), + UpvarArgs::Coroutine(args) => args.as_coroutine().tupled_upvars_ty(), }; match tupled_tys.kind() { @@ -606,7 +609,7 @@ pub fn tupled_upvars_ty(self) -> Ty<'tcx> { match self { UpvarArgs::Closure(args) => args.as_closure().tupled_upvars_ty(), - UpvarArgs::Generator(args) => args.as_generator().tupled_upvars_ty(), + UpvarArgs::Coroutine(args) => args.as_coroutine().tupled_upvars_ty(), } } } @@ -683,8 +686,8 @@ } impl<'tcx> DebugWithInfcx> for ExistentialPredicate<'tcx> { - fn fmt>>( - this: rustc_type_ir::OptWithInfcx<'_, TyCtxt<'tcx>, InfCtx, &Self>, + fn fmt>>( + this: rustc_type_ir::WithInfcx<'_, Infcx, &Self>, f: &mut core::fmt::Formatter<'_>, ) -> core::fmt::Result { fmt::Debug::fmt(&this.data, f) @@ -725,7 +728,7 @@ self.rebind(tr).with_self_ty(tcx, self_ty).to_predicate(tcx) } ExistentialPredicate::Projection(p) => { - ty::Clause::from_projection_clause(tcx, self.rebind(p.with_self_ty(tcx, self_ty))) + self.rebind(p.with_self_ty(tcx, self_ty)).to_predicate(tcx) } ExistentialPredicate::AutoTrait(did) => { let generics = tcx.generics_of(did); @@ -1213,14 +1216,28 @@ pub def_id: DefId, /// This field exists to prevent the creation of `AliasTy` without using - /// [TyCtxt::mk_alias_ty]. - pub(super) _use_mk_alias_ty_instead: (), + /// [AliasTy::new]. 
+ _use_alias_ty_new_instead: (), } impl<'tcx> AliasTy<'tcx> { + pub fn new( + tcx: TyCtxt<'tcx>, + def_id: DefId, + args: impl IntoIterator>>, + ) -> ty::AliasTy<'tcx> { + let args = tcx.check_and_mk_args(def_id, args); + ty::AliasTy { def_id, args, _use_alias_ty_new_instead: () } + } + pub fn kind(self, tcx: TyCtxt<'tcx>) -> ty::AliasKind { match tcx.def_kind(self.def_id) { - DefKind::AssocTy if let DefKind::Impl { of_trait: false } = tcx.def_kind(tcx.parent(self.def_id)) => ty::Inherent, + DefKind::AssocTy + if let DefKind::Impl { of_trait: false } = + tcx.def_kind(tcx.parent(self.def_id)) => + { + ty::Inherent + } DefKind::AssocTy => ty::Projection, DefKind::OpaqueTy => ty::Opaque, DefKind::TyAlias => ty::Weak, @@ -1240,7 +1257,7 @@ } pub fn with_self_ty(self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> Self { - tcx.mk_alias_ty(self.def_id, [self_ty.into()].into_iter().chain(self.args.iter().skip(1))) + AliasTy::new(tcx, self.def_id, [self_ty.into()].into_iter().chain(self.args.iter().skip(1))) } } @@ -1569,26 +1586,22 @@ } } -/// A **`const`** **v**ariable **ID**. -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[derive(HashStable, TyEncodable, TyDecodable)] -pub struct ConstVid<'tcx> { - pub index: u32, - pub phantom: PhantomData<&'tcx ()>, +rustc_index::newtype_index! { + /// A **`const`** **v**ariable **ID**. + #[debug_format = "?{}c"] + pub struct ConstVid {} } -/// An **effect** **v**ariable **ID**. -/// -/// Handling effect infer variables happens separately from const infer variables -/// because we do not want to reuse any of the const infer machinery. If we try to -/// relate an effect variable with a normal one, we would ICE, which can catch bugs -/// where we are not correctly using the effect var for an effect param. Fallback -/// is also implemented on top of having separate effect and normal const variables. -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[derive(TyEncodable, TyDecodable)] -pub struct EffectVid<'tcx> { - pub index: u32, - pub phantom: PhantomData<&'tcx ()>, +rustc_index::newtype_index! { + /// An **effect** **v**ariable **ID**. + /// + /// Handling effect infer variables happens separately from const infer variables + /// because we do not want to reuse any of the const infer machinery. If we try to + /// relate an effect variable with a normal one, we would ICE, which can catch bugs + /// where we are not correctly using the effect var for an effect param. Fallback + /// is also implemented on top of having separate effect and normal const variables. + #[debug_format = "?{}e"] + pub struct EffectVid {} } rustc_index::newtype_index! 
{ @@ -1662,8 +1675,11 @@ debug_assert!(!self_ty.has_escaping_bound_vars()); ty::ProjectionPredicate { - projection_ty: tcx - .mk_alias_ty(self.def_id, [self_ty.into()].into_iter().chain(self.args)), + projection_ty: AliasTy::new( + tcx, + self.def_id, + [self_ty.into()].into_iter().chain(self.args), + ), term: self.term, } } @@ -1966,7 +1982,7 @@ #[inline] pub fn new_opaque(tcx: TyCtxt<'tcx>, def_id: DefId, args: GenericArgsRef<'tcx>) -> Ty<'tcx> { - Ty::new_alias(tcx, ty::Opaque, tcx.mk_alias_ty(def_id, args)) + Ty::new_alias(tcx, ty::Opaque, AliasTy::new(tcx, def_id, args)) } /// Constructs a `TyKind::Error` type with current `ErrorGuaranteed` @@ -2130,7 +2146,7 @@ item_def_id: DefId, args: impl IntoIterator>>, ) -> Ty<'tcx> { - Ty::new_alias(tcx, ty::Projection, tcx.mk_alias_ty(item_def_id, args)) + Ty::new_alias(tcx, ty::Projection, AliasTy::new(tcx, item_def_id, args)) } #[inline] @@ -2148,27 +2164,27 @@ } #[inline] - pub fn new_generator( + pub fn new_coroutine( tcx: TyCtxt<'tcx>, def_id: DefId, - generator_args: GenericArgsRef<'tcx>, + coroutine_args: GenericArgsRef<'tcx>, movability: hir::Movability, ) -> Ty<'tcx> { debug_assert_eq!( - generator_args.len(), + coroutine_args.len(), tcx.generics_of(tcx.typeck_root_def_id(def_id)).count() + 5, - "generator constructed with incorrect number of substitutions" + "coroutine constructed with incorrect number of substitutions" ); - Ty::new(tcx, Generator(def_id, generator_args, movability)) + Ty::new(tcx, Coroutine(def_id, coroutine_args, movability)) } #[inline] - pub fn new_generator_witness( + pub fn new_coroutine_witness( tcx: TyCtxt<'tcx>, id: DefId, args: GenericArgsRef<'tcx>, ) -> Ty<'tcx> { - Ty::new(tcx, GeneratorWitness(id, args)) + Ty::new(tcx, CoroutineWitness(id, args)) } // misc @@ -2478,8 +2494,8 @@ } #[inline] - pub fn is_generator(self) -> bool { - matches!(self.kind(), Generator(..)) + pub fn is_coroutine(self) -> bool { + matches!(self.kind(), Coroutine(..)) } #[inline] @@ -2635,13 +2651,13 @@ /// If the type contains variants, returns the valid range of variant indices. // - // FIXME: This requires the optimized MIR in the case of generators. + // FIXME: This requires the optimized MIR in the case of coroutines. #[inline] pub fn variant_range(self, tcx: TyCtxt<'tcx>) -> Option> { match self.kind() { TyKind::Adt(adt, _) => Some(adt.variant_range()), - TyKind::Generator(def_id, args, _) => { - Some(args.as_generator().variant_range(*def_id, tcx)) + TyKind::Coroutine(def_id, args, _) => { + Some(args.as_coroutine().variant_range(*def_id, tcx)) } _ => None, } @@ -2650,7 +2666,7 @@ /// If the type contains variants, returns the variant for `variant_index`. /// Panics if `variant_index` is out of range. // - // FIXME: This requires the optimized MIR in the case of generators. + // FIXME: This requires the optimized MIR in the case of coroutines. 
#[inline] pub fn discriminant_for_variant( self, @@ -2661,8 +2677,8 @@ TyKind::Adt(adt, _) if adt.is_enum() => { Some(adt.discriminant_for_variant(tcx, variant_index)) } - TyKind::Generator(def_id, args, _) => { - Some(args.as_generator().discriminant_for_variant(*def_id, tcx, variant_index)) + TyKind::Coroutine(def_id, args, _) => { + Some(args.as_coroutine().discriminant_for_variant(*def_id, tcx, variant_index)) } _ => None, } @@ -2672,7 +2688,7 @@ pub fn discriminant_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { match self.kind() { ty::Adt(adt, _) if adt.is_enum() => adt.repr().discr_type().to_ty(tcx), - ty::Generator(_, args, _) => args.as_generator().discr_ty(tcx), + ty::Coroutine(_, args, _) => args.as_coroutine().discr_ty(tcx), ty::Param(_) | ty::Alias(..) | ty::Infer(ty::TyVar(_)) => { let assoc_items = tcx.associated_item_def_ids( @@ -2697,7 +2713,7 @@ | ty::FnPtr(..) | ty::Dynamic(..) | ty::Closure(..) - | ty::GeneratorWitness(..) + | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) | ty::Error(_) @@ -2731,8 +2747,8 @@ | ty::RawPtr(..) | ty::Char | ty::Ref(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Array(..) | ty::Closure(..) | ty::Never @@ -2819,8 +2835,8 @@ | ty::RawPtr(..) | ty::Char | ty::Ref(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Array(..) | ty::Closure(..) | ty::Never @@ -2848,7 +2864,7 @@ /// Returning true means the type is known to be pure and `Copy+Clone`. /// Returning `false` means nothing -- could be `Copy`, might not be. /// - /// This is mostly useful for optimizations, as there are the types + /// This is mostly useful for optimizations, as these are the types /// on which we can replace cloning with dereferencing. pub fn is_trivially_pure_clone_copy(self) -> bool { match self.kind() { @@ -2883,7 +2899,7 @@ // anything with custom metadata it might be more complicated. ty::Ref(_, _, hir::Mutability::Not) | ty::RawPtr(..) => false, - ty::Generator(..) | ty::GeneratorWitness(..) => false, + ty::Coroutine(..) | ty::CoroutineWitness(..) => false, // Might be, but not "trivial" so just giving the safe answer. ty::Adt(..) | ty::Closure(..) => false, @@ -2958,8 +2974,8 @@ | FnPtr(_) | Dynamic(_, _, _) | Closure(_, _) - | Generator(_, _, _) - | GeneratorWitness(..) + | Coroutine(_, _, _) + | CoroutineWitness(..) | Never | Tuple(_) => true, Error(_) | Infer(_) | Alias(_, _) | Param(_) | Bound(_, _) | Placeholder(_) => false, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/typeck_results.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/typeck_results.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/typeck_results.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/typeck_results.rs 2023-12-21 16:55:28.000000000 +0000 @@ -24,7 +24,7 @@ use rustc_middle::mir::FakeReadCause; use rustc_session::Session; use rustc_span::Span; -use rustc_target::abi::FieldIdx; +use rustc_target::abi::{FieldIdx, VariantIdx}; use std::{collections::hash_map::Entry, hash::Hash, iter}; use super::RvalueScopes; @@ -189,9 +189,9 @@ /// Details may be find in `rustc_hir_analysis::check::rvalue_scopes`. pub rvalue_scopes: RvalueScopes, - /// Stores the predicates that apply on generator witness types. 
- /// formatting modified file tests/ui/generator/retain-resume-ref.rs - pub generator_interior_predicates: + /// Stores the predicates that apply on coroutine witness types. + /// formatting modified file tests/ui/coroutine/retain-resume-ref.rs + pub coroutine_interior_predicates: LocalDefIdMap, ObligationCause<'tcx>)>>, /// We sometimes treat byte string literals (which are of type `&[u8; N]`) @@ -205,7 +205,7 @@ pub closure_size_eval: LocalDefIdMap>, /// Container types and field indices of `offset_of!` expressions - offset_of_data: ItemLocalMap<(Ty<'tcx>, Vec)>, + offset_of_data: ItemLocalMap<(Ty<'tcx>, Vec<(VariantIdx, FieldIdx)>)>, } impl<'tcx> TypeckResults<'tcx> { @@ -231,7 +231,7 @@ closure_min_captures: Default::default(), closure_fake_reads: Default::default(), rvalue_scopes: Default::default(), - generator_interior_predicates: Default::default(), + coroutine_interior_predicates: Default::default(), treat_byte_string_as_slice: Default::default(), closure_size_eval: Default::default(), offset_of_data: Default::default(), @@ -464,11 +464,15 @@ &self.coercion_casts } - pub fn offset_of_data(&self) -> LocalTableInContext<'_, (Ty<'tcx>, Vec)> { + pub fn offset_of_data( + &self, + ) -> LocalTableInContext<'_, (Ty<'tcx>, Vec<(VariantIdx, FieldIdx)>)> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.offset_of_data } } - pub fn offset_of_data_mut(&mut self) -> LocalTableInContextMut<'_, (Ty<'tcx>, Vec)> { + pub fn offset_of_data_mut( + &mut self, + ) -> LocalTableInContextMut<'_, (Ty<'tcx>, Vec<(VariantIdx, FieldIdx)>)> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.offset_of_data } } } @@ -594,10 +598,27 @@ /// Canonical user type annotation. pub type CanonicalUserType<'tcx> = Canonical<'tcx, UserType<'tcx>>; -impl<'tcx> CanonicalUserType<'tcx> { +/// A user-given type annotation attached to a constant. These arise +/// from constants that are named via paths, like `Foo::::new` and +/// so forth. +#[derive(Copy, Clone, Debug, PartialEq, TyEncodable, TyDecodable)] +#[derive(Eq, Hash, HashStable, TypeFoldable, TypeVisitable)] +pub enum UserType<'tcx> { + Ty(Ty<'tcx>), + + /// The canonical type is the result of `type_of(def_id)` with the + /// given substitutions applied. + TypeOf(DefId, UserArgs<'tcx>), +} + +pub trait IsIdentity { + fn is_identity(&self) -> bool; +} + +impl<'tcx> IsIdentity for CanonicalUserType<'tcx> { /// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`, /// i.e., each thing is mapped to a canonical variable with the same index. - pub fn is_identity(&self) -> bool { + fn is_identity(&self) -> bool { match self.value { UserType::Ty(_) => false, UserType::TypeOf(_, user_args) => { @@ -640,19 +661,6 @@ } } -/// A user-given type annotation attached to a constant. These arise -/// from constants that are named via paths, like `Foo::::new` and -/// so forth. -#[derive(Copy, Clone, Debug, PartialEq, TyEncodable, TyDecodable)] -#[derive(Eq, Hash, HashStable, TypeFoldable, TypeVisitable)] -pub enum UserType<'tcx> { - Ty(Ty<'tcx>), - - /// The canonical type is the result of `type_of(def_id)` with the - /// given substitutions applied. 
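
The `is_identity` check that moves onto the new `IsIdentity` trait above asks whether a canonical user type annotation is the trivial substitution `[?0, ?1, ?2]`, i.e. whether every argument is the canonical variable at its own position. A simplified standalone sketch of that predicate (the `Arg` enum is an illustrative stand-in, not the compiler's `GenericArgKind`):

#[derive(Clone, Copy, PartialEq, Debug)]
enum Arg {
    // A canonical inference variable, identified by its index.
    CanonicalVar(usize),
    // Anything concrete the user actually wrote, e.g. `u32`.
    Concrete(&'static str),
}

// True only when argument i is canonical variable ?i at every position.
fn is_identity(args: &[Arg]) -> bool {
    args.iter()
        .enumerate()
        .all(|(i, arg)| matches!(arg, Arg::CanonicalVar(v) if *v == i))
}

fn main() {
    // `[?0, ?1]` constrains nothing: the annotation adds no information.
    assert!(is_identity(&[Arg::CanonicalVar(0), Arg::CanonicalVar(1)]));
    // Reordered variables or concrete arguments mean the annotation matters.
    assert!(!is_identity(&[Arg::CanonicalVar(1), Arg::CanonicalVar(0)]));
    assert!(!is_identity(&[Arg::CanonicalVar(0), Arg::Concrete("u32")]));
}
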
- TypeOf(DefId, UserArgs<'tcx>), -} - impl<'tcx> std::fmt::Display for UserType<'tcx> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/util.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/util.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/util.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/util.rs 2023-12-21 16:55:28.000000000 +0000 @@ -19,7 +19,7 @@ use rustc_macros::HashStable; use rustc_session::Limit; use rustc_span::sym; -use rustc_target::abi::{Integer, IntegerType, Size}; +use rustc_target::abi::{Integer, IntegerType, Primitive, Size}; use rustc_target::spec::abi::Abi; use smallvec::SmallVec; use std::{fmt, iter}; @@ -460,7 +460,7 @@ /// Checks whether each generic argument is simply a unique generic parameter. pub fn uses_unique_generic_params( self, - args: GenericArgsRef<'tcx>, + args: &[ty::GenericArg<'tcx>], ignore_regions: CheckRegions, ) -> Result<(), NotUniqueParam<'tcx>> { let mut seen = GrowableBitSet::default(); @@ -548,15 +548,15 @@ /// those are not yet phased out). The parent of the closure's /// `DefId` will also be the context where it appears. pub fn is_closure(self, def_id: DefId) -> bool { - matches!(self.def_kind(def_id), DefKind::Closure | DefKind::Generator) + matches!(self.def_kind(def_id), DefKind::Closure | DefKind::Coroutine) } /// Returns `true` if `def_id` refers to a definition that does not have its own - /// type-checking context, i.e. closure, generator or inline const. + /// type-checking context, i.e. closure, coroutine or inline const. pub fn is_typeck_child(self, def_id: DefId) -> bool { matches!( self.def_kind(def_id), - DefKind::Closure | DefKind::Generator | DefKind::InlineConst + DefKind::Closure | DefKind::Coroutine | DefKind::InlineConst ) } @@ -686,13 +686,13 @@ } /// Return the set of types that should be taken into account when checking - /// trait bounds on a generator's internal state. - pub fn generator_hidden_types( + /// trait bounds on a coroutine's internal state. + pub fn coroutine_hidden_types( self, def_id: DefId, ) -> impl Iterator>> { - let generator_layout = self.mir_generator_witnesses(def_id); - generator_layout + let coroutine_layout = self.mir_coroutine_witnesses(def_id); + coroutine_layout .as_ref() .map_or_else(|| [].iter(), |l| l.field_tys.iter()) .filter(|decl| !decl.ignore_for_traits) @@ -709,7 +709,7 @@ found_recursion: false, found_any_recursion: false, check_recursion: false, - expand_generators: false, + expand_coroutines: false, tcx: self, }; val.fold_with(&mut visitor) @@ -729,7 +729,7 @@ found_recursion: false, found_any_recursion: false, check_recursion: true, - expand_generators: true, + expand_coroutines: true, tcx: self, }; @@ -746,9 +746,10 @@ pub fn def_kind_descr(self, def_kind: DefKind, def_id: DefId) -> &'static str { match def_kind { DefKind::AssocFn if self.associated_item(def_id).fn_has_self_parameter => "method", - DefKind::Generator => match self.generator_kind(def_id).unwrap() { - rustc_hir::GeneratorKind::Async(..) => "async closure", - rustc_hir::GeneratorKind::Gen => "generator", + DefKind::Coroutine => match self.coroutine_kind(def_id).unwrap() { + rustc_hir::CoroutineKind::Async(..) => "async closure", + rustc_hir::CoroutineKind::Coroutine => "coroutine", + rustc_hir::CoroutineKind::Gen(..) 
=> "gen closure", }, _ => def_kind.descr(def_id), } @@ -763,9 +764,10 @@ pub fn def_kind_descr_article(self, def_kind: DefKind, def_id: DefId) -> &'static str { match def_kind { DefKind::AssocFn if self.associated_item(def_id).fn_has_self_parameter => "a", - DefKind::Generator => match self.generator_kind(def_id).unwrap() { - rustc_hir::GeneratorKind::Async(..) => "an", - rustc_hir::GeneratorKind::Gen => "a", + DefKind::Coroutine => match self.coroutine_kind(def_id).unwrap() { + rustc_hir::CoroutineKind::Async(..) => "an", + rustc_hir::CoroutineKind::Coroutine => "a", + rustc_hir::CoroutineKind::Gen(..) => "a", }, _ => def_kind.article(), } @@ -804,7 +806,7 @@ primary_def_id: Option, found_recursion: bool, found_any_recursion: bool, - expand_generators: bool, + expand_coroutines: bool, /// Whether or not to check for recursive opaque types. /// This is `true` when we're explicitly checking for opaque type /// recursion, and 'false' otherwise to avoid unnecessary work. @@ -842,7 +844,7 @@ } } - fn expand_generator(&mut self, def_id: DefId, args: GenericArgsRef<'tcx>) -> Option> { + fn expand_coroutine(&mut self, def_id: DefId, args: GenericArgsRef<'tcx>) -> Option> { if self.found_any_recursion { return None; } @@ -851,11 +853,11 @@ let expanded_ty = match self.expanded_cache.get(&(def_id, args)) { Some(expanded_ty) => *expanded_ty, None => { - for bty in self.tcx.generator_hidden_types(def_id) { + for bty in self.tcx.coroutine_hidden_types(def_id) { let hidden_ty = bty.instantiate(self.tcx, args); self.fold_ty(hidden_ty); } - let expanded_ty = Ty::new_generator_witness(self.tcx, def_id, args); + let expanded_ty = Ty::new_coroutine_witness(self.tcx, def_id, args); self.expanded_cache.insert((def_id, args), expanded_ty); expanded_ty } @@ -882,14 +884,14 @@ fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { let mut t = if let ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) = *t.kind() { self.expand_opaque_ty(def_id, args).unwrap_or(t) - } else if t.has_opaque_types() || t.has_generators() { + } else if t.has_opaque_types() || t.has_coroutines() { t.super_fold_with(self) } else { t }; - if self.expand_generators { - if let ty::GeneratorWitness(def_id, args) = *t.kind() { - t = self.expand_generator(def_id, args).unwrap_or(t); + if self.expand_coroutines { + if let ty::CoroutineWitness(def_id, args) = *t.kind() { + t = self.expand_coroutine(def_id, args).unwrap_or(t); } } t @@ -917,54 +919,62 @@ } impl<'tcx> Ty<'tcx> { + /// Returns the `Size` for primitive types (bool, uint, int, char, float). 
+ pub fn primitive_size(self, tcx: TyCtxt<'tcx>) -> Size { + match *self.kind() { + ty::Bool => Size::from_bytes(1), + ty::Char => Size::from_bytes(4), + ty::Int(ity) => Integer::from_int_ty(&tcx, ity).size(), + ty::Uint(uty) => Integer::from_uint_ty(&tcx, uty).size(), + ty::Float(ty::FloatTy::F32) => Primitive::F32.size(&tcx), + ty::Float(ty::FloatTy::F64) => Primitive::F64.size(&tcx), + _ => bug!("non primitive type"), + } + } + pub fn int_size_and_signed(self, tcx: TyCtxt<'tcx>) -> (Size, bool) { - let (int, signed) = match *self.kind() { - ty::Int(ity) => (Integer::from_int_ty(&tcx, ity), true), - ty::Uint(uty) => (Integer::from_uint_ty(&tcx, uty), false), + match *self.kind() { + ty::Int(ity) => (Integer::from_int_ty(&tcx, ity).size(), true), + ty::Uint(uty) => (Integer::from_uint_ty(&tcx, uty).size(), false), _ => bug!("non integer discriminant"), - }; - (int.size(), signed) + } } - /// Returns the maximum value for the given numeric type (including `char`s) - /// or returns `None` if the type is not numeric. - pub fn numeric_max_val(self, tcx: TyCtxt<'tcx>) -> Option> { - let val = match self.kind() { + /// Returns the minimum and maximum values for the given numeric type (including `char`s) or + /// returns `None` if the type is not numeric. + pub fn numeric_min_and_max_as_bits(self, tcx: TyCtxt<'tcx>) -> Option<(u128, u128)> { + use rustc_apfloat::ieee::{Double, Single}; + Some(match self.kind() { ty::Int(_) | ty::Uint(_) => { let (size, signed) = self.int_size_and_signed(tcx); - let val = + let min = if signed { size.truncate(size.signed_int_min() as u128) } else { 0 }; + let max = if signed { size.signed_int_max() as u128 } else { size.unsigned_int_max() }; - Some(val) + (min, max) } - ty::Char => Some(std::char::MAX as u128), - ty::Float(fty) => Some(match fty { - ty::FloatTy::F32 => rustc_apfloat::ieee::Single::INFINITY.to_bits(), - ty::FloatTy::F64 => rustc_apfloat::ieee::Double::INFINITY.to_bits(), - }), - _ => None, - }; + ty::Char => (0, std::char::MAX as u128), + ty::Float(ty::FloatTy::F32) => { + ((-Single::INFINITY).to_bits(), Single::INFINITY.to_bits()) + } + ty::Float(ty::FloatTy::F64) => { + ((-Double::INFINITY).to_bits(), Double::INFINITY.to_bits()) + } + _ => return None, + }) + } - val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self))) + /// Returns the maximum value for the given numeric type (including `char`s) + /// or returns `None` if the type is not numeric. + pub fn numeric_max_val(self, tcx: TyCtxt<'tcx>) -> Option> { + self.numeric_min_and_max_as_bits(tcx) + .map(|(_, max)| ty::Const::from_bits(tcx, max, ty::ParamEnv::empty().and(self))) } /// Returns the minimum value for the given numeric type (including `char`s) /// or returns `None` if the type is not numeric. 
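
The refactor above folds the old `numeric_max_val`/`numeric_min_val` bodies into a single `numeric_min_and_max_as_bits` helper that returns raw bit patterns: for signed integers the minimum is the sign-bit pattern truncated to the type's size and the maximum is everything below the sign bit, while unsigned types span zero to the all-ones mask. A small worked sketch of the integer cases (plain Rust, not the compiler's `Size`/`Integer` machinery):

// Returns (min, max) as bit patterns for an integer type of the given width,
// mirroring the integer arm of numeric_min_and_max_as_bits above.
fn int_min_max_as_bits(bits: u32, signed: bool) -> (u128, u128) {
    assert!((1..=128).contains(&bits));
    let mask = if bits == 128 { u128::MAX } else { (1u128 << bits) - 1 };
    if signed {
        let min = 1u128 << (bits - 1);       // e.g. 0x80 for 8 bits: i8::MIN truncated
        let max = (1u128 << (bits - 1)) - 1; // e.g. 0x7f for 8 bits: i8::MAX
        (min & mask, max)
    } else {
        (0, mask)                            // e.g. (0x00, 0xff) for u8
    }
}

fn main() {
    assert_eq!(int_min_max_as_bits(8, true), (i8::MIN as u8 as u128, i8::MAX as u128));
    assert_eq!(int_min_max_as_bits(8, false), (u8::MIN as u128, u8::MAX as u128));
    assert_eq!(int_min_max_as_bits(32, true), (i32::MIN as u32 as u128, i32::MAX as u128));
    // `char` is handled separately: its range is 0..=char::MAX as a u128.
    assert_eq!(char::MAX as u128, 0x10FFFF);
}
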
pub fn numeric_min_val(self, tcx: TyCtxt<'tcx>) -> Option> { - let val = match self.kind() { - ty::Int(_) | ty::Uint(_) => { - let (size, signed) = self.int_size_and_signed(tcx); - let val = if signed { size.truncate(size.signed_int_min() as u128) } else { 0 }; - Some(val) - } - ty::Char => Some(0), - ty::Float(fty) => Some(match fty { - ty::FloatTy::F32 => (-::rustc_apfloat::ieee::Single::INFINITY).to_bits(), - ty::FloatTy::F64 => (-::rustc_apfloat::ieee::Double::INFINITY).to_bits(), - }), - _ => None, - }; - - val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self))) + self.numeric_min_and_max_as_bits(tcx) + .map(|(min, _)| ty::Const::from_bits(tcx, min, ty::ParamEnv::empty().and(self))) } /// Checks whether values of this type `T` are *moved* or *copied* @@ -1024,8 +1034,8 @@ | ty::Closure(..) | ty::Dynamic(..) | ty::Foreign(_) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Infer(_) | ty::Alias(..) | ty::Param(_) @@ -1063,8 +1073,8 @@ | ty::Closure(..) | ty::Dynamic(..) | ty::Foreign(_) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Infer(_) | ty::Alias(..) | ty::Param(_) @@ -1184,7 +1194,7 @@ // Conservatively return `false` for all others... // Anonymous function types - ty::FnDef(..) | ty::Closure(..) | ty::Dynamic(..) | ty::Generator(..) => false, + ty::FnDef(..) | ty::Closure(..) | ty::Dynamic(..) | ty::Coroutine(..) => false, // Generic or inferred types // @@ -1194,7 +1204,7 @@ false } - ty::Foreign(_) | ty::GeneratorWitness(..) | ty::Error(_) => false, + ty::Foreign(_) | ty::CoroutineWitness(..) | ty::Error(_) => false, } } @@ -1328,8 +1338,8 @@ | ty::Placeholder(..) | ty::Infer(_) | ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) => Ok(smallvec![ty]), + | ty::Coroutine(..) + | ty::CoroutineWitness(..) => Ok(smallvec![ty]), } } @@ -1360,7 +1370,7 @@ // Not trivial because they have components, and instead of looking inside, // we'll just perform trait selection. - ty::Closure(..) | ty::Generator(..) | ty::GeneratorWitness(..) | ty::Adt(..) => false, + ty::Closure(..) | ty::Coroutine(..) | ty::CoroutineWitness(..) | ty::Adt(..) 
=> false, ty::Array(ty, _) | ty::Slice(ty) => is_trivially_const_drop(ty), @@ -1421,7 +1431,7 @@ found_recursion: false, found_any_recursion: false, check_recursion: false, - expand_generators: false, + expand_coroutines: false, tcx, }; val.fold_with(&mut visitor) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/visit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/visit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/visit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/visit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -47,8 +47,8 @@ fn has_opaque_types(&self) -> bool { self.has_type_flags(TypeFlags::HAS_TY_OPAQUE) } - fn has_generators(&self) -> bool { - self.has_type_flags(TypeFlags::HAS_TY_GENERATOR) + fn has_coroutines(&self) -> bool { + self.has_type_flags(TypeFlags::HAS_TY_COROUTINE) } fn references_error(&self) -> bool { self.has_type_flags(TypeFlags::HAS_ERROR) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/walk.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/walk.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/walk.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/ty/walk.rs 2023-12-21 16:55:28.000000000 +0000 @@ -189,8 +189,8 @@ } ty::Adt(_, args) | ty::Closure(_, args) - | ty::Generator(_, args, _) - | ty::GeneratorWitness(_, args) + | ty::Coroutine(_, args, _) + | ty::CoroutineWitness(_, args) | ty::FnDef(_, args) => { stack.extend(args.iter().rev()); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/values.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/values.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/values.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_middle/src/values.rs 2023-12-21 16:55:28.000000000 +0000 @@ -217,7 +217,8 @@ match ty.kind { hir::TyKind::Path(hir::QPath::Resolved(_, path)) => { if let Res::Def(kind, def_id) = path.res - && !matches!(kind, DefKind::TyAlias) { + && !matches!(kind, DefKind::TyAlias) + { let check_params = def_id.as_local().map_or(true, |def_id| { if def_id == needle { spans.push(ty.span); @@ -227,8 +228,11 @@ if check_params && let Some(args) = path.segments.last().unwrap().args { let params_in_repr = tcx.params_in_repr(def_id); // the domain size check is needed because the HIR may not be well-formed at this point - for (i, arg) in args.args.iter().enumerate().take(params_in_repr.domain_size()) { - if let hir::GenericArg::Type(ty) = arg && params_in_repr.contains(i as u32) { + for (i, arg) in args.args.iter().enumerate().take(params_in_repr.domain_size()) + { + if let hir::GenericArg::Type(ty) = arg + && params_in_repr.contains(i as u32) + { find_item_ty_spans(tcx, ty, needle, spans, seen_representable); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,25 +3,25 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] -rustc_arena = { path = "../rustc_arena" } -tracing = "0.1" +# tidy-alphabetical-start either = "1" -rustc_middle = { path = "../rustc_middle" } rustc_apfloat = "0.2.0" +rustc_arena = { 
path = "../rustc_arena" } +rustc_ast = { path = "../rustc_ast" } rustc_data_structures = { path = "../rustc_data_structures" } -rustc_index = { path = "../rustc_index" } rustc_errors = { path = "../rustc_errors" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_hir = { path = "../rustc_hir" } +rustc_index = { path = "../rustc_index" } rustc_infer = { path = "../rustc_infer" } rustc_macros = { path = "../rustc_macros" } +rustc_middle = { path = "../rustc_middle" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } -rustc_ast = { path = "../rustc_ast" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -173,7 +173,7 @@ mir_build_literal_in_range_out_of_bounds = literal out of range for `{$ty}` - .label = this value doesn't fit in `{$ty}` whose maximum value is `{$max}` + .label = this value does not fit into the type `{$ty}` whose range is `{$min}..={$max}` mir_build_lower_range_bound_must_be_less_than_or_equal_to_upper = lower range bound must be less than or equal to upper @@ -221,6 +221,11 @@ .help = ensure that all variants are matched explicitly by adding the suggested match arms .note = the matched value is of type `{$scrut_ty}` and the `non_exhaustive_omitted_patterns` attribute was found +mir_build_non_exhaustive_omitted_pattern_lint_on_arm = the lint level must be set on the whole match + .help = it no longer has any effect to set the lint level on an individual match arm + .label = remove this attribute + .suggestion = set the lint level on the whole match + mir_build_non_exhaustive_patterns_type_not_empty = non-exhaustive patterns: type `{$ty}` is non-empty .def_note = `{$peeled_ty}` defined here .type_note = the matched value is of type `{$ty}` @@ -242,7 +247,7 @@ mir_build_pattern_not_covered = refutable pattern in {$origin} .pattern_ty = the matched value is of type `{$pattern_ty}` -mir_build_pointer_pattern = function pointers and unsized pointers in patterns behave unpredictably and should not be relied upon. See https://github.com/rust-lang/rust/issues/70861 for details. +mir_build_pointer_pattern = function pointers and raw pointers not derived from integers in patterns behave unpredictably and should not be relied upon. See https://github.com/rust-lang/rust/issues/70861 for details. 
mir_build_privately_uninhabited = pattern `{$witness_1}` is currently uninhabited, but this variant contains private fields which may become inhabited in the future @@ -315,6 +320,7 @@ .label = unreachable pattern .catchall_label = matches any value +mir_build_unsafe_fn_safe_body = an unsafe function restricts its caller, but its body is safe by default mir_build_unsafe_not_inherited = items do not inherit unsafety from separate enclosing items mir_build_unsafe_op_in_unsafe_fn_borrow_of_layout_constrained_field_requires_unsafe = @@ -379,6 +385,7 @@ .label = unnecessary `unsafe` block mir_build_unused_unsafe_enclosing_block_label = because it's nested under this `unsafe` block -mir_build_unused_unsafe_enclosing_fn_label = because it's nested under this `unsafe` fn mir_build_variant_defined_here = not covered + +mir_build_wrap_suggestion = consider wrapping the function body in an unsafe block diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/custom/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/custom/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/custom/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/custom/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -48,7 +48,7 @@ source: MirSource::item(did), phase: MirPhase::Built, source_scopes: IndexVec::new(), - generator: None, + coroutine: None, local_decls: IndexVec::new(), user_type_annotations: IndexVec::new(), arg_count: params.len(), @@ -60,6 +60,7 @@ tainted_by_errors: None, injection_phase: None, pass_count: 0, + function_coverage_info: None, }; body.local_decls.push(LocalDecl::new(return_ty, return_ty_span)); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_place.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_place.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_place.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_place.rs 2023-12-21 16:55:28.000000000 +0000 @@ -75,7 +75,7 @@ /// Given a list of MIR projections, convert them to list of HIR ProjectionKind. /// The projections are truncated to represent a path that might be captured by a -/// closure/generator. This implies the vector returned from this function doesn't contain +/// closure/coroutine. This implies the vector returned from this function doesn't contain /// ProjectionElems `Downcast`, `ConstantIndex`, `Index`, or `Subslice` because those will never be /// part of a path that is captured by a closure. We stop applying projections once we see the first /// projection that isn't captured by a closure. @@ -213,7 +213,7 @@ /// projections. /// /// Supports only HIR projection kinds that represent a path that might be -/// captured by a closure or a generator, i.e., an `Index` or a `Subslice` +/// captured by a closure or a coroutine, i.e., an `Index` or a `Subslice` /// projection kinds are unsupported. 
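
The `as_place.rs` comment above about truncating projections mirrors how closure capture analysis behaves at the language level: field accesses can extend a captured path, but an `Index` (or similar) projection cannot, so the capture stops at the last field. A small illustration in ordinary edition-2021 Rust (unrelated to the compiler's internal `ProjectionKind` values):

struct Outer {
    items: Vec<u32>,
    label: u32,
}

fn main() {
    let outer = Outer { items: vec![10, 20, 30], label: 7 };
    let i = 1;

    // The closure mentions `outer.items[i]`, but the captured path is truncated
    // at the indexing step, so only `outer.items` is moved into the closure.
    let pick = move || outer.items[i];

    // Because the capture stopped at `outer.items`, the sibling field is still
    // usable here even though part of `outer` has been moved.
    println!("{} {}", pick(), outer.label);
}
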
fn strip_prefix<'a, 'tcx>( mut base_ty: Ty<'tcx>, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs 2023-12-21 16:55:28.000000000 +0000 @@ -181,9 +181,9 @@ block = success; // The `Box` temporary created here is not a part of the HIR, - // and therefore is not considered during generator auto-trait + // and therefore is not considered during coroutine auto-trait // determination. See the comment about `box` at `yield_in_scope`. - let result = this.local_decls.push(LocalDecl::new(expr.ty, expr_span).internal()); + let result = this.local_decls.push(LocalDecl::new(expr.ty, expr_span)); this.cfg.push( block, Statement { source_info, kind: StatementKind::StorageLive(result) }, @@ -213,7 +213,9 @@ // Casting an enum to an integer is equivalent to computing the discriminant and casting the // discriminant. Previously every backend had to repeat the logic for this operation. Now we // create all the steps directly in MIR with operations all backends need to support anyway. - let (source, ty) = if let ty::Adt(adt_def, ..) = source.ty.kind() && adt_def.is_enum() { + let (source, ty) = if let ty::Adt(adt_def, ..) = source.ty.kind() + && adt_def.is_enum() + { let discr_ty = adt_def.repr().discr_type().to_ty(this.tcx); let temp = unpack!(block = this.as_temp(block, scope, source, Mutability::Not)); let layout = this.tcx.layout_of(this.param_env.and(source.ty)); @@ -224,7 +226,7 @@ discr, Rvalue::Discriminant(temp.into()), ); - let (op,ty) = (Operand::Move(discr), discr_ty); + let (op, ty) = (Operand::Move(discr), discr_ty); if let Abi::Scalar(scalar) = layout.unwrap().abi && !scalar.is_always_valid(&this.tcx) @@ -236,27 +238,30 @@ block, source_info, unsigned_place, - Rvalue::Cast(CastKind::IntToInt, Operand::Copy(discr), unsigned_ty)); + Rvalue::Cast(CastKind::IntToInt, Operand::Copy(discr), unsigned_ty), + ); let bool_ty = this.tcx.types.bool; let range = scalar.valid_range(&this.tcx); let merge_op = - if range.start <= range.end { - BinOp::BitAnd - } else { - BinOp::BitOr - }; + if range.start <= range.end { BinOp::BitAnd } else { BinOp::BitOr }; let mut comparer = |range: u128, bin_op: BinOp| -> Place<'tcx> { - let range_val = - Const::from_bits(this.tcx, range, ty::ParamEnv::empty().and(unsigned_ty)); + let range_val = Const::from_bits( + this.tcx, + range, + ty::ParamEnv::empty().and(unsigned_ty), + ); let lit_op = this.literal_operand(expr.span, range_val); let is_bin_op = this.temp(bool_ty, expr_span); this.cfg.push_assign( block, source_info, is_bin_op, - Rvalue::BinaryOp(bin_op, Box::new((Operand::Copy(unsigned_place), lit_op))), + Rvalue::BinaryOp( + bin_op, + Box::new((Operand::Copy(unsigned_place), lit_op)), + ), ); is_bin_op }; @@ -270,7 +275,13 @@ block, source_info, merge_place, - Rvalue::BinaryOp(merge_op, Box::new((Operand::Move(start_place), Operand::Move(end_place)))), + Rvalue::BinaryOp( + merge_op, + Box::new(( + Operand::Move(start_place), + Operand::Move(end_place), + )), + ), ); merge_place }; @@ -278,19 +289,24 @@ block, Statement { source_info, - kind: StatementKind::Intrinsic(Box::new(NonDivergingIntrinsic::Assume( - Operand::Move(assert_place), - ))), + kind: StatementKind::Intrinsic(Box::new( + 
NonDivergingIntrinsic::Assume(Operand::Move(assert_place)), + )), }, ); } - (op,ty) - + (op, ty) } else { let ty = source.ty; let source = unpack!( - block = this.as_operand(block, scope, source, LocalInfo::Boring, NeedsTemporary::No) + block = this.as_operand( + block, + scope, + source, + LocalInfo::Boring, + NeedsTemporary::No + ) ); (source, ty) }; @@ -471,11 +487,11 @@ .collect(); let result = match args { - UpvarArgs::Generator(args) => { + UpvarArgs::Coroutine(args) => { // We implicitly set the discriminant to 0. See // librustc_mir/transform/deaggregator.rs for details. let movability = movability.unwrap(); - Box::new(AggregateKind::Generator(closure_id.to_def_id(), args, movability)) + Box::new(AggregateKind::Coroutine(closure_id.to_def_id(), args, movability)) } UpvarArgs::Closure(args) => { Box::new(AggregateKind::Closure(closure_id.to_def_id(), args)) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_temp.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_temp.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_temp.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/as_temp.rs 2023-12-21 16:55:28.000000000 +0000 @@ -52,12 +52,10 @@ let local_info = match expr.kind { ExprKind::StaticRef { def_id, .. } => { assert!(!this.tcx.is_thread_local_static(def_id)); - local_decl.internal = true; LocalInfo::StaticRef { def_id, is_thread_local: false } } ExprKind::ThreadLocalRef(def_id) => { assert!(this.tcx.is_thread_local_static(def_id)); - local_decl.internal = true; LocalInfo::StaticRef { def_id, is_thread_local: true } } ExprKind::NamedConst { def_id, .. } | ExprKind::ConstParam { def_id, .. } => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/into.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/into.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/into.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/into.rs 2023-12-21 16:55:28.000000000 +0000 @@ -547,7 +547,7 @@ source_info, TerminatorKind::Yield { value, resume, resume_arg: destination, drop: None }, ); - this.generator_drop_cleanup(block); + this.coroutine_drop_cleanup(block); resume.unit() } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -44,7 +44,7 @@ //! the most suitable spot to implement it, and then just let the //! other fns cycle around. The handoff works like this: //! -//! - `into(place)` -> fallback is to create a rvalue with `as_rvalue` and assign it to `place` +//! - `into(place)` -> fallback is to create an rvalue with `as_rvalue` and assign it to `place` //! - `as_rvalue` -> fallback is to create an Operand with `as_operand` and use `Rvalue::use` //! - `as_operand` -> either invokes `as_constant` or `as_temp` //! 
- `as_constant` -> (no fallback) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/stmt.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/stmt.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/stmt.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/expr/stmt.rs 2023-12-21 16:55:28.000000000 +0000 @@ -120,32 +120,31 @@ // // it is usually better to focus on `the_value` rather // than the entirety of block(s) surrounding it. - let adjusted_span = - if let ExprKind::Block { block } = expr.kind - && let Some(tail_ex) = this.thir[block].expr - { - let mut expr = &this.thir[tail_ex]; - loop { - match expr.kind { - ExprKind::Block { block } - if let Some(nested_expr) = this.thir[block].expr => - { - expr = &this.thir[nested_expr]; - } - ExprKind::Scope { value: nested_expr, .. } => { - expr = &this.thir[nested_expr]; - } - _ => break, + let adjusted_span = if let ExprKind::Block { block } = expr.kind + && let Some(tail_ex) = this.thir[block].expr + { + let mut expr = &this.thir[tail_ex]; + loop { + match expr.kind { + ExprKind::Block { block } + if let Some(nested_expr) = this.thir[block].expr => + { + expr = &this.thir[nested_expr]; } + ExprKind::Scope { value: nested_expr, .. } => { + expr = &this.thir[nested_expr]; + } + _ => break, } - this.block_context.push(BlockFrame::TailExpr { - tail_result_is_ignored: true, - span: expr.span, - }); - Some(expr.span) - } else { - None - }; + } + this.block_context.push(BlockFrame::TailExpr { + tail_result_is_ignored: true, + span: expr.span, + }); + Some(expr.span) + } else { + None + }; let temp = unpack!(block = this.as_temp(block, statement_scope, expr, Mutability::Not)); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -157,7 +157,7 @@ /// [ 0. Pre-match ] /// | /// [ 1. Evaluate Scrutinee (expression being matched on) ] - /// [ (fake read of scrutinee) ] + /// [ (PlaceMention of scrutinee) ] /// | /// [ 2. Decision tree -- check discriminants ] <--------+ /// | | @@ -184,7 +184,7 @@ /// /// We generate MIR in the following steps: /// - /// 1. Evaluate the scrutinee and add the fake read of it ([Builder::lower_scrutinee]). + /// 1. Evaluate the scrutinee and add the PlaceMention of it ([Builder::lower_scrutinee]). /// 2. Create the decision tree ([Builder::lower_match_tree]). /// 3. Determine the fake borrows that are needed from the places that were /// matched against and create the required temporaries for them @@ -223,6 +223,7 @@ let fake_borrow_temps = self.lower_match_tree( block, scrutinee_span, + &scrutinee_place, match_start_span, match_has_guard, &mut candidates, @@ -238,7 +239,7 @@ ) } - /// Evaluate the scrutinee and add the fake read of it. + /// Evaluate the scrutinee and add the PlaceMention for it. 
fn lower_scrutinee( &mut self, mut block: BasicBlock, @@ -246,26 +247,9 @@ scrutinee_span: Span, ) -> BlockAnd> { let scrutinee_place_builder = unpack!(block = self.as_place_builder(block, scrutinee)); - // Matching on a `scrutinee_place` with an uninhabited type doesn't - // generate any memory reads by itself, and so if the place "expression" - // contains unsafe operations like raw pointer dereferences or union - // field projections, we wouldn't know to require an `unsafe` block - // around a `match` equivalent to `std::intrinsics::unreachable()`. - // See issue #47412 for this hole being discovered in the wild. - // - // HACK(eddyb) Work around the above issue by adding a dummy inspection - // of `scrutinee_place`, specifically by applying `ReadForMatch`. - // - // NOTE: ReadForMatch also checks that the scrutinee is initialized. - // This is currently needed to not allow matching on an uninitialized, - // uninhabited value. If we get never patterns, those will check that - // the place is initialized, and so this read would only be used to - // check safety. - let cause_matched_place = FakeReadCause::ForMatchedPlace(None); - let source_info = self.source_info(scrutinee_span); - if let Some(scrutinee_place) = scrutinee_place_builder.try_to_place(self) { - self.cfg.push_fake_read(block, source_info, cause_matched_place, scrutinee_place); + let source_info = self.source_info(scrutinee_span); + self.cfg.push_place_mention(block, source_info, scrutinee_place); } block.and(scrutinee_place_builder) @@ -304,6 +288,7 @@ &mut self, block: BasicBlock, scrutinee_span: Span, + scrutinee_place_builder: &PlaceBuilder<'tcx>, match_start_span: Span, match_has_guard: bool, candidates: &mut [&mut Candidate<'pat, 'tcx>], @@ -331,6 +316,33 @@ // otherwise block. Match checking will ensure this is actually // unreachable. let source_info = self.source_info(scrutinee_span); + + // Matching on a `scrutinee_place` with an uninhabited type doesn't + // generate any memory reads by itself, and so if the place "expression" + // contains unsafe operations like raw pointer dereferences or union + // field projections, we wouldn't know to require an `unsafe` block + // around a `match` equivalent to `std::intrinsics::unreachable()`. + // See issue #47412 for this hole being discovered in the wild. + // + // HACK(eddyb) Work around the above issue by adding a dummy inspection + // of `scrutinee_place`, specifically by applying `ReadForMatch`. + // + // NOTE: ReadForMatch also checks that the scrutinee is initialized. + // This is currently needed to not allow matching on an uninitialized, + // uninhabited value. If we get never patterns, those will check that + // the place is initialized, and so this read would only be used to + // check safety. 
+ let cause_matched_place = FakeReadCause::ForMatchedPlace(None); + + if let Some(scrutinee_place) = scrutinee_place_builder.try_to_place(self) { + self.cfg.push_fake_read( + otherwise_block, + source_info, + cause_matched_place, + scrutinee_place, + ); + } + self.cfg.terminate(otherwise_block, source_info, TerminatorKind::Unreachable); } @@ -599,13 +611,8 @@ } _ => { - let place_builder = unpack!(block = self.as_place_builder(block, initializer)); - - if let Some(place) = place_builder.try_to_place(self) { - let source_info = self.source_info(initializer.span); - self.cfg.push_place_mention(block, source_info, place); - } - + let place_builder = + unpack!(block = self.lower_scrutinee(block, initializer, initializer.span)); self.place_into_pattern(block, &irrefutable_pat, place_builder, true) } } @@ -622,6 +629,7 @@ let fake_borrow_temps = self.lower_match_tree( block, irrefutable_pat.span, + &initializer, irrefutable_pat.span, false, &mut [&mut candidate], @@ -736,7 +744,9 @@ self.cfg.push(block, Statement { source_info, kind: StatementKind::StorageLive(local_id) }); // Although there is almost always scope for given variable in corner cases // like #92893 we might get variable with no scope. - if let Some(region_scope) = self.region_scope_tree.var_scope(var.0.local_id) && schedule_drop { + if let Some(region_scope) = self.region_scope_tree.var_scope(var.0.local_id) + && schedule_drop + { self.schedule_drop(span, region_scope, local_id, DropKind::Storage); } Place::from(local_id) @@ -814,7 +824,10 @@ } } - PatKind::Constant { .. } | PatKind::Range { .. } | PatKind::Wild => {} + PatKind::Constant { .. } + | PatKind::Range { .. } + | PatKind::Wild + | PatKind::Error(_) => {} PatKind::Deref { ref subpattern } => { self.visit_primary_bindings(subpattern, pattern_user_ty.deref(), f); @@ -842,6 +855,10 @@ self.visit_primary_bindings(subpattern, subpattern_user_ty, f) } + PatKind::InlineConstant { ref subpattern, .. } => { + self.visit_primary_bindings(subpattern, pattern_user_ty, f) + } + PatKind::Leaf { ref subpatterns } => { for subpattern in subpatterns { let subpattern_user_ty = pattern_user_ty.clone().leaf(subpattern.field); @@ -1018,7 +1035,7 @@ ty: Ty<'tcx>, }, - /// Test whether the value falls within an inclusive or exclusive range + /// Test whether the value falls within an inclusive or exclusive range. Range(Box>), /// Test that the length of the slice is equal to `len`. 
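
The `ReadForMatch` comment carried into `lower_match_tree` above describes a real source-level hole (issue #47412): an empty match on an uninhabited type performs no reads of its own, so without the synthetic fake read an unsafe place expression feeding it would escape the unsafety check. An illustration in ordinary Rust of the case the fake read keeps `unsafe`-only:

// An uninhabited type: no value of it can ever exist.
enum Void {}

fn diverge(p: *const Void) -> ! {
    // Dereferencing a raw pointer is an unsafe operation, even though the
    // empty match never reads the pointee at runtime; the fake read inserted
    // by match lowering is what forces this into an `unsafe` block.
    unsafe { match *p {} }
}

fn main() {
    // Illustration only: there is no valid `*const Void` to dereference,
    // so we merely show that the function type-checks.
    let _f: fn(*const Void) -> ! = diverge;
}
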
@@ -1798,7 +1815,6 @@ let fake_borrow_ty = Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, fake_borrow_deref_ty); let mut fake_borrow_temp = LocalDecl::new(fake_borrow_ty, temp_span); - fake_borrow_temp.internal = self.local_decls[matched_place.local].internal; fake_borrow_temp.local_info = ClearCrossCrate::Set(Box::new(LocalInfo::FakeBorrow)); let fake_borrow_temp = self.local_decls.push(fake_borrow_temp); @@ -1833,6 +1849,7 @@ let fake_borrow_temps = self.lower_match_tree( block, pat.span, + &expr_place_builder, pat.span, false, &mut [&mut guard_candidate, &mut otherwise_candidate], @@ -2268,7 +2285,6 @@ ty: var_ty, user_ty: if user_ty.is_empty() { None } else { Some(Box::new(user_ty)) }, source_info, - internal: false, local_info: ClearCrossCrate::Set(Box::new(LocalInfo::User(BindingForm::Var( VarBindingForm { binding_mode, @@ -2298,7 +2314,6 @@ ty: Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, var_ty), user_ty: None, source_info, - internal: false, local_info: ClearCrossCrate::Set(Box::new(LocalInfo::User( BindingForm::RefForGuard, ))), @@ -2336,6 +2351,7 @@ let fake_borrow_temps = this.lower_match_tree( block, initializer_span, + &scrutinee, pattern.span, false, &mut [&mut candidate, &mut wildcard], diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/simplify.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/simplify.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/simplify.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/simplify.rs 2023-12-21 16:55:28.000000000 +0000 @@ -15,11 +15,7 @@ use crate::build::expr::as_place::PlaceBuilder; use crate::build::matches::{Ascription, Binding, Candidate, MatchPair}; use crate::build::Builder; -use rustc_hir::RangeEnd; use rustc_middle::thir::{self, *}; -use rustc_middle::ty; -use rustc_middle::ty::layout::IntegerExt; -use rustc_target::abi::{Integer, Size}; use std::mem; @@ -148,7 +144,6 @@ match_pair: MatchPair<'pat, 'tcx>, candidate: &mut Candidate<'pat, 'tcx>, ) -> Result<(), MatchPair<'pat, 'tcx>> { - let tcx = self.tcx; match match_pair.pattern.kind { PatKind::AscribeUserType { ref subpattern, @@ -168,7 +163,7 @@ Ok(()) } - PatKind::Wild => { + PatKind::Wild | PatKind::Error(_) => { // nothing left to do Ok(()) } @@ -204,41 +199,16 @@ Err(match_pair) } - PatKind::Range(box PatRange { lo, hi, end }) => { - let (range, bias) = match *lo.ty().kind() { - ty::Char => { - (Some(('\u{0000}' as u128, '\u{10FFFF}' as u128, Size::from_bits(32))), 0) - } - ty::Int(ity) => { - let size = Integer::from_int_ty(&tcx, ity).size(); - let max = size.truncate(u128::MAX); - let bias = 1u128 << (size.bits() - 1); - (Some((0, max, size)), bias) - } - ty::Uint(uty) => { - let size = Integer::from_uint_ty(&tcx, uty).size(); - let max = size.truncate(u128::MAX); - (Some((0, max, size)), 0) - } - _ => (None, 0), - }; - if let Some((min, max, sz)) = range { - // We want to compare ranges numerically, but the order of the bitwise - // representation of signed integers does not match their numeric order. Thus, - // to correct the ordering, we need to shift the range of signed integers to - // correct the comparison. This is achieved by XORing with a bias (see - // pattern/_match.rs for another pertinent example of this pattern). - // - // Also, for performance, it's important to only do the second `try_to_bits` if - // necessary. 
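
The comment in the removed block above explains why the old simplifier XORed signed values with a bias before comparing: raw two's-complement bit patterns do not sort in numeric order, but flipping the sign bit maps them onto unsigned values that do. A short worked check of that trick for an 8-bit type:

fn main() {
    // Bias for an 8-bit signed type: just the sign bit.
    let bias: u128 = 1 << 7;
    let biased = |x: i8| (x as u8 as u128) ^ bias;

    // After XORing with the bias, unsigned comparison matches signed order.
    assert!(biased(i8::MIN) < biased(-1));
    assert!(biased(-1) < biased(0));
    assert!(biased(0) < biased(i8::MAX));

    // Without the bias, the raw bit patterns would sort -1 (0xFF) above 127 (0x7F).
    assert!((-1i8 as u8) > (i8::MAX as u8));
}
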
- let lo = lo.try_to_bits(sz).unwrap() ^ bias; - if lo <= min { - let hi = hi.try_to_bits(sz).unwrap() ^ bias; - if hi > max || hi == max && end == RangeEnd::Included { - // Irrefutable pattern match. - return Ok(()); - } - } + PatKind::InlineConstant { subpattern: ref pattern, def: _ } => { + candidate.match_pairs.push(MatchPair::new(match_pair.place, pattern, self)); + + Ok(()) + } + + PatKind::Range(ref range) => { + if let Some(true) = range.is_full_range(self.tcx) { + // Irrefutable pattern match. + return Ok(()); } Err(match_pair) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/test.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/test.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/test.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/matches/test.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,7 +8,6 @@ use crate::build::expr::as_place::PlaceBuilder; use crate::build::matches::{Candidate, MatchPair, Test, TestKind}; use crate::build::Builder; -use crate::thir::pattern::compare_const_vals; use rustc_data_structures::fx::FxIndexMap; use rustc_hir::{LangItem, RangeEnd}; use rustc_index::bit_set::BitSet; @@ -59,8 +58,7 @@ }, PatKind::Range(ref range) => { - assert_eq!(range.lo.ty(), match_pair.pattern.ty); - assert_eq!(range.hi.ty(), match_pair.pattern.ty); + assert_eq!(range.ty, match_pair.pattern.ty); Test { span: match_pair.pattern.span, kind: TestKind::Range(range.clone()) } } @@ -73,11 +71,13 @@ PatKind::Or { .. } => bug!("or-patterns should have already been handled"), PatKind::AscribeUserType { .. } + | PatKind::InlineConstant { .. } | PatKind::Array { .. } | PatKind::Wild | PatKind::Binding { .. } | PatKind::Leaf { .. } - | PatKind::Deref { .. } => self.error_simplifiable(match_pair), + | PatKind::Deref { .. } + | PatKind::Error(_) => self.error_simplifiable(match_pair), } } @@ -110,8 +110,10 @@ | PatKind::Or { .. } | PatKind::Binding { .. } | PatKind::AscribeUserType { .. } + | PatKind::InlineConstant { .. } | PatKind::Leaf { .. } - | PatKind::Deref { .. } => { + | PatKind::Deref { .. 
} + | PatKind::Error(_) => { // don't know how to add these patterns to a switch false } @@ -236,18 +238,27 @@ TestKind::Eq { value, ty } => { let tcx = self.tcx; - if let ty::Adt(def, _) = ty.kind() && Some(def.did()) == tcx.lang_items().string() { + if let ty::Adt(def, _) = ty.kind() + && Some(def.did()) == tcx.lang_items().string() + { if !tcx.features().string_deref_patterns { - bug!("matching on `String` went through without enabling string_deref_patterns"); + bug!( + "matching on `String` went through without enabling string_deref_patterns" + ); } let re_erased = tcx.lifetimes.re_erased; - let ref_string = self.temp(Ty::new_imm_ref(tcx,re_erased, ty), test.span); - let ref_str_ty = Ty::new_imm_ref(tcx,re_erased, tcx.types.str_); + let ref_string = self.temp(Ty::new_imm_ref(tcx, re_erased, ty), test.span); + let ref_str_ty = Ty::new_imm_ref(tcx, re_erased, tcx.types.str_); let ref_str = self.temp(ref_str_ty, test.span); let deref = tcx.require_lang_item(LangItem::Deref, None); let method = trait_method(tcx, deref, sym::deref, [ty]); let eq_block = self.cfg.start_new_block(); - self.cfg.push_assign(block, source_info, ref_string, Rvalue::Ref(re_erased, BorrowKind::Shared, place)); + self.cfg.push_assign( + block, + source_info, + ref_string, + Rvalue::Ref(re_erased, BorrowKind::Shared, place), + ); self.cfg.terminate( block, source_info, @@ -262,10 +273,17 @@ target: Some(eq_block), unwind: UnwindAction::Continue, call_source: CallSource::Misc, - fn_span: source_info.span - } + fn_span: source_info.span, + }, + ); + self.non_scalar_compare( + eq_block, + make_target_blocks, + source_info, + value, + ref_str, + ref_str_ty, ); - self.non_scalar_compare(eq_block, make_target_blocks, source_info, value, ref_str, ref_str_ty); return; } if !ty.is_scalar() { @@ -289,11 +307,14 @@ } } - TestKind::Range(box PatRange { lo, hi, ref end }) => { + TestKind::Range(ref range) => { let lower_bound_success = self.cfg.start_new_block(); let target_blocks = make_target_blocks(self); // Test `val` by computing `lo <= val && val <= hi`, using primitive comparisons. + // FIXME: skip useless comparison when the range is half-open. + let lo = range.lo.to_const(range.ty, self.tcx); + let hi = range.hi.to_const(range.ty, self.tcx); let lo = self.literal_operand(test.span, lo); let hi = self.literal_operand(test.span, hi); let val = Operand::Copy(place); @@ -310,7 +331,7 @@ lo, val.clone(), ); - let op = match *end { + let op = match range.end { RangeEnd::Included => BinOp::Le, RangeEnd::Excluded => BinOp::Lt, }; @@ -678,34 +699,18 @@ } (TestKind::Range(test), PatKind::Range(pat)) => { - use std::cmp::Ordering::*; - if test == pat { self.candidate_without_match_pair(match_pair_index, candidate); return Some(0); } - // For performance, it's important to only do the second - // `compare_const_vals` if necessary. - let no_overlap = if matches!( - (compare_const_vals(self.tcx, test.hi, pat.lo, self.param_env)?, test.end), - (Less, _) | (Equal, RangeEnd::Excluded) // test < pat - ) || matches!( - (compare_const_vals(self.tcx, test.lo, pat.hi, self.param_env)?, pat.end), - (Greater, _) | (Equal, RangeEnd::Excluded) // test > pat - ) { - Some(1) - } else { - None - }; - // If the testing range does not overlap with pattern range, // the pattern can be matched only if this test fails. - no_overlap + if !test.overlaps(pat, self.tcx, self.param_env)? 
{ Some(1) } else { None }
             }
             (TestKind::Range(range), &PatKind::Constant { value }) => {
-                if let Some(false) = self.const_range_contains(&*range, value) {
+                if !range.contains(value, self.tcx, self.param_env)? {
                     // `value` is not contained in the testing range,
                     // so `value` can be matched only if this test fails.
                     Some(1)
@@ -797,27 +802,13 @@
         span_bug!(match_pair.pattern.span, "simplifiable pattern found: {:?}", match_pair.pattern)
     }
-    fn const_range_contains(&self, range: &PatRange<'tcx>, value: Const<'tcx>) -> Option<bool> {
-        use std::cmp::Ordering::*;
-
-        // For performance, it's important to only do the second
-        // `compare_const_vals` if necessary.
-        Some(
-            matches!(compare_const_vals(self.tcx, range.lo, value, self.param_env)?, Less | Equal)
-                && matches!(
-                    (compare_const_vals(self.tcx, value, range.hi, self.param_env)?, range.end),
-                    (Less, _) | (Equal, RangeEnd::Included)
-                ),
-        )
-    }
-
     fn values_not_contained_in_range(
         &self,
         range: &PatRange<'tcx>,
         options: &FxIndexMap<Const<'tcx>, u128>,
     ) -> Option<bool> {
         for &val in options.keys() {
-            if self.const_range_contains(range, val)? {
+            if range.contains(val, self.tcx, self.param_env)? {
                 return Some(false);
             }
         }
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/misc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/misc.rs
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/misc.rs 2023-12-04 19:48:34.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/misc.rs 2023-12-21 16:55:28.000000000 +0000
@@ -15,9 +15,7 @@
     /// N.B., **No cleanup is scheduled for this temporary.** You should
     /// call `schedule_drop` once the temporary is initialized.
     pub(crate) fn temp(&mut self, ty: Ty<'tcx>, span: Span) -> Place<'tcx> {
-        // Mark this local as internal to avoid temporaries with types not present in the
-        // user's code resulting in ICEs from the generator transform.
-        let temp = self.local_decls.push(LocalDecl::new(ty, span).internal());
+        let temp = self.local_decls.push(LocalDecl::new(ty, span));
         let place = Place::from(temp);
         debug!("temp: created temp {:?} with type {:?}", place, self.local_decls[temp].ty);
         place
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/mod.rs
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/mod.rs 2023-12-04 19:48:34.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/mod.rs 2023-12-21 16:55:28.000000000 +0000
@@ -9,7 +9,7 @@
 use rustc_hir as hir;
 use rustc_hir::def::DefKind;
 use rustc_hir::def_id::{DefId, LocalDefId};
-use rustc_hir::{GeneratorKind, Node};
+use rustc_hir::{CoroutineKind, Node};
 use rustc_index::bit_set::GrowableBitSet;
 use rustc_index::{Idx, IndexSlice, IndexVec};
 use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
@@ -53,10 +53,7 @@
 }
 /// Construct the MIR for a given `DefId`.
-fn mir_build(tcx: TyCtxt<'_>, def: LocalDefId) -> Body<'_> {
-    // Ensure unsafeck and abstract const building is ran before we steal the THIR.
- tcx.ensure_with_value() - .thir_check_unsafety(tcx.typeck_root_def_id(def.to_def_id()).expect_local()); +fn mir_build<'tcx>(tcx: TyCtxt<'tcx>, def: LocalDefId) -> Body<'tcx> { tcx.ensure_with_value().thir_abstract_const(def); if let Err(e) = tcx.check_match(def) { return construct_error(tcx, def, e); @@ -65,20 +62,27 @@ let body = match tcx.thir_body(def) { Err(error_reported) => construct_error(tcx, def, error_reported), Ok((thir, expr)) => { - // We ran all queries that depended on THIR at the beginning - // of `mir_build`, so now we can steal it - let thir = thir.steal(); + let build_mir = |thir: &Thir<'tcx>| match thir.body_type { + thir::BodyTy::Fn(fn_sig) => construct_fn(tcx, def, thir, expr, fn_sig), + thir::BodyTy::Const(ty) => construct_const(tcx, def, thir, expr, ty), + }; - tcx.ensure().check_match(def); // this must run before MIR dump, because // "not all control paths return a value" is reported here. // // maybe move the check to a MIR pass? tcx.ensure().check_liveness(def); - match thir.body_type { - thir::BodyTy::Fn(fn_sig) => construct_fn(tcx, def, &thir, expr, fn_sig), - thir::BodyTy::Const(ty) => construct_const(tcx, def, &thir, expr, ty), + if tcx.sess.opts.unstable_opts.thir_unsafeck { + // Don't steal here if THIR unsafeck is being used. Instead + // steal in unsafeck. This is so that pattern inline constants + // can be evaluated as part of building the THIR of the parent + // function without a cycle. + build_mir(&thir.borrow()) + } else { + // We ran all queries that depended on THIR at the beginning + // of `mir_build`, so now we can steal it + build_mir(&thir.steal()) } } }; @@ -173,7 +177,7 @@ check_overflow: bool, fn_span: Span, arg_count: usize, - generator_kind: Option, + coroutine_kind: Option, /// The current set of scopes, updated as we traverse; /// see the `scope` module for more details. @@ -448,7 +452,7 @@ ) -> Body<'tcx> { let span = tcx.def_span(fn_def); let fn_id = tcx.hir().local_def_id_to_hir_id(fn_def); - let generator_kind = tcx.generator_kind(fn_def); + let coroutine_kind = tcx.coroutine_kind(fn_def); // The representation of thir for `-Zunpretty=thir-tree` relies on // the entry expression being the last element of `thir.exprs`. @@ -478,15 +482,15 @@ let arguments = &thir.params; - let (yield_ty, return_ty) = if generator_kind.is_some() { - let gen_ty = arguments[thir::UPVAR_ENV_PARAM].ty; - let gen_sig = match gen_ty.kind() { - ty::Generator(_, gen_args, ..) => gen_args.as_generator().sig(), + let (yield_ty, return_ty) = if coroutine_kind.is_some() { + let coroutine_ty = arguments[thir::UPVAR_ENV_PARAM].ty; + let coroutine_sig = match coroutine_ty.kind() { + ty::Coroutine(_, gen_args, ..) => gen_args.as_coroutine().sig(), _ => { - span_bug!(span, "generator w/o generator type: {:?}", gen_ty) + span_bug!(span, "coroutine w/o coroutine type: {:?}", coroutine_ty) } }; - (Some(gen_sig.yield_ty), gen_sig.return_ty) + (Some(coroutine_sig.yield_ty), coroutine_sig.return_ty) } else { (None, fn_sig.output()) }; @@ -519,7 +523,7 @@ safety, return_ty, return_ty_span, - generator_kind, + coroutine_kind, ); let call_site_scope = @@ -553,7 +557,7 @@ None }; if yield_ty.is_some() { - body.generator.as_mut().unwrap().yield_ty = yield_ty; + body.coroutine.as_mut().unwrap().yield_ty = yield_ty; } body } @@ -616,29 +620,53 @@ /// /// This is required because we may still want to run MIR passes on an item /// with type errors, but normal MIR construction can't handle that in general. 
-fn construct_error(tcx: TyCtxt<'_>, def: LocalDefId, err: ErrorGuaranteed) -> Body<'_> { - let span = tcx.def_span(def); - let hir_id = tcx.hir().local_def_id_to_hir_id(def); - let generator_kind = tcx.generator_kind(def); - let body_owner_kind = tcx.hir().body_owner_kind(def); - - let ty = Ty::new_error(tcx, err); - let num_params = match body_owner_kind { - hir::BodyOwnerKind::Fn => tcx.fn_sig(def).skip_binder().inputs().skip_binder().len(), - hir::BodyOwnerKind::Closure => { - let ty = tcx.type_of(def).instantiate_identity(); - match ty.kind() { - ty::Closure(_, args) => 1 + args.as_closure().sig().inputs().skip_binder().len(), - ty::Generator(..) => 2, - _ => bug!("expected closure or generator, found {ty:?}"), - } +fn construct_error(tcx: TyCtxt<'_>, def_id: LocalDefId, guar: ErrorGuaranteed) -> Body<'_> { + let span = tcx.def_span(def_id); + let hir_id = tcx.hir().local_def_id_to_hir_id(def_id); + let coroutine_kind = tcx.coroutine_kind(def_id); + + let (inputs, output, yield_ty) = match tcx.def_kind(def_id) { + DefKind::Const + | DefKind::AssocConst + | DefKind::AnonConst + | DefKind::InlineConst + | DefKind::Static(_) => (vec![], tcx.type_of(def_id).instantiate_identity(), None), + DefKind::Ctor(..) | DefKind::Fn | DefKind::AssocFn => { + let sig = tcx.liberate_late_bound_regions( + def_id.to_def_id(), + tcx.fn_sig(def_id).instantiate_identity(), + ); + (sig.inputs().to_vec(), sig.output(), None) } - hir::BodyOwnerKind::Const { .. } => 0, - hir::BodyOwnerKind::Static(_) => 0, + DefKind::Closure => { + let closure_ty = tcx.type_of(def_id).instantiate_identity(); + let ty::Closure(_, args) = closure_ty.kind() else { bug!() }; + let args = args.as_closure(); + let sig = tcx.liberate_late_bound_regions(def_id.to_def_id(), args.sig()); + let self_ty = match args.kind() { + ty::ClosureKind::Fn => Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, closure_ty), + ty::ClosureKind::FnMut => Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, closure_ty), + ty::ClosureKind::FnOnce => closure_ty, + }; + ([self_ty].into_iter().chain(sig.inputs().to_vec()).collect(), sig.output(), None) + } + DefKind::Coroutine => { + let coroutine_ty = tcx.type_of(def_id).instantiate_identity(); + let ty::Coroutine(_, args, _) = coroutine_ty.kind() else { bug!() }; + let args = args.as_coroutine(); + let yield_ty = args.yield_ty(); + let return_ty = args.return_ty(); + (vec![coroutine_ty, args.resume_ty()], return_ty, Some(yield_ty)) + } + dk => bug!("{:?} is not a body: {:?}", def_id, dk), }; + + let source_info = SourceInfo { span, scope: OUTERMOST_SOURCE_SCOPE }; + let local_decls = IndexVec::from_iter( + [output].iter().chain(&inputs).map(|ty| LocalDecl::with_source_info(*ty, source_info)), + ); let mut cfg = CFG { basic_blocks: IndexVec::new() }; let mut source_scopes = IndexVec::new(); - let mut local_decls = IndexVec::from_elem_n(LocalDecl::new(ty, span), 1); cfg.start_new_block(); source_scopes.push(SourceScopeData { @@ -651,28 +679,24 @@ safety: Safety::Safe, }), }); - let source_info = SourceInfo { span, scope: OUTERMOST_SOURCE_SCOPE }; - // Some MIR passes will expect the number of parameters to match the - // function declaration. 
-    for _ in 0..num_params {
-        local_decls.push(LocalDecl::with_source_info(ty, source_info));
-    }
     cfg.terminate(START_BLOCK, source_info, TerminatorKind::Unreachable);
     let mut body = Body::new(
-        MirSource::item(def.to_def_id()),
+        MirSource::item(def_id.to_def_id()),
         cfg.basic_blocks,
         source_scopes,
         local_decls,
         IndexVec::new(),
-        num_params,
+        inputs.len(),
         vec![],
         span,
-        generator_kind,
-        Some(err),
+        coroutine_kind,
+        Some(guar),
     );
-    body.generator.as_mut().map(|gen| gen.yield_ty = Some(ty));
+
+    body.coroutine.as_mut().map(|gen| gen.yield_ty = yield_ty);
+
     body
 }
@@ -687,7 +711,7 @@
         safety: Safety,
         return_ty: Ty<'tcx>,
         return_span: Span,
-        generator_kind: Option<GeneratorKind>,
+        coroutine_kind: Option<CoroutineKind>,
     ) -> Builder<'a, 'tcx> {
         let tcx = infcx.tcx;
         let attrs = tcx.hir().attrs(hir_id);
@@ -718,7 +742,7 @@
             cfg: CFG { basic_blocks: IndexVec::new() },
             fn_span: span,
             arg_count,
-            generator_kind,
+            coroutine_kind,
             scopes: scope::Scopes::new(),
             block_context: BlockContext::new(),
             source_scopes: IndexVec::new(),
@@ -760,7 +784,7 @@
             self.arg_count,
             self.var_debug_info,
             self.fn_span,
-            self.generator_kind,
+            self.coroutine_kind,
             None,
         )
     }
@@ -777,7 +801,7 @@
         let upvar_args = match closure_ty.kind() {
             ty::Closure(_, args) => ty::UpvarArgs::Closure(args),
-            ty::Generator(_, args, _) => ty::UpvarArgs::Generator(args),
+            ty::Coroutine(_, args, _) => ty::UpvarArgs::Coroutine(args),
             _ => return,
         };
@@ -847,7 +871,9 @@
             self.local_decls.push(LocalDecl::with_source_info(param.ty, source_info));
             // If this is a simple binding pattern, give debuginfo a nice name.
-            if let Some(ref pat) = param.pat && let Some(name) = pat.simple_ident() {
+            if let Some(ref pat) = param.pat
+                && let Some(name) = pat.simple_ident()
+            {
                 self.var_debug_info.push(VarDebugInfo {
                     name,
                     source_info,
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/scope.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/scope.rs
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/scope.rs 2023-12-04 19:48:34.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/build/scope.rs 2023-12-21 16:55:28.000000000 +0000
@@ -108,8 +108,8 @@
     /// [DropTree] for more details.
     unwind_drops: DropTree,
-    /// Drops that need to be done on paths to the `GeneratorDrop` terminator.
-    generator_drops: DropTree,
+    /// Drops that need to be done on paths to the `CoroutineDrop` terminator.
+    coroutine_drops: DropTree,
 }
 #[derive(Debug)]
@@ -133,8 +133,8 @@
     cached_unwind_block: Option<DropIdx>,
     /// The drop index that will drop everything in and below this scope on a
-    /// generator drop path.
-    cached_generator_drop_block: Option<DropIdx>,
+    /// coroutine drop path.
+    cached_coroutine_drop_block: Option<DropIdx>,
 }
 #[derive(Clone, Copy, Debug)]
@@ -194,7 +194,7 @@
 /// A tree of drops that we have deferred lowering. It's used for:
 ///
 /// * Drops on unwind paths
-/// * Drops on generator drop paths (when a suspended generator is dropped)
+/// * Drops on coroutine drop paths (when a suspended coroutine is dropped)
 /// * Drops on return and loop exit paths
 /// * Drops on the else path in an `if let` chain
 ///
@@ -222,8 +222,8 @@
     /// * polluting the cleanup MIR with StorageDead creates
    ///   landing pads even though there's no actual destructors
     /// * freeing up stack space has no effect during unwinding
-    /// Note that for generators we do emit StorageDeads, for the
-    /// use of optimizations in the MIR generator transform.
+ /// Note that for coroutines we do emit StorageDeads, for the + /// use of optimizations in the MIR coroutine transform. fn needs_cleanup(&self) -> bool { self.drops.iter().any(|drop| match drop.kind { DropKind::Value => true, @@ -233,7 +233,7 @@ fn invalidate_cache(&mut self) { self.cached_unwind_block = None; - self.cached_generator_drop_block = None; + self.cached_coroutine_drop_block = None; } } @@ -407,7 +407,7 @@ breakable_scopes: Vec::new(), if_then_scope: None, unwind_drops: DropTree::new(), - generator_drops: DropTree::new(), + coroutine_drops: DropTree::new(), } } @@ -419,7 +419,7 @@ drops: vec![], moved_locals: vec![], cached_unwind_block: None, - cached_generator_drop_block: None, + cached_coroutine_drop_block: None, }); } @@ -725,7 +725,7 @@ // Add a dummy `Assign` statement to the CFG, with the span for the source code's `continue` // statement. fn add_dummy_assignment(&mut self, span: Span, block: BasicBlock, source_info: SourceInfo) { - let local_decl = LocalDecl::new(Ty::new_unit(self.tcx), span).internal(); + let local_decl = LocalDecl::new(Ty::new_unit(self.tcx), span); let temp_place = Place::from(self.local_decls.push(local_decl)); self.cfg.push_assign_unit(block, source_info, temp_place, self.tcx); } @@ -734,7 +734,7 @@ // If we are emitting a `drop` statement, we need to have the cached // diverge cleanup pads ready in case that drop panics. let needs_cleanup = self.scopes.scopes.last().is_some_and(|scope| scope.needs_cleanup()); - let is_generator = self.generator_kind.is_some(); + let is_coroutine = self.coroutine_kind.is_some(); let unwind_to = if needs_cleanup { self.diverge_cleanup() } else { DropIdx::MAX }; let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes"); @@ -744,7 +744,7 @@ scope, block, unwind_to, - is_generator && needs_cleanup, + is_coroutine && needs_cleanup, self.arg_count, )) } @@ -984,11 +984,11 @@ // caches gets invalidated. i.e., if a new drop is added into the middle scope, the // cache of outer scope stays intact. // - // Since we only cache drops for the unwind path and the generator drop + // Since we only cache drops for the unwind path and the coroutine drop // path, we only need to invalidate the cache for drops that happen on - // the unwind or generator drop paths. This means that for - // non-generators we don't need to invalidate caches for `DropKind::Storage`. - let invalidate_caches = needs_drop || self.generator_kind.is_some(); + // the unwind or coroutine drop paths. This means that for + // non-coroutines we don't need to invalidate caches for `DropKind::Storage`. + let invalidate_caches = needs_drop || self.coroutine_kind.is_some(); for scope in self.scopes.scopes.iter_mut().rev() { if invalidate_caches { scope.invalidate_cache(); @@ -1101,10 +1101,10 @@ return cached_drop; } - let is_generator = self.generator_kind.is_some(); + let is_coroutine = self.coroutine_kind.is_some(); for scope in &mut self.scopes.scopes[uncached_scope..=target] { for drop in &scope.drops { - if is_generator || drop.kind == DropKind::Value { + if is_coroutine || drop.kind == DropKind::Value { cached_drop = self.scopes.unwind_drops.add_drop(*drop, cached_drop); } } @@ -1137,17 +1137,17 @@ } /// Sets up a path that performs all required cleanup for dropping a - /// generator, starting from the given block that ends in + /// coroutine, starting from the given block that ends in /// [TerminatorKind::Yield]. /// - /// This path terminates in GeneratorDrop. 
- pub(crate) fn generator_drop_cleanup(&mut self, yield_block: BasicBlock) { + /// This path terminates in CoroutineDrop. + pub(crate) fn coroutine_drop_cleanup(&mut self, yield_block: BasicBlock) { debug_assert!( matches!( self.cfg.block_data(yield_block).terminator().kind, TerminatorKind::Yield { .. } ), - "generator_drop_cleanup called on block with non-yield terminator." + "coroutine_drop_cleanup called on block with non-yield terminator." ); let (uncached_scope, mut cached_drop) = self .scopes @@ -1156,18 +1156,18 @@ .enumerate() .rev() .find_map(|(scope_idx, scope)| { - scope.cached_generator_drop_block.map(|cached_block| (scope_idx + 1, cached_block)) + scope.cached_coroutine_drop_block.map(|cached_block| (scope_idx + 1, cached_block)) }) .unwrap_or((0, ROOT_NODE)); for scope in &mut self.scopes.scopes[uncached_scope..] { for drop in &scope.drops { - cached_drop = self.scopes.generator_drops.add_drop(*drop, cached_drop); + cached_drop = self.scopes.coroutine_drops.add_drop(*drop, cached_drop); } - scope.cached_generator_drop_block = Some(cached_drop); + scope.cached_coroutine_drop_block = Some(cached_drop); } - self.scopes.generator_drops.add_entry(yield_block, cached_drop); + self.scopes.coroutine_drops.add_entry(yield_block, cached_drop); } /// Utility function for *non*-scope code to build their own drops @@ -1274,7 +1274,7 @@ // drops panic (panicking while unwinding will abort, so there's no need for // another set of arrows). // - // For generators, we unwind from a drop on a local to its StorageDead + // For coroutines, we unwind from a drop on a local to its StorageDead // statement. For other functions we don't worry about StorageDead. The // drops for the unwind path should have already been generated by // `diverge_cleanup_gen`. @@ -1346,7 +1346,7 @@ blocks[ROOT_NODE] = continue_block; drops.build_mir::(&mut self.cfg, &mut blocks); - let is_generator = self.generator_kind.is_some(); + let is_coroutine = self.coroutine_kind.is_some(); // Link the exit drop tree to unwind drop tree. if drops.drops.iter().any(|(drop, _)| drop.kind == DropKind::Value) { @@ -1355,7 +1355,7 @@ for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(1) { match drop_data.0.kind { DropKind::Storage => { - if is_generator { + if is_coroutine { let unwind_drop = self .scopes .unwind_drops @@ -1381,10 +1381,10 @@ blocks[ROOT_NODE].map(BasicBlock::unit) } - /// Build the unwind and generator drop trees. + /// Build the unwind and coroutine drop trees. pub(crate) fn build_drop_trees(&mut self) { - if self.generator_kind.is_some() { - self.build_generator_drop_trees(); + if self.coroutine_kind.is_some() { + self.build_coroutine_drop_trees(); } else { Self::build_unwind_tree( &mut self.cfg, @@ -1395,18 +1395,18 @@ } } - fn build_generator_drop_trees(&mut self) { - // Build the drop tree for dropping the generator while it's suspended. - let drops = &mut self.scopes.generator_drops; + fn build_coroutine_drop_trees(&mut self) { + // Build the drop tree for dropping the coroutine while it's suspended. 
+ let drops = &mut self.scopes.coroutine_drops; let cfg = &mut self.cfg; let fn_span = self.fn_span; let mut blocks = IndexVec::from_elem(None, &drops.drops); - drops.build_mir::(cfg, &mut blocks); + drops.build_mir::(cfg, &mut blocks); if let Some(root_block) = blocks[ROOT_NODE] { cfg.terminate( root_block, SourceInfo::outermost(fn_span), - TerminatorKind::GeneratorDrop, + TerminatorKind::CoroutineDrop, ); } @@ -1416,11 +1416,11 @@ Self::build_unwind_tree(cfg, unwind_drops, fn_span, resume_block); // Build the drop tree for unwinding when dropping a suspended - // generator. + // coroutine. // // This is a different tree to the standard unwind paths here to // prevent drop elaboration from creating drop flags that would have - // to be captured by the generator. I'm not sure how important this + // to be captured by the coroutine. I'm not sure how important this // optimization is, but it is here. for (drop_idx, drop_data) in drops.drops.iter_enumerated() { if let DropKind::Value = drop_data.0.kind { @@ -1461,9 +1461,9 @@ } } -struct GeneratorDrop; +struct CoroutineDrop; -impl<'tcx> DropTreeBuilder<'tcx> for GeneratorDrop { +impl<'tcx> DropTreeBuilder<'tcx> for CoroutineDrop { fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock { cfg.start_new_block() } @@ -1474,7 +1474,7 @@ } else { span_bug!( term.source_info.span, - "cannot enter generator drop tree from {:?}", + "cannot enter coroutine drop tree from {:?}", term.kind ) } @@ -1511,7 +1511,7 @@ | TerminatorKind::Return | TerminatorKind::Unreachable | TerminatorKind::Yield { .. } - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::FalseEdge { .. } => { span_bug!(term.source_info.span, "cannot unwind from {:?}", term.kind) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/check_unsafety.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/check_unsafety.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/check_unsafety.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/check_unsafety.rs 2023-12-21 16:55:28.000000000 +0000 @@ -13,6 +13,7 @@ use rustc_span::symbol::Symbol; use rustc_span::Span; +use std::mem; use std::ops::Bound; struct UnsafetyVisitor<'a, 'tcx> { @@ -24,7 +25,6 @@ /// The current "safety context". This notably tracks whether we are in an /// `unsafe` block, and whether it has been used. safety_context: SafetyContext, - body_unsafety: BodyUnsafety, /// The `#[target_feature]` attributes of the body. Used for checking /// calls to functions with `#[target_feature]` (RFC 2396). body_target_features: &'tcx [Symbol], @@ -34,43 +34,54 @@ in_union_destructure: bool, param_env: ParamEnv<'tcx>, inside_adt: bool, + warnings: &'a mut Vec, + + /// Flag to ensure that we only suggest wrapping the entire function body in + /// an unsafe block once. + suggest_unsafe_block: bool, } impl<'tcx> UnsafetyVisitor<'_, 'tcx> { fn in_safety_context(&mut self, safety_context: SafetyContext, f: impl FnOnce(&mut Self)) { - if let ( - SafetyContext::UnsafeBlock { span: enclosing_span, .. }, - SafetyContext::UnsafeBlock { span: block_span, hir_id, .. 
}, - ) = (self.safety_context, safety_context) - { - self.warn_unused_unsafe( - hir_id, - block_span, - Some(UnusedUnsafeEnclosing::Block { - span: self.tcx.sess.source_map().guess_head_span(enclosing_span), - }), - ); - f(self); - } else { - let prev_context = self.safety_context; - self.safety_context = safety_context; + let prev_context = mem::replace(&mut self.safety_context, safety_context); - f(self); + f(self); - if let SafetyContext::UnsafeBlock { used: false, span, hir_id } = self.safety_context { - self.warn_unused_unsafe( - hir_id, - span, - if self.unsafe_op_in_unsafe_fn_allowed() { - self.body_unsafety - .unsafe_fn_sig_span() - .map(|span| UnusedUnsafeEnclosing::Function { span }) - } else { - None - }, - ); + let safety_context = mem::replace(&mut self.safety_context, prev_context); + if let SafetyContext::UnsafeBlock { used, span, hir_id, nested_used_blocks } = + safety_context + { + if !used { + self.warn_unused_unsafe(hir_id, span, None); + + if let SafetyContext::UnsafeBlock { + nested_used_blocks: ref mut prev_nested_used_blocks, + .. + } = self.safety_context + { + prev_nested_used_blocks.extend(nested_used_blocks); + } + } else { + for block in nested_used_blocks { + self.warn_unused_unsafe( + block.hir_id, + block.span, + Some(UnusedUnsafeEnclosing::Block { + span: self.tcx.sess.source_map().guess_head_span(span), + }), + ); + } + + match self.safety_context { + SafetyContext::UnsafeBlock { + nested_used_blocks: ref mut prev_nested_used_blocks, + .. + } => { + prev_nested_used_blocks.push(NestedUsedBlock { hir_id, span }); + } + _ => (), + } } - self.safety_context = prev_context; } } @@ -88,7 +99,13 @@ SafetyContext::UnsafeFn if unsafe_op_in_unsafe_fn_allowed => {} SafetyContext::UnsafeFn => { // unsafe_op_in_unsafe_fn is disallowed - kind.emit_unsafe_op_in_unsafe_fn_lint(self.tcx, self.hir_context, span); + kind.emit_unsafe_op_in_unsafe_fn_lint( + self.tcx, + self.hir_context, + span, + self.suggest_unsafe_block, + ); + self.suggest_unsafe_block = false; } SafetyContext::Safe => { kind.emit_requires_unsafe_err( @@ -102,18 +119,12 @@ } fn warn_unused_unsafe( - &self, + &mut self, hir_id: hir::HirId, block_span: Span, enclosing_unsafe: Option, ) { - let block_span = self.tcx.sess.source_map().guess_head_span(block_span); - self.tcx.emit_spanned_lint( - UNUSED_UNSAFE, - hir_id, - block_span, - UnusedUnsafe { span: block_span, enclosing: enclosing_unsafe }, - ); + self.warnings.push(UnusedUnsafeWarning { hir_id, block_span, enclosing_unsafe }); } /// Whether the `unsafe_op_in_unsafe_fn` lint is `allow`ed at the current HIR node. @@ -121,12 +132,21 @@ self.tcx.lint_level_at_node(UNSAFE_OP_IN_UNSAFE_FN, self.hir_context).0 == Level::Allow } - /// Handle closures/generators/inline-consts, which is unsafecked with their parent body. + /// Handle closures/coroutines/inline-consts, which is unsafecked with their parent body. fn visit_inner_body(&mut self, def: LocalDefId) { if let Ok((inner_thir, expr)) = self.tcx.thir_body(def) { - let inner_thir = &inner_thir.borrow(); + // Runs all other queries that depend on THIR. 
+ self.tcx.ensure_with_value().mir_built(def); + let inner_thir = &inner_thir.steal(); let hir_context = self.tcx.hir().local_def_id_to_hir_id(def); - let mut inner_visitor = UnsafetyVisitor { thir: inner_thir, hir_context, ..*self }; + let safety_context = mem::replace(&mut self.safety_context, SafetyContext::Safe); + let mut inner_visitor = UnsafetyVisitor { + thir: inner_thir, + hir_context, + safety_context, + warnings: self.warnings, + ..*self + }; inner_visitor.visit_expr(&inner_thir[expr]); // Unsafe blocks can be used in the inner body, make sure to take it into account self.safety_context = inner_visitor.safety_context; @@ -193,8 +213,15 @@ }); } BlockSafety::ExplicitUnsafe(hir_id) => { + let used = + matches!(self.tcx.lint_level_at_node(UNUSED_UNSAFE, hir_id), (Level::Allow, _)); self.in_safety_context( - SafetyContext::UnsafeBlock { span: block.span, hir_id, used: false }, + SafetyContext::UnsafeBlock { + span: block.span, + hir_id, + used, + nested_used_blocks: Vec::new(), + }, |this| visit::walk_block(this, block), ); } @@ -224,7 +251,9 @@ PatKind::Wild | // these just wrap other patterns PatKind::Or { .. } | - PatKind::AscribeUserType { .. } => {} + PatKind::InlineConstant { .. } | + PatKind::AscribeUserType { .. } | + PatKind::Error(_) => {} } }; @@ -276,6 +305,10 @@ visit::walk_pat(self, pat); self.inside_adt = old_inside_adt; } + PatKind::InlineConstant { def, .. } => { + self.visit_inner_body(*def); + visit::walk_pat(self, pat); + } _ => { visit::walk_pat(self, pat); } @@ -372,7 +405,9 @@ } } ExprKind::Deref { arg } => { - if let ExprKind::StaticRef { def_id, .. } = self.thir[arg].kind { + if let ExprKind::StaticRef { def_id, .. } | ExprKind::ThreadLocalRef(def_id) = + self.thir[arg].kind + { if self.tcx.is_mutable_static(def_id) { self.requires_unsafe(expr.span, UseOfMutableStatic); } else if self.tcx.is_foreign_item(def_id) { @@ -411,7 +446,9 @@ } ExprKind::Field { lhs, .. } => { let lhs = &self.thir[lhs]; - if let ty::Adt(adt_def, _) = lhs.ty.kind() && adt_def.is_union() { + if let ty::Adt(adt_def, _) = lhs.ty.kind() + && adt_def.is_union() + { if let Some((assigned_ty, assignment_span)) = self.assignment_info { if assigned_ty.needs_drop(self.tcx, self.param_env) { // This would be unsafe, but should be outright impossible since we reject such unions. @@ -458,48 +495,35 @@ } } } - ExprKind::Let { expr: expr_id, .. } => { - let let_expr = &self.thir[expr_id]; - if let ty::Adt(adt_def, _) = let_expr.ty.kind() && adt_def.is_union() { - self.requires_unsafe(expr.span, AccessToUnionField); - } - } _ => {} } visit::walk_expr(self, expr); } } -#[derive(Clone, Copy)] +#[derive(Clone)] enum SafetyContext { Safe, BuiltinUnsafeBlock, UnsafeFn, - UnsafeBlock { span: Span, hir_id: hir::HirId, used: bool }, + UnsafeBlock { + span: Span, + hir_id: hir::HirId, + used: bool, + nested_used_blocks: Vec, + }, } #[derive(Clone, Copy)] -enum BodyUnsafety { - /// The body is not unsafe. - Safe, - /// The body is an unsafe function. The span points to - /// the signature of the function. - Unsafe(Span), +struct NestedUsedBlock { + hir_id: hir::HirId, + span: Span, } -impl BodyUnsafety { - /// Returns whether the body is unsafe. - fn is_unsafe(&self) -> bool { - matches!(self, BodyUnsafety::Unsafe(_)) - } - - /// If the body is unsafe, returns the `Span` of its signature. 
- fn unsafe_fn_sig_span(self) -> Option { - match self { - BodyUnsafety::Unsafe(span) => Some(span), - BodyUnsafety::Safe => None, - } - } +struct UnusedUnsafeWarning { + hir_id: hir::HirId, + block_span: Span, + enclosing_unsafe: Option, } #[derive(Clone, Copy, PartialEq)] @@ -524,7 +548,22 @@ tcx: TyCtxt<'_>, hir_id: hir::HirId, span: Span, + suggest_unsafe_block: bool, ) { + let parent_id = tcx.hir().get_parent_item(hir_id); + let parent_owner = tcx.hir().owner(parent_id); + let should_suggest = parent_owner.fn_sig().map_or(false, |sig| sig.header.is_unsafe()); + let unsafe_not_inherited_note = if should_suggest { + suggest_unsafe_block.then(|| { + let body_span = tcx.hir().body(parent_owner.body_id().unwrap()).value.span; + UnsafeNotInheritedLintNote { + signature_span: tcx.def_span(parent_id.def_id), + body_span, + } + }) + } else { + None + }; // FIXME: ideally we would want to trim the def paths, but this is not // feasible with the current lint emission API (see issue #106126). match self { @@ -535,61 +574,89 @@ UnsafeOpInUnsafeFnCallToUnsafeFunctionRequiresUnsafe { span, function: &with_no_trimmed_paths!(tcx.def_path_str(*did)), + unsafe_not_inherited_note, }, ), CallToUnsafeFunction(None) => tcx.emit_spanned_lint( UNSAFE_OP_IN_UNSAFE_FN, hir_id, span, - UnsafeOpInUnsafeFnCallToUnsafeFunctionRequiresUnsafeNameless { span }, + UnsafeOpInUnsafeFnCallToUnsafeFunctionRequiresUnsafeNameless { + span, + unsafe_not_inherited_note, + }, ), UseOfInlineAssembly => tcx.emit_spanned_lint( UNSAFE_OP_IN_UNSAFE_FN, hir_id, span, - UnsafeOpInUnsafeFnUseOfInlineAssemblyRequiresUnsafe { span }, + UnsafeOpInUnsafeFnUseOfInlineAssemblyRequiresUnsafe { + span, + unsafe_not_inherited_note, + }, ), InitializingTypeWith => tcx.emit_spanned_lint( UNSAFE_OP_IN_UNSAFE_FN, hir_id, span, - UnsafeOpInUnsafeFnInitializingTypeWithRequiresUnsafe { span }, + UnsafeOpInUnsafeFnInitializingTypeWithRequiresUnsafe { + span, + unsafe_not_inherited_note, + }, ), UseOfMutableStatic => tcx.emit_spanned_lint( UNSAFE_OP_IN_UNSAFE_FN, hir_id, span, - UnsafeOpInUnsafeFnUseOfMutableStaticRequiresUnsafe { span }, + UnsafeOpInUnsafeFnUseOfMutableStaticRequiresUnsafe { + span, + unsafe_not_inherited_note, + }, ), UseOfExternStatic => tcx.emit_spanned_lint( UNSAFE_OP_IN_UNSAFE_FN, hir_id, span, - UnsafeOpInUnsafeFnUseOfExternStaticRequiresUnsafe { span }, + UnsafeOpInUnsafeFnUseOfExternStaticRequiresUnsafe { + span, + unsafe_not_inherited_note, + }, ), DerefOfRawPointer => tcx.emit_spanned_lint( UNSAFE_OP_IN_UNSAFE_FN, hir_id, span, - UnsafeOpInUnsafeFnDerefOfRawPointerRequiresUnsafe { span }, + UnsafeOpInUnsafeFnDerefOfRawPointerRequiresUnsafe { + span, + unsafe_not_inherited_note, + }, ), AccessToUnionField => tcx.emit_spanned_lint( UNSAFE_OP_IN_UNSAFE_FN, hir_id, span, - UnsafeOpInUnsafeFnAccessToUnionFieldRequiresUnsafe { span }, + UnsafeOpInUnsafeFnAccessToUnionFieldRequiresUnsafe { + span, + unsafe_not_inherited_note, + }, ), MutationOfLayoutConstrainedField => tcx.emit_spanned_lint( UNSAFE_OP_IN_UNSAFE_FN, hir_id, span, - UnsafeOpInUnsafeFnMutationOfLayoutConstrainedFieldRequiresUnsafe { span }, + UnsafeOpInUnsafeFnMutationOfLayoutConstrainedFieldRequiresUnsafe { + span, + unsafe_not_inherited_note, + }, ), BorrowOfLayoutConstrainedField => tcx.emit_spanned_lint( UNSAFE_OP_IN_UNSAFE_FN, hir_id, span, - UnsafeOpInUnsafeFnBorrowOfLayoutConstrainedFieldRequiresUnsafe { span }, + UnsafeOpInUnsafeFnBorrowOfLayoutConstrainedFieldRequiresUnsafe { + span, + unsafe_not_inherited_note, + }, ), CallToFunctionWith(did) => 
tcx.emit_spanned_lint( UNSAFE_OP_IN_UNSAFE_FN, @@ -598,6 +665,7 @@ UnsafeOpInUnsafeFnCallToFunctionWithRequiresUnsafe { span, function: &with_no_trimmed_paths!(tcx.def_path_str(*did)), + unsafe_not_inherited_note, }, ), } @@ -616,8 +684,7 @@ && let hir::BlockCheckMode::UnsafeBlock(_) = block.rules { true - } - else if let Some(sig) = tcx.hir().fn_sig_by_hir_id(*id) + } else if let Some(sig) = tcx.hir().fn_sig_by_hir_id(*id) && sig.header.is_unsafe() { true @@ -784,34 +851,47 @@ } let Ok((thir, expr)) = tcx.thir_body(def) else { return }; - let thir = &thir.borrow(); + // Runs all other queries that depend on THIR. + tcx.ensure_with_value().mir_built(def); + let thir = &thir.steal(); // If `thir` is empty, a type error occurred, skip this body. if thir.exprs.is_empty() { return; } let hir_id = tcx.hir().local_def_id_to_hir_id(def); - let body_unsafety = tcx.hir().fn_sig_by_hir_id(hir_id).map_or(BodyUnsafety::Safe, |fn_sig| { + let safety_context = tcx.hir().fn_sig_by_hir_id(hir_id).map_or(SafetyContext::Safe, |fn_sig| { if fn_sig.header.unsafety == hir::Unsafety::Unsafe { - BodyUnsafety::Unsafe(fn_sig.span) + SafetyContext::UnsafeFn } else { - BodyUnsafety::Safe + SafetyContext::Safe } }); let body_target_features = &tcx.body_codegen_attrs(def.to_def_id()).target_features; - let safety_context = - if body_unsafety.is_unsafe() { SafetyContext::UnsafeFn } else { SafetyContext::Safe }; + let mut warnings = Vec::new(); let mut visitor = UnsafetyVisitor { tcx, thir, safety_context, hir_context: hir_id, - body_unsafety, body_target_features, assignment_info: None, in_union_destructure: false, param_env: tcx.param_env(def), inside_adt: false, + warnings: &mut warnings, + suggest_unsafe_block: true, }; visitor.visit_expr(&thir[expr]); + + warnings.sort_by_key(|w| w.block_span); + for UnusedUnsafeWarning { hir_id, block_span, enclosing_unsafe } in warnings { + let block_span = tcx.sess.source_map().guess_head_span(block_span); + tcx.emit_spanned_lint( + UNUSED_UNSAFE, + hir_id, + block_span, + UnusedUnsafe { span: block_span, enclosing: enclosing_unsafe }, + ); + } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,6 @@ use crate::{ fluent_generated as fluent, - thir::pattern::{deconstruct_pat::DeconstructedPat, MatchCheckCtxt}, + thir::pattern::{deconstruct_pat::WitnessPat, MatchCheckCtxt}, }; use rustc_errors::{ error_code, AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, @@ -29,6 +29,8 @@ #[label] pub span: Span, pub function: &'a str, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option, } #[derive(LintDiagnostic)] @@ -37,6 +39,8 @@ pub struct UnsafeOpInUnsafeFnCallToUnsafeFunctionRequiresUnsafeNameless { #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option, } #[derive(LintDiagnostic)] @@ -45,6 +49,8 @@ pub struct UnsafeOpInUnsafeFnUseOfInlineAssemblyRequiresUnsafe { #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option, } #[derive(LintDiagnostic)] @@ -53,6 +59,8 @@ pub struct UnsafeOpInUnsafeFnInitializingTypeWithRequiresUnsafe { #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option, } #[derive(LintDiagnostic)] @@ -61,6 +69,8 @@ pub struct 
UnsafeOpInUnsafeFnUseOfMutableStaticRequiresUnsafe { #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option, } #[derive(LintDiagnostic)] @@ -69,6 +79,8 @@ pub struct UnsafeOpInUnsafeFnUseOfExternStaticRequiresUnsafe { #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option, } #[derive(LintDiagnostic)] @@ -77,6 +89,8 @@ pub struct UnsafeOpInUnsafeFnDerefOfRawPointerRequiresUnsafe { #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option, } #[derive(LintDiagnostic)] @@ -85,6 +99,8 @@ pub struct UnsafeOpInUnsafeFnAccessToUnionFieldRequiresUnsafe { #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option, } #[derive(LintDiagnostic)] @@ -93,6 +109,8 @@ pub struct UnsafeOpInUnsafeFnMutationOfLayoutConstrainedFieldRequiresUnsafe { #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option, } #[derive(LintDiagnostic)] @@ -100,6 +118,8 @@ pub struct UnsafeOpInUnsafeFnBorrowOfLayoutConstrainedFieldRequiresUnsafe { #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option, } #[derive(LintDiagnostic)] @@ -109,6 +129,8 @@ #[label] pub span: Span, pub function: &'a str, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option, } #[derive(Diagnostic)] @@ -376,6 +398,27 @@ pub span: Span, } +pub struct UnsafeNotInheritedLintNote { + pub signature_span: Span, + pub body_span: Span, +} + +impl AddToDiagnostic for UnsafeNotInheritedLintNote { + fn add_to_diagnostic_with(self, diag: &mut Diagnostic, _: F) + where + F: Fn(&mut Diagnostic, SubdiagnosticMessage) -> SubdiagnosticMessage, + { + diag.span_note(self.signature_span, fluent::mir_build_unsafe_fn_safe_body); + let body_start = self.body_span.shrink_to_lo(); + let body_end = self.body_span.shrink_to_hi(); + diag.tool_only_multipart_suggestion( + fluent::mir_build_wrap_suggestion, + vec![(body_start, "{ unsafe ".into()), (body_end, "}".into())], + Applicability::MaybeIncorrect, + ); + } +} + #[derive(LintDiagnostic)] #[diag(mir_build_unused_unsafe)] pub struct UnusedUnsafe { @@ -392,11 +435,6 @@ #[primary_span] span: Span, }, - #[label(mir_build_unused_unsafe_enclosing_fn_label)] - Function { - #[primary_span] - span: Span, - }, } pub(crate) struct NonExhaustivePatternsTypeNotEmpty<'p, 'tcx, 'm> { @@ -551,6 +589,7 @@ #[label] pub span: Span, pub ty: Ty<'tcx>, + pub min: i128, pub max: u128, } @@ -793,6 +832,18 @@ pub uncovered: Uncovered<'tcx>, } +#[derive(LintDiagnostic)] +#[diag(mir_build_non_exhaustive_omitted_pattern_lint_on_arm)] +#[help] +pub(crate) struct NonExhaustiveOmittedPatternLintOnArm { + #[label] + pub lint_span: Span, + #[suggestion(code = "#[{lint_level}({lint_name})]\n", applicability = "maybe-incorrect")] + pub suggest_lint_on_match: Option, + pub lint_level: &'static str, + pub lint_name: &'static str, +} + #[derive(Subdiagnostic)] #[label(mir_build_uncovered)] pub(crate) struct Uncovered<'tcx> { @@ -809,15 +860,21 @@ pub fn new<'p>( span: Span, cx: &MatchCheckCtxt<'p, 'tcx>, - witnesses: Vec>, + witnesses: Vec>, ) -> Self { - let witness_1 = witnesses.get(0).unwrap().to_pat(cx); + let witness_1 = witnesses.get(0).unwrap().to_diagnostic_pat(cx); Self { span, count: witnesses.len(), // Substitute dummy values if witnesses is smaller than 3. These will never be read. 
- witness_2: witnesses.get(1).map(|w| w.to_pat(cx)).unwrap_or_else(|| witness_1.clone()), - witness_3: witnesses.get(2).map(|w| w.to_pat(cx)).unwrap_or_else(|| witness_1.clone()), + witness_2: witnesses + .get(1) + .map(|w| w.to_diagnostic_pat(cx)) + .unwrap_or_else(|| witness_1.clone()), + witness_3: witnesses + .get(2) + .map(|w| w.to_diagnostic_pat(cx)) + .unwrap_or_else(|| witness_1.clone()), witness_1, remainder: witnesses.len().saturating_sub(3), } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -20,7 +20,7 @@ mod check_unsafety; mod errors; pub mod lints; -pub mod thir; +mod thir; use rustc_middle::query::Providers; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/lints.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/lints.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/lints.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/lints.rs 2023-12-21 16:55:28.000000000 +0000 @@ -67,16 +67,21 @@ let def_id = body.source.def_id().expect_local(); // First check if `body` is an `fn drop()` of `Drop` - if let DefKind::AssocFn = tcx.def_kind(def_id) && - let Some(trait_ref) = tcx.impl_of_method(def_id.to_def_id()).and_then(|def_id| tcx.impl_trait_ref(def_id)) && - let Some(drop_trait) = tcx.lang_items().drop_trait() && drop_trait == trait_ref.instantiate_identity().def_id { - + if let DefKind::AssocFn = tcx.def_kind(def_id) + && let Some(trait_ref) = + tcx.impl_of_method(def_id.to_def_id()).and_then(|def_id| tcx.impl_trait_ref(def_id)) + && let Some(drop_trait) = tcx.lang_items().drop_trait() + && drop_trait == trait_ref.instantiate_identity().def_id + { // It was. Now figure out for what type `Drop` is implemented and then // check for recursion. - if let ty::Ref(_, dropped_ty, _) = tcx.liberate_late_bound_regions( - def_id.to_def_id(), - tcx.fn_sig(def_id).instantiate_identity().input(0), - ).kind() { + if let ty::Ref(_, dropped_ty, _) = tcx + .liberate_late_bound_regions( + def_id.to_def_id(), + tcx.fn_sig(def_id).instantiate_identity().input(0), + ) + .kind() + { check_recursion(tcx, body, RecursiveDrop { drop_for: *dropped_ty }); } } @@ -187,7 +192,7 @@ match self.body[bb].terminator().kind { // These terminators return control flow to the caller. 
TerminatorKind::UnwindTerminate(_) - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::UnwindResume | TerminatorKind::Return | TerminatorKind::Unreachable diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/cx/expr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/cx/expr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/cx/expr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/cx/expr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -191,11 +191,16 @@ source: self.mirror_expr(source), cast: PointerCoercion::ArrayToPointer, } - } else { - // check whether this is casting an enum variant discriminant - // to prevent cycles, we refer to the discriminant initializer + } else if let hir::ExprKind::Path(ref qpath) = source.kind + && let res = self.typeck_results().qpath_res(qpath, source.hir_id) + && let ty = self.typeck_results().node_type(source.hir_id) + && let ty::Adt(adt_def, args) = ty.kind() + && let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Const), variant_ctor_id) = res + { + // Check whether this is casting an enum variant discriminant. + // To prevent cycles, we refer to the discriminant initializer, // which is always an integer and thus doesn't need to know the - // enum's layout (or its tag type) to compute it during const eval + // enum's layout (or its tag type) to compute it during const eval. // Example: // enum Foo { // A, @@ -204,21 +209,6 @@ // The correct solution would be to add symbolic computations to miri, // so we wouldn't have to compute and store the actual value - let hir::ExprKind::Path(ref qpath) = source.kind else { - return ExprKind::Cast { source: self.mirror_expr(source) }; - }; - - let res = self.typeck_results().qpath_res(qpath, source.hir_id); - let ty = self.typeck_results().node_type(source.hir_id); - let ty::Adt(adt_def, args) = ty.kind() else { - return ExprKind::Cast { source: self.mirror_expr(source) }; - }; - - let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Const), variant_ctor_id) = res - else { - return ExprKind::Cast { source: self.mirror_expr(source) }; - }; - let idx = adt_def.variant_index_with_ctor_id(variant_ctor_id); let (discr_did, discr_offset) = adt_def.discriminant_def_for_variant(idx); @@ -255,6 +245,10 @@ }; ExprKind::Cast { source } + } else { + // Default to `ExprKind::Cast` for all explicit casts. + // MIR building then picks the right MIR casts based on the types. 
+ ExprKind::Cast { source: self.mirror_expr(source) } } } @@ -320,17 +314,23 @@ reason: errors::RustcBoxAttrReason::Attributes, }); } else if let Some(box_item) = tcx.lang_items().owned_box() { - if let hir::ExprKind::Path(hir::QPath::TypeRelative(ty, fn_path)) = fun.kind + if let hir::ExprKind::Path(hir::QPath::TypeRelative(ty, fn_path)) = + fun.kind && let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = ty.kind && path.res.opt_def_id().is_some_and(|did| did == box_item) && fn_path.ident.name == sym::new && let [value] = args { - return Expr { temp_lifetime, ty: expr_ty, span: expr.span, kind: ExprKind::Box { value: self.mirror_expr(value) } } + return Expr { + temp_lifetime, + ty: expr_ty, + span: expr.span, + kind: ExprKind::Box { value: self.mirror_expr(value) }, + }; } else { tcx.sess.emit_err(errors::RustcBoxAttributeError { span: expr.span, - reason: errors::RustcBoxAttrReason::NotBoxNew + reason: errors::RustcBoxAttrReason::NotBoxNew, }); } } else { @@ -343,17 +343,16 @@ // Tuple-like ADTs are represented as ExprKind::Call. We convert them here. let adt_data = if let hir::ExprKind::Path(ref qpath) = fun.kind - && let Some(adt_def) = expr_ty.ty_adt_def() { + && let Some(adt_def) = expr_ty.ty_adt_def() + { match qpath { - hir::QPath::Resolved(_, ref path) => { - match path.res { - Res::Def(DefKind::Ctor(_, CtorKind::Fn), ctor_id) => { - Some((adt_def, adt_def.variant_index_with_ctor_id(ctor_id))) - } - Res::SelfCtor(..) => Some((adt_def, FIRST_VARIANT)), - _ => None, + hir::QPath::Resolved(_, ref path) => match path.res { + Res::Def(DefKind::Ctor(_, CtorKind::Fn), ctor_id) => { + Some((adt_def, adt_def.variant_index_with_ctor_id(ctor_id))) } - } + Res::SelfCtor(..) => Some((adt_def, FIRST_VARIANT)), + _ => None, + }, hir::QPath::TypeRelative(_ty, _) => { if let Some((DefKind::Ctor(_, CtorKind::Fn), ctor_id)) = self.typeck_results().type_dependent_def(fun.hir_id) @@ -362,7 +361,6 @@ } else { None } - } _ => None, } @@ -570,8 +568,8 @@ let closure_ty = self.typeck_results().expr_ty(expr); let (def_id, args, movability) = match *closure_ty.kind() { ty::Closure(def_id, args) => (def_id, UpvarArgs::Closure(args), None), - ty::Generator(def_id, args, movability) => { - (def_id, UpvarArgs::Generator(args), Some(movability)) + ty::Coroutine(def_id, args, movability) => { + (def_id, UpvarArgs::Coroutine(args), Some(movability)) } _ => { span_bug!(expr.span, "closure expr w/o closure type: {:?}", closure_ty); @@ -672,7 +670,7 @@ hir::ExprKind::OffsetOf(_, _) => { let data = self.typeck_results.offset_of_data(); let &(container, ref indices) = data.get(expr.hir_id).unwrap(); - let fields = tcx.mk_fields_from_iter(indices.iter().copied()); + let fields = tcx.mk_offset_of_from_iter(indices.iter().copied()); ExprKind::OffsetOf { container, fields } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/cx/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/cx/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/cx/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/cx/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -37,7 +37,7 @@ // The resume argument may be missing, in that case we need to provide it here. // It will always be `()` in this case. 
- if tcx.def_kind(owner_def) == DefKind::Generator && body.params.is_empty() { + if tcx.def_kind(owner_def) == DefKind::Coroutine && body.params.is_empty() { cx.thir.params.push(Param { ty: Ty::new_unit(tcx), pat: None, @@ -148,11 +148,16 @@ Some(env_param) } - DefKind::Generator => { - let gen_ty = self.typeck_results.node_type(owner_id); - let gen_param = - Param { ty: gen_ty, pat: None, ty_span: None, self_kind: None, hir_id: None }; - Some(gen_param) + DefKind::Coroutine => { + let coroutine_ty = self.typeck_results.node_type(owner_id); + let coroutine_param = Param { + ty: coroutine_ty, + pat: None, + ty_span: None, + self_kind: None, + hir_id: None, + }; + Some(coroutine_param) } _ => None, } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/check_match.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/check_match.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/check_match.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/check_match.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,4 @@ -use super::deconstruct_pat::{Constructor, DeconstructedPat}; +use super::deconstruct_pat::{Constructor, DeconstructedPat, WitnessPat}; use super::usefulness::{ compute_match_usefulness, MatchArm, MatchCheckCtxt, Reachability, UsefulnessReport, }; @@ -9,9 +9,7 @@ use rustc_ast::Mutability; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::stack::ensure_sufficient_stack; -use rustc_errors::{ - struct_span_err, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, MultiSpan, -}; +use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, MultiSpan}; use rustc_hir as hir; use rustc_hir::def::*; use rustc_hir::def_id::LocalDefId; @@ -44,7 +42,7 @@ for param in thir.params.iter() { if let Some(box ref pattern) = param.pat { - visitor.check_irrefutable(pattern, "function argument", None); + visitor.check_binding_is_irrefutable(pattern, "function argument", None); } } visitor.error @@ -58,7 +56,7 @@ struct_span_err!(sess, sp, E0004, "{}", &error_message) } -#[derive(PartialEq)] +#[derive(Debug, Copy, Clone, PartialEq)] enum RefutableFlag { Irrefutable, Refutable, @@ -68,24 +66,30 @@ #[derive(Clone, Copy, Debug, PartialEq, Eq)] enum LetSource { None, + PlainLet, IfLet, IfLetGuard, LetElse, WhileLet, } -struct MatchVisitor<'a, 'p, 'tcx> { +struct MatchVisitor<'thir, 'p, 'tcx> { tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, - thir: &'a Thir<'tcx>, + thir: &'thir Thir<'tcx>, lint_level: HirId, let_source: LetSource, pattern_arena: &'p TypedArena>, + /// Tracks if we encountered an error while checking this body. That the first function to + /// report it stores it here. Some functions return `Result` to allow callers to short-circuit + /// on error, but callers don't need to store it here again. error: Result<(), ErrorGuaranteed>, } -impl<'a, 'tcx> Visitor<'a, 'tcx> for MatchVisitor<'a, '_, 'tcx> { - fn thir(&self) -> &'a Thir<'tcx> { +// Visitor for a thir body. This calls `check_match`, `check_let` and `check_let_chain` as +// appropriate. 
+impl<'thir, 'tcx> Visitor<'thir, 'tcx> for MatchVisitor<'thir, '_, 'tcx> { + fn thir(&self) -> &'thir Thir<'tcx> { self.thir } @@ -100,7 +104,7 @@ } Some(Guard::IfLet(ref pat, expr)) => { this.with_let_source(LetSource::IfLetGuard, |this| { - this.check_let(pat, expr, LetSource::IfLetGuard, pat.span); + this.check_let(pat, Some(expr), pat.span); this.visit_pat(pat); this.visit_expr(&this.thir[expr]); }); @@ -148,10 +152,18 @@ self.check_match(scrutinee, arms, source, ex.span); } ExprKind::Let { box ref pat, expr } => { - self.check_let(pat, expr, self.let_source, ex.span); + self.check_let(pat, Some(expr), ex.span); } - ExprKind::LogicalOp { op: LogicalOp::And, lhs, rhs } => { - self.check_let_chain(self.let_source, ex.span, lhs, rhs); + ExprKind::LogicalOp { op: LogicalOp::And, .. } + if !matches!(self.let_source, LetSource::None) => + { + let mut chain_refutabilities = Vec::new(); + let Ok(()) = self.visit_land(ex, &mut chain_refutabilities) else { return }; + // If at least one of the operands is a `let ... = ...`. + if chain_refutabilities.iter().any(|x| x.is_some()) { + self.check_let_chain(chain_refutabilities, ex.span); + } + return; } _ => {} }; @@ -159,31 +171,27 @@ } fn visit_stmt(&mut self, stmt: &Stmt<'tcx>) { - let old_lint_level = self.lint_level; match stmt.kind { StmtKind::Let { box ref pattern, initializer, else_block, lint_level, span, .. } => { - if let LintLevel::Explicit(lint_level) = lint_level { - self.lint_level = lint_level; - } - - if let Some(initializer) = initializer && else_block.is_some() { - self.check_let(pattern, initializer, LetSource::LetElse, span); - } - - if else_block.is_none() { - self.check_irrefutable(pattern, "local binding", Some(span)); - } + self.with_lint_level(lint_level, |this| { + let let_source = + if else_block.is_some() { LetSource::LetElse } else { LetSource::PlainLet }; + this.with_let_source(let_source, |this| { + this.check_let(pattern, initializer, span) + }); + visit::walk_stmt(this, stmt); + }); + } + StmtKind::Expr { .. } => { + visit::walk_stmt(self, stmt); } - _ => {} } - visit::walk_stmt(self, stmt); - self.lint_level = old_lint_level; } } -impl<'p, 'tcx> MatchVisitor<'_, 'p, 'tcx> { +impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { #[instrument(level = "trace", skip(self, f))] fn with_let_source(&mut self, let_source: LetSource, f: impl FnOnce(&mut Self)) { let old_let_source = self.let_source; @@ -192,49 +200,127 @@ self.let_source = old_let_source; } - fn with_lint_level(&mut self, new_lint_level: LintLevel, f: impl FnOnce(&mut Self)) { + fn with_lint_level( + &mut self, + new_lint_level: LintLevel, + f: impl FnOnce(&mut Self) -> T, + ) -> T { if let LintLevel::Explicit(hir_id) = new_lint_level { let old_lint_level = self.lint_level; self.lint_level = hir_id; - f(self); + let ret = f(self); self.lint_level = old_lint_level; + ret } else { - f(self); + f(self) + } + } + + /// Visit a nested chain of `&&`. Used for if-let chains. This must call `visit_expr` on the + /// subexpressions we are not handling ourselves. + fn visit_land( + &mut self, + ex: &Expr<'tcx>, + accumulator: &mut Vec>, + ) -> Result<(), ErrorGuaranteed> { + match ex.kind { + ExprKind::Scope { value, lint_level, .. } => self.with_lint_level(lint_level, |this| { + this.visit_land(&this.thir[value], accumulator) + }), + ExprKind::LogicalOp { op: LogicalOp::And, lhs, rhs } => { + // We recurse into the lhs only, because `&&` chains associate to the left. 
+ let res_lhs = self.visit_land(&self.thir[lhs], accumulator); + let res_rhs = self.visit_land_rhs(&self.thir[rhs])?; + accumulator.push(res_rhs); + res_lhs + } + _ => { + let res = self.visit_land_rhs(ex)?; + accumulator.push(res); + Ok(()) + } } } - fn check_patterns(&self, pat: &Pat<'tcx>, rf: RefutableFlag) { - pat.walk_always(|pat| check_borrow_conflicts_in_at_patterns(self, pat)); - check_for_bindings_named_same_as_variants(self, pat, rf); + /// Visit the right-hand-side of a `&&`. Used for if-let chains. Returns `Some` if the + /// expression was ultimately a `let ... = ...`, and `None` if it was a normal boolean + /// expression. This must call `visit_expr` on the subexpressions we are not handling ourselves. + fn visit_land_rhs( + &mut self, + ex: &Expr<'tcx>, + ) -> Result, ErrorGuaranteed> { + match ex.kind { + ExprKind::Scope { value, lint_level, .. } => { + self.with_lint_level(lint_level, |this| this.visit_land_rhs(&this.thir[value])) + } + ExprKind::Let { box ref pat, expr } => { + self.with_let_source(LetSource::None, |this| { + this.visit_expr(&this.thir()[expr]); + }); + Ok(Some((ex.span, self.is_let_irrefutable(pat)?))) + } + _ => { + self.with_let_source(LetSource::None, |this| { + this.visit_expr(ex); + }); + Ok(None) + } + } } fn lower_pattern( - &self, - cx: &mut MatchCheckCtxt<'p, 'tcx>, - pattern: &Pat<'tcx>, - ) -> &'p DeconstructedPat<'p, 'tcx> { - cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, &pattern)) + &mut self, + cx: &MatchCheckCtxt<'p, 'tcx>, + pat: &Pat<'tcx>, + ) -> Result<&'p DeconstructedPat<'p, 'tcx>, ErrorGuaranteed> { + if let Err(err) = pat.pat_error_reported() { + self.error = Err(err); + Err(err) + } else { + // Check the pattern for some things unrelated to exhaustiveness. + let refutable = if cx.refutable { Refutable } else { Irrefutable }; + pat.walk_always(|pat| check_borrow_conflicts_in_at_patterns(self, pat)); + pat.walk_always(|pat| check_for_bindings_named_same_as_variants(self, pat, refutable)); + Ok(cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, pat))) + } } - fn new_cx(&self, hir_id: HirId, refutable: bool) -> MatchCheckCtxt<'p, 'tcx> { + fn new_cx( + &self, + refutability: RefutableFlag, + match_span: Option, + ) -> MatchCheckCtxt<'p, 'tcx> { + let refutable = match refutability { + Irrefutable => false, + Refutable => true, + }; MatchCheckCtxt { tcx: self.tcx, param_env: self.param_env, - module: self.tcx.parent_module(hir_id).to_def_id(), + module: self.tcx.parent_module(self.lint_level).to_def_id(), pattern_arena: &self.pattern_arena, + match_span, refutable, } } #[instrument(level = "trace", skip(self))] - fn check_let(&mut self, pat: &Pat<'tcx>, scrutinee: ExprId, source: LetSource, span: Span) { - if let LetSource::None = source { - return; + fn check_let(&mut self, pat: &Pat<'tcx>, scrutinee: Option, span: Span) { + assert!(self.let_source != LetSource::None); + if let LetSource::PlainLet = self.let_source { + self.check_binding_is_irrefutable(pat, "local binding", Some(span)) + } else { + let Ok(refutability) = self.is_let_irrefutable(pat) else { return }; + if matches!(refutability, Irrefutable) { + report_irrefutable_let_patterns( + self.tcx, + self.lint_level, + self.let_source, + 1, + span, + ); + } } - self.check_patterns(pat, Refutable); - let mut cx = self.new_cx(self.lint_level, true); - let tpat = self.lower_pattern(&mut cx, pat); - self.check_let_reachability(&mut cx, self.lint_level, source, tpat, span); } fn check_match( @@ -244,32 +330,25 @@ source: hir::MatchSource, expr_span: Span, ) { - let mut 
cx = self.new_cx(self.lint_level, true); + let cx = self.new_cx(Refutable, Some(expr_span)); + let mut tarms = Vec::with_capacity(arms.len()); for &arm in arms { - // Check the arm for some things unrelated to exhaustiveness. let arm = &self.thir.arms[arm]; - self.with_lint_level(arm.lint_level, |this| { - this.check_patterns(&arm.pattern, Refutable); + let got_error = self.with_lint_level(arm.lint_level, |this| { + let Ok(pat) = this.lower_pattern(&cx, &arm.pattern) else { return true }; + let arm = MatchArm { pat, hir_id: this.lint_level, has_guard: arm.guard.is_some() }; + tarms.push(arm); + false }); + if got_error { + return; + } } - let tarms: Vec<_> = arms - .iter() - .map(|&arm| { - let arm = &self.thir.arms[arm]; - let hir_id = match arm.lint_level { - LintLevel::Explicit(hir_id) => hir_id, - LintLevel::Inherited => self.lint_level, - }; - let pat = self.lower_pattern(&mut cx, &arm.pattern); - MatchArm { pat, hir_id, has_guard: arm.guard.is_some() } - }) - .collect(); - let scrut = &self.thir[scrut]; let scrut_ty = scrut.ty; - let report = compute_match_usefulness(&cx, &tarms, self.lint_level, scrut_ty); + let report = compute_match_usefulness(&cx, &tarms, self.lint_level, scrut_ty, scrut.span); match source { // Don't report arm reachability of desugared `match $iter.into_iter() { iter => .. }` @@ -293,107 +372,39 @@ debug_assert_eq!(pat.span.desugaring_kind(), Some(DesugaringKind::ForLoop)); let PatKind::Variant { ref subpatterns, .. } = pat.kind else { bug!() }; let [pat_field] = &subpatterns[..] else { bug!() }; - self.check_irrefutable(&pat_field.pattern, "`for` loop binding", None); + self.check_binding_is_irrefutable(&pat_field.pattern, "`for` loop binding", None); } else { - self.error = Err(non_exhaustive_match( + self.error = Err(report_non_exhaustive_match( &cx, self.thir, scrut_ty, scrut.span, witnesses, arms, expr_span, )); } } } - fn check_let_reachability( - &mut self, - cx: &mut MatchCheckCtxt<'p, 'tcx>, - pat_id: HirId, - source: LetSource, - pat: &'p DeconstructedPat<'p, 'tcx>, - span: Span, - ) { - if is_let_irrefutable(cx, pat_id, pat) { - irrefutable_let_patterns(cx.tcx, pat_id, source, 1, span); - } - } - #[instrument(level = "trace", skip(self))] fn check_let_chain( &mut self, - let_source: LetSource, - top_expr_span: Span, - mut lhs: ExprId, - rhs: ExprId, + chain_refutabilities: Vec>, + whole_chain_span: Span, ) { - if let LetSource::None = let_source { - return; - } - - // Lint level enclosing the next `lhs`. - let mut cur_lint_level = self.lint_level; - - // Obtain the refutabilities of all exprs in the chain, - // and record chain members that aren't let exprs. - let mut chain_refutabilities = Vec::new(); - - let add = |expr: ExprId, mut local_lint_level| { - // `local_lint_level` is the lint level enclosing the pattern inside `expr`. - let mut expr = &self.thir[expr]; - debug!(?expr, ?local_lint_level, "add"); - // Fast-forward through scopes. - while let ExprKind::Scope { value, lint_level, .. } = expr.kind { - if let LintLevel::Explicit(hir_id) = lint_level { - local_lint_level = hir_id - } - expr = &self.thir[value]; - } - debug!(?expr, ?local_lint_level, "after scopes"); - match expr.kind { - ExprKind::Let { box ref pat, expr: _ } => { - let mut ncx = self.new_cx(local_lint_level, true); - let tpat = self.lower_pattern(&mut ncx, pat); - let refutable = !is_let_irrefutable(&mut ncx, local_lint_level, tpat); - Some((expr.span, refutable)) - } - _ => None, - } - }; - - // Let chains recurse on the left, so we start by adding the rightmost. 
- chain_refutabilities.push(add(rhs, cur_lint_level)); - - loop { - while let ExprKind::Scope { value, lint_level, .. } = self.thir[lhs].kind { - if let LintLevel::Explicit(hir_id) = lint_level { - cur_lint_level = hir_id - } - lhs = value; - } - if let ExprKind::LogicalOp { op: LogicalOp::And, lhs: new_lhs, rhs: expr } = - self.thir[lhs].kind - { - chain_refutabilities.push(add(expr, cur_lint_level)); - lhs = new_lhs; - } else { - chain_refutabilities.push(add(lhs, cur_lint_level)); - break; - } - } - debug!(?chain_refutabilities); - chain_refutabilities.reverse(); + assert!(self.let_source != LetSource::None); - // Third, emit the actual warnings. - if chain_refutabilities.iter().all(|r| matches!(*r, Some((_, false)))) { + if chain_refutabilities.iter().all(|r| matches!(*r, Some((_, Irrefutable)))) { // The entire chain is made up of irrefutable `let` statements - irrefutable_let_patterns( + report_irrefutable_let_patterns( self.tcx, self.lint_level, - let_source, + self.let_source, chain_refutabilities.len(), - top_expr_span, + whole_chain_span, ); return; } - if let Some(until) = chain_refutabilities.iter().position(|r| !matches!(*r, Some((_, false)))) && until > 0 { + if let Some(until) = + chain_refutabilities.iter().position(|r| !matches!(*r, Some((_, Irrefutable)))) + && until > 0 + { // The chain has a non-zero prefix of irrefutable `let` statements. // Check if the let source is while, for there is no alternative place to put a prefix, @@ -402,43 +413,71 @@ // so can't always be moved out. // FIXME: Add checking whether the bindings are actually used in the prefix, // and lint if they are not. - if !matches!(let_source, LetSource::WhileLet | LetSource::IfLetGuard) { + if !matches!(self.let_source, LetSource::WhileLet | LetSource::IfLetGuard) { // Emit the lint let prefix = &chain_refutabilities[..until]; let span_start = prefix[0].unwrap().0; let span_end = prefix.last().unwrap().unwrap().0; let span = span_start.to(span_end); let count = prefix.len(); - self.tcx.emit_spanned_lint(IRREFUTABLE_LET_PATTERNS, self.lint_level, span, LeadingIrrefutableLetPatterns { count }); + self.tcx.emit_spanned_lint( + IRREFUTABLE_LET_PATTERNS, + self.lint_level, + span, + LeadingIrrefutableLetPatterns { count }, + ); } } - if let Some(from) = chain_refutabilities.iter().rposition(|r| !matches!(*r, Some((_, false)))) && from != (chain_refutabilities.len() - 1) { + if let Some(from) = + chain_refutabilities.iter().rposition(|r| !matches!(*r, Some((_, Irrefutable)))) + && from != (chain_refutabilities.len() - 1) + { // The chain has a non-empty suffix of irrefutable `let` statements let suffix = &chain_refutabilities[from + 1..]; let span_start = suffix[0].unwrap().0; let span_end = suffix.last().unwrap().unwrap().0; let span = span_start.to(span_end); let count = suffix.len(); - self.tcx.emit_spanned_lint(IRREFUTABLE_LET_PATTERNS, self.lint_level, span, TrailingIrrefutableLetPatterns { count }); + self.tcx.emit_spanned_lint( + IRREFUTABLE_LET_PATTERNS, + self.lint_level, + span, + TrailingIrrefutableLetPatterns { count }, + ); } } - #[instrument(level = "trace", skip(self))] - fn check_irrefutable(&mut self, pat: &Pat<'tcx>, origin: &str, sp: Option) { - let mut cx = self.new_cx(self.lint_level, false); + fn analyze_binding( + &mut self, + pat: &Pat<'tcx>, + refutability: RefutableFlag, + ) -> Result<(MatchCheckCtxt<'p, 'tcx>, UsefulnessReport<'p, 'tcx>), ErrorGuaranteed> { + let cx = self.new_cx(refutability, None); + let pat = self.lower_pattern(&cx, pat)?; + let arms = [MatchArm { pat, hir_id: 
self.lint_level, has_guard: false }]; + let report = compute_match_usefulness(&cx, &arms, self.lint_level, pat.ty(), pat.span()); + Ok((cx, report)) + } + + fn is_let_irrefutable(&mut self, pat: &Pat<'tcx>) -> Result<RefutableFlag, ErrorGuaranteed> { + let (cx, report) = self.analyze_binding(pat, Refutable)?; + // Report if the pattern is unreachable, which can only occur when the type is uninhabited. + // This also reports unreachable sub-patterns. + report_arm_reachability(&cx, &report); + // If the list of witnesses is empty, the match is exhaustive, i.e. the `if let` pattern is + // irrefutable. + Ok(if report.non_exhaustiveness_witnesses.is_empty() { Irrefutable } else { Refutable }) + } - let pattern = self.lower_pattern(&mut cx, pat); - let pattern_ty = pattern.ty(); - let arm = MatchArm { pat: pattern, hir_id: self.lint_level, has_guard: false }; - let report = compute_match_usefulness(&cx, &[arm], self.lint_level, pattern_ty); + #[instrument(level = "trace", skip(self))] + fn check_binding_is_irrefutable(&mut self, pat: &Pat<'tcx>, origin: &str, sp: Option<Span>) { + let pattern_ty = pat.ty; - // Note: we ignore whether the pattern is unreachable (i.e. whether the type is empty). We - // only care about exhaustiveness here. + let Ok((cx, report)) = self.analyze_binding(pat, Irrefutable) else { return }; let witnesses = report.non_exhaustiveness_witnesses; if witnesses.is_empty() { // The pattern is irrefutable. - self.check_patterns(pat, Irrefutable); return; } @@ -448,23 +487,21 @@ let mut interpreted_as_const = None; if let PatKind::Constant { .. } - | PatKind::AscribeUserType { - subpattern: box Pat { kind: PatKind::Constant { .. }, .. }, - .. - } = pat.kind + | PatKind::AscribeUserType { + subpattern: box Pat { kind: PatKind::Constant { .. }, .. }, + .. + } = pat.kind && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(pat.span) { // If the pattern to match is an integer literal: if snippet.chars().all(|c| c.is_digit(10)) { // Then give a suggestion, the user might've meant to create a binding instead. misc_suggestion = Some(MiscPatternSuggestion::AttemptedIntegerLiteral { - start_span: pat.span.shrink_to_lo() + start_span: pat.span.shrink_to_lo(), }); } else if snippet.chars().all(|c| c.is_alphanumeric() || c == '_') { - interpreted_as_const = Some(InterpretedAsConst { - span: pat.span, - variable: snippet, - }); + interpreted_as_const = + Some(InterpretedAsConst { span: pat.span, variable: snippet }); } } @@ -487,34 +524,23 @@ }); }; - let adt_defined_here = try { - let ty = pattern_ty.peel_refs(); - let ty::Adt(def, _) = ty.kind() else { None? }; - let adt_def_span = cx.tcx.hir().get_if_local(def.did())?.ident()?.span; - let mut variants = vec![]; - - for span in maybe_point_at_variant(&cx, *def, witnesses.iter().take(5)) { - variants.push(Variant { span }); - } - AdtDefinedHere { adt_def_span, ty, variants } - }; + let adt_defined_here = report_adt_defined_here(self.tcx, pattern_ty, &witnesses, false); // Emit an extra note if the first uncovered witness would be uninhabited // if we disregard visibility.
- let witness_1_is_privately_uninhabited = - if cx.tcx.features().exhaustive_patterns - && let Some(witness_1) = witnesses.get(0) - && let ty::Adt(adt, args) = witness_1.ty().kind() - && adt.is_enum() - && let Constructor::Variant(variant_index) = witness_1.ctor() - { - let variant = adt.variant(*variant_index); - let inhabited = variant.inhabited_predicate(cx.tcx, *adt).instantiate(cx.tcx, args); - assert!(inhabited.apply(cx.tcx, cx.param_env, cx.module)); - !inhabited.apply_ignore_module(cx.tcx, cx.param_env) - } else { - false - }; + let witness_1_is_privately_uninhabited = if self.tcx.features().exhaustive_patterns + && let Some(witness_1) = witnesses.get(0) + && let ty::Adt(adt, args) = witness_1.ty().kind() + && adt.is_enum() + && let Constructor::Variant(variant_index) = witness_1.ctor() + { + let variant = adt.variant(*variant_index); + let inhabited = variant.inhabited_predicate(self.tcx, *adt).instantiate(self.tcx, args); + assert!(inhabited.apply(self.tcx, cx.param_env, cx.module)); + !inhabited.apply_ignore_module(self.tcx, cx.param_env) + } else { + false + }; self.error = Err(self.tcx.sess.emit_err(PatternNotCovered { span: pat.span, @@ -532,69 +558,154 @@ } } -fn check_for_bindings_named_same_as_variants( - cx: &MatchVisitor<'_, '_, '_>, - pat: &Pat<'_>, - rf: RefutableFlag, -) { - pat.walk_always(|p| { - if let PatKind::Binding { - name, - mode: BindingMode::ByValue, - mutability: Mutability::Not, - subpattern: None, - ty, - .. - } = p.kind - && let ty::Adt(edef, _) = ty.peel_refs().kind() - && edef.is_enum() - && edef.variants().iter().any(|variant| { - variant.name == name && variant.ctor_kind() == Some(CtorKind::Const) - }) - { - let variant_count = edef.variants().len(); - let ty_path = with_no_trimmed_paths!({ - cx.tcx.def_path_str(edef.did()) +/// Check if a by-value binding is by-value. That is, check if the binding's type is not `Copy`. +/// Check that there are no borrow or move conflicts in `binding @ subpat` patterns. +/// +/// For example, this would reject: +/// - `ref x @ Some(ref mut y)`, +/// - `ref mut x @ Some(ref y)`, +/// - `ref mut x @ Some(ref mut y)`, +/// - `ref mut? x @ Some(y)`, and +/// - `x @ Some(ref mut? y)`. +/// +/// This analysis is *not* subsumed by NLL. +fn check_borrow_conflicts_in_at_patterns<'tcx>(cx: &MatchVisitor<'_, '_, 'tcx>, pat: &Pat<'tcx>) { + // Extract `sub` in `binding @ sub`. + let PatKind::Binding { name, mode, ty, subpattern: Some(box ref sub), .. } = pat.kind else { + return; + }; + + let is_binding_by_move = |ty: Ty<'tcx>| !ty.is_copy_modulo_regions(cx.tcx, cx.param_env); + + let sess = cx.tcx.sess; + + // Get the binding move, extract the mutability if by-ref. + let mut_outer = match mode { + BindingMode::ByValue if is_binding_by_move(ty) => { + // We have `x @ pat` where `x` is by-move. Reject all borrows in `pat`. + let mut conflicts_ref = Vec::new(); + sub.each_binding(|_, mode, _, span| match mode { + BindingMode::ByValue => {} + BindingMode::ByRef(_) => conflicts_ref.push(span), }); - cx.tcx.emit_spanned_lint( - BINDINGS_WITH_VARIANT_NAME, - cx.lint_level, - p.span, - BindingsWithVariantName { - // If this is an irrefutable pattern, and there's > 1 variant, - // then we can't actually match on this. Applying the below - // suggestion would produce code that breaks on `check_irrefutable`. 
- suggestion: if rf == Refutable || variant_count == 1 { - Some(p.span) - } else { None }, - ty_path, + if !conflicts_ref.is_empty() { + sess.emit_err(BorrowOfMovedValue { + binding_span: pat.span, + conflicts_ref, name, - }, - ) + ty, + suggest_borrowing: Some(pat.span.shrink_to_lo()), + }); + } + return; + } + BindingMode::ByValue => return, + BindingMode::ByRef(m) => m.mutability(), + }; + + // We now have `ref $mut_outer binding @ sub` (semantically). + // Recurse into each binding in `sub` and find mutability or move conflicts. + let mut conflicts_move = Vec::new(); + let mut conflicts_mut_mut = Vec::new(); + let mut conflicts_mut_ref = Vec::new(); + sub.each_binding(|name, mode, ty, span| { + match mode { + BindingMode::ByRef(mut_inner) => match (mut_outer, mut_inner.mutability()) { + // Both sides are `ref`. + (Mutability::Not, Mutability::Not) => {} + // 2x `ref mut`. + (Mutability::Mut, Mutability::Mut) => { + conflicts_mut_mut.push(Conflict::Mut { span, name }) + } + (Mutability::Not, Mutability::Mut) => { + conflicts_mut_ref.push(Conflict::Mut { span, name }) + } + (Mutability::Mut, Mutability::Not) => { + conflicts_mut_ref.push(Conflict::Ref { span, name }) + } + }, + BindingMode::ByValue if is_binding_by_move(ty) => { + conflicts_move.push(Conflict::Moved { span, name }) // `ref mut?` + by-move conflict. + } + BindingMode::ByValue => {} // `ref mut?` + by-copy is fine. } }); -} -/// Checks for common cases of "catchall" patterns that may not be intended as such. -fn pat_is_catchall(pat: &DeconstructedPat<'_, '_>) -> bool { - use Constructor::*; - match pat.ctor() { - Wildcard => true, - Single => pat.iter_fields().all(|pat| pat_is_catchall(pat)), - _ => false, + let report_mut_mut = !conflicts_mut_mut.is_empty(); + let report_mut_ref = !conflicts_mut_ref.is_empty(); + let report_move_conflict = !conflicts_move.is_empty(); + + let mut occurrences = match mut_outer { + Mutability::Mut => vec![Conflict::Mut { span: pat.span, name }], + Mutability::Not => vec![Conflict::Ref { span: pat.span, name }], + }; + occurrences.extend(conflicts_mut_mut); + occurrences.extend(conflicts_mut_ref); + occurrences.extend(conflicts_move); + + // Report errors if any. + if report_mut_mut { + // Report mutability conflicts for e.g. `ref mut x @ Some(ref mut y)`. + sess.emit_err(MultipleMutBorrows { span: pat.span, occurrences }); + } else if report_mut_ref { + // Report mutability conflicts for e.g. `ref x @ Some(ref mut y)` or the converse. + match mut_outer { + Mutability::Mut => { + sess.emit_err(AlreadyMutBorrowed { span: pat.span, occurrences }); + } + Mutability::Not => { + sess.emit_err(AlreadyBorrowed { span: pat.span, occurrences }); + } + }; + } else if report_move_conflict { + // Report by-ref and by-move conflicts, e.g. `ref x @ y`. + sess.emit_err(MovedWhileBorrowed { span: pat.span, occurrences }); } } -fn unreachable_pattern(tcx: TyCtxt<'_>, span: Span, id: HirId, catchall: Option) { - tcx.emit_spanned_lint( - UNREACHABLE_PATTERNS, - id, - span, - UnreachablePattern { span: if catchall.is_some() { Some(span) } else { None }, catchall }, - ); +fn check_for_bindings_named_same_as_variants( + cx: &MatchVisitor<'_, '_, '_>, + pat: &Pat<'_>, + rf: RefutableFlag, +) { + if let PatKind::Binding { + name, + mode: BindingMode::ByValue, + mutability: Mutability::Not, + subpattern: None, + ty, + .. 
+ } = pat.kind + && let ty::Adt(edef, _) = ty.peel_refs().kind() + && edef.is_enum() + && edef + .variants() + .iter() + .any(|variant| variant.name == name && variant.ctor_kind() == Some(CtorKind::Const)) + { + let variant_count = edef.variants().len(); + let ty_path = with_no_trimmed_paths!(cx.tcx.def_path_str(edef.did())); + cx.tcx.emit_spanned_lint( + BINDINGS_WITH_VARIANT_NAME, + cx.lint_level, + pat.span, + BindingsWithVariantName { + // If this is an irrefutable pattern, and there's > 1 variant, + // then we can't actually match on this. Applying the below + // suggestion would produce code that breaks on `check_binding_is_irrefutable`. + suggestion: if rf == Refutable || variant_count == 1 { + Some(pat.span) + } else { + None + }, + ty_path, + name, + }, + ) + } } -fn irrefutable_let_patterns( +fn report_irrefutable_let_patterns( tcx: TyCtxt<'_>, id: HirId, source: LetSource, @@ -608,7 +719,7 @@ } match source { - LetSource::None => bug!(), + LetSource::None | LetSource::PlainLet => bug!(), LetSource::IfLet => emit_diag!(IrrefutableLetPatternsIfLet), LetSource::IfLetGuard => emit_diag!(IrrefutableLetPatternsIfLetGuard), LetSource::LetElse => emit_diag!(IrrefutableLetPatternsLetElse), @@ -616,34 +727,28 @@ } } -fn is_let_irrefutable<'p, 'tcx>( - cx: &mut MatchCheckCtxt<'p, 'tcx>, - pat_id: HirId, - pat: &'p DeconstructedPat<'p, 'tcx>, -) -> bool { - let arms = [MatchArm { pat, hir_id: pat_id, has_guard: false }]; - let report = compute_match_usefulness(&cx, &arms, pat_id, pat.ty()); - - // Report if the pattern is unreachable, which can only occur when the type is uninhabited. - // This also reports unreachable sub-patterns though, so we can't just replace it with an - // `is_uninhabited` check. - report_arm_reachability(&cx, &report); - - // If the list of witnesses is empty, the match is exhaustive, - // i.e. the `if let` pattern is irrefutable. - report.non_exhaustiveness_witnesses.is_empty() -} - /// Report unreachable arms, if any. fn report_arm_reachability<'p, 'tcx>( cx: &MatchCheckCtxt<'p, 'tcx>, report: &UsefulnessReport<'p, 'tcx>, ) { + let report_unreachable_pattern = |span, hir_id, catchall: Option| { + cx.tcx.emit_spanned_lint( + UNREACHABLE_PATTERNS, + hir_id, + span, + UnreachablePattern { + span: if catchall.is_some() { Some(span) } else { None }, + catchall, + }, + ); + }; + use Reachability::*; let mut catchall = None; for (arm, is_useful) in report.arm_usefulness.iter() { match is_useful { - Unreachable => unreachable_pattern(cx.tcx, arm.pat.span(), arm.hir_id, catchall), + Unreachable => report_unreachable_pattern(arm.pat.span(), arm.hir_id, catchall), Reachable(unreachables) if unreachables.is_empty() => {} // The arm is reachable, but contains unreachable subpatterns (from or-patterns). Reachable(unreachables) => { @@ -651,7 +756,7 @@ // Emit lints in the order in which they occur in the file. unreachables.sort_unstable(); for span in unreachables { - unreachable_pattern(cx.tcx, span, arm.hir_id, None); + report_unreachable_pattern(span, arm.hir_id, None); } } } @@ -661,24 +766,23 @@ } } -fn collect_non_exhaustive_tys<'p, 'tcx>( - pat: &DeconstructedPat<'p, 'tcx>, - non_exhaustive_tys: &mut FxHashSet>, -) { - if matches!(pat.ctor(), Constructor::NonExhaustive) { - non_exhaustive_tys.insert(pat.ty()); +/// Checks for common cases of "catchall" patterns that may not be intended as such. 
+fn pat_is_catchall(pat: &DeconstructedPat<'_, '_>) -> bool { + use Constructor::*; + match pat.ctor() { + Wildcard => true, + Single => pat.iter_fields().all(|pat| pat_is_catchall(pat)), + _ => false, } - pat.iter_fields() - .for_each(|field_pat| collect_non_exhaustive_tys(field_pat, non_exhaustive_tys)) } /// Report that a match is not exhaustive. -fn non_exhaustive_match<'p, 'tcx>( +fn report_non_exhaustive_match<'p, 'tcx>( cx: &MatchCheckCtxt<'p, 'tcx>, thir: &Thir<'tcx>, scrut_ty: Ty<'tcx>, sp: Span, - witnesses: Vec<DeconstructedPat<'p, 'tcx>>, + witnesses: Vec<WitnessPat<'tcx>>, arms: &[ArmId], expr_span: Span, ) -> ErrorGuaranteed { @@ -707,12 +811,19 @@ sp, format!("non-exhaustive patterns: {joined_patterns} not covered"), ); - err.span_label(sp, pattern_not_covered_label(&witnesses, &joined_patterns)); + err.span_label( + sp, + format!( + "pattern{} {} not covered", + rustc_errors::pluralize!(witnesses.len()), + joined_patterns + ), + ); patterns_len = witnesses.len(); pattern = if witnesses.len() < 4 { witnesses .iter() - .map(|witness| witness.to_pat(cx).to_string()) + .map(|witness| witness.to_diagnostic_pat(cx).to_string()) .collect::<Vec<String>>() .join(" | ") } else { @@ -720,19 +831,37 @@ }; }; - adt_defined_here(cx, &mut err, scrut_ty, &witnesses); + // Point at the definition of non-covered `enum` variants. + if let Some(AdtDefinedHere { adt_def_span, ty, variants }) = + report_adt_defined_here(cx.tcx, scrut_ty, &witnesses, true) + { + let mut multi_span = MultiSpan::from_span(adt_def_span); + multi_span.push_span_label(adt_def_span, ""); + for Variant { span } in variants { + multi_span.push_span_label(span, "not covered"); + } + err.span_note(multi_span, format!("`{ty}` defined here")); + } err.note(format!("the matched value is of type `{}`", scrut_ty)); - if !is_empty_match && witnesses.len() == 1 { + if !is_empty_match { let mut non_exhaustive_tys = FxHashSet::default(); - collect_non_exhaustive_tys(&witnesses[0], &mut non_exhaustive_tys); + // Look at the first witness.
+ collect_non_exhaustive_tys(cx.tcx, &witnesses[0], &mut non_exhaustive_tys); for ty in non_exhaustive_tys { if ty.is_ptr_sized_integral() { - err.note(format!( - "`{ty}` does not have a fixed maximum value, so a wildcard `_` is necessary to match \ - exhaustively", + if ty == cx.tcx.types.usize { + err.note(format!( + "`{ty}` does not have a fixed maximum value, so half-open ranges are necessary to match \ + exhaustively", )); + } else if ty == cx.tcx.types.isize { + err.note(format!( + "`{ty}` does not have fixed minimum and maximum values, so half-open ranges are necessary to match \ + exhaustively", + )); + } if cx.tcx.sess.is_nightly_build() { err.help(format!( "add `#![feature(precise_pointer_size_matching)]` to the crate attributes to \ @@ -770,8 +899,10 @@ } [only] => { let only = &thir[*only]; - let (pre_indentation, is_multiline) = if let Some(snippet) = sm.indentation_before(only.span) - && let Ok(with_trailing) = sm.span_extend_while(only.span, |c| c.is_whitespace() || c == ',') + let (pre_indentation, is_multiline) = if let Some(snippet) = + sm.indentation_before(only.span) + && let Ok(with_trailing) = + sm.span_extend_while(only.span, |c| c.is_whitespace() || c == ',') && sm.is_multiline(with_trailing) { (format!("\n{snippet}"), true) @@ -852,18 +983,18 @@ err.emit() } -pub(crate) fn joined_uncovered_patterns<'p, 'tcx>( +fn joined_uncovered_patterns<'p, 'tcx>( cx: &MatchCheckCtxt<'p, 'tcx>, - witnesses: &[DeconstructedPat<'p, 'tcx>], + witnesses: &[WitnessPat<'tcx>], ) -> String { const LIMIT: usize = 3; - let pat_to_str = |pat: &DeconstructedPat<'p, 'tcx>| pat.to_pat(cx).to_string(); + let pat_to_str = |pat: &WitnessPat<'tcx>| pat.to_diagnostic_pat(cx).to_string(); match witnesses { [] => bug!(), - [witness] => format!("`{}`", witness.to_pat(cx)), + [witness] => format!("`{}`", witness.to_diagnostic_pat(cx)), [head @ .., tail] if head.len() < LIMIT => { let head: Vec<_> = head.iter().map(pat_to_str).collect(); - format!("`{}` and `{}`", head.join("`, `"), tail.to_pat(cx)) + format!("`{}` and `{}`", head.join("`, `"), tail.to_diagnostic_pat(cx)) } _ => { let (head, tail) = witnesses.split_at(LIMIT); @@ -873,59 +1004,64 @@ } } -pub(crate) fn pattern_not_covered_label( - witnesses: &[DeconstructedPat<'_, '_>], - joined_patterns: &str, -) -> String { - format!("pattern{} {} not covered", rustc_errors::pluralize!(witnesses.len()), joined_patterns) +fn collect_non_exhaustive_tys<'tcx>( + tcx: TyCtxt<'tcx>, + pat: &WitnessPat<'tcx>, + non_exhaustive_tys: &mut FxHashSet>, +) { + if matches!(pat.ctor(), Constructor::NonExhaustive) { + non_exhaustive_tys.insert(pat.ty()); + } + if let Constructor::IntRange(range) = pat.ctor() { + if range.is_beyond_boundaries(pat.ty(), tcx) { + // The range denotes the values before `isize::MIN` or the values after `usize::MAX`/`isize::MAX`. + non_exhaustive_tys.insert(pat.ty()); + } + } + pat.iter_fields() + .for_each(|field_pat| collect_non_exhaustive_tys(tcx, field_pat, non_exhaustive_tys)) } -/// Point at the definition of non-covered `enum` variants. 
-fn adt_defined_here<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, - err: &mut Diagnostic, +fn report_adt_defined_here<'tcx>( + tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, - witnesses: &[DeconstructedPat<'p, 'tcx>], -) { + witnesses: &[WitnessPat<'tcx>], + point_at_non_local_ty: bool, +) -> Option> { let ty = ty.peel_refs(); - if let ty::Adt(def, _) = ty.kind() { - let mut spans = vec![]; - if witnesses.len() < 5 { - for sp in maybe_point_at_variant(cx, *def, witnesses.iter()) { - spans.push(sp); - } - } - let def_span = cx - .tcx - .hir() - .get_if_local(def.did()) - .and_then(|node| node.ident()) - .map(|ident| ident.span) - .unwrap_or_else(|| cx.tcx.def_span(def.did())); - let mut span: MultiSpan = - if spans.is_empty() { def_span.into() } else { spans.clone().into() }; - - span.push_span_label(def_span, ""); - for pat in spans { - span.push_span_label(pat, "not covered"); - } - err.span_note(span, format!("`{ty}` defined here")); + let ty::Adt(def, _) = ty.kind() else { + return None; + }; + let adt_def_span = + tcx.hir().get_if_local(def.did()).and_then(|node| node.ident()).map(|ident| ident.span); + let adt_def_span = if point_at_non_local_ty { + adt_def_span.unwrap_or_else(|| tcx.def_span(def.did())) + } else { + adt_def_span? + }; + + let mut variants = vec![]; + for span in maybe_point_at_variant(tcx, *def, witnesses.iter().take(5)) { + variants.push(Variant { span }); } + Some(AdtDefinedHere { adt_def_span, ty, variants }) } -fn maybe_point_at_variant<'a, 'p: 'a, 'tcx: 'a>( - cx: &MatchCheckCtxt<'p, 'tcx>, +fn maybe_point_at_variant<'a, 'tcx: 'a>( + tcx: TyCtxt<'tcx>, def: AdtDef<'tcx>, - patterns: impl Iterator>, + patterns: impl Iterator>, ) -> Vec { use Constructor::*; let mut covered = vec![]; for pattern in patterns { if let Variant(variant_index) = pattern.ctor() { - if let ty::Adt(this_def, _) = pattern.ty().kind() && this_def.did() != def.did() { + if let ty::Adt(this_def, _) = pattern.ty().kind() + && this_def.did() != def.did() + { continue; } - let sp = def.variant(*variant_index).ident(cx.tcx).span; + let sp = def.variant(*variant_index).ident(tcx).span; if covered.contains(&sp) { // Don't point at variants that have already been covered due to other patterns to avoid // visual clutter. @@ -933,112 +1069,7 @@ } covered.push(sp); } - covered.extend(maybe_point_at_variant(cx, def, pattern.iter_fields())); + covered.extend(maybe_point_at_variant(tcx, def, pattern.iter_fields())); } covered } - -/// Check if a by-value binding is by-value. That is, check if the binding's type is not `Copy`. -/// Check that there are no borrow or move conflicts in `binding @ subpat` patterns. -/// -/// For example, this would reject: -/// - `ref x @ Some(ref mut y)`, -/// - `ref mut x @ Some(ref y)`, -/// - `ref mut x @ Some(ref mut y)`, -/// - `ref mut? x @ Some(y)`, and -/// - `x @ Some(ref mut? y)`. -/// -/// This analysis is *not* subsumed by NLL. -fn check_borrow_conflicts_in_at_patterns<'tcx>(cx: &MatchVisitor<'_, '_, 'tcx>, pat: &Pat<'tcx>) { - // Extract `sub` in `binding @ sub`. - let PatKind::Binding { name, mode, ty, subpattern: Some(box ref sub), .. } = pat.kind else { - return; - }; - - let is_binding_by_move = |ty: Ty<'tcx>| !ty.is_copy_modulo_regions(cx.tcx, cx.param_env); - - let sess = cx.tcx.sess; - - // Get the binding move, extract the mutability if by-ref. - let mut_outer = match mode { - BindingMode::ByValue if is_binding_by_move(ty) => { - // We have `x @ pat` where `x` is by-move. Reject all borrows in `pat`. 
- let mut conflicts_ref = Vec::new(); - sub.each_binding(|_, mode, _, span| match mode { - BindingMode::ByValue => {} - BindingMode::ByRef(_) => conflicts_ref.push(span), - }); - if !conflicts_ref.is_empty() { - sess.emit_err(BorrowOfMovedValue { - binding_span: pat.span, - conflicts_ref, - name, - ty, - suggest_borrowing: Some(pat.span.shrink_to_lo()), - }); - } - return; - } - BindingMode::ByValue => return, - BindingMode::ByRef(m) => m.mutability(), - }; - - // We now have `ref $mut_outer binding @ sub` (semantically). - // Recurse into each binding in `sub` and find mutability or move conflicts. - let mut conflicts_move = Vec::new(); - let mut conflicts_mut_mut = Vec::new(); - let mut conflicts_mut_ref = Vec::new(); - sub.each_binding(|name, mode, ty, span| { - match mode { - BindingMode::ByRef(mut_inner) => match (mut_outer, mut_inner.mutability()) { - // Both sides are `ref`. - (Mutability::Not, Mutability::Not) => {} - // 2x `ref mut`. - (Mutability::Mut, Mutability::Mut) => { - conflicts_mut_mut.push(Conflict::Mut { span, name }) - } - (Mutability::Not, Mutability::Mut) => { - conflicts_mut_ref.push(Conflict::Mut { span, name }) - } - (Mutability::Mut, Mutability::Not) => { - conflicts_mut_ref.push(Conflict::Ref { span, name }) - } - }, - BindingMode::ByValue if is_binding_by_move(ty) => { - conflicts_move.push(Conflict::Moved { span, name }) // `ref mut?` + by-move conflict. - } - BindingMode::ByValue => {} // `ref mut?` + by-copy is fine. - } - }); - - let report_mut_mut = !conflicts_mut_mut.is_empty(); - let report_mut_ref = !conflicts_mut_ref.is_empty(); - let report_move_conflict = !conflicts_move.is_empty(); - - let mut occurrences = match mut_outer { - Mutability::Mut => vec![Conflict::Mut { span: pat.span, name }], - Mutability::Not => vec![Conflict::Ref { span: pat.span, name }], - }; - occurrences.extend(conflicts_mut_mut); - occurrences.extend(conflicts_mut_ref); - occurrences.extend(conflicts_move); - - // Report errors if any. - if report_mut_mut { - // Report mutability conflicts for e.g. `ref mut x @ Some(ref mut y)`. - sess.emit_err(MultipleMutBorrows { span: pat.span, occurrences }); - } else if report_mut_ref { - // Report mutability conflicts for e.g. `ref x @ Some(ref mut y)` or the converse. - match mut_outer { - Mutability::Mut => { - sess.emit_err(AlreadyMutBorrowed { span: pat.span, occurrences }); - } - Mutability::Not => { - sess.emit_err(AlreadyBorrowed { span: pat.span, occurrences }); - } - }; - } else if report_move_conflict { - // Report by-ref and by-move conflicts, e.g. `ref x @ y`. 
- sess.emit_err(MovedWhileBorrowed { span: pat.span, occurrences }); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs 2023-12-21 16:55:28.000000000 +0000 @@ -7,7 +7,7 @@ use rustc_middle::thir::{FieldPat, Pat, PatKind}; use rustc_middle::ty::{self, Ty, TyCtxt, ValTree}; use rustc_session::lint; -use rustc_span::Span; +use rustc_span::{ErrorGuaranteed, Span}; use rustc_target::abi::{FieldIdx, VariantIdx}; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt; use rustc_trait_selection::traits::{self, ObligationCause}; @@ -48,7 +48,7 @@ // This tracks if we emitted some hard error for a given const value, so that // we will not subsequently issue an irrelevant lint for the same const // value. - saw_const_match_error: Cell, + saw_const_match_error: Cell>, // This tracks if we emitted some diagnostic for a given const value, so that // we will not subsequently issue an irrelevant lint for the same const @@ -84,7 +84,7 @@ span, infcx, param_env: pat_ctxt.param_env, - saw_const_match_error: Cell::new(false), + saw_const_match_error: Cell::new(None), saw_const_match_lint: Cell::new(false), behind_reference: Cell::new(false), treat_byte_string_as_slice: pat_ctxt @@ -123,6 +123,8 @@ }); debug!(?check_body_for_struct_match_violation, ?mir_structural_match_violation); + let have_valtree = + matches!(cv, mir::Const::Ty(c) if matches!(c.kind(), ty::ConstKind::Value(_))); let inlined_const_as_pat = match cv { mir::Const::Ty(c) => match c.kind() { ty::ConstKind::Param(_) @@ -154,7 +156,7 @@ }), }; - if !self.saw_const_match_error.get() { + if self.saw_const_match_error.get().is_none() { // If we were able to successfully convert the const to some pat (possibly with some // lints, but no errors), double-check that all types in the const implement // `Structural` and `PartialEq`. @@ -180,36 +182,35 @@ if let Some(non_sm_ty) = structural { if !self.type_has_partial_eq_impl(cv.ty()) { - if let ty::Adt(def, ..) = non_sm_ty.kind() { + let e = if let ty::Adt(def, ..) = non_sm_ty.kind() { if def.is_union() { let err = UnionPattern { span: self.span }; - self.tcx().sess.emit_err(err); + self.tcx().sess.emit_err(err) } else { // fatal avoids ICE from resolution of nonexistent method (rare case). self.tcx() .sess - .emit_fatal(TypeNotStructural { span: self.span, non_sm_ty }); + .emit_fatal(TypeNotStructural { span: self.span, non_sm_ty }) } } else { let err = InvalidPattern { span: self.span, non_sm_ty }; - self.tcx().sess.emit_err(err); - } + self.tcx().sess.emit_err(err) + }; // All branches above emitted an error. Don't print any more lints. - // The pattern we return is irrelevant since we errored. - return Box::new(Pat { span: self.span, ty: cv.ty(), kind: PatKind::Wild }); + // We errored. Signal that in the pattern, so that follow up errors can be silenced. + let kind = PatKind::Error(e); + return Box::new(Pat { span: self.span, ty: cv.ty(), kind }); + } else if let ty::Adt(..) = cv.ty().kind() && matches!(cv, mir::Const::Val(..)) { + // This branch is only entered when the current `cv` is `mir::Const::Val`. 
+ // This is because `mir::Const::ty` has already been handled by `Self::recur` + // and the invalid types may be ignored. + let err = TypeNotStructural { span: self.span, non_sm_ty }; + let e = self.tcx().sess.emit_err(err); + let kind = PatKind::Error(e); + return Box::new(Pat { span: self.span, ty: cv.ty(), kind }); } else if !self.saw_const_match_lint.get() { if let Some(mir_structural_match_violation) = mir_structural_match_violation { match non_sm_ty.kind() { - ty::RawPtr(pointee) - if pointee.ty.is_sized(self.tcx(), self.param_env) => {} - ty::FnPtr(..) | ty::RawPtr(..) => { - self.tcx().emit_spanned_lint( - lint::builtin::POINTER_STRUCTURAL_MATCH, - self.id, - self.span, - PointerPattern, - ); - } ty::Adt(..) if mir_structural_match_violation => { self.tcx().emit_spanned_lint( lint::builtin::INDIRECT_STRUCTURAL_MATCH, @@ -227,19 +228,15 @@ } } } - } else if !self.saw_const_match_lint.get() { - match cv.ty().kind() { - ty::RawPtr(pointee) if pointee.ty.is_sized(self.tcx(), self.param_env) => {} - ty::FnPtr(..) | ty::RawPtr(..) => { - self.tcx().emit_spanned_lint( - lint::builtin::POINTER_STRUCTURAL_MATCH, - self.id, - self.span, - PointerPattern, - ); - } - _ => {} - } + } else if !have_valtree && !self.saw_const_match_lint.get() { + // The only way valtree construction can fail without the structural match + // checker finding a violation is if there is a pointer somewhere. + self.tcx().emit_spanned_lint( + lint::builtin::POINTER_STRUCTURAL_MATCH, + self.id, + self.span, + PointerPattern, + ); } // Always check for `PartialEq`, even if we emitted other lints. (But not if there were @@ -330,7 +327,7 @@ // Backwards compatibility hack because we can't cause hard errors on these // types, so we compare them via `PartialEq::eq` at runtime. ty::Adt(..) if !self.type_marked_structural(ty) && self.behind_reference.get() => { - if !self.saw_const_match_error.get() && !self.saw_const_match_lint.get() { + if self.saw_const_match_error.get().is_none() && !self.saw_const_match_lint.get() { self.saw_const_match_lint.set(true); tcx.emit_spanned_lint( lint::builtin::INDIRECT_STRUCTURAL_MATCH, @@ -345,18 +342,18 @@ return Err(FallbackToOpaqueConst); } ty::FnDef(..) => { - self.saw_const_match_error.set(true); - tcx.sess.emit_err(InvalidPattern { span, non_sm_ty: ty }); - // We errored, so the pattern we generate is irrelevant. - PatKind::Wild + let e = tcx.sess.emit_err(InvalidPattern { span, non_sm_ty: ty }); + self.saw_const_match_error.set(Some(e)); + // We errored. Signal that in the pattern, so that follow up errors can be silenced. + PatKind::Error(e) } ty::Adt(adt_def, _) if !self.type_marked_structural(ty) => { debug!("adt_def {:?} has !type_marked_structural for cv.ty: {:?}", adt_def, ty,); - self.saw_const_match_error.set(true); let err = TypeNotStructural { span, non_sm_ty: ty }; - tcx.sess.emit_err(err); - // We errored, so the pattern we generate is irrelevant. - PatKind::Wild + let e = tcx.sess.emit_err(err); + self.saw_const_match_error.set(Some(e)); + // We errored. Signal that in the pattern, so that follow up errors can be silenced. 
+ PatKind::Error(e) } ty::Adt(adt_def, args) if adt_def.is_enum() => { let (&variant_index, fields) = cv.unwrap_branch().split_first().unwrap(); @@ -380,11 +377,19 @@ subpatterns: self .field_pats(cv.unwrap_branch().iter().copied().zip(fields.iter()))?, }, - ty::Adt(def, args) => PatKind::Leaf { - subpatterns: self.field_pats(cv.unwrap_branch().iter().copied().zip( - def.non_enum_variant().fields.iter().map(|field| field.ty(self.tcx(), args)), - ))?, - }, + ty::Adt(def, args) => { + assert!(!def.is_union()); // Valtree construction would never succeed for unions. + PatKind::Leaf { + subpatterns: self.field_pats( + cv.unwrap_branch().iter().copied().zip( + def.non_enum_variant() + .fields + .iter() + .map(|field| field.ty(self.tcx(), args)), + ), + )?, + } + } ty::Slice(elem_ty) => PatKind::Slice { prefix: cv .unwrap_branch() @@ -416,7 +421,9 @@ // instead of a hard error. ty::Adt(_, _) if !self.type_marked_structural(*pointee_ty) => { if self.behind_reference.get() { - if !self.saw_const_match_error.get() && !self.saw_const_match_lint.get() { + if self.saw_const_match_error.get().is_none() + && !self.saw_const_match_lint.get() + { self.saw_const_match_lint.set(true); tcx.emit_spanned_lint( lint::builtin::INDIRECT_STRUCTURAL_MATCH, @@ -427,14 +434,16 @@ } return Err(FallbackToOpaqueConst); } else { - if !self.saw_const_match_error.get() { - self.saw_const_match_error.set(true); + if let Some(e) = self.saw_const_match_error.get() { + // We already errored. Signal that in the pattern, so that follow up errors can be silenced. + PatKind::Error(e) + } else { let err = TypeNotStructural { span, non_sm_ty: *pointee_ty }; - tcx.sess.emit_err(err); + let e = tcx.sess.emit_err(err); + self.saw_const_match_error.set(Some(e)); + // We errored. Signal that in the pattern, so that follow up errors can be silenced. + PatKind::Error(e) } - tcx.sess.delay_span_bug(span, "`saw_const_match_error` set but no error?"); - // We errored, so the pattern we generate is irrelevant. - PatKind::Wild } } // All other references are converted into deref patterns and then recursively @@ -443,11 +452,9 @@ _ => { if !pointee_ty.is_sized(tcx, param_env) && !pointee_ty.is_slice() { let err = UnsizedPattern { span, non_sm_ty: *pointee_ty }; - tcx.sess.emit_err(err); - - // FIXME: introduce PatKind::Error to silence follow up diagnostics due to unreachable patterns. - // We errored, so the pattern we generate is irrelevant. - PatKind::Wild + let e = tcx.sess.emit_err(err); + // We errored. Signal that in the pattern, so that follow up errors can be silenced. + PatKind::Error(e) } else { let old = self.behind_reference.replace(true); // `b"foo"` produces a `&[u8; 3]`, but you can't use constants of array type when @@ -469,20 +476,25 @@ } } }, - ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) => { + ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::RawPtr(..) => { + // The raw pointers we see here have been "vetted" by valtree construction to be + // just integers, so we simply allow them. PatKind::Constant { value: mir::Const::Ty(ty::Const::new_value(tcx, cv, ty)) } } - ty::FnPtr(..) | ty::RawPtr(..) => unreachable!(), + ty::FnPtr(..) => { + // Valtree construction would never succeed for these, so this is unreachable. + unreachable!() + } _ => { - self.saw_const_match_error.set(true); let err = InvalidPattern { span, non_sm_ty: ty }; - tcx.sess.emit_err(err); - // We errored, so the pattern we generate is irrelevant. 
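// A rough, hypothetical user-level example (not from this diff) of the behaviour change in
// these hunks: a constant whose type lacks structural (derive-based) equality still triggers
// the "must be annotated with #[derive(PartialEq, Eq)]"-style error, but the arm is now
// lowered to PatKind::Error instead of PatKind::Wild, so later arms no longer pick up
// spurious `unreachable_patterns` follow-ups.
struct NotStructural(i32);
impl PartialEq for NotStructural {
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}
impl Eq for NotStructural {}

const ZERO: NotStructural = NotStructural(0);

fn classify(x: NotStructural) -> u8 {
    match x {
        ZERO => 0,              // error: constant of a type without structural equality
        NotStructural(_) => 1,  // previously also warned as unreachable; now left alone
    }
}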
- PatKind::Wild + let e = tcx.sess.emit_err(err); + self.saw_const_match_error.set(Some(e)); + // We errored. Signal that in the pattern, so that follow up errors can be silenced. + PatKind::Error(e) } }; - if !self.saw_const_match_error.get() + if self.saw_const_match_error.get().is_none() && !self.saw_const_match_lint.get() && mir_structural_match_violation // FIXME(#73448): Find a way to bring const qualification into parity with @@ -497,7 +509,7 @@ lint::builtin::NONTRIVIAL_STRUCTURAL_MATCH, id, span, - NontrivialStructuralMatch {non_sm_ty} + NontrivialStructuralMatch { non_sm_ty }, ); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs 2023-12-21 16:55:28.000000000 +0000 @@ -39,35 +39,35 @@ //! //! Splitting is implemented in the [`Constructor::split`] function. We don't do splitting for //! or-patterns; instead we just try the alternatives one-by-one. For details on splitting -//! wildcards, see [`SplitWildcard`]; for integer ranges, see [`SplitIntRange`]; for slices, see -//! [`SplitVarLenSlice`]. +//! wildcards, see [`Constructor::split`]; for integer ranges, see +//! [`IntRange::split`]; for slices, see [`Slice::split`]. use std::cell::Cell; use std::cmp::{self, max, min, Ordering}; use std::fmt; use std::iter::once; -use std::ops::RangeInclusive; use smallvec::{smallvec, SmallVec}; +use rustc_apfloat::ieee::{DoubleS, IeeeFloat, SingleS}; use rustc_data_structures::captures::Captures; -use rustc_hir::{HirId, RangeEnd}; +use rustc_data_structures::fx::FxHashSet; +use rustc_hir::RangeEnd; use rustc_index::Idx; use rustc_middle::middle::stability::EvalResult; use rustc_middle::mir; -use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange}; +use rustc_middle::mir::interpret::Scalar; +use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange, PatRangeBoundary}; use rustc_middle::ty::layout::IntegerExt; use rustc_middle::ty::{self, Ty, TyCtxt, VariantDef}; -use rustc_session::lint; use rustc_span::{Span, DUMMY_SP}; -use rustc_target::abi::{FieldIdx, Integer, Size, VariantIdx, FIRST_VARIANT}; +use rustc_target::abi::{FieldIdx, Integer, VariantIdx, FIRST_VARIANT}; use self::Constructor::*; +use self::MaybeInfiniteInt::*; use self::SliceKind::*; -use super::compare_const_vals; use super::usefulness::{MatchCheckCtxt, PatCtxt}; -use crate::errors::{Overlap, OverlappingRangeEndpoints}; /// Recursively expand this pattern into its subpatterns. Only useful for or-patterns. fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> { @@ -86,324 +86,317 @@ pats } -/// An inclusive interval, used for precise integer exhaustiveness checking. -/// `IntRange`s always store a contiguous range. This means that values are -/// encoded such that `0` encodes the minimum value for the integer, -/// regardless of the signedness. -/// For example, the pattern `-128..=127i8` is encoded as `0..=255`. -/// This makes comparisons and arithmetic on interval endpoints much more -/// straightforward. See `signed_bias` for details. -/// -/// `IntRange` is never used to encode an empty range or a "range" that wraps -/// around the (offset) space: i.e., `range.lo <= range.hi`. 
-#[derive(Clone, PartialEq, Eq)] -pub(crate) struct IntRange { - range: RangeInclusive, - /// Keeps the bias used for encoding the range. It depends on the type of the range and - /// possibly the pointer size of the current architecture. The algorithm ensures we never - /// compare `IntRange`s with different types/architectures. - bias: u128, +/// Whether we have seen a constructor in the column or not. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +enum Presence { + Unseen, + Seen, } -impl IntRange { - #[inline] - fn is_integral(ty: Ty<'_>) -> bool { - matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_) | ty::Bool) - } - - fn is_singleton(&self) -> bool { - self.range.start() == self.range.end() - } - - fn boundaries(&self) -> (u128, u128) { - (*self.range.start(), *self.range.end()) - } +/// A possibly infinite integer. Values are encoded such that the ordering on `u128` matches the +/// natural order on the original type. For example, `-128i8` is encoded as `0` and `127i8` as +/// `255`. See `signed_bias` for details. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub(crate) enum MaybeInfiniteInt { + NegInfinity, + /// Encoded value. DO NOT CONSTRUCT BY HAND; use `new_finite`. + Finite(u128), + /// The integer after `u128::MAX`. We need it to represent `x..=u128::MAX` as an exclusive range. + JustAfterMax, + PosInfinity, +} - #[inline] - fn integral_size_and_signed_bias(tcx: TyCtxt<'_>, ty: Ty<'_>) -> Option<(Size, u128)> { +impl MaybeInfiniteInt { + // The return value of `signed_bias` should be XORed with a value to encode/decode it. + fn signed_bias(tcx: TyCtxt<'_>, ty: Ty<'_>) -> u128 { match *ty.kind() { - ty::Bool => Some((Size::from_bytes(1), 0)), - ty::Char => Some((Size::from_bytes(4), 0)), ty::Int(ity) => { - let size = Integer::from_int_ty(&tcx, ity).size(); - Some((size, 1u128 << (size.bits() as u128 - 1))) + let bits = Integer::from_int_ty(&tcx, ity).size().bits() as u128; + 1u128 << (bits - 1) } - ty::Uint(uty) => Some((Integer::from_uint_ty(&tcx, uty).size(), 0)), - _ => None, + _ => 0, } } - #[inline] - fn from_constant<'tcx>( + fn new_finite(tcx: TyCtxt<'_>, ty: Ty<'_>, bits: u128) -> Self { + let bias = Self::signed_bias(tcx, ty); + // Perform a shift if the underlying types are signed, which makes the interval arithmetic + // type-independent. + let x = bits ^ bias; + Finite(x) + } + fn from_pat_range_bdy<'tcx>( + bdy: PatRangeBoundary<'tcx>, + ty: Ty<'tcx>, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, - value: mir::Const<'tcx>, - ) -> Option { - let ty = value.ty(); - let (target_size, bias) = Self::integral_size_and_signed_bias(tcx, ty)?; - let val = match value { - mir::Const::Ty(c) if let ty::ConstKind::Value(valtree) = c.kind() => { - valtree.unwrap_leaf().to_bits(target_size).ok() + ) -> Self { + match bdy { + PatRangeBoundary::NegInfinity => NegInfinity, + PatRangeBoundary::Finite(value) => { + let bits = value.eval_bits(tcx, param_env); + Self::new_finite(tcx, ty, bits) + } + PatRangeBoundary::PosInfinity => PosInfinity, + } + } + + /// Used only for diagnostics. + /// Note: it is possible to get `isize/usize::MAX+1` here, as explained in the doc for + /// [`IntRange::split`]. This cannot be represented as a `Const`, so we represent it with + /// `PosInfinity`. 
+ fn to_diagnostic_pat_range_bdy<'tcx>( + self, + ty: Ty<'tcx>, + tcx: TyCtxt<'tcx>, + ) -> PatRangeBoundary<'tcx> { + match self { + NegInfinity => PatRangeBoundary::NegInfinity, + Finite(x) => { + let bias = Self::signed_bias(tcx, ty); + let bits = x ^ bias; + let size = ty.primitive_size(tcx); + match Scalar::try_from_uint(bits, size) { + Some(scalar) => { + let value = mir::Const::from_scalar(tcx, scalar, ty); + PatRangeBoundary::Finite(value) + } + // The value doesn't fit. Since `x >= 0` and 0 always encodes the minimum value + // for a type, the problem isn't that the value is too small. So it must be too + // large. + None => PatRangeBoundary::PosInfinity, + } + } + JustAfterMax | PosInfinity => PatRangeBoundary::PosInfinity, + } + } + + /// Note: this will not turn a finite value into an infinite one or vice-versa. + pub(crate) fn minus_one(self) -> Self { + match self { + Finite(n) => match n.checked_sub(1) { + Some(m) => Finite(m), + None => bug!(), + }, + JustAfterMax => Finite(u128::MAX), + x => x, + } + } + /// Note: this will not turn a finite value into an infinite one or vice-versa. + pub(crate) fn plus_one(self) -> Self { + match self { + Finite(n) => match n.checked_add(1) { + Some(m) => Finite(m), + None => JustAfterMax, }, - // This is a more general form of the previous case. - _ => value.try_eval_bits(tcx, param_env), - }?; + JustAfterMax => bug!(), + x => x, + } + } +} + +/// An exclusive interval, used for precise integer exhaustiveness checking. `IntRange`s always +/// store a contiguous range. +/// +/// `IntRange` is never used to encode an empty range or a "range" that wraps around the (offset) +/// space: i.e., `range.lo < range.hi`. +#[derive(Clone, Copy, PartialEq, Eq)] +pub(crate) struct IntRange { + pub(crate) lo: MaybeInfiniteInt, // Must not be `PosInfinity`. + pub(crate) hi: MaybeInfiniteInt, // Must not be `NegInfinity`. +} + +impl IntRange { + #[inline] + pub(super) fn is_integral(ty: Ty<'_>) -> bool { + matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_)) + } - let val = val ^ bias; - Some(IntRange { range: val..=val, bias }) + /// Best effort; will not know that e.g. `255u8..` is a singleton. + pub(super) fn is_singleton(&self) -> bool { + // Since `lo` and `hi` can't be the same `Infinity` and `plus_one` never changes from finite + // to infinite, this correctly only detects ranges that contain exacly one `Finite(x)`. + self.lo.plus_one() == self.hi } #[inline] - fn from_range<'tcx>( - tcx: TyCtxt<'tcx>, - lo: u128, - hi: u128, - ty: Ty<'tcx>, - end: &RangeEnd, - ) -> Option { - Self::is_integral(ty).then(|| { - // Perform a shift if the underlying types are signed, - // which makes the interval arithmetic simpler. - let bias = IntRange::signed_bias(tcx, ty); - let (lo, hi) = (lo ^ bias, hi ^ bias); - let offset = (*end == RangeEnd::Excluded) as u128; - if lo > hi || (lo == hi && *end == RangeEnd::Excluded) { - // This should have been caught earlier by E0030. - bug!("malformed range pattern: {}..={}", lo, (hi - offset)); - } - IntRange { range: lo..=(hi - offset), bias } - }) + fn from_bits<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, bits: u128) -> IntRange { + let x = MaybeInfiniteInt::new_finite(tcx, ty, bits); + IntRange { lo: x, hi: x.plus_one() } } - // The return value of `signed_bias` should be XORed with an endpoint to encode/decode it. 
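// A minimal standalone sketch (8-bit values only; not compiler code) of the XOR-bias
// encoding used by `signed_bias`/`new_finite` above: flipping the sign bit maps the signed
// order onto the unsigned order, so the interval arithmetic can ignore signedness.
fn encode_i8(x: i8) -> u8 {
    (x as u8) ^ 0x80 // bias = 1 << (bits - 1)
}

fn main() {
    assert_eq!(encode_i8(i8::MIN), 0x00); // -128 becomes the smallest encoded value
    assert_eq!(encode_i8(-1), 0x7f);
    assert_eq!(encode_i8(0), 0x80);
    assert_eq!(encode_i8(i8::MAX), 0xff); // 127 becomes the largest encoded value
    // The encoding is monotone: a < b exactly when encode(a) < encode(b).
    assert!(encode_i8(-5) < encode_i8(3));
}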
- fn signed_bias(tcx: TyCtxt<'_>, ty: Ty<'_>) -> u128 { - match *ty.kind() { - ty::Int(ity) => { - let bits = Integer::from_int_ty(&tcx, ity).size().bits() as u128; - 1u128 << (bits - 1) - } - _ => 0, + #[inline] + fn from_range(lo: MaybeInfiniteInt, mut hi: MaybeInfiniteInt, end: RangeEnd) -> IntRange { + if end == RangeEnd::Included { + hi = hi.plus_one(); + } + if lo >= hi { + // This should have been caught earlier by E0030. + bug!("malformed range pattern: {lo:?}..{hi:?}"); } + IntRange { lo, hi } } fn is_subrange(&self, other: &Self) -> bool { - other.range.start() <= self.range.start() && self.range.end() <= other.range.end() + other.lo <= self.lo && self.hi <= other.hi } fn intersection(&self, other: &Self) -> Option { - let (lo, hi) = self.boundaries(); - let (other_lo, other_hi) = other.boundaries(); - if lo <= other_hi && other_lo <= hi { - Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi), bias: self.bias }) + if self.lo < other.hi && other.lo < self.hi { + Some(IntRange { lo: max(self.lo, other.lo), hi: min(self.hi, other.hi) }) } else { None } } - fn suspicious_intersection(&self, other: &Self) -> bool { - // `false` in the following cases: - // 1 ---- // 1 ---------- // 1 ---- // 1 ---- - // 2 ---------- // 2 ---- // 2 ---- // 2 ---- - // - // The following are currently `false`, but could be `true` in the future (#64007): - // 1 --------- // 1 --------- - // 2 ---------- // 2 ---------- - // - // `true` in the following cases: - // 1 ------- // 1 ------- - // 2 -------- // 2 ------- - let (lo, hi) = self.boundaries(); - let (other_lo, other_hi) = other.boundaries(); - (lo == other_hi || hi == other_lo) && !self.is_singleton() && !other.is_singleton() - } - - /// Only used for displaying the range properly. - fn to_pat<'tcx>(&self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Pat<'tcx> { - let (lo, hi) = self.boundaries(); - - let bias = self.bias; - let (lo, hi) = (lo ^ bias, hi ^ bias); - - let env = ty::ParamEnv::empty().and(ty); - let lo_const = mir::Const::from_bits(tcx, lo, env); - let hi_const = mir::Const::from_bits(tcx, hi, env); - - let kind = if lo == hi { - PatKind::Constant { value: lo_const } - } else { - PatKind::Range(Box::new(PatRange { - lo: lo_const, - hi: hi_const, - end: RangeEnd::Included, - })) - }; - - Pat { ty, span: DUMMY_SP, kind } - } - - /// Lint on likely incorrect range patterns (#63987) - pub(super) fn lint_overlapping_range_endpoints<'a, 'p: 'a, 'tcx: 'a>( + /// Partition a range of integers into disjoint subranges. This does constructor splitting for + /// integer ranges as explained at the top of the file. + /// + /// This returns an output that covers `self`. The output is split so that the only + /// intersections between an output range and a column range are inclusions. No output range + /// straddles the boundary of one of the inputs. + /// + /// Additionally, we track for each output range whether it is covered by one of the column ranges or not. + /// + /// The following input: + /// ```text + /// (--------------------------) // `self` + /// (------) (----------) (-) + /// (------) (--------) + /// ``` + /// is first intersected with `self`: + /// ```text + /// (--------------------------) // `self` + /// (----) (----------) (-) + /// (------) (--------) + /// ``` + /// and then iterated over as follows: + /// ```text + /// (-(--)-(-)-(------)-)--(-)- + /// ``` + /// where each sequence of dashes is an output range, and dashes outside parentheses are marked + /// as `Presence::Missing`. 
+ /// + /// ## `isize`/`usize` + /// + /// Whereas a wildcard of type `i32` stands for the range `i32::MIN..=i32::MAX`, a `usize` + /// wildcard stands for `0..PosInfinity` and a `isize` wildcard stands for + /// `NegInfinity..PosInfinity`. In other words, as far as `IntRange` is concerned, there are + /// values before `isize::MIN` and after `usize::MAX`/`isize::MAX`. + /// This is to avoid e.g. `0..(u32::MAX as usize)` from being exhaustive on one architecture and + /// not others. See discussions around the `precise_pointer_size_matching` feature for more + /// details. + /// + /// These infinities affect splitting subtly: it is possible to get `NegInfinity..0` and + /// `usize::MAX+1..PosInfinity` in the output. Diagnostics must be careful to handle these + /// fictitious ranges sensibly. + fn split( &self, - pcx: &PatCtxt<'_, 'p, 'tcx>, - pats: impl Iterator>, - column_count: usize, - lint_root: HirId, - ) { - if self.is_singleton() { - return; - } - - if column_count != 1 { - // FIXME: for now, only check for overlapping ranges on simple range - // patterns. Otherwise with the current logic the following is detected - // as overlapping: - // ``` - // match (0u8, true) { - // (0 ..= 125, false) => {} - // (125 ..= 255, true) => {} - // _ => {} - // } - // ``` - return; - } - - let overlap: Vec<_> = pats - .filter_map(|pat| Some((pat.ctor().as_int_range()?, pat.span()))) - .filter(|(range, _)| self.suspicious_intersection(range)) - .map(|(range, span)| Overlap { - range: self.intersection(&range).unwrap().to_pat(pcx.cx.tcx, pcx.ty), - span, - }) + column_ranges: impl Iterator, + ) -> impl Iterator { + // The boundaries of ranges in `column_ranges` intersected with `self`. + // We do parenthesis matching for input ranges. A boundary counts as +1 if it starts + // a range and -1 if it ends it. When the count is > 0 between two boundaries, we + // are within an input range. + let mut boundaries: Vec<(MaybeInfiniteInt, isize)> = column_ranges + .filter_map(|r| self.intersection(&r)) + .flat_map(|r| [(r.lo, 1), (r.hi, -1)]) .collect(); - - if !overlap.is_empty() { - pcx.cx.tcx.emit_spanned_lint( - lint::builtin::OVERLAPPING_RANGE_ENDPOINTS, - lint_root, - pcx.span, - OverlappingRangeEndpoints { overlap, range: pcx.span }, - ); - } + // We sort by boundary, and for each boundary we sort the "closing parentheses" first. The + // order of +1/-1 for a same boundary value is actually irrelevant, because we only look at + // the accumulated count between distinct boundary values. + boundaries.sort_unstable(); + + // Accumulate parenthesis counts. + let mut paren_counter = 0isize; + // Gather pairs of adjacent boundaries. + let mut prev_bdy = self.lo; + boundaries + .into_iter() + // End with the end of the range. The count is ignored. + .chain(once((self.hi, 0))) + // List pairs of adjacent boundaries and the count between them. + .map(move |(bdy, delta)| { + // `delta` affects the count as we cross `bdy`, so the relevant count between + // `prev_bdy` and `bdy` is untouched by `delta`. + let ret = (prev_bdy, paren_counter, bdy); + prev_bdy = bdy; + paren_counter += delta; + ret + }) + // Skip empty ranges. + .filter(|&(prev_bdy, _, bdy)| prev_bdy != bdy) + // Convert back to ranges. 
+ .map(move |(prev_bdy, paren_count, bdy)| { + use Presence::*; + let presence = if paren_count > 0 { Seen } else { Unseen }; + let range = IntRange { lo: prev_bdy, hi: bdy }; + (presence, range) + }) } - /// See `Constructor::is_covered_by` - fn is_covered_by(&self, other: &Self) -> bool { - if self.intersection(other).is_some() { - // Constructor splitting should ensure that all intersections we encounter are actually - // inclusions. - assert!(self.is_subrange(other)); - true + /// Whether the range denotes the fictitious values before `isize::MIN` or after + /// `usize::MAX`/`isize::MAX` (see doc of [`IntRange::split`] for why these exist). + pub(crate) fn is_beyond_boundaries<'tcx>(&self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> bool { + ty.is_ptr_sized_integral() && !tcx.features().precise_pointer_size_matching && { + // The two invalid ranges are `NegInfinity..isize::MIN` (represented as + // `NegInfinity..0`), and `{u,i}size::MAX+1..PosInfinity`. `to_diagnostic_pat_range_bdy` + // converts `MAX+1` to `PosInfinity`, and we couldn't have `PosInfinity` in `self.lo` + // otherwise. + let lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx); + matches!(lo, PatRangeBoundary::PosInfinity) + || matches!(self.hi, MaybeInfiniteInt::Finite(0)) + } + } + /// Only used for displaying the range. + pub(super) fn to_diagnostic_pat<'tcx>(&self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> Pat<'tcx> { + let kind = if matches!((self.lo, self.hi), (NegInfinity, PosInfinity)) { + PatKind::Wild + } else if self.is_singleton() { + let lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx); + let value = lo.as_finite().unwrap(); + PatKind::Constant { value } } else { - false - } + // We convert to an inclusive range for diagnostics. + let mut end = RangeEnd::Included; + let mut lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx); + if matches!(lo, PatRangeBoundary::PosInfinity) { + // The only reason to get `PosInfinity` here is the special case where + // `to_diagnostic_pat_range_bdy` found `{u,i}size::MAX+1`. So the range denotes the + // fictitious values after `{u,i}size::MAX` (see [`IntRange::split`] for why we do + // this). We show this to the user as `usize::MAX..` which is slightly incorrect but + // probably clear enough. + let c = ty.numeric_max_val(tcx).unwrap(); + let value = mir::Const::from_ty_const(c, tcx); + lo = PatRangeBoundary::Finite(value); + } + let hi = if matches!(self.hi, MaybeInfiniteInt::Finite(0)) { + // The range encodes `..ty::MIN`, so we can't convert it to an inclusive range. + end = RangeEnd::Excluded; + self.hi + } else { + self.hi.minus_one() + }; + let hi = hi.to_diagnostic_pat_range_bdy(ty, tcx); + PatKind::Range(Box::new(PatRange { lo, hi, end, ty })) + }; + + Pat { ty, span: DUMMY_SP, kind } } } -/// Note: this is often not what we want: e.g. `false` is converted into the range `0..=0` and -/// would be displayed as such. To render properly, convert to a pattern first. +/// Note: this will render signed ranges incorrectly. To render properly, convert to a pattern +/// first. impl fmt::Debug for IntRange { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let (lo, hi) = self.boundaries(); - let bias = self.bias; - let (lo, hi) = (lo ^ bias, hi ^ bias); - write!(f, "{lo}")?; - write!(f, "{}", RangeEnd::Included)?; - write!(f, "{hi}") - } -} - -/// Represents a border between 2 integers. Because the intervals spanning borders must be able to -/// cover every integer, we need to be able to represent 2^128 + 1 such borders. 
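The boundary sweep in `IntRange::split` above can be modelled in isolation: each column range contributes a +1 at its start and a -1 at its end, and the running count between adjacent boundaries says whether that output subrange is covered. A simplified sketch of that sweep over half-open `u128` ranges, assuming plain integers rather than `MaybeInfiniteInt`; the `Range`, `Presence`, and `split` names below are standalone stand-ins, not the compiler's types:

```rust
use std::cmp::{max, min};

/// Simplified stand-in for `IntRange`: a non-empty half-open range `lo..hi`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Range {
    lo: u128,
    hi: u128,
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum Presence {
    Unseen,
    Seen,
}

/// Split `this` against the column ranges: boundaries count +1 when a range opens
/// and -1 when it closes; the count between two adjacent boundaries tells us whether
/// the output subrange between them is covered by the column.
fn split(this: Range, column: &[Range]) -> Vec<(Presence, Range)> {
    let mut boundaries: Vec<(u128, isize)> = column
        .iter()
        // Intersect each column range with `this`, dropping empty intersections.
        .filter_map(|r| {
            let (lo, hi) = (max(this.lo, r.lo), min(this.hi, r.hi));
            (lo < hi).then_some(Range { lo, hi })
        })
        .flat_map(|r| [(r.lo, 1), (r.hi, -1)])
        .collect();
    boundaries.sort_unstable();

    let mut out = Vec::new();
    let mut count = 0isize;
    let mut prev = this.lo;
    for (bdy, delta) in boundaries.into_iter().chain([(this.hi, 0)]) {
        if prev != bdy {
            let presence = if count > 0 { Presence::Seen } else { Presence::Unseen };
            out.push((presence, Range { lo: prev, hi: bdy }));
        }
        prev = bdy;
        count += delta;
    }
    out
}

fn main() {
    // Splitting 0..10 against the column ranges 2..5 and 4..8 yields
    // 0..2 (unseen), 2..4, 4..5, 5..8 (seen), 8..10 (unseen).
    let parts = split(Range { lo: 0, hi: 10 }, &[Range { lo: 2, hi: 5 }, Range { lo: 4, hi: 8 }]);
    assert_eq!(parts.len(), 5);
    assert_eq!(parts[0], (Presence::Unseen, Range { lo: 0, hi: 2 }));
    assert_eq!(parts[3], (Presence::Seen, Range { lo: 5, hi: 8 }));
}
```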
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] -enum IntBorder { - JustBefore(u128), - AfterMax, -} - -/// A range of integers that is partitioned into disjoint subranges. This does constructor -/// splitting for integer ranges as explained at the top of the file. -/// -/// This is fed multiple ranges, and returns an output that covers the input, but is split so that -/// the only intersections between an output range and a seen range are inclusions. No output range -/// straddles the boundary of one of the inputs. -/// -/// The following input: -/// ```text -/// |-------------------------| // `self` -/// |------| |----------| |----| -/// |-------| |-------| -/// ``` -/// would be iterated over as follows: -/// ```text -/// ||---|--||-|---|---|---|--| -/// ``` -#[derive(Debug, Clone)] -struct SplitIntRange { - /// The range we are splitting - range: IntRange, - /// The borders of ranges we have seen. They are all contained within `range`. This is kept - /// sorted. - borders: Vec, -} - -impl SplitIntRange { - fn new(range: IntRange) -> Self { - SplitIntRange { range, borders: Vec::new() } - } - - /// Internal use - fn to_borders(r: IntRange) -> [IntBorder; 2] { - use IntBorder::*; - let (lo, hi) = r.boundaries(); - let lo = JustBefore(lo); - let hi = match hi.checked_add(1) { - Some(m) => JustBefore(m), - None => AfterMax, - }; - [lo, hi] - } - - /// Add ranges relative to which we split. - fn split(&mut self, ranges: impl Iterator) { - let this_range = &self.range; - let included_ranges = ranges.filter_map(|r| this_range.intersection(&r)); - let included_borders = included_ranges.flat_map(|r| { - let borders = Self::to_borders(r); - once(borders[0]).chain(once(borders[1])) - }); - self.borders.extend(included_borders); - self.borders.sort_unstable(); - } - - /// Iterate over the contained ranges. - fn iter(&self) -> impl Iterator + Captures<'_> { - use IntBorder::*; - - let self_range = Self::to_borders(self.range.clone()); - // Start with the start of the range. - let mut prev_border = self_range[0]; - self.borders - .iter() - .copied() - // End with the end of the range. - .chain(once(self_range[1])) - // List pairs of adjacent borders. - .map(move |border| { - let ret = (prev_border, border); - prev_border = border; - ret - }) - // Skip duplicates. - .filter(|(prev_border, border)| prev_border != border) - // Finally, convert to ranges. - .map(move |(prev_border, border)| { - let range = match (prev_border, border) { - (JustBefore(n), JustBefore(m)) if n < m => n..=(m - 1), - (JustBefore(n), AfterMax) => n..=u128::MAX, - _ => unreachable!(), // Ruled out by the sorting and filtering we did - }; - IntRange { range, bias: self.range.bias } - }) + if let Finite(lo) = self.lo { + write!(f, "{lo}")?; + } + write!(f, "{}", RangeEnd::Excluded)?; + if let Finite(hi) = self.hi { + write!(f, "{hi}")?; + } + Ok(()) } } @@ -463,142 +456,164 @@ fn is_covered_by(self, other: Self) -> bool { other.kind.covers_length(self.arity()) } -} -/// This computes constructor splitting for variable-length slices, as explained at the top of the -/// file. -/// -/// A slice pattern `[x, .., y]` behaves like the infinite or-pattern `[x, y] | [x, _, y] | [x, _, -/// _, y] | ...`. The corresponding value constructors are fixed-length array constructors above a -/// given minimum length. We obviously can't list this infinitude of constructors. Thankfully, -/// it turns out that for each finite set of slice patterns, all sufficiently large array lengths -/// are equivalent. 
-/// -/// Let's look at an example, where we are trying to split the last pattern: -/// ``` -/// # fn foo(x: &[bool]) { -/// match x { -/// [true, true, ..] => {} -/// [.., false, false] => {} -/// [..] => {} -/// } -/// # } -/// ``` -/// Here are the results of specialization for the first few lengths: -/// ``` -/// # fn foo(x: &[bool]) { match x { -/// // length 0 -/// [] => {} -/// // length 1 -/// [_] => {} -/// // length 2 -/// [true, true] => {} -/// [false, false] => {} -/// [_, _] => {} -/// // length 3 -/// [true, true, _ ] => {} -/// [_, false, false] => {} -/// [_, _, _ ] => {} -/// // length 4 -/// [true, true, _, _ ] => {} -/// [_, _, false, false] => {} -/// [_, _, _, _ ] => {} -/// // length 5 -/// [true, true, _, _, _ ] => {} -/// [_, _, _, false, false] => {} -/// [_, _, _, _, _ ] => {} -/// # _ => {} -/// # }} -/// ``` -/// -/// If we went above length 5, we would simply be inserting more columns full of wildcards in the -/// middle. This means that the set of witnesses for length `l >= 5` if equivalent to the set for -/// any other `l' >= 5`: simply add or remove wildcards in the middle to convert between them. -/// -/// This applies to any set of slice patterns: there will be a length `L` above which all lengths -/// behave the same. This is exactly what we need for constructor splitting. Therefore a -/// variable-length slice can be split into a variable-length slice of minimal length `L`, and many -/// fixed-length slices of lengths `< L`. -/// -/// For each variable-length pattern `p` with a prefix of length `plₚ` and suffix of length `slₚ`, -/// only the first `plₚ` and the last `slₚ` elements are examined. Therefore, as long as `L` is -/// positive (to avoid concerns about empty types), all elements after the maximum prefix length -/// and before the maximum suffix length are not examined by any variable-length pattern, and -/// therefore can be added/removed without affecting them - creating equivalent patterns from any -/// sufficiently-large length. -/// -/// Of course, if fixed-length patterns exist, we must be sure that our length is large enough to -/// miss them all, so we can pick `L = max(max(FIXED_LEN)+1, max(PREFIX_LEN) + max(SUFFIX_LEN))` -/// -/// `max_slice` below will be made to have arity `L`. -#[derive(Debug)] -struct SplitVarLenSlice { - /// If the type is an array, this is its size. - array_len: Option, - /// The arity of the input slice. - arity: usize, - /// The smallest slice bigger than any slice seen. `max_slice.arity()` is the length `L` - /// described above. - max_slice: SliceKind, -} - -impl SplitVarLenSlice { - fn new(prefix: usize, suffix: usize, array_len: Option) -> Self { - SplitVarLenSlice { array_len, arity: prefix + suffix, max_slice: VarLen(prefix, suffix) } - } - - /// Pass a set of slices relative to which to split this one. - fn split(&mut self, slices: impl Iterator) { - let VarLen(max_prefix_len, max_suffix_len) = &mut self.max_slice else { - // No need to split - return; - }; - // We grow `self.max_slice` to be larger than all slices encountered, as described above. - // For diagnostics, we keep the prefix and suffix lengths separate, but grow them so that - // `L = max_prefix_len + max_suffix_len`. 
- let mut max_fixed_len = 0; - for slice in slices { - match slice { - FixedLen(len) => { - max_fixed_len = cmp::max(max_fixed_len, len); - } - VarLen(prefix, suffix) => { - *max_prefix_len = cmp::max(*max_prefix_len, prefix); - *max_suffix_len = cmp::max(*max_suffix_len, suffix); + /// This computes constructor splitting for variable-length slices, as explained at the top of + /// the file. + /// + /// A slice pattern `[x, .., y]` behaves like the infinite or-pattern `[x, y] | [x, _, y] | [x, + /// _, _, y] | etc`. The corresponding value constructors are fixed-length array constructors of + /// corresponding lengths. We obviously can't list this infinitude of constructors. + /// Thankfully, it turns out that for each finite set of slice patterns, all sufficiently large + /// array lengths are equivalent. + /// + /// Let's look at an example, where we are trying to split the last pattern: + /// ``` + /// # fn foo(x: &[bool]) { + /// match x { + /// [true, true, ..] => {} + /// [.., false, false] => {} + /// [..] => {} + /// } + /// # } + /// ``` + /// Here are the results of specialization for the first few lengths: + /// ``` + /// # fn foo(x: &[bool]) { match x { + /// // length 0 + /// [] => {} + /// // length 1 + /// [_] => {} + /// // length 2 + /// [true, true] => {} + /// [false, false] => {} + /// [_, _] => {} + /// // length 3 + /// [true, true, _ ] => {} + /// [_, false, false] => {} + /// [_, _, _ ] => {} + /// // length 4 + /// [true, true, _, _ ] => {} + /// [_, _, false, false] => {} + /// [_, _, _, _ ] => {} + /// // length 5 + /// [true, true, _, _, _ ] => {} + /// [_, _, _, false, false] => {} + /// [_, _, _, _, _ ] => {} + /// # _ => {} + /// # }} + /// ``` + /// + /// We see that above length 4, we are simply inserting columns full of wildcards in the middle. + /// This means that specialization and witness computation with slices of length `l >= 4` will + /// give equivalent results regardless of `l`. This applies to any set of slice patterns: there + /// will be a length `L` above which all lengths behave the same. This is exactly what we need + /// for constructor splitting. + /// + /// A variable-length slice pattern covers all lengths from its arity up to infinity. As we just + /// saw, we can split this in two: lengths below `L` are treated individually with a + /// fixed-length slice each; lengths above `L` are grouped into a single variable-length slice + /// constructor. + /// + /// For each variable-length slice pattern `p` with a prefix of length `plₚ` and suffix of + /// length `slₚ`, only the first `plₚ` and the last `slₚ` elements are examined. Therefore, as + /// long as `L` is positive (to avoid concerns about empty types), all elements after the + /// maximum prefix length and before the maximum suffix length are not examined by any + /// variable-length pattern, and therefore can be ignored. This gives us a way to compute `L`. + /// + /// Additionally, if fixed-length patterns exist, we must pick an `L` large enough to miss them, + /// so we can pick `L = max(max(FIXED_LEN)+1, max(PREFIX_LEN) + max(SUFFIX_LEN))`. + /// `max_slice` below will be made to have this arity `L`. + /// + /// If `self` is fixed-length, it is returned as-is. + /// + /// Additionally, we track for each output slice whether it is covered by one of the column slices or not. + fn split( + self, + column_slices: impl Iterator, + ) -> impl Iterator { + // Range of lengths below `L`. 
+ let smaller_lengths; + let arity = self.arity(); + let mut max_slice = self.kind; + // Tracks the smallest variable-length slice we've seen. Any slice arity above it is + // therefore `Presence::Seen` in the column. + let mut min_var_len = usize::MAX; + // Tracks the fixed-length slices we've seen, to mark them as `Presence::Seen`. + let mut seen_fixed_lens = FxHashSet::default(); + match &mut max_slice { + VarLen(max_prefix_len, max_suffix_len) => { + // We grow `max_slice` to be larger than all slices encountered, as described above. + // For diagnostics, we keep the prefix and suffix lengths separate, but grow them so that + // `L = max_prefix_len + max_suffix_len`. + let mut max_fixed_len = 0; + for slice in column_slices { + match slice.kind { + FixedLen(len) => { + max_fixed_len = cmp::max(max_fixed_len, len); + if arity <= len { + seen_fixed_lens.insert(len); + } + } + VarLen(prefix, suffix) => { + *max_prefix_len = cmp::max(*max_prefix_len, prefix); + *max_suffix_len = cmp::max(*max_suffix_len, suffix); + min_var_len = cmp::min(min_var_len, prefix + suffix); + } + } + } + // We want `L = max(L, max_fixed_len + 1)`, modulo the fact that we keep prefix and + // suffix separate. + if max_fixed_len + 1 >= *max_prefix_len + *max_suffix_len { + // The subtraction can't overflow thanks to the above check. + // The new `max_prefix_len` is larger than its previous value. + *max_prefix_len = max_fixed_len + 1 - *max_suffix_len; } - } - } - // We want `L = max(L, max_fixed_len + 1)`, modulo the fact that we keep prefix and - // suffix separate. - if max_fixed_len + 1 >= *max_prefix_len + *max_suffix_len { - // The subtraction can't overflow thanks to the above check. - // The new `max_prefix_len` is larger than its previous value. - *max_prefix_len = max_fixed_len + 1 - *max_suffix_len; - } - - // We cap the arity of `max_slice` at the array size. - match self.array_len { - Some(len) if self.max_slice.arity() >= len => self.max_slice = FixedLen(len), - _ => {} - } - } - /// Iterate over the partition of this slice. - fn iter(&self) -> impl Iterator + Captures<'_> { - let smaller_lengths = match self.array_len { - // The only admissible fixed-length slice is one of the array size. Whether `max_slice` - // is fixed-length or variable-length, it will be the only relevant slice to output - // here. - Some(_) => 0..0, // empty range - // We cover all arities in the range `(self.arity..infinity)`. We split that range into - // two: lengths smaller than `max_slice.arity()` are treated independently as - // fixed-lengths slices, and lengths above are captured by `max_slice`. - None => self.arity..self.max_slice.arity(), + // We cap the arity of `max_slice` at the array size. + match self.array_len { + Some(len) if max_slice.arity() >= len => max_slice = FixedLen(len), + _ => {} + } + + smaller_lengths = match self.array_len { + // The only admissible fixed-length slice is one of the array size. Whether `max_slice` + // is fixed-length or variable-length, it will be the only relevant slice to output + // here. + Some(_) => 0..0, // empty range + // We need to cover all arities in the range `(arity..infinity)`. We split that + // range into two: lengths smaller than `max_slice.arity()` are treated + // independently as fixed-lengths slices, and lengths above are captured by + // `max_slice`. + None => self.arity()..max_slice.arity(), + }; + } + FixedLen(_) => { + // No need to split here. We only track presence. 
+ for slice in column_slices { + match slice.kind { + FixedLen(len) => { + if len == arity { + seen_fixed_lens.insert(len); + } + } + VarLen(prefix, suffix) => { + min_var_len = cmp::min(min_var_len, prefix + suffix); + } + } + } + smaller_lengths = 0..0; + } }; - smaller_lengths - .map(FixedLen) - .chain(once(self.max_slice)) - .map(move |kind| Slice::new(self.array_len, kind)) + + smaller_lengths.map(FixedLen).chain(once(max_slice)).map(move |kind| { + let arity = kind.arity(); + let seen = if min_var_len <= arity || seen_fixed_lens.contains(&arity) { + Presence::Seen + } else { + Presence::Unseen + }; + (seen, Slice::new(self.array_len, kind)) + }) } } @@ -616,10 +631,13 @@ Single, /// Enum variants. Variant(VariantIdx), + /// Booleans + Bool(bool), /// Ranges of integer literal values (`2`, `2..=5` or `2..5`). IntRange(IntRange), /// Ranges of floating-point literal values (`2.0..=5.2`). - FloatRange(mir::Const<'tcx>, mir::Const<'tcx>, RangeEnd), + F32Range(IeeeFloat, IeeeFloat, RangeEnd), + F64Range(IeeeFloat, IeeeFloat, RangeEnd), /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately. Str(mir::Const<'tcx>), /// Array and slice patterns. @@ -628,35 +646,45 @@ /// boxes for the purposes of exhaustiveness: we must not inspect them, and they /// don't count towards making a match exhaustive. Opaque, + /// Or-pattern. + Or, + /// Wildcard pattern. + Wildcard, /// Fake extra constructor for enums that aren't allowed to be matched exhaustively. Also used /// for those types for which we cannot list constructors explicitly, like `f64` and `str`. NonExhaustive, - /// Stands for constructors that are not seen in the matrix, as explained in the documentation - /// for [`SplitWildcard`]. The carried `bool` is used for the `non_exhaustive_omitted_patterns` - /// lint. - Missing { nonexhaustive_enum_missing_real_variants: bool }, - /// Wildcard pattern. - Wildcard, - /// Or-pattern. - Or, + /// Fake extra constructor for variants that should not be mentioned in diagnostics. + /// We use this for variants behind an unstable gate as well as + /// `#[doc(hidden)]` ones. + Hidden, + /// Fake extra constructor for constructors that are not seen in the matrix, as explained in the + /// code for [`Constructor::split`]. + Missing, } impl<'tcx> Constructor<'tcx> { - pub(super) fn is_wildcard(&self) -> bool { - matches!(self, Wildcard) - } - pub(super) fn is_non_exhaustive(&self) -> bool { matches!(self, NonExhaustive) } - fn as_int_range(&self) -> Option<&IntRange> { + pub(super) fn as_variant(&self) -> Option { + match self { + Variant(i) => Some(*i), + _ => None, + } + } + fn as_bool(&self) -> Option { + match self { + Bool(b) => Some(*b), + _ => None, + } + } + pub(super) fn as_int_range(&self) -> Option<&IntRange> { match self { IntRange(range) => Some(range), _ => None, } } - fn as_slice(&self) -> Option { match self { Slice(slice) => Some(*slice), @@ -664,32 +692,6 @@ } } - /// Checks if the `Constructor` is a variant and `TyCtxt::eval_stability` returns - /// `EvalResult::Deny { .. }`. - /// - /// This means that the variant has a stdlib unstable feature marking it. - pub(super) fn is_unstable_variant(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> bool { - if let Constructor::Variant(idx) = self && let ty::Adt(adt, _) = pcx.ty.kind() { - let variant_def_id = adt.variant(*idx).def_id; - // Filter variants that depend on a disabled unstable feature. - return matches!( - pcx.cx.tcx.eval_stability(variant_def_id, None, DUMMY_SP, None), - EvalResult::Deny { .. 
} - ); - } - false - } - - /// Checks if the `Constructor` is a `Constructor::Variant` with a `#[doc(hidden)]` - /// attribute from a type not local to the current crate. - pub(super) fn is_doc_hidden_variant(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> bool { - if let Constructor::Variant(idx) = self && let ty::Adt(adt, _) = pcx.ty.kind() { - let variant_def_id = adt.variants()[*idx].def_id; - return pcx.cx.tcx.is_doc_hidden(variant_def_id) && !variant_def_id.is_local(); - } - false - } - fn variant_index_for_adt(&self, adt: ty::AdtDef<'tcx>) -> VariantIdx { match *self { Variant(idx) => idx, @@ -721,30 +723,33 @@ _ => bug!("Unexpected type for `Single` constructor: {:?}", pcx.ty), }, Slice(slice) => slice.arity(), - Str(..) - | FloatRange(..) + Bool(..) | IntRange(..) - | NonExhaustive + | F32Range(..) + | F64Range(..) + | Str(..) | Opaque + | NonExhaustive + | Hidden | Missing { .. } | Wildcard => 0, Or => bug!("The `Or` constructor doesn't have a fixed arity"), } } - /// Some constructors (namely `Wildcard`, `IntRange` and `Slice`) actually stand for a set of actual - /// constructors (like variants, integers or fixed-sized slices). When specializing for these - /// constructors, we want to be specialising for the actual underlying constructors. + /// Some constructors (namely `Wildcard`, `IntRange` and `Slice`) actually stand for a set of + /// actual constructors (like variants, integers or fixed-sized slices). When specializing for + /// these constructors, we want to be specialising for the actual underlying constructors. /// Naively, we would simply return the list of constructors they correspond to. We instead are - /// more clever: if there are constructors that we know will behave the same wrt the current - /// matrix, we keep them grouped. For example, all slices of a sufficiently large length - /// will either be all useful or all non-useful with a given matrix. + /// more clever: if there are constructors that we know will behave the same w.r.t. the current + /// matrix, we keep them grouped. For example, all slices of a sufficiently large length will + /// either be all useful or all non-useful with a given matrix. /// /// See the branches for details on how the splitting is done. /// - /// This function may discard some irrelevant constructors if this preserves behavior and - /// diagnostics. Eg. for the `_` case, we ignore the constructors already present in the - /// matrix, unless all of them are. + /// This function may discard some irrelevant constructors if this preserves behavior. Eg. for + /// the `_` case, we ignore the constructors already present in the column, unless all of them + /// are. pub(super) fn split<'a>( &self, pcx: &PatCtxt<'_, '_, 'tcx>, @@ -755,23 +760,68 @@ { match self { Wildcard => { - let mut split_wildcard = SplitWildcard::new(pcx); - split_wildcard.split(pcx, ctors); - split_wildcard.into_ctors(pcx) - } - // Fast-track if the range is trivial. In particular, we don't do the overlapping - // ranges check. 
- IntRange(ctor_range) if !ctor_range.is_singleton() => { - let mut split_range = SplitIntRange::new(ctor_range.clone()); - let int_ranges = ctors.filter_map(|ctor| ctor.as_int_range()); - split_range.split(int_ranges.cloned()); - split_range.iter().map(IntRange).collect() - } - &Slice(Slice { kind: VarLen(self_prefix, self_suffix), array_len }) => { - let mut split_self = SplitVarLenSlice::new(self_prefix, self_suffix, array_len); - let slices = ctors.filter_map(|c| c.as_slice()).map(|s| s.kind); - split_self.split(slices); - split_self.iter().map(Slice).collect() + let split_set = ConstructorSet::for_ty(pcx.cx, pcx.ty).split(pcx, ctors); + if !split_set.missing.is_empty() { + // We are splitting a wildcard in order to compute its usefulness. Some constructors are + // not present in the column. The first thing we note is that specializing with any of + // the missing constructors would select exactly the rows with wildcards. Moreover, they + // would all return equivalent results. We can therefore group them all into a + // fictitious `Missing` constructor. + // + // As an important optimization, this function will skip all the present constructors. + // This is correct because specializing with any of the present constructors would + // select a strict superset of the wildcard rows, and thus would only find witnesses + // already found with the `Missing` constructor. + // This does mean that diagnostics are incomplete: in + // ``` + // match x { + // Some(true) => {} + // } + // ``` + // we report `None` as missing but not `Some(false)`. + // + // When all the constructors are missing we can equivalently return the `Wildcard` + // constructor on its own. The difference between `Wildcard` and `Missing` will then + // only be in diagnostics. + + // If some constructors are missing, we typically want to report those constructors, + // e.g.: + // ``` + // enum Direction { N, S, E, W } + // let Direction::N = ...; + // ``` + // we can report 3 witnesses: `S`, `E`, and `W`. + // + // However, if the user didn't actually specify a constructor + // in this arm, e.g., in + // ``` + // let x: (Direction, Direction, bool) = ...; + // let (_, _, false) = x; + // ``` + // we don't want to show all 16 possible witnesses `(, , + // true)` - we are satisfied with `(_, _, true)`. So if all constructors are missing we + // prefer to report just a wildcard `_`. + // + // The exception is: if we are at the top-level, for example in an empty match, we + // usually prefer to report the full list of constructors. + let all_missing = split_set.present.is_empty(); + let report_when_all_missing = + pcx.is_top_level && !IntRange::is_integral(pcx.ty); + let ctor = + if all_missing && !report_when_all_missing { Wildcard } else { Missing }; + smallvec![ctor] + } else { + split_set.present + } + } + // Fast-track if the range is trivial. + IntRange(this_range) if !this_range.is_singleton() => { + let column_ranges = ctors.filter_map(|ctor| ctor.as_int_range()).cloned(); + this_range.split(column_ranges).map(|(_, range)| IntRange(range)).collect() + } + Slice(this_slice @ Slice { kind: VarLen(..), .. }) => { + let column_slices = ctors.filter_map(|c| c.as_slice()); + this_slice.split(column_slices).map(|(_, slice)| Slice(slice)).collect() } // Any other constructor can be used unchanged. _ => smallvec![self.clone()], @@ -788,28 +838,29 @@ match (self, other) { // Wildcards cover anything (_, Wildcard) => true, - // The missing ctors are not covered by anything in the matrix except wildcards. - (Missing { .. 
} | Wildcard, _) => false, + // Only a wildcard pattern can match these special constructors. + (Wildcard | Missing { .. } | NonExhaustive | Hidden, _) => false, (Single, Single) => true, (Variant(self_id), Variant(other_id)) => self_id == other_id, + (Bool(self_b), Bool(other_b)) => self_b == other_b, - (IntRange(self_range), IntRange(other_range)) => self_range.is_covered_by(other_range), - ( - FloatRange(self_from, self_to, self_end), - FloatRange(other_from, other_to, other_end), - ) => { - match ( - compare_const_vals(pcx.cx.tcx, *self_to, *other_to, pcx.cx.param_env), - compare_const_vals(pcx.cx.tcx, *self_from, *other_from, pcx.cx.param_env), - ) { - (Some(to), Some(from)) => { - (from == Ordering::Greater || from == Ordering::Equal) - && (to == Ordering::Less - || (other_end == self_end && to == Ordering::Equal)) + (IntRange(self_range), IntRange(other_range)) => self_range.is_subrange(other_range), + (F32Range(self_from, self_to, self_end), F32Range(other_from, other_to, other_end)) => { + self_from.ge(other_from) + && match self_to.partial_cmp(other_to) { + Some(Ordering::Less) => true, + Some(Ordering::Equal) => other_end == self_end, + _ => false, + } + } + (F64Range(self_from, self_to, self_end), F64Range(other_from, other_to, other_end)) => { + self_from.ge(other_from) + && match self_to.partial_cmp(other_to) { + Some(Ordering::Less) => true, + Some(Ordering::Equal) => other_end == self_end, + _ => false, } - _ => false, - } } (Str(self_val), Str(other_val)) => { // FIXME Once valtrees are available we can directly use the bytes @@ -820,8 +871,6 @@ // We are trying to inspect an opaque constant. Thus we skip the row. (Opaque, _) | (_, Opaque) => false, - // Only a wildcard pattern can match the special extra constructor. - (NonExhaustive, _) => false, _ => span_bug!( pcx.span, @@ -831,96 +880,131 @@ ), } } +} - /// Faster version of `is_covered_by` when applied to many constructors. `used_ctors` is - /// assumed to be built from `matrix.head_ctors()` with wildcards and opaques filtered out, - /// and `self` is assumed to have been split from a wildcard. - fn is_covered_by_any<'p>( - &self, - pcx: &PatCtxt<'_, 'p, 'tcx>, - used_ctors: &[Constructor<'tcx>], - ) -> bool { - if used_ctors.is_empty() { - return false; - } - - // This must be kept in sync with `is_covered_by`. - match self { - // If `self` is `Single`, `used_ctors` cannot contain anything else than `Single`s. - Single => !used_ctors.is_empty(), - Variant(vid) => used_ctors.iter().any(|c| matches!(c, Variant(i) if i == vid)), - IntRange(range) => used_ctors - .iter() - .filter_map(|c| c.as_int_range()) - .any(|other| range.is_covered_by(other)), - Slice(slice) => used_ctors - .iter() - .filter_map(|c| c.as_slice()) - .any(|other| slice.is_covered_by(other)), - // This constructor is never covered by anything else - NonExhaustive => false, - Str(..) | FloatRange(..) | Opaque | Missing { .. } | Wildcard | Or => { - span_bug!(pcx.span, "found unexpected ctor in all_ctors: {:?}", self) - } - } - } +/// Describes the set of all constructors for a type. +#[derive(Debug)] +pub(super) enum ConstructorSet { + /// The type has a single constructor, e.g. `&T` or a struct. + Single, + /// This type has the following list of constructors. + /// Some variants are hidden, which means they won't be mentioned in diagnostics unless the user + /// mentioned them first. We use this for variants behind an unstable gate as well as + /// `#[doc(hidden)]` ones. 
+ Variants { + visible_variants: Vec, + hidden_variants: Vec, + non_exhaustive: bool, + }, + /// Booleans. + Bool, + /// The type is spanned by integer values. The range or ranges give the set of allowed values. + /// The second range is only useful for `char`. + Integers { range_1: IntRange, range_2: Option }, + /// The type is matched by slices. The usize is the compile-time length of the array, if known. + Slice(Option), + /// The type is matched by slices whose elements are uninhabited. + SliceOfEmpty, + /// The constructors cannot be listed, and the type cannot be matched exhaustively. E.g. `str`, + /// floats. + Unlistable, + /// The type has no inhabitants. + Uninhabited, } -/// A wildcard constructor that we split relative to the constructors in the matrix, as explained -/// at the top of the file. +/// Describes the result of analyzing the constructors in a column of a match. /// -/// A constructor that is not present in the matrix rows will only be covered by the rows that have -/// wildcards. Thus we can group all of those constructors together; we call them "missing -/// constructors". Splitting a wildcard would therefore list all present constructors individually -/// (or grouped if they are integers or slices), and then all missing constructors together as a -/// group. +/// `present` is morally the set of constructors present in the column, and `missing` is the set of +/// constructors that exist in the type but are not present in the column. /// -/// However we can go further: since any constructor will match the wildcard rows, and having more -/// rows can only reduce the amount of usefulness witnesses, we can skip the present constructors -/// and only try the missing ones. -/// This will not preserve the whole list of witnesses, but will preserve whether the list is empty -/// or not. In fact this is quite natural from the point of view of diagnostics too. This is done -/// in `to_ctors`: in some cases we only return `Missing`. +/// More formally, they respect the following constraints: +/// - the union of `present` and `missing` covers the whole type +/// - `present` and `missing` are disjoint +/// - neither contains wildcards +/// - each constructor in `present` is covered by some non-wildcard constructor in the column +/// - together, the constructors in `present` cover all the non-wildcard constructor in the column +/// - non-wildcards in the column do no cover anything in `missing` +/// - constructors in `present` and `missing` are split for the column; in other words, they are +/// either fully included in or disjoint from each constructor in the column. This avoids +/// non-trivial intersections like between `0..10` and `5..15`. #[derive(Debug)] -pub(super) struct SplitWildcard<'tcx> { - /// Constructors (other than wildcards and opaques) seen in the matrix. - matrix_ctors: Vec>, - /// All the constructors for this type - all_ctors: SmallVec<[Constructor<'tcx>; 1]>, +pub(super) struct SplitConstructorSet<'tcx> { + pub(super) present: SmallVec<[Constructor<'tcx>; 1]>, + pub(super) missing: Vec>, } -impl<'tcx> SplitWildcard<'tcx> { - pub(super) fn new<'p>(pcx: &PatCtxt<'_, 'p, 'tcx>) -> Self { - debug!("SplitWildcard::new({:?})", pcx.ty); - let cx = pcx.cx; +impl ConstructorSet { + #[instrument(level = "debug", skip(cx), ret)] + pub(super) fn for_ty<'p, 'tcx>(cx: &MatchCheckCtxt<'p, 'tcx>, ty: Ty<'tcx>) -> Self { let make_range = |start, end| { - IntRange( - // `unwrap()` is ok because we know the type is an integer. 
- IntRange::from_range(cx.tcx, start, end, pcx.ty, &RangeEnd::Included).unwrap(), + IntRange::from_range( + MaybeInfiniteInt::new_finite(cx.tcx, ty, start), + MaybeInfiniteInt::new_finite(cx.tcx, ty, end), + RangeEnd::Included, ) }; - // This determines the set of all possible constructors for the type `pcx.ty`. For numbers, + // This determines the set of all possible constructors for the type `ty`. For numbers, // arrays and slices we use ranges and variable-length slices when appropriate. // // If the `exhaustive_patterns` feature is enabled, we make sure to omit constructors that // are statically impossible. E.g., for `Option`, we do not include `Some(_)` in the // returned list of constructors. - // Invariant: this is empty if and only if the type is uninhabited (as determined by + // Invariant: this is `Uninhabited` if and only if the type is uninhabited (as determined by // `cx.is_uninhabited()`). - let all_ctors = match pcx.ty.kind() { - ty::Bool => smallvec![make_range(0, 1)], + match ty.kind() { + ty::Bool => Self::Bool, + ty::Char => { + // The valid Unicode Scalar Value ranges. + Self::Integers { + range_1: make_range('\u{0000}' as u128, '\u{D7FF}' as u128), + range_2: Some(make_range('\u{E000}' as u128, '\u{10FFFF}' as u128)), + } + } + &ty::Int(ity) => { + let range = if ty.is_ptr_sized_integral() + && !cx.tcx.features().precise_pointer_size_matching + { + // The min/max values of `isize` are not allowed to be observed unless the + // `precise_pointer_size_matching` feature is enabled. + IntRange { lo: NegInfinity, hi: PosInfinity } + } else { + let bits = Integer::from_int_ty(&cx.tcx, ity).size().bits() as u128; + let min = 1u128 << (bits - 1); + let max = min - 1; + make_range(min, max) + }; + Self::Integers { range_1: range, range_2: None } + } + &ty::Uint(uty) => { + let range = if ty.is_ptr_sized_integral() + && !cx.tcx.features().precise_pointer_size_matching + { + // The max value of `usize` is not allowed to be observed unless the + // `precise_pointer_size_matching` feature is enabled. + let lo = MaybeInfiniteInt::new_finite(cx.tcx, ty, 0); + IntRange { lo, hi: PosInfinity } + } else { + let size = Integer::from_uint_ty(&cx.tcx, uty).size(); + let max = size.truncate(u128::MAX); + make_range(0, max) + }; + Self::Integers { range_1: range, range_2: None } + } ty::Array(sub_ty, len) if len.try_eval_target_usize(cx.tcx, cx.param_env).is_some() => { let len = len.eval_target_usize(cx.tcx, cx.param_env) as usize; if len != 0 && cx.is_uninhabited(*sub_ty) { - smallvec![] + Self::Uninhabited } else { - smallvec![Slice(Slice::new(Some(len), VarLen(0, 0)))] + Self::Slice(Some(len)) } } // Treat arrays of a constant but unknown length like slices. ty::Array(sub_ty, _) | ty::Slice(sub_ty) => { - let kind = if cx.is_uninhabited(*sub_ty) { FixedLen(0) } else { VarLen(0, 0) }; - smallvec![Slice(Slice::new(None, kind))] + if cx.is_uninhabited(*sub_ty) { + Self::SliceOfEmpty + } else { + Self::Slice(None) + } } ty::Adt(def, args) if def.is_enum() => { // If the enum is declared as `#[non_exhaustive]`, we treat it as if it had an @@ -939,19 +1023,14 @@ // // we don't want to show every possible IO error, but instead have only `_` as the // witness. 
- let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(pcx.ty); - - let is_exhaustive_pat_feature = cx.tcx.features().exhaustive_patterns; + let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty); - // If `exhaustive_patterns` is disabled and our scrutinee is an empty enum, we treat it - // as though it had an "unknown" constructor to avoid exposing its emptiness. The - // exception is if the pattern is at the top level, because we want empty matches to be - // considered exhaustive. - let is_secretly_empty = - def.variants().is_empty() && !is_exhaustive_pat_feature && !pcx.is_top_level; - - let mut ctors: SmallVec<[_; 1]> = - def.variants() + if def.variants().is_empty() && !is_declared_nonexhaustive { + Self::Uninhabited + } else { + let is_exhaustive_pat_feature = cx.tcx.features().exhaustive_patterns; + let (hidden_variants, visible_variants) = def + .variants() .iter_enumerated() .filter(|(_, v)| { // If `exhaustive_patterns` is enabled, we exclude variants known to be @@ -961,135 +1040,188 @@ .instantiate(cx.tcx, args) .apply(cx.tcx, cx.param_env, cx.module) }) - .map(|(idx, _)| Variant(idx)) - .collect(); - - if is_secretly_empty || is_declared_nonexhaustive { - ctors.push(NonExhaustive); + .map(|(idx, _)| idx) + .partition(|idx| { + let variant_def_id = def.variant(*idx).def_id; + // Filter variants that depend on a disabled unstable feature. + let is_unstable = matches!( + cx.tcx.eval_stability(variant_def_id, None, DUMMY_SP, None), + EvalResult::Deny { .. } + ); + // Filter foreign `#[doc(hidden)]` variants. + let is_doc_hidden = + cx.tcx.is_doc_hidden(variant_def_id) && !variant_def_id.is_local(); + is_unstable || is_doc_hidden + }); + + Self::Variants { + visible_variants, + hidden_variants, + non_exhaustive: is_declared_nonexhaustive, + } } - ctors - } - ty::Char => { - smallvec![ - // The valid Unicode Scalar Value ranges. - make_range('\u{0000}' as u128, '\u{D7FF}' as u128), - make_range('\u{E000}' as u128, '\u{10FFFF}' as u128), - ] - } - ty::Int(_) | ty::Uint(_) - if pcx.ty.is_ptr_sized_integral() - && !cx.tcx.features().precise_pointer_size_matching => - { - // `usize`/`isize` are not allowed to be matched exhaustively unless the - // `precise_pointer_size_matching` feature is enabled. So we treat those types like - // `#[non_exhaustive]` enums by returning a special unmatchable constructor. - smallvec![NonExhaustive] - } - &ty::Int(ity) => { - let bits = Integer::from_int_ty(&cx.tcx, ity).size().bits() as u128; - let min = 1u128 << (bits - 1); - let max = min - 1; - smallvec![make_range(min, max)] } - &ty::Uint(uty) => { - let size = Integer::from_uint_ty(&cx.tcx, uty).size(); - let max = size.truncate(u128::MAX); - smallvec![make_range(0, max)] - } - // If `exhaustive_patterns` is disabled and our scrutinee is the never type, we cannot - // expose its emptiness. The exception is if the pattern is at the top level, because we - // want empty matches to be considered exhaustive. - ty::Never if !cx.tcx.features().exhaustive_patterns && !pcx.is_top_level => { - smallvec![NonExhaustive] - } - ty::Never => smallvec![], - _ if cx.is_uninhabited(pcx.ty) => smallvec![], - ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => smallvec![Single], + ty::Never => Self::Uninhabited, + _ if cx.is_uninhabited(ty) => Self::Uninhabited, + ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => Self::Single, // This type is one for which we cannot list constructors, like `str` or `f64`. 
- _ => smallvec![NonExhaustive], - }; - - SplitWildcard { matrix_ctors: Vec::new(), all_ctors } + _ => Self::Unlistable, + } } - /// Pass a set of constructors relative to which to split this one. Don't call twice, it won't - /// do what you want. - pub(super) fn split<'a>( - &mut self, + /// This is the core logical operation of exhaustiveness checking. This analyzes a column a + /// constructors to 1/ determine which constructors of the type (if any) are missing; 2/ split + /// constructors to handle non-trivial intersections e.g. on ranges or slices. + #[instrument(level = "debug", skip(self, pcx, ctors), ret)] + pub(super) fn split<'a, 'tcx>( + &self, pcx: &PatCtxt<'_, '_, 'tcx>, ctors: impl Iterator> + Clone, - ) where + ) -> SplitConstructorSet<'tcx> + where 'tcx: 'a, { - // Since `all_ctors` never contains wildcards, this won't recurse further. - self.all_ctors = - self.all_ctors.iter().flat_map(|ctor| ctor.split(pcx, ctors.clone())).collect(); - self.matrix_ctors = ctors.filter(|c| !matches!(c, Wildcard | Opaque)).cloned().collect(); - } + let mut present: SmallVec<[_; 1]> = SmallVec::new(); + let mut missing = Vec::new(); + // Constructors in `ctors`, except wildcards. + let mut seen = ctors.filter(|c| !(matches!(c, Opaque | Wildcard))); + match self { + ConstructorSet::Single => { + if seen.next().is_none() { + missing.push(Single); + } else { + present.push(Single); + } + } + ConstructorSet::Variants { visible_variants, hidden_variants, non_exhaustive } => { + let seen_set: FxHashSet<_> = seen.map(|c| c.as_variant().unwrap()).collect(); + let mut skipped_a_hidden_variant = false; + + for variant in visible_variants { + let ctor = Variant(*variant); + if seen_set.contains(&variant) { + present.push(ctor); + } else { + missing.push(ctor); + } + } - /// Whether there are any value constructors for this type that are not present in the matrix. - fn any_missing(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> bool { - self.iter_missing(pcx).next().is_some() - } + for variant in hidden_variants { + let ctor = Variant(*variant); + if seen_set.contains(&variant) { + present.push(ctor); + } else { + skipped_a_hidden_variant = true; + } + } + if skipped_a_hidden_variant { + missing.push(Hidden); + } - /// Iterate over the constructors for this type that are not present in the matrix. - pub(super) fn iter_missing<'a, 'p>( - &'a self, - pcx: &'a PatCtxt<'a, 'p, 'tcx>, - ) -> impl Iterator> + Captures<'p> { - self.all_ctors.iter().filter(move |ctor| !ctor.is_covered_by_any(pcx, &self.matrix_ctors)) - } - - /// Return the set of constructors resulting from splitting the wildcard. As explained at the - /// top of the file, if any constructors are missing we can ignore the present ones. - fn into_ctors(self, pcx: &PatCtxt<'_, '_, 'tcx>) -> SmallVec<[Constructor<'tcx>; 1]> { - if self.any_missing(pcx) { - // Some constructors are missing, thus we can specialize with the special `Missing` - // constructor, which stands for those constructors that are not seen in the matrix, - // and matches the same rows as any of them (namely the wildcard rows). See the top of - // the file for details. - // However, when all constructors are missing we can also specialize with the full - // `Wildcard` constructor. The difference will depend on what we want in diagnostics. - - // If some constructors are missing, we typically want to report those constructors, - // e.g.: - // ``` - // enum Direction { N, S, E, W } - // let Direction::N = ...; - // ``` - // we can report 3 witnesses: `S`, `E`, and `W`. 
- // - // However, if the user didn't actually specify a constructor - // in this arm, e.g., in - // ``` - // let x: (Direction, Direction, bool) = ...; - // let (_, _, false) = x; - // ``` - // we don't want to show all 16 possible witnesses `(, , - // true)` - we are satisfied with `(_, _, true)`. So if all constructors are missing we - // prefer to report just a wildcard `_`. - // - // The exception is: if we are at the top-level, for example in an empty match, we - // sometimes prefer reporting the list of constructors instead of just `_`. - let report_when_all_missing = pcx.is_top_level && !IntRange::is_integral(pcx.ty); - let ctor = if !self.matrix_ctors.is_empty() || report_when_all_missing { - if pcx.is_non_exhaustive { - Missing { - nonexhaustive_enum_missing_real_variants: self - .iter_missing(pcx) - .any(|c| !(c.is_non_exhaustive() || c.is_unstable_variant(pcx))), + if *non_exhaustive { + missing.push(NonExhaustive); + } + } + ConstructorSet::Bool => { + let mut seen_false = false; + let mut seen_true = false; + for b in seen.map(|ctor| ctor.as_bool().unwrap()) { + if b { + seen_true = true; + } else { + seen_false = true; } + } + if seen_false { + present.push(Bool(false)); } else { - Missing { nonexhaustive_enum_missing_real_variants: false } + missing.push(Bool(false)); } - } else { - Wildcard - }; - return smallvec![ctor]; + if seen_true { + present.push(Bool(true)); + } else { + missing.push(Bool(true)); + } + } + ConstructorSet::Integers { range_1, range_2 } => { + let seen_ranges: Vec<_> = + seen.map(|ctor| ctor.as_int_range().unwrap().clone()).collect(); + for (seen, splitted_range) in range_1.split(seen_ranges.iter().cloned()) { + match seen { + Presence::Unseen => missing.push(IntRange(splitted_range)), + Presence::Seen => present.push(IntRange(splitted_range)), + } + } + if let Some(range_2) = range_2 { + for (seen, splitted_range) in range_2.split(seen_ranges.into_iter()) { + match seen { + Presence::Unseen => missing.push(IntRange(splitted_range)), + Presence::Seen => present.push(IntRange(splitted_range)), + } + } + } + } + &ConstructorSet::Slice(array_len) => { + let seen_slices = seen.map(|c| c.as_slice().unwrap()); + let base_slice = Slice::new(array_len, VarLen(0, 0)); + for (seen, splitted_slice) in base_slice.split(seen_slices) { + let ctor = Slice(splitted_slice); + match seen { + Presence::Unseen => missing.push(ctor), + Presence::Seen => present.push(ctor), + } + } + } + ConstructorSet::SliceOfEmpty => { + // This one is tricky because even though there's only one possible value of this + // type (namely `[]`), slice patterns of all lengths are allowed, they're just + // unreachable if length != 0. + // We still gather the seen constructors in `present`, but the only slice that can + // go in `missing` is `[]`. + let seen_slices = seen.map(|c| c.as_slice().unwrap()); + let base_slice = Slice::new(None, VarLen(0, 0)); + for (seen, splitted_slice) in base_slice.split(seen_slices) { + let ctor = Slice(splitted_slice); + match seen { + Presence::Seen => present.push(ctor), + Presence::Unseen if splitted_slice.arity() == 0 => { + missing.push(Slice(Slice::new(None, FixedLen(0)))) + } + Presence::Unseen => {} + } + } + } + ConstructorSet::Unlistable => { + // Since we can't list constructors, we take the ones in the column. This might list + // some constructors several times but there's not much we can do. 
+ present.extend(seen.cloned()); + missing.push(NonExhaustive); + } + // If `exhaustive_patterns` is disabled and our scrutinee is an empty type, we cannot + // expose its emptiness. The exception is if the pattern is at the top level, because we + // want empty matches to be considered exhaustive. + ConstructorSet::Uninhabited + if !pcx.cx.tcx.features().exhaustive_patterns && !pcx.is_top_level => + { + missing.push(NonExhaustive); + } + ConstructorSet::Uninhabited => {} } - // All the constructors are present in the matrix, so we just go through them all. - self.all_ctors + SplitConstructorSet { present, missing } + } + + /// Compute the set of constructors missing from this column. + /// This is only used for reporting to the user. + pub(super) fn compute_missing<'a, 'tcx>( + &self, + pcx: &PatCtxt<'_, '_, 'tcx>, + ctors: impl Iterator> + Clone, + ) -> Vec> + where + 'tcx: 'a, + { + self.split(pcx, ctors).missing } } @@ -1202,11 +1334,14 @@ } _ => bug!("bad slice pattern {:?} {:?}", constructor, pcx), }, - Str(..) - | FloatRange(..) + Bool(..) | IntRange(..) - | NonExhaustive + | F32Range(..) + | F64Range(..) + | Str(..) | Opaque + | NonExhaustive + | Hidden | Missing { .. } | Wildcard => Fields::empty(), Or => { @@ -1227,9 +1362,10 @@ /// Values and patterns can be represented as a constructor applied to some fields. This represents /// a pattern in this form. -/// This also keeps track of whether the pattern has been found reachable during analysis. For this -/// reason we should be careful not to clone patterns for which we care about that. Use -/// `clone_and_forget_reachability` if you're sure. +/// This also uses interior mutability to keep track of whether the pattern has been found reachable +/// during analysis. For this reason they cannot be cloned. +/// A `DeconstructedPat` will almost always come from user input; the only exception are some +/// `Wildcard`s introduced during specialization. pub(crate) struct DeconstructedPat<'p, 'tcx> { ctor: Constructor<'tcx>, fields: Fields<'p, 'tcx>, @@ -1252,26 +1388,13 @@ DeconstructedPat { ctor, fields, ty, span, reachable: Cell::new(false) } } - /// Construct a pattern that matches everything that starts with this constructor. - /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern - /// `Some(_)`. - pub(super) fn wild_from_ctor(pcx: &PatCtxt<'_, 'p, 'tcx>, ctor: Constructor<'tcx>) -> Self { - let fields = Fields::wildcards(pcx, &ctor); - DeconstructedPat::new(ctor, fields, pcx.ty, pcx.span) - } - - /// Clone this value. This method emphasizes that cloning loses reachability information and - /// should be done carefully. - pub(super) fn clone_and_forget_reachability(&self) -> Self { - DeconstructedPat::new(self.ctor.clone(), self.fields, self.ty, self.span) - } - pub(crate) fn from_pat(cx: &MatchCheckCtxt<'p, 'tcx>, pat: &Pat<'tcx>) -> Self { let mkpat = |pat| DeconstructedPat::from_pat(cx, pat); let ctor; let fields; match &pat.kind { - PatKind::AscribeUserType { subpattern, .. } => return mkpat(subpattern), + PatKind::AscribeUserType { subpattern, .. } + | PatKind::InlineConstant { subpattern, .. } => return mkpat(subpattern), PatKind::Binding { subpattern: Some(subpat), .. } => return mkpat(subpat), PatKind::Binding { subpattern: None, .. 
} | PatKind::Wild => { ctor = Wildcard; @@ -1343,50 +1466,95 @@ } } PatKind::Constant { value } => { - if let Some(int_range) = IntRange::from_constant(cx.tcx, cx.param_env, *value) { - ctor = IntRange(int_range); - fields = Fields::empty(); - } else { - match pat.ty.kind() { - ty::Float(_) => { - ctor = FloatRange(*value, *value, RangeEnd::Included); - fields = Fields::empty(); - } - ty::Ref(_, t, _) if t.is_str() => { - // We want a `&str` constant to behave like a `Deref` pattern, to be compatible - // with other `Deref` patterns. This could have been done in `const_to_pat`, - // but that causes issues with the rest of the matching code. - // So here, the constructor for a `"foo"` pattern is `&` (represented by - // `Single`), and has one field. That field has constructor `Str(value)` and no - // fields. - // Note: `t` is `str`, not `&str`. - let subpattern = - DeconstructedPat::new(Str(*value), Fields::empty(), *t, pat.span); - ctor = Single; - fields = Fields::singleton(cx, subpattern) - } - // All constants that can be structurally matched have already been expanded - // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are - // opaque. - _ => { - ctor = Opaque; - fields = Fields::empty(); - } + match pat.ty.kind() { + ty::Bool => { + ctor = match value.try_eval_bool(cx.tcx, cx.param_env) { + Some(b) => Bool(b), + None => Opaque, + }; + fields = Fields::empty(); + } + ty::Char | ty::Int(_) | ty::Uint(_) => { + ctor = match value.try_eval_bits(cx.tcx, cx.param_env) { + Some(bits) => IntRange(IntRange::from_bits(cx.tcx, pat.ty, bits)), + None => Opaque, + }; + fields = Fields::empty(); + } + ty::Float(ty::FloatTy::F32) => { + ctor = match value.try_eval_bits(cx.tcx, cx.param_env) { + Some(bits) => { + use rustc_apfloat::Float; + let value = rustc_apfloat::ieee::Single::from_bits(bits); + F32Range(value, value, RangeEnd::Included) + } + None => Opaque, + }; + fields = Fields::empty(); + } + ty::Float(ty::FloatTy::F64) => { + ctor = match value.try_eval_bits(cx.tcx, cx.param_env) { + Some(bits) => { + use rustc_apfloat::Float; + let value = rustc_apfloat::ieee::Double::from_bits(bits); + F64Range(value, value, RangeEnd::Included) + } + None => Opaque, + }; + fields = Fields::empty(); + } + ty::Ref(_, t, _) if t.is_str() => { + // We want a `&str` constant to behave like a `Deref` pattern, to be compatible + // with other `Deref` patterns. This could have been done in `const_to_pat`, + // but that causes issues with the rest of the matching code. + // So here, the constructor for a `"foo"` pattern is `&` (represented by + // `Single`), and has one field. That field has constructor `Str(value)` and no + // fields. + // Note: `t` is `str`, not `&str`. + let subpattern = + DeconstructedPat::new(Str(*value), Fields::empty(), *t, pat.span); + ctor = Single; + fields = Fields::singleton(cx, subpattern) + } + // All constants that can be structurally matched have already been expanded + // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are + // opaque. + _ => { + ctor = Opaque; + fields = Fields::empty(); } } } - &PatKind::Range(box PatRange { lo, hi, end }) => { - let ty = lo.ty(); - ctor = if let Some(int_range) = IntRange::from_range( - cx.tcx, - lo.eval_bits(cx.tcx, cx.param_env), - hi.eval_bits(cx.tcx, cx.param_env), - ty, - &end, - ) { - IntRange(int_range) - } else { - FloatRange(lo, hi, end) + PatKind::Range(box PatRange { lo, hi, end, .. 
}) => { + let ty = pat.ty; + ctor = match ty.kind() { + ty::Char | ty::Int(_) | ty::Uint(_) => { + let lo = + MaybeInfiniteInt::from_pat_range_bdy(*lo, ty, cx.tcx, cx.param_env); + let hi = + MaybeInfiniteInt::from_pat_range_bdy(*hi, ty, cx.tcx, cx.param_env); + IntRange(IntRange::from_range(lo, hi, *end)) + } + ty::Float(fty) => { + use rustc_apfloat::Float; + let lo = lo.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env)); + let hi = hi.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env)); + match fty { + ty::FloatTy::F32 => { + use rustc_apfloat::ieee::Single; + let lo = lo.map(Single::from_bits).unwrap_or(-Single::INFINITY); + let hi = hi.map(Single::from_bits).unwrap_or(Single::INFINITY); + F32Range(lo, hi, *end) + } + ty::FloatTy::F64 => { + use rustc_apfloat::ieee::Double; + let lo = lo.map(Double::from_bits).unwrap_or(-Double::INFINITY); + let hi = hi.map(Double::from_bits).unwrap_or(Double::INFINITY); + F64Range(lo, hi, *end) + } + } + } + _ => bug!("invalid type for range pattern: {}", ty), }; fields = Fields::empty(); } @@ -1412,103 +1580,24 @@ let pats = expand_or_pat(pat); fields = Fields::from_iter(cx, pats.into_iter().map(mkpat)); } + PatKind::Error(_) => { + ctor = Opaque; + fields = Fields::empty(); + } } DeconstructedPat::new(ctor, fields, pat.ty, pat.span) } - pub(crate) fn to_pat(&self, cx: &MatchCheckCtxt<'p, 'tcx>) -> Pat<'tcx> { - let is_wildcard = |pat: &Pat<'_>| { - matches!(pat.kind, PatKind::Binding { subpattern: None, .. } | PatKind::Wild) - }; - let mut subpatterns = self.iter_fields().map(|p| Box::new(p.to_pat(cx))); - let kind = match &self.ctor { - Single | Variant(_) => match self.ty.kind() { - ty::Tuple(..) => PatKind::Leaf { - subpatterns: subpatterns - .enumerate() - .map(|(i, pattern)| FieldPat { field: FieldIdx::new(i), pattern }) - .collect(), - }, - ty::Adt(adt_def, _) if adt_def.is_box() => { - // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside - // of `std`). So this branch is only reachable when the feature is enabled and - // the pattern is a box pattern. - PatKind::Deref { subpattern: subpatterns.next().unwrap() } - } - ty::Adt(adt_def, args) => { - let variant_index = self.ctor.variant_index_for_adt(*adt_def); - let variant = &adt_def.variant(variant_index); - let subpatterns = Fields::list_variant_nonhidden_fields(cx, self.ty, variant) - .zip(subpatterns) - .map(|((field, _ty), pattern)| FieldPat { field, pattern }) - .collect(); - - if adt_def.is_enum() { - PatKind::Variant { adt_def: *adt_def, args, variant_index, subpatterns } - } else { - PatKind::Leaf { subpatterns } - } - } - // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should - // be careful to reconstruct the correct constant pattern here. However a string - // literal pattern will never be reported as a non-exhaustiveness witness, so we - // ignore this issue. - ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() }, - _ => bug!("unexpected ctor for type {:?} {:?}", self.ctor, self.ty), - }, - Slice(slice) => { - match slice.kind { - FixedLen(_) => PatKind::Slice { - prefix: subpatterns.collect(), - slice: None, - suffix: Box::new([]), - }, - VarLen(prefix, _) => { - let mut subpatterns = subpatterns.peekable(); - let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect(); - if slice.array_len.is_some() { - // Improves diagnostics a bit: if the type is a known-size array, instead - // of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`. 
-                            // This is incorrect if the size is not known, since `[_, ..]` captures
-                            // arrays of lengths `>= 1` whereas `[..]` captures any length.
-                            while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) {
-                                prefix.pop();
-                            }
-                            while subpatterns.peek().is_some()
-                                && is_wildcard(subpatterns.peek().unwrap())
-                            {
-                                subpatterns.next();
-                            }
-                        }
-                        let suffix: Box<[_]> = subpatterns.collect();
-                        let wild = Pat::wildcard_from_ty(self.ty);
-                        PatKind::Slice {
-                            prefix: prefix.into_boxed_slice(),
-                            slice: Some(Box::new(wild)),
-                            suffix,
-                        }
-                    }
-                }
-            }
-            &Str(value) => PatKind::Constant { value },
-            &FloatRange(lo, hi, end) => PatKind::Range(Box::new(PatRange { lo, hi, end })),
-            IntRange(range) => return range.to_pat(cx.tcx, self.ty),
-            Wildcard | NonExhaustive => PatKind::Wild,
-            Missing { .. } => bug!(
-                "trying to convert a `Missing` constructor into a `Pat`; this is probably a bug,
-                `Missing` should have been processed in `apply_constructors`"
-            ),
-            Opaque | Or => {
-                bug!("can't convert to pattern: {:?}", self)
-            }
-        };
-
-        Pat { ty: self.ty, span: DUMMY_SP, kind }
-    }
-
     pub(super) fn is_or_pat(&self) -> bool {
         matches!(self.ctor, Or)
     }
+    pub(super) fn flatten_or_pat(&'p self) -> SmallVec<[&'p Self; 1]> {
+        if self.is_or_pat() {
+            self.iter_fields().flat_map(|p| p.flatten_or_pat()).collect()
+        } else {
+            smallvec![self]
+        }
+    }
 
     pub(super) fn ctor(&self) -> &Constructor<'tcx> {
         &self.ctor
@@ -1673,21 +1762,151 @@
                 }
                 write!(f, "]")
             }
-            &FloatRange(lo, hi, end) => {
-                write!(f, "{lo}")?;
-                write!(f, "{end}")?;
-                write!(f, "{hi}")
-            }
-            IntRange(range) => write!(f, "{range:?}"), // Best-effort, will render e.g. `false` as `0..=0`
-            Wildcard | Missing { .. } | NonExhaustive => write!(f, "_ : {:?}", self.ty),
+            Bool(b) => write!(f, "{b}"),
+            // Best-effort, will render signed ranges incorrectly
+            IntRange(range) => write!(f, "{range:?}"),
+            F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
+            F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
+            Str(value) => write!(f, "{value}"),
+            Opaque => write!(f, "<constant pattern>"),
             Or => {
                 for pat in self.iter_fields() {
                     write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
                 }
                 Ok(())
             }
-            Str(value) => write!(f, "{value}"),
-            Opaque => write!(f, "<constant pattern>"),
+            Wildcard | Missing { .. } | NonExhaustive | Hidden => write!(f, "_ : {:?}", self.ty),
         }
     }
 }
+
+/// Same idea as `DeconstructedPat`, except this is a fictitious pattern built up for diagnostics
+/// purposes. As such they don't use interning and can be cloned.
+#[derive(Debug, Clone)]
+pub(crate) struct WitnessPat<'tcx> {
+    ctor: Constructor<'tcx>,
+    pub(crate) fields: Vec<WitnessPat<'tcx>>,
+    ty: Ty<'tcx>,
+}
+
+impl<'tcx> WitnessPat<'tcx> {
+    pub(super) fn new(ctor: Constructor<'tcx>, fields: Vec<WitnessPat<'tcx>>, ty: Ty<'tcx>) -> Self {
+        Self { ctor, fields, ty }
+    }
+    pub(super) fn wildcard(ty: Ty<'tcx>) -> Self {
+        Self::new(Wildcard, Vec::new(), ty)
+    }
+
+    /// Construct a pattern that matches everything that starts with this constructor.
+    /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern
+    /// `Some(_)`.
+    pub(super) fn wild_from_ctor(pcx: &PatCtxt<'_, '_, 'tcx>, ctor: Constructor<'tcx>) -> Self {
+        // Reuse `Fields::wildcards` to get the types.
+        let fields = Fields::wildcards(pcx, &ctor)
+            .iter_patterns()
+            .map(|deco_pat| Self::wildcard(deco_pat.ty()))
+            .collect();
+        Self::new(ctor, fields, pcx.ty)
+    }
+
+    pub(super) fn ctor(&self) -> &Constructor<'tcx> {
+        &self.ctor
+    }
+    pub(super) fn ty(&self) -> Ty<'tcx> {
+        self.ty
+    }
+
+    /// Convert back to a `thir::Pat` for diagnostic purposes.
This panics for patterns that don't + /// appear in diagnostics, like float ranges. + pub(crate) fn to_diagnostic_pat(&self, cx: &MatchCheckCtxt<'_, 'tcx>) -> Pat<'tcx> { + let is_wildcard = |pat: &Pat<'_>| matches!(pat.kind, PatKind::Wild); + let mut subpatterns = self.iter_fields().map(|p| Box::new(p.to_diagnostic_pat(cx))); + let kind = match &self.ctor { + Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) }, + IntRange(range) => return range.to_diagnostic_pat(self.ty, cx.tcx), + Single | Variant(_) => match self.ty.kind() { + ty::Tuple(..) => PatKind::Leaf { + subpatterns: subpatterns + .enumerate() + .map(|(i, pattern)| FieldPat { field: FieldIdx::new(i), pattern }) + .collect(), + }, + ty::Adt(adt_def, _) if adt_def.is_box() => { + // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside + // of `std`). So this branch is only reachable when the feature is enabled and + // the pattern is a box pattern. + PatKind::Deref { subpattern: subpatterns.next().unwrap() } + } + ty::Adt(adt_def, args) => { + let variant_index = self.ctor.variant_index_for_adt(*adt_def); + let variant = &adt_def.variant(variant_index); + let subpatterns = Fields::list_variant_nonhidden_fields(cx, self.ty, variant) + .zip(subpatterns) + .map(|((field, _ty), pattern)| FieldPat { field, pattern }) + .collect(); + + if adt_def.is_enum() { + PatKind::Variant { adt_def: *adt_def, args, variant_index, subpatterns } + } else { + PatKind::Leaf { subpatterns } + } + } + // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should + // be careful to reconstruct the correct constant pattern here. However a string + // literal pattern will never be reported as a non-exhaustiveness witness, so we + // ignore this issue. + ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() }, + _ => bug!("unexpected ctor for type {:?} {:?}", self.ctor, self.ty), + }, + Slice(slice) => { + match slice.kind { + FixedLen(_) => PatKind::Slice { + prefix: subpatterns.collect(), + slice: None, + suffix: Box::new([]), + }, + VarLen(prefix, _) => { + let mut subpatterns = subpatterns.peekable(); + let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect(); + if slice.array_len.is_some() { + // Improves diagnostics a bit: if the type is a known-size array, instead + // of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`. + // This is incorrect if the size is not known, since `[_, ..]` captures + // arrays of lengths `>= 1` whereas `[..]` captures any length. + while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) { + prefix.pop(); + } + while subpatterns.peek().is_some() + && is_wildcard(subpatterns.peek().unwrap()) + { + subpatterns.next(); + } + } + let suffix: Box<[_]> = subpatterns.collect(); + let wild = Pat::wildcard_from_ty(self.ty); + PatKind::Slice { + prefix: prefix.into_boxed_slice(), + slice: Some(Box::new(wild)), + suffix, + } + } + } + } + &Str(value) => PatKind::Constant { value }, + Wildcard | NonExhaustive | Hidden => PatKind::Wild, + Missing { .. } => bug!( + "trying to convert a `Missing` constructor into a `Pat`; this is probably a bug, + `Missing` should have been processed in `apply_constructors`" + ), + F32Range(..) | F64Range(..) 
| Opaque | Or => { + bug!("can't convert to pattern: {:?}", self) + } + }; + + Pat { ty: self.ty, span: DUMMY_SP, kind } + } + + pub(super) fn iter_fields<'a>(&'a self) -> impl Iterator> { + self.fields.iter() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -17,18 +17,19 @@ use rustc_hir::pat_util::EnumerateAndAdjustIterator; use rustc_hir::RangeEnd; use rustc_index::Idx; -use rustc_middle::mir::interpret::{ - ErrorHandled, GlobalId, LitToConstError, LitToConstInput, Scalar, +use rustc_middle::mir::interpret::{ErrorHandled, GlobalId, LitToConstError, LitToConstInput}; +use rustc_middle::mir::{self, BorrowKind, Const, Mutability, UserTypeProjection}; +use rustc_middle::thir::{ + Ascription, BindingMode, FieldPat, LocalVarId, Pat, PatKind, PatRange, PatRangeBoundary, }; -use rustc_middle::mir::{self, Const, UserTypeProjection}; -use rustc_middle::mir::{BorrowKind, Mutability}; -use rustc_middle::thir::{Ascription, BindingMode, FieldPat, LocalVarId, Pat, PatKind, PatRange}; -use rustc_middle::ty::CanonicalUserTypeAnnotation; -use rustc_middle::ty::TypeVisitableExt; -use rustc_middle::ty::{self, AdtDef, Region, Ty, TyCtxt, UserType}; -use rustc_middle::ty::{GenericArg, GenericArgsRef}; -use rustc_span::{Span, Symbol}; -use rustc_target::abi::FieldIdx; +use rustc_middle::ty::layout::IntegerExt; +use rustc_middle::ty::{ + self, AdtDef, CanonicalUserTypeAnnotation, GenericArg, GenericArgsRef, Region, Ty, TyCtxt, + TypeVisitableExt, UserType, +}; +use rustc_span::def_id::LocalDefId; +use rustc_span::{ErrorGuaranteed, Span, Symbol}; +use rustc_target::abi::{FieldIdx, Integer}; use std::cmp::Ordering; @@ -85,127 +86,164 @@ ) } - fn lower_range_expr( + fn lower_pattern_range_endpoint( &mut self, - expr: &'tcx hir::Expr<'tcx>, - ) -> (PatKind<'tcx>, Option>) { - match self.lower_lit(expr) { - PatKind::AscribeUserType { ascription, subpattern: box Pat { kind, .. } } => { - (kind, Some(ascription)) + expr: Option<&'tcx hir::Expr<'tcx>>, + ) -> Result< + (Option>, Option>, Option), + ErrorGuaranteed, + > { + match expr { + None => Ok((None, None, None)), + Some(expr) => { + let (kind, ascr, inline_const) = match self.lower_lit(expr) { + PatKind::InlineConstant { subpattern, def } => { + (subpattern.kind, None, Some(def)) + } + PatKind::AscribeUserType { ascription, subpattern: box Pat { kind, .. } } => { + (kind, Some(ascription), None) + } + kind => (kind, None, None), + }; + let value = if let PatKind::Constant { value } = kind { + value + } else { + let msg = format!( + "found bad range pattern endpoint `{expr:?}` outside of error recovery" + ); + return Err(self.tcx.sess.delay_span_bug(expr.span, msg)); + }; + Ok((Some(PatRangeBoundary::Finite(value)), ascr, inline_const)) + } + } + } + + /// Overflowing literals are linted against in a late pass. This is mostly fine, except when we + /// encounter a range pattern like `-130i8..2`: if we believe `eval_bits`, this looks like a + /// range where the endpoints are in the wrong order. To avoid a confusing error message, we + /// check for overflow then. + /// This is only called when the range is already known to be malformed. 
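The check described above exists for ranges such as `-130i8..2`: the written literal overflows the type, so after wrapping, the endpoints would compare in the wrong order and the generic "lower bound must be less than upper" error would be misleading. A minimal sketch of the bounds test, hard-coded to `i8` (simplified; not the compiler's API):

// Sketch only: `lit_val` is the magnitude of the written literal and `negated` says
// whether a leading `-` was present. A negated literal may go one past `i8::MAX`,
// so `-128i8` is accepted while `-130i8` and `200i8` are rejected.
fn i8_literal_out_of_range(lit_val: u128, negated: bool) -> bool {
    let max = i8::MAX as u128; // 127
    (negated && lit_val > max + 1) || (!negated && lit_val > max)
}

fn main() {
    assert!(i8_literal_out_of_range(130, true)); // `-130i8` overflows
    assert!(!i8_literal_out_of_range(128, true)); // `-128i8` is in range
    assert!(i8_literal_out_of_range(200, false)); // `200i8` overflows
}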
+ fn error_on_literal_overflow( + &self, + expr: Option<&'tcx hir::Expr<'tcx>>, + ty: Ty<'tcx>, + ) -> Result<(), ErrorGuaranteed> { + use hir::{ExprKind, UnOp}; + use rustc_ast::ast::LitKind; + + let Some(mut expr) = expr else { + return Ok(()); + }; + let span = expr.span; + + // We need to inspect the original expression, because if we only inspect the output of + // `eval_bits`, an overflowed value has already been wrapped around. + // We mostly copy the logic from the `rustc_lint::OVERFLOWING_LITERALS` lint. + let mut negated = false; + if let ExprKind::Unary(UnOp::Neg, sub_expr) = expr.kind { + negated = true; + expr = sub_expr; + } + let ExprKind::Lit(lit) = expr.kind else { + return Ok(()); + }; + let LitKind::Int(lit_val, _) = lit.node else { + return Ok(()); + }; + let (min, max): (i128, u128) = match ty.kind() { + ty::Int(ity) => { + let size = Integer::from_int_ty(&self.tcx, *ity).size(); + (size.signed_int_min(), size.signed_int_max() as u128) + } + ty::Uint(uty) => { + let size = Integer::from_uint_ty(&self.tcx, *uty).size(); + (0, size.unsigned_int_max()) + } + _ => { + return Ok(()); } - kind => (kind, None), + }; + // Detect literal value out of range `[min, max]` inclusive, avoiding use of `-min` to + // prevent overflow/panic. + if (negated && lit_val > max + 1) || (!negated && lit_val > max) { + return Err(self.tcx.sess.emit_err(LiteralOutOfRange { span, ty, min, max })); } + Ok(()) } fn lower_pattern_range( &mut self, - ty: Ty<'tcx>, - lo: mir::Const<'tcx>, - hi: mir::Const<'tcx>, + lo_expr: Option<&'tcx hir::Expr<'tcx>>, + hi_expr: Option<&'tcx hir::Expr<'tcx>>, end: RangeEnd, + ty: Ty<'tcx>, span: Span, - lo_expr: Option<&hir::Expr<'tcx>>, - hi_expr: Option<&hir::Expr<'tcx>>, - ) -> PatKind<'tcx> { - assert_eq!(lo.ty(), ty); - assert_eq!(hi.ty(), ty); - let cmp = compare_const_vals(self.tcx, lo, hi, self.param_env); - let max = || { - self.tcx - .layout_of(self.param_env.with_reveal_all_normalized(self.tcx).and(ty)) - .ok() - .unwrap() - .size - .unsigned_int_max() - }; + ) -> Result, ErrorGuaranteed> { + if lo_expr.is_none() && hi_expr.is_none() { + let msg = format!("found twice-open range pattern (`..`) outside of error recovery"); + return Err(self.tcx.sess.delay_span_bug(span, msg)); + } + + let (lo, lo_ascr, lo_inline) = self.lower_pattern_range_endpoint(lo_expr)?; + let (hi, hi_ascr, hi_inline) = self.lower_pattern_range_endpoint(hi_expr)?; + + let lo = lo.unwrap_or(PatRangeBoundary::NegInfinity); + let hi = hi.unwrap_or(PatRangeBoundary::PosInfinity); + + let cmp = lo.compare_with(hi, ty, self.tcx, self.param_env); + let mut kind = PatKind::Range(Box::new(PatRange { lo, hi, end, ty })); match (end, cmp) { // `x..y` where `x < y`. - // Non-empty because the range includes at least `x`. - (RangeEnd::Excluded, Some(Ordering::Less)) => { - PatKind::Range(Box::new(PatRange { lo, hi, end })) - } - // `x..y` where `x >= y`. The range is empty => error. - (RangeEnd::Excluded, _) => { - let mut lower_overflow = false; - let mut higher_overflow = false; - if let Some(hir::Expr { kind: hir::ExprKind::Lit(lit), .. }) = lo_expr - && let rustc_ast::ast::LitKind::Int(val, _) = lit.node - { - if lo.eval_bits(self.tcx, self.param_env) != val { - lower_overflow = true; - self.tcx.sess.emit_err(LiteralOutOfRange { span: lit.span, ty, max: max() }); - } - } - if let Some(hir::Expr { kind: hir::ExprKind::Lit(lit), .. 
}) = hi_expr - && let rustc_ast::ast::LitKind::Int(val, _) = lit.node - { - if hi.eval_bits(self.tcx, self.param_env) != val { - higher_overflow = true; - self.tcx.sess.emit_err(LiteralOutOfRange { span: lit.span, ty, max: max() }); - } - } - if !lower_overflow && !higher_overflow { - self.tcx.sess.emit_err(LowerRangeBoundMustBeLessThanUpper { span }); - } - PatKind::Wild - } - // `x..=y` where `x == y`. - (RangeEnd::Included, Some(Ordering::Equal)) => PatKind::Constant { value: lo }, + (RangeEnd::Excluded, Some(Ordering::Less)) => {} // `x..=y` where `x < y`. - (RangeEnd::Included, Some(Ordering::Less)) => { - PatKind::Range(Box::new(PatRange { lo, hi, end })) - } - // `x..=y` where `x > y` hence the range is empty => error. - (RangeEnd::Included, _) => { - let mut lower_overflow = false; - let mut higher_overflow = false; - if let Some(hir::Expr { kind: hir::ExprKind::Lit(lit), .. }) = lo_expr - && let rustc_ast::ast::LitKind::Int(val, _) = lit.node - { - if lo.eval_bits(self.tcx, self.param_env) != val { - lower_overflow = true; - self.tcx.sess.emit_err(LiteralOutOfRange { span: lit.span, ty, max: max() }); + (RangeEnd::Included, Some(Ordering::Less)) => {} + // `x..=y` where `x == y` and `x` and `y` are finite. + (RangeEnd::Included, Some(Ordering::Equal)) if lo.is_finite() && hi.is_finite() => { + kind = PatKind::Constant { value: lo.as_finite().unwrap() }; + } + // `..=x` where `x == ty::MIN`. + (RangeEnd::Included, Some(Ordering::Equal)) if !lo.is_finite() => {} + // `x..` where `x == ty::MAX` (yes, `x..` gives `RangeEnd::Included` since it is meant + // to include `ty::MAX`). + (RangeEnd::Included, Some(Ordering::Equal)) if !hi.is_finite() => {} + // `x..y` where `x >= y`, or `x..=y` where `x > y`. The range is empty => error. + _ => { + // Emit a more appropriate message if there was overflow. + self.error_on_literal_overflow(lo_expr, ty)?; + self.error_on_literal_overflow(hi_expr, ty)?; + let e = match end { + RangeEnd::Included => { + self.tcx.sess.emit_err(LowerRangeBoundMustBeLessThanOrEqualToUpper { + span, + teach: self.tcx.sess.teach(&error_code!(E0030)).then_some(()), + }) } - } - if let Some(hir::Expr { kind: hir::ExprKind::Lit(lit), .. }) = hi_expr - && let rustc_ast::ast::LitKind::Int(val, _) = lit.node - { - if hi.eval_bits(self.tcx, self.param_env) != val { - higher_overflow = true; - self.tcx.sess.emit_err(LiteralOutOfRange { span: lit.span, ty, max: max() }); + RangeEnd::Excluded => { + self.tcx.sess.emit_err(LowerRangeBoundMustBeLessThanUpper { span }) } - } - if !lower_overflow && !higher_overflow { - self.tcx.sess.emit_err(LowerRangeBoundMustBeLessThanOrEqualToUpper { - span, - teach: self.tcx.sess.teach(&error_code!(E0030)).then_some(()), - }); - } - PatKind::Wild + }; + return Err(e); } } - } - fn normalize_range_pattern_ends( - &self, - ty: Ty<'tcx>, - lo: Option<&PatKind<'tcx>>, - hi: Option<&PatKind<'tcx>>, - ) -> Option<(mir::Const<'tcx>, mir::Const<'tcx>)> { - match (lo, hi) { - (Some(PatKind::Constant { value: lo }), Some(PatKind::Constant { value: hi })) => { - Some((*lo, *hi)) - } - (Some(PatKind::Constant { value: lo }), None) => { - let hi = ty.numeric_max_val(self.tcx)?; - Some((*lo, mir::Const::from_ty_const(hi, self.tcx))) - } - (None, Some(PatKind::Constant { value: hi })) => { - let lo = ty.numeric_min_val(self.tcx)?; - Some((mir::Const::from_ty_const(lo, self.tcx), *hi)) + // If we are handling a range with associated constants (e.g. + // `Foo::<'a>::A..=Foo::B`), we need to put the ascriptions for the associated + // constants somewhere. 
Have them on the range pattern. + for ascr in [lo_ascr, hi_ascr] { + if let Some(ascription) = ascr { + kind = PatKind::AscribeUserType { + ascription, + subpattern: Box::new(Pat { span, ty, kind }), + }; } - _ => None, } + for inline_const in [lo_inline, hi_inline] { + if let Some(def) = inline_const { + kind = + PatKind::InlineConstant { def, subpattern: Box::new(Pat { span, ty, kind }) }; + } + } + Ok(kind) } #[instrument(skip(self), level = "debug")] @@ -220,37 +258,8 @@ hir::PatKind::Range(ref lo_expr, ref hi_expr, end) => { let (lo_expr, hi_expr) = (lo_expr.as_deref(), hi_expr.as_deref()); - let lo_span = lo_expr.map_or(pat.span, |e| e.span); - let lo = lo_expr.map(|e| self.lower_range_expr(e)); - let hi = hi_expr.map(|e| self.lower_range_expr(e)); - - let (lp, hp) = (lo.as_ref().map(|(x, _)| x), hi.as_ref().map(|(x, _)| x)); - let mut kind = match self.normalize_range_pattern_ends(ty, lp, hp) { - Some((lc, hc)) => { - self.lower_pattern_range(ty, lc, hc, end, lo_span, lo_expr, hi_expr) - } - None => { - let msg = format!( - "found bad range pattern `{:?}` outside of error recovery", - (&lo, &hi), - ); - self.tcx.sess.delay_span_bug(pat.span, msg); - PatKind::Wild - } - }; - - // If we are handling a range with associated constants (e.g. - // `Foo::<'a>::A..=Foo::B`), we need to put the ascriptions for the associated - // constants somewhere. Have them on the range pattern. - for end in &[lo, hi] { - if let Some((_, Some(ascription))) = end { - let subpattern = Box::new(Pat { span: pat.span, ty, kind }); - kind = - PatKind::AscribeUserType { ascription: ascription.clone(), subpattern }; - } - } - - kind + self.lower_pattern_range(lo_expr, hi_expr, end, ty, span) + .unwrap_or_else(PatKind::Error) } hir::PatKind::Path(ref qpath) => { @@ -418,9 +427,9 @@ if adt_def.is_enum() { let args = match ty.kind() { ty::Adt(_, args) | ty::FnDef(_, args) => args, - ty::Error(_) => { + ty::Error(e) => { // Avoid ICE (#50585) - return PatKind::Wild; + return PatKind::Error(*e); } _ => bug!("inappropriate type for def: {:?}", ty), }; @@ -447,7 +456,7 @@ | Res::SelfTyAlias { .. } | Res::SelfCtor(..) => PatKind::Leaf { subpatterns }, _ => { - match res { + let e = match res { Res::Def(DefKind::ConstParam, _) => { self.tcx.sess.emit_err(ConstParamInPattern { span }) } @@ -456,7 +465,7 @@ } _ => self.tcx.sess.emit_err(NonConstPath { span }), }; - PatKind::Wild + PatKind::Error(e) } }; @@ -508,14 +517,13 @@ // It should be assoc consts if there's no error but we cannot resolve it. debug_assert!(is_associated_const); - self.tcx.sess.emit_err(AssocConstInPattern { span }); - - return pat_from_kind(PatKind::Wild); + let e = self.tcx.sess.emit_err(AssocConstInPattern { span }); + return pat_from_kind(PatKind::Error(e)); } Err(_) => { - self.tcx.sess.emit_err(CouldNotEvalConstPattern { span }); - return pat_from_kind(PatKind::Wild); + let e = self.tcx.sess.emit_err(CouldNotEvalConstPattern { span }); + return pat_from_kind(PatKind::Error(e)); } }; @@ -569,12 +577,12 @@ Err(ErrorHandled::TooGeneric(_)) => { // While `Reported | Linted` cases will have diagnostics emitted already // it is not true for TooGeneric case, so we need to give user more information. 
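Setting aside the unbounded-endpoint special cases, the `(end, cmp)` match in `lower_pattern_range` above boils down to three outcomes: an exclusive range needs `lo < hi`, an inclusive range also accepts `lo == hi` (and then degenerates into a constant pattern), and everything else is an empty range and an error. A minimal sketch over plain integers (illustrative types, not the compiler's):

use std::cmp::Ordering;

// Sketch only: simplified outcome of lowering an integer range pattern.
#[derive(Debug, PartialEq)]
enum Lowered {
    Constant(i64),
    Range { lo: i64, hi: i64, inclusive: bool },
    EmptyRangeError,
}

fn lower_range(lo: i64, hi: i64, inclusive: bool) -> Lowered {
    match (inclusive, lo.cmp(&hi)) {
        (_, Ordering::Less) => Lowered::Range { lo, hi, inclusive },
        // `x..=y` with `x == y` is just the constant `x`.
        (true, Ordering::Equal) => Lowered::Constant(lo),
        // `x..y` with `x >= y`, or `x..=y` with `x > y`: the range is empty.
        _ => Lowered::EmptyRangeError,
    }
}

fn main() {
    assert_eq!(lower_range(0, 0, true), Lowered::Constant(0));
    assert_eq!(lower_range(1, 4, false), Lowered::Range { lo: 1, hi: 4, inclusive: false });
    assert_eq!(lower_range(5, 3, false), Lowered::EmptyRangeError);
}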
- self.tcx.sess.emit_err(ConstPatternDependsOnGenericParameter { span }); - pat_from_kind(PatKind::Wild) + let e = self.tcx.sess.emit_err(ConstPatternDependsOnGenericParameter { span }); + pat_from_kind(PatKind::Error(e)) } Err(_) => { - self.tcx.sess.emit_err(CouldNotEvalConstPattern { span }); - pat_from_kind(PatKind::Wild) + let e = self.tcx.sess.emit_err(CouldNotEvalConstPattern { span }); + pat_from_kind(PatKind::Error(e)) } } } @@ -597,11 +605,9 @@ // const eval path below. // FIXME: investigate the performance impact of removing this. let lit_input = match expr.kind { - hir::ExprKind::Lit(ref lit) => Some(LitToConstInput { lit: &lit.node, ty, neg: false }), - hir::ExprKind::Unary(hir::UnOp::Neg, ref expr) => match expr.kind { - hir::ExprKind::Lit(ref lit) => { - Some(LitToConstInput { lit: &lit.node, ty, neg: true }) - } + hir::ExprKind::Lit(lit) => Some(LitToConstInput { lit: &lit.node, ty, neg: false }), + hir::ExprKind::Unary(hir::UnOp::Neg, expr) => match expr.kind { + hir::ExprKind::Lit(lit) => Some(LitToConstInput { lit: &lit.node, ty, neg: true }), _ => None, }, _ => None, @@ -624,30 +630,30 @@ let uneval = mir::UnevaluatedConst { def: def_id.to_def_id(), args, promoted: None }; debug_assert!(!args.has_free_regions()); - let ct = ty::UnevaluatedConst { def: def_id.to_def_id(), args: args }; + let ct = ty::UnevaluatedConst { def: def_id.to_def_id(), args }; // First try using a valtree in order to destructure the constant into a pattern. // FIXME: replace "try to do a thing, then fall back to another thing" // but something more principled, like a trait query checking whether this can be turned into a valtree. if let Ok(Some(valtree)) = self.tcx.const_eval_resolve_for_typeck(self.param_env, ct, Some(span)) { - self.const_to_pat( + let subpattern = self.const_to_pat( Const::Ty(ty::Const::new_value(self.tcx, valtree, ty)), id, span, None, - ) - .kind + ); + PatKind::InlineConstant { subpattern, def: def_id } } else { // If that fails, convert it to an opaque constant pattern. match tcx.const_eval_resolve(self.param_env, uneval, Some(span)) { Ok(val) => self.const_to_pat(mir::Const::Val(val, ty), id, span, None).kind, Err(ErrorHandled::TooGeneric(_)) => { // If we land here it means the const can't be evaluated because it's `TooGeneric`. 
- self.tcx.sess.emit_err(ConstPatternDependsOnGenericParameter { span }); - PatKind::Wild + let e = self.tcx.sess.emit_err(ConstPatternDependsOnGenericParameter { span }); + PatKind::Error(e) } - Err(ErrorHandled::Reported(..)) => PatKind::Wild, + Err(ErrorHandled::Reported(err, ..)) => PatKind::Error(err.into()), } } } @@ -680,7 +686,7 @@ Ok(constant) => { self.const_to_pat(Const::Ty(constant), expr.hir_id, lit.span, None).kind } - Err(LitToConstError::Reported(_)) => PatKind::Wild, + Err(LitToConstError::Reported(e)) => PatKind::Error(e), Err(LitToConstError::TypeError) => bug!("lower_lit: had type error"), } } @@ -786,6 +792,7 @@ fn super_fold_with>(&self, folder: &mut F) -> Self { match *self { PatKind::Wild => PatKind::Wild, + PatKind::Error(e) => PatKind::Error(e), PatKind::AscribeUserType { ref subpattern, ascription: Ascription { ref annotation, variance }, @@ -819,6 +826,9 @@ PatKind::Deref { subpattern: subpattern.fold_with(folder) } } PatKind::Constant { value } => PatKind::Constant { value }, + PatKind::InlineConstant { def, subpattern: ref pattern } => { + PatKind::InlineConstant { def, subpattern: pattern.fold_with(folder) } + } PatKind::Range(ref range) => PatKind::Range(range.clone()), PatKind::Slice { ref prefix, ref slice, ref suffix } => PatKind::Slice { prefix: prefix.fold_with(folder), @@ -834,59 +844,3 @@ } } } - -#[instrument(skip(tcx), level = "debug")] -pub(crate) fn compare_const_vals<'tcx>( - tcx: TyCtxt<'tcx>, - a: mir::Const<'tcx>, - b: mir::Const<'tcx>, - param_env: ty::ParamEnv<'tcx>, -) -> Option { - assert_eq!(a.ty(), b.ty()); - - let ty = a.ty(); - - // This code is hot when compiling matches with many ranges. So we - // special-case extraction of evaluated scalars for speed, for types where - // raw data comparisons are appropriate. E.g. `unicode-normalization` has - // many ranges such as '\u{037A}'..='\u{037F}', and chars can be compared - // in this way. - match ty.kind() { - ty::Float(_) | ty::Int(_) => {} // require special handling, see below - _ => match (a, b) { - ( - mir::Const::Val(mir::ConstValue::Scalar(Scalar::Int(a)), _a_ty), - mir::Const::Val(mir::ConstValue::Scalar(Scalar::Int(b)), _b_ty), - ) => return Some(a.cmp(&b)), - (mir::Const::Ty(a), mir::Const::Ty(b)) => { - return Some(a.kind().cmp(&b.kind())); - } - _ => {} - }, - } - - let a = a.eval_bits(tcx, param_env); - let b = b.eval_bits(tcx, param_env); - - use rustc_apfloat::Float; - match *ty.kind() { - ty::Float(ty::FloatTy::F32) => { - let a = rustc_apfloat::ieee::Single::from_bits(a); - let b = rustc_apfloat::ieee::Single::from_bits(b); - a.partial_cmp(&b) - } - ty::Float(ty::FloatTy::F64) => { - let a = rustc_apfloat::ieee::Double::from_bits(a); - let b = rustc_apfloat::ieee::Double::from_bits(b); - a.partial_cmp(&b) - } - ty::Int(ity) => { - use rustc_middle::ty::layout::IntegerExt; - let size = rustc_target::abi::Integer::from_int_ty(&tcx, ity).size(); - let a = size.sign_extend(a); - let b = size.sign_extend(b); - Some((a as i128).cmp(&(b as i128))) - } - _ => Some(a.cmp(&b)), - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs 2023-12-21 16:55:28.000000000 +0000 @@ -213,7 +213,7 @@ //! 
or-patterns in the first column are expanded before being stored in the matrix. Specialization //! for a single patstack is done from a combination of [`Constructor::is_covered_by`] and //! [`PatStack::pop_head_constructor`]. The internals of how it's done mostly live in the -//! [`Fields`] struct. +//! [`super::deconstruct_pat::Fields`] struct. //! //! //! # Computing usefulness @@ -307,8 +307,14 @@ use self::ArmType::*; use self::Usefulness::*; -use super::deconstruct_pat::{Constructor, DeconstructedPat, Fields, SplitWildcard}; -use crate::errors::{NonExhaustiveOmittedPattern, Uncovered}; +use super::deconstruct_pat::{ + Constructor, ConstructorSet, DeconstructedPat, IntRange, MaybeInfiniteInt, SplitConstructorSet, + WitnessPat, +}; +use crate::errors::{ + NonExhaustiveOmittedPattern, NonExhaustiveOmittedPatternLintOnArm, Overlap, + OverlappingRangeEndpoints, Uncovered, +}; use rustc_data_structures::captures::Captures; @@ -317,12 +323,12 @@ use rustc_hir::def_id::DefId; use rustc_hir::HirId; use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_session::lint; use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS; use rustc_span::{Span, DUMMY_SP}; use smallvec::{smallvec, SmallVec}; use std::fmt; -use std::iter::once; pub(crate) struct MatchCheckCtxt<'p, 'tcx> { pub(crate) tcx: TyCtxt<'tcx>, @@ -334,6 +340,8 @@ pub(crate) module: DefId, pub(crate) param_env: ty::ParamEnv<'tcx>, pub(crate) pattern_arena: &'p TypedArena>, + /// The span of the whole match, if applicable. + pub(crate) match_span: Option, /// Only produce `NON_EXHAUSTIVE_OMITTED_PATTERNS` lint on refutable patterns. pub(crate) refutable: bool, } @@ -368,8 +376,6 @@ /// Whether the current pattern is the whole pattern as found in a match arm, or if it's a /// subpattern. pub(super) is_top_level: bool, - /// Whether the current pattern is from a `non_exhaustive` enum. - pub(super) is_non_exhaustive: bool, } impl<'a, 'p, 'tcx> fmt::Debug for PatCtxt<'a, 'p, 'tcx> { @@ -476,11 +482,6 @@ Matrix { patterns: vec![] } } - /// Number of columns of this matrix. `None` is the matrix is empty. - pub(super) fn column_count(&self) -> Option { - self.patterns.get(0).map(|r| r.len()) - } - /// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively /// expands it. fn push(&mut self, row: PatStack<'p, 'tcx>) { @@ -557,20 +558,20 @@ /// exhaustiveness of a whole match, we use the `WithWitnesses` variant, which carries a list of /// witnesses of non-exhaustiveness when there are any. /// Which variant to use is dictated by `ArmType`. -#[derive(Debug)] -enum Usefulness<'p, 'tcx> { +#[derive(Debug, Clone)] +enum Usefulness<'tcx> { /// If we don't care about witnesses, simply remember if the pattern was useful. NoWitnesses { useful: bool }, /// Carries a list of witnesses of non-exhaustiveness. If empty, indicates that the whole /// pattern is unreachable. - WithWitnesses(Vec>), + WithWitnesses(Vec>), } -impl<'p, 'tcx> Usefulness<'p, 'tcx> { +impl<'tcx> Usefulness<'tcx> { fn new_useful(preference: ArmType) -> Self { match preference { // A single (empty) witness of reachability. - FakeExtraWildcard => WithWitnesses(vec![Witness(vec![])]), + FakeExtraWildcard => WithWitnesses(vec![WitnessStack(vec![])]), RealArm => NoWitnesses { useful: true }, } } @@ -607,8 +608,8 @@ /// with the results of specializing with the other constructors. 
fn apply_constructor( self, - pcx: &PatCtxt<'_, 'p, 'tcx>, - matrix: &Matrix<'p, 'tcx>, // used to compute missing ctors + pcx: &PatCtxt<'_, '_, 'tcx>, + matrix: &Matrix<'_, 'tcx>, // used to compute missing ctors ctor: &Constructor<'tcx>, ) -> Self { match self { @@ -616,62 +617,34 @@ WithWitnesses(ref witnesses) if witnesses.is_empty() => self, WithWitnesses(witnesses) => { let new_witnesses = if let Constructor::Missing { .. } = ctor { - // We got the special `Missing` constructor, so each of the missing constructors - // gives a new pattern that is not caught by the match. We list those patterns. - if pcx.is_non_exhaustive { - witnesses - .into_iter() - // Here we don't want the user to try to list all variants, we want them to add - // a wildcard, so we only suggest that. - .map(|witness| { - witness.apply_constructor(pcx, &Constructor::NonExhaustive) - }) - .collect() - } else { - let mut split_wildcard = SplitWildcard::new(pcx); - split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor)); - - // This lets us know if we skipped any variants because they are marked - // `doc(hidden)` or they are unstable feature gate (only stdlib types). - let mut hide_variant_show_wild = false; - // Construct for each missing constructor a "wild" version of this - // constructor, that matches everything that can be built with - // it. For example, if `ctor` is a `Constructor::Variant` for - // `Option::Some`, we get the pattern `Some(_)`. - let mut new_patterns: Vec> = split_wildcard - .iter_missing(pcx) - .filter_map(|missing_ctor| { - // Check if this variant is marked `doc(hidden)` - if missing_ctor.is_doc_hidden_variant(pcx) - || missing_ctor.is_unstable_variant(pcx) - { - hide_variant_show_wild = true; - return None; - } - Some(DeconstructedPat::wild_from_ctor(pcx, missing_ctor.clone())) - }) - .collect(); + let mut missing = ConstructorSet::for_ty(pcx.cx, pcx.ty) + .compute_missing(pcx, matrix.heads().map(DeconstructedPat::ctor)); + if missing.iter().any(|c| c.is_non_exhaustive()) { + // We only report `_` here; listing other constructors would be redundant. + missing = vec![Constructor::NonExhaustive]; + } - if hide_variant_show_wild { - new_patterns.push(DeconstructedPat::wildcard(pcx.ty, pcx.span)); - } + // We got the special `Missing` constructor, so each of the missing constructors + // gives a new pattern that is not caught by the match. + // We construct for each missing constructor a version of this constructor with + // wildcards for fields, i.e. that matches everything that can be built with it. + // For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get + // the pattern `Some(_)`. + let new_patterns: Vec> = missing + .into_iter() + .map(|missing_ctor| WitnessPat::wild_from_ctor(pcx, missing_ctor.clone())) + .collect(); - witnesses - .into_iter() - .flat_map(|witness| { - new_patterns.iter().map(move |pat| { - Witness( - witness - .0 - .iter() - .chain(once(pat)) - .map(DeconstructedPat::clone_and_forget_reachability) - .collect(), - ) - }) + witnesses + .into_iter() + .flat_map(|witness| { + new_patterns.iter().map(move |pat| { + let mut stack = witness.clone(); + stack.0.push(pat.clone()); + stack }) - .collect() - } + }) + .collect() } else { witnesses .into_iter() @@ -690,15 +663,17 @@ RealArm, } -/// A witness of non-exhaustiveness for error reporting, represented -/// as a list of patterns (in reverse order of construction) with -/// wildcards inside to represent elements that can take any inhabitant -/// of the type as a value. 
-/// -/// A witness against a list of patterns should have the same types -/// and length as the pattern matched against. Because Rust `match` -/// is always against a single pattern, at the end the witness will -/// have length 1, but in the middle of the algorithm, it can contain +/// A witness-tuple of non-exhaustiveness for error reporting, represented as a list of patterns (in +/// reverse order of construction) with wildcards inside to represent elements that can take any +/// inhabitant of the type as a value. +/// +/// This mirrors `PatStack`: they function similarly, except `PatStack` contains user patterns we +/// are inspecting, and `WitnessStack` contains witnesses we are constructing. +/// FIXME(Nadrieril): use the same order of patterns for both +/// +/// A `WitnessStack` should have the same types and length as the `PatStacks` we are inspecting +/// (except we store the patterns in reverse order). Because Rust `match` is always against a single +/// pattern, at the end the stack will have length 1. In the middle of the algorithm, it can contain /// multiple patterns. /// /// For example, if we are constructing a witness for the match against @@ -713,23 +688,37 @@ /// # } /// ``` /// -/// We'll perform the following steps: -/// 1. Start with an empty witness -/// `Witness(vec![])` -/// 2. Push a witness `true` against the `false` -/// `Witness(vec![true])` -/// 3. Push a witness `Some(_)` against the `None` -/// `Witness(vec![true, Some(_)])` -/// 4. Apply the `Pair` constructor to the witnesses -/// `Witness(vec![Pair(Some(_), true)])` +/// We'll perform the following steps (among others): +/// - Start with a matrix representing the match +/// `PatStack(vec![Pair(None, _)])` +/// `PatStack(vec![Pair(_, false)])` +/// - Specialize with `Pair` +/// `PatStack(vec![None, _])` +/// `PatStack(vec![_, false])` +/// - Specialize with `Some` +/// `PatStack(vec![_, false])` +/// - Specialize with `_` +/// `PatStack(vec![false])` +/// - Specialize with `true` +/// // no patstacks left +/// - This is a non-exhaustive match: we have the empty witness stack as a witness. +/// `WitnessStack(vec![])` +/// - Apply `true` +/// `WitnessStack(vec![true])` +/// - Apply `_` +/// `WitnessStack(vec![true, _])` +/// - Apply `Some` +/// `WitnessStack(vec![true, Some(_)])` +/// - Apply `Pair` +/// `WitnessStack(vec![Pair(Some(_), true)])` /// /// The final `Pair(Some(_), true)` is then the resulting witness. -#[derive(Debug)] -pub(crate) struct Witness<'p, 'tcx>(Vec>); +#[derive(Debug, Clone)] +pub(crate) struct WitnessStack<'tcx>(Vec>); -impl<'p, 'tcx> Witness<'p, 'tcx> { +impl<'tcx> WitnessStack<'tcx> { /// Asserts that the witness contains a single pattern, and returns it. 
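The worked example in the doc comment above can be replayed with a toy pattern type: applying a constructor pops as many entries off the witness stack as the constructor has fields (in reverse order of construction) and pushes back one assembled pattern. A small self-contained sketch of that single step, with made-up pattern names rather than the compiler's types:

// Sketch only: a toy stand-in for witness patterns.
#[derive(Debug, PartialEq)]
enum Pat {
    Wild,
    True,
    Some_(Box<Pat>),
    Pair(Box<Pat>, Box<Pat>),
}

// Applying the binary `Pair` constructor: drain the two most recent entries, reverse
// them, and push the assembled pattern, mirroring `self.0.drain((len - arity)..).rev()`.
fn apply_pair(stack: &mut Vec<Pat>) {
    let last = stack.pop().unwrap();
    let before_last = stack.pop().unwrap();
    stack.push(Pat::Pair(Box::new(last), Box::new(before_last)));
}

fn main() {
    // After witnessing `true` and then `Some(_)`, the stack is `[true, Some(_)]`.
    let mut stack = vec![Pat::True, Pat::Some_(Box::new(Pat::Wild))];
    apply_pair(&mut stack);
    // Applying `Pair` yields the final witness `Pair(Some(_), true)`.
    assert_eq!(
        stack,
        vec![Pat::Pair(Box::new(Pat::Some_(Box::new(Pat::Wild))), Box::new(Pat::True))]
    );
}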
- fn single_pattern(self) -> DeconstructedPat<'p, 'tcx> { + fn single_pattern(self) -> WitnessPat<'tcx> { assert_eq!(self.0.len(), 1); self.0.into_iter().next().unwrap() } @@ -747,13 +736,12 @@ /// /// left_ty: struct X { a: (bool, &'static str), b: usize} /// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 } - fn apply_constructor(mut self, pcx: &PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Self { + fn apply_constructor(mut self, pcx: &PatCtxt<'_, '_, 'tcx>, ctor: &Constructor<'tcx>) -> Self { let pat = { let len = self.0.len(); let arity = ctor.arity(pcx); - let pats = self.0.drain((len - arity)..).rev(); - let fields = Fields::from_iter(pcx.cx, pats); - DeconstructedPat::new(ctor.clone(), fields, pcx.ty, pcx.span) + let fields = self.0.drain((len - arity)..).rev().collect(); + WitnessPat::new(ctor.clone(), fields, pcx.ty) }; self.0.push(pat); @@ -793,7 +781,7 @@ lint_root: HirId, is_under_guard: bool, is_top_level: bool, -) -> Usefulness<'p, 'tcx> { +) -> Usefulness<'tcx> { debug!(?matrix, ?v); let Matrix { patterns: rows, .. } = matrix; @@ -844,24 +832,13 @@ ty = row.head().ty(); } } - let is_non_exhaustive = cx.is_foreign_non_exhaustive_enum(ty); debug!("v.head: {:?}, v.span: {:?}", v.head(), v.head().span()); - let pcx = &PatCtxt { cx, ty, span: v.head().span(), is_top_level, is_non_exhaustive }; + let pcx = &PatCtxt { cx, ty, span: v.head().span(), is_top_level }; let v_ctor = v.head().ctor(); debug!(?v_ctor); - if let Constructor::IntRange(ctor_range) = &v_ctor { - // Lint on likely incorrect range patterns (#63987) - ctor_range.lint_overlapping_range_endpoints( - pcx, - matrix.heads(), - matrix.column_count().unwrap_or(0), - lint_root, - ) - } // We split the head constructor of `v`. let split_ctors = v_ctor.split(pcx, matrix.heads().map(DeconstructedPat::ctor)); - let is_non_exhaustive_and_wild = is_non_exhaustive && v_ctor.is_wildcard(); // For each constructor, we compute whether there's a value that starts with it that would // witness the usefulness of `v`. let start_matrix = &matrix; @@ -882,56 +859,6 @@ ) }); let usefulness = usefulness.apply_constructor(pcx, start_matrix, &ctor); - - // When all the conditions are met we have a match with a `non_exhaustive` enum - // that has the potential to trigger the `non_exhaustive_omitted_patterns` lint. - // To understand the workings checkout `Constructor::split` and `SplitWildcard::new/into_ctors` - if is_non_exhaustive_and_wild - // Only emit a lint on refutable patterns. - && cx.refutable - // We check that the match has a wildcard pattern and that wildcard is useful, - // meaning there are variants that are covered by the wildcard. Without the check - // for `witness_preference` the lint would trigger on `if let NonExhaustiveEnum::A = foo {}` - && usefulness.is_useful() && matches!(witness_preference, RealArm) - && matches!( - &ctor, - Constructor::Missing { nonexhaustive_enum_missing_real_variants: true } - ) - { - let patterns = { - let mut split_wildcard = SplitWildcard::new(pcx); - split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor)); - // Construct for each missing constructor a "wild" version of this - // constructor, that matches everything that can be built with - // it. For example, if `ctor` is a `Constructor::Variant` for - // `Option::Some`, we get the pattern `Some(_)`. - split_wildcard - .iter_missing(pcx) - // Filter out the `NonExhaustive` because we want to list only real - // variants. Also remove any unstable feature gated variants. 
-                        // Because of how we computed `nonexhaustive_enum_missing_real_variants`,
-                        // this will not return an empty `Vec`.
-                        .filter(|c| !(c.is_non_exhaustive() || c.is_unstable_variant(pcx)))
-                        .cloned()
-                        .map(|missing_ctor| DeconstructedPat::wild_from_ctor(pcx, missing_ctor))
-                        .collect::<Vec<_>>()
-                };
-
-                // Report that a match of a `non_exhaustive` enum marked with `non_exhaustive_omitted_patterns`
-                // is not exhaustive enough.
-                //
-                // NB: The partner lint for structs lives in `compiler/rustc_hir_analysis/src/check/pat.rs`.
-                cx.tcx.emit_spanned_lint(
-                    NON_EXHAUSTIVE_OMITTED_PATTERNS,
-                    lint_root,
-                    pcx.span,
-                    NonExhaustiveOmittedPattern {
-                        scrut_ty: pcx.ty,
-                        uncovered: Uncovered::new(pcx.span, pcx.cx, patterns),
-                    },
-                );
-            }
-
             ret.extend(usefulness);
         }
     }
@@ -943,6 +870,214 @@
     ret
 }
 
+/// A column of patterns in the matrix, where a column is the intuitive notion of "subpatterns that
+/// inspect the same subvalue".
+/// This is used to traverse patterns column-by-column for lints. Despite similarities with
+/// `is_useful`, this is a different traversal. Notably this is linear in the depth of patterns,
+/// whereas `is_useful` is worst-case exponential (exhaustiveness is NP-complete).
+#[derive(Debug)]
+struct PatternColumn<'p, 'tcx> {
+    patterns: Vec<&'p DeconstructedPat<'p, 'tcx>>,
+}
+
+impl<'p, 'tcx> PatternColumn<'p, 'tcx> {
+    fn new(patterns: Vec<&'p DeconstructedPat<'p, 'tcx>>) -> Self {
+        Self { patterns }
+    }
+
+    fn is_empty(&self) -> bool {
+        self.patterns.is_empty()
+    }
+    fn head_ty(&self) -> Option<Ty<'tcx>> {
+        if self.patterns.len() == 0 {
+            return None;
+        }
+        // If the type is opaque and it is revealed anywhere in the column, we take the revealed
+        // version. Otherwise we could encounter constructors for the revealed type and crash.
+        let is_opaque = |ty: Ty<'tcx>| matches!(ty.kind(), ty::Alias(ty::Opaque, ..));
+        let first_ty = self.patterns[0].ty();
+        if is_opaque(first_ty) {
+            for pat in &self.patterns {
+                let ty = pat.ty();
+                if !is_opaque(ty) {
+                    return Some(ty);
+                }
+            }
+        }
+        Some(first_ty)
+    }
+
+    fn analyze_ctors(&self, pcx: &PatCtxt<'_, 'p, 'tcx>) -> SplitConstructorSet<'tcx> {
+        let column_ctors = self.patterns.iter().map(|p| p.ctor());
+        ConstructorSet::for_ty(pcx.cx, pcx.ty).split(pcx, column_ctors)
+    }
+    fn iter<'a>(&'a self) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
+        self.patterns.iter().copied()
+    }
+
+    /// Does specialization: given a constructor, this takes the patterns from the column that match
+    /// the constructor, and outputs their fields.
+    /// This returns one column per field of the constructor. They normally all have the same length
+    /// (the number of patterns in `self` that matched `ctor`), except that we expand or-patterns
+    /// which may change the lengths.
+    fn specialize(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Vec<Self> {
+        let arity = ctor.arity(pcx);
+        if arity == 0 {
+            return Vec::new();
+        }
+
+        // We specialize the column by `ctor`. This gives us `arity`-many columns of patterns. These
+        // columns may have different lengths in the presence of or-patterns (this is why we can't
+        // reuse `Matrix`).
+ let mut specialized_columns: Vec<_> = + (0..arity).map(|_| Self { patterns: Vec::new() }).collect(); + let relevant_patterns = + self.patterns.iter().filter(|pat| ctor.is_covered_by(pcx, pat.ctor())); + for pat in relevant_patterns { + let specialized = pat.specialize(pcx, &ctor); + for (subpat, column) in specialized.iter().zip(&mut specialized_columns) { + if subpat.is_or_pat() { + column.patterns.extend(subpat.flatten_or_pat()) + } else { + column.patterns.push(subpat) + } + } + } + + assert!( + !specialized_columns[0].is_empty(), + "ctor {ctor:?} was listed as present but isn't; + there is an inconsistency between `Constructor::is_covered_by` and `ConstructorSet::split`" + ); + specialized_columns + } +} + +/// Traverse the patterns to collect any variants of a non_exhaustive enum that fail to be mentioned +/// in a given column. +#[instrument(level = "debug", skip(cx), ret)] +fn collect_nonexhaustive_missing_variants<'p, 'tcx>( + cx: &MatchCheckCtxt<'p, 'tcx>, + column: &PatternColumn<'p, 'tcx>, +) -> Vec> { + let Some(ty) = column.head_ty() else { + return Vec::new(); + }; + let pcx = &PatCtxt { cx, ty, span: DUMMY_SP, is_top_level: false }; + + let set = column.analyze_ctors(pcx); + if set.present.is_empty() { + // We can't consistently handle the case where no constructors are present (since this would + // require digging deep through any type in case there's a non_exhaustive enum somewhere), + // so for consistency we refuse to handle the top-level case, where we could handle it. + return vec![]; + } + + let mut witnesses = Vec::new(); + if cx.is_foreign_non_exhaustive_enum(ty) { + witnesses.extend( + set.missing + .into_iter() + // This will list missing visible variants. + .filter(|c| !matches!(c, Constructor::Hidden | Constructor::NonExhaustive)) + .map(|missing_ctor| WitnessPat::wild_from_ctor(pcx, missing_ctor)), + ) + } + + // Recurse into the fields. + for ctor in set.present { + let specialized_columns = column.specialize(pcx, &ctor); + let wild_pat = WitnessPat::wild_from_ctor(pcx, ctor); + for (i, col_i) in specialized_columns.iter().enumerate() { + // Compute witnesses for each column. + let wits_for_col_i = collect_nonexhaustive_missing_variants(cx, col_i); + // For each witness, we build a new pattern in the shape of `ctor(_, _, wit, _, _)`, + // adding enough wildcards to match `arity`. + for wit in wits_for_col_i { + let mut pat = wild_pat.clone(); + pat.fields[i] = wit; + witnesses.push(pat); + } + } + } + witnesses +} + +/// Traverse the patterns to warn the user about ranges that overlap on their endpoints. +#[instrument(level = "debug", skip(cx, lint_root))] +fn lint_overlapping_range_endpoints<'p, 'tcx>( + cx: &MatchCheckCtxt<'p, 'tcx>, + column: &PatternColumn<'p, 'tcx>, + lint_root: HirId, +) { + let Some(ty) = column.head_ty() else { + return; + }; + let pcx = &PatCtxt { cx, ty, span: DUMMY_SP, is_top_level: false }; + + let set = column.analyze_ctors(pcx); + + if IntRange::is_integral(ty) { + let emit_lint = |overlap: &IntRange, this_span: Span, overlapped_spans: &[Span]| { + let overlap_as_pat = overlap.to_diagnostic_pat(ty, cx.tcx); + let overlaps: Vec<_> = overlapped_spans + .iter() + .copied() + .map(|span| Overlap { range: overlap_as_pat.clone(), span }) + .collect(); + cx.tcx.emit_spanned_lint( + lint::builtin::OVERLAPPING_RANGE_ENDPOINTS, + lint_root, + this_span, + OverlappingRangeEndpoints { overlap: overlaps, range: this_span }, + ); + }; + + // If two ranges overlapped, the split set will contain their intersection as a singleton. 
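The singleton-intersection observation above is the core of the lint: two well-formed inclusive ranges overlap on exactly one value precisely when one ends where the other begins. A minimal sketch over plain `u128` endpoints (illustrative; the compiler's `IntRange` additionally handles signedness and open endpoints):

// Sketch only: return the single overlapping value of two inclusive ranges, if any.
fn singleton_overlap((a_lo, a_hi): (u128, u128), (b_lo, b_hi): (u128, u128)) -> Option<u128> {
    let lo = a_lo.max(b_lo);
    let hi = a_hi.min(b_hi);
    // A non-empty intersection that is a single point is the case the lint reports.
    (lo == hi).then_some(lo)
}

fn main() {
    assert_eq!(singleton_overlap((0, 10), (10, 20)), Some(10)); // `0..=10` and `10..=20`
    assert_eq!(singleton_overlap((0, 10), (5, 20)), None); // overlap is wider than a point
    assert_eq!(singleton_overlap((0, 4), (6, 9)), None); // disjoint ranges
}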
+ let split_int_ranges = set.present.iter().filter_map(|c| c.as_int_range()); + for overlap_range in split_int_ranges.clone() { + if overlap_range.is_singleton() { + let overlap: MaybeInfiniteInt = overlap_range.lo; + // Ranges that look like `lo..=overlap`. + let mut prefixes: SmallVec<[_; 1]> = Default::default(); + // Ranges that look like `overlap..=hi`. + let mut suffixes: SmallVec<[_; 1]> = Default::default(); + // Iterate on patterns that contained `overlap`. + for pat in column.iter() { + let this_span = pat.span(); + let Constructor::IntRange(this_range) = pat.ctor() else { continue }; + if this_range.is_singleton() { + // Don't lint when one of the ranges is a singleton. + continue; + } + if this_range.lo == overlap { + // `this_range` looks like `overlap..=this_range.hi`; it overlaps with any + // ranges that look like `lo..=overlap`. + if !prefixes.is_empty() { + emit_lint(overlap_range, this_span, &prefixes); + } + suffixes.push(this_span) + } else if this_range.hi == overlap.plus_one() { + // `this_range` looks like `this_range.lo..=overlap`; it overlaps with any + // ranges that look like `overlap..=hi`. + if !suffixes.is_empty() { + emit_lint(overlap_range, this_span, &suffixes); + } + prefixes.push(this_span) + } + } + } + } + } else { + // Recurse into the fields. + for ctor in set.present { + for col in column.specialize(pcx, &ctor) { + lint_overlapping_range_endpoints(cx, &col, lint_root); + } + } + } +} + /// The arm of a match expression. #[derive(Clone, Copy, Debug)] pub(crate) struct MatchArm<'p, 'tcx> { @@ -969,7 +1104,7 @@ pub(crate) arm_usefulness: Vec<(MatchArm<'p, 'tcx>, Reachability)>, /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of /// exhaustiveness. - pub(crate) non_exhaustiveness_witnesses: Vec>, + pub(crate) non_exhaustiveness_witnesses: Vec>, } /// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which @@ -983,6 +1118,7 @@ arms: &[MatchArm<'p, 'tcx>], lint_root: HirId, scrut_ty: Ty<'tcx>, + scrut_span: Span, ) -> UsefulnessReport<'p, 'tcx> { let mut matrix = Matrix::empty(); let arm_usefulness: Vec<_> = arms @@ -1007,9 +1143,63 @@ let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty, DUMMY_SP)); let v = PatStack::from_pattern(wild_pattern); let usefulness = is_useful(cx, &matrix, &v, FakeExtraWildcard, lint_root, false, true); - let non_exhaustiveness_witnesses = match usefulness { + let non_exhaustiveness_witnesses: Vec<_> = match usefulness { WithWitnesses(pats) => pats.into_iter().map(|w| w.single_pattern()).collect(), NoWitnesses { .. } => bug!(), }; + + let pat_column = arms.iter().flat_map(|arm| arm.pat.flatten_or_pat()).collect::>(); + let pat_column = PatternColumn::new(pat_column); + lint_overlapping_range_endpoints(cx, &pat_column, lint_root); + + // Run the non_exhaustive_omitted_patterns lint. Only run on refutable patterns to avoid hitting + // `if let`s. Only run if the match is exhaustive otherwise the error is redundant. + if cx.refutable && non_exhaustiveness_witnesses.is_empty() { + if !matches!( + cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, lint_root).0, + rustc_session::lint::Level::Allow + ) { + let witnesses = collect_nonexhaustive_missing_variants(cx, &pat_column); + + if !witnesses.is_empty() { + // Report that a match of a `non_exhaustive` enum marked with `non_exhaustive_omitted_patterns` + // is not exhaustive enough. 
+ // + // NB: The partner lint for structs lives in `compiler/rustc_hir_analysis/src/check/pat.rs`. + cx.tcx.emit_spanned_lint( + NON_EXHAUSTIVE_OMITTED_PATTERNS, + lint_root, + scrut_span, + NonExhaustiveOmittedPattern { + scrut_ty, + uncovered: Uncovered::new(scrut_span, cx, witnesses), + }, + ); + } + } else { + // We used to allow putting the `#[allow(non_exhaustive_omitted_patterns)]` on a match + // arm. This no longer makes sense so we warn users, to avoid silently breaking their + // usage of the lint. + for arm in arms { + let (lint_level, lint_level_source) = + cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, arm.hir_id); + if !matches!(lint_level, rustc_session::lint::Level::Allow) { + let decorator = NonExhaustiveOmittedPatternLintOnArm { + lint_span: lint_level_source.span(), + suggest_lint_on_match: cx.match_span.map(|span| span.shrink_to_lo()), + lint_level: lint_level.as_str(), + lint_name: "non_exhaustive_omitted_patterns", + }; + + use rustc_errors::DecorateLint; + let mut err = cx.tcx.sess.struct_span_warn(arm.pat.span(), ""); + err.set_primary_message(decorator.msg()); + decorator.decorate_lint(&mut err); + err.emit(); + } + } + } + } + UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/print.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/print.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/print.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_build/src/thir/print.rs 2023-12-21 16:55:28.000000000 +0000 @@ -692,7 +692,7 @@ } PatKind::Deref { subpattern } => { print_indented!(self, "Deref { ", depth_lvl + 1); - print_indented!(self, "subpattern: ", depth_lvl + 2); + print_indented!(self, "subpattern:", depth_lvl + 2); self.print_pat(subpattern, depth_lvl + 2); print_indented!(self, "}", depth_lvl + 1); } @@ -701,6 +701,13 @@ print_indented!(self, format!("value: {:?}", value), depth_lvl + 2); print_indented!(self, "}", depth_lvl + 1); } + PatKind::InlineConstant { def, subpattern } => { + print_indented!(self, "InlineConstant {", depth_lvl + 1); + print_indented!(self, format!("def: {:?}", def), depth_lvl + 2); + print_indented!(self, "subpattern:", depth_lvl + 2); + self.print_pat(subpattern, depth_lvl + 2); + print_indented!(self, "}", depth_lvl + 1); + } PatKind::Range(pat_range) => { print_indented!(self, format!("Range ( {:?} )", pat_range), depth_lvl + 1); } @@ -757,6 +764,9 @@ print_indented!(self, "]", depth_lvl + 2); print_indented!(self, "}", depth_lvl + 1); } + PatKind::Error(_) => { + print_indented!(self, "Error", depth_lvl + 1); + } } print_indented!(self, "}", depth_lvl); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,13 +3,10 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start polonius-engine = "0.13.0" regex = "1" -smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -tracing = "0.1" rustc_ast = { path = "../rustc_ast" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } @@ -20,5 +17,8 @@ rustc_macros = { path = "../rustc_macros" 
} rustc_middle = { path = "../rustc_middle" } rustc_serialize = { path = "../rustc_serialize" } -rustc_target = { path = "../rustc_target" } rustc_span = { path = "../rustc_span" } +rustc_target = { path = "../rustc_target" } +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/debuginfo.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/debuginfo.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/debuginfo.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/debuginfo.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use rustc_index::bit_set::BitSet; +use rustc_middle::mir::visit::*; +use rustc_middle::mir::*; + +/// Return the set of locals that appear in debuginfo. +pub fn debuginfo_locals(body: &Body<'_>) -> BitSet<Local> { + let mut visitor = DebuginfoLocals(BitSet::new_empty(body.local_decls.len())); + for debuginfo in body.var_debug_info.iter() { + visitor.visit_var_debug_info(debuginfo); + } + visitor.0 +} + +struct DebuginfoLocals(BitSet<Local>); + +impl Visitor<'_> for DebuginfoLocals { + fn visit_local(&mut self, local: Local, _: PlaceContext, _: Location) { + self.0.insert(local); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/drop_flag_effects.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/drop_flag_effects.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/drop_flag_effects.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/drop_flag_effects.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,6 @@ use crate::elaborate_drops::DropFlagState; use rustc_middle::mir::{self, Body, Location, Terminator, TerminatorKind}; -use rustc_middle::ty::{self, TyCtxt}; +use rustc_middle::ty::TyCtxt; use rustc_target::abi::VariantIdx; use super::indexes::MovePathIndex; @@ -55,60 +55,6 @@ ) where F: FnMut(MovePathIndex), { - #[inline] - fn is_terminal_path<'tcx>( - tcx: TyCtxt<'tcx>, - body: &Body<'tcx>, - move_data: &MoveData<'tcx>, - path: MovePathIndex, - ) -> bool { - let place = move_data.move_paths[path].place; - - // When enumerating the child fragments of a path, don't recurse into - // paths (1.) past arrays, slices, and pointers, nor (2.) into a type - // that implements `Drop`. - // - // Places behind references or arrays are not tracked by elaboration - // and are always assumed to be initialized when accessible. As - // references and indexes can be reseated, trying to track them can - // only lead to trouble. - // - // Places behind ADT's with a Drop impl are not tracked by - // elaboration since they can never have a drop-flag state that - // differs from that of the parent with the Drop impl. - // - // In both cases, the contents can only be accessed if and only if - // their parents are initialized. This implies for example that there - // is no need to maintain separate drop flags to track such state. - // - // FIXME: we have to do something for moving slice patterns. - let ty = place.ty(body, tcx).ty; - match ty.kind() { - ty::Adt(def, _) if (def.has_dtor(tcx) && !def.is_box()) || def.is_union() => { - debug!( - "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} Drop => true", - place, ty - ); - true - } - ty::Array(..)
=> { - debug!( - "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} => false", - place, ty - ); - false - } - ty::Slice(..) | ty::Ref(..) | ty::RawPtr(..) => { - debug!( - "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} refd => true", - place, ty - ); - true - } - _ => false, - } - } - fn on_all_children_bits<'tcx, F>( tcx: TyCtxt<'tcx>, body: &Body<'tcx>, @@ -120,10 +66,6 @@ { each_child(move_path_index); - if is_terminal_path(tcx, body, move_data, move_path_index) { - return; - } - let mut next_child_index = move_data.move_paths[move_path_index].first_child; while let Some(child_index) = next_child_index { on_all_children_bits(tcx, body, move_data, child_index, each_child); @@ -133,29 +75,6 @@ on_all_children_bits(tcx, body, move_data, move_path_index, &mut each_child); } -pub fn on_all_drop_children_bits<'tcx, F>( - tcx: TyCtxt<'tcx>, - body: &Body<'tcx>, - ctxt: &MoveDataParamEnv<'tcx>, - path: MovePathIndex, - mut each_child: F, -) where - F: FnMut(MovePathIndex), -{ - on_all_children_bits(tcx, body, &ctxt.move_data, path, |child| { - let place = &ctxt.move_data.move_paths[path].place; - let ty = place.ty(body, tcx).ty; - debug!("on_all_drop_children_bits({:?}, {:?} : {:?})", path, place, ty); - - let erased_ty = tcx.erase_regions(ty); - if erased_ty.needs_drop(tcx, ctxt.param_env) { - each_child(child); - } else { - debug!("on_all_drop_children_bits - skipping") - } - }) -} - pub fn drop_flag_effects_for_function_entry<'tcx, F>( tcx: TyCtxt<'tcx>, body: &Body<'tcx>, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/elaborate_drops.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/elaborate_drops.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/elaborate_drops.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/elaborate_drops.rs 2023-12-21 16:55:28.000000000 +0000 @@ -860,13 +860,13 @@ let ty = self.place_ty(self.place); match ty.kind() { ty::Closure(_, args) => self.open_drop_for_tuple(&args.as_closure().upvar_tys()), - // Note that `elaborate_drops` only drops the upvars of a generator, + // Note that `elaborate_drops` only drops the upvars of a coroutine, // and this is ok because `open_drop` here can only be reached - // within that own generator's resume function. + // within that own coroutine's resume function. // This should only happen for the self argument on the resume function. - // It effectively only contains upvars until the generator transformation runs. - // See librustc_body/transform/generator.rs for more details. - ty::Generator(_, args, _) => self.open_drop_for_tuple(&args.as_generator().upvar_tys()), + // It effectively only contains upvars until the coroutine transformation runs. + // See librustc_body/transform/coroutine.rs for more details. + ty::Coroutine(_, args, _) => self.open_drop_for_tuple(&args.as_coroutine().upvar_tys()), ty::Tuple(fields) => self.open_drop_for_tuple(fields), ty::Adt(def, args) => self.open_drop_for_adt(*def, args), ty::Dynamic(..) 
=> self.complete_drop(self.succ, self.unwind), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/framework/graphviz.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/framework/graphviz.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/framework/graphviz.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/framework/graphviz.rs 2023-12-21 16:55:28.000000000 +0000 @@ -267,7 +267,7 @@ mir::TerminatorKind::Yield { resume, resume_arg, .. } => { self.write_row(w, "", "(on yield resume)", |this, w, fmt| { - let state_on_generator_drop = this.results.get().clone(); + let state_on_coroutine_drop = this.results.get().clone(); this.results.apply_custom_effect(|analysis, state| { analysis.apply_call_return_effect( state, @@ -283,7 +283,7 @@ fmt = fmt, diff = diff_pretty( this.results.get(), - &state_on_generator_drop, + &state_on_coroutine_drop, this.results.analysis() ), ) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/framework/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/framework/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/framework/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/framework/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -48,7 +48,7 @@ pub use self::cursor::{AnalysisResults, ResultsClonedCursor, ResultsCursor, ResultsRefCursor}; pub use self::direction::{Backward, Direction, Forward}; pub use self::engine::{Engine, EntrySets, Results, ResultsCloned}; -pub use self::lattice::{JoinSemiLattice, MaybeReachable, MeetSemiLattice}; +pub use self::lattice::{JoinSemiLattice, MaybeReachable}; pub use self::visitor::{visit_results, ResultsVisitable, ResultsVisitor}; /// Analysis domains are all bitsets of various kinds. This trait holds @@ -114,7 +114,7 @@ // // FIXME: For backward dataflow analyses, the initial state should be applied to every basic // block where control flow could exit the MIR body (e.g., those terminated with `return` or - // `resume`). It's not obvious how to handle `yield` points in generators, however. + // `resume`). It's not obvious how to handle `yield` points in coroutines, however. fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,7 +10,7 @@ /// /// At present, this is used as a very limited form of alias analysis. For example, /// `MaybeBorrowedLocals` is used to compute which locals are live during a yield expression for -/// immovable generators. +/// immovable coroutines. #[derive(Clone, Copy)] pub struct MaybeBorrowedLocals; @@ -141,7 +141,7 @@ | TerminatorKind::Call { .. } | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::Goto { .. } | TerminatorKind::InlineAsm { .. 
} | TerminatorKind::UnwindResume diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/initialized.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/initialized.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/initialized.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/initialized.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,7 +10,7 @@ use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex}; use crate::on_lookup_result_bits; use crate::MoveDataParamEnv; -use crate::{drop_flag_effects, on_all_children_bits, on_all_drop_children_bits}; +use crate::{drop_flag_effects, on_all_children_bits}; use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis, MaybeReachable}; /// `MaybeInitializedPlaces` tracks all places that might be @@ -72,7 +72,7 @@ ) -> bool { if let LookupResult::Exact(path) = self.move_data().rev_lookup.find(place.as_ref()) { let mut maybe_live = false; - on_all_drop_children_bits(self.tcx, self.body, self.mdpe, path, |child| { + on_all_children_bits(self.tcx, self.body, self.move_data(), path, |child| { maybe_live |= state.contains(child); }); !maybe_live @@ -690,9 +690,13 @@ if let mir::StatementKind::StorageDead(local) = stmt.kind { // End inits for StorageDead, so that an immutable variable can // be reinitialized on the next iteration of the loop. - let move_path_index = rev_lookup.find_local(local); - debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]); - trans.kill_all(init_path_map[move_path_index].iter().copied()); + if let Some(move_path_index) = rev_lookup.find_local(local) { + debug!( + "clears the ever initialized status of {:?}", + init_path_map[move_path_index] + ); + trans.kill_all(init_path_map[move_path_index].iter().copied()); + } } } @@ -763,9 +767,9 @@ ty::Adt(def, _) => return Some((*discriminated, *def)), // `Rvalue::Discriminant` is also used to get the active yield point for a - // generator, but we do not need edge-specific effects in that case. This may + // coroutine, but we do not need edge-specific effects in that case. This may // change in the future. - ty::Generator(..) => return None, + ty::Coroutine(..) => return None, t => bug!("`discriminant` called on unexpected type {:?}", t), } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/liveness.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/liveness.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/liveness.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/liveness.rs 2023-12-21 16:55:28.000000000 +0000 @@ -98,7 +98,7 @@ { fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) { if let PlaceContext::MutatingUse(MutatingUseContext::Yield) = context { - // The resume place is evaluated and assigned to only after generator resumes, so its + // The resume place is evaluated and assigned to only after coroutine resumes, so its // effect is handled separately in `call_resume_effect`. 
return; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs 2023-12-21 16:55:28.000000000 +0000 @@ -268,7 +268,7 @@ // Note that we do *not* gen the `resume_arg` of `Yield` terminators. The reason for // that is that a `yield` will return from the function, and `resume_arg` is written - // only when the generator is later resumed. Unlike `Call`, this doesn't require the + // only when the coroutine is later resumed. Unlike `Call`, this doesn't require the // place to have storage *before* the yield, only after. TerminatorKind::Yield { .. } => {} @@ -296,7 +296,7 @@ | TerminatorKind::Drop { .. } | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::Goto { .. } | TerminatorKind::UnwindResume | TerminatorKind::Return @@ -333,7 +333,7 @@ | TerminatorKind::Drop { .. } | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::Goto { .. } | TerminatorKind::UnwindResume | TerminatorKind::Return diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -23,8 +23,7 @@ pub use self::drop_flag_effects::{ drop_flag_effects_for_function_entry, drop_flag_effects_for_location, - move_path_children_matching, on_all_children_bits, on_all_drop_children_bits, - on_lookup_result_bits, + move_path_children_matching, on_all_children_bits, on_lookup_result_bits, }; pub use self::framework::{ fmt, graphviz, lattice, visit_results, Analysis, AnalysisDomain, AnalysisResults, Backward, @@ -35,6 +34,7 @@ use self::move_paths::MoveData; +pub mod debuginfo; pub mod drop_flag_effects; pub mod elaborate_drops; mod errors; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/move_paths/builder.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/move_paths/builder.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/move_paths/builder.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/move_paths/builder.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,58 +1,66 @@ use rustc_index::IndexVec; -use rustc_middle::mir::tcx::RvalueInitializationState; +use rustc_middle::mir::tcx::{PlaceTy, RvalueInitializationState}; use rustc_middle::mir::*; -use rustc_middle::ty::{self, TyCtxt}; +use rustc_middle::ty::{self, Ty, TyCtxt}; use smallvec::{smallvec, SmallVec}; use std::mem; use super::abs_domain::Lift; -use super::IllegalMoveOriginKind::*; -use super::{Init, InitIndex, InitKind, InitLocation, LookupResult, MoveError}; +use super::{Init, InitIndex, InitKind, InitLocation, LookupResult}; use super::{ LocationMap, MoveData, MoveOut, MoveOutIndex, MovePath, MovePathIndex, MovePathLookup, }; -struct MoveDataBuilder<'a, 'tcx> { +struct MoveDataBuilder<'a, 'tcx, F> 
{ body: &'a Body<'tcx>, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, data: MoveData<'tcx>, - errors: Vec<(Place<'tcx>, MoveError<'tcx>)>, + filter: F, } -impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> { - fn new(body: &'a Body<'tcx>, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Self { +impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { + fn new( + body: &'a Body<'tcx>, + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + filter: F, + ) -> Self { let mut move_paths = IndexVec::new(); let mut path_map = IndexVec::new(); let mut init_path_map = IndexVec::new(); + let locals = body + .local_decls + .iter_enumerated() + .map(|(i, l)| { + if l.is_deref_temp() { + return None; + } + if filter(l.ty) { + Some(new_move_path( + &mut move_paths, + &mut path_map, + &mut init_path_map, + None, + Place::from(i), + )) + } else { + None + } + }) + .collect(); + MoveDataBuilder { body, tcx, param_env, - errors: Vec::new(), data: MoveData { moves: IndexVec::new(), loc_map: LocationMap::new(body), rev_lookup: MovePathLookup { - locals: body - .local_decls - .iter_enumerated() - .map(|(i, l)| { - if l.is_deref_temp() { - MovePathIndex::MAX - } else { - Self::new_move_path( - &mut move_paths, - &mut path_map, - &mut init_path_map, - None, - Place::from(i), - ) - } - }) - .collect(), + locals, projections: Default::default(), un_derefer: Default::default(), }, @@ -62,35 +70,42 @@ init_loc_map: LocationMap::new(body), init_path_map, }, + filter, } } +} - fn new_move_path( - move_paths: &mut IndexVec>, - path_map: &mut IndexVec>, - init_path_map: &mut IndexVec>, - parent: Option, - place: Place<'tcx>, - ) -> MovePathIndex { - let move_path = - move_paths.push(MovePath { next_sibling: None, first_child: None, parent, place }); +fn new_move_path<'tcx>( + move_paths: &mut IndexVec>, + path_map: &mut IndexVec>, + init_path_map: &mut IndexVec>, + parent: Option, + place: Place<'tcx>, +) -> MovePathIndex { + let move_path = + move_paths.push(MovePath { next_sibling: None, first_child: None, parent, place }); + + if let Some(parent) = parent { + let next_sibling = mem::replace(&mut move_paths[parent].first_child, Some(move_path)); + move_paths[move_path].next_sibling = next_sibling; + } - if let Some(parent) = parent { - let next_sibling = mem::replace(&mut move_paths[parent].first_child, Some(move_path)); - move_paths[move_path].next_sibling = next_sibling; - } + let path_map_ent = path_map.push(smallvec![]); + assert_eq!(path_map_ent, move_path); - let path_map_ent = path_map.push(smallvec![]); - assert_eq!(path_map_ent, move_path); + let init_path_map_ent = init_path_map.push(smallvec![]); + assert_eq!(init_path_map_ent, move_path); - let init_path_map_ent = init_path_map.push(smallvec![]); - assert_eq!(init_path_map_ent, move_path); + move_path +} - move_path - } +enum MovePathResult { + Path(MovePathIndex), + Union(MovePathIndex), + Error, } -impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { +impl<'b, 'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> Gatherer<'b, 'a, 'tcx, F> { /// This creates a MovePath for a given place, returning an `MovePathError` /// if that place can't be moved from. /// @@ -98,11 +113,13 @@ /// problematic for borrowck. /// /// Maybe we should have separate "borrowck" and "moveck" modes. 
- fn move_path_for(&mut self, place: Place<'tcx>) -> Result> { + fn move_path_for(&mut self, place: Place<'tcx>) -> MovePathResult { let data = &mut self.builder.data; debug!("lookup({:?})", place); - let mut base = data.rev_lookup.find_local(place.local); + let Some(mut base) = data.rev_lookup.find_local(place.local) else { + return MovePathResult::Error; + }; // The move path index of the first union that we find. Once this is // some we stop creating child move paths, since moves from unions @@ -118,12 +135,7 @@ match elem { ProjectionElem::Deref => match place_ty.kind() { ty::Ref(..) | ty::RawPtr(..) => { - return Err(MoveError::cannot_move_out_of( - self.loc, - BorrowedContent { - target_place: place_ref.project_deeper(&[elem], tcx), - }, - )); + return MovePathResult::Error; } ty::Adt(adt, _) => { if !adt.is_box() { @@ -143,8 +155,8 @@ | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Generator(_, _, _) - | ty::GeneratorWitness(..) + | ty::Coroutine(_, _, _) + | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) | ty::Alias(_, _) @@ -159,16 +171,13 @@ ProjectionElem::Field(_, _) => match place_ty.kind() { ty::Adt(adt, _) => { if adt.has_dtor(tcx) { - return Err(MoveError::cannot_move_out_of( - self.loc, - InteriorOfTypeWithDestructor { container_ty: place_ty }, - )); + return MovePathResult::Error; } if adt.is_union() { union_path.get_or_insert(base); } } - ty::Closure(_, _) | ty::Generator(_, _, _) | ty::Tuple(_) => (), + ty::Closure(_, _) | ty::Coroutine(_, _, _) | ty::Tuple(_) => (), ty::Bool | ty::Char | ty::Int(_) @@ -183,7 +192,7 @@ | ty::FnDef(_, _) | ty::FnPtr(_) | ty::Dynamic(_, _, _) - | ty::GeneratorWitness(..) + | ty::CoroutineWitness(..) | ty::Never | ty::Alias(_, _) | ty::Param(_) @@ -197,33 +206,15 @@ ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. } => { match place_ty.kind() { ty::Slice(_) => { - return Err(MoveError::cannot_move_out_of( - self.loc, - InteriorOfSliceOrArray { - ty: place_ty, - is_index: matches!(elem, ProjectionElem::Index(..)), - }, - )); + return MovePathResult::Error; } ty::Array(_, _) => (), _ => bug!("Unexpected type {:#?}", place_ty.is_array()), } } ProjectionElem::Index(_) => match place_ty.kind() { - ty::Array(..) => { - return Err(MoveError::cannot_move_out_of( - self.loc, - InteriorOfSliceOrArray { ty: place_ty, is_index: true }, - )); - } - ty::Slice(_) => { - return Err(MoveError::cannot_move_out_of( - self.loc, - InteriorOfSliceOrArray { - ty: place_ty, - is_index: matches!(elem, ProjectionElem::Index(..)), - }, - )); + ty::Array(..) | ty::Slice(_) => { + return MovePathResult::Error; } _ => bug!("Unexpected type {place_ty:#?}"), }, @@ -235,11 +226,15 @@ | ProjectionElem::Subtype(_) | ProjectionElem::Downcast(_, _) => (), } + let elem_ty = PlaceTy::from_ty(place_ty).projection_ty(tcx, elem).ty; + if !(self.builder.filter)(elem_ty) { + return MovePathResult::Error; + } if union_path.is_none() { // inlined from add_move_path because of a borrowck conflict with the iterator base = *data.rev_lookup.projections.entry((base, elem.lift())).or_insert_with(|| { - MoveDataBuilder::new_move_path( + new_move_path( &mut data.move_paths, &mut data.path_map, &mut data.init_path_map, @@ -252,9 +247,9 @@ if let Some(base) = union_path { // Move out of union - always move the entire union. - Err(MoveError::UnionMove { path: base }) + MovePathResult::Union(base) } else { - Ok(base) + MovePathResult::Path(base) } } @@ -270,13 +265,7 @@ .. 
} = self.builder; *rev_lookup.projections.entry((base, elem.lift())).or_insert_with(move || { - MoveDataBuilder::new_move_path( - move_paths, - path_map, - init_path_map, - Some(base), - mk_place(*tcx), - ) + new_move_path(move_paths, path_map, init_path_map, Some(base), mk_place(*tcx)) }) } @@ -287,11 +276,8 @@ } } -pub type MoveDat<'tcx> = - Result, (MoveData<'tcx>, Vec<(Place<'tcx>, MoveError<'tcx>)>)>; - -impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> { - fn finalize(self) -> MoveDat<'tcx> { +impl<'a, 'tcx, F> MoveDataBuilder<'a, 'tcx, F> { + fn finalize(self) -> MoveData<'tcx> { debug!("{}", { debug!("moves for {:?}:", self.body.span); for (j, mo) in self.data.moves.iter_enumerated() { @@ -304,7 +290,7 @@ "done dumping moves" }); - if self.errors.is_empty() { Ok(self.data) } else { Err((self.data, self.errors)) } + self.data } } @@ -312,8 +298,9 @@ body: &Body<'tcx>, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, -) -> MoveDat<'tcx> { - let mut builder = MoveDataBuilder::new(body, tcx, param_env); + filter: impl Fn(Ty<'tcx>) -> bool, +) -> MoveData<'tcx> { + let mut builder = MoveDataBuilder::new(body, tcx, param_env, filter); builder.gather_args(); @@ -330,20 +317,20 @@ builder.finalize() } -impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> { +impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { fn gather_args(&mut self) { for arg in self.body.args_iter() { - let path = self.data.rev_lookup.find_local(arg); + if let Some(path) = self.data.rev_lookup.find_local(arg) { + let init = self.data.inits.push(Init { + path, + kind: InitKind::Deep, + location: InitLocation::Argument(arg), + }); - let init = self.data.inits.push(Init { - path, - kind: InitKind::Deep, - location: InitLocation::Argument(arg), - }); + debug!("gather_args: adding init {:?} of {:?} for argument {:?}", init, path, arg); - debug!("gather_args: adding init {:?} of {:?} for argument {:?}", init, path, arg); - - self.data.init_path_map[path].push(init); + self.data.init_path_map[path].push(init); + } } } @@ -358,12 +345,12 @@ } } -struct Gatherer<'b, 'a, 'tcx> { - builder: &'b mut MoveDataBuilder<'a, 'tcx>, +struct Gatherer<'b, 'a, 'tcx, F> { + builder: &'b mut MoveDataBuilder<'a, 'tcx, F>, loc: Location, } -impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { +impl<'b, 'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> Gatherer<'b, 'a, 'tcx, F> { fn gather_statement(&mut self, stmt: &Statement<'tcx>) { match &stmt.kind { StatementKind::Assign(box (place, Rvalue::CopyForDeref(reffed))) => { @@ -454,7 +441,7 @@ | TerminatorKind::Return | TerminatorKind::UnwindResume | TerminatorKind::UnwindTerminate(_) - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::Unreachable | TerminatorKind::Drop { .. } => {} @@ -546,13 +533,12 @@ let base_place = Place { local: place.local, projection: self.builder.tcx.mk_place_elems(base) }; let base_path = match self.move_path_for(base_place) { - Ok(path) => path, - Err(MoveError::UnionMove { path }) => { + MovePathResult::Path(path) => path, + MovePathResult::Union(path) => { self.record_move(place, path); return; } - Err(error @ MoveError::IllegalMove { .. }) => { - self.builder.errors.push((base_place, error)); + MovePathResult::Error => { return; } }; @@ -572,10 +558,10 @@ } } else { match self.move_path_for(place) { - Ok(path) | Err(MoveError::UnionMove { path }) => self.record_move(place, path), - Err(error @ MoveError::IllegalMove { .. 
}) => { - self.builder.errors.push((place, error)); + MovePathResult::Path(path) | MovePathResult::Union(path) => { + self.record_move(place, path) } + MovePathResult::Error => {} }; } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/move_paths/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/move_paths/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/move_paths/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/move_paths/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,3 @@ -use crate::move_paths::builder::MoveDat; use crate::un_derefer::UnDerefer; use rustc_data_structures::fx::FxHashMap; use rustc_index::{IndexSlice, IndexVec}; @@ -291,7 +290,7 @@ /// Tables mapping from a place to its MovePathIndex. #[derive(Debug)] pub struct MovePathLookup<'tcx> { - locals: IndexVec, + locals: IndexVec>, /// projections are made from a base-place and a projection /// elem. The base-place will have a unique MovePathIndex; we use @@ -318,7 +317,9 @@ // unknown place, but will rather return the nearest available // parent. pub fn find(&self, place: PlaceRef<'tcx>) -> LookupResult { - let mut result = self.find_local(place.local); + let Some(mut result) = self.find_local(place.local) else { + return LookupResult::Parent(None); + }; for (_, elem) in self.un_derefer.iter_projections(place) { if let Some(&subpath) = self.projections.get(&(result, elem.lift())) { @@ -332,7 +333,7 @@ } #[inline] - pub fn find_local(&self, local: Local) -> MovePathIndex { + pub fn find_local(&self, local: Local) -> Option { self.locals[local] } @@ -340,46 +341,8 @@ /// `MovePathIndex`es. pub fn iter_locals_enumerated( &self, - ) -> impl DoubleEndedIterator + ExactSizeIterator + '_ { - self.locals.iter_enumerated().map(|(l, &idx)| (l, idx)) - } -} - -#[derive(Debug)] -pub struct IllegalMoveOrigin<'tcx> { - pub location: Location, - pub kind: IllegalMoveOriginKind<'tcx>, -} - -#[derive(Debug)] -pub enum IllegalMoveOriginKind<'tcx> { - /// Illegal move due to attempt to move from behind a reference. - BorrowedContent { - /// The place the reference refers to: if erroneous code was trying to - /// move from `(*x).f` this will be `*x`. - target_place: Place<'tcx>, - }, - - /// Illegal move due to attempt to move from field of an ADT that - /// implements `Drop`. Rust maintains invariant that all `Drop` - /// ADT's remain fully-initialized so that user-defined destructor - /// can safely read from all of the ADT's fields. - InteriorOfTypeWithDestructor { container_ty: Ty<'tcx> }, - - /// Illegal move due to attempt to move out of a slice or array. 
- InteriorOfSliceOrArray { ty: Ty<'tcx>, is_index: bool }, -} - -#[derive(Debug)] -pub enum MoveError<'tcx> { - IllegalMove { cannot_move_out_of: IllegalMoveOrigin<'tcx> }, - UnionMove { path: MovePathIndex }, -} - -impl<'tcx> MoveError<'tcx> { - fn cannot_move_out_of(location: Location, kind: IllegalMoveOriginKind<'tcx>) -> Self { - let origin = IllegalMoveOrigin { location, kind }; - MoveError::IllegalMove { cannot_move_out_of: origin } + ) -> impl DoubleEndedIterator + '_ { + self.locals.iter_enumerated().filter_map(|(l, &idx)| Some((l, idx?))) } } @@ -388,8 +351,9 @@ body: &Body<'tcx>, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, - ) -> MoveDat<'tcx> { - builder::gather_moves(body, tcx, param_env) + filter: impl Fn(Ty<'tcx>) -> bool, + ) -> MoveData<'tcx> { + builder::gather_moves(body, tcx, param_env, filter) } /// For the move path `mpi`, returns the root local variable (if any) that starts the path. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/rustc_peek.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/rustc_peek.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/rustc_peek.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/rustc_peek.rs 2023-12-21 16:55:28.000000000 +0000 @@ -34,7 +34,7 @@ } let param_env = tcx.param_env(def_id); - let move_data = MoveData::gather_moves(body, tcx, param_env).unwrap(); + let move_data = MoveData::gather_moves(&body, tcx, param_env, |_| true); let mdpe = MoveDataParamEnv { move_data, param_env }; if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_maybe_init).is_some() { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/value_analysis.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/value_analysis.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/value_analysis.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_dataflow/src/value_analysis.rs 2023-12-21 16:55:28.000000000 +0000 @@ -274,7 +274,7 @@ | TerminatorKind::Return | TerminatorKind::Unreachable | TerminatorKind::Assert { .. } - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } => { // These terminators have no effect on the analysis. @@ -463,7 +463,19 @@ } } -impl State { +impl State { + pub fn new(init: V, map: &Map) -> State { + let values = IndexVec::from_elem_n(init, map.value_count); + State(StateData::Reachable(values)) + } + + pub fn all(&self, f: impl Fn(&V) -> bool) -> bool { + match self.0 { + StateData::Unreachable => true, + StateData::Reachable(ref values) => values.iter().all(f), + } + } + pub fn is_reachable(&self) -> bool { matches!(&self.0, StateData::Reachable(_)) } @@ -472,7 +484,10 @@ self.0 = StateData::Unreachable; } - pub fn flood_all(&mut self) { + pub fn flood_all(&mut self) + where + V: HasTop, + { self.flood_all_with(V::TOP) } @@ -481,28 +496,52 @@ values.raw.fill(value); } + /// Assign `value` to all places that are contained in `place` or may alias one. 
pub fn flood_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) { - let StateData::Reachable(values) = &mut self.0 else { return }; - map.for_each_aliasing_place(place, None, &mut |vi| { - values[vi] = value.clone(); - }); + self.flood_with_tail_elem(place, None, map, value) } - pub fn flood(&mut self, place: PlaceRef<'_>, map: &Map) { + /// Assign `TOP` to all places that are contained in `place` or may alias one. + pub fn flood(&mut self, place: PlaceRef<'_>, map: &Map) + where + V: HasTop, + { self.flood_with(place, map, V::TOP) } + /// Assign `value` to the discriminant of `place` and all places that may alias it. pub fn flood_discr_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) { - let StateData::Reachable(values) = &mut self.0 else { return }; - map.for_each_aliasing_place(place, Some(TrackElem::Discriminant), &mut |vi| { - values[vi] = value.clone(); - }); + self.flood_with_tail_elem(place, Some(TrackElem::Discriminant), map, value) } - pub fn flood_discr(&mut self, place: PlaceRef<'_>, map: &Map) { + /// Assign `TOP` to the discriminant of `place` and all places that may alias it. + pub fn flood_discr(&mut self, place: PlaceRef<'_>, map: &Map) + where + V: HasTop, + { self.flood_discr_with(place, map, V::TOP) } + /// This method is the most general version of the `flood_*` method. + /// + /// Assign `value` on the given place and all places that may alias it. In particular, when + /// the given place has a variant downcast, we invoke the function on all the other variants. + /// + /// `tail_elem` allows to support discriminants that are not a place in MIR, but that we track + /// as such. + pub fn flood_with_tail_elem( + &mut self, + place: PlaceRef<'_>, + tail_elem: Option, + map: &Map, + value: V, + ) { + let StateData::Reachable(values) = &mut self.0 else { return }; + map.for_each_aliasing_place(place, tail_elem, &mut |vi| { + values[vi] = value.clone(); + }); + } + /// Low-level method that assigns to a place. /// This does nothing if the place is not tracked. /// @@ -553,7 +592,10 @@ } /// Helper method to interpret `target = result`. - pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlace, map: &Map) { + pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlace, map: &Map) + where + V: HasTop, + { self.flood(target, map); if let Some(target) = map.find(target) { self.insert_idx(target, result, map); @@ -561,36 +603,93 @@ } /// Helper method for assignments to a discriminant. - pub fn assign_discr(&mut self, target: PlaceRef<'_>, result: ValueOrPlace, map: &Map) { + pub fn assign_discr(&mut self, target: PlaceRef<'_>, result: ValueOrPlace, map: &Map) + where + V: HasTop, + { self.flood_discr(target, map); if let Some(target) = map.find_discr(target) { self.insert_idx(target, result, map); } } + /// Retrieve the value stored for a place, or `None` if it is not tracked. + pub fn try_get(&self, place: PlaceRef<'_>, map: &Map) -> Option { + let place = map.find(place)?; + self.try_get_idx(place, map) + } + + /// Retrieve the discriminant stored for a place, or `None` if it is not tracked. + pub fn try_get_discr(&self, place: PlaceRef<'_>, map: &Map) -> Option { + let place = map.find_discr(place)?; + self.try_get_idx(place, map) + } + + /// Retrieve the slice length stored for a place, or `None` if it is not tracked. 
+ pub fn try_get_len(&self, place: PlaceRef<'_>, map: &Map) -> Option { + let place = map.find_len(place)?; + self.try_get_idx(place, map) + } + + /// Retrieve the value stored for a place index, or `None` if it is not tracked. + pub fn try_get_idx(&self, place: PlaceIndex, map: &Map) -> Option { + match &self.0 { + StateData::Reachable(values) => { + map.places[place].value_index.map(|v| values[v].clone()) + } + StateData::Unreachable => None, + } + } + /// Retrieve the value stored for a place, or ⊤ if it is not tracked. - pub fn get(&self, place: PlaceRef<'_>, map: &Map) -> V { - map.find(place).map(|place| self.get_idx(place, map)).unwrap_or(V::TOP) + /// + /// This method returns ⊥ if the place is tracked and the state is unreachable. + pub fn get(&self, place: PlaceRef<'_>, map: &Map) -> V + where + V: HasBottom + HasTop, + { + match &self.0 { + StateData::Reachable(_) => self.try_get(place, map).unwrap_or(V::TOP), + // Because this is unreachable, we can return any value we want. + StateData::Unreachable => V::BOTTOM, + } } /// Retrieve the value stored for a place, or ⊤ if it is not tracked. - pub fn get_discr(&self, place: PlaceRef<'_>, map: &Map) -> V { - match map.find_discr(place) { - Some(place) => self.get_idx(place, map), - None => V::TOP, + /// + /// This method returns ⊥ the current state is unreachable. + pub fn get_discr(&self, place: PlaceRef<'_>, map: &Map) -> V + where + V: HasBottom + HasTop, + { + match &self.0 { + StateData::Reachable(_) => self.try_get_discr(place, map).unwrap_or(V::TOP), + // Because this is unreachable, we can return any value we want. + StateData::Unreachable => V::BOTTOM, } } /// Retrieve the value stored for a place, or ⊤ if it is not tracked. - pub fn get_len(&self, place: PlaceRef<'_>, map: &Map) -> V { - match map.find_len(place) { - Some(place) => self.get_idx(place, map), - None => V::TOP, + /// + /// This method returns ⊥ the current state is unreachable. + pub fn get_len(&self, place: PlaceRef<'_>, map: &Map) -> V + where + V: HasBottom + HasTop, + { + match &self.0 { + StateData::Reachable(_) => self.try_get_len(place, map).unwrap_or(V::TOP), + // Because this is unreachable, we can return any value we want. + StateData::Unreachable => V::BOTTOM, } } /// Retrieve the value stored for a place index, or ⊤ if it is not tracked. - pub fn get_idx(&self, place: PlaceIndex, map: &Map) -> V { + /// + /// This method returns ⊥ the current state is unreachable. + pub fn get_idx(&self, place: PlaceIndex, map: &Map) -> V + where + V: HasBottom + HasTop, + { match &self.0 { StateData::Reachable(values) => { map.places[place].value_index.map(|v| values[v].clone()).unwrap_or(V::TOP) @@ -685,8 +784,10 @@ // `elem1` is either `Some(Variant(i))` or `None`. while let Some((mut place, elem1, elem2, ty)) = worklist.pop_front() { // The user requires a bound on the number of created values. - if let Some(value_limit) = value_limit && self.value_count >= value_limit { - break + if let Some(value_limit) = value_limit + && self.value_count >= value_limit + { + break; } // Create a place for this projection. @@ -717,7 +818,9 @@ // Trim useless places. for opt_place in self.locals.iter_mut() { - if let Some(place) = *opt_place && self.inner_values[place].is_empty() { + if let Some(place) = *opt_place + && self.inner_values[place].is_empty() + { *opt_place = None; } } @@ -772,7 +875,7 @@ assert!(old.is_none()); // Allocate a value slot since it doesn't have one. 
- assert!( self.places[len].value_index.is_none() ); + assert!(self.places[len].value_index.is_none()); self.places[len].value_index = Some(self.value_count.into()); self.value_count += 1; } @@ -911,7 +1014,7 @@ ) { for sibling in self.children(parent) { let elem = self.places[sibling].proj_elem; - // Only invalidate variants and discriminant. Fields (for generators) are not + // Only invalidate variants and discriminant. Fields (for coroutines) are not // invalidated by assignment to a variant. if let Some(TrackElem::Variant(..) | TrackElem::Discriminant) = elem // Only invalidate the other variants, the current one is fine. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,30 +3,33 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] -itertools = "0.10.1" -smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } -tracing = "0.1" +# tidy-alphabetical-start either = "1" +itertools = "0.10.1" +rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } rustc_attr = { path = "../rustc_attr" } +rustc_const_eval = { path = "../rustc_const_eval" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } +rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } +rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } -rustc_const_eval = { path = "../rustc_const_eval" } rustc_mir_build = { path = "../rustc_mir_build" } rustc_mir_dataflow = { path = "../rustc_mir_dataflow" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } -rustc_span = { path = "../rustc_span" } -rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_macros = { path = "../rustc_macros" } +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +tracing = "0.1" +# tidy-alphabetical-end [dev-dependencies] +# tidy-alphabetical-start coverage_test_macros = { path = "src/coverage/test_macros" } +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs 2023-12-21 16:55:28.000000000 +0000 @@ -40,7 +40,7 @@ let body_abi = match body_ty.kind() { ty::FnDef(..) => body_ty.fn_sig(tcx).abi(), ty::Closure(..) => Abi::RustCall, - ty::Generator(..) => Abi::Rust, + ty::Coroutine(..) => Abi::Rust, _ => span_bug!(body.span, "unexpected body ty: {:?}", body_ty), }; let body_can_unwind = layout::fn_can_unwind(tcx, Some(def_id), body_abi); @@ -113,6 +113,6 @@ } // We may have invalidated some `cleanup` blocks so clean those up now. 
- super::simplify::remove_dead_blocks(tcx, body); + super::simplify::remove_dead_blocks(body); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_alignment.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_alignment.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_alignment.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_alignment.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,13 +1,12 @@ use crate::MirPass; -use rustc_hir::def_id::DefId; use rustc_hir::lang_items::LangItem; use rustc_index::IndexVec; use rustc_middle::mir::*; use rustc_middle::mir::{ interpret::Scalar, - visit::{PlaceContext, Visitor}, + visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}, }; -use rustc_middle::ty::{Ty, TyCtxt, TypeAndMut}; +use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, TypeAndMut}; use rustc_session::Session; pub struct CheckAlignment; @@ -30,7 +29,12 @@ let basic_blocks = body.basic_blocks.as_mut(); let local_decls = &mut body.local_decls; + let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id()); + // This pass inserts new blocks. Each insertion changes the Location for all + // statements/blocks after. Iterating or visiting the MIR in order would require updating + // our current location after every insertion. By iterating backwards, we dodge this issue: + // The only Locations that an insertion changes have already been handled. for block in (0..basic_blocks.len()).rev() { let block = block.into(); for statement_index in (0..basic_blocks[block].statements.len()).rev() { @@ -38,22 +42,19 @@ let statement = &basic_blocks[block].statements[statement_index]; let source_info = statement.source_info; - let mut finder = PointerFinder { - local_decls, - tcx, - pointers: Vec::new(), - def_id: body.source.def_id(), - }; - for (pointer, pointee_ty) in finder.find_pointers(statement) { - debug!("Inserting alignment check for {:?}", pointer.ty(&*local_decls, tcx).ty); + let mut finder = + PointerFinder { tcx, local_decls, param_env, pointers: Vec::new() }; + finder.visit_statement(statement, location); + for (local, ty) in finder.pointers { + debug!("Inserting alignment check for {:?}", ty); let new_block = split_block(basic_blocks, location); insert_alignment_check( tcx, local_decls, &mut basic_blocks[block], - pointer, - pointee_ty, + local, + ty, source_info, new_block, ); @@ -63,69 +64,71 @@ } } -impl<'tcx, 'a> PointerFinder<'tcx, 'a> { - fn find_pointers(&mut self, statement: &Statement<'tcx>) -> Vec<(Place<'tcx>, Ty<'tcx>)> { - self.pointers.clear(); - self.visit_statement(statement, Location::START); - core::mem::take(&mut self.pointers) - } -} - struct PointerFinder<'tcx, 'a> { - local_decls: &'a mut LocalDecls<'tcx>, tcx: TyCtxt<'tcx>, - def_id: DefId, + local_decls: &'a mut LocalDecls<'tcx>, + param_env: ParamEnv<'tcx>, pointers: Vec<(Place<'tcx>, Ty<'tcx>)>, } impl<'tcx, 'a> Visitor<'tcx> for PointerFinder<'tcx, 'a> { - fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { - if let Rvalue::AddressOf(..) = rvalue { - // Ignore dereferences inside of an AddressOf - return; + fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) { + // We want to only check reads and writes to Places, so we specifically exclude + // Borrows and AddressOf. 
+ match context { + PlaceContext::MutatingUse( + MutatingUseContext::Store + | MutatingUseContext::AsmOutput + | MutatingUseContext::Call + | MutatingUseContext::Yield + | MutatingUseContext::Drop, + ) => {} + PlaceContext::NonMutatingUse( + NonMutatingUseContext::Copy | NonMutatingUseContext::Move, + ) => {} + _ => { + return; + } } - self.super_rvalue(rvalue, location); - } - fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, _location: Location) { - if let PlaceContext::NonUse(_) = context { - return; - } if !place.is_indirect() { return; } + // Since Deref projections must come first and only once, the pointer for an indirect place + // is the Local that the Place is based on. let pointer = Place::from(place.local); - let pointer_ty = pointer.ty(&*self.local_decls, self.tcx).ty; + let pointer_ty = self.local_decls[place.local].ty; - // We only want to check unsafe pointers + // We only want to check places based on unsafe pointers if !pointer_ty.is_unsafe_ptr() { - trace!("Indirect, but not an unsafe ptr, not checking {:?}", pointer_ty); + trace!("Indirect, but not based on an unsafe ptr, not checking {:?}", place); return; } - let Some(pointee) = pointer_ty.builtin_deref(true) else { - debug!("Indirect but no builtin deref: {:?}", pointer_ty); + let pointee_ty = + pointer_ty.builtin_deref(true).expect("no builtin_deref for an unsafe pointer").ty; + // Ideally we'd support this in the future, but for now we are limited to sized types. + if !pointee_ty.is_sized(self.tcx, self.param_env) { + debug!("Unsafe pointer, but pointee is not known to be sized: {:?}", pointer_ty); return; - }; - let mut pointee_ty = pointee.ty; - if pointee_ty.is_array() || pointee_ty.is_slice() || pointee_ty.is_str() { - pointee_ty = pointee_ty.sequence_element_type(self.tcx); } - if !pointee_ty.is_sized(self.tcx, self.tcx.param_env_reveal_all_normalized(self.def_id)) { - debug!("Unsafe pointer, but unsized: {:?}", pointer_ty); + // Try to detect types we are sure have an alignment of 1 and skip the check + // We don't need to look for str and slices, we already rejected unsized types above + let element_ty = match pointee_ty.kind() { + ty::Array(ty, _) => *ty, + _ => pointee_ty, + }; + if [self.tcx.types.bool, self.tcx.types.i8, self.tcx.types.u8].contains(&element_ty) { + debug!("Trivially aligned place type: {:?}", pointee_ty); return; } - if [self.tcx.types.bool, self.tcx.types.i8, self.tcx.types.u8, self.tcx.types.str_] - .contains(&pointee_ty) - { - debug!("Trivially aligned pointee type: {:?}", pointer_ty); - return; - } + // Ensure that this place is based on an aligned pointer. + self.pointers.push((pointer, pointee_ty)); - self.pointers.push((pointer, pointee_ty)) + self.super_place(place, context, location); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_const_item_mutation.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_const_item_mutation.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_const_item_mutation.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_const_item_mutation.rs 2023-12-21 16:55:28.000000000 +0000 @@ -97,13 +97,15 @@ // so emitting a lint would be redundant. 
if !lhs.projection.is_empty() { if let Some(def_id) = self.is_const_item_without_destructor(lhs.local) - && let Some((lint_root, span, item)) = self.should_lint_const_item_usage(&lhs, def_id, loc) { - self.tcx.emit_spanned_lint( - CONST_ITEM_MUTATION, - lint_root, - span, - errors::ConstMutate::Modify { konst: item } - ); + && let Some((lint_root, span, item)) = + self.should_lint_const_item_usage(&lhs, def_id, loc) + { + self.tcx.emit_spanned_lint( + CONST_ITEM_MUTATION, + lint_root, + span, + errors::ConstMutate::Modify { konst: item }, + ); } } // We are looking for MIR of the form: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_packed_ref.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_packed_ref.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_packed_ref.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_packed_ref.rs 2023-12-21 16:55:28.000000000 +0000 @@ -46,9 +46,14 @@ // If we ever reach here it means that the generated derive // code is somehow doing an unaligned reference, which it // shouldn't do. - span_bug!(self.source_info.span, "builtin derive created an unaligned reference"); + span_bug!( + self.source_info.span, + "builtin derive created an unaligned reference" + ); } else { - self.tcx.sess.emit_err(errors::UnalignedPackedRef { span: self.source_info.span }); + self.tcx + .sess + .emit_err(errors::UnalignedPackedRef { span: self.source_info.span }); } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_unsafety.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_unsafety.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_unsafety.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/check_unsafety.rs 2023-12-21 16:55:28.000000000 +0000 @@ -56,7 +56,7 @@ | TerminatorKind::Drop { .. } | TerminatorKind::Yield { .. } | TerminatorKind::Assert { .. } - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::UnwindResume | TerminatorKind::UnwindTerminate(_) | TerminatorKind::Return @@ -128,7 +128,7 @@ ), } } - &AggregateKind::Closure(def_id, _) | &AggregateKind::Generator(def_id, _, _) => { + &AggregateKind::Closure(def_id, _) | &AggregateKind::Coroutine(def_id, _, _) => { let def_id = def_id.expect_local(); let UnsafetyCheckResult { violations, used_unsafe_blocks, .. } = self.tcx.unsafety_check_result(def_id); @@ -179,7 +179,7 @@ // Check the base local: it might be an unsafe-to-access static. We only check derefs of the // temporary holding the static pointer to avoid duplicate errors // . - if decl.internal && place.projection.first() == Some(&ProjectionElem::Deref) { + if place.projection.first() == Some(&ProjectionElem::Deref) { // If the projection root is an artificial local that we introduced when // desugaring `static`, give a more specific error message // (avoid the general "raw pointer" clause below, that would only be confusing). 
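The hunk above is easier to read with the surface-level pattern it targets in mind: an access to a `static mut` is lowered to MIR as a deref of a compiler-introduced temporary that holds the static's pointer, which is exactly the `Deref` projection the condition now keys on (only the `decl.internal` test is dropped). A rough Rust sketch of such code, for orientation only and not part of this patch (exact diagnostic wording may differ):

    static mut COUNTER: u32 = 0;

    fn bump() {
        // Rejected by the unsafety checker: use of a mutable static requires
        // an `unsafe` block. The access is lowered to `*tmp`, where `tmp` is
        // the artificial local holding a pointer to `COUNTER`; checking only
        // the deref of that temporary keeps the error from being reported twice.
        COUNTER += 1;
    }
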
@@ -540,8 +540,7 @@ && let BlockCheckMode::UnsafeBlock(_) = block.rules { true - } - else if let Some(sig) = tcx.hir().fn_sig_by_hir_id(*id) + } else if let Some(sig) = tcx.hir().fn_sig_by_hir_id(*id) && sig.header.is_unsafe() { true diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_debuginfo.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_debuginfo.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_debuginfo.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_debuginfo.rs 2023-12-21 16:55:28.000000000 +0000 @@ -55,7 +55,9 @@ let mut locals_to_debuginfo = BitSet::new_empty(body.local_decls.len()); for debuginfo in &body.var_debug_info { - if let VarDebugInfoContents::Place(p) = debuginfo.value && let Some(l) = p.as_local() { + if let VarDebugInfoContents::Place(p) = debuginfo.value + && let Some(l) = p.as_local() + { locals_to_debuginfo.insert(l); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_prop.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_prop.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_prop.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_prop.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,8 +2,6 @@ //! assertion failures use either::Right; - -use rustc_const_eval::const_eval::CheckAlignment; use rustc_const_eval::ReportErrorExt; use rustc_data_structures::fx::FxHashSet; use rustc_hir::def::DefKind; @@ -16,7 +14,7 @@ use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout}; use rustc_middle::ty::{self, GenericArgs, Instance, ParamEnv, Ty, TyCtxt, TypeVisitableExt}; use rustc_span::{def_id::DefId, Span}; -use rustc_target::abi::{self, Align, HasDataLayout, Size, TargetDataLayout}; +use rustc_target::abi::{self, HasDataLayout, Size, TargetDataLayout}; use rustc_target::spec::abi::Abi as CallAbi; use crate::dataflow_const_prop::Patch; @@ -84,11 +82,11 @@ return; } - // FIXME(welseywiser) const prop doesn't work on generators because of query cycles + // FIXME(welseywiser) const prop doesn't work on coroutines because of query cycles // computing their layout. - let is_generator = def_kind == DefKind::Generator; - if is_generator { - trace!("ConstProp skipped for generator {:?}", def_id); + let is_coroutine = def_kind == DefKind::Coroutine; + if is_coroutine { + trace!("ConstProp skipped for coroutine {:?}", def_id); return; } @@ -141,27 +139,14 @@ type MemoryKind = !; #[inline(always)] - fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> CheckAlignment { - // We do not check for alignment to avoid having to carry an `Align` - // in `ConstValue::Indirect`. 
- CheckAlignment::No + fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool { + false // no reason to enforce alignment } #[inline(always)] fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>, _layout: TyAndLayout<'tcx>) -> bool { false // for now, we don't enforce validity } - fn alignment_check_failed( - ecx: &InterpCx<'mir, 'tcx, Self>, - _has: Align, - _required: Align, - _check: CheckAlignment, - ) -> InterpResult<'tcx, ()> { - span_bug!( - ecx.cur_span(), - "`alignment_check_failed` called when no alignment check requested" - ) - } fn load_mir( _ecx: &InterpCx<'mir, 'tcx, Self>, @@ -455,6 +440,7 @@ // FIXME we need to revisit this for #67176 if rvalue.has_param() { + trace!("skipping, has param"); return None; } if !rvalue @@ -527,7 +513,7 @@ fn replace_with_const(&mut self, place: Place<'tcx>) -> Option> { // This will return None if the above `const_prop` invocation only "wrote" a - // type whose creation requires no write. E.g. a generator whose initial state + // type whose creation requires no write. E.g. a coroutine whose initial state // consists solely of uninitialized memory (so it doesn't capture any locals). let value = self.get_const(place)?; if !self.tcx.consider_optimizing(|| format!("ConstantPropagation - {value:?}")) { @@ -699,7 +685,9 @@ impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> { fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) { self.super_operand(operand, location); - if let Some(place) = operand.place() && let Some(value) = self.replace_with_const(place) { + if let Some(place) = operand.place() + && let Some(value) = self.replace_with_const(place) + { self.patch.before_effect.insert((location, place), value); } } @@ -721,7 +709,11 @@ fn visit_assign(&mut self, place: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) { self.super_assign(place, rvalue, location); - let Some(()) = self.check_rvalue(rvalue) else { return }; + let Some(()) = self.check_rvalue(rvalue) else { + trace!("rvalue check failed, removing const"); + Self::remove_const(&mut self.ecx, place.local); + return; + }; match self.ecx.machine.can_const_prop[place.local] { // Do nothing if the place is indirect. @@ -733,7 +725,10 @@ if let Rvalue::Use(Operand::Constant(c)) = rvalue && let Const::Val(..) = c.const_ { - trace!("skipping replace of Rvalue::Use({:?} because it is already a const", c); + trace!( + "skipping replace of Rvalue::Use({:?} because it is already a const", + c + ); } else if let Some(operand) = self.replace_with_const(*place) { self.patch.assignments.insert(location, operand); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_prop_lint.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_prop_lint.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_prop_lint.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/const_prop_lint.rs 2023-12-21 16:55:28.000000000 +0000 @@ -22,7 +22,6 @@ }; use rustc_span::Span; use rustc_target::abi::{HasDataLayout, Size, TargetDataLayout}; -use rustc_trait_selection::traits; use crate::const_prop::CanConstProp; use crate::const_prop::ConstPropMachine; @@ -35,9 +34,9 @@ /// Severely regress performance. 
const MAX_ALLOC_LIMIT: u64 = 1024; -pub struct ConstProp; +pub struct ConstPropLint; -impl<'tcx> MirLint<'tcx> for ConstProp { +impl<'tcx> MirLint<'tcx> for ConstPropLint { fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) { if body.tainted_by_errors.is_some() { return; @@ -49,61 +48,25 @@ } let def_id = body.source.def_id().expect_local(); - let is_fn_like = tcx.def_kind(def_id).is_fn_like(); - let is_assoc_const = tcx.def_kind(def_id) == DefKind::AssocConst; + let def_kind = tcx.def_kind(def_id); + let is_fn_like = def_kind.is_fn_like(); + let is_assoc_const = def_kind == DefKind::AssocConst; // Only run const prop on functions, methods, closures and associated constants if !is_fn_like && !is_assoc_const { // skip anon_const/statics/consts because they'll be evaluated by miri anyway - trace!("ConstProp skipped for {:?}", def_id); + trace!("ConstPropLint skipped for {:?}", def_id); return; } - let is_generator = tcx.type_of(def_id.to_def_id()).instantiate_identity().is_generator(); - // FIXME(welseywiser) const prop doesn't work on generators because of query cycles + // FIXME(welseywiser) const prop doesn't work on coroutines because of query cycles // computing their layout. - if is_generator { - trace!("ConstProp skipped for generator {:?}", def_id); + if let DefKind::Coroutine = def_kind { + trace!("ConstPropLint skipped for coroutine {:?}", def_id); return; } - // Check if it's even possible to satisfy the 'where' clauses - // for this item. - // This branch will never be taken for any normal function. - // However, it's possible to `#!feature(trivial_bounds)]` to write - // a function with impossible to satisfy clauses, e.g.: - // `fn foo() where String: Copy {}` - // - // We don't usually need to worry about this kind of case, - // since we would get a compilation error if the user tried - // to call it. However, since we can do const propagation - // even without any calls to the function, we need to make - // sure that it even makes sense to try to evaluate the body. - // If there are unsatisfiable where clauses, then all bets are - // off, and we just give up. - // - // We manually filter the predicates, skipping anything that's not - // "global". We are in a potentially generic context - // (e.g. we are evaluating a function without substituting generic - // parameters, so this filtering serves two purposes: - // - // 1. We skip evaluating any predicates that we would - // never be able prove are unsatisfiable (e.g. `` - // 2. We avoid trying to normalize predicates involving generic - // parameters (e.g. `::MyItem`). This can confuse - // the normalization code (leading to cycle errors), since - // it's usually never invoked in this way. - let predicates = tcx - .predicates_of(def_id.to_def_id()) - .predicates - .iter() - .filter_map(|(p, _)| if p.is_global() { Some(*p) } else { None }); - if traits::impossible_predicates(tcx, traits::elaborate(tcx, predicates).collect()) { - trace!("ConstProp skipped for {:?}: found unsatisfiable predicates", def_id); - return; - } - - trace!("ConstProp starting for {:?}", def_id); + trace!("ConstPropLint starting for {:?}", def_id); // FIXME(oli-obk, eddyb) Optimize locals (or even local paths) to hold // constants, instead of just checking for const-folding succeeding. 
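The hunk above drops the lint pass's up-front check for unsatisfiable where-clauses. As a minimal sketch of the kind of item that check existed for, following the `String: Copy` example in the removed comment (nightly-only feature gate; the item name is made up for illustration):

```rust
// Nightly-only sketch: `trivial_bounds` lets an item carry a bound that can
// never be satisfied, so the function is uncallable by any caller, yet its
// MIR still exists and a pass may try to const-evaluate it.
#![feature(trivial_bounds)]
#![allow(trivial_bounds)]

fn never_callable()
where
    String: Copy, // impossible bound; no caller can ever prove it
{
}

fn main() {}
```
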
@@ -112,7 +75,7 @@ let mut linter = ConstPropagator::new(body, tcx); linter.visit_body(body); - trace!("ConstProp done for {:?}", def_id); + trace!("ConstPropLint done for {:?}", def_id); } } @@ -664,9 +627,10 @@ } TerminatorKind::SwitchInt { ref discr, ref targets } => { if let Some(ref value) = self.eval_operand(&discr, location) - && let Some(value_const) = self.use_ecx(location, |this| this.ecx.read_scalar(value)) - && let Ok(constant) = value_const.try_to_int() - && let Ok(constant) = constant.to_bits(constant.size()) + && let Some(value_const) = + self.use_ecx(location, |this| this.ecx.read_scalar(value)) + && let Ok(constant) = value_const.try_to_int() + && let Ok(constant) = constant.to_bits(constant.size()) { // We managed to evaluate the discriminant, so we know we only need to visit // one target. @@ -684,7 +648,7 @@ | TerminatorKind::Unreachable | TerminatorKind::Drop { .. } | TerminatorKind::Yield { .. } - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } | TerminatorKind::Call { .. } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/copy_prop.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/copy_prop.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/copy_prop.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/copy_prop.rs 2023-12-21 16:55:28.000000000 +0000 @@ -168,14 +168,15 @@ && self.storage_to_remove.contains(l) { stmt.make_nop(); - return + return; } self.super_statement(stmt, loc); // Do not leave tautological assignments around. if let StatementKind::Assign(box (lhs, ref rhs)) = stmt.kind - && let Rvalue::Use(Operand::Copy(rhs) | Operand::Move(rhs)) | Rvalue::CopyForDeref(rhs) = *rhs + && let Rvalue::Use(Operand::Copy(rhs) | Operand::Move(rhs)) | Rvalue::CopyForDeref(rhs) = + *rhs && lhs == rhs { stmt.make_nop(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coroutine.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coroutine.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coroutine.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coroutine.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,1964 @@ +//! This is the implementation of the pass which transforms coroutines into state machines. +//! +//! MIR generation for coroutines creates a function which has a self argument which +//! passes by value. This argument is effectively a coroutine type which only contains upvars and +//! is only used for this argument inside the MIR for the coroutine. +//! It is passed by value to enable upvars to be moved out of it. Drop elaboration runs on that +//! MIR before this pass and creates drop flags for MIR locals. +//! It will also drop the coroutine argument (which only consists of upvars) if any of the upvars +//! are moved out of. This pass elaborates the drops of upvars / coroutine argument in the case +//! that none of the upvars were moved out of. This is because we cannot have any drops of this +//! coroutine in the MIR, since it is used to create the drop glue for the coroutine. We'd get +//! infinite recursion otherwise. +//! +//! This pass creates the implementation for either the `Coroutine::resume` or `Future::poll` +//! function and the drop shim for the coroutine based on the MIR input. +//! 
It converts the coroutine argument from Self to &mut Self adding derefs in the MIR as needed. +//! It computes the final layout of the coroutine struct which looks like this: +//! First upvars are stored +//! It is followed by the coroutine state field. +//! Then finally the MIR locals which are live across a suspension point are stored. +//! ```ignore (illustrative) +//! struct Coroutine { +//! upvars..., +//! state: u32, +//! mir_locals..., +//! } +//! ``` +//! This pass computes the meaning of the state field and the MIR locals which are live +//! across a suspension point. There are however three hardcoded coroutine states: +//! 0 - Coroutine have not been resumed yet +//! 1 - Coroutine has returned / is completed +//! 2 - Coroutine has been poisoned +//! +//! It also rewrites `return x` and `yield y` as setting a new coroutine state and returning +//! `CoroutineState::Complete(x)` and `CoroutineState::Yielded(y)`, +//! or `Poll::Ready(x)` and `Poll::Pending` respectively. +//! MIR locals which are live across a suspension point are moved to the coroutine struct +//! with references to them being updated with references to the coroutine struct. +//! +//! The pass creates two functions which have a switch on the coroutine state giving +//! the action to take. +//! +//! One of them is the implementation of `Coroutine::resume` / `Future::poll`. +//! For coroutines with state 0 (unresumed) it starts the execution of the coroutine. +//! For coroutines with state 1 (returned) and state 2 (poisoned) it panics. +//! Otherwise it continues the execution from the last suspension point. +//! +//! The other function is the drop glue for the coroutine. +//! For coroutines with state 0 (unresumed) it drops the upvars of the coroutine. +//! For coroutines with state 1 (returned) and state 2 (poisoned) it does nothing. +//! Otherwise it drops all the values in scope at the last suspension point. 
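As a rough mental model of the layout and resume behaviour described in the doc comment above, here is a hand-written Rust analogue for a coroutine that yields `1` once and then returns `"done"`. The real pass emits MIR with a computed layout, so the enum shape and every name below are illustrative only:

```rust
enum CoroutineState<Y, R> {
    Yielded(Y),
    Complete(R),
}

// Variant order mirrors the hardcoded states described above: 0 = unresumed,
// 1 = returned, 2 = poisoned; real suspension points start after these
// reserved variants.
enum MyCoroutine {
    Unresumed,
    Returned,
    Poisoned,
    Suspend0, // suspended at the single `yield`; no locals are live here
}

impl MyCoroutine {
    fn resume(&mut self) -> CoroutineState<i32, &'static str> {
        match *self {
            MyCoroutine::Unresumed => {
                *self = MyCoroutine::Suspend0;
                CoroutineState::Yielded(1)
            }
            MyCoroutine::Suspend0 => {
                *self = MyCoroutine::Returned;
                CoroutineState::Complete("done")
            }
            MyCoroutine::Returned => panic!("coroutine resumed after completion"),
            MyCoroutine::Poisoned => panic!("coroutine resumed after panicking"),
        }
    }
}

fn main() {
    let mut c = MyCoroutine::Unresumed;
    assert!(matches!(c.resume(), CoroutineState::Yielded(1)));
    assert!(matches!(c.resume(), CoroutineState::Complete("done")));
}
```
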
+ +use crate::abort_unwinding_calls; +use crate::deref_separator::deref_finder; +use crate::errors; +use crate::pass_manager as pm; +use crate::simplify; +use crate::MirPass; +use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_errors::pluralize; +use rustc_hir as hir; +use rustc_hir::lang_items::LangItem; +use rustc_hir::CoroutineKind; +use rustc_index::bit_set::{BitMatrix, BitSet, GrowableBitSet}; +use rustc_index::{Idx, IndexVec}; +use rustc_middle::mir::dump_mir; +use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; +use rustc_middle::mir::*; +use rustc_middle::ty::InstanceDef; +use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt}; +use rustc_middle::ty::{CoroutineArgs, GenericArgsRef}; +use rustc_mir_dataflow::impls::{ + MaybeBorrowedLocals, MaybeLiveLocals, MaybeRequiresStorage, MaybeStorageLive, +}; +use rustc_mir_dataflow::storage::always_storage_live_locals; +use rustc_mir_dataflow::{self, Analysis}; +use rustc_span::def_id::{DefId, LocalDefId}; +use rustc_span::symbol::sym; +use rustc_span::Span; +use rustc_target::abi::{FieldIdx, VariantIdx}; +use rustc_target::spec::PanicStrategy; +use std::{iter, ops}; + +pub struct StateTransform; + +struct RenameLocalVisitor<'tcx> { + from: Local, + to: Local, + tcx: TyCtxt<'tcx>, +} + +impl<'tcx> MutVisitor<'tcx> for RenameLocalVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) { + if *local == self.from { + *local = self.to; + } + } + + fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) { + match terminator.kind { + TerminatorKind::Return => { + // Do not replace the implicit `_0` access here, as that's not possible. The + // transform already handles `return` correctly. 
+ } + _ => self.super_terminator(terminator, location), + } + } +} + +struct DerefArgVisitor<'tcx> { + tcx: TyCtxt<'tcx>, +} + +impl<'tcx> MutVisitor<'tcx> for DerefArgVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) { + assert_ne!(*local, SELF_ARG); + } + + fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) { + if place.local == SELF_ARG { + replace_base( + place, + Place { + local: SELF_ARG, + projection: self.tcx().mk_place_elems(&[ProjectionElem::Deref]), + }, + self.tcx, + ); + } else { + self.visit_local(&mut place.local, context, location); + + for elem in place.projection.iter() { + if let PlaceElem::Index(local) = elem { + assert_ne!(local, SELF_ARG); + } + } + } + } +} + +struct PinArgVisitor<'tcx> { + ref_coroutine_ty: Ty<'tcx>, + tcx: TyCtxt<'tcx>, +} + +impl<'tcx> MutVisitor<'tcx> for PinArgVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) { + assert_ne!(*local, SELF_ARG); + } + + fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) { + if place.local == SELF_ARG { + replace_base( + place, + Place { + local: SELF_ARG, + projection: self.tcx().mk_place_elems(&[ProjectionElem::Field( + FieldIdx::new(0), + self.ref_coroutine_ty, + )]), + }, + self.tcx, + ); + } else { + self.visit_local(&mut place.local, context, location); + + for elem in place.projection.iter() { + if let PlaceElem::Index(local) = elem { + assert_ne!(local, SELF_ARG); + } + } + } + } +} + +fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtxt<'tcx>) { + place.local = new_base.local; + + let mut new_projection = new_base.projection.to_vec(); + new_projection.append(&mut place.projection.to_vec()); + + place.projection = tcx.mk_place_elems(&new_projection); +} + +const SELF_ARG: Local = Local::from_u32(1); + +/// Coroutine has not been resumed yet. +const UNRESUMED: usize = CoroutineArgs::UNRESUMED; +/// Coroutine has returned / is completed. +const RETURNED: usize = CoroutineArgs::RETURNED; +/// Coroutine has panicked and is poisoned. +const POISONED: usize = CoroutineArgs::POISONED; + +/// Number of variants to reserve in coroutine state. Corresponds to +/// `UNRESUMED` (beginning of a coroutine) and `RETURNED`/`POISONED` +/// (end of a coroutine) states. +const RESERVED_VARIANTS: usize = 3; + +/// A `yield` point in the coroutine. +struct SuspensionPoint<'tcx> { + /// State discriminant used when suspending or resuming at this point. + state: usize, + /// The block to jump to after resumption. + resume: BasicBlock, + /// Where to move the resume argument after resumption. + resume_arg: Place<'tcx>, + /// Which block to jump to if the coroutine is dropped in this state. + drop: Option, + /// Set of locals that have live storage while at this suspension point. + storage_liveness: GrowableBitSet, +} + +struct TransformVisitor<'tcx> { + tcx: TyCtxt<'tcx>, + coroutine_kind: hir::CoroutineKind, + state_adt_ref: AdtDef<'tcx>, + state_args: GenericArgsRef<'tcx>, + + // The type of the discriminant in the coroutine struct + discr_ty: Ty<'tcx>, + + // Mapping from Local to (type of local, coroutine struct index) + // FIXME(eddyb) This should use `IndexVec>`. 
+ remap: FxHashMap, VariantIdx, FieldIdx)>, + + // A map from a suspension point in a block to the locals which have live storage at that point + storage_liveness: IndexVec>>, + + // A list of suspension points, generated during the transform + suspension_points: Vec>, + + // The set of locals that have no `StorageLive`/`StorageDead` annotations. + always_live_locals: BitSet, + + // The original RETURN_PLACE local + new_ret_local: Local, +} + +impl<'tcx> TransformVisitor<'tcx> { + fn insert_none_ret_block(&self, body: &mut Body<'tcx>) -> BasicBlock { + let block = BasicBlock::new(body.basic_blocks.len()); + + let source_info = SourceInfo::outermost(body.span); + + let (kind, idx) = self.coroutine_state_adt_and_variant_idx(true); + assert_eq!(self.state_adt_ref.variant(idx).fields.len(), 0); + let statements = vec![Statement { + kind: StatementKind::Assign(Box::new(( + Place::return_place(), + Rvalue::Aggregate(Box::new(kind), IndexVec::new()), + ))), + source_info, + }]; + + body.basic_blocks_mut().push(BasicBlockData { + statements, + terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }), + is_cleanup: false, + }); + + block + } + + fn coroutine_state_adt_and_variant_idx( + &self, + is_return: bool, + ) -> (AggregateKind<'tcx>, VariantIdx) { + let idx = VariantIdx::new(match (is_return, self.coroutine_kind) { + (true, hir::CoroutineKind::Coroutine) => 1, // CoroutineState::Complete + (false, hir::CoroutineKind::Coroutine) => 0, // CoroutineState::Yielded + (true, hir::CoroutineKind::Async(_)) => 0, // Poll::Ready + (false, hir::CoroutineKind::Async(_)) => 1, // Poll::Pending + (true, hir::CoroutineKind::Gen(_)) => 0, // Option::None + (false, hir::CoroutineKind::Gen(_)) => 1, // Option::Some + }); + + let kind = AggregateKind::Adt(self.state_adt_ref.did(), idx, self.state_args, None, None); + (kind, idx) + } + + // Make a `CoroutineState` or `Poll` variant assignment. + // + // `core::ops::CoroutineState` only has single element tuple variants, + // so we can just write to the downcasted first field and then set the + // discriminant to the appropriate variant. + fn make_state( + &self, + val: Operand<'tcx>, + source_info: SourceInfo, + is_return: bool, + statements: &mut Vec>, + ) { + let (kind, idx) = self.coroutine_state_adt_and_variant_idx(is_return); + + match self.coroutine_kind { + // `Poll::Pending` + CoroutineKind::Async(_) => { + if !is_return { + assert_eq!(self.state_adt_ref.variant(idx).fields.len(), 0); + + // FIXME(swatinem): assert that `val` is indeed unit? 
+ statements.push(Statement { + kind: StatementKind::Assign(Box::new(( + Place::return_place(), + Rvalue::Aggregate(Box::new(kind), IndexVec::new()), + ))), + source_info, + }); + return; + } + } + // `Option::None` + CoroutineKind::Gen(_) => { + if is_return { + assert_eq!(self.state_adt_ref.variant(idx).fields.len(), 0); + + statements.push(Statement { + kind: StatementKind::Assign(Box::new(( + Place::return_place(), + Rvalue::Aggregate(Box::new(kind), IndexVec::new()), + ))), + source_info, + }); + return; + } + } + CoroutineKind::Coroutine => {} + } + + // else: `Poll::Ready(x)`, `CoroutineState::Yielded(x)`, `CoroutineState::Complete(x)`, or `Option::Some(x)` + assert_eq!(self.state_adt_ref.variant(idx).fields.len(), 1); + + statements.push(Statement { + kind: StatementKind::Assign(Box::new(( + Place::return_place(), + Rvalue::Aggregate(Box::new(kind), [val].into()), + ))), + source_info, + }); + } + + // Create a Place referencing a coroutine struct field + fn make_field(&self, variant_index: VariantIdx, idx: FieldIdx, ty: Ty<'tcx>) -> Place<'tcx> { + let self_place = Place::from(SELF_ARG); + let base = self.tcx.mk_place_downcast_unnamed(self_place, variant_index); + let mut projection = base.projection.to_vec(); + projection.push(ProjectionElem::Field(idx, ty)); + + Place { local: base.local, projection: self.tcx.mk_place_elems(&projection) } + } + + // Create a statement which changes the discriminant + fn set_discr(&self, state_disc: VariantIdx, source_info: SourceInfo) -> Statement<'tcx> { + let self_place = Place::from(SELF_ARG); + Statement { + source_info, + kind: StatementKind::SetDiscriminant { + place: Box::new(self_place), + variant_index: state_disc, + }, + } + } + + // Create a statement which reads the discriminant into a temporary + fn get_discr(&self, body: &mut Body<'tcx>) -> (Statement<'tcx>, Place<'tcx>) { + let temp_decl = LocalDecl::new(self.discr_ty, body.span); + let local_decls_len = body.local_decls.push(temp_decl); + let temp = Place::from(local_decls_len); + + let self_place = Place::from(SELF_ARG); + let assign = Statement { + source_info: SourceInfo::outermost(body.span), + kind: StatementKind::Assign(Box::new((temp, Rvalue::Discriminant(self_place)))), + }; + (assign, temp) + } +} + +impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) { + assert_eq!(self.remap.get(local), None); + } + + fn visit_place( + &mut self, + place: &mut Place<'tcx>, + _context: PlaceContext, + _location: Location, + ) { + // Replace an Local in the remap with a coroutine struct access + if let Some(&(ty, variant_index, idx)) = self.remap.get(&place.local) { + replace_base(place, self.make_field(variant_index, idx, ty), self.tcx); + } + } + + fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) { + // Remove StorageLive and StorageDead statements for remapped locals + data.retain_statements(|s| match s.kind { + StatementKind::StorageLive(l) | StatementKind::StorageDead(l) => { + !self.remap.contains_key(&l) + } + _ => true, + }); + + let ret_val = match data.terminator().kind { + TerminatorKind::Return => { + Some((true, None, Operand::Move(Place::from(self.new_ret_local)), None)) + } + TerminatorKind::Yield { ref value, resume, resume_arg, drop } => { + Some((false, Some((resume, resume_arg)), value.clone(), drop)) + } + _ => None, + }; + + if let Some((is_return, resume, v, drop)) = ret_val { + let source_info = 
data.terminator().source_info; + // We must assign the value first in case it gets declared dead below + self.make_state(v, source_info, is_return, &mut data.statements); + let state = if let Some((resume, mut resume_arg)) = resume { + // Yield + let state = RESERVED_VARIANTS + self.suspension_points.len(); + + // The resume arg target location might itself be remapped if its base local is + // live across a yield. + let resume_arg = + if let Some(&(ty, variant, idx)) = self.remap.get(&resume_arg.local) { + replace_base(&mut resume_arg, self.make_field(variant, idx, ty), self.tcx); + resume_arg + } else { + resume_arg + }; + + self.suspension_points.push(SuspensionPoint { + state, + resume, + resume_arg, + drop, + storage_liveness: self.storage_liveness[block].clone().unwrap().into(), + }); + + VariantIdx::new(state) + } else { + // Return + VariantIdx::new(RETURNED) // state for returned + }; + data.statements.push(self.set_discr(state, source_info)); + data.terminator_mut().kind = TerminatorKind::Return; + } + + self.super_basic_block_data(block, data); + } +} + +fn make_coroutine_state_argument_indirect<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let coroutine_ty = body.local_decls.raw[1].ty; + + let ref_coroutine_ty = Ty::new_ref( + tcx, + tcx.lifetimes.re_erased, + ty::TypeAndMut { ty: coroutine_ty, mutbl: Mutability::Mut }, + ); + + // Replace the by value coroutine argument + body.local_decls.raw[1].ty = ref_coroutine_ty; + + // Add a deref to accesses of the coroutine state + DerefArgVisitor { tcx }.visit_body(body); +} + +fn make_coroutine_state_argument_pinned<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let ref_coroutine_ty = body.local_decls.raw[1].ty; + + let pin_did = tcx.require_lang_item(LangItem::Pin, Some(body.span)); + let pin_adt_ref = tcx.adt_def(pin_did); + let args = tcx.mk_args(&[ref_coroutine_ty.into()]); + let pin_ref_coroutine_ty = Ty::new_adt(tcx, pin_adt_ref, args); + + // Replace the by ref coroutine argument + body.local_decls.raw[1].ty = pin_ref_coroutine_ty; + + // Add the Pin field access to accesses of the coroutine state + PinArgVisitor { ref_coroutine_ty, tcx }.visit_body(body); +} + +/// Allocates a new local and replaces all references of `local` with it. Returns the new local. +/// +/// `local` will be changed to a new local decl with type `ty`. +/// +/// Note that the new local will be uninitialized. It is the caller's responsibility to assign some +/// valid value to it before its first use. +fn replace_local<'tcx>( + local: Local, + ty: Ty<'tcx>, + body: &mut Body<'tcx>, + tcx: TyCtxt<'tcx>, +) -> Local { + let new_decl = LocalDecl::new(ty, body.span); + let new_local = body.local_decls.push(new_decl); + body.local_decls.swap(local, new_local); + + RenameLocalVisitor { from: local, to: new_local, tcx }.visit_body(body); + + new_local +} + +/// Transforms the `body` of the coroutine applying the following transforms: +/// +/// - Eliminates all the `get_context` calls that async lowering created. +/// - Replace all `Local` `ResumeTy` types with `&mut Context<'_>` (`context_mut_ref`). +/// +/// The `Local`s that have their types replaced are: +/// - The `resume` argument itself. +/// - The argument to `get_context`. +/// - The yielded value of a `yield`. +/// +/// The `ResumeTy` hides a `&mut Context<'_>` behind an unsafe raw pointer, and the +/// `get_context` function is being used to convert that back to a `&mut Context<'_>`. 
+/// +/// Ideally the async lowering would not use the `ResumeTy`/`get_context` indirection, +/// but rather directly use `&mut Context<'_>`, however that would currently +/// lead to higher-kinded lifetime errors. +/// See . +/// +/// The async lowering step and the type / lifetime inference / checking are +/// still using the `ResumeTy` indirection for the time being, and that indirection +/// is removed here. After this transform, the coroutine body only knows about `&mut Context<'_>`. +fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let context_mut_ref = Ty::new_task_context(tcx); + + // replace the type of the `resume` argument + replace_resume_ty_local(tcx, body, Local::new(2), context_mut_ref); + + let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, None); + + for bb in START_BLOCK..body.basic_blocks.next_index() { + let bb_data = &body[bb]; + if bb_data.is_cleanup { + continue; + } + + match &bb_data.terminator().kind { + TerminatorKind::Call { func, .. } => { + let func_ty = func.ty(body, tcx); + if let ty::FnDef(def_id, _) = *func_ty.kind() { + if def_id == get_context_def_id { + let local = eliminate_get_context_call(&mut body[bb]); + replace_resume_ty_local(tcx, body, local, context_mut_ref); + } + } else { + continue; + } + } + TerminatorKind::Yield { resume_arg, .. } => { + replace_resume_ty_local(tcx, body, resume_arg.local, context_mut_ref); + } + _ => {} + } + } +} + +fn eliminate_get_context_call<'tcx>(bb_data: &mut BasicBlockData<'tcx>) -> Local { + let terminator = bb_data.terminator.take().unwrap(); + if let TerminatorKind::Call { mut args, destination, target, .. } = terminator.kind { + let arg = args.pop().unwrap(); + let local = arg.place().unwrap().local; + + let arg = Rvalue::Use(arg); + let assign = Statement { + source_info: terminator.source_info, + kind: StatementKind::Assign(Box::new((destination, arg))), + }; + bb_data.statements.push(assign); + bb_data.terminator = Some(Terminator { + source_info: terminator.source_info, + kind: TerminatorKind::Goto { target: target.unwrap() }, + }); + local + } else { + bug!(); + } +} + +#[cfg_attr(not(debug_assertions), allow(unused))] +fn replace_resume_ty_local<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + local: Local, + context_mut_ref: Ty<'tcx>, +) { + let local_ty = std::mem::replace(&mut body.local_decls[local].ty, context_mut_ref); + // We have to replace the `ResumeTy` that is used for type and borrow checking + // with `&mut Context<'_>` in MIR. + #[cfg(debug_assertions)] + { + if let ty::Adt(resume_ty_adt, _) = local_ty.kind() { + let expected_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None)); + assert_eq!(*resume_ty_adt, expected_adt); + } else { + panic!("expected `ResumeTy`, found `{:?}`", local_ty); + }; + } +} + +struct LivenessInfo { + /// Which locals are live across any suspension point. + saved_locals: CoroutineSavedLocals, + + /// The set of saved locals live at each suspension point. + live_locals_at_suspension_points: Vec>, + + /// Parallel vec to the above with SourceInfo for each yield terminator. + source_info_at_suspension_points: Vec, + + /// For every saved local, the set of other saved locals that are + /// storage-live at the same time as this local. We cannot overlap locals in + /// the layout which have conflicting storage. + storage_conflicts: BitMatrix, + + /// For every suspending block, the locals which are storage-live across + /// that suspension point. 
+ storage_liveness: IndexVec>>, +} + +/// Computes which locals have to be stored in the state-machine for the +/// given coroutine. +/// +/// The basic idea is as follows: +/// - a local is live until we encounter a `StorageDead` statement. In +/// case none exist, the local is considered to be always live. +/// - a local has to be stored if it is either directly used after the +/// the suspend point, or if it is live and has been previously borrowed. +fn locals_live_across_suspend_points<'tcx>( + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, + always_live_locals: &BitSet, + movable: bool, +) -> LivenessInfo { + let body_ref: &Body<'_> = &body; + + // Calculate when MIR locals have live storage. This gives us an upper bound of their + // lifetimes. + let mut storage_live = MaybeStorageLive::new(std::borrow::Cow::Borrowed(always_live_locals)) + .into_engine(tcx, body_ref) + .iterate_to_fixpoint() + .into_results_cursor(body_ref); + + // Calculate the MIR locals which have been previously + // borrowed (even if they are still active). + let borrowed_locals_results = + MaybeBorrowedLocals.into_engine(tcx, body_ref).pass_name("coroutine").iterate_to_fixpoint(); + + let mut borrowed_locals_cursor = borrowed_locals_results.cloned_results_cursor(body_ref); + + // Calculate the MIR locals that we actually need to keep storage around + // for. + let mut requires_storage_results = + MaybeRequiresStorage::new(borrowed_locals_results.cloned_results_cursor(body)) + .into_engine(tcx, body_ref) + .iterate_to_fixpoint(); + let mut requires_storage_cursor = requires_storage_results.as_results_cursor(body_ref); + + // Calculate the liveness of MIR locals ignoring borrows. + let mut liveness = MaybeLiveLocals + .into_engine(tcx, body_ref) + .pass_name("coroutine") + .iterate_to_fixpoint() + .into_results_cursor(body_ref); + + let mut storage_liveness_map = IndexVec::from_elem(None, &body.basic_blocks); + let mut live_locals_at_suspension_points = Vec::new(); + let mut source_info_at_suspension_points = Vec::new(); + let mut live_locals_at_any_suspension_point = BitSet::new_empty(body.local_decls.len()); + + for (block, data) in body.basic_blocks.iter_enumerated() { + if let TerminatorKind::Yield { .. } = data.terminator().kind { + let loc = Location { block, statement_index: data.statements.len() }; + + liveness.seek_to_block_end(block); + let mut live_locals: BitSet<_> = BitSet::new_empty(body.local_decls.len()); + live_locals.union(liveness.get()); + + if !movable { + // The `liveness` variable contains the liveness of MIR locals ignoring borrows. + // This is correct for movable coroutines since borrows cannot live across + // suspension points. However for immovable coroutines we need to account for + // borrows, so we conservatively assume that all borrowed locals are live until + // we find a StorageDead statement referencing the locals. + // To do this we just union our `liveness` result with `borrowed_locals`, which + // contains all the locals which has been borrowed before this suspension point. + // If a borrow is converted to a raw reference, we must also assume that it lives + // forever. Note that the final liveness is still bounded by the storage liveness + // of the local, which happens using the `intersect` operation below. 
+ borrowed_locals_cursor.seek_before_primary_effect(loc); + live_locals.union(borrowed_locals_cursor.get()); + } + + // Store the storage liveness for later use so we can restore the state + // after a suspension point + storage_live.seek_before_primary_effect(loc); + storage_liveness_map[block] = Some(storage_live.get().clone()); + + // Locals live are live at this point only if they are used across + // suspension points (the `liveness` variable) + // and their storage is required (the `storage_required` variable) + requires_storage_cursor.seek_before_primary_effect(loc); + live_locals.intersect(requires_storage_cursor.get()); + + // The coroutine argument is ignored. + live_locals.remove(SELF_ARG); + + debug!("loc = {:?}, live_locals = {:?}", loc, live_locals); + + // Add the locals live at this suspension point to the set of locals which live across + // any suspension points + live_locals_at_any_suspension_point.union(&live_locals); + + live_locals_at_suspension_points.push(live_locals); + source_info_at_suspension_points.push(data.terminator().source_info); + } + } + + debug!("live_locals_anywhere = {:?}", live_locals_at_any_suspension_point); + let saved_locals = CoroutineSavedLocals(live_locals_at_any_suspension_point); + + // Renumber our liveness_map bitsets to include only the locals we are + // saving. + let live_locals_at_suspension_points = live_locals_at_suspension_points + .iter() + .map(|live_here| saved_locals.renumber_bitset(&live_here)) + .collect(); + + let storage_conflicts = compute_storage_conflicts( + body_ref, + &saved_locals, + always_live_locals.clone(), + requires_storage_results, + ); + + LivenessInfo { + saved_locals, + live_locals_at_suspension_points, + source_info_at_suspension_points, + storage_conflicts, + storage_liveness: storage_liveness_map, + } +} + +/// The set of `Local`s that must be saved across yield points. +/// +/// `CoroutineSavedLocal` is indexed in terms of the elements in this set; +/// i.e. `CoroutineSavedLocal::new(1)` corresponds to the second local +/// included in this set. +struct CoroutineSavedLocals(BitSet); + +impl CoroutineSavedLocals { + /// Returns an iterator over each `CoroutineSavedLocal` along with the `Local` it corresponds + /// to. + fn iter_enumerated(&self) -> impl '_ + Iterator { + self.iter().enumerate().map(|(i, l)| (CoroutineSavedLocal::from(i), l)) + } + + /// Transforms a `BitSet` that contains only locals saved across yield points to the + /// equivalent `BitSet`. + fn renumber_bitset(&self, input: &BitSet) -> BitSet { + assert!(self.superset(&input), "{:?} not a superset of {:?}", self.0, input); + let mut out = BitSet::new_empty(self.count()); + for (saved_local, local) in self.iter_enumerated() { + if input.contains(local) { + out.insert(saved_local); + } + } + out + } + + fn get(&self, local: Local) -> Option { + if !self.contains(local) { + return None; + } + + let idx = self.iter().take_while(|&l| l < local).count(); + Some(CoroutineSavedLocal::new(idx)) + } +} + +impl ops::Deref for CoroutineSavedLocals { + type Target = BitSet; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +/// For every saved local, looks for which locals are StorageLive at the same +/// time. Generates a bitset for every local of all the other locals that may be +/// StorageLive simultaneously with that local. This is used in the layout +/// computation; see `CoroutineLayout` for more. 
+fn compute_storage_conflicts<'mir, 'tcx>( + body: &'mir Body<'tcx>, + saved_locals: &CoroutineSavedLocals, + always_live_locals: BitSet, + mut requires_storage: rustc_mir_dataflow::Results<'tcx, MaybeRequiresStorage<'_, 'mir, 'tcx>>, +) -> BitMatrix { + assert_eq!(body.local_decls.len(), saved_locals.domain_size()); + + debug!("compute_storage_conflicts({:?})", body.span); + debug!("always_live = {:?}", always_live_locals); + + // Locals that are always live or ones that need to be stored across + // suspension points are not eligible for overlap. + let mut ineligible_locals = always_live_locals; + ineligible_locals.intersect(&**saved_locals); + + // Compute the storage conflicts for all eligible locals. + let mut visitor = StorageConflictVisitor { + body, + saved_locals: &saved_locals, + local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()), + }; + + requires_storage.visit_reachable_with(body, &mut visitor); + + let local_conflicts = visitor.local_conflicts; + + // Compress the matrix using only stored locals (Local -> CoroutineSavedLocal). + // + // NOTE: Today we store a full conflict bitset for every local. Technically + // this is twice as many bits as we need, since the relation is symmetric. + // However, in practice these bitsets are not usually large. The layout code + // also needs to keep track of how many conflicts each local has, so it's + // simpler to keep it this way for now. + let mut storage_conflicts = BitMatrix::new(saved_locals.count(), saved_locals.count()); + for (saved_local_a, local_a) in saved_locals.iter_enumerated() { + if ineligible_locals.contains(local_a) { + // Conflicts with everything. + storage_conflicts.insert_all_into_row(saved_local_a); + } else { + // Keep overlap information only for stored locals. + for (saved_local_b, local_b) in saved_locals.iter_enumerated() { + if local_conflicts.contains(local_a, local_b) { + storage_conflicts.insert(saved_local_a, saved_local_b); + } + } + } + } + storage_conflicts +} + +struct StorageConflictVisitor<'mir, 'tcx, 's> { + body: &'mir Body<'tcx>, + saved_locals: &'s CoroutineSavedLocals, + // FIXME(tmandry): Consider using sparse bitsets here once we have good + // benchmarks for coroutines. + local_conflicts: BitMatrix, +} + +impl<'mir, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'mir, 'tcx, R> + for StorageConflictVisitor<'mir, 'tcx, '_> +{ + type FlowState = BitSet; + + fn visit_statement_before_primary_effect( + &mut self, + _results: &mut R, + state: &Self::FlowState, + _statement: &'mir Statement<'tcx>, + loc: Location, + ) { + self.apply_state(state, loc); + } + + fn visit_terminator_before_primary_effect( + &mut self, + _results: &mut R, + state: &Self::FlowState, + _terminator: &'mir Terminator<'tcx>, + loc: Location, + ) { + self.apply_state(state, loc); + } +} + +impl StorageConflictVisitor<'_, '_, '_> { + fn apply_state(&mut self, flow_state: &BitSet, loc: Location) { + // Ignore unreachable blocks. 
+ if self.body.basic_blocks[loc.block].terminator().kind == TerminatorKind::Unreachable { + return; + } + + let mut eligible_storage_live = flow_state.clone(); + eligible_storage_live.intersect(&**self.saved_locals); + + for local in eligible_storage_live.iter() { + self.local_conflicts.union_row_with(&eligible_storage_live, local); + } + + if eligible_storage_live.count() > 1 { + trace!("at {:?}, eligible_storage_live={:?}", loc, eligible_storage_live); + } + } +} + +fn compute_layout<'tcx>( + liveness: LivenessInfo, + body: &Body<'tcx>, +) -> ( + FxHashMap, VariantIdx, FieldIdx)>, + CoroutineLayout<'tcx>, + IndexVec>>, +) { + let LivenessInfo { + saved_locals, + live_locals_at_suspension_points, + source_info_at_suspension_points, + storage_conflicts, + storage_liveness, + } = liveness; + + // Gather live local types and their indices. + let mut locals = IndexVec::::new(); + let mut tys = IndexVec::::new(); + for (saved_local, local) in saved_locals.iter_enumerated() { + debug!("coroutine saved local {:?} => {:?}", saved_local, local); + + locals.push(local); + let decl = &body.local_decls[local]; + debug!(?decl); + + // Do not `assert_crate_local` here, as post-borrowck cleanup may have already cleared + // the information. This is alright, since `ignore_for_traits` is only relevant when + // this code runs on pre-cleanup MIR, and `ignore_for_traits = false` is the safer + // default. + let ignore_for_traits = match decl.local_info { + // Do not include raw pointers created from accessing `static` items, as those could + // well be re-created by another access to the same static. + ClearCrossCrate::Set(box LocalInfo::StaticRef { is_thread_local, .. }) => { + !is_thread_local + } + // Fake borrows are only read by fake reads, so do not have any reality in + // post-analysis MIR. + ClearCrossCrate::Set(box LocalInfo::FakeBorrow) => true, + _ => false, + }; + let decl = + CoroutineSavedTy { ty: decl.ty, source_info: decl.source_info, ignore_for_traits }; + debug!(?decl); + + tys.push(decl); + } + + // Leave empty variants for the UNRESUMED, RETURNED, and POISONED states. + // In debuginfo, these will correspond to the beginning (UNRESUMED) or end + // (RETURNED, POISONED) of the function. + let body_span = body.source_scopes[OUTERMOST_SOURCE_SCOPE].span; + let mut variant_source_info: IndexVec = [ + SourceInfo::outermost(body_span.shrink_to_lo()), + SourceInfo::outermost(body_span.shrink_to_hi()), + SourceInfo::outermost(body_span.shrink_to_hi()), + ] + .iter() + .copied() + .collect(); + + // Build the coroutine variant field list. + // Create a map from local indices to coroutine struct indices. + let mut variant_fields: IndexVec> = + iter::repeat(IndexVec::new()).take(RESERVED_VARIANTS).collect(); + let mut remap = FxHashMap::default(); + for (suspension_point_idx, live_locals) in live_locals_at_suspension_points.iter().enumerate() { + let variant_index = VariantIdx::from(RESERVED_VARIANTS + suspension_point_idx); + let mut fields = IndexVec::new(); + for (idx, saved_local) in live_locals.iter().enumerate() { + fields.push(saved_local); + // Note that if a field is included in multiple variants, we will + // just use the first one here. That's fine; fields do not move + // around inside coroutines, so it doesn't matter which variant + // index we access them by. 
+ let idx = FieldIdx::from_usize(idx); + remap.entry(locals[saved_local]).or_insert((tys[saved_local].ty, variant_index, idx)); + } + variant_fields.push(fields); + variant_source_info.push(source_info_at_suspension_points[suspension_point_idx]); + } + debug!("coroutine variant_fields = {:?}", variant_fields); + debug!("coroutine storage_conflicts = {:#?}", storage_conflicts); + + let mut field_names = IndexVec::from_elem(None, &tys); + for var in &body.var_debug_info { + let VarDebugInfoContents::Place(place) = &var.value else { continue }; + let Some(local) = place.as_local() else { continue }; + let Some(&(_, variant, field)) = remap.get(&local) else { continue }; + + let saved_local = variant_fields[variant][field]; + field_names.get_or_insert_with(saved_local, || var.name); + } + + let layout = CoroutineLayout { + field_tys: tys, + field_names, + variant_fields, + variant_source_info, + storage_conflicts, + }; + debug!(?layout); + + (remap, layout, storage_liveness) +} + +/// Replaces the entry point of `body` with a block that switches on the coroutine discriminant and +/// dispatches to blocks according to `cases`. +/// +/// After this function, the former entry point of the function will be bb1. +fn insert_switch<'tcx>( + body: &mut Body<'tcx>, + cases: Vec<(usize, BasicBlock)>, + transform: &TransformVisitor<'tcx>, + default: TerminatorKind<'tcx>, +) { + let default_block = insert_term_block(body, default); + let (assign, discr) = transform.get_discr(body); + let switch_targets = + SwitchTargets::new(cases.iter().map(|(i, bb)| ((*i) as u128, *bb)), default_block); + let switch = TerminatorKind::SwitchInt { discr: Operand::Move(discr), targets: switch_targets }; + + let source_info = SourceInfo::outermost(body.span); + body.basic_blocks_mut().raw.insert( + 0, + BasicBlockData { + statements: vec![assign], + terminator: Some(Terminator { source_info, kind: switch }), + is_cleanup: false, + }, + ); + + let blocks = body.basic_blocks_mut().iter_mut(); + + for target in blocks.flat_map(|b| b.terminator_mut().successors_mut()) { + *target = BasicBlock::new(target.index() + 1); + } +} + +fn elaborate_coroutine_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + use crate::shim::DropShimElaborator; + use rustc_middle::mir::patch::MirPatch; + use rustc_mir_dataflow::elaborate_drops::{elaborate_drop, Unwind}; + + // Note that `elaborate_drops` only drops the upvars of a coroutine, and + // this is ok because `open_drop` can only be reached within that own + // coroutine's resume function. 
+ + let def_id = body.source.def_id(); + let param_env = tcx.param_env(def_id); + + let mut elaborator = DropShimElaborator { body, patch: MirPatch::new(body), tcx, param_env }; + + for (block, block_data) in body.basic_blocks.iter_enumerated() { + let (target, unwind, source_info) = match block_data.terminator() { + Terminator { + source_info, + kind: TerminatorKind::Drop { place, target, unwind, replace: _ }, + } => { + if let Some(local) = place.as_local() { + if local == SELF_ARG { + (target, unwind, source_info) + } else { + continue; + } + } else { + continue; + } + } + _ => continue, + }; + let unwind = if block_data.is_cleanup { + Unwind::InCleanup + } else { + Unwind::To(match *unwind { + UnwindAction::Cleanup(tgt) => tgt, + UnwindAction::Continue => elaborator.patch.resume_block(), + UnwindAction::Unreachable => elaborator.patch.unreachable_cleanup_block(), + UnwindAction::Terminate(reason) => elaborator.patch.terminate_block(reason), + }) + }; + elaborate_drop( + &mut elaborator, + *source_info, + Place::from(SELF_ARG), + (), + *target, + unwind, + block, + ); + } + elaborator.patch.apply(body); +} + +fn create_coroutine_drop_shim<'tcx>( + tcx: TyCtxt<'tcx>, + transform: &TransformVisitor<'tcx>, + coroutine_ty: Ty<'tcx>, + body: &mut Body<'tcx>, + drop_clean: BasicBlock, +) -> Body<'tcx> { + let mut body = body.clone(); + body.arg_count = 1; // make sure the resume argument is not included here + + let source_info = SourceInfo::outermost(body.span); + + let mut cases = create_cases(&mut body, transform, Operation::Drop); + + cases.insert(0, (UNRESUMED, drop_clean)); + + // The returned state and the poisoned state fall through to the default + // case which is just to return + + insert_switch(&mut body, cases, &transform, TerminatorKind::Return); + + for block in body.basic_blocks_mut() { + let kind = &mut block.terminator_mut().kind; + if let TerminatorKind::CoroutineDrop = *kind { + *kind = TerminatorKind::Return; + } + } + + // Replace the return variable + body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(Ty::new_unit(tcx), source_info); + + make_coroutine_state_argument_indirect(tcx, &mut body); + + // Change the coroutine argument from &mut to *mut + body.local_decls[SELF_ARG] = LocalDecl::with_source_info( + Ty::new_ptr(tcx, ty::TypeAndMut { ty: coroutine_ty, mutbl: hir::Mutability::Mut }), + source_info, + ); + + // Make sure we remove dead blocks to remove + // unrelated code from the resume part of the function + simplify::remove_dead_blocks(&mut body); + + // Update the body's def to become the drop glue. + let coroutine_instance = body.source.instance; + let drop_in_place = tcx.require_lang_item(LangItem::DropInPlace, None); + let drop_instance = InstanceDef::DropGlue(drop_in_place, Some(coroutine_ty)); + + // Temporary change MirSource to coroutine's instance so that dump_mir produces more sensible + // filename. 
+ body.source.instance = coroutine_instance; + dump_mir(tcx, false, "coroutine_drop", &0, &body, |_, _| Ok(())); + body.source.instance = drop_instance; + + body +} + +fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock { + let source_info = SourceInfo::outermost(body.span); + body.basic_blocks_mut().push(BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { source_info, kind }), + is_cleanup: false, + }) +} + +fn insert_panic_block<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + message: AssertMessage<'tcx>, +) -> BasicBlock { + let assert_block = BasicBlock::new(body.basic_blocks.len()); + let term = TerminatorKind::Assert { + cond: Operand::Constant(Box::new(ConstOperand { + span: body.span, + user_ty: None, + const_: Const::from_bool(tcx, false), + })), + expected: true, + msg: Box::new(message), + target: assert_block, + unwind: UnwindAction::Continue, + }; + + let source_info = SourceInfo::outermost(body.span); + body.basic_blocks_mut().push(BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { source_info, kind: term }), + is_cleanup: false, + }); + + assert_block +} + +fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool { + // Returning from a function with an uninhabited return type is undefined behavior. + if body.return_ty().is_privately_uninhabited(tcx, param_env) { + return false; + } + + // If there's a return terminator the function may return. + for block in body.basic_blocks.iter() { + if let TerminatorKind::Return = block.terminator().kind { + return true; + } + } + + // Otherwise the function can't return. + false +} + +fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool { + // Nothing can unwind when landing pads are off. + if tcx.sess.panic_strategy() == PanicStrategy::Abort { + return false; + } + + // Unwinds can only start at certain terminators. + for block in body.basic_blocks.iter() { + match block.terminator().kind { + // These never unwind. + TerminatorKind::Goto { .. } + | TerminatorKind::SwitchInt { .. } + | TerminatorKind::UnwindTerminate(_) + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::CoroutineDrop + | TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } => {} + + // Resume will *continue* unwinding, but if there's no other unwinding terminator it + // will never be reached. + TerminatorKind::UnwindResume => {} + + TerminatorKind::Yield { .. } => { + unreachable!("`can_unwind` called before coroutine transform") + } + + // These may unwind. + TerminatorKind::Drop { .. } + | TerminatorKind::Call { .. } + | TerminatorKind::InlineAsm { .. } + | TerminatorKind::Assert { .. } => return true, + } + } + + // If we didn't find an unwinding terminator, the function cannot unwind. 
+ false +} + +fn create_coroutine_resume_function<'tcx>( + tcx: TyCtxt<'tcx>, + transform: TransformVisitor<'tcx>, + body: &mut Body<'tcx>, + can_return: bool, +) { + let can_unwind = can_unwind(tcx, body); + + // Poison the coroutine when it unwinds + if can_unwind { + let source_info = SourceInfo::outermost(body.span); + let poison_block = body.basic_blocks_mut().push(BasicBlockData { + statements: vec![transform.set_discr(VariantIdx::new(POISONED), source_info)], + terminator: Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }), + is_cleanup: true, + }); + + for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() { + let source_info = block.terminator().source_info; + + if let TerminatorKind::UnwindResume = block.terminator().kind { + // An existing `Resume` terminator is redirected to jump to our dedicated + // "poisoning block" above. + if idx != poison_block { + *block.terminator_mut() = Terminator { + source_info, + kind: TerminatorKind::Goto { target: poison_block }, + }; + } + } else if !block.is_cleanup { + // Any terminators that *can* unwind but don't have an unwind target set are also + // pointed at our poisoning block (unless they're part of the cleanup path). + if let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut() { + *unwind = UnwindAction::Cleanup(poison_block); + } + } + } + } + + let mut cases = create_cases(body, &transform, Operation::Resume); + + use rustc_middle::mir::AssertKind::{ResumedAfterPanic, ResumedAfterReturn}; + + // Jump to the entry point on the unresumed + cases.insert(0, (UNRESUMED, START_BLOCK)); + + // Panic when resumed on the returned or poisoned state + let coroutine_kind = body.coroutine_kind().unwrap(); + + if can_unwind { + cases.insert( + 1, + (POISONED, insert_panic_block(tcx, body, ResumedAfterPanic(coroutine_kind))), + ); + } + + if can_return { + let block = match coroutine_kind { + CoroutineKind::Async(_) | CoroutineKind::Coroutine => { + insert_panic_block(tcx, body, ResumedAfterReturn(coroutine_kind)) + } + CoroutineKind::Gen(_) => transform.insert_none_ret_block(body), + }; + cases.insert(1, (RETURNED, block)); + } + + insert_switch(body, cases, &transform, TerminatorKind::Unreachable); + + make_coroutine_state_argument_indirect(tcx, body); + make_coroutine_state_argument_pinned(tcx, body); + + // Make sure we remove dead blocks to remove + // unrelated code from the drop part of the function + simplify::remove_dead_blocks(body); + + pm::run_passes_no_validate(tcx, body, &[&abort_unwinding_calls::AbortUnwindingCalls], None); + + dump_mir(tcx, false, "coroutine_resume", &0, body, |_, _| Ok(())); +} + +fn insert_clean_drop(body: &mut Body<'_>) -> BasicBlock { + let return_block = insert_term_block(body, TerminatorKind::Return); + + let term = TerminatorKind::Drop { + place: Place::from(SELF_ARG), + target: return_block, + unwind: UnwindAction::Continue, + replace: false, + }; + let source_info = SourceInfo::outermost(body.span); + + // Create a block to destroy an unresumed coroutines. This can only destroy upvars. + body.basic_blocks_mut().push(BasicBlockData { + statements: Vec::new(), + terminator: Some(Terminator { source_info, kind: term }), + is_cleanup: false, + }) +} + +/// An operation that can be performed on a coroutine. 
+#[derive(PartialEq, Copy, Clone)] +enum Operation { + Resume, + Drop, +} + +impl Operation { + fn target_block(self, point: &SuspensionPoint<'_>) -> Option { + match self { + Operation::Resume => Some(point.resume), + Operation::Drop => point.drop, + } + } +} + +fn create_cases<'tcx>( + body: &mut Body<'tcx>, + transform: &TransformVisitor<'tcx>, + operation: Operation, +) -> Vec<(usize, BasicBlock)> { + let source_info = SourceInfo::outermost(body.span); + + transform + .suspension_points + .iter() + .filter_map(|point| { + // Find the target for this suspension point, if applicable + operation.target_block(point).map(|target| { + let mut statements = Vec::new(); + + // Create StorageLive instructions for locals with live storage + for i in 0..(body.local_decls.len()) { + if i == 2 { + // The resume argument is live on function entry. Don't insert a + // `StorageLive`, or the following `Assign` will read from uninitialized + // memory. + continue; + } + + let l = Local::new(i); + let needs_storage_live = point.storage_liveness.contains(l) + && !transform.remap.contains_key(&l) + && !transform.always_live_locals.contains(l); + if needs_storage_live { + statements + .push(Statement { source_info, kind: StatementKind::StorageLive(l) }); + } + } + + if operation == Operation::Resume { + // Move the resume argument to the destination place of the `Yield` terminator + let resume_arg = Local::new(2); // 0 = return, 1 = self + statements.push(Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + point.resume_arg, + Rvalue::Use(Operand::Move(resume_arg.into())), + ))), + }); + } + + // Then jump to the real target + let block = body.basic_blocks_mut().push(BasicBlockData { + statements, + terminator: Some(Terminator { + source_info, + kind: TerminatorKind::Goto { target }, + }), + is_cleanup: false, + }); + + (point.state, block) + }) + }) + .collect() +} + +#[instrument(level = "debug", skip(tcx), ret)] +pub(crate) fn mir_coroutine_witnesses<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: LocalDefId, +) -> Option> { + let (body, _) = tcx.mir_promoted(def_id); + let body = body.borrow(); + let body = &*body; + + // The first argument is the coroutine type passed by value + let coroutine_ty = body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty; + + let movable = match *coroutine_ty.kind() { + ty::Coroutine(_, _, movability) => movability == hir::Movability::Movable, + ty::Error(_) => return None, + _ => span_bug!(body.span, "unexpected coroutine type {}", coroutine_ty), + }; + + // The witness simply contains all locals live across suspend points. 
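To make the witness computation above concrete, a hypothetical async fn (names invented for illustration) in which one local is live across the suspension point, and therefore has to be stored in the coroutine state, while another is not:

```rust
// Illustrative only: `buf` is used again after the `.await`, so it is live
// across the suspension point and must be saved in the coroutine layout;
// `len` only exists after resumption and never needs to be saved.
async fn read_len(read: impl std::future::Future<Output = usize>) -> usize {
    let buf = [0u8; 16];        // live across the `.await` -> saved local
    let n = read.await;         // suspension point
    let len = buf.len().min(n); // created after the resume -> not saved
    len
}
```
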
+ + let always_live_locals = always_storage_live_locals(&body); + let liveness_info = locals_live_across_suspend_points(tcx, body, &always_live_locals, movable); + + // Extract locals which are live across suspension point into `layout` + // `remap` gives a mapping from local indices onto coroutine struct indices + // `storage_liveness` tells us which locals have live storage at suspension points + let (_, coroutine_layout, _) = compute_layout(liveness_info, body); + + check_suspend_tys(tcx, &coroutine_layout, &body); + + Some(coroutine_layout) +} + +impl<'tcx> MirPass<'tcx> for StateTransform { + fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let Some(yield_ty) = body.yield_ty() else { + // This only applies to coroutines + return; + }; + + assert!(body.coroutine_drop().is_none()); + + // The first argument is the coroutine type passed by value + let coroutine_ty = body.local_decls.raw[1].ty; + + // Get the discriminant type and args which typeck computed + let (discr_ty, movable) = match *coroutine_ty.kind() { + ty::Coroutine(_, args, movability) => { + let args = args.as_coroutine(); + (args.discr_ty(tcx), movability == hir::Movability::Movable) + } + _ => { + tcx.sess + .delay_span_bug(body.span, format!("unexpected coroutine type {coroutine_ty}")); + return; + } + }; + + let is_async_kind = matches!(body.coroutine_kind(), Some(CoroutineKind::Async(_))); + let (state_adt_ref, state_args) = match body.coroutine_kind().unwrap() { + CoroutineKind::Async(_) => { + // Compute Poll + let poll_did = tcx.require_lang_item(LangItem::Poll, None); + let poll_adt_ref = tcx.adt_def(poll_did); + let poll_args = tcx.mk_args(&[body.return_ty().into()]); + (poll_adt_ref, poll_args) + } + CoroutineKind::Gen(_) => { + // Compute Option + let option_did = tcx.require_lang_item(LangItem::Option, None); + let option_adt_ref = tcx.adt_def(option_did); + let option_args = tcx.mk_args(&[body.yield_ty().unwrap().into()]); + (option_adt_ref, option_args) + } + CoroutineKind::Coroutine => { + // Compute CoroutineState + let state_did = tcx.require_lang_item(LangItem::CoroutineState, None); + let state_adt_ref = tcx.adt_def(state_did); + let state_args = tcx.mk_args(&[yield_ty.into(), body.return_ty().into()]); + (state_adt_ref, state_args) + } + }; + let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args); + + // We rename RETURN_PLACE which has type mir.return_ty to new_ret_local + // RETURN_PLACE then is a fresh unused local with type ret_ty. + let new_ret_local = replace_local(RETURN_PLACE, ret_ty, body, tcx); + + // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies. + if is_async_kind { + transform_async_context(tcx, body); + } + + // We also replace the resume argument and insert an `Assign`. + // This is needed because the resume argument `_2` might be live across a `yield`, in which + // case there is no `Assign` to it that the transform can turn into a store to the coroutine + // state. After the yield the slot in the coroutine state would then be uninitialized. + let resume_local = Local::new(2); + let resume_ty = if is_async_kind { + Ty::new_task_context(tcx) + } else { + body.local_decls[resume_local].ty + }; + let new_resume_local = replace_local(resume_local, resume_ty, body, tcx); + + // When first entering the coroutine, move the resume argument into its new local. 
+ let source_info = SourceInfo::outermost(body.span); + let stmts = &mut body.basic_blocks_mut()[START_BLOCK].statements; + stmts.insert( + 0, + Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + new_resume_local.into(), + Rvalue::Use(Operand::Move(resume_local.into())), + ))), + }, + ); + + let always_live_locals = always_storage_live_locals(&body); + + let liveness_info = + locals_live_across_suspend_points(tcx, body, &always_live_locals, movable); + + if tcx.sess.opts.unstable_opts.validate_mir { + let mut vis = EnsureCoroutineFieldAssignmentsNeverAlias { + assigned_local: None, + saved_locals: &liveness_info.saved_locals, + storage_conflicts: &liveness_info.storage_conflicts, + }; + + vis.visit_body(body); + } + + // Extract locals which are live across suspension point into `layout` + // `remap` gives a mapping from local indices onto coroutine struct indices + // `storage_liveness` tells us which locals have live storage at suspension points + let (remap, layout, storage_liveness) = compute_layout(liveness_info, body); + + let can_return = can_return(tcx, body, tcx.param_env(body.source.def_id())); + + // Run the transformation which converts Places from Local to coroutine struct + // accesses for locals in `remap`. + // It also rewrites `return x` and `yield y` as writing a new coroutine state and returning + // either CoroutineState::Complete(x) and CoroutineState::Yielded(y), + // or Poll::Ready(x) and Poll::Pending respectively depending on `is_async_kind`. + let mut transform = TransformVisitor { + tcx, + coroutine_kind: body.coroutine_kind().unwrap(), + state_adt_ref, + state_args, + remap, + storage_liveness, + always_live_locals, + suspension_points: Vec::new(), + new_ret_local, + discr_ty, + }; + transform.visit_body(body); + + // Update our MIR struct to reflect the changes we've made + body.arg_count = 2; // self, resume arg + body.spread_arg = None; + + // The original arguments to the function are no longer arguments, mark them as such. + // Otherwise they'll conflict with our new arguments, which although they don't have + // argument_index set, will get emitted as unnamed arguments. + for var in &mut body.var_debug_info { + var.argument_index = None; + } + + body.coroutine.as_mut().unwrap().yield_ty = None; + body.coroutine.as_mut().unwrap().coroutine_layout = Some(layout); + + // Insert `drop(coroutine_struct)` which is used to drop upvars for coroutines in + // the unresumed state. + // This is expanded to a drop ladder in `elaborate_coroutine_drops`. + let drop_clean = insert_clean_drop(body); + + dump_mir(tcx, false, "coroutine_pre-elab", &0, body, |_, _| Ok(())); + + // Expand `drop(coroutine_struct)` to a drop ladder which destroys upvars. + // If any upvars are moved out of, drop elaboration will handle upvar destruction. + // However we need to also elaborate the code generated by `insert_clean_drop`. 
+ elaborate_coroutine_drops(tcx, body); + + dump_mir(tcx, false, "coroutine_post-transform", &0, body, |_, _| Ok(())); + + // Create a copy of our MIR and use it to create the drop shim for the coroutine + let drop_shim = create_coroutine_drop_shim(tcx, &transform, coroutine_ty, body, drop_clean); + + body.coroutine.as_mut().unwrap().coroutine_drop = Some(drop_shim); + + // Create the Coroutine::resume / Future::poll function + create_coroutine_resume_function(tcx, transform, body, can_return); + + // Run derefer to fix Derefs that are not in the first place + deref_finder(tcx, body); + } +} + +/// Looks for any assignments between locals (e.g., `_4 = _5`) that will both be converted to fields +/// in the coroutine state machine but whose storage is not marked as conflicting +/// +/// Validation needs to happen immediately *before* `TransformVisitor` is invoked, not after. +/// +/// This condition would arise when the assignment is the last use of `_5` but the initial +/// definition of `_4` if we weren't extra careful to mark all locals used inside a statement as +/// conflicting. Non-conflicting coroutine saved locals may be stored at the same location within +/// the coroutine state machine, which would result in ill-formed MIR: the left-hand and right-hand +/// sides of an assignment may not alias. This caused a miscompilation in [#73137]. +/// +/// [#73137]: https://github.com/rust-lang/rust/issues/73137 +struct EnsureCoroutineFieldAssignmentsNeverAlias<'a> { + saved_locals: &'a CoroutineSavedLocals, + storage_conflicts: &'a BitMatrix, + assigned_local: Option, +} + +impl EnsureCoroutineFieldAssignmentsNeverAlias<'_> { + fn saved_local_for_direct_place(&self, place: Place<'_>) -> Option { + if place.is_indirect() { + return None; + } + + self.saved_locals.get(place.local) + } + + fn check_assigned_place(&mut self, place: Place<'_>, f: impl FnOnce(&mut Self)) { + if let Some(assigned_local) = self.saved_local_for_direct_place(place) { + assert!(self.assigned_local.is_none(), "`check_assigned_place` must not recurse"); + + self.assigned_local = Some(assigned_local); + f(self); + self.assigned_local = None; + } + } +} + +impl<'tcx> Visitor<'tcx> for EnsureCoroutineFieldAssignmentsNeverAlias<'_> { + fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) { + let Some(lhs) = self.assigned_local else { + // This visitor only invokes `visit_place` for the right-hand side of an assignment + // and only after setting `self.assigned_local`. However, the default impl of + // `Visitor::super_body` may call `visit_place` with a `NonUseContext` for places + // with debuginfo. Ignore them here. + assert!(!context.is_use()); + return; + }; + + let Some(rhs) = self.saved_local_for_direct_place(*place) else { return }; + + if !self.storage_conflicts.contains(lhs, rhs) { + bug!( + "Assignment between coroutine saved locals whose storage is not \ + marked as conflicting: {:?}: {:?} = {:?}", + location, + lhs, + rhs, + ); + } + } + + fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { + match &statement.kind { + StatementKind::Assign(box (lhs, rhs)) => { + self.check_assigned_place(*lhs, |this| this.visit_rvalue(rhs, location)); + } + + StatementKind::FakeRead(..) + | StatementKind::SetDiscriminant { .. } + | StatementKind::Deinit(..) + | StatementKind::StorageLive(_) + | StatementKind::StorageDead(_) + | StatementKind::Retag(..) + | StatementKind::AscribeUserType(..) + | StatementKind::PlaceMention(..) + | StatementKind::Coverage(..) 
+ | StatementKind::Intrinsic(..) + | StatementKind::ConstEvalCounter + | StatementKind::Nop => {} + } + } + + fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { + // Checking for aliasing in terminators is probably overkill, but until we have actual + // semantics, we should be conservative here. + match &terminator.kind { + TerminatorKind::Call { + func, + args, + destination, + target: Some(_), + unwind: _, + call_source: _, + fn_span: _, + } => { + self.check_assigned_place(*destination, |this| { + this.visit_operand(func, location); + for arg in args { + this.visit_operand(arg, location); + } + }); + } + + TerminatorKind::Yield { value, resume: _, resume_arg, drop: _ } => { + self.check_assigned_place(*resume_arg, |this| this.visit_operand(value, location)); + } + + // FIXME: Does `asm!` have any aliasing requirements? + TerminatorKind::InlineAsm { .. } => {} + + TerminatorKind::Call { .. } + | TerminatorKind::Goto { .. } + | TerminatorKind::SwitchInt { .. } + | TerminatorKind::UnwindResume + | TerminatorKind::UnwindTerminate(_) + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::Drop { .. } + | TerminatorKind::Assert { .. } + | TerminatorKind::CoroutineDrop + | TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } => {} + } + } +} + +fn check_suspend_tys<'tcx>(tcx: TyCtxt<'tcx>, layout: &CoroutineLayout<'tcx>, body: &Body<'tcx>) { + let mut linted_tys = FxHashSet::default(); + + // We want a user-facing param-env. + let param_env = tcx.param_env(body.source.def_id()); + + for (variant, yield_source_info) in + layout.variant_fields.iter().zip(&layout.variant_source_info) + { + debug!(?variant); + for &local in variant { + let decl = &layout.field_tys[local]; + debug!(?decl); + + if !decl.ignore_for_traits && linted_tys.insert(decl.ty) { + let Some(hir_id) = decl.source_info.scope.lint_root(&body.source_scopes) else { + continue; + }; + + check_must_not_suspend_ty( + tcx, + decl.ty, + hir_id, + param_env, + SuspendCheckData { + source_span: decl.source_info.span, + yield_span: yield_source_info.span, + plural_len: 1, + ..Default::default() + }, + ); + } + } + } +} + +#[derive(Default)] +struct SuspendCheckData<'a> { + source_span: Span, + yield_span: Span, + descr_pre: &'a str, + descr_post: &'a str, + plural_len: usize, +} + +// Returns whether it emitted a diagnostic or not +// Note that this fn and the proceeding one are based on the code +// for creating must_use diagnostics +// +// Note that this technique was chosen over things like a `Suspend` marker trait +// as it is simpler and has precedent in the compiler +fn check_must_not_suspend_ty<'tcx>( + tcx: TyCtxt<'tcx>, + ty: Ty<'tcx>, + hir_id: hir::HirId, + param_env: ty::ParamEnv<'tcx>, + data: SuspendCheckData<'_>, +) -> bool { + if ty.is_unit() { + return false; + } + + let plural_suffix = pluralize!(data.plural_len); + + debug!("Checking must_not_suspend for {}", ty); + + match *ty.kind() { + ty::Adt(..) if ty.is_box() => { + let boxed_ty = ty.boxed_ty(); + let descr_pre = &format!("{}boxed ", data.descr_pre); + check_must_not_suspend_ty( + tcx, + boxed_ty, + hir_id, + param_env, + SuspendCheckData { descr_pre, ..data }, + ) + } + ty::Adt(def, _) => check_must_not_suspend_def(tcx, def.did(), hir_id, data), + // FIXME: support adding the attribute to TAITs + ty::Alias(ty::Opaque, ty::AliasTy { def_id: def, .. 
}) => { + let mut has_emitted = false; + for &(predicate, _) in tcx.explicit_item_bounds(def).skip_binder() { + // We only look at the `DefId`, so it is safe to skip the binder here. + if let ty::ClauseKind::Trait(ref poly_trait_predicate) = + predicate.kind().skip_binder() + { + let def_id = poly_trait_predicate.trait_ref.def_id; + let descr_pre = &format!("{}implementer{} of ", data.descr_pre, plural_suffix); + if check_must_not_suspend_def( + tcx, + def_id, + hir_id, + SuspendCheckData { descr_pre, ..data }, + ) { + has_emitted = true; + break; + } + } + } + has_emitted + } + ty::Dynamic(binder, _, _) => { + let mut has_emitted = false; + for predicate in binder.iter() { + if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder() { + let def_id = trait_ref.def_id; + let descr_post = &format!(" trait object{}{}", plural_suffix, data.descr_post); + if check_must_not_suspend_def( + tcx, + def_id, + hir_id, + SuspendCheckData { descr_post, ..data }, + ) { + has_emitted = true; + break; + } + } + } + has_emitted + } + ty::Tuple(fields) => { + let mut has_emitted = false; + for (i, ty) in fields.iter().enumerate() { + let descr_post = &format!(" in tuple element {i}"); + if check_must_not_suspend_ty( + tcx, + ty, + hir_id, + param_env, + SuspendCheckData { descr_post, ..data }, + ) { + has_emitted = true; + } + } + has_emitted + } + ty::Array(ty, len) => { + let descr_pre = &format!("{}array{} of ", data.descr_pre, plural_suffix); + check_must_not_suspend_ty( + tcx, + ty, + hir_id, + param_env, + SuspendCheckData { + descr_pre, + plural_len: len.try_eval_target_usize(tcx, param_env).unwrap_or(0) as usize + 1, + ..data + }, + ) + } + // If drop tracking is enabled, we want to look through references, since the referent + // may not be considered live across the await point. + ty::Ref(_region, ty, _mutability) => { + let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix); + check_must_not_suspend_ty( + tcx, + ty, + hir_id, + param_env, + SuspendCheckData { descr_pre, ..data }, + ) + } + _ => false, + } +} + +fn check_must_not_suspend_def( + tcx: TyCtxt<'_>, + def_id: DefId, + hir_id: hir::HirId, + data: SuspendCheckData<'_>, +) -> bool { + if let Some(attr) = tcx.get_attr(def_id, sym::must_not_suspend) { + let reason = attr.value_str().map(|s| errors::MustNotSuspendReason { + span: data.source_span, + reason: s.as_str().to_string(), + }); + tcx.emit_spanned_lint( + rustc_session::lint::builtin::MUST_NOT_SUSPEND, + hir_id, + data.source_span, + errors::MustNotSupend { + tcx, + yield_sp: data.yield_span, + reason, + src_sp: data.source_span, + pre: data.descr_pre, + def_id, + post: data.descr_post, + }, + ); + + true + } else { + false + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/cost_checker.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/cost_checker.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/cost_checker.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/cost_checker.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,98 @@ +use rustc_middle::mir::visit::*; +use rustc_middle::mir::*; +use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt}; + +const INSTR_COST: usize = 5; +const CALL_PENALTY: usize = 25; +const LANDINGPAD_PENALTY: usize = 50; +const RESUME_PENALTY: usize = 45; + +/// Verify that the callee body is compatible with the caller. 
+#[derive(Clone)] +pub(crate) struct CostChecker<'b, 'tcx> { + tcx: TyCtxt<'tcx>, + param_env: ParamEnv<'tcx>, + cost: usize, + callee_body: &'b Body<'tcx>, + instance: Option>, +} + +impl<'b, 'tcx> CostChecker<'b, 'tcx> { + pub fn new( + tcx: TyCtxt<'tcx>, + param_env: ParamEnv<'tcx>, + instance: Option>, + callee_body: &'b Body<'tcx>, + ) -> CostChecker<'b, 'tcx> { + CostChecker { tcx, param_env, callee_body, instance, cost: 0 } + } + + pub fn cost(&self) -> usize { + self.cost + } + + fn instantiate_ty(&self, v: Ty<'tcx>) -> Ty<'tcx> { + if let Some(instance) = self.instance { + instance.instantiate_mir(self.tcx, ty::EarlyBinder::bind(&v)) + } else { + v + } + } +} + +impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> { + fn visit_statement(&mut self, statement: &Statement<'tcx>, _: Location) { + // Don't count StorageLive/StorageDead in the inlining cost. + match statement.kind { + StatementKind::StorageLive(_) + | StatementKind::StorageDead(_) + | StatementKind::Deinit(_) + | StatementKind::Nop => {} + _ => self.cost += INSTR_COST, + } + } + + fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, _: Location) { + let tcx = self.tcx; + match terminator.kind { + TerminatorKind::Drop { ref place, unwind, .. } => { + // If the place doesn't actually need dropping, treat it like a regular goto. + let ty = self.instantiate_ty(place.ty(self.callee_body, tcx).ty); + if ty.needs_drop(tcx, self.param_env) { + self.cost += CALL_PENALTY; + if let UnwindAction::Cleanup(_) = unwind { + self.cost += LANDINGPAD_PENALTY; + } + } else { + self.cost += INSTR_COST; + } + } + TerminatorKind::Call { func: Operand::Constant(ref f), unwind, .. } => { + let fn_ty = self.instantiate_ty(f.const_.ty()); + self.cost += if let ty::FnDef(def_id, _) = *fn_ty.kind() && tcx.is_intrinsic(def_id) { + // Don't give intrinsics the extra penalty for calls + INSTR_COST + } else { + CALL_PENALTY + }; + if let UnwindAction::Cleanup(_) = unwind { + self.cost += LANDINGPAD_PENALTY; + } + } + TerminatorKind::Assert { unwind, .. } => { + self.cost += CALL_PENALTY; + if let UnwindAction::Cleanup(_) = unwind { + self.cost += LANDINGPAD_PENALTY; + } + } + TerminatorKind::UnwindResume => self.cost += RESUME_PENALTY, + TerminatorKind::InlineAsm { unwind, .. } => { + self.cost += INSTR_COST; + if let UnwindAction::Cleanup(_) = unwind { + self.cost += LANDINGPAD_PENALTY; + } + } + _ => self.cost += INSTR_COST, + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/counters.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/counters.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/counters.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/counters.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,10 +1,6 @@ -use super::Error; - use super::graph; -use super::spans; use graph::{BasicCoverageBlock, BcbBranch, CoverageGraph, TraverseCoverageGraphWithLoops}; -use spans::CoverageSpan; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::graph::WithNumNodes; @@ -14,14 +10,12 @@ use std::fmt::{self, Debug}; -const NESTED_INDENT: &str = " "; - /// The coverage counter or counter expression associated with a particular /// BCB node or BCB edge. 
#[derive(Clone)] pub(super) enum BcbCounter { Counter { id: CounterId }, - Expression { id: ExpressionId, lhs: Operand, op: Op, rhs: Operand }, + Expression { id: ExpressionId }, } impl BcbCounter { @@ -29,10 +23,10 @@ matches!(self, Self::Expression { .. }) } - pub(super) fn as_operand(&self) -> Operand { + pub(super) fn as_term(&self) -> CovTerm { match *self { - BcbCounter::Counter { id, .. } => Operand::Counter(id), - BcbCounter::Expression { id, .. } => Operand::Expression(id), + BcbCounter::Counter { id, .. } => CovTerm::Counter(id), + BcbCounter::Expression { id, .. } => CovTerm::Expression(id), } } } @@ -41,17 +35,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Counter { id, .. } => write!(fmt, "Counter({:?})", id.index()), - Self::Expression { id, lhs, op, rhs } => write!( - fmt, - "Expression({:?}) = {:?} {} {:?}", - id.index(), - lhs, - match op { - Op::Add => "+", - Op::Subtract => "-", - }, - rhs, - ), + Self::Expression { id } => write!(fmt, "Expression({:?})", id.index()), } } } @@ -60,7 +44,6 @@ /// associated with nodes/edges in the BCB graph. pub(super) struct CoverageCounters { next_counter_id: CounterId, - next_expression_id: ExpressionId, /// Coverage counters/expressions that are associated with individual BCBs. bcb_counters: IndexVec>, @@ -68,13 +51,12 @@ /// edge between two BCBs. bcb_edge_counters: FxHashMap<(BasicCoverageBlock, BasicCoverageBlock), BcbCounter>, /// Tracks which BCBs have a counter associated with some incoming edge. - /// Only used by debug assertions, to verify that BCBs with incoming edge + /// Only used by assertions, to verify that BCBs with incoming edge /// counters do not have their own physical counters (expressions are allowed). bcb_has_incoming_edge_counters: BitSet, - /// Expression nodes that are not directly associated with any particular - /// BCB/edge, but are needed as operands to more complex expressions. - /// These are always [`BcbCounter::Expression`]. - pub(super) intermediate_expressions: Vec, + /// Table of expression data, associating each expression ID with its + /// corresponding operator (+ or -) and its LHS/RHS operands. + expressions: IndexVec, } impl CoverageCounters { @@ -83,24 +65,22 @@ Self { next_counter_id: CounterId::START, - next_expression_id: ExpressionId::START, - bcb_counters: IndexVec::from_elem_n(None, num_bcbs), bcb_edge_counters: FxHashMap::default(), bcb_has_incoming_edge_counters: BitSet::new_empty(num_bcbs), - intermediate_expressions: Vec::new(), + expressions: IndexVec::new(), } } /// Makes [`BcbCounter`] `Counter`s and `Expressions` for the `BasicCoverageBlock`s directly or - /// indirectly associated with `CoverageSpans`, and accumulates additional `Expression`s + /// indirectly associated with coverage spans, and accumulates additional `Expression`s /// representing intermediate values. 
pub fn make_bcb_counters( &mut self, basic_coverage_blocks: &CoverageGraph, - coverage_spans: &[CoverageSpan], - ) -> Result<(), Error> { - MakeBcbCounters::new(self, basic_coverage_blocks).make_bcb_counters(coverage_spans) + bcb_has_coverage_spans: impl Fn(BasicCoverageBlock) -> bool, + ) { + MakeBcbCounters::new(self, basic_coverage_blocks).make_bcb_counters(bcb_has_coverage_spans) } fn make_counter(&mut self) -> BcbCounter { @@ -108,50 +88,44 @@ BcbCounter::Counter { id } } - fn make_expression(&mut self, lhs: Operand, op: Op, rhs: Operand) -> BcbCounter { - let id = self.next_expression(); - BcbCounter::Expression { id, lhs, op, rhs } - } - - pub fn make_identity_counter(&mut self, counter_operand: Operand) -> BcbCounter { - self.make_expression(counter_operand, Op::Add, Operand::Zero) + fn make_expression(&mut self, lhs: CovTerm, op: Op, rhs: CovTerm) -> BcbCounter { + let id = self.expressions.push(Expression { lhs, op, rhs }); + BcbCounter::Expression { id } } /// Counter IDs start from one and go up. fn next_counter(&mut self) -> CounterId { let next = self.next_counter_id; - self.next_counter_id = next.next_id(); + self.next_counter_id = self.next_counter_id + 1; next } - /// Expression IDs start from 0 and go up. - /// (Counter IDs and Expression IDs are distinguished by the `Operand` enum.) - fn next_expression(&mut self) -> ExpressionId { - let next = self.next_expression_id; - self.next_expression_id = next.next_id(); - next + pub(super) fn num_counters(&self) -> usize { + self.next_counter_id.as_usize() } - fn set_bcb_counter( - &mut self, - bcb: BasicCoverageBlock, - counter_kind: BcbCounter, - ) -> Result { - debug_assert!( + #[cfg(test)] + pub(super) fn num_expressions(&self) -> usize { + self.expressions.len() + } + + fn set_bcb_counter(&mut self, bcb: BasicCoverageBlock, counter_kind: BcbCounter) -> CovTerm { + assert!( // If the BCB has an edge counter (to be injected into a new `BasicBlock`), it can also // have an expression (to be injected into an existing `BasicBlock` represented by this // `BasicCoverageBlock`). counter_kind.is_expression() || !self.bcb_has_incoming_edge_counters.contains(bcb), "attempt to add a `Counter` to a BCB target with existing incoming edge counters" ); - let operand = counter_kind.as_operand(); + + let term = counter_kind.as_term(); if let Some(replaced) = self.bcb_counters[bcb].replace(counter_kind) { - Error::from_string(format!( + bug!( "attempt to set a BasicCoverageBlock coverage counter more than once; \ {bcb:?} already had counter {replaced:?}", - )) + ); } else { - Ok(operand) + term } } @@ -160,27 +134,26 @@ from_bcb: BasicCoverageBlock, to_bcb: BasicCoverageBlock, counter_kind: BcbCounter, - ) -> Result { - if level_enabled!(tracing::Level::DEBUG) { - // If the BCB has an edge counter (to be injected into a new `BasicBlock`), it can also - // have an expression (to be injected into an existing `BasicBlock` represented by this - // `BasicCoverageBlock`). - if self.bcb_counter(to_bcb).is_some_and(|c| !c.is_expression()) { - return Error::from_string(format!( - "attempt to add an incoming edge counter from {from_bcb:?} when the target BCB already \ - has a `Counter`" - )); - } + ) -> CovTerm { + // If the BCB has an edge counter (to be injected into a new `BasicBlock`), it can also + // have an expression (to be injected into an existing `BasicBlock` represented by this + // `BasicCoverageBlock`). 
+ if let Some(node_counter) = self.bcb_counter(to_bcb) && !node_counter.is_expression() { + bug!( + "attempt to add an incoming edge counter from {from_bcb:?} \ + when the target BCB already has {node_counter:?}" + ); } + self.bcb_has_incoming_edge_counters.insert(to_bcb); - let operand = counter_kind.as_operand(); + let term = counter_kind.as_term(); if let Some(replaced) = self.bcb_edge_counters.insert((from_bcb, to_bcb), counter_kind) { - Error::from_string(format!( + bug!( "attempt to set an edge counter more than once; from_bcb: \ {from_bcb:?} already had counter {replaced:?}", - )) + ); } else { - Ok(operand) + term } } @@ -188,27 +161,31 @@ self.bcb_counters[bcb].as_ref() } - pub(super) fn take_bcb_counter(&mut self, bcb: BasicCoverageBlock) -> Option { - self.bcb_counters[bcb].take() + pub(super) fn bcb_node_counters( + &self, + ) -> impl Iterator { + self.bcb_counters + .iter_enumerated() + .filter_map(|(bcb, counter_kind)| Some((bcb, counter_kind.as_ref()?))) } - pub(super) fn drain_bcb_counters( - &mut self, - ) -> impl Iterator + '_ { - self.bcb_counters - .iter_enumerated_mut() - .filter_map(|(bcb, counter)| Some((bcb, counter.take()?))) + /// For each edge in the BCB graph that has an associated counter, yields + /// that edge's *from* and *to* nodes, and its counter. + pub(super) fn bcb_edge_counters( + &self, + ) -> impl Iterator { + self.bcb_edge_counters + .iter() + .map(|(&(from_bcb, to_bcb), counter_kind)| (from_bcb, to_bcb, counter_kind)) } - pub(super) fn drain_bcb_edge_counters( - &mut self, - ) -> impl Iterator + '_ { - self.bcb_edge_counters.drain() + pub(super) fn take_expressions(&mut self) -> IndexVec { + std::mem::take(&mut self.expressions) } } /// Traverse the `CoverageGraph` and add either a `Counter` or `Expression` to every BCB, to be -/// injected with `CoverageSpan`s. `Expressions` have no runtime overhead, so if a viable expression +/// injected with coverage spans. `Expressions` have no runtime overhead, so if a viable expression /// (adding or subtracting two other counters or expressions) can compute the same result as an /// embedded counter, an `Expression` should be used. struct MakeBcbCounters<'a> { @@ -230,21 +207,11 @@ /// One way to predict which branch executes the least is by considering loops. A loop is exited /// at a branch, so the branch that jumps to a `BasicCoverageBlock` outside the loop is almost /// always executed less than the branch that does not exit the loop. - /// - /// Returns any non-code-span expressions created to represent intermediate values (such as to - /// add two counters so the result can be subtracted from another counter), or an Error with - /// message for subsequent debugging. - fn make_bcb_counters(&mut self, coverage_spans: &[CoverageSpan]) -> Result<(), Error> { + fn make_bcb_counters(&mut self, bcb_has_coverage_spans: impl Fn(BasicCoverageBlock) -> bool) { debug!("make_bcb_counters(): adding a counter or expression to each BasicCoverageBlock"); - let num_bcbs = self.basic_coverage_blocks.num_nodes(); - - let mut bcbs_with_coverage = BitSet::new_empty(num_bcbs); - for covspan in coverage_spans { - bcbs_with_coverage.insert(covspan.bcb); - } // Walk the `CoverageGraph`. For each `BasicCoverageBlock` node with an associated - // `CoverageSpan`, add a counter. If the `BasicCoverageBlock` branches, add a counter or + // coverage span, add a counter. 
If the `BasicCoverageBlock` branches, add a counter or // expression to each branch `BasicCoverageBlock` (if the branch BCB has only one incoming // edge) or edge from the branching BCB to the branch BCB (if the branch BCB has multiple // incoming edges). @@ -254,39 +221,36 @@ // the loop. The `traversal` state includes a `context_stack`, providing a way to know if // the current BCB is in one or more nested loops or not. let mut traversal = TraverseCoverageGraphWithLoops::new(&self.basic_coverage_blocks); - while let Some(bcb) = traversal.next(self.basic_coverage_blocks) { - if bcbs_with_coverage.contains(bcb) { - debug!("{:?} has at least one `CoverageSpan`. Get or make its counter", bcb); - let branching_counter_operand = self.get_or_make_counter_operand(bcb)?; + while let Some(bcb) = traversal.next() { + if bcb_has_coverage_spans(bcb) { + debug!("{:?} has at least one coverage span. Get or make its counter", bcb); + let branching_counter_operand = self.get_or_make_counter_operand(bcb); if self.bcb_needs_branch_counters(bcb) { - self.make_branch_counters(&mut traversal, bcb, branching_counter_operand)?; + self.make_branch_counters(&traversal, bcb, branching_counter_operand); } } else { debug!( - "{:?} does not have any `CoverageSpan`s. A counter will only be added if \ + "{:?} does not have any coverage spans. A counter will only be added if \ and when a covered BCB has an expression dependency.", bcb, ); } } - if traversal.is_complete() { - Ok(()) - } else { - Error::from_string(format!( - "`TraverseCoverageGraphWithLoops` missed some `BasicCoverageBlock`s: {:?}", - traversal.unvisited(), - )) - } + assert!( + traversal.is_complete(), + "`TraverseCoverageGraphWithLoops` missed some `BasicCoverageBlock`s: {:?}", + traversal.unvisited(), + ); } fn make_branch_counters( &mut self, - traversal: &mut TraverseCoverageGraphWithLoops, + traversal: &TraverseCoverageGraphWithLoops<'_>, branching_bcb: BasicCoverageBlock, - branching_counter_operand: Operand, - ) -> Result<(), Error> { + branching_counter_operand: CovTerm, + ) { let branches = self.bcb_branches(branching_bcb); debug!( "{:?} has some branch(es) without counters:\n {}", @@ -319,10 +283,10 @@ counter", branch, branching_bcb ); - self.get_or_make_counter_operand(branch.target_bcb)? + self.get_or_make_counter_operand(branch.target_bcb) } else { debug!(" {:?} has multiple incoming edges, so adding an edge counter", branch); - self.get_or_make_edge_counter_operand(branching_bcb, branch.target_bcb)? 
+ self.get_or_make_edge_counter_operand(branching_bcb, branch.target_bcb) }; if let Some(sumup_counter_operand) = some_sumup_counter_operand.replace(branch_counter_operand) @@ -333,8 +297,7 @@ sumup_counter_operand, ); debug!(" [new intermediate expression: {:?}]", intermediate_expression); - let intermediate_expression_operand = intermediate_expression.as_operand(); - self.coverage_counters.intermediate_expressions.push(intermediate_expression); + let intermediate_expression_operand = intermediate_expression.as_term(); some_sumup_counter_operand.replace(intermediate_expression_operand); } } @@ -358,31 +321,18 @@ debug!("{:?} gets an expression: {:?}", expression_branch, expression); let bcb = expression_branch.target_bcb; if expression_branch.is_only_path_to_target() { - self.coverage_counters.set_bcb_counter(bcb, expression)?; + self.coverage_counters.set_bcb_counter(bcb, expression); } else { - self.coverage_counters.set_bcb_edge_counter(branching_bcb, bcb, expression)?; + self.coverage_counters.set_bcb_edge_counter(branching_bcb, bcb, expression); } - Ok(()) - } - - fn get_or_make_counter_operand(&mut self, bcb: BasicCoverageBlock) -> Result { - self.recursive_get_or_make_counter_operand(bcb, 1) } - fn recursive_get_or_make_counter_operand( - &mut self, - bcb: BasicCoverageBlock, - debug_indent_level: usize, - ) -> Result { + #[instrument(level = "debug", skip(self))] + fn get_or_make_counter_operand(&mut self, bcb: BasicCoverageBlock) -> CovTerm { // If the BCB already has a counter, return it. if let Some(counter_kind) = &self.coverage_counters.bcb_counters[bcb] { - debug!( - "{}{:?} already has a counter: {:?}", - NESTED_INDENT.repeat(debug_indent_level), - bcb, - counter_kind, - ); - return Ok(counter_kind.as_operand()); + debug!("{bcb:?} already has a counter: {counter_kind:?}"); + return counter_kind.as_term(); } // A BCB with only one incoming edge gets a simple `Counter` (via `make_counter()`). @@ -392,20 +342,12 @@ if one_path_to_target || self.bcb_predecessors(bcb).contains(&bcb) { let counter_kind = self.coverage_counters.make_counter(); if one_path_to_target { - debug!( - "{}{:?} gets a new counter: {:?}", - NESTED_INDENT.repeat(debug_indent_level), - bcb, - counter_kind, - ); + debug!("{bcb:?} gets a new counter: {counter_kind:?}"); } else { debug!( - "{}{:?} has itself as its own predecessor. It can't be part of its own \ - Expression sum, so it will get its own new counter: {:?}. (Note, the compiled \ - code will generate an infinite loop.)", - NESTED_INDENT.repeat(debug_indent_level), - bcb, - counter_kind, + "{bcb:?} has itself as its own predecessor. It can't be part of its own \ + Expression sum, so it will get its own new counter: {counter_kind:?}. \ + (Note, the compiled code will generate an infinite loop.)", ); } return self.coverage_counters.set_bcb_counter(bcb, counter_kind); @@ -415,24 +357,14 @@ // counters and/or expressions of its incoming edges. This will recursively get or create // counters for those incoming edges first, then call `make_expression()` to sum them up, // with additional intermediate expressions as needed. 
+ let _sumup_debug_span = debug_span!("(preparing sum-up expression)").entered(); + let mut predecessors = self.bcb_predecessors(bcb).to_owned().into_iter(); - debug!( - "{}{:?} has multiple incoming edges and will get an expression that sums them up...", - NESTED_INDENT.repeat(debug_indent_level), - bcb, - ); - let first_edge_counter_operand = self.recursive_get_or_make_edge_counter_operand( - predecessors.next().unwrap(), - bcb, - debug_indent_level + 1, - )?; + let first_edge_counter_operand = + self.get_or_make_edge_counter_operand(predecessors.next().unwrap(), bcb); let mut some_sumup_edge_counter_operand = None; for predecessor in predecessors { - let edge_counter_operand = self.recursive_get_or_make_edge_counter_operand( - predecessor, - bcb, - debug_indent_level + 1, - )?; + let edge_counter_operand = self.get_or_make_edge_counter_operand(predecessor, bcb); if let Some(sumup_edge_counter_operand) = some_sumup_edge_counter_operand.replace(edge_counter_operand) { @@ -441,13 +373,8 @@ Op::Add, edge_counter_operand, ); - debug!( - "{}new intermediate expression: {:?}", - NESTED_INDENT.repeat(debug_indent_level), - intermediate_expression - ); - let intermediate_expression_operand = intermediate_expression.as_operand(); - self.coverage_counters.intermediate_expressions.push(intermediate_expression); + debug!("new intermediate expression: {intermediate_expression:?}"); + let intermediate_expression_operand = intermediate_expression.as_term(); some_sumup_edge_counter_operand.replace(intermediate_expression_operand); } } @@ -456,59 +383,36 @@ Op::Add, some_sumup_edge_counter_operand.unwrap(), ); - debug!( - "{}{:?} gets a new counter (sum of predecessor counters): {:?}", - NESTED_INDENT.repeat(debug_indent_level), - bcb, - counter_kind - ); + drop(_sumup_debug_span); + + debug!("{bcb:?} gets a new counter (sum of predecessor counters): {counter_kind:?}"); self.coverage_counters.set_bcb_counter(bcb, counter_kind) } + #[instrument(level = "debug", skip(self))] fn get_or_make_edge_counter_operand( &mut self, from_bcb: BasicCoverageBlock, to_bcb: BasicCoverageBlock, - ) -> Result { - self.recursive_get_or_make_edge_counter_operand(from_bcb, to_bcb, 1) - } - - fn recursive_get_or_make_edge_counter_operand( - &mut self, - from_bcb: BasicCoverageBlock, - to_bcb: BasicCoverageBlock, - debug_indent_level: usize, - ) -> Result { + ) -> CovTerm { // If the source BCB has only one successor (assumed to be the given target), an edge // counter is unnecessary. Just get or make a counter for the source BCB. let successors = self.bcb_successors(from_bcb).iter(); if successors.len() == 1 { - return self.recursive_get_or_make_counter_operand(from_bcb, debug_indent_level + 1); + return self.get_or_make_counter_operand(from_bcb); } // If the edge already has a counter, return it. if let Some(counter_kind) = self.coverage_counters.bcb_edge_counters.get(&(from_bcb, to_bcb)) { - debug!( - "{}Edge {:?}->{:?} already has a counter: {:?}", - NESTED_INDENT.repeat(debug_indent_level), - from_bcb, - to_bcb, - counter_kind - ); - return Ok(counter_kind.as_operand()); + debug!("Edge {from_bcb:?}->{to_bcb:?} already has a counter: {counter_kind:?}"); + return counter_kind.as_term(); } // Make a new counter to count this edge. 
let counter_kind = self.coverage_counters.make_counter(); - debug!( - "{}Edge {:?}->{:?} gets a new counter: {:?}", - NESTED_INDENT.repeat(debug_indent_level), - from_bcb, - to_bcb, - counter_kind - ); + debug!("Edge {from_bcb:?}->{to_bcb:?} gets a new counter: {counter_kind:?}"); self.coverage_counters.set_bcb_edge_counter(from_bcb, to_bcb, counter_kind) } @@ -516,21 +420,14 @@ /// found, select any branch. fn choose_preferred_expression_branch( &self, - traversal: &TraverseCoverageGraphWithLoops, + traversal: &TraverseCoverageGraphWithLoops<'_>, branches: &[BcbBranch], ) -> BcbBranch { - let branch_needs_a_counter = |branch: &BcbBranch| self.branch_has_no_counter(branch); - - let some_reloop_branch = self.find_some_reloop_branch(traversal, &branches); - if let Some(reloop_branch_without_counter) = - some_reloop_branch.filter(branch_needs_a_counter) - { - debug!( - "Selecting reloop_branch={:?} that still needs a counter, to get the \ - `Expression`", - reloop_branch_without_counter - ); - reloop_branch_without_counter + let good_reloop_branch = self.find_good_reloop_branch(traversal, &branches); + if let Some(reloop_branch) = good_reloop_branch { + assert!(self.branch_has_no_counter(&reloop_branch)); + debug!("Selecting reloop branch {reloop_branch:?} to get an expression"); + reloop_branch } else { let &branch_without_counter = branches.iter().find(|&branch| self.branch_has_no_counter(branch)).expect( @@ -547,75 +444,52 @@ } } - /// At most, one of the branches (or its edge, from the branching_bcb, if the branch has - /// multiple incoming edges) can have a counter computed by expression. - /// - /// If at least one of the branches leads outside of a loop (`found_loop_exit` is - /// true), and at least one other branch does not exit the loop (the first of which - /// is captured in `some_reloop_branch`), it's likely any reloop branch will be - /// executed far more often than loop exit branch, making the reloop branch a better - /// candidate for an expression. - fn find_some_reloop_branch( + /// Tries to find a branch that leads back to the top of a loop, and that + /// doesn't already have a counter. Such branches are good candidates to + /// be given an expression (instead of a physical counter), because they + /// will tend to be executed more times than a loop-exit branch. + fn find_good_reloop_branch( &self, - traversal: &TraverseCoverageGraphWithLoops, + traversal: &TraverseCoverageGraphWithLoops<'_>, branches: &[BcbBranch], ) -> Option { - let branch_needs_a_counter = |branch: &BcbBranch| self.branch_has_no_counter(branch); - - let mut some_reloop_branch: Option = None; - for context in traversal.context_stack.iter().rev() { - if let Some((backedge_from_bcbs, _)) = &context.loop_backedges { - let mut found_loop_exit = false; - for &branch in branches.iter() { - if backedge_from_bcbs.iter().any(|&backedge_from_bcb| { - self.bcb_dominates(branch.target_bcb, backedge_from_bcb) - }) { - if let Some(reloop_branch) = some_reloop_branch { - if self.branch_has_no_counter(&reloop_branch) { - // we already found a candidate reloop_branch that still - // needs a counter - continue; - } - } - // The path from branch leads back to the top of the loop. Set this - // branch as the `reloop_branch`. If this branch already has a - // counter, and we find another reloop branch that doesn't have a - // counter yet, that branch will be selected as the `reloop_branch` - // instead. 
- some_reloop_branch = Some(branch); - } else { - // The path from branch leads outside this loop - found_loop_exit = true; - } - if found_loop_exit - && some_reloop_branch.filter(branch_needs_a_counter).is_some() - { - // Found both a branch that exits the loop and a branch that returns - // to the top of the loop (`reloop_branch`), and the `reloop_branch` - // doesn't already have a counter. - break; + // Consider each loop on the current traversal context stack, top-down. + for reloop_bcbs in traversal.reloop_bcbs_per_loop() { + let mut all_branches_exit_this_loop = true; + + // Try to find a branch that doesn't exit this loop and doesn't + // already have a counter. + for &branch in branches { + // A branch is a reloop branch if it dominates any BCB that has + // an edge back to the loop header. (Other branches are exits.) + let is_reloop_branch = reloop_bcbs.iter().any(|&reloop_bcb| { + self.basic_coverage_blocks.dominates(branch.target_bcb, reloop_bcb) + }); + + if is_reloop_branch { + all_branches_exit_this_loop = false; + if self.branch_has_no_counter(&branch) { + // We found a good branch to be given an expression. + return Some(branch); } + // Keep looking for another reloop branch without a counter. + } else { + // This branch exits the loop. } - if !found_loop_exit { - debug!( - "No branches exit the loop, so any branch without an existing \ - counter can have the `Expression`." - ); - break; - } - if some_reloop_branch.is_some() { - debug!( - "Found a branch that exits the loop and a branch the loops back to \ - the top of the loop (`reloop_branch`). The `reloop_branch` will \ - get the `Expression`, as long as it still needs a counter." - ); - break; - } - // else all branches exited this loop context, so run the same checks with - // the outer loop(s) } + + if !all_branches_exit_this_loop { + // We found one or more reloop branches, but all of them already + // have counters. Let the caller choose one of the exit branches. + debug!("All reloop branches had counters; skip checking the other loops"); + return None; + } + + // All of the branches exit this loop, so keep looking for a good + // reloop branch for one of the outer loops. } - some_reloop_branch + + None } #[inline] @@ -661,9 +535,4 @@ fn bcb_has_one_path_to_target(&self, bcb: BasicCoverageBlock) -> bool { self.bcb_predecessors(bcb).len() <= 1 } - - #[inline] - fn bcb_dominates(&self, dom: BasicCoverageBlock, node: BasicCoverageBlock) -> bool { - self.basic_coverage_blocks.dominates(dom, node) - } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/graph.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/graph.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/graph.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/graph.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,10 +1,12 @@ +use rustc_data_structures::captures::Captures; use rustc_data_structures::graph::dominators::{self, Dominators}; use rustc_data_structures::graph::{self, GraphSuccessors, WithNumNodes, WithStartNode}; use rustc_index::bit_set::BitSet; use rustc_index::{IndexSlice, IndexVec}; -use rustc_middle::mir::{self, BasicBlock, BasicBlockData, Terminator, TerminatorKind}; +use rustc_middle::mir::{self, BasicBlock, TerminatorKind}; use std::cmp::Ordering; +use std::collections::VecDeque; use std::ops::{Index, IndexMut}; /// A coverage-specific simplification of the MIR control flow graph (CFG). 
The `CoverageGraph`s @@ -36,9 +38,8 @@ } let bcb_data = &bcbs[bcb]; let mut bcb_successors = Vec::new(); - for successor in - bcb_filtered_successors(&mir_body, &bcb_data.terminator(mir_body).kind) - .filter_map(|successor_bb| bb_to_bcb[successor_bb]) + for successor in bcb_filtered_successors(&mir_body, bcb_data.last_bb()) + .filter_map(|successor_bb| bb_to_bcb[successor_bb]) { if !seen[successor] { seen[successor] = true; @@ -80,10 +81,9 @@ // intentionally omits unwind paths. // FIXME(#78544): MIR InstrumentCoverage: Improve coverage of `#[should_panic]` tests and // `catch_unwind()` handlers. - let mir_cfg_without_unwind = ShortCircuitPreorder::new(&mir_body, bcb_filtered_successors); let mut basic_blocks = Vec::new(); - for (bb, data) in mir_cfg_without_unwind { + for bb in short_circuit_preorder(mir_body, bcb_filtered_successors) { if let Some(last) = basic_blocks.last() { let predecessors = &mir_body.basic_blocks.predecessors()[bb]; if predecessors.len() > 1 || !predecessors.contains(last) { @@ -109,7 +109,7 @@ } basic_blocks.push(bb); - let term = data.terminator(); + let term = mir_body[bb].terminator(); match term.kind { TerminatorKind::Return { .. } @@ -147,7 +147,7 @@ | TerminatorKind::Unreachable | TerminatorKind::Drop { .. } | TerminatorKind::Call { .. } - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::Assert { .. } | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } @@ -288,9 +288,9 @@ /// not relevant to coverage analysis. `FalseUnwind`, for example, can be treated the same as /// a `Goto`, and merged with its successor into the same BCB. /// -/// Each BCB with at least one computed `CoverageSpan` will have no more than one `Counter`. +/// Each BCB with at least one computed coverage span will have no more than one `Counter`. /// In some cases, a BCB's execution count can be computed by `Expression`. Additional -/// disjoint `CoverageSpan`s in a BCB can also be counted by `Expression` (by adding `ZERO` +/// disjoint coverage spans in a BCB can also be counted by `Expression` (by adding `ZERO` /// to the BCB's primary counter or expression). /// /// The BCB CFG is critical to simplifying the coverage analysis by ensuring graph path-based @@ -316,11 +316,6 @@ pub fn last_bb(&self) -> BasicBlock { *self.basic_blocks.last().unwrap() } - - #[inline(always)] - pub fn terminator<'a, 'tcx>(&self, mir_body: &'a mir::Body<'tcx>) -> &'a Terminator<'tcx> { - &mir_body[self.last_bb()].terminator() - } } /// Represents a successor from a branching BasicCoverageBlock (such as the arms of a `SwitchInt`) @@ -362,26 +357,28 @@ } } -// Returns the `Terminator`s non-unwind successors. +// Returns the subset of a block's successors that are relevant to the coverage +// graph, i.e. those that do not represent unwinds or unreachable branches. // FIXME(#78544): MIR InstrumentCoverage: Improve coverage of `#[should_panic]` tests and // `catch_unwind()` handlers. fn bcb_filtered_successors<'a, 'tcx>( body: &'a mir::Body<'tcx>, - term_kind: &'a TerminatorKind<'tcx>, -) -> Box + 'a> { - Box::new( - match &term_kind { - // SwitchInt successors are never unwind, and all of them should be traversed. - TerminatorKind::SwitchInt { ref targets, .. } => { - None.into_iter().chain(targets.all_targets().into_iter().copied()) - } - // For all other kinds, return only the first successor, if any, and ignore unwinds. - // NOTE: `chain(&[])` is required to coerce the `option::iter` (from - // `next().into_iter()`) into the `mir::Successors` aliased type. 
- _ => term_kind.successors().next().into_iter().chain((&[]).into_iter().copied()), - } - .filter(move |&successor| body[successor].terminator().kind != TerminatorKind::Unreachable), - ) + bb: BasicBlock, +) -> impl Iterator + Captures<'a> + Captures<'tcx> { + let terminator = body[bb].terminator(); + + let take_n_successors = match terminator.kind { + // SwitchInt successors are never unwinds, so all of them should be traversed. + TerminatorKind::SwitchInt { .. } => usize::MAX, + // For all other kinds, return only the first successor (if any), ignoring any + // unwind successors. + _ => 1, + }; + + terminator + .successors() + .take(take_n_successors) + .filter(move |&successor| body[successor].terminator().kind != TerminatorKind::Unreachable) } /// Maintains separate worklists for each loop in the BasicCoverageBlock CFG, plus one for the @@ -389,57 +386,72 @@ /// ensures a loop is completely traversed before processing Blocks after the end of the loop. #[derive(Debug)] pub(super) struct TraversalContext { - /// From one or more backedges returning to a loop header. - pub loop_backedges: Option<(Vec, BasicCoverageBlock)>, + /// BCB with one or more incoming loop backedges, indicating which loop + /// this context is for. + /// + /// If `None`, this is the non-loop context for the function as a whole. + loop_header: Option, - /// worklist, to be traversed, of CoverageGraph in the loop with the given loop - /// backedges, such that the loop is the inner inner-most loop containing these - /// CoverageGraph - pub worklist: Vec, + /// Worklist of BCBs to be processed in this context. + worklist: VecDeque, } -pub(super) struct TraverseCoverageGraphWithLoops { - pub backedges: IndexVec>, - pub context_stack: Vec, +pub(super) struct TraverseCoverageGraphWithLoops<'a> { + basic_coverage_blocks: &'a CoverageGraph, + + backedges: IndexVec>, + context_stack: Vec, visited: BitSet, } -impl TraverseCoverageGraphWithLoops { - pub fn new(basic_coverage_blocks: &CoverageGraph) -> Self { - let start_bcb = basic_coverage_blocks.start_node(); +impl<'a> TraverseCoverageGraphWithLoops<'a> { + pub(super) fn new(basic_coverage_blocks: &'a CoverageGraph) -> Self { let backedges = find_loop_backedges(basic_coverage_blocks); - let context_stack = - vec![TraversalContext { loop_backedges: None, worklist: vec![start_bcb] }]; + + let worklist = VecDeque::from([basic_coverage_blocks.start_node()]); + let context_stack = vec![TraversalContext { loop_header: None, worklist }]; + // `context_stack` starts with a `TraversalContext` for the main function context (beginning // with the `start` BasicCoverageBlock of the function). New worklists are pushed to the top // of the stack as loops are entered, and popped off of the stack when a loop's worklist is // exhausted. let visited = BitSet::new_empty(basic_coverage_blocks.num_nodes()); - Self { backedges, context_stack, visited } + Self { basic_coverage_blocks, backedges, context_stack, visited } } - pub fn next(&mut self, basic_coverage_blocks: &CoverageGraph) -> Option { + /// For each loop on the loop context stack (top-down), yields a list of BCBs + /// within that loop that have an outgoing edge back to the loop header. 
+ pub(super) fn reloop_bcbs_per_loop(&self) -> impl Iterator { + self.context_stack + .iter() + .rev() + .filter_map(|context| context.loop_header) + .map(|header_bcb| self.backedges[header_bcb].as_slice()) + } + + pub(super) fn next(&mut self) -> Option { debug!( "TraverseCoverageGraphWithLoops::next - context_stack: {:?}", self.context_stack.iter().rev().collect::>() ); while let Some(context) = self.context_stack.last_mut() { - if let Some(next_bcb) = context.worklist.pop() { - if !self.visited.insert(next_bcb) { - debug!("Already visited: {:?}", next_bcb); + if let Some(bcb) = context.worklist.pop_front() { + if !self.visited.insert(bcb) { + debug!("Already visited: {bcb:?}"); continue; } - debug!("Visiting {:?}", next_bcb); - if self.backedges[next_bcb].len() > 0 { - debug!("{:?} is a loop header! Start a new TraversalContext...", next_bcb); + debug!("Visiting {bcb:?}"); + + if self.backedges[bcb].len() > 0 { + debug!("{bcb:?} is a loop header! Start a new TraversalContext..."); self.context_stack.push(TraversalContext { - loop_backedges: Some((self.backedges[next_bcb].clone(), next_bcb)), - worklist: Vec::new(), + loop_header: Some(bcb), + worklist: VecDeque::new(), }); } - self.extend_worklist(basic_coverage_blocks, next_bcb); - return Some(next_bcb); + self.add_successors_to_worklists(bcb); + return Some(bcb); } else { // Strip contexts with empty worklists from the top of the stack self.context_stack.pop(); @@ -449,13 +461,10 @@ None } - pub fn extend_worklist( - &mut self, - basic_coverage_blocks: &CoverageGraph, - bcb: BasicCoverageBlock, - ) { - let successors = &basic_coverage_blocks.successors[bcb]; + pub fn add_successors_to_worklists(&mut self, bcb: BasicCoverageBlock) { + let successors = &self.basic_coverage_blocks.successors[bcb]; debug!("{:?} has {} successors:", bcb, successors.len()); + for &successor in successors { if successor == bcb { debug!( @@ -464,56 +473,44 @@ bcb ); // Don't re-add this successor to the worklist. We are already processing it. + // FIXME: This claims to skip just the self-successor, but it actually skips + // all other successors as well. Does that matter? break; } - for context in self.context_stack.iter_mut().rev() { - // Add successors of the current BCB to the appropriate context. Successors that - // stay within a loop are added to the BCBs context worklist. Successors that - // exit the loop (they are not dominated by the loop header) must be reachable - // from other BCBs outside the loop, and they will be added to a different - // worklist. - // - // Branching blocks (with more than one successor) must be processed before - // blocks with only one successor, to prevent unnecessarily complicating - // `Expression`s by creating a Counter in a `BasicCoverageBlock` that the - // branching block would have given an `Expression` (or vice versa). - let (some_successor_to_add, some_loop_header) = - if let Some((_, loop_header)) = context.loop_backedges { - if basic_coverage_blocks.dominates(loop_header, successor) { - (Some(successor), Some(loop_header)) - } else { - (None, None) - } - } else { - (Some(successor), None) - }; - if let Some(successor_to_add) = some_successor_to_add { - if basic_coverage_blocks.successors[successor_to_add].len() > 1 { - debug!( - "{:?} successor is branching. 
Prioritize it at the beginning of \ - the {}", - successor_to_add, - if let Some(loop_header) = some_loop_header { - format!("worklist for the loop headed by {loop_header:?}") - } else { - String::from("non-loop worklist") - }, - ); - context.worklist.insert(0, successor_to_add); - } else { - debug!( - "{:?} successor is non-branching. Defer it to the end of the {}", - successor_to_add, - if let Some(loop_header) = some_loop_header { - format!("worklist for the loop headed by {loop_header:?}") - } else { - String::from("non-loop worklist") - }, - ); - context.worklist.push(successor_to_add); + + // Add successors of the current BCB to the appropriate context. Successors that + // stay within a loop are added to the BCBs context worklist. Successors that + // exit the loop (they are not dominated by the loop header) must be reachable + // from other BCBs outside the loop, and they will be added to a different + // worklist. + // + // Branching blocks (with more than one successor) must be processed before + // blocks with only one successor, to prevent unnecessarily complicating + // `Expression`s by creating a Counter in a `BasicCoverageBlock` that the + // branching block would have given an `Expression` (or vice versa). + + let context = self + .context_stack + .iter_mut() + .rev() + .find(|context| match context.loop_header { + Some(loop_header) => { + self.basic_coverage_blocks.dominates(loop_header, successor) } - break; - } + None => true, + }) + .unwrap_or_else(|| bug!("should always fall back to the root non-loop context")); + debug!("adding to worklist for {:?}", context.loop_header); + + // FIXME: The code below had debug messages claiming to add items to a + // particular end of the worklist, but was confused about which end was + // which. The existing behaviour has been preserved for now, but it's + // unclear what the intended behaviour was. 
+ + if self.basic_coverage_blocks.successors[successor].len() > 1 { + context.worklist.push_back(successor); + } else { + context.worklist.push_front(successor); } } } @@ -553,66 +550,28 @@ backedges } -pub struct ShortCircuitPreorder< - 'a, - 'tcx, - F: Fn(&'a mir::Body<'tcx>, &'a TerminatorKind<'tcx>) -> Box + 'a>, -> { +fn short_circuit_preorder<'a, 'tcx, F, Iter>( body: &'a mir::Body<'tcx>, - visited: BitSet, - worklist: Vec, filtered_successors: F, -} - -impl< - 'a, - 'tcx, - F: Fn(&'a mir::Body<'tcx>, &'a TerminatorKind<'tcx>) -> Box + 'a>, -> ShortCircuitPreorder<'a, 'tcx, F> -{ - pub fn new( - body: &'a mir::Body<'tcx>, - filtered_successors: F, - ) -> ShortCircuitPreorder<'a, 'tcx, F> { - let worklist = vec![mir::START_BLOCK]; - - ShortCircuitPreorder { - body, - visited: BitSet::new_empty(body.basic_blocks.len()), - worklist, - filtered_successors, - } - } -} - -impl< - 'a, - 'tcx, - F: Fn(&'a mir::Body<'tcx>, &'a TerminatorKind<'tcx>) -> Box + 'a>, -> Iterator for ShortCircuitPreorder<'a, 'tcx, F> +) -> impl Iterator + Captures<'a> + Captures<'tcx> +where + F: Fn(&'a mir::Body<'tcx>, BasicBlock) -> Iter, + Iter: Iterator, { - type Item = (BasicBlock, &'a BasicBlockData<'tcx>); + let mut visited = BitSet::new_empty(body.basic_blocks.len()); + let mut worklist = vec![mir::START_BLOCK]; - fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> { - while let Some(idx) = self.worklist.pop() { - if !self.visited.insert(idx) { + std::iter::from_fn(move || { + while let Some(bb) = worklist.pop() { + if !visited.insert(bb) { continue; } - let data = &self.body[idx]; - - if let Some(ref term) = data.terminator { - self.worklist.extend((self.filtered_successors)(&self.body, &term.kind)); - } + worklist.extend(filtered_successors(body, bb)); - return Some((idx, data)); + return Some(bb); } None - } - - fn size_hint(&self) -> (usize, Option) { - let size = self.body.basic_blocks.len() - self.visited.count(); - (size, Some(size)) - } + }) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,14 +8,12 @@ mod tests; use self::counters::{BcbCounter, CoverageCounters}; -use self::graph::{BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph}; -use self::spans::{CoverageSpan, CoverageSpans}; +use self::graph::CoverageGraph; +use self::spans::CoverageSpans; use crate::MirPass; -use rustc_data_structures::graph::WithNumNodes; use rustc_data_structures::sync::Lrc; -use rustc_index::IndexVec; use rustc_middle::hir; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::mir::coverage::*; @@ -28,18 +26,6 @@ use rustc_span::source_map::SourceMap; use rustc_span::{ExpnKind, SourceFile, Span, Symbol}; -/// A simple error message wrapper for `coverage::Error`s. -#[derive(Debug)] -struct Error { - message: String, -} - -impl Error { - pub fn from_string(message: String) -> Result { - Err(Self { message }) - } -} - /// Inserts `StatementKind::Coverage` statements that either instrument the binary with injected /// counters, via intrinsic `llvm.instrprof.increment`, and/or inject metadata used during codegen /// to construct the coverage map. 
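The counters.rs and mod.rs hunks above replace the old Operand-based bookkeeping with the CovTerm/Expression/Op table from rustc_middle::mir::coverage, so that a branch whose count can be derived from other counters gets a zero-cost expression instead of a physical counter. As a rough, self-contained illustration of that idea only (not part of this patch, and not rustc's actual code: Term, Expr, eval, and main below are made-up stand-ins for CovTerm, Expression, and the coverage-map evaluation that happens in llvm-cov), a derived expression computes one branch's execution count from the others after the fact:

// Simplified stand-ins for CovTerm / Expression / Op; names are illustrative only.
#[derive(Copy, Clone)]
enum Term {
    Counter(usize),    // index of a physical counter incremented at runtime
    Expression(usize), // index into the expression table, evaluated afterwards
}

#[derive(Copy, Clone)]
enum Op {
    Add,
    Subtract,
}

#[derive(Copy, Clone)]
struct Expr {
    lhs: Term,
    op: Op,
    rhs: Term,
}

// Evaluate a term against the collected counter values and the expression table.
fn eval(term: Term, counters: &[u64], exprs: &[Expr]) -> u64 {
    match term {
        Term::Counter(i) => counters[i],
        Term::Expression(i) => {
            let Expr { lhs, op, rhs } = exprs[i];
            let (l, r) = (eval(lhs, counters, exprs), eval(rhs, counters, exprs));
            match op {
                Op::Add => l + r,
                Op::Subtract => l - r,
            }
        }
    }
}

fn main() {
    // Physical counters: the branching block ran 10 times, its "then" branch 7 times.
    let counters = [10, 7];
    // The "else" branch needs no physical counter: it is parent minus "then".
    let exprs = [Expr { lhs: Term::Counter(0), op: Op::Subtract, rhs: Term::Counter(1) }];
    assert_eq!(eval(Term::Expression(0), &counters, &exprs), 3);
}

In the patched pass, set_bcb_counter and set_bcb_edge_counter record which BCB node or edge each term belongs to, make_expression pushes entries onto the expressions table, and the mappings built in create_mappings_and_inject_coverage_statements refer to those entries by ExpressionId.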
@@ -154,7 +140,7 @@ let body_span = self.body_span; //////////////////////////////////////////////////// - // Compute `CoverageSpan`s from the `CoverageGraph`. + // Compute coverage spans from the `CoverageGraph`. let coverage_spans = CoverageSpans::generate_coverage_spans( &self.mir_body, fn_sig_span, @@ -164,179 +150,106 @@ //////////////////////////////////////////////////// // Create an optimized mix of `Counter`s and `Expression`s for the `CoverageGraph`. Ensure - // every `CoverageSpan` has a `Counter` or `Expression` assigned to its `BasicCoverageBlock` + // every coverage span has a `Counter` or `Expression` assigned to its `BasicCoverageBlock` // and all `Expression` dependencies (operands) are also generated, for any other - // `BasicCoverageBlock`s not already associated with a `CoverageSpan`. - // - // Intermediate expressions (used to compute other `Expression` values), which have no - // direct association with any `BasicCoverageBlock`, are accumulated inside `coverage_counters`. - let result = self - .coverage_counters - .make_bcb_counters(&mut self.basic_coverage_blocks, &coverage_spans); - - if let Ok(()) = result { - //////////////////////////////////////////////////// - // Remove the counter or edge counter from of each `CoverageSpan`s associated - // `BasicCoverageBlock`, and inject a `Coverage` statement into the MIR. - // - // `Coverage` statements injected from `CoverageSpan`s will include the code regions - // (source code start and end positions) to be counted by the associated counter. - // - // These `CoverageSpan`-associated counters are removed from their associated - // `BasicCoverageBlock`s so that the only remaining counters in the `CoverageGraph` - // are indirect counters (to be injected next, without associated code regions). - self.inject_coverage_span_counters(coverage_spans); - - //////////////////////////////////////////////////// - // For any remaining `BasicCoverageBlock` counters (that were not associated with - // any `CoverageSpan`), inject `Coverage` statements (_without_ code region `Span`s) - // to ensure `BasicCoverageBlock` counters that other `Expression`s may depend on - // are in fact counted, even though they don't directly contribute to counting - // their own independent code region's coverage. - self.inject_indirect_counters(); - - // Intermediate expressions will be injected as the final step, after generating - // debug output, if any. - //////////////////////////////////////////////////// - }; + // `BasicCoverageBlock`s not already associated with a coverage span. + let bcb_has_coverage_spans = |bcb| coverage_spans.bcb_has_coverage_spans(bcb); + self.coverage_counters + .make_bcb_counters(&self.basic_coverage_blocks, bcb_has_coverage_spans); + + let mappings = self.create_mappings_and_inject_coverage_statements(&coverage_spans); + + self.mir_body.function_coverage_info = Some(Box::new(FunctionCoverageInfo { + function_source_hash: self.function_source_hash, + num_counters: self.coverage_counters.num_counters(), + expressions: self.coverage_counters.take_expressions(), + mappings, + })); + } + + /// For each [`BcbCounter`] associated with a BCB node or BCB edge, create + /// any corresponding mappings (for BCB nodes only), and inject any necessary + /// coverage statements into MIR. 
+ fn create_mappings_and_inject_coverage_statements( + &mut self, + coverage_spans: &CoverageSpans, + ) -> Vec { + let source_map = self.tcx.sess.source_map(); + let body_span = self.body_span; - if let Err(e) = result { - bug!("Error processing: {:?}: {:?}", self.mir_body.source.def_id(), e.message) - }; + use rustc_session::RemapFileNameExt; + let file_name = + Symbol::intern(&self.source_file.name.for_codegen(self.tcx.sess).to_string_lossy()); + + let mut mappings = Vec::new(); + + // Process the counters and spans associated with BCB nodes. + for (bcb, counter_kind) in self.coverage_counters.bcb_node_counters() { + let spans = coverage_spans.spans_for_bcb(bcb); + let has_mappings = !spans.is_empty(); + + // If this BCB has any coverage spans, add corresponding mappings to + // the mappings table. + if has_mappings { + let term = counter_kind.as_term(); + mappings.extend(spans.iter().map(|&span| { + let code_region = make_code_region(source_map, file_name, span, body_span); + Mapping { code_region, term } + })); + } - //////////////////////////////////////////////////// - // Finally, inject the intermediate expressions collected along the way. - for intermediate_expression in &self.coverage_counters.intermediate_expressions { - inject_intermediate_expression( - self.mir_body, - self.make_mir_coverage_kind(intermediate_expression), - ); + let do_inject = match counter_kind { + // Counter-increment statements always need to be injected. + BcbCounter::Counter { .. } => true, + // The only purpose of expression-used statements is to detect + // when a mapping is unreachable, so we only inject them for + // expressions with one or more mappings. + BcbCounter::Expression { .. } => has_mappings, + }; + if do_inject { + inject_statement( + self.mir_body, + self.make_mir_coverage_kind(counter_kind), + self.basic_coverage_blocks[bcb].leader_bb(), + ); + } } - } - - /// Inject a counter for each `CoverageSpan`. There can be multiple `CoverageSpan`s for a given - /// BCB, but only one actual counter needs to be incremented per BCB. `bb_counters` maps each - /// `bcb` to its `Counter`, when injected. Subsequent `CoverageSpan`s for a BCB that already has - /// a `Counter` will inject an `Expression` instead, and compute its value by adding `ZERO` to - /// the BCB `Counter` value. - fn inject_coverage_span_counters(&mut self, coverage_spans: Vec) { - let tcx = self.tcx; - let source_map = tcx.sess.source_map(); - let body_span = self.body_span; - let file_name = Symbol::intern(&self.source_file.name.prefer_remapped().to_string_lossy()); - let mut bcb_counters = IndexVec::from_elem_n(None, self.basic_coverage_blocks.num_nodes()); - for covspan in coverage_spans { - let bcb = covspan.bcb; - let span = covspan.span; - let counter_kind = if let Some(&counter_operand) = bcb_counters[bcb].as_ref() { - self.coverage_counters.make_identity_counter(counter_operand) - } else if let Some(counter_kind) = self.coverage_counters.take_bcb_counter(bcb) { - bcb_counters[bcb] = Some(counter_kind.as_operand()); - counter_kind - } else { - bug!("Every BasicCoverageBlock should have a Counter or Expression"); + // Process the counters associated with BCB edges. + for (from_bcb, to_bcb, counter_kind) in self.coverage_counters.bcb_edge_counters() { + let do_inject = match counter_kind { + // Counter-increment statements always need to be injected. + BcbCounter::Counter { .. } => true, + // BCB-edge expressions never have mappings, so they never need + // a corresponding statement. + BcbCounter::Expression { .. 
} => false, }; + if !do_inject { + continue; + } - let code_region = make_code_region(source_map, file_name, span, body_span); - - inject_statement( - self.mir_body, - self.make_mir_coverage_kind(&counter_kind), - self.bcb_leader_bb(bcb), - Some(code_region), + // We need to inject a coverage statement into a new BB between the + // last BB of `from_bcb` and the first BB of `to_bcb`. + let from_bb = self.basic_coverage_blocks[from_bcb].last_bb(); + let to_bb = self.basic_coverage_blocks[to_bcb].leader_bb(); + + let new_bb = inject_edge_counter_basic_block(self.mir_body, from_bb, to_bb); + debug!( + "Edge {from_bcb:?} (last {from_bb:?}) -> {to_bcb:?} (leader {to_bb:?}) \ + requires a new MIR BasicBlock {new_bb:?} for edge counter {counter_kind:?}", ); - } - } - /// `inject_coverage_span_counters()` looped through the `CoverageSpan`s and injected the - /// counter from the `CoverageSpan`s `BasicCoverageBlock`, removing it from the BCB in the - /// process (via `take_counter()`). - /// - /// Any other counter associated with a `BasicCoverageBlock`, or its incoming edge, but not - /// associated with a `CoverageSpan`, should only exist if the counter is an `Expression` - /// dependency (one of the expression operands). Collect them, and inject the additional - /// counters into the MIR, without a reportable coverage span. - fn inject_indirect_counters(&mut self) { - let mut bcb_counters_without_direct_coverage_spans = Vec::new(); - for (target_bcb, counter_kind) in self.coverage_counters.drain_bcb_counters() { - bcb_counters_without_direct_coverage_spans.push((None, target_bcb, counter_kind)); - } - for ((from_bcb, target_bcb), counter_kind) in - self.coverage_counters.drain_bcb_edge_counters() - { - bcb_counters_without_direct_coverage_spans.push(( - Some(from_bcb), - target_bcb, - counter_kind, - )); - } - - for (edge_from_bcb, target_bcb, counter_kind) in bcb_counters_without_direct_coverage_spans - { - match counter_kind { - BcbCounter::Counter { .. } => { - let inject_to_bb = if let Some(from_bcb) = edge_from_bcb { - // The MIR edge starts `from_bb` (the outgoing / last BasicBlock in - // `from_bcb`) and ends at `to_bb` (the incoming / first BasicBlock in the - // `target_bcb`; also called the `leader_bb`). - let from_bb = self.bcb_last_bb(from_bcb); - let to_bb = self.bcb_leader_bb(target_bcb); - - let new_bb = inject_edge_counter_basic_block(self.mir_body, from_bb, to_bb); - debug!( - "Edge {:?} (last {:?}) -> {:?} (leader {:?}) requires a new MIR \ - BasicBlock {:?}, for unclaimed edge counter {:?}", - edge_from_bcb, from_bb, target_bcb, to_bb, new_bb, counter_kind, - ); - new_bb - } else { - let target_bb = self.bcb_last_bb(target_bcb); - debug!( - "{:?} ({:?}) gets a new Coverage statement for unclaimed counter {:?}", - target_bcb, target_bb, counter_kind, - ); - target_bb - }; - - inject_statement( - self.mir_body, - self.make_mir_coverage_kind(&counter_kind), - inject_to_bb, - None, - ); - } - BcbCounter::Expression { .. } => inject_intermediate_expression( - self.mir_body, - self.make_mir_coverage_kind(&counter_kind), - ), - } + // Inject a counter into the newly-created BB. 
+ inject_statement(self.mir_body, self.make_mir_coverage_kind(&counter_kind), new_bb); } - } - - #[inline] - fn bcb_leader_bb(&self, bcb: BasicCoverageBlock) -> BasicBlock { - self.bcb_data(bcb).leader_bb() - } - - #[inline] - fn bcb_last_bb(&self, bcb: BasicCoverageBlock) -> BasicBlock { - self.bcb_data(bcb).last_bb() - } - #[inline] - fn bcb_data(&self, bcb: BasicCoverageBlock) -> &BasicCoverageBlockData { - &self.basic_coverage_blocks[bcb] + mappings } fn make_mir_coverage_kind(&self, counter_kind: &BcbCounter) -> CoverageKind { match *counter_kind { - BcbCounter::Counter { id } => { - CoverageKind::Counter { function_source_hash: self.function_source_hash, id } - } - BcbCounter::Expression { id, lhs, op, rhs } => { - CoverageKind::Expression { id, lhs, op, rhs } - } + BcbCounter::Counter { id } => CoverageKind::CounterIncrement { id }, + BcbCounter::Expression { id } => CoverageKind::ExpressionUsed { id }, } } } @@ -364,42 +277,17 @@ new_bb } -fn inject_statement( - mir_body: &mut mir::Body<'_>, - counter_kind: CoverageKind, - bb: BasicBlock, - some_code_region: Option, -) { - debug!( - " injecting statement {:?} for {:?} at code region: {:?}", - counter_kind, bb, some_code_region - ); +fn inject_statement(mir_body: &mut mir::Body<'_>, counter_kind: CoverageKind, bb: BasicBlock) { + debug!(" injecting statement {counter_kind:?} for {bb:?}"); let data = &mut mir_body[bb]; let source_info = data.terminator().source_info; let statement = Statement { source_info, - kind: StatementKind::Coverage(Box::new(Coverage { - kind: counter_kind, - code_region: some_code_region, - })), + kind: StatementKind::Coverage(Box::new(Coverage { kind: counter_kind })), }; data.statements.insert(0, statement); } -// Non-code expressions are injected into the coverage map, without generating executable code. -fn inject_intermediate_expression(mir_body: &mut mir::Body<'_>, expression: CoverageKind) { - debug_assert!(matches!(expression, CoverageKind::Expression { .. })); - debug!(" injecting non-code expression {:?}", expression); - let inject_in_bb = mir::START_BLOCK; - let data = &mut mir_body[inject_in_bb]; - let source_info = data.terminator().source_info; - let statement = Statement { - source_info, - kind: StatementKind::Coverage(Box::new(Coverage { kind: expression, code_region: None })), - }; - data.statements.push(statement); -} - /// Convert the Span into its file name, start line and column, and end line and column fn make_code_region( source_map: &SourceMap, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/query.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/query.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/query.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/query.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,100 +2,31 @@ use rustc_data_structures::captures::Captures; use rustc_middle::mir::coverage::*; -use rustc_middle::mir::{self, Body, Coverage, CoverageInfo}; +use rustc_middle::mir::{Body, Coverage, CoverageIdsInfo}; use rustc_middle::query::Providers; use rustc_middle::ty::{self, TyCtxt}; -use rustc_span::def_id::DefId; /// A `query` provider for retrieving coverage information injected into MIR. 
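In the new `create_mappings_and_inject_coverage_statements` above, each BCB node's counter contributes one `Mapping` per coverage span, and a coverage statement is injected for every physical counter but only for those expressions that actually have mappings. The sketch below models that decision with simplified stand-in types (spans as `(lo, hi)` byte ranges); these are not rustc's real `Mapping` or `BcbCounter` types.

/// Simplified stand-ins for the BCB counter kinds and the mapping table entries.
#[derive(Clone, Copy, Debug, PartialEq)]
enum BcbCounter {
    Counter { id: u32 },
    Expression { id: u32 },
}

#[derive(Debug)]
struct Mapping {
    term: BcbCounter,
    code_region: (u32, u32), // (lo, hi) byte range standing in for a real code region
}

/// For one BCB: add mappings for its spans, and report whether a coverage
/// statement needs to be injected for its counter.
fn mappings_and_injection(
    counter: BcbCounter,
    spans: &[(u32, u32)],
    mappings: &mut Vec<Mapping>,
) -> bool {
    let has_mappings = !spans.is_empty();
    if has_mappings {
        mappings.extend(spans.iter().map(|&code_region| Mapping { term: counter, code_region }));
    }
    match counter {
        // Counter-increment statements always need to be injected.
        BcbCounter::Counter { .. } => true,
        // Expression-used statements only matter for detecting unreachable
        // mappings, so they are injected only when mappings exist.
        BcbCounter::Expression { .. } => has_mappings,
    }
}

fn main() {
    let mut mappings = Vec::new();
    assert!(mappings_and_injection(BcbCounter::Counter { id: 0 }, &[], &mut mappings));
    assert!(!mappings_and_injection(BcbCounter::Expression { id: 0 }, &[], &mut mappings));
    assert!(mappings_and_injection(BcbCounter::Expression { id: 1 }, &[(10, 20)], &mut mappings));
    assert_eq!(mappings.len(), 1);
}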
pub(crate) fn provide(providers: &mut Providers) { - providers.coverageinfo = |tcx, def_id| coverageinfo(tcx, def_id); - providers.covered_code_regions = |tcx, def_id| covered_code_regions(tcx, def_id); + providers.coverage_ids_info = |tcx, def_id| coverage_ids_info(tcx, def_id); } -/// Coverage codegen needs to know the total number of counter IDs and expression IDs that have -/// been used by a function's coverage mappings. These totals are used to create vectors to hold -/// the relevant counter and expression data, and the maximum counter ID (+ 1) is also needed by -/// the `llvm.instrprof.increment` intrinsic. -/// -/// MIR optimization may split and duplicate some BasicBlock sequences, or optimize out some code -/// including injected counters. (It is OK if some counters are optimized out, but those counters -/// are still included in the total `num_counters` or `num_expressions`.) Simply counting the -/// calls may not work; but computing the number of counters or expressions by adding `1` to the -/// highest ID (for a given instrumented function) is valid. -/// -/// It's possible for a coverage expression to remain in MIR while one or both of its operands -/// have been optimized away. To avoid problems in codegen, we include those operands' IDs when -/// determining the maximum counter/expression ID, even if the underlying counter/expression is -/// no longer present. -struct CoverageVisitor { - max_counter_id: CounterId, - max_expression_id: ExpressionId, -} - -impl CoverageVisitor { - /// Updates `max_counter_id` to the maximum encountered counter ID. - #[inline(always)] - fn update_max_counter_id(&mut self, counter_id: CounterId) { - self.max_counter_id = self.max_counter_id.max(counter_id); - } - - /// Updates `max_expression_id` to the maximum encountered expression ID. - #[inline(always)] - fn update_max_expression_id(&mut self, expression_id: ExpressionId) { - self.max_expression_id = self.max_expression_id.max(expression_id); - } - - fn update_from_expression_operand(&mut self, operand: Operand) { - match operand { - Operand::Counter(id) => self.update_max_counter_id(id), - Operand::Expression(id) => self.update_max_expression_id(id), - Operand::Zero => {} - } - } - - fn visit_body(&mut self, body: &Body<'_>) { - for coverage in all_coverage_in_mir_body(body) { - self.visit_coverage(coverage); - } - } - - fn visit_coverage(&mut self, coverage: &Coverage) { - match coverage.kind { - CoverageKind::Counter { id, .. } => self.update_max_counter_id(id), - CoverageKind::Expression { id, lhs, rhs, .. } => { - self.update_max_expression_id(id); - self.update_from_expression_operand(lhs); - self.update_from_expression_operand(rhs); - } - CoverageKind::Unreachable => {} - } - } -} - -fn coverageinfo<'tcx>(tcx: TyCtxt<'tcx>, instance_def: ty::InstanceDef<'tcx>) -> CoverageInfo { +/// Query implementation for `coverage_ids_info`. +fn coverage_ids_info<'tcx>( + tcx: TyCtxt<'tcx>, + instance_def: ty::InstanceDef<'tcx>, +) -> CoverageIdsInfo { let mir_body = tcx.instance_mir(instance_def); - let mut coverage_visitor = CoverageVisitor { - max_counter_id: CounterId::START, - max_expression_id: ExpressionId::START, - }; - - coverage_visitor.visit_body(mir_body); - - // Add 1 to the highest IDs to get the total number of IDs. 
- CoverageInfo { - num_counters: (coverage_visitor.max_counter_id + 1).as_u32(), - num_expressions: (coverage_visitor.max_expression_id + 1).as_u32(), - } -} + let max_counter_id = all_coverage_in_mir_body(mir_body) + .filter_map(|coverage| match coverage.kind { + CoverageKind::CounterIncrement { id } => Some(id), + _ => None, + }) + .max() + .unwrap_or(CounterId::START); -fn covered_code_regions(tcx: TyCtxt<'_>, def_id: DefId) -> Vec<&CodeRegion> { - let body = mir_body(tcx, def_id); - all_coverage_in_mir_body(body) - // Not all coverage statements have an attached code region. - .filter_map(|coverage| coverage.code_region.as_ref()) - .collect() + CoverageIdsInfo { max_counter_id } } fn all_coverage_in_mir_body<'a, 'tcx>( @@ -115,11 +46,3 @@ let scope_data = &body.source_scopes[statement.source_info.scope]; scope_data.inlined.is_some() || scope_data.inlined_parent_scope.is_some() } - -/// This function ensures we obtain the correct MIR for the given item irrespective of -/// whether that means const mir or runtime mir. For `const fn` this opts for runtime -/// mir. -fn mir_body(tcx: TyCtxt<'_>, def_id: DefId) -> &mir::Body<'_> { - let def = ty::InstanceDef::Item(def_id); - tcx.instance_mir(def) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,193 @@ +use rustc_data_structures::captures::Captures; +use rustc_middle::mir::{ + self, AggregateKind, FakeReadCause, Rvalue, Statement, StatementKind, Terminator, + TerminatorKind, +}; +use rustc_span::Span; + +use crate::coverage::graph::{BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph}; +use crate::coverage::spans::CoverageSpan; + +pub(super) fn mir_to_initial_sorted_coverage_spans( + mir_body: &mir::Body<'_>, + fn_sig_span: Span, + body_span: Span, + basic_coverage_blocks: &CoverageGraph, +) -> Vec { + let mut initial_spans = Vec::with_capacity(mir_body.basic_blocks.len() * 2); + for (bcb, bcb_data) in basic_coverage_blocks.iter_enumerated() { + initial_spans.extend(bcb_to_initial_coverage_spans(mir_body, body_span, bcb, bcb_data)); + } + + if initial_spans.is_empty() { + // This can happen if, for example, the function is unreachable (contains only a + // `BasicBlock`(s) with an `Unreachable` terminator). + return initial_spans; + } + + initial_spans.push(CoverageSpan::for_fn_sig(fn_sig_span)); + + initial_spans.sort_by(|a, b| { + // First sort by span start. + Ord::cmp(&a.span.lo(), &b.span.lo()) + // If span starts are the same, sort by span end in reverse order. + // This ensures that if spans A and B are adjacent in the list, + // and they overlap but are not equal, then either: + // - Span A extends further left, or + // - Both have the same start and span A extends further right + .then_with(|| Ord::cmp(&a.span.hi(), &b.span.hi()).reverse()) + // If both spans are equal, sort the BCBs in dominator order, + // so that dominating BCBs come before other BCBs they dominate. + .then_with(|| basic_coverage_blocks.cmp_in_dominator_order(a.bcb, b.bcb)) + // If two spans are otherwise identical, put closure spans first, + // as this seems to be what the refinement step expects. 
+ .then_with(|| Ord::cmp(&a.is_closure, &b.is_closure).reverse()) + }); + + initial_spans +} + +// Generate a set of `CoverageSpan`s from the filtered set of `Statement`s and `Terminator`s of +// the `BasicBlock`(s) in the given `BasicCoverageBlockData`. One `CoverageSpan` is generated +// for each `Statement` and `Terminator`. (Note that subsequent stages of coverage analysis will +// merge some `CoverageSpan`s, at which point a `CoverageSpan` may represent multiple +// `Statement`s and/or `Terminator`s.) +fn bcb_to_initial_coverage_spans<'a, 'tcx>( + mir_body: &'a mir::Body<'tcx>, + body_span: Span, + bcb: BasicCoverageBlock, + bcb_data: &'a BasicCoverageBlockData, +) -> impl Iterator + Captures<'a> + Captures<'tcx> { + bcb_data.basic_blocks.iter().flat_map(move |&bb| { + let data = &mir_body[bb]; + + let statement_spans = data.statements.iter().filter_map(move |statement| { + let expn_span = filtered_statement_span(statement)?; + let span = function_source_span(expn_span, body_span); + + Some(CoverageSpan::new(span, expn_span, bcb, is_closure(statement))) + }); + + let terminator_span = Some(data.terminator()).into_iter().filter_map(move |terminator| { + let expn_span = filtered_terminator_span(terminator)?; + let span = function_source_span(expn_span, body_span); + + Some(CoverageSpan::new(span, expn_span, bcb, false)) + }); + + statement_spans.chain(terminator_span) + }) +} + +fn is_closure(statement: &Statement<'_>) -> bool { + match statement.kind { + StatementKind::Assign(box (_, Rvalue::Aggregate(box ref agg_kind, _))) => match agg_kind { + AggregateKind::Closure(_, _) | AggregateKind::Coroutine(_, _, _) => true, + _ => false, + }, + _ => false, + } +} + +/// If the MIR `Statement` has a span contributive to computing coverage spans, +/// return it; otherwise return `None`. +fn filtered_statement_span(statement: &Statement<'_>) -> Option { + match statement.kind { + // These statements have spans that are often outside the scope of the executed source code + // for their parent `BasicBlock`. + StatementKind::StorageLive(_) + | StatementKind::StorageDead(_) + // Coverage should not be encountered, but don't inject coverage coverage + | StatementKind::Coverage(_) + // Ignore `ConstEvalCounter`s + | StatementKind::ConstEvalCounter + // Ignore `Nop`s + | StatementKind::Nop => None, + + // FIXME(#78546): MIR InstrumentCoverage - Can the source_info.span for `FakeRead` + // statements be more consistent? + // + // FakeReadCause::ForGuardBinding, in this example: + // match somenum { + // x if x < 1 => { ... } + // }... + // The BasicBlock within the match arm code included one of these statements, but the span + // for it covered the `1` in this source. The actual statements have nothing to do with that + // source span: + // FakeRead(ForGuardBinding, _4); + // where `_4` is: + // _4 = &_1; (at the span for the first `x`) + // and `_1` is the `Place` for `somenum`. + // + // If and when the Issue is resolved, remove this special case match pattern: + StatementKind::FakeRead(box (FakeReadCause::ForGuardBinding, _)) => None, + + // Retain spans from all other statements + StatementKind::FakeRead(box (_, _)) // Not including `ForGuardBinding` + | StatementKind::Intrinsic(..) + | StatementKind::Assign(_) + | StatementKind::SetDiscriminant { .. } + | StatementKind::Deinit(..) + | StatementKind::Retag(_, _) + | StatementKind::PlaceMention(..) 
+ | StatementKind::AscribeUserType(_, _) => { + Some(statement.source_info.span) + } + } +} + +/// If the MIR `Terminator` has a span contributive to computing coverage spans, +/// return it; otherwise return `None`. +fn filtered_terminator_span(terminator: &Terminator<'_>) -> Option { + match terminator.kind { + // These terminators have spans that don't positively contribute to computing a reasonable + // span of actually executed source code. (For example, SwitchInt terminators extracted from + // an `if condition { block }` has a span that includes the executed block, if true, + // but for coverage, the code region executed, up to *and* through the SwitchInt, + // actually stops before the if's block.) + TerminatorKind::Unreachable // Unreachable blocks are not connected to the MIR CFG + | TerminatorKind::Assert { .. } + | TerminatorKind::Drop { .. } + | TerminatorKind::SwitchInt { .. } + // For `FalseEdge`, only the `real` branch is taken, so it is similar to a `Goto`. + | TerminatorKind::FalseEdge { .. } + | TerminatorKind::Goto { .. } => None, + + // Call `func` operand can have a more specific span when part of a chain of calls + | TerminatorKind::Call { ref func, .. } => { + let mut span = terminator.source_info.span; + if let mir::Operand::Constant(box constant) = func { + if constant.span.lo() > span.lo() { + span = span.with_lo(constant.span.lo()); + } + } + Some(span) + } + + // Retain spans from all other terminators + TerminatorKind::UnwindResume + | TerminatorKind::UnwindTerminate(_) + | TerminatorKind::Return + | TerminatorKind::Yield { .. } + | TerminatorKind::CoroutineDrop + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::InlineAsm { .. } => { + Some(terminator.source_info.span) + } + } +} + +/// Returns an extrapolated span (pre-expansion[^1]) corresponding to a range +/// within the function's body source. This span is guaranteed to be contained +/// within, or equal to, the `body_span`. If the extrapolated span is not +/// contained within the `body_span`, the `body_span` is returned. +/// +/// [^1]Expansions result from Rust syntax including macros, syntactic sugar, +/// etc.). 
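The initial-span sort earlier in this from_mir.rs hunk orders spans by start position, breaks ties by putting longer spans first, then falls back to dominator order, and finally puts closure spans first. A small sketch of that comparator follows, using `(lo, hi)` byte ranges and a numeric dominator rank in place of `cmp_in_dominator_order`; illustrative only, not rustc's types.

/// A simplified initial coverage span: byte range, a dominator rank standing in
/// for `cmp_in_dominator_order` (lower rank = dominates), and a closure flag.
#[derive(Debug, PartialEq)]
struct InitialSpan {
    lo: u32,
    hi: u32,
    dominator_rank: u32,
    is_closure: bool,
}

fn sort_initial_spans(spans: &mut [InitialSpan]) {
    spans.sort_by(|a, b| {
        // First sort by span start.
        Ord::cmp(&a.lo, &b.lo)
            // For equal starts, longer spans first (end position in reverse order).
            .then_with(|| Ord::cmp(&a.hi, &b.hi).reverse())
            // For identical spans, dominating BCBs before the BCBs they dominate.
            .then_with(|| Ord::cmp(&a.dominator_rank, &b.dominator_rank))
            // Otherwise put closure spans first.
            .then_with(|| Ord::cmp(&a.is_closure, &b.is_closure).reverse())
    });
}

fn main() {
    let mut spans = vec![
        InitialSpan { lo: 5, hi: 9, dominator_rank: 2, is_closure: false },
        InitialSpan { lo: 0, hi: 9, dominator_rank: 1, is_closure: false },
        InitialSpan { lo: 0, hi: 20, dominator_rank: 3, is_closure: false },
    ];
    sort_initial_spans(&mut spans);
    // The span starting at 0 and extending furthest right comes first.
    assert_eq!(spans[0].lo, 0);
    assert_eq!(spans[0].hi, 20);
}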
+#[inline] +fn function_source_span(span: Span, body_span: Span) -> Span { + use rustc_span::source_map::original_sp; + + let original_span = original_sp(span, body_span).with_ctxt(body_span.ctxt()); + if body_span.contains(original_span) { original_span } else { body_span } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/spans.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/spans.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/spans.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/spans.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,26 +1,48 @@ -use super::graph::{BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph, START_BCB}; +use std::cell::OnceCell; use rustc_data_structures::graph::WithNumNodes; -use rustc_middle::mir::{ - self, AggregateKind, BasicBlock, FakeReadCause, Rvalue, Statement, StatementKind, Terminator, - TerminatorKind, -}; -use rustc_span::source_map::original_sp; -use rustc_span::{BytePos, ExpnKind, MacroKind, Span, Symbol}; +use rustc_index::IndexVec; +use rustc_middle::mir; +use rustc_span::{BytePos, ExpnKind, MacroKind, Span, Symbol, DUMMY_SP}; -use std::cell::OnceCell; +use super::graph::{BasicCoverageBlock, CoverageGraph, START_BCB}; + +mod from_mir; -#[derive(Debug, Copy, Clone)] -pub(super) enum CoverageStatement { - Statement(BasicBlock, Span, usize), - Terminator(BasicBlock, Span), +pub(super) struct CoverageSpans { + /// Map from BCBs to their list of coverage spans. + bcb_to_spans: IndexVec>, } -impl CoverageStatement { - pub fn span(&self) -> Span { - match self { - Self::Statement(_, span, _) | Self::Terminator(_, span) => *span, +impl CoverageSpans { + pub(super) fn generate_coverage_spans( + mir_body: &mir::Body<'_>, + fn_sig_span: Span, + body_span: Span, + basic_coverage_blocks: &CoverageGraph, + ) -> Self { + let coverage_spans = CoverageSpansGenerator::generate_coverage_spans( + mir_body, + fn_sig_span, + body_span, + basic_coverage_blocks, + ); + + // Group the coverage spans by BCB, with the BCBs in sorted order. + let mut bcb_to_spans = IndexVec::from_elem_n(Vec::new(), basic_coverage_blocks.num_nodes()); + for CoverageSpan { bcb, span, .. } in coverage_spans { + bcb_to_spans[bcb].push(span); } + + Self { bcb_to_spans } + } + + pub(super) fn bcb_has_coverage_spans(&self, bcb: BasicCoverageBlock) -> bool { + !self.bcb_to_spans[bcb].is_empty() + } + + pub(super) fn spans_for_bcb(&self, bcb: BasicCoverageBlock) -> &[Span] { + &self.bcb_to_spans[bcb] } } @@ -28,87 +50,55 @@ /// references the originating BCB and one or more MIR `Statement`s and/or `Terminator`s. /// Initially, the `Span`s come from the `Statement`s and `Terminator`s, but subsequent /// transforms can combine adjacent `Span`s and `CoverageSpan` from the same BCB, merging the -/// `CoverageStatement` vectors, and the `Span`s to cover the extent of the combined `Span`s. +/// `merged_spans` vectors, and the `Span`s to cover the extent of the combined `Span`s. /// -/// Note: A `CoverageStatement` merged into another CoverageSpan may come from a `BasicBlock` that +/// Note: A span merged into another CoverageSpan may come from a `BasicBlock` that /// is not part of the `CoverageSpan` bcb if the statement was included because it's `Span` matches /// or is subsumed by the `Span` associated with this `CoverageSpan`, and it's `BasicBlock` /// `dominates()` the `BasicBlock`s in this `CoverageSpan`. 
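The new `CoverageSpans` struct above groups the refined spans by BCB so later stages can ask whether a BCB has any spans (`bcb_has_coverage_spans`) and fetch them (`spans_for_bcb`). Below is a sketch of that grouping with a plain `Vec<Vec<(u32, u32)>>` standing in for an `IndexVec` keyed by BCB; names are illustrative, not rustc's API.

/// Spans grouped by basic coverage block (BCB), indexed by BCB number.
struct CoverageSpans {
    bcb_to_spans: Vec<Vec<(u32, u32)>>,
}

impl CoverageSpans {
    /// Group a flat list of `(bcb, span)` pairs by BCB index.
    fn group_by_bcb(
        num_bcbs: usize,
        spans: impl IntoIterator<Item = (usize, (u32, u32))>,
    ) -> Self {
        let mut bcb_to_spans = vec![Vec::new(); num_bcbs];
        for (bcb, span) in spans {
            bcb_to_spans[bcb].push(span);
        }
        Self { bcb_to_spans }
    }

    fn bcb_has_coverage_spans(&self, bcb: usize) -> bool {
        !self.bcb_to_spans[bcb].is_empty()
    }

    fn spans_for_bcb(&self, bcb: usize) -> &[(u32, u32)] {
        &self.bcb_to_spans[bcb]
    }
}

fn main() {
    let spans = CoverageSpans::group_by_bcb(3, [(0, (0, 4)), (2, (10, 14)), (2, (20, 24))]);
    assert!(spans.bcb_has_coverage_spans(2));
    assert!(!spans.bcb_has_coverage_spans(1));
    assert_eq!(spans.spans_for_bcb(2).len(), 2);
}

The same "does this BCB have spans?" predicate is what `make_bcb_counters` and the updated tests now take as a closure instead of a full list of coverage spans.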
#[derive(Debug, Clone)] -pub(super) struct CoverageSpan { +struct CoverageSpan { pub span: Span, pub expn_span: Span, pub current_macro_or_none: OnceCell>, pub bcb: BasicCoverageBlock, - pub coverage_statements: Vec, + /// List of all the original spans from MIR that have been merged into this + /// span. Mainly used to precisely skip over gaps when truncating a span. + pub merged_spans: Vec, pub is_closure: bool, } impl CoverageSpan { pub fn for_fn_sig(fn_sig_span: Span) -> Self { - Self { - span: fn_sig_span, - expn_span: fn_sig_span, - current_macro_or_none: Default::default(), - bcb: START_BCB, - coverage_statements: vec![], - is_closure: false, - } + Self::new(fn_sig_span, fn_sig_span, START_BCB, false) } - pub fn for_statement( - statement: &Statement<'_>, + pub(super) fn new( span: Span, expn_span: Span, bcb: BasicCoverageBlock, - bb: BasicBlock, - stmt_index: usize, + is_closure: bool, ) -> Self { - let is_closure = match statement.kind { - StatementKind::Assign(box (_, Rvalue::Aggregate(box ref kind, _))) => { - matches!(kind, AggregateKind::Closure(_, _) | AggregateKind::Generator(_, _, _)) - } - _ => false, - }; - Self { span, expn_span, current_macro_or_none: Default::default(), bcb, - coverage_statements: vec![CoverageStatement::Statement(bb, span, stmt_index)], + merged_spans: vec![span], is_closure, } } - pub fn for_terminator( - span: Span, - expn_span: Span, - bcb: BasicCoverageBlock, - bb: BasicBlock, - ) -> Self { - Self { - span, - expn_span, - current_macro_or_none: Default::default(), - bcb, - coverage_statements: vec![CoverageStatement::Terminator(bb, span)], - is_closure: false, - } - } - pub fn merge_from(&mut self, mut other: CoverageSpan) { debug_assert!(self.is_mergeable(&other)); self.span = self.span.to(other.span); - self.coverage_statements.append(&mut other.coverage_statements); + self.merged_spans.append(&mut other.merged_spans); } pub fn cutoff_statements_at(&mut self, cutoff_pos: BytePos) { - self.coverage_statements.retain(|covstmt| covstmt.span().hi() <= cutoff_pos); - if let Some(highest_covstmt) = - self.coverage_statements.iter().max_by_key(|covstmt| covstmt.span().hi()) - { - self.span = self.span.with_hi(highest_covstmt.span().hi()); + self.merged_spans.retain(|span| span.hi() <= cutoff_pos); + if let Some(max_hi) = self.merged_spans.iter().map(|span| span.hi()).max() { + self.span = self.span.with_hi(max_hi); } } @@ -139,11 +129,12 @@ /// If the span is part of a macro, and the macro is visible (expands directly to the given /// body_span), returns the macro name symbol. pub fn visible_macro(&self, body_span: Span) -> Option { - if let Some(current_macro) = self.current_macro() && self - .expn_span - .parent_callsite() - .unwrap_or_else(|| bug!("macro must have a parent")) - .eq_ctxt(body_span) + if let Some(current_macro) = self.current_macro() + && self + .expn_span + .parent_callsite() + .unwrap_or_else(|| bug!("macro must have a parent")) + .eq_ctxt(body_span) { return Some(current_macro); } @@ -162,13 +153,7 @@ /// * Merge spans that represent continuous (both in source code and control flow), non-branching /// execution /// * Carve out (leave uncovered) any span that will be counted by another MIR (notably, closures) -pub struct CoverageSpans<'a, 'tcx> { - /// The MIR, used to look up `BasicBlockData`. - mir_body: &'a mir::Body<'tcx>, - - /// A `Span` covering the signature of function for the MIR. 
- fn_sig_span: Span, - +struct CoverageSpansGenerator<'a> { /// A `Span` covering the function body of the MIR (typically from left curly brace to right /// curly brace). body_span: Span, @@ -178,7 +163,7 @@ /// The initial set of `CoverageSpan`s, sorted by `Span` (`lo` and `hi`) and by relative /// dominance between the `BasicCoverageBlock`s of equal `Span`s. - sorted_spans_iter: Option>, + sorted_spans_iter: std::vec::IntoIter, /// The current `CoverageSpan` to compare to its `prev`, to possibly merge, discard, force the /// discard of the `prev` (and or `pending_dups`), or keep both (with `prev` moved to @@ -200,9 +185,6 @@ /// is mutated. prev_original_span: Span, - /// A copy of the expn_span from the prior iteration. - prev_expn_span: Option, - /// One or more `CoverageSpan`s with the same `Span` but different `BasicCoverageBlock`s, and /// no `BasicCoverageBlock` in this list dominates another `BasicCoverageBlock` in the list. /// If a new `curr` span also fits this criteria (compared to an existing list of @@ -218,7 +200,7 @@ refined_spans: Vec, } -impl<'a, 'tcx> CoverageSpans<'a, 'tcx> { +impl<'a> CoverageSpansGenerator<'a> { /// Generate a minimal set of `CoverageSpan`s, each representing a contiguous code region to be /// counted. /// @@ -241,109 +223,79 @@ /// Note the resulting vector of `CoverageSpan`s may not be fully sorted (and does not need /// to be). pub(super) fn generate_coverage_spans( - mir_body: &'a mir::Body<'tcx>, + mir_body: &mir::Body<'_>, fn_sig_span: Span, // Ensured to be same SourceFile and SyntaxContext as `body_span` body_span: Span, basic_coverage_blocks: &'a CoverageGraph, ) -> Vec { - let mut coverage_spans = CoverageSpans { + let sorted_spans = from_mir::mir_to_initial_sorted_coverage_spans( mir_body, fn_sig_span, body_span, basic_coverage_blocks, - sorted_spans_iter: None, - refined_spans: Vec::with_capacity(basic_coverage_blocks.num_nodes() * 2), + ); + + let coverage_spans = Self { + body_span, + basic_coverage_blocks, + sorted_spans_iter: sorted_spans.into_iter(), some_curr: None, - curr_original_span: Span::with_root_ctxt(BytePos(0), BytePos(0)), + curr_original_span: DUMMY_SP, some_prev: None, - prev_original_span: Span::with_root_ctxt(BytePos(0), BytePos(0)), - prev_expn_span: None, + prev_original_span: DUMMY_SP, pending_dups: Vec::new(), + refined_spans: Vec::with_capacity(basic_coverage_blocks.num_nodes() * 2), }; - let sorted_spans = coverage_spans.mir_to_initial_sorted_coverage_spans(); - - coverage_spans.sorted_spans_iter = Some(sorted_spans.into_iter()); - coverage_spans.to_refined_spans() } - fn mir_to_initial_sorted_coverage_spans(&self) -> Vec { - let mut initial_spans = - Vec::::with_capacity(self.mir_body.basic_blocks.len() * 2); - for (bcb, bcb_data) in self.basic_coverage_blocks.iter_enumerated() { - initial_spans.extend(self.bcb_to_initial_coverage_spans(bcb, bcb_data)); - } - - if initial_spans.is_empty() { - // This can happen if, for example, the function is unreachable (contains only a - // `BasicBlock`(s) with an `Unreachable` terminator). - return initial_spans; - } - - initial_spans.push(CoverageSpan::for_fn_sig(self.fn_sig_span)); - - initial_spans.sort_by(|a, b| { - // First sort by span start. - Ord::cmp(&a.span.lo(), &b.span.lo()) - // If span starts are the same, sort by span end in reverse order. 
- // This ensures that if spans A and B are adjacent in the list, - // and they overlap but are not equal, then either: - // - Span A extends further left, or - // - Both have the same start and span A extends further right - .then_with(|| Ord::cmp(&a.span.hi(), &b.span.hi()).reverse()) - // If both spans are equal, sort the BCBs in dominator order, - // so that dominating BCBs come before other BCBs they dominate. - .then_with(|| self.basic_coverage_blocks.cmp_in_dominator_order(a.bcb, b.bcb)) - // If two spans are otherwise identical, put closure spans first, - // as this seems to be what the refinement step expects. - .then_with(|| Ord::cmp(&a.is_closure, &b.is_closure).reverse()) - }); - - initial_spans - } - /// Iterate through the sorted `CoverageSpan`s, and return the refined list of merged and /// de-duplicated `CoverageSpan`s. fn to_refined_spans(mut self) -> Vec { while self.next_coverage_span() { + // For the first span we don't have `prev` set, so most of the + // span-processing steps don't make sense yet. if self.some_prev.is_none() { debug!(" initial span"); - self.check_invoked_macro_name_span(); - } else if self.curr().is_mergeable(self.prev()) { - debug!(" same bcb (and neither is a closure), merge with prev={:?}", self.prev()); + self.maybe_push_macro_name_span(); + continue; + } + + // The remaining cases assume that `prev` and `curr` are set. + let prev = self.prev(); + let curr = self.curr(); + + if curr.is_mergeable(prev) { + debug!(" same bcb (and neither is a closure), merge with prev={prev:?}"); let prev = self.take_prev(); self.curr_mut().merge_from(prev); - self.check_invoked_macro_name_span(); + self.maybe_push_macro_name_span(); // Note that curr.span may now differ from curr_original_span - } else if self.prev_ends_before_curr() { + } else if prev.span.hi() <= curr.span.lo() { debug!( - " different bcbs and disjoint spans, so keep curr for next iter, and add \ - prev={:?}", - self.prev() + " different bcbs and disjoint spans, so keep curr for next iter, and add prev={prev:?}", ); let prev = self.take_prev(); self.push_refined_span(prev); - self.check_invoked_macro_name_span(); - } else if self.prev().is_closure { + self.maybe_push_macro_name_span(); + } else if prev.is_closure { // drop any equal or overlapping span (`curr`) and keep `prev` to test again in the // next iter debug!( - " curr overlaps a closure (prev). Drop curr and keep prev for next iter. \ - prev={:?}", - self.prev() + " curr overlaps a closure (prev). Drop curr and keep prev for next iter. prev={prev:?}", ); - self.take_curr(); - } else if self.curr().is_closure { + self.take_curr(); // Discards curr. + } else if curr.is_closure { self.carve_out_span_for_closure(); - } else if self.prev_original_span == self.curr().span { + } else if self.prev_original_span == curr.span { // Note that this compares the new (`curr`) span to `prev_original_span`. // In this branch, the actual span byte range of `prev_original_span` is not // important. What is important is knowing whether the new `curr` span was // **originally** the same as the original span of `prev()`. The original spans // reflect their original sort order, and for equal spans, conveys a partial // ordering based on CFG dominator priority. 
- if self.prev().is_macro_expansion() && self.curr().is_macro_expansion() { + if prev.is_macro_expansion() && curr.is_macro_expansion() { // Macros that expand to include branching (such as // `assert_eq!()`, `assert_ne!()`, `info!()`, `debug!()`, or // `trace!()`) typically generate callee spans with identical @@ -357,23 +309,24 @@ debug!( " curr and prev are part of a macro expansion, and curr has the same span \ as prev, but is in a different bcb. Drop curr and keep prev for next iter. \ - prev={:?}", - self.prev() + prev={prev:?}", ); - self.take_curr(); + self.take_curr(); // Discards curr. } else { - self.hold_pending_dups_unless_dominated(); + self.update_pending_dups(); } } else { self.cutoff_prev_at_overlapping_curr(); - self.check_invoked_macro_name_span(); + self.maybe_push_macro_name_span(); } } - debug!(" AT END, adding last prev={:?}", self.prev()); let prev = self.take_prev(); - let pending_dups = self.pending_dups.split_off(0); - for dup in pending_dups { + debug!(" AT END, adding last prev={prev:?}"); + + // Take `pending_dups` so that we can drain it while calling self methods. + // It is never used as a field after this point. + for dup in std::mem::take(&mut self.pending_dups) { debug!(" ...adding at least one pending dup={:?}", dup); self.push_refined_span(dup); } @@ -403,91 +356,46 @@ } fn push_refined_span(&mut self, covspan: CoverageSpan) { - let len = self.refined_spans.len(); - if len > 0 { - let last = &mut self.refined_spans[len - 1]; - if last.is_mergeable(&covspan) { - debug!( - "merging new refined span with last refined span, last={:?}, covspan={:?}", - last, covspan - ); - last.merge_from(covspan); - return; - } + if let Some(last) = self.refined_spans.last_mut() + && last.is_mergeable(&covspan) + { + // Instead of pushing the new span, merge it with the last refined span. + debug!(?last, ?covspan, "merging new refined span with last refined span"); + last.merge_from(covspan); + } else { + self.refined_spans.push(covspan); } - self.refined_spans.push(covspan) } - fn check_invoked_macro_name_span(&mut self) { - if let Some(visible_macro) = self.curr().visible_macro(self.body_span) { - if !self - .prev_expn_span - .is_some_and(|prev_expn_span| self.curr().expn_span.ctxt() == prev_expn_span.ctxt()) - { - let merged_prefix_len = self.curr_original_span.lo() - self.curr().span.lo(); - let after_macro_bang = - merged_prefix_len + BytePos(visible_macro.as_str().len() as u32 + 1); - if self.curr().span.lo() + after_macro_bang > self.curr().span.hi() { - // Something is wrong with the macro name span; - // return now to avoid emitting malformed mappings. - // FIXME(#117788): Track down why this happens. - return; - } - let mut macro_name_cov = self.curr().clone(); - self.curr_mut().span = - self.curr().span.with_lo(self.curr().span.lo() + after_macro_bang); - macro_name_cov.span = - macro_name_cov.span.with_hi(macro_name_cov.span.lo() + after_macro_bang); - debug!( - " and curr starts a new macro expansion, so add a new span just for \ - the macro `{}!`, new span={:?}", - visible_macro, macro_name_cov - ); - self.push_refined_span(macro_name_cov); - } + /// If `curr` is part of a new macro expansion, carve out and push a separate + /// span that ends just after the macro name and its subsequent `!`. 
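`maybe_push_macro_name_span` (whose body follows just below) carves the front of the current span into a separate span covering only the macro name and its trailing `!`, bailing out if the computed prefix would overshoot the span. The sketch below shows that byte-position arithmetic on plain `(lo, hi)` ranges; it is illustrative only and does not use rustc's `Span` API.

/// Split a span that starts at a macro invocation into a prefix span covering
/// `name!` and the remaining span. `merged_prefix_len` is how far the span's
/// start has already drifted left of the original invocation.
fn carve_macro_name_span(
    span: (u32, u32),
    visible_macro: &str,
    merged_prefix_len: u32,
) -> Option<((u32, u32), (u32, u32))> {
    let (lo, hi) = span;
    let after_macro_bang = merged_prefix_len + visible_macro.len() as u32 + 1;
    if lo + after_macro_bang > hi {
        // Malformed macro-name span; bail out rather than emit a bad mapping.
        return None;
    }
    let macro_name_span = (lo, lo + after_macro_bang);
    let rest = (lo + after_macro_bang, hi);
    Some((macro_name_span, rest))
}

fn main() {
    // A span starting at an `assert_eq!(...)` invocation, 30 bytes long.
    let (name, rest) = carve_macro_name_span((100, 130), "assert_eq", 0).unwrap();
    assert_eq!(name, (100, 110)); // "assert_eq!" is 10 bytes
    assert_eq!(rest, (110, 130));
    // A span too short to contain the macro name is rejected.
    assert!(carve_macro_name_span((100, 103), "assert_eq", 0).is_none());
}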
+ fn maybe_push_macro_name_span(&mut self) { + let curr = self.curr(); + + let Some(visible_macro) = curr.visible_macro(self.body_span) else { return }; + if let Some(prev) = &self.some_prev + && prev.expn_span.eq_ctxt(curr.expn_span) + { + return; } - } - // Generate a set of `CoverageSpan`s from the filtered set of `Statement`s and `Terminator`s of - // the `BasicBlock`(s) in the given `BasicCoverageBlockData`. One `CoverageSpan` is generated - // for each `Statement` and `Terminator`. (Note that subsequent stages of coverage analysis will - // merge some `CoverageSpan`s, at which point a `CoverageSpan` may represent multiple - // `Statement`s and/or `Terminator`s.) - fn bcb_to_initial_coverage_spans( - &self, - bcb: BasicCoverageBlock, - bcb_data: &'a BasicCoverageBlockData, - ) -> Vec { - bcb_data - .basic_blocks - .iter() - .flat_map(|&bb| { - let data = &self.mir_body[bb]; - data.statements - .iter() - .enumerate() - .filter_map(move |(index, statement)| { - filtered_statement_span(statement).map(|span| { - CoverageSpan::for_statement( - statement, - function_source_span(span, self.body_span), - span, - bcb, - bb, - index, - ) - }) - }) - .chain(filtered_terminator_span(data.terminator()).map(|span| { - CoverageSpan::for_terminator( - function_source_span(span, self.body_span), - span, - bcb, - bb, - ) - })) - }) - .collect() + let merged_prefix_len = self.curr_original_span.lo() - curr.span.lo(); + let after_macro_bang = merged_prefix_len + BytePos(visible_macro.as_str().len() as u32 + 1); + if self.curr().span.lo() + after_macro_bang > self.curr().span.hi() { + // Something is wrong with the macro name span; + // return now to avoid emitting malformed mappings. + // FIXME(#117788): Track down why this happens. + return; + } + let mut macro_name_cov = curr.clone(); + self.curr_mut().span = curr.span.with_lo(curr.span.lo() + after_macro_bang); + macro_name_cov.span = + macro_name_cov.span.with_hi(macro_name_cov.span.lo() + after_macro_bang); + debug!( + " and curr starts a new macro expansion, so add a new span just for \ + the macro `{visible_macro}!`, new span={macro_name_cov:?}", + ); + self.push_refined_span(macro_name_cov); } fn curr(&self) -> &CoverageSpan { @@ -502,6 +410,12 @@ .unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_curr")) } + /// If called, then the next call to `next_coverage_span()` will *not* update `prev` with the + /// `curr` coverage span. + fn take_curr(&mut self) -> CoverageSpan { + self.some_curr.take().unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_curr")) + } + fn prev(&self) -> &CoverageSpan { self.some_prev .as_ref() @@ -527,82 +441,78 @@ /// `pending_dups` could have as few as one span) /// In either case, no more spans will match the span of `pending_dups`, so /// add the `pending_dups` if they don't overlap `curr`, and clear the list. 
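Several of the rewritten refinement methods in the hunks that follow (`maybe_flush_pending_dups`, `carve_out_span_for_closure`) temporarily steal the `pending_dups` vector with `std::mem::take` so it can be drained while other `&mut self` methods are called, then hand the emptied buffer back to reuse its capacity. A minimal sketch of that borrow-checker-friendly pattern on a toy struct; names are illustrative.

struct Refiner {
    pending_dups: Vec<u32>,
    refined: Vec<u32>,
}

impl Refiner {
    /// Needs `&mut self`, so it cannot be called while `self.pending_dups`
    /// is also being iterated through a borrow of `self`.
    fn push_refined(&mut self, value: u32) {
        self.refined.push(value);
    }

    fn flush_pending_dups(&mut self) {
        // Steal the vector so we can drain it while calling other `&mut self`
        // methods, then hand its (now empty) buffer back to keep the capacity.
        let mut pending_dups = std::mem::take(&mut self.pending_dups);
        for dup in pending_dups.drain(..) {
            self.push_refined(dup);
        }
        assert!(pending_dups.is_empty() && self.pending_dups.is_empty());
        self.pending_dups = pending_dups;
    }
}

fn main() {
    let mut refiner = Refiner { pending_dups: vec![1, 2, 3], refined: Vec::new() };
    refiner.flush_pending_dups();
    assert_eq!(refiner.refined, vec![1, 2, 3]);
    assert!(refiner.pending_dups.is_empty());
}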
- fn check_pending_dups(&mut self) { - if let Some(dup) = self.pending_dups.last() && dup.span != self.prev().span { - debug!( - " SAME spans, but pending_dups are NOT THE SAME, so BCBs matched on \ - previous iteration, or prev started a new disjoint span" - ); - if dup.span.hi() <= self.curr().span.lo() { - let pending_dups = self.pending_dups.split_off(0); - for dup in pending_dups.into_iter() { - debug!(" ...adding at least one pending={:?}", dup); - self.push_refined_span(dup); - } - } else { - self.pending_dups.clear(); - } + fn maybe_flush_pending_dups(&mut self) { + let Some(last_dup) = self.pending_dups.last() else { return }; + if last_dup.span == self.prev().span { + return; + } + + debug!( + " SAME spans, but pending_dups are NOT THE SAME, so BCBs matched on \ + previous iteration, or prev started a new disjoint span" + ); + if last_dup.span.hi() <= self.curr().span.lo() { + // Temporarily steal `pending_dups` into a local, so that we can + // drain it while calling other self methods. + let mut pending_dups = std::mem::take(&mut self.pending_dups); + for dup in pending_dups.drain(..) { + debug!(" ...adding at least one pending={:?}", dup); + self.push_refined_span(dup); + } + // The list of dups is now empty, but we can recycle its capacity. + assert!(pending_dups.is_empty() && self.pending_dups.is_empty()); + self.pending_dups = pending_dups; + } else { + self.pending_dups.clear(); } } /// Advance `prev` to `curr` (if any), and `curr` to the next `CoverageSpan` in sorted order. fn next_coverage_span(&mut self) -> bool { if let Some(curr) = self.some_curr.take() { - self.prev_expn_span = Some(curr.expn_span); self.some_prev = Some(curr); self.prev_original_span = self.curr_original_span; } - while let Some(curr) = self.sorted_spans_iter.as_mut().unwrap().next() { + while let Some(curr) = self.sorted_spans_iter.next() { debug!("FOR curr={:?}", curr); - if self.some_prev.is_some() && self.prev_starts_after_next(&curr) { + if let Some(prev) = &self.some_prev && prev.span.lo() > curr.span.lo() { + // Skip curr because prev has already advanced beyond the end of curr. + // This can only happen if a prior iteration updated `prev` to skip past + // a region of code, such as skipping past a closure. debug!( " prev.span starts after curr.span, so curr will be dropped (skipping past \ - closure?); prev={:?}", - self.prev() + closure?); prev={prev:?}", ); } else { // Save a copy of the original span for `curr` in case the `CoverageSpan` is changed // by `self.curr_mut().merge_from(prev)`. self.curr_original_span = curr.span; self.some_curr.replace(curr); - self.check_pending_dups(); + self.maybe_flush_pending_dups(); return true; } } false } - /// If called, then the next call to `next_coverage_span()` will *not* update `prev` with the - /// `curr` coverage span. - fn take_curr(&mut self) -> CoverageSpan { - self.some_curr.take().unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_curr")) - } - - /// Returns true if the curr span should be skipped because prev has already advanced beyond the - /// end of curr. This can only happen if a prior iteration updated `prev` to skip past a region - /// of code, such as skipping past a closure. - fn prev_starts_after_next(&self, next_curr: &CoverageSpan) -> bool { - self.prev().span.lo() > next_curr.span.lo() - } - - /// Returns true if the curr span starts past the end of the prev span, which means they don't - /// overlap, so we now know the prev can be added to the refined coverage spans. 
- fn prev_ends_before_curr(&self) -> bool { - self.prev().span.hi() <= self.curr().span.lo() - } - /// If `prev`s span extends left of the closure (`curr`), carve out the closure's span from /// `prev`'s span. (The closure's coverage counters will be injected when processing the /// closure's own MIR.) Add the portion of the span to the left of the closure; and if the span /// extends to the right of the closure, update `prev` to that portion of the span. For any /// `pending_dups`, repeat the same process. fn carve_out_span_for_closure(&mut self) { - let curr_span = self.curr().span; - let left_cutoff = curr_span.lo(); - let right_cutoff = curr_span.hi(); - let has_pre_closure_span = self.prev().span.lo() < right_cutoff; - let has_post_closure_span = self.prev().span.hi() > right_cutoff; - let mut pending_dups = self.pending_dups.split_off(0); + let prev = self.prev(); + let curr = self.curr(); + + let left_cutoff = curr.span.lo(); + let right_cutoff = curr.span.hi(); + let has_pre_closure_span = prev.span.lo() < right_cutoff; + let has_post_closure_span = prev.span.hi() > right_cutoff; + + // Temporarily steal `pending_dups` into a local, so that we can + // mutate and/or drain it while calling other self methods. + let mut pending_dups = std::mem::take(&mut self.pending_dups); + if has_pre_closure_span { let mut pre_closure = self.prev().clone(); pre_closure.span = pre_closure.span.with_hi(left_cutoff); @@ -616,6 +526,7 @@ } self.push_refined_span(pre_closure); } + if has_post_closure_span { // Mutate `prev.span()` to start after the closure (and discard curr). // (**NEVER** update `prev_original_span` because it affects the assumptions @@ -626,12 +537,15 @@ debug!(" ...and at least one overlapping dup={:?}", dup); dup.span = dup.span.with_lo(right_cutoff); } - self.pending_dups.append(&mut pending_dups); - let closure_covspan = self.take_curr(); + let closure_covspan = self.take_curr(); // Prevent this curr from becoming prev. self.push_refined_span(closure_covspan); // since self.prev() was already updated } else { pending_dups.clear(); } + + // Restore the modified post-closure spans, or the empty vector's capacity. + assert!(self.pending_dups.is_empty()); + self.pending_dups = pending_dups; } /// Called if `curr.span` equals `prev_original_span` (and potentially equal to all @@ -648,26 +562,28 @@ /// neither `CoverageSpan` dominates the other, both (or possibly more than two) are held, /// until their disposition is determined. In this latter case, the `prev` dup is moved into /// `pending_dups` so the new `curr` dup can be moved to `prev` for the next iteration. - fn hold_pending_dups_unless_dominated(&mut self) { + fn update_pending_dups(&mut self) { + let prev_bcb = self.prev().bcb; + let curr_bcb = self.curr().bcb; + // Equal coverage spans are ordered by dominators before dominated (if any), so it should be // impossible for `curr` to dominate any previous `CoverageSpan`. 
- debug_assert!(!self.span_bcb_dominates(self.curr(), self.prev())); + debug_assert!(!self.basic_coverage_blocks.dominates(curr_bcb, prev_bcb)); let initial_pending_count = self.pending_dups.len(); if initial_pending_count > 0 { - let mut pending_dups = self.pending_dups.split_off(0); - pending_dups.retain(|dup| !self.span_bcb_dominates(dup, self.curr())); - self.pending_dups.append(&mut pending_dups); - if self.pending_dups.len() < initial_pending_count { + self.pending_dups + .retain(|dup| !self.basic_coverage_blocks.dominates(dup.bcb, curr_bcb)); + + let n_discarded = initial_pending_count - self.pending_dups.len(); + if n_discarded > 0 { debug!( - " discarded {} of {} pending_dups that dominated curr", - initial_pending_count - self.pending_dups.len(), - initial_pending_count + " discarded {n_discarded} of {initial_pending_count} pending_dups that dominated curr", ); } } - if self.span_bcb_dominates(self.prev(), self.curr()) { + if self.basic_coverage_blocks.dominates(prev_bcb, curr_bcb) { debug!( " different bcbs but SAME spans, and prev dominates curr. Discard prev={:?}", self.prev() @@ -720,7 +636,7 @@ if self.pending_dups.is_empty() { let curr_span = self.curr().span; self.prev_mut().cutoff_statements_at(curr_span.lo()); - if self.prev().coverage_statements.is_empty() { + if self.prev().merged_spans.is_empty() { debug!(" ... no non-overlapping statements to add"); } else { debug!(" ... adding modified prev={:?}", self.prev()); @@ -732,109 +648,4 @@ self.pending_dups.clear(); } } - - fn span_bcb_dominates(&self, dom_covspan: &CoverageSpan, covspan: &CoverageSpan) -> bool { - self.basic_coverage_blocks.dominates(dom_covspan.bcb, covspan.bcb) - } -} - -/// If the MIR `Statement` has a span contributive to computing coverage spans, -/// return it; otherwise return `None`. -pub(super) fn filtered_statement_span(statement: &Statement<'_>) -> Option { - match statement.kind { - // These statements have spans that are often outside the scope of the executed source code - // for their parent `BasicBlock`. - StatementKind::StorageLive(_) - | StatementKind::StorageDead(_) - // Coverage should not be encountered, but don't inject coverage coverage - | StatementKind::Coverage(_) - // Ignore `ConstEvalCounter`s - | StatementKind::ConstEvalCounter - // Ignore `Nop`s - | StatementKind::Nop => None, - - // FIXME(#78546): MIR InstrumentCoverage - Can the source_info.span for `FakeRead` - // statements be more consistent? - // - // FakeReadCause::ForGuardBinding, in this example: - // match somenum { - // x if x < 1 => { ... } - // }... - // The BasicBlock within the match arm code included one of these statements, but the span - // for it covered the `1` in this source. The actual statements have nothing to do with that - // source span: - // FakeRead(ForGuardBinding, _4); - // where `_4` is: - // _4 = &_1; (at the span for the first `x`) - // and `_1` is the `Place` for `somenum`. - // - // If and when the Issue is resolved, remove this special case match pattern: - StatementKind::FakeRead(box (FakeReadCause::ForGuardBinding, _)) => None, - - // Retain spans from all other statements - StatementKind::FakeRead(box (_, _)) // Not including `ForGuardBinding` - | StatementKind::Intrinsic(..) - | StatementKind::Assign(_) - | StatementKind::SetDiscriminant { .. } - | StatementKind::Deinit(..) - | StatementKind::Retag(_, _) - | StatementKind::PlaceMention(..) 
- | StatementKind::AscribeUserType(_, _) => { - Some(statement.source_info.span) - } - } -} - -/// If the MIR `Terminator` has a span contributive to computing coverage spans, -/// return it; otherwise return `None`. -pub(super) fn filtered_terminator_span(terminator: &Terminator<'_>) -> Option { - match terminator.kind { - // These terminators have spans that don't positively contribute to computing a reasonable - // span of actually executed source code. (For example, SwitchInt terminators extracted from - // an `if condition { block }` has a span that includes the executed block, if true, - // but for coverage, the code region executed, up to *and* through the SwitchInt, - // actually stops before the if's block.) - TerminatorKind::Unreachable // Unreachable blocks are not connected to the MIR CFG - | TerminatorKind::Assert { .. } - | TerminatorKind::Drop { .. } - | TerminatorKind::SwitchInt { .. } - // For `FalseEdge`, only the `real` branch is taken, so it is similar to a `Goto`. - | TerminatorKind::FalseEdge { .. } - | TerminatorKind::Goto { .. } => None, - - // Call `func` operand can have a more specific span when part of a chain of calls - | TerminatorKind::Call { ref func, .. } => { - let mut span = terminator.source_info.span; - if let mir::Operand::Constant(box constant) = func { - if constant.span.lo() > span.lo() { - span = span.with_lo(constant.span.lo()); - } - } - Some(span) - } - - // Retain spans from all other terminators - TerminatorKind::UnwindResume - | TerminatorKind::UnwindTerminate(_) - | TerminatorKind::Return - | TerminatorKind::Yield { .. } - | TerminatorKind::GeneratorDrop - | TerminatorKind::FalseUnwind { .. } - | TerminatorKind::InlineAsm { .. } => { - Some(terminator.source_info.span) - } - } -} - -/// Returns an extrapolated span (pre-expansion[^1]) corresponding to a range -/// within the function's body source. This span is guaranteed to be contained -/// within, or equal to, the `body_span`. If the extrapolated span is not -/// contained within the `body_span`, the `body_span` is returned. -/// -/// [^1]Expansions result from Rust syntax including macros, syntactic sugar, -/// etc.). -#[inline] -pub(super) fn function_source_span(span: Span, body_span: Span) -> Span { - let original_span = original_sp(span, body_span).with_ctxt(body_span.ctxt()); - if body_span.contains(original_span) { original_span } else { body_span } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/coverage/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -25,8 +25,7 @@ //! to: `rustc_span::create_default_session_globals_then(|| { test_here(); })`. 
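In the spans.rs hunks above, `cutoff_statements_at` keeps only the merged spans that end at or before the cutoff position and shrinks the refined span's end to the last surviving position; `cutoff_prev_at_overlapping_curr` applies it when `curr` overlaps `prev`. Here is a sketch of that trimming on plain `(lo, hi)` ranges; illustrative types only.

/// A refined span together with the original MIR spans merged into it.
#[derive(Debug)]
struct RefinedSpan {
    span: (u32, u32),
    merged_spans: Vec<(u32, u32)>,
}

impl RefinedSpan {
    /// Drop merged spans ending after `cutoff_pos` and shrink `span` so it
    /// stops at the last remaining end position.
    fn cutoff_statements_at(&mut self, cutoff_pos: u32) {
        self.merged_spans.retain(|&(_, hi)| hi <= cutoff_pos);
        if let Some(max_hi) = self.merged_spans.iter().map(|&(_, hi)| hi).max() {
            self.span.1 = max_hi;
        }
    }
}

fn main() {
    let mut prev = RefinedSpan {
        span: (0, 30),
        merged_spans: vec![(0, 10), (12, 18), (20, 30)],
    };
    // `curr` starts at byte 20, so cut `prev` off there.
    prev.cutoff_statements_at(20);
    assert_eq!(prev.merged_spans, vec![(0, 10), (12, 18)]);
    assert_eq!(prev.span, (0, 18));
}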
use super::counters; -use super::graph; -use super::spans; +use super::graph::{self, BasicCoverageBlock}; use coverage_test_macros::let_bcb; @@ -242,7 +241,7 @@ " {:?} [label=\"{:?}: {}\"];\n{}", bcb, bcb, - bcb_data.terminator(mir_body).kind.name(), + mir_body[bcb_data.last_bb()].terminator().kind.name(), basic_coverage_blocks .successors(bcb) .map(|successor| { format!(" {:?} -> {:?};", bcb, successor) }) @@ -629,7 +628,7 @@ let basic_coverage_blocks = graph::CoverageGraph::from_mir(&mir_body); let mut traversed_in_order = Vec::new(); let mut traversal = graph::TraverseCoverageGraphWithLoops::new(&basic_coverage_blocks); - while let Some(bcb) = traversal.next(&basic_coverage_blocks) { + while let Some(bcb) = traversal.next() { traversed_in_order.push(bcb); } @@ -644,41 +643,18 @@ ); } -fn synthesize_body_span_from_terminators(mir_body: &Body<'_>) -> Span { - let mut some_span: Option = None; - for (_, data) in mir_body.basic_blocks.iter_enumerated() { - let term_span = data.terminator().source_info.span; - if let Some(span) = some_span.as_mut() { - *span = span.to(term_span); - } else { - some_span = Some(term_span) - } - } - some_span.expect("body must have at least one BasicBlock") -} - #[test] fn test_make_bcb_counters() { rustc_span::create_default_session_globals_then(|| { let mir_body = goto_switchint(); - let body_span = synthesize_body_span_from_terminators(&mir_body); - let mut basic_coverage_blocks = graph::CoverageGraph::from_mir(&mir_body); - let mut coverage_spans = Vec::new(); - for (bcb, data) in basic_coverage_blocks.iter_enumerated() { - if let Some(span) = spans::filtered_terminator_span(data.terminator(&mir_body)) { - coverage_spans.push(spans::CoverageSpan::for_terminator( - spans::function_source_span(span, body_span), - span, - bcb, - data.last_bb(), - )); - } - } + let basic_coverage_blocks = graph::CoverageGraph::from_mir(&mir_body); + // Historically this test would use `spans` internals to set up fake + // coverage spans for BCBs 1 and 2. Now we skip that step and just tell + // BCB counter construction that those BCBs have spans. 
+ let bcb_has_coverage_spans = |bcb: BasicCoverageBlock| (1..=2).contains(&bcb.as_usize()); let mut coverage_counters = counters::CoverageCounters::new(&basic_coverage_blocks); - coverage_counters - .make_bcb_counters(&mut basic_coverage_blocks, &coverage_spans) - .expect("should be Ok"); - assert_eq!(coverage_counters.intermediate_expressions.len(), 0); + coverage_counters.make_bcb_counters(&basic_coverage_blocks, bcb_has_coverage_spans); + assert_eq!(coverage_counters.num_expressions(), 0); let_bcb!(1); assert_eq!( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/cross_crate_inline.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/cross_crate_inline.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/cross_crate_inline.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/cross_crate_inline.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,130 @@ +use crate::inline; +use crate::pass_manager as pm; +use rustc_attr::InlineAttr; +use rustc_hir::def::DefKind; +use rustc_hir::def_id::LocalDefId; +use rustc_middle::mir::visit::Visitor; +use rustc_middle::mir::*; +use rustc_middle::query::Providers; +use rustc_middle::ty::TyCtxt; +use rustc_session::config::InliningThreshold; +use rustc_session::config::OptLevel; + +pub fn provide(providers: &mut Providers) { + providers.cross_crate_inlinable = cross_crate_inlinable; +} + +fn cross_crate_inlinable(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool { + let codegen_fn_attrs = tcx.codegen_fn_attrs(def_id); + // If this has an extern indicator, then this function is globally shared and thus will not + // generate cgu-internal copies which would make it cross-crate inlinable. + if codegen_fn_attrs.contains_extern_indicator() { + return false; + } + + // Obey source annotations first; this is important because it means we can use + // #[inline(never)] to force code generation. + match codegen_fn_attrs.inline { + InlineAttr::Never => return false, + InlineAttr::Hint | InlineAttr::Always => return true, + _ => {} + } + + // This just reproduces the logic from Instance::requires_inline. + match tcx.def_kind(def_id) { + DefKind::Ctor(..) | DefKind::Closure => return true, + DefKind::Fn | DefKind::AssocFn => {} + _ => return false, + } + + // Don't do any inference when incremental compilation is enabled; the additional inlining that + // inference permits also creates more work for small edits. + if tcx.sess.opts.incremental.is_some() { + return false; + } + + // Don't do any inference if codegen optimizations are disabled and also MIR inlining is not + // enabled. This ensures that we do inference even if someone only passes -Zinline-mir, + // which is less confusing than having to also enable -Copt-level=1. 
+ if matches!(tcx.sess.opts.optimize, OptLevel::No) && !pm::should_run_pass(tcx, &inline::Inline) + { + return false; + } + + if !tcx.is_mir_available(def_id) { + return false; + } + + let threshold = match tcx.sess.opts.unstable_opts.cross_crate_inline_threshold { + InliningThreshold::Always => return true, + InliningThreshold::Sometimes(threshold) => threshold, + InliningThreshold::Never => return false, + }; + + let mir = tcx.optimized_mir(def_id); + let mut checker = + CostChecker { tcx, callee_body: mir, calls: 0, statements: 0, landing_pads: 0, resumes: 0 }; + checker.visit_body(mir); + checker.calls == 0 + && checker.resumes == 0 + && checker.landing_pads == 0 + && checker.statements <= threshold +} + +struct CostChecker<'b, 'tcx> { + tcx: TyCtxt<'tcx>, + callee_body: &'b Body<'tcx>, + calls: usize, + statements: usize, + landing_pads: usize, + resumes: usize, +} + +impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> { + fn visit_statement(&mut self, statement: &Statement<'tcx>, _: Location) { + // Don't count StorageLive/StorageDead in the inlining cost. + match statement.kind { + StatementKind::StorageLive(_) + | StatementKind::StorageDead(_) + | StatementKind::Deinit(_) + | StatementKind::Nop => {} + _ => self.statements += 1, + } + } + + fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, _: Location) { + let tcx = self.tcx; + match terminator.kind { + TerminatorKind::Drop { ref place, unwind, .. } => { + let ty = place.ty(self.callee_body, tcx).ty; + if !ty.is_trivially_pure_clone_copy() { + self.calls += 1; + if let UnwindAction::Cleanup(_) = unwind { + self.landing_pads += 1; + } + } + } + TerminatorKind::Call { unwind, .. } => { + self.calls += 1; + if let UnwindAction::Cleanup(_) = unwind { + self.landing_pads += 1; + } + } + TerminatorKind::Assert { unwind, .. } => { + self.calls += 1; + if let UnwindAction::Cleanup(_) = unwind { + self.landing_pads += 1; + } + } + TerminatorKind::UnwindResume => self.resumes += 1, + TerminatorKind::InlineAsm { unwind, .. } => { + self.statements += 1; + if let UnwindAction::Cleanup(_) = unwind { + self.landing_pads += 1; + } + } + TerminatorKind::Return => {} + _ => self.statements += 1, + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dataflow_const_prop.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dataflow_const_prop.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dataflow_const_prop.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dataflow_const_prop.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,14 +2,13 @@ //! //! Currently, this pass only propagates scalar values. 
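The new `cross_crate_inlinable` query above combines attribute and `DefKind` checks with a small cost model: a body is only considered inlinable into other crates when it contains no calls, landing pads or resumes and stays under the `cross_crate_inline_threshold` statement budget. A self-contained sketch of just that final decision (the `Costs` struct and the threshold value are illustrative, not rustc types):

    struct Costs {
        calls: usize,
        statements: usize,
        landing_pads: usize,
        resumes: usize,
    }

    // Mirrors the final check in `cross_crate_inlinable`: any call, landing pad
    // or resume disqualifies the body; otherwise the statement count is compared
    // against the configured threshold.
    fn within_threshold(costs: &Costs, threshold: usize) -> bool {
        costs.calls == 0
            && costs.resumes == 0
            && costs.landing_pads == 0
            && costs.statements <= threshold
    }

    fn main() {
        let leaf = Costs { calls: 0, statements: 3, landing_pads: 0, resumes: 0 };
        let caller = Costs { calls: 1, statements: 3, landing_pads: 0, resumes: 0 };
        assert!(within_threshold(&leaf, 100));
        assert!(!within_threshold(&caller, 100));
    }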
-use rustc_const_eval::const_eval::CheckAlignment; -use rustc_const_eval::interpret::{ImmTy, Immediate, InterpCx, OpTy, Projectable}; +use rustc_const_eval::interpret::{ImmTy, Immediate, InterpCx, OpTy, PlaceTy, Projectable}; use rustc_data_structures::fx::FxHashMap; use rustc_hir::def::DefKind; use rustc_middle::mir::interpret::{AllocId, ConstAllocation, InterpResult, Scalar}; use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; use rustc_middle::mir::*; -use rustc_middle::ty::layout::TyAndLayout; +use rustc_middle::ty::layout::{LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_mir_dataflow::value_analysis::{ Map, PlaceIndex, State, TrackElem, ValueAnalysis, ValueAnalysisWrapper, ValueOrPlace, @@ -17,8 +16,9 @@ use rustc_mir_dataflow::{lattice::FlatSet, Analysis, Results, ResultsVisitor}; use rustc_span::def_id::DefId; use rustc_span::DUMMY_SP; -use rustc_target::abi::{Align, FieldIdx, VariantIdx}; +use rustc_target::abi::{Abi, FieldIdx, Size, VariantIdx, FIRST_VARIANT}; +use crate::const_prop::throw_machine_stop_str; use crate::MirPass; // These constants are somewhat random guesses and have not been optimized. @@ -286,9 +286,9 @@ let val = match null_op { NullOp::SizeOf if layout.is_sized() => layout.size.bytes(), NullOp::AlignOf if layout.is_sized() => layout.align.abi.bytes(), - NullOp::OffsetOf(fields) => layout - .offset_of_subfield(&self.ecx, fields.iter().map(|f| f.index())) - .bytes(), + NullOp::OffsetOf(fields) => { + layout.offset_of_subfield(&self.ecx, fields.iter()).bytes() + } _ => return ValueOrPlace::Value(FlatSet::Top), }; FlatSet::Elem(Scalar::from_target_usize(val, &self.tcx)) @@ -406,7 +406,8 @@ TrackElem::Variant(idx) => self.ecx.project_downcast(op, idx).ok(), TrackElem::Discriminant => { let variant = self.ecx.read_discriminant(op).ok()?; - let discr_value = self.ecx.discriminant_for_variant(op.layout, variant).ok()?; + let discr_value = + self.ecx.discriminant_for_variant(op.layout.ty, variant).ok()?; Some(discr_value.into()) } TrackElem::DerefLen => { @@ -507,7 +508,8 @@ return None; } let enum_ty_layout = self.tcx.layout_of(self.param_env.and(enum_ty)).ok()?; - let discr_value = self.ecx.discriminant_for_variant(enum_ty_layout, variant_index).ok()?; + let discr_value = + self.ecx.discriminant_for_variant(enum_ty_layout.ty, variant_index).ok()?; Some(discr_value.to_scalar()) } @@ -554,18 +556,153 @@ fn try_make_constant( &self, + ecx: &mut InterpCx<'tcx, 'tcx, DummyMachine>, place: Place<'tcx>, state: &State>, map: &Map, ) -> Option> { - let FlatSet::Elem(Scalar::Int(value)) = state.get(place.as_ref(), &map) else { - return None; - }; let ty = place.ty(self.local_decls, self.patch.tcx).ty; - Some(Const::Val(ConstValue::Scalar(value.into()), ty)) + let layout = ecx.layout_of(ty).ok()?; + + if layout.is_zst() { + return Some(Const::zero_sized(ty)); + } + + if layout.is_unsized() { + return None; + } + + let place = map.find(place.as_ref())?; + if layout.abi.is_scalar() + && let Some(value) = propagatable_scalar(place, state, map) + { + return Some(Const::Val(ConstValue::Scalar(value), ty)); + } + + if matches!(layout.abi, Abi::Scalar(..) 
| Abi::ScalarPair(..)) { + let alloc_id = ecx + .intern_with_temp_alloc(layout, |ecx, dest| { + try_write_constant(ecx, dest, place, ty, state, map) + }) + .ok()?; + return Some(Const::Val(ConstValue::Indirect { alloc_id, offset: Size::ZERO }, ty)); + } + + None + } +} + +fn propagatable_scalar( + place: PlaceIndex, + state: &State>, + map: &Map, +) -> Option { + if let FlatSet::Elem(value) = state.get_idx(place, map) && value.try_to_int().is_ok() { + // Do not attempt to propagate pointers, as we may fail to preserve their identity. + Some(value) + } else { + None } } +#[instrument(level = "trace", skip(ecx, state, map))] +fn try_write_constant<'tcx>( + ecx: &mut InterpCx<'_, 'tcx, DummyMachine>, + dest: &PlaceTy<'tcx>, + place: PlaceIndex, + ty: Ty<'tcx>, + state: &State>, + map: &Map, +) -> InterpResult<'tcx> { + let layout = ecx.layout_of(ty)?; + + // Fast path for ZSTs. + if layout.is_zst() { + return Ok(()); + } + + // Fast path for scalars. + if layout.abi.is_scalar() + && let Some(value) = propagatable_scalar(place, state, map) + { + return ecx.write_immediate(Immediate::Scalar(value), dest); + } + + match ty.kind() { + // ZSTs. Nothing to do. + ty::FnDef(..) => {} + + // Those are scalars, must be handled above. + ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => throw_machine_stop_str!("primitive type with provenance"), + + ty::Tuple(elem_tys) => { + for (i, elem) in elem_tys.iter().enumerate() { + let Some(field) = map.apply(place, TrackElem::Field(FieldIdx::from_usize(i))) else { + throw_machine_stop_str!("missing field in tuple") + }; + let field_dest = ecx.project_field(dest, i)?; + try_write_constant(ecx, &field_dest, field, elem, state, map)?; + } + } + + ty::Adt(def, args) => { + if def.is_union() { + throw_machine_stop_str!("cannot propagate unions") + } + + let (variant_idx, variant_def, variant_place, variant_dest) = if def.is_enum() { + let Some(discr) = map.apply(place, TrackElem::Discriminant) else { + throw_machine_stop_str!("missing discriminant for enum") + }; + let FlatSet::Elem(Scalar::Int(discr)) = state.get_idx(discr, map) else { + throw_machine_stop_str!("discriminant with provenance") + }; + let discr_bits = discr.assert_bits(discr.size()); + let Some((variant, _)) = def.discriminants(*ecx.tcx).find(|(_, var)| discr_bits == var.val) else { + throw_machine_stop_str!("illegal discriminant for enum") + }; + let Some(variant_place) = map.apply(place, TrackElem::Variant(variant)) else { + throw_machine_stop_str!("missing variant for enum") + }; + let variant_dest = ecx.project_downcast(dest, variant)?; + (variant, def.variant(variant), variant_place, variant_dest) + } else { + (FIRST_VARIANT, def.non_enum_variant(), place, dest.clone()) + }; + + for (i, field) in variant_def.fields.iter_enumerated() { + let ty = field.ty(*ecx.tcx, args); + let Some(field) = map.apply(variant_place, TrackElem::Field(i)) else { + throw_machine_stop_str!("missing field in ADT") + }; + let field_dest = ecx.project_field(&variant_dest, i.as_usize())?; + try_write_constant(ecx, &field_dest, field, ty, state, map)?; + } + ecx.write_discriminant(variant_idx, dest)?; + } + + // Unsupported for now. + ty::Array(_, _) + + // Do not attempt to support indirection in constants. + | ty::Ref(..) | ty::RawPtr(..) | ty::FnPtr(..) | ty::Str | ty::Slice(_) + + | ty::Never + | ty::Foreign(..) + | ty::Alias(..) + | ty::Param(_) + | ty::Bound(..) + | ty::Placeholder(..) + | ty::Closure(..) + | ty::Coroutine(..) + | ty::Dynamic(..) 
=> throw_machine_stop_str!("unsupported type"), + + ty::Error(_) | ty::Infer(..) | ty::CoroutineWitness(..) => bug!(), + } + + Ok(()) +} + impl<'mir, 'tcx> ResultsVisitor<'mir, 'tcx, Results<'tcx, ValueAnalysisWrapper>>> for Collector<'tcx, '_> @@ -581,8 +718,13 @@ ) { match &statement.kind { StatementKind::Assign(box (_, rvalue)) => { - OperandCollector { state, visitor: self, map: &results.analysis.0.map } - .visit_rvalue(rvalue, location); + OperandCollector { + state, + visitor: self, + ecx: &mut results.analysis.0.ecx, + map: &results.analysis.0.map, + } + .visit_rvalue(rvalue, location); } _ => (), } @@ -600,7 +742,12 @@ // Don't overwrite the assignment if it already uses a constant (to keep the span). } StatementKind::Assign(box (place, _)) => { - if let Some(value) = self.try_make_constant(place, state, &results.analysis.0.map) { + if let Some(value) = self.try_make_constant( + &mut results.analysis.0.ecx, + place, + state, + &results.analysis.0.map, + ) { self.patch.assignments.insert(location, value); } } @@ -615,8 +762,13 @@ terminator: &'mir Terminator<'tcx>, location: Location, ) { - OperandCollector { state, visitor: self, map: &results.analysis.0.map } - .visit_terminator(terminator, location); + OperandCollector { + state, + visitor: self, + ecx: &mut results.analysis.0.ecx, + map: &results.analysis.0.map, + } + .visit_terminator(terminator, location); } } @@ -671,6 +823,7 @@ struct OperandCollector<'tcx, 'map, 'locals, 'a> { state: &'a State>, visitor: &'a mut Collector<'tcx, 'locals>, + ecx: &'map mut InterpCx<'tcx, 'tcx, DummyMachine>, map: &'map Map, } @@ -683,7 +836,7 @@ location: Location, ) { if let PlaceElem::Index(local) = elem - && let Some(value) = self.visitor.try_make_constant(local.into(), self.state, self.map) + && let Some(value) = self.visitor.try_make_constant(self.ecx, local.into(), self.state, self.map) { self.visitor.patch.before_effect.insert((location, local.into()), value); } @@ -691,7 +844,9 @@ fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) { if let Some(place) = operand.place() { - if let Some(value) = self.visitor.try_make_constant(place, self.state, self.map) { + if let Some(value) = + self.visitor.try_make_constant(self.ecx, place, self.state, self.map) + { self.visitor.patch.before_effect.insert((location, place), value); } else if !place.projection.is_empty() { // Try to propagate into `Index` projections. @@ -701,7 +856,7 @@ } } -struct DummyMachine; +pub(crate) struct DummyMachine; impl<'mir, 'tcx: 'mir> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachine { rustc_const_eval::interpret::compile_time_machine!(<'mir, 'tcx>); @@ -709,22 +864,12 @@ const PANIC_ON_ALLOC_FAIL: bool = true; #[inline(always)] - fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> CheckAlignment { - // We do not check for alignment to avoid having to carry an `Align` - // in `ConstValue::ByRef`. 
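`try_make_constant`/`try_write_constant` above only materialize a value when the dataflow state pins a place to a single scalar (`FlatSet::Elem`) without provenance; anything merged from conflicting paths degrades to `Top` and is left alone. A standalone sketch of such a flat lattice and its join (an illustration only, not the `rustc_mir_dataflow` type itself):

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Flat<T> {
        Bottom,  // unreachable / no information yet
        Elem(T), // exactly one known value
        Top,     // could be anything
    }

    impl<T: PartialEq + Copy> Flat<T> {
        // Joining the states of two incoming edges: agreement keeps the value,
        // disagreement loses it.
        fn join(self, other: Self) -> Self {
            match (self, other) {
                (Flat::Bottom, x) | (x, Flat::Bottom) => x,
                (Flat::Elem(a), Flat::Elem(b)) if a == b => Flat::Elem(a),
                _ => Flat::Top,
            }
        }
    }

    fn main() {
        assert_eq!(Flat::Elem(1u64).join(Flat::Elem(1)), Flat::Elem(1));
        assert_eq!(Flat::Elem(1u64).join(Flat::Elem(2)), Flat::Top);
        assert_eq!(Flat::Bottom.join(Flat::Elem(7u64)), Flat::Elem(7));
    }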
- CheckAlignment::No + fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool { + false // no reason to enforce alignment } fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>, _layout: TyAndLayout<'tcx>) -> bool { - unimplemented!() - } - fn alignment_check_failed( - _ecx: &InterpCx<'mir, 'tcx, Self>, - _has: Align, - _required: Align, - _check: CheckAlignment, - ) -> interpret::InterpResult<'tcx, ()> { - unimplemented!() + false } fn before_access_global( @@ -736,13 +881,13 @@ is_write: bool, ) -> InterpResult<'tcx> { if is_write { - crate::const_prop::throw_machine_stop_str!("can't write to global"); + throw_machine_stop_str!("can't write to global"); } // If the static allocation is mutable, then we can't const prop it as its content // might be different at runtime. if alloc.inner().mutability.is_mut() { - crate::const_prop::throw_machine_stop_str!("can't access mutable globals in ConstProp"); + throw_machine_stop_str!("can't access mutable globals in ConstProp"); } Ok(()) @@ -792,7 +937,7 @@ _left: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>, _right: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>, ) -> interpret::InterpResult<'tcx, (ImmTy<'tcx, Self::Provenance>, bool)> { - crate::const_prop::throw_machine_stop_str!("can't do pointer arithmetic"); + throw_machine_stop_str!("can't do pointer arithmetic"); } fn expose_ptr( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dead_store_elimination.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dead_store_elimination.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dead_store_elimination.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dead_store_elimination.rs 2023-12-21 16:55:28.000000000 +0000 @@ -13,10 +13,10 @@ //! use crate::util::is_within_packed; -use rustc_index::bit_set::BitSet; use rustc_middle::mir::visit::Visitor; use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; +use rustc_mir_dataflow::debuginfo::debuginfo_locals; use rustc_mir_dataflow::impls::{ borrowed_locals, LivenessTransferFunction, MaybeTransitiveLiveLocals, }; @@ -26,8 +26,15 @@ /// /// The `borrowed` set must be a `BitSet` of all the locals that are ever borrowed in this body. It /// can be generated via the [`borrowed_locals`] function. -pub fn eliminate<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>, borrowed: &BitSet) { - let mut live = MaybeTransitiveLiveLocals::new(borrowed) +pub fn eliminate<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let borrowed_locals = borrowed_locals(body); + + // If the user requests complete debuginfo, mark the locals that appear in it as live, so + // we don't remove assignements to them. + let mut always_live = debuginfo_locals(body); + always_live.union(&borrowed_locals); + + let mut live = MaybeTransitiveLiveLocals::new(&always_live) .into_engine(tcx, body) .iterate_to_fixpoint() .into_results_cursor(body); @@ -48,7 +55,9 @@ for (index, arg) in args.iter().enumerate().rev() { if let Operand::Copy(place) = *arg && !place.is_indirect() - && !borrowed.contains(place.local) + // Do not skip the transformation if the local is in debuginfo, as we do + // not really lose any information for this purpose. + && !borrowed_locals.contains(place.local) && !state.contains(place.local) // If `place` is a projection of a disaligned field in a packed ADT, // the move may be codegened as a pointer to that field. 
@@ -75,7 +84,7 @@ StatementKind::Assign(box (place, _)) | StatementKind::SetDiscriminant { place: box place, .. } | StatementKind::Deinit(box place) => { - if !place.is_indirect() && !borrowed.contains(place.local) { + if !place.is_indirect() && !always_live.contains(place.local) { live.seek_before_primary_effect(loc); if !live.get().contains(place.local) { patch.push(loc); @@ -126,7 +135,6 @@ } fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - let borrowed = borrowed_locals(body); - eliminate(tcx, body, &borrowed); + eliminate(tcx, body); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/deduce_param_attrs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/deduce_param_attrs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/deduce_param_attrs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/deduce_param_attrs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -44,6 +44,7 @@ // Whether mutating though a `&raw const` is allowed is still undecided, so we // disable any sketchy `readonly` optimizations for now. // But we only need to do this if the pointer would point into the argument. + // IOW: for indirect places, like `&raw (*local).field`, this surely cannot mutate `local`. !place.is_indirect() } PlaceContext::NonMutatingUse(..) | PlaceContext::NonUse(..) => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/deref_separator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/deref_separator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/deref_separator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/deref_separator.rs 2023-12-21 16:55:28.000000000 +0000 @@ -37,7 +37,7 @@ for (idx, (p_ref, p_elem)) in place.iter_projections().enumerate() { if !p_ref.projection.is_empty() && p_elem == ProjectionElem::Deref { let ty = p_ref.ty(self.local_decls, self.tcx).ty; - let temp = self.patcher.new_internal_with_info( + let temp = self.patcher.new_local_with_info( ty, self.local_decls[p_ref.local].source_info.span, LocalInfo::DerefTemp, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dest_prop.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dest_prop.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dest_prop.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/dest_prop.rs 2023-12-21 16:55:28.000000000 +0000 @@ -114,7 +114,7 @@ //! approach that only works for some classes of CFGs: //! - rustc now has a powerful dataflow analysis framework that can handle forwards and backwards //! analyses efficiently. -//! - Layout optimizations for generators have been added to improve code generation for +//! - Layout optimizations for coroutines have been added to improve code generation for //! async/await, which are very similar in spirit to what this optimization does. //! //! Also, rustc now has a simple NRVO pass (see `nrvo.rs`), which handles a subset of the cases that @@ -244,7 +244,7 @@ if round_count != 0 { // Merging can introduce overlap between moved arguments and/or call destination in an // unreachable code, which validator considers to be ill-formed. 
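The dead-store-elimination change above folds `borrowed_locals` and `debuginfo_locals` into a single always-live set before running the transitive liveness analysis, so assignments to locals mentioned in debuginfo are kept. A small self-contained sketch of the underlying idea on a straight-line block (toy `Stmt` type, not the MIR data structures):

    use std::collections::HashSet;

    // One toy statement: `def = f(uses...)`.
    struct Stmt {
        def: &'static str,
        uses: Vec<&'static str>,
    }

    // Walk the block backwards; a store whose destination is neither read later
    // nor in the always-live set is dead, and (transitively) its operands do not
    // become live either.
    fn dead_stores(block: &[Stmt], always_live: &HashSet<&'static str>) -> Vec<usize> {
        let mut live: HashSet<&'static str> = always_live.clone();
        let mut dead = Vec::new();
        for (i, stmt) in block.iter().enumerate().rev() {
            let needed = live.contains(stmt.def) || always_live.contains(stmt.def);
            if !needed {
                dead.push(i);
                continue;
            }
            live.remove(stmt.def);
            live.extend(stmt.uses.iter().copied());
        }
        dead
    }

    fn main() {
        // x = ...; x = ...; y = x  -- the first store to `x` is dead.
        let block = vec![
            Stmt { def: "x", uses: vec![] },
            Stmt { def: "x", uses: vec![] },
            Stmt { def: "y", uses: vec!["x"] },
        ];
        // Pretend `y` must be kept, e.g. because it appears in debuginfo.
        let always_live: HashSet<_> = ["y"].into_iter().collect();
        assert_eq!(dead_stores(&block, &always_live), vec![0]);
    }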
- remove_dead_blocks(tcx, body); + remove_dead_blocks(body); } trace!(round_count); @@ -655,7 +655,7 @@ // `Drop`s create a `&mut` and so are not considered } TerminatorKind::Yield { .. } - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } => { bug!("{:?} not found in this MIR phase", terminator) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/early_otherwise_branch.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/early_otherwise_branch.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/early_otherwise_branch.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/early_otherwise_branch.rs 2023-12-21 16:55:28.000000000 +0000 @@ -95,6 +95,7 @@ impl<'tcx> MirPass<'tcx> for EarlyOtherwiseBranch { fn is_enabled(&self, sess: &rustc_session::Session) -> bool { + // unsound: https://github.com/rust-lang/rust/issues/95162 sess.mir_opt_level() >= 3 && sess.opts.unstable_opts.unsound_mir_opts } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -69,7 +69,7 @@ let (unique_ty, nonnull_ty, ptr_ty) = build_ptr_tys(tcx, base_ty.boxed_ty(), self.unique_did, self.nonnull_did); - let ptr_local = self.patch.new_internal(ptr_ty, source_info.span); + let ptr_local = self.patch.new_temp(ptr_ty, source_info.span); self.patch.add_assign( location, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/elaborate_drops.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/elaborate_drops.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/elaborate_drops.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/elaborate_drops.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,9 +9,9 @@ use rustc_mir_dataflow::elaborate_drops::{DropElaborator, DropFlagMode, DropStyle}; use rustc_mir_dataflow::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces}; use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex}; +use rustc_mir_dataflow::on_all_children_bits; use rustc_mir_dataflow::on_lookup_result_bits; use rustc_mir_dataflow::MoveDataParamEnv; -use rustc_mir_dataflow::{on_all_children_bits, on_all_drop_children_bits}; use rustc_mir_dataflow::{Analysis, ResultsCursor}; use rustc_span::Span; use rustc_target::abi::{FieldIdx, VariantIdx}; @@ -54,16 +54,10 @@ let def_id = body.source.def_id(); let param_env = tcx.param_env_reveal_all_normalized(def_id); - let move_data = match MoveData::gather_moves(body, tcx, param_env) { - Ok(move_data) => move_data, - Err((move_data, _)) => { - tcx.sess.delay_span_bug( - body.span, - "No `move_errors` should be allowed in MIR borrowck", - ); - move_data - } - }; + // For types that do not need dropping, the behaviour is trivial. So we only need to track + // init/uninit for types that do need dropping. 
+ let move_data = + MoveData::gather_moves(&body, tcx, param_env, |ty| ty.needs_drop(tcx, param_env)); let elaborate_patch = { let env = MoveDataParamEnv { move_data, param_env }; @@ -178,13 +172,19 @@ let mut some_live = false; let mut some_dead = false; let mut children_count = 0; - on_all_drop_children_bits(self.tcx(), self.body(), self.ctxt.env, path, |child| { - let (live, dead) = self.ctxt.init_data.maybe_live_dead(child); - debug!("elaborate_drop: state({:?}) = {:?}", child, (live, dead)); - some_live |= live; - some_dead |= dead; - children_count += 1; - }); + on_all_children_bits( + self.tcx(), + self.body(), + self.ctxt.move_data(), + path, + |child| { + let (live, dead) = self.ctxt.init_data.maybe_live_dead(child); + debug!("elaborate_drop: state({:?}) = {:?}", child, (live, dead)); + some_live |= live; + some_dead |= dead; + children_count += 1; + }, + ); ((some_live, some_dead), children_count != 1) } }; @@ -271,7 +271,7 @@ let tcx = self.tcx; let patch = &mut self.patch; debug!("create_drop_flag({:?})", self.body.span); - self.drop_flags[index].get_or_insert_with(|| patch.new_internal(tcx.types.bool, span)); + self.drop_flags[index].get_or_insert_with(|| patch.new_temp(tcx.types.bool, span)); } fn drop_flag(&mut self, index: MovePathIndex) -> Option> { @@ -296,26 +296,36 @@ fn collect_drop_flags(&mut self) { for (bb, data) in self.body.basic_blocks.iter_enumerated() { let terminator = data.terminator(); - let place = match terminator.kind { - TerminatorKind::Drop { ref place, .. } => place, - _ => continue, - }; - - self.init_data.seek_before(self.body.terminator_loc(bb)); + let TerminatorKind::Drop { ref place, .. } = terminator.kind else { continue }; let path = self.move_data().rev_lookup.find(place.as_ref()); debug!("collect_drop_flags: {:?}, place {:?} ({:?})", bb, place, path); - let path = match path { - LookupResult::Exact(e) => e, - LookupResult::Parent(None) => continue, + match path { + LookupResult::Exact(path) => { + self.init_data.seek_before(self.body.terminator_loc(bb)); + on_all_children_bits(self.tcx, self.body, self.move_data(), path, |child| { + let (maybe_live, maybe_dead) = self.init_data.maybe_live_dead(child); + debug!( + "collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}", + child, + place, + path, + (maybe_live, maybe_dead) + ); + if maybe_live && maybe_dead { + self.create_drop_flag(child, terminator.source_info.span) + } + }); + } + LookupResult::Parent(None) => {} LookupResult::Parent(Some(parent)) => { - let (_maybe_live, maybe_dead) = self.init_data.maybe_live_dead(parent); - if self.body.local_decls[place.local].is_deref_temp() { continue; } + self.init_data.seek_before(self.body.terminator_loc(bb)); + let (_maybe_live, maybe_dead) = self.init_data.maybe_live_dead(parent); if maybe_dead { self.tcx.sess.delay_span_bug( terminator.source_info.span, @@ -324,80 +334,74 @@ ), ); } - continue; } }; - - on_all_drop_children_bits(self.tcx, self.body, self.env, path, |child| { - let (maybe_live, maybe_dead) = self.init_data.maybe_live_dead(child); - debug!( - "collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}", - child, - place, - path, - (maybe_live, maybe_dead) - ); - if maybe_live && maybe_dead { - self.create_drop_flag(child, terminator.source_info.span) - } - }); } } fn elaborate_drops(&mut self) { + // This function should mirror what `collect_drop_flags` does. 
for (bb, data) in self.body.basic_blocks.iter_enumerated() { - let loc = Location { block: bb, statement_index: data.statements.len() }; let terminator = data.terminator(); + let TerminatorKind::Drop { place, target, unwind, replace } = terminator.kind else { + continue; + }; - match terminator.kind { - TerminatorKind::Drop { place, target, unwind, replace } => { - self.init_data.seek_before(loc); - match self.move_data().rev_lookup.find(place.as_ref()) { - LookupResult::Exact(path) => { - let unwind = if data.is_cleanup { - Unwind::InCleanup - } else { - match unwind { - UnwindAction::Cleanup(cleanup) => Unwind::To(cleanup), - UnwindAction::Continue => Unwind::To(self.patch.resume_block()), - UnwindAction::Unreachable => { - Unwind::To(self.patch.unreachable_cleanup_block()) - } - UnwindAction::Terminate(reason) => { - debug_assert_ne!( - reason, - UnwindTerminateReason::InCleanup, - "we are not in a cleanup block, InCleanup reason should be impossible" - ); - Unwind::To(self.patch.terminate_block(reason)) - } - } - }; - elaborate_drop( - &mut Elaborator { ctxt: self }, - terminator.source_info, - place, - path, - target, - unwind, - bb, - ) + // This place does not need dropping. It does not have an associated move-path, so the + // match below will conservatively keep an unconditional drop. As that drop is useless, + // just remove it here and now. + if !place + .ty(&self.body.local_decls, self.tcx) + .ty + .needs_drop(self.tcx, self.env.param_env) + { + self.patch.patch_terminator(bb, TerminatorKind::Goto { target }); + continue; + } + + let path = self.move_data().rev_lookup.find(place.as_ref()); + match path { + LookupResult::Exact(path) => { + let unwind = match unwind { + _ if data.is_cleanup => Unwind::InCleanup, + UnwindAction::Cleanup(cleanup) => Unwind::To(cleanup), + UnwindAction::Continue => Unwind::To(self.patch.resume_block()), + UnwindAction::Unreachable => { + Unwind::To(self.patch.unreachable_cleanup_block()) } - LookupResult::Parent(..) => { - if !replace { - self.tcx.sess.delay_span_bug( - terminator.source_info.span, - format!("drop of untracked value {bb:?}"), - ); - } - // A drop and replace behind a pointer/array/whatever. - // The borrow checker requires that these locations are initialized before the assignment, - // so we just leave an unconditional drop. - assert!(!data.is_cleanup); + UnwindAction::Terminate(reason) => { + debug_assert_ne!( + reason, + UnwindTerminateReason::InCleanup, + "we are not in a cleanup block, InCleanup reason should be impossible" + ); + Unwind::To(self.patch.terminate_block(reason)) } + }; + self.init_data.seek_before(self.body.terminator_loc(bb)); + elaborate_drop( + &mut Elaborator { ctxt: self }, + terminator.source_info, + place, + path, + target, + unwind, + bb, + ) + } + LookupResult::Parent(None) => {} + LookupResult::Parent(Some(_)) => { + if !replace { + self.tcx.sess.delay_span_bug( + terminator.source_info.span, + format!("drop of untracked value {bb:?}"), + ); } + // A drop and replace behind a pointer/array/whatever. + // The borrow checker requires that these locations are initialized before the assignment, + // so we just leave an unconditional drop. 
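The rewritten `collect_drop_flags`/`elaborate_drops` logic above turns on what the init/uninit dataflow knows at each `Drop` terminator: drops of values that never need dropping become plain gotos, definitely-initialized values keep a static drop, and only the ambiguous cases get a drop flag. A compact sketch of that three-way decision (names are illustrative and loosely follow the pass):

    #[derive(Debug, PartialEq)]
    enum ElaboratedDrop {
        RemoveDrop,      // !needs_drop or definitely uninitialized: becomes a goto
        StaticDrop,      // definitely initialized: keep an unconditional drop
        ConditionalDrop, // initialized on some paths only: test a drop flag first
    }

    fn elaborate(needs_drop: bool, maybe_init: bool, maybe_uninit: bool) -> ElaboratedDrop {
        if !needs_drop || !maybe_init {
            ElaboratedDrop::RemoveDrop
        } else if !maybe_uninit {
            ElaboratedDrop::StaticDrop
        } else {
            ElaboratedDrop::ConditionalDrop
        }
    }

    fn main() {
        assert_eq!(elaborate(true, true, false), ElaboratedDrop::StaticDrop);
        assert_eq!(elaborate(true, true, true), ElaboratedDrop::ConditionalDrop);
        assert_eq!(elaborate(true, false, true), ElaboratedDrop::RemoveDrop);
        assert_eq!(elaborate(false, true, false), ElaboratedDrop::RemoveDrop);
    }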
+ assert!(!data.is_cleanup); } - _ => continue, } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs 2023-12-21 16:55:28.000000000 +0000 @@ -58,7 +58,7 @@ let body_abi = match body_ty.kind() { ty::FnDef(..) => body_ty.fn_sig(tcx).abi(), ty::Closure(..) => Abi::RustCall, - ty::Generator(..) => Abi::Rust, + ty::Coroutine(..) => Abi::Rust, _ => span_bug!(body.span, "unexpected body ty: {:?}", body_ty), }; let body_can_unwind = layout::fn_can_unwind(tcx, Some(def_id), body_abi); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/generator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/generator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/generator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/generator.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1905 +0,0 @@ -//! This is the implementation of the pass which transforms generators into state machines. -//! -//! MIR generation for generators creates a function which has a self argument which -//! passes by value. This argument is effectively a generator type which only contains upvars and -//! is only used for this argument inside the MIR for the generator. -//! It is passed by value to enable upvars to be moved out of it. Drop elaboration runs on that -//! MIR before this pass and creates drop flags for MIR locals. -//! It will also drop the generator argument (which only consists of upvars) if any of the upvars -//! are moved out of. This pass elaborates the drops of upvars / generator argument in the case -//! that none of the upvars were moved out of. This is because we cannot have any drops of this -//! generator in the MIR, since it is used to create the drop glue for the generator. We'd get -//! infinite recursion otherwise. -//! -//! This pass creates the implementation for either the `Generator::resume` or `Future::poll` -//! function and the drop shim for the generator based on the MIR input. -//! It converts the generator argument from Self to &mut Self adding derefs in the MIR as needed. -//! It computes the final layout of the generator struct which looks like this: -//! First upvars are stored -//! It is followed by the generator state field. -//! Then finally the MIR locals which are live across a suspension point are stored. -//! ```ignore (illustrative) -//! struct Generator { -//! upvars..., -//! state: u32, -//! mir_locals..., -//! } -//! ``` -//! This pass computes the meaning of the state field and the MIR locals which are live -//! across a suspension point. There are however three hardcoded generator states: -//! 0 - Generator have not been resumed yet -//! 1 - Generator has returned / is completed -//! 2 - Generator has been poisoned -//! -//! It also rewrites `return x` and `yield y` as setting a new generator state and returning -//! `GeneratorState::Complete(x)` and `GeneratorState::Yielded(y)`, -//! or `Poll::Ready(x)` and `Poll::Pending` respectively. -//! MIR locals which are live across a suspension point are moved to the generator struct -//! with references to them being updated with references to the generator struct. -//! -//! 
The pass creates two functions which have a switch on the generator state giving -//! the action to take. -//! -//! One of them is the implementation of `Generator::resume` / `Future::poll`. -//! For generators with state 0 (unresumed) it starts the execution of the generator. -//! For generators with state 1 (returned) and state 2 (poisoned) it panics. -//! Otherwise it continues the execution from the last suspension point. -//! -//! The other function is the drop glue for the generator. -//! For generators with state 0 (unresumed) it drops the upvars of the generator. -//! For generators with state 1 (returned) and state 2 (poisoned) it does nothing. -//! Otherwise it drops all the values in scope at the last suspension point. - -use crate::abort_unwinding_calls; -use crate::deref_separator::deref_finder; -use crate::errors; -use crate::pass_manager as pm; -use crate::simplify; -use crate::MirPass; -use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_errors::pluralize; -use rustc_hir as hir; -use rustc_hir::lang_items::LangItem; -use rustc_hir::GeneratorKind; -use rustc_index::bit_set::{BitMatrix, BitSet, GrowableBitSet}; -use rustc_index::{Idx, IndexVec}; -use rustc_middle::mir::dump_mir; -use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; -use rustc_middle::mir::*; -use rustc_middle::ty::InstanceDef; -use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt}; -use rustc_middle::ty::{GeneratorArgs, GenericArgsRef}; -use rustc_mir_dataflow::impls::{ - MaybeBorrowedLocals, MaybeLiveLocals, MaybeRequiresStorage, MaybeStorageLive, -}; -use rustc_mir_dataflow::storage::always_storage_live_locals; -use rustc_mir_dataflow::{self, Analysis}; -use rustc_span::def_id::{DefId, LocalDefId}; -use rustc_span::symbol::sym; -use rustc_span::Span; -use rustc_target::abi::{FieldIdx, VariantIdx}; -use rustc_target::spec::PanicStrategy; -use std::{iter, ops}; - -pub struct StateTransform; - -struct RenameLocalVisitor<'tcx> { - from: Local, - to: Local, - tcx: TyCtxt<'tcx>, -} - -impl<'tcx> MutVisitor<'tcx> for RenameLocalVisitor<'tcx> { - fn tcx(&self) -> TyCtxt<'tcx> { - self.tcx - } - - fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) { - if *local == self.from { - *local = self.to; - } - } - - fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) { - match terminator.kind { - TerminatorKind::Return => { - // Do not replace the implicit `_0` access here, as that's not possible. The - // transform already handles `return` correctly. 
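The removed module documentation above describes the two functions the pass derives from the generator body: a resume/poll function and a drop function, each switching on the state discriminant. A hand-written analogue of what the generated resume logic amounts to for a generator with a single yield (purely illustrative; the real generated MIR also handles drops, storage and unwinding):

    enum ToyState {
        Unresumed,           // state 0
        Returned,            // state 1
        Poisoned,            // state 2
        Suspend0 { x: u32 }, // state 3: locals live across the first yield
    }

    enum ToyYield {
        Yielded(u32),
        Complete(u32),
    }

    // Roughly what `Generator::resume` compiles to after the transform: switch on
    // the current state, run until the next yield or return, store the new state.
    fn resume(state: &mut ToyState) -> ToyYield {
        match std::mem::replace(state, ToyState::Poisoned) {
            ToyState::Unresumed => {
                let x = 40;
                *state = ToyState::Suspend0 { x };
                ToyYield::Yielded(x)
            }
            ToyState::Suspend0 { x } => {
                *state = ToyState::Returned;
                ToyYield::Complete(x + 2)
            }
            ToyState::Returned => panic!("generator resumed after completion"),
            ToyState::Poisoned => panic!("generator resumed after panicking"),
        }
    }

    fn main() {
        let mut st = ToyState::Unresumed;
        match resume(&mut st) {
            ToyYield::Yielded(v) => assert_eq!(v, 40),
            _ => unreachable!(),
        }
        match resume(&mut st) {
            ToyYield::Complete(v) => assert_eq!(v, 42),
            _ => unreachable!(),
        }
    }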
- } - _ => self.super_terminator(terminator, location), - } - } -} - -struct DerefArgVisitor<'tcx> { - tcx: TyCtxt<'tcx>, -} - -impl<'tcx> MutVisitor<'tcx> for DerefArgVisitor<'tcx> { - fn tcx(&self) -> TyCtxt<'tcx> { - self.tcx - } - - fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) { - assert_ne!(*local, SELF_ARG); - } - - fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) { - if place.local == SELF_ARG { - replace_base( - place, - Place { - local: SELF_ARG, - projection: self.tcx().mk_place_elems(&[ProjectionElem::Deref]), - }, - self.tcx, - ); - } else { - self.visit_local(&mut place.local, context, location); - - for elem in place.projection.iter() { - if let PlaceElem::Index(local) = elem { - assert_ne!(local, SELF_ARG); - } - } - } - } -} - -struct PinArgVisitor<'tcx> { - ref_gen_ty: Ty<'tcx>, - tcx: TyCtxt<'tcx>, -} - -impl<'tcx> MutVisitor<'tcx> for PinArgVisitor<'tcx> { - fn tcx(&self) -> TyCtxt<'tcx> { - self.tcx - } - - fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) { - assert_ne!(*local, SELF_ARG); - } - - fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) { - if place.local == SELF_ARG { - replace_base( - place, - Place { - local: SELF_ARG, - projection: self.tcx().mk_place_elems(&[ProjectionElem::Field( - FieldIdx::new(0), - self.ref_gen_ty, - )]), - }, - self.tcx, - ); - } else { - self.visit_local(&mut place.local, context, location); - - for elem in place.projection.iter() { - if let PlaceElem::Index(local) = elem { - assert_ne!(local, SELF_ARG); - } - } - } - } -} - -fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtxt<'tcx>) { - place.local = new_base.local; - - let mut new_projection = new_base.projection.to_vec(); - new_projection.append(&mut place.projection.to_vec()); - - place.projection = tcx.mk_place_elems(&new_projection); -} - -const SELF_ARG: Local = Local::from_u32(1); - -/// Generator has not been resumed yet. -const UNRESUMED: usize = GeneratorArgs::UNRESUMED; -/// Generator has returned / is completed. -const RETURNED: usize = GeneratorArgs::RETURNED; -/// Generator has panicked and is poisoned. -const POISONED: usize = GeneratorArgs::POISONED; - -/// Number of variants to reserve in generator state. Corresponds to -/// `UNRESUMED` (beginning of a generator) and `RETURNED`/`POISONED` -/// (end of a generator) states. -const RESERVED_VARIANTS: usize = 3; - -/// A `yield` point in the generator. -struct SuspensionPoint<'tcx> { - /// State discriminant used when suspending or resuming at this point. - state: usize, - /// The block to jump to after resumption. - resume: BasicBlock, - /// Where to move the resume argument after resumption. - resume_arg: Place<'tcx>, - /// Which block to jump to if the generator is dropped in this state. - drop: Option, - /// Set of locals that have live storage while at this suspension point. - storage_liveness: GrowableBitSet, -} - -struct TransformVisitor<'tcx> { - tcx: TyCtxt<'tcx>, - is_async_kind: bool, - state_adt_ref: AdtDef<'tcx>, - state_args: GenericArgsRef<'tcx>, - - // The type of the discriminant in the generator struct - discr_ty: Ty<'tcx>, - - // Mapping from Local to (type of local, generator struct index) - // FIXME(eddyb) This should use `IndexVec>`. 
- remap: FxHashMap, VariantIdx, FieldIdx)>, - - // A map from a suspension point in a block to the locals which have live storage at that point - storage_liveness: IndexVec>>, - - // A list of suspension points, generated during the transform - suspension_points: Vec>, - - // The set of locals that have no `StorageLive`/`StorageDead` annotations. - always_live_locals: BitSet, - - // The original RETURN_PLACE local - new_ret_local: Local, -} - -impl<'tcx> TransformVisitor<'tcx> { - // Make a `GeneratorState` or `Poll` variant assignment. - // - // `core::ops::GeneratorState` only has single element tuple variants, - // so we can just write to the downcasted first field and then set the - // discriminant to the appropriate variant. - fn make_state( - &self, - val: Operand<'tcx>, - source_info: SourceInfo, - is_return: bool, - statements: &mut Vec>, - ) { - let idx = VariantIdx::new(match (is_return, self.is_async_kind) { - (true, false) => 1, // GeneratorState::Complete - (false, false) => 0, // GeneratorState::Yielded - (true, true) => 0, // Poll::Ready - (false, true) => 1, // Poll::Pending - }); - - let kind = AggregateKind::Adt(self.state_adt_ref.did(), idx, self.state_args, None, None); - - // `Poll::Pending` - if self.is_async_kind && idx == VariantIdx::new(1) { - assert_eq!(self.state_adt_ref.variant(idx).fields.len(), 0); - - // FIXME(swatinem): assert that `val` is indeed unit? - statements.push(Statement { - kind: StatementKind::Assign(Box::new(( - Place::return_place(), - Rvalue::Aggregate(Box::new(kind), IndexVec::new()), - ))), - source_info, - }); - return; - } - - // else: `Poll::Ready(x)`, `GeneratorState::Yielded(x)` or `GeneratorState::Complete(x)` - assert_eq!(self.state_adt_ref.variant(idx).fields.len(), 1); - - statements.push(Statement { - kind: StatementKind::Assign(Box::new(( - Place::return_place(), - Rvalue::Aggregate(Box::new(kind), [val].into()), - ))), - source_info, - }); - } - - // Create a Place referencing a generator struct field - fn make_field(&self, variant_index: VariantIdx, idx: FieldIdx, ty: Ty<'tcx>) -> Place<'tcx> { - let self_place = Place::from(SELF_ARG); - let base = self.tcx.mk_place_downcast_unnamed(self_place, variant_index); - let mut projection = base.projection.to_vec(); - projection.push(ProjectionElem::Field(idx, ty)); - - Place { local: base.local, projection: self.tcx.mk_place_elems(&projection) } - } - - // Create a statement which changes the discriminant - fn set_discr(&self, state_disc: VariantIdx, source_info: SourceInfo) -> Statement<'tcx> { - let self_place = Place::from(SELF_ARG); - Statement { - source_info, - kind: StatementKind::SetDiscriminant { - place: Box::new(self_place), - variant_index: state_disc, - }, - } - } - - // Create a statement which reads the discriminant into a temporary - fn get_discr(&self, body: &mut Body<'tcx>) -> (Statement<'tcx>, Place<'tcx>) { - let temp_decl = LocalDecl::new(self.discr_ty, body.span).internal(); - let local_decls_len = body.local_decls.push(temp_decl); - let temp = Place::from(local_decls_len); - - let self_place = Place::from(SELF_ARG); - let assign = Statement { - source_info: SourceInfo::outermost(body.span), - kind: StatementKind::Assign(Box::new((temp, Rvalue::Discriminant(self_place)))), - }; - (assign, temp) - } -} - -impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> { - fn tcx(&self) -> TyCtxt<'tcx> { - self.tcx - } - - fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) { - assert_eq!(self.remap.get(local), None); - } - - fn visit_place( - &mut 
self, - place: &mut Place<'tcx>, - _context: PlaceContext, - _location: Location, - ) { - // Replace an Local in the remap with a generator struct access - if let Some(&(ty, variant_index, idx)) = self.remap.get(&place.local) { - replace_base(place, self.make_field(variant_index, idx, ty), self.tcx); - } - } - - fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) { - // Remove StorageLive and StorageDead statements for remapped locals - data.retain_statements(|s| match s.kind { - StatementKind::StorageLive(l) | StatementKind::StorageDead(l) => { - !self.remap.contains_key(&l) - } - _ => true, - }); - - let ret_val = match data.terminator().kind { - TerminatorKind::Return => { - Some((true, None, Operand::Move(Place::from(self.new_ret_local)), None)) - } - TerminatorKind::Yield { ref value, resume, resume_arg, drop } => { - Some((false, Some((resume, resume_arg)), value.clone(), drop)) - } - _ => None, - }; - - if let Some((is_return, resume, v, drop)) = ret_val { - let source_info = data.terminator().source_info; - // We must assign the value first in case it gets declared dead below - self.make_state(v, source_info, is_return, &mut data.statements); - let state = if let Some((resume, mut resume_arg)) = resume { - // Yield - let state = RESERVED_VARIANTS + self.suspension_points.len(); - - // The resume arg target location might itself be remapped if its base local is - // live across a yield. - let resume_arg = - if let Some(&(ty, variant, idx)) = self.remap.get(&resume_arg.local) { - replace_base(&mut resume_arg, self.make_field(variant, idx, ty), self.tcx); - resume_arg - } else { - resume_arg - }; - - self.suspension_points.push(SuspensionPoint { - state, - resume, - resume_arg, - drop, - storage_liveness: self.storage_liveness[block].clone().unwrap().into(), - }); - - VariantIdx::new(state) - } else { - // Return - VariantIdx::new(RETURNED) // state for returned - }; - data.statements.push(self.set_discr(state, source_info)); - data.terminator_mut().kind = TerminatorKind::Return; - } - - self.super_basic_block_data(block, data); - } -} - -fn make_generator_state_argument_indirect<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - let gen_ty = body.local_decls.raw[1].ty; - - let ref_gen_ty = Ty::new_ref( - tcx, - tcx.lifetimes.re_erased, - ty::TypeAndMut { ty: gen_ty, mutbl: Mutability::Mut }, - ); - - // Replace the by value generator argument - body.local_decls.raw[1].ty = ref_gen_ty; - - // Add a deref to accesses of the generator state - DerefArgVisitor { tcx }.visit_body(body); -} - -fn make_generator_state_argument_pinned<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - let ref_gen_ty = body.local_decls.raw[1].ty; - - let pin_did = tcx.require_lang_item(LangItem::Pin, Some(body.span)); - let pin_adt_ref = tcx.adt_def(pin_did); - let args = tcx.mk_args(&[ref_gen_ty.into()]); - let pin_ref_gen_ty = Ty::new_adt(tcx, pin_adt_ref, args); - - // Replace the by ref generator argument - body.local_decls.raw[1].ty = pin_ref_gen_ty; - - // Add the Pin field access to accesses of the generator state - PinArgVisitor { ref_gen_ty, tcx }.visit_body(body); -} - -/// Allocates a new local and replaces all references of `local` with it. Returns the new local. -/// -/// `local` will be changed to a new local decl with type `ty`. -/// -/// Note that the new local will be uninitialized. It is the caller's responsibility to assign some -/// valid value to it before its first use. 
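Two details of the removed transform are worth keeping in view here: `make_state` chooses which variant of `GeneratorState` or `Poll` to build from whether the body is async and whether it is returning, and `visit_basic_block_data` assigns every yield the next discriminant after the three reserved states. Both can be summarized in a few lines (constants and indices are taken from the removed code; the functions themselves are illustrative):

    // Variant indices as used by the removed `make_state`:
    //   GeneratorState::Yielded = 0, GeneratorState::Complete = 1
    //   Poll::Ready = 0,             Poll::Pending = 1
    fn state_variant_index(is_return: bool, is_async: bool) -> u32 {
        match (is_return, is_async) {
            (true, false) => 1,  // GeneratorState::Complete
            (false, false) => 0, // GeneratorState::Yielded
            (true, true) => 0,   // Poll::Ready
            (false, true) => 1,  // Poll::Pending
        }
    }

    // States 0..=2 are reserved (UNRESUMED, RETURNED, POISONED), so the i-th
    // suspension point is stored as variant RESERVED_VARIANTS + i.
    const RESERVED_VARIANTS: usize = 3;

    fn suspension_state(suspension_index: usize) -> usize {
        RESERVED_VARIANTS + suspension_index
    }

    fn main() {
        assert_eq!(state_variant_index(false, true), 1); // an await that is still pending
        assert_eq!(state_variant_index(true, false), 1); // a generator that has completed
        assert_eq!(suspension_state(0), 3); // first yield
        assert_eq!(suspension_state(1), 4); // second yield
    }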
-fn replace_local<'tcx>( - local: Local, - ty: Ty<'tcx>, - body: &mut Body<'tcx>, - tcx: TyCtxt<'tcx>, -) -> Local { - let new_decl = LocalDecl::new(ty, body.span); - let new_local = body.local_decls.push(new_decl); - body.local_decls.swap(local, new_local); - - RenameLocalVisitor { from: local, to: new_local, tcx }.visit_body(body); - - new_local -} - -/// Transforms the `body` of the generator applying the following transforms: -/// -/// - Eliminates all the `get_context` calls that async lowering created. -/// - Replace all `Local` `ResumeTy` types with `&mut Context<'_>` (`context_mut_ref`). -/// -/// The `Local`s that have their types replaced are: -/// - The `resume` argument itself. -/// - The argument to `get_context`. -/// - The yielded value of a `yield`. -/// -/// The `ResumeTy` hides a `&mut Context<'_>` behind an unsafe raw pointer, and the -/// `get_context` function is being used to convert that back to a `&mut Context<'_>`. -/// -/// Ideally the async lowering would not use the `ResumeTy`/`get_context` indirection, -/// but rather directly use `&mut Context<'_>`, however that would currently -/// lead to higher-kinded lifetime errors. -/// See . -/// -/// The async lowering step and the type / lifetime inference / checking are -/// still using the `ResumeTy` indirection for the time being, and that indirection -/// is removed here. After this transform, the generator body only knows about `&mut Context<'_>`. -fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - let context_mut_ref = Ty::new_task_context(tcx); - - // replace the type of the `resume` argument - replace_resume_ty_local(tcx, body, Local::new(2), context_mut_ref); - - let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, None); - - for bb in START_BLOCK..body.basic_blocks.next_index() { - let bb_data = &body[bb]; - if bb_data.is_cleanup { - continue; - } - - match &bb_data.terminator().kind { - TerminatorKind::Call { func, .. } => { - let func_ty = func.ty(body, tcx); - if let ty::FnDef(def_id, _) = *func_ty.kind() { - if def_id == get_context_def_id { - let local = eliminate_get_context_call(&mut body[bb]); - replace_resume_ty_local(tcx, body, local, context_mut_ref); - } - } else { - continue; - } - } - TerminatorKind::Yield { resume_arg, .. } => { - replace_resume_ty_local(tcx, body, resume_arg.local, context_mut_ref); - } - _ => {} - } - } -} - -fn eliminate_get_context_call<'tcx>(bb_data: &mut BasicBlockData<'tcx>) -> Local { - let terminator = bb_data.terminator.take().unwrap(); - if let TerminatorKind::Call { mut args, destination, target, .. } = terminator.kind { - let arg = args.pop().unwrap(); - let local = arg.place().unwrap().local; - - let arg = Rvalue::Use(arg); - let assign = Statement { - source_info: terminator.source_info, - kind: StatementKind::Assign(Box::new((destination, arg))), - }; - bb_data.statements.push(assign); - bb_data.terminator = Some(Terminator { - source_info: terminator.source_info, - kind: TerminatorKind::Goto { target: target.unwrap() }, - }); - local - } else { - bug!(); - } -} - -#[cfg_attr(not(debug_assertions), allow(unused))] -fn replace_resume_ty_local<'tcx>( - tcx: TyCtxt<'tcx>, - body: &mut Body<'tcx>, - local: Local, - context_mut_ref: Ty<'tcx>, -) { - let local_ty = std::mem::replace(&mut body.local_decls[local].ty, context_mut_ref); - // We have to replace the `ResumeTy` that is used for type and borrow checking - // with `&mut Context<'_>` in MIR. 
- #[cfg(debug_assertions)] - { - if let ty::Adt(resume_ty_adt, _) = local_ty.kind() { - let expected_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None)); - assert_eq!(*resume_ty_adt, expected_adt); - } else { - panic!("expected `ResumeTy`, found `{:?}`", local_ty); - }; - } -} - -struct LivenessInfo { - /// Which locals are live across any suspension point. - saved_locals: GeneratorSavedLocals, - - /// The set of saved locals live at each suspension point. - live_locals_at_suspension_points: Vec>, - - /// Parallel vec to the above with SourceInfo for each yield terminator. - source_info_at_suspension_points: Vec, - - /// For every saved local, the set of other saved locals that are - /// storage-live at the same time as this local. We cannot overlap locals in - /// the layout which have conflicting storage. - storage_conflicts: BitMatrix, - - /// For every suspending block, the locals which are storage-live across - /// that suspension point. - storage_liveness: IndexVec>>, -} - -/// Computes which locals have to be stored in the state-machine for the -/// given coroutine. -/// -/// The basic idea is as follows: -/// - a local is live until we encounter a `StorageDead` statement. In -/// case none exist, the local is considered to be always live. -/// - a local has to be stored if it is either directly used after the -/// the suspend point, or if it is live and has been previously borrowed. -fn locals_live_across_suspend_points<'tcx>( - tcx: TyCtxt<'tcx>, - body: &Body<'tcx>, - always_live_locals: &BitSet, - movable: bool, -) -> LivenessInfo { - let body_ref: &Body<'_> = &body; - - // Calculate when MIR locals have live storage. This gives us an upper bound of their - // lifetimes. - let mut storage_live = MaybeStorageLive::new(std::borrow::Cow::Borrowed(always_live_locals)) - .into_engine(tcx, body_ref) - .iterate_to_fixpoint() - .into_results_cursor(body_ref); - - // Calculate the MIR locals which have been previously - // borrowed (even if they are still active). - let borrowed_locals_results = - MaybeBorrowedLocals.into_engine(tcx, body_ref).pass_name("generator").iterate_to_fixpoint(); - - let mut borrowed_locals_cursor = borrowed_locals_results.cloned_results_cursor(body_ref); - - // Calculate the MIR locals that we actually need to keep storage around - // for. - let mut requires_storage_results = - MaybeRequiresStorage::new(borrowed_locals_results.cloned_results_cursor(body)) - .into_engine(tcx, body_ref) - .iterate_to_fixpoint(); - let mut requires_storage_cursor = requires_storage_results.as_results_cursor(body_ref); - - // Calculate the liveness of MIR locals ignoring borrows. - let mut liveness = MaybeLiveLocals - .into_engine(tcx, body_ref) - .pass_name("generator") - .iterate_to_fixpoint() - .into_results_cursor(body_ref); - - let mut storage_liveness_map = IndexVec::from_elem(None, &body.basic_blocks); - let mut live_locals_at_suspension_points = Vec::new(); - let mut source_info_at_suspension_points = Vec::new(); - let mut live_locals_at_any_suspension_point = BitSet::new_empty(body.local_decls.len()); - - for (block, data) in body.basic_blocks.iter_enumerated() { - if let TerminatorKind::Yield { .. } = data.terminator().kind { - let loc = Location { block, statement_index: data.statements.len() }; - - liveness.seek_to_block_end(block); - let mut live_locals: BitSet<_> = BitSet::new_empty(body.local_decls.len()); - live_locals.union(liveness.get()); - - if !movable { - // The `liveness` variable contains the liveness of MIR locals ignoring borrows. 
- // This is correct for movable generators since borrows cannot live across - // suspension points. However for immovable generators we need to account for - // borrows, so we conservatively assume that all borrowed locals are live until - // we find a StorageDead statement referencing the locals. - // To do this we just union our `liveness` result with `borrowed_locals`, which - // contains all the locals which has been borrowed before this suspension point. - // If a borrow is converted to a raw reference, we must also assume that it lives - // forever. Note that the final liveness is still bounded by the storage liveness - // of the local, which happens using the `intersect` operation below. - borrowed_locals_cursor.seek_before_primary_effect(loc); - live_locals.union(borrowed_locals_cursor.get()); - } - - // Store the storage liveness for later use so we can restore the state - // after a suspension point - storage_live.seek_before_primary_effect(loc); - storage_liveness_map[block] = Some(storage_live.get().clone()); - - // Locals live are live at this point only if they are used across - // suspension points (the `liveness` variable) - // and their storage is required (the `storage_required` variable) - requires_storage_cursor.seek_before_primary_effect(loc); - live_locals.intersect(requires_storage_cursor.get()); - - // The generator argument is ignored. - live_locals.remove(SELF_ARG); - - debug!("loc = {:?}, live_locals = {:?}", loc, live_locals); - - // Add the locals live at this suspension point to the set of locals which live across - // any suspension points - live_locals_at_any_suspension_point.union(&live_locals); - - live_locals_at_suspension_points.push(live_locals); - source_info_at_suspension_points.push(data.terminator().source_info); - } - } - - debug!("live_locals_anywhere = {:?}", live_locals_at_any_suspension_point); - let saved_locals = GeneratorSavedLocals(live_locals_at_any_suspension_point); - - // Renumber our liveness_map bitsets to include only the locals we are - // saving. - let live_locals_at_suspension_points = live_locals_at_suspension_points - .iter() - .map(|live_here| saved_locals.renumber_bitset(&live_here)) - .collect(); - - let storage_conflicts = compute_storage_conflicts( - body_ref, - &saved_locals, - always_live_locals.clone(), - requires_storage_results, - ); - - LivenessInfo { - saved_locals, - live_locals_at_suspension_points, - source_info_at_suspension_points, - storage_conflicts, - storage_liveness: storage_liveness_map, - } -} - -/// The set of `Local`s that must be saved across yield points. -/// -/// `GeneratorSavedLocal` is indexed in terms of the elements in this set; -/// i.e. `GeneratorSavedLocal::new(1)` corresponds to the second local -/// included in this set. -struct GeneratorSavedLocals(BitSet); - -impl GeneratorSavedLocals { - /// Returns an iterator over each `GeneratorSavedLocal` along with the `Local` it corresponds - /// to. - fn iter_enumerated(&self) -> impl '_ + Iterator { - self.iter().enumerate().map(|(i, l)| (GeneratorSavedLocal::from(i), l)) - } - - /// Transforms a `BitSet` that contains only locals saved across yield points to the - /// equivalent `BitSet`. 
- fn renumber_bitset(&self, input: &BitSet) -> BitSet { - assert!(self.superset(&input), "{:?} not a superset of {:?}", self.0, input); - let mut out = BitSet::new_empty(self.count()); - for (saved_local, local) in self.iter_enumerated() { - if input.contains(local) { - out.insert(saved_local); - } - } - out - } - - fn get(&self, local: Local) -> Option { - if !self.contains(local) { - return None; - } - - let idx = self.iter().take_while(|&l| l < local).count(); - Some(GeneratorSavedLocal::new(idx)) - } -} - -impl ops::Deref for GeneratorSavedLocals { - type Target = BitSet; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -/// For every saved local, looks for which locals are StorageLive at the same -/// time. Generates a bitset for every local of all the other locals that may be -/// StorageLive simultaneously with that local. This is used in the layout -/// computation; see `GeneratorLayout` for more. -fn compute_storage_conflicts<'mir, 'tcx>( - body: &'mir Body<'tcx>, - saved_locals: &GeneratorSavedLocals, - always_live_locals: BitSet, - mut requires_storage: rustc_mir_dataflow::Results<'tcx, MaybeRequiresStorage<'_, 'mir, 'tcx>>, -) -> BitMatrix { - assert_eq!(body.local_decls.len(), saved_locals.domain_size()); - - debug!("compute_storage_conflicts({:?})", body.span); - debug!("always_live = {:?}", always_live_locals); - - // Locals that are always live or ones that need to be stored across - // suspension points are not eligible for overlap. - let mut ineligible_locals = always_live_locals; - ineligible_locals.intersect(&**saved_locals); - - // Compute the storage conflicts for all eligible locals. - let mut visitor = StorageConflictVisitor { - body, - saved_locals: &saved_locals, - local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()), - }; - - requires_storage.visit_reachable_with(body, &mut visitor); - - let local_conflicts = visitor.local_conflicts; - - // Compress the matrix using only stored locals (Local -> GeneratorSavedLocal). - // - // NOTE: Today we store a full conflict bitset for every local. Technically - // this is twice as many bits as we need, since the relation is symmetric. - // However, in practice these bitsets are not usually large. The layout code - // also needs to keep track of how many conflicts each local has, so it's - // simpler to keep it this way for now. - let mut storage_conflicts = BitMatrix::new(saved_locals.count(), saved_locals.count()); - for (saved_local_a, local_a) in saved_locals.iter_enumerated() { - if ineligible_locals.contains(local_a) { - // Conflicts with everything. - storage_conflicts.insert_all_into_row(saved_local_a); - } else { - // Keep overlap information only for stored locals. - for (saved_local_b, local_b) in saved_locals.iter_enumerated() { - if local_conflicts.contains(local_a, local_b) { - storage_conflicts.insert(saved_local_a, saved_local_b); - } - } - } - } - storage_conflicts -} - -struct StorageConflictVisitor<'mir, 'tcx, 's> { - body: &'mir Body<'tcx>, - saved_locals: &'s GeneratorSavedLocals, - // FIXME(tmandry): Consider using sparse bitsets here once we have good - // benchmarks for generators. 
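// Editor's sketch (not part of the patch): the renumbering that
// `GeneratorSavedLocals::renumber_bitset` performs, acted out on plain slices
// instead of `BitSet`s. The local numbers are hypothetical.
fn renumber(saved: &[usize], live_here: &[usize]) -> Vec<usize> {
    // `saved` lists, in increasing order, the locals live across *any* yield.
    // Each live local is mapped to its position inside `saved`, i.e. to its
    // saved-local index.
    live_here
        .iter()
        .map(|local| {
            saved.iter().position(|&s| s == *local).expect("live set must be a subset of saved")
        })
        .collect()
}

fn main() {
    let saved = [2, 5, 9];  // _2, _5 and _9 are saved across yields
    let live_here = [2, 9]; // locals live at one particular yield
    assert_eq!(renumber(&saved, &live_here), vec![0, 2]);
}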
- local_conflicts: BitMatrix, -} - -impl<'mir, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'mir, 'tcx, R> - for StorageConflictVisitor<'mir, 'tcx, '_> -{ - type FlowState = BitSet; - - fn visit_statement_before_primary_effect( - &mut self, - _results: &mut R, - state: &Self::FlowState, - _statement: &'mir Statement<'tcx>, - loc: Location, - ) { - self.apply_state(state, loc); - } - - fn visit_terminator_before_primary_effect( - &mut self, - _results: &mut R, - state: &Self::FlowState, - _terminator: &'mir Terminator<'tcx>, - loc: Location, - ) { - self.apply_state(state, loc); - } -} - -impl StorageConflictVisitor<'_, '_, '_> { - fn apply_state(&mut self, flow_state: &BitSet, loc: Location) { - // Ignore unreachable blocks. - if self.body.basic_blocks[loc.block].terminator().kind == TerminatorKind::Unreachable { - return; - } - - let mut eligible_storage_live = flow_state.clone(); - eligible_storage_live.intersect(&**self.saved_locals); - - for local in eligible_storage_live.iter() { - self.local_conflicts.union_row_with(&eligible_storage_live, local); - } - - if eligible_storage_live.count() > 1 { - trace!("at {:?}, eligible_storage_live={:?}", loc, eligible_storage_live); - } - } -} - -fn compute_layout<'tcx>( - liveness: LivenessInfo, - body: &Body<'tcx>, -) -> ( - FxHashMap, VariantIdx, FieldIdx)>, - GeneratorLayout<'tcx>, - IndexVec>>, -) { - let LivenessInfo { - saved_locals, - live_locals_at_suspension_points, - source_info_at_suspension_points, - storage_conflicts, - storage_liveness, - } = liveness; - - // Gather live local types and their indices. - let mut locals = IndexVec::::new(); - let mut tys = IndexVec::::new(); - for (saved_local, local) in saved_locals.iter_enumerated() { - debug!("generator saved local {:?} => {:?}", saved_local, local); - - locals.push(local); - let decl = &body.local_decls[local]; - debug!(?decl); - - // Do not `assert_crate_local` here, as post-borrowck cleanup may have already cleared - // the information. This is alright, since `ignore_for_traits` is only relevant when - // this code runs on pre-cleanup MIR, and `ignore_for_traits = false` is the safer - // default. - let ignore_for_traits = match decl.local_info { - // Do not include raw pointers created from accessing `static` items, as those could - // well be re-created by another access to the same static. - ClearCrossCrate::Set(box LocalInfo::StaticRef { is_thread_local, .. }) => { - !is_thread_local - } - // Fake borrows are only read by fake reads, so do not have any reality in - // post-analysis MIR. - ClearCrossCrate::Set(box LocalInfo::FakeBorrow) => true, - _ => false, - }; - let decl = - GeneratorSavedTy { ty: decl.ty, source_info: decl.source_info, ignore_for_traits }; - debug!(?decl); - - tys.push(decl); - } - - // Leave empty variants for the UNRESUMED, RETURNED, and POISONED states. - // In debuginfo, these will correspond to the beginning (UNRESUMED) or end - // (RETURNED, POISONED) of the function. - let body_span = body.source_scopes[OUTERMOST_SOURCE_SCOPE].span; - let mut variant_source_info: IndexVec = [ - SourceInfo::outermost(body_span.shrink_to_lo()), - SourceInfo::outermost(body_span.shrink_to_hi()), - SourceInfo::outermost(body_span.shrink_to_hi()), - ] - .iter() - .copied() - .collect(); - - // Build the generator variant field list. - // Create a map from local indices to generator struct indices. 
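// Editor's sketch (not part of the patch): how `StorageConflictVisitor`
// accumulates the conflict relation. At every program point, each pair of
// saved locals whose storage is live at the same time is marked as
// conflicting, as `union_row_with` does on the real `BitMatrix`. A nested Vec
// stands in for the matrix; the indices are hypothetical saved-local numbers.
fn record_conflicts(matrix: &mut [Vec<bool>], live_now: &[usize]) {
    for &a in live_now {
        for &b in live_now {
            matrix[a][b] = true;
        }
    }
}

fn main() {
    let mut conflicts = vec![vec![false; 3]; 3];
    record_conflicts(&mut conflicts, &[0, 2]); // saved locals 0 and 2 overlap here
    record_conflicts(&mut conflicts, &[1]);    // saved local 1 is alone here
    assert!(conflicts[0][2] && conflicts[2][0]);
    // 0 and 1 never overlap, so the layout may give them the same slot.
    assert!(!conflicts[0][1]);
}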
- let mut variant_fields: IndexVec> = - iter::repeat(IndexVec::new()).take(RESERVED_VARIANTS).collect(); - let mut remap = FxHashMap::default(); - for (suspension_point_idx, live_locals) in live_locals_at_suspension_points.iter().enumerate() { - let variant_index = VariantIdx::from(RESERVED_VARIANTS + suspension_point_idx); - let mut fields = IndexVec::new(); - for (idx, saved_local) in live_locals.iter().enumerate() { - fields.push(saved_local); - // Note that if a field is included in multiple variants, we will - // just use the first one here. That's fine; fields do not move - // around inside generators, so it doesn't matter which variant - // index we access them by. - let idx = FieldIdx::from_usize(idx); - remap.entry(locals[saved_local]).or_insert((tys[saved_local].ty, variant_index, idx)); - } - variant_fields.push(fields); - variant_source_info.push(source_info_at_suspension_points[suspension_point_idx]); - } - debug!("generator variant_fields = {:?}", variant_fields); - debug!("generator storage_conflicts = {:#?}", storage_conflicts); - - let mut field_names = IndexVec::from_elem(None, &tys); - for var in &body.var_debug_info { - let VarDebugInfoContents::Place(place) = &var.value else { continue }; - let Some(local) = place.as_local() else { continue }; - let Some(&(_, variant, field)) = remap.get(&local) else { continue }; - - let saved_local = variant_fields[variant][field]; - field_names.get_or_insert_with(saved_local, || var.name); - } - - let layout = GeneratorLayout { - field_tys: tys, - field_names, - variant_fields, - variant_source_info, - storage_conflicts, - }; - debug!(?layout); - - (remap, layout, storage_liveness) -} - -/// Replaces the entry point of `body` with a block that switches on the generator discriminant and -/// dispatches to blocks according to `cases`. -/// -/// After this function, the former entry point of the function will be bb1. -fn insert_switch<'tcx>( - body: &mut Body<'tcx>, - cases: Vec<(usize, BasicBlock)>, - transform: &TransformVisitor<'tcx>, - default: TerminatorKind<'tcx>, -) { - let default_block = insert_term_block(body, default); - let (assign, discr) = transform.get_discr(body); - let switch_targets = - SwitchTargets::new(cases.iter().map(|(i, bb)| ((*i) as u128, *bb)), default_block); - let switch = TerminatorKind::SwitchInt { discr: Operand::Move(discr), targets: switch_targets }; - - let source_info = SourceInfo::outermost(body.span); - body.basic_blocks_mut().raw.insert( - 0, - BasicBlockData { - statements: vec![assign], - terminator: Some(Terminator { source_info, kind: switch }), - is_cleanup: false, - }, - ); - - let blocks = body.basic_blocks_mut().iter_mut(); - - for target in blocks.flat_map(|b| b.terminator_mut().successors_mut()) { - *target = BasicBlock::new(target.index() + 1); - } -} - -fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - use crate::shim::DropShimElaborator; - use rustc_middle::mir::patch::MirPatch; - use rustc_mir_dataflow::elaborate_drops::{elaborate_drop, Unwind}; - - // Note that `elaborate_drops` only drops the upvars of a generator, and - // this is ok because `open_drop` can only be reached within that own - // generator's resume function. 
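// Editor's sketch (not part of the patch): the shape of the state machine
// that `compute_layout` describes, written as a surface enum. The first three
// variants are the reserved UNRESUMED/RETURNED/POISONED states; every
// suspension point then gets its own variant holding its saved locals. All
// names and field types here are hypothetical.
enum ToyLayout {
    Unresumed { upvar: String }, // variant 0: only upvars exist yet
    Returned,                    // variant 1
    Poisoned,                    // variant 2
    Suspend0 { a: u32 },         // one variant per `yield`
    Suspend1 { a: u32, b: Vec<u8> },
}

fn main() {
    // A local live across both yields (here `a`) appears in both suspend
    // variants; `remap` records the (type, variant, field) it was assigned.
    let state = ToyLayout::Suspend1 { a: 7, b: vec![1, 2, 3] };
    match state {
        ToyLayout::Suspend1 { a, b } => assert_eq!((a, b.len()), (7, 3)),
        _ => unreachable!(),
    }
}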
- - let def_id = body.source.def_id(); - let param_env = tcx.param_env(def_id); - - let mut elaborator = DropShimElaborator { body, patch: MirPatch::new(body), tcx, param_env }; - - for (block, block_data) in body.basic_blocks.iter_enumerated() { - let (target, unwind, source_info) = match block_data.terminator() { - Terminator { - source_info, - kind: TerminatorKind::Drop { place, target, unwind, replace: _ }, - } => { - if let Some(local) = place.as_local() { - if local == SELF_ARG { - (target, unwind, source_info) - } else { - continue; - } - } else { - continue; - } - } - _ => continue, - }; - let unwind = if block_data.is_cleanup { - Unwind::InCleanup - } else { - Unwind::To(match *unwind { - UnwindAction::Cleanup(tgt) => tgt, - UnwindAction::Continue => elaborator.patch.resume_block(), - UnwindAction::Unreachable => elaborator.patch.unreachable_cleanup_block(), - UnwindAction::Terminate(reason) => elaborator.patch.terminate_block(reason), - }) - }; - elaborate_drop( - &mut elaborator, - *source_info, - Place::from(SELF_ARG), - (), - *target, - unwind, - block, - ); - } - elaborator.patch.apply(body); -} - -fn create_generator_drop_shim<'tcx>( - tcx: TyCtxt<'tcx>, - transform: &TransformVisitor<'tcx>, - gen_ty: Ty<'tcx>, - body: &mut Body<'tcx>, - drop_clean: BasicBlock, -) -> Body<'tcx> { - let mut body = body.clone(); - body.arg_count = 1; // make sure the resume argument is not included here - - let source_info = SourceInfo::outermost(body.span); - - let mut cases = create_cases(&mut body, transform, Operation::Drop); - - cases.insert(0, (UNRESUMED, drop_clean)); - - // The returned state and the poisoned state fall through to the default - // case which is just to return - - insert_switch(&mut body, cases, &transform, TerminatorKind::Return); - - for block in body.basic_blocks_mut() { - let kind = &mut block.terminator_mut().kind; - if let TerminatorKind::GeneratorDrop = *kind { - *kind = TerminatorKind::Return; - } - } - - // Replace the return variable - body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(Ty::new_unit(tcx), source_info); - - make_generator_state_argument_indirect(tcx, &mut body); - - // Change the generator argument from &mut to *mut - body.local_decls[SELF_ARG] = LocalDecl::with_source_info( - Ty::new_ptr(tcx, ty::TypeAndMut { ty: gen_ty, mutbl: hir::Mutability::Mut }), - source_info, - ); - - // Make sure we remove dead blocks to remove - // unrelated code from the resume part of the function - simplify::remove_dead_blocks(tcx, &mut body); - - // Update the body's def to become the drop glue. - // This needs to be updated before the AbortUnwindingCalls pass. - let gen_instance = body.source.instance; - let drop_in_place = tcx.require_lang_item(LangItem::DropInPlace, None); - let drop_instance = InstanceDef::DropGlue(drop_in_place, Some(gen_ty)); - body.source.instance = drop_instance; - - pm::run_passes_no_validate( - tcx, - &mut body, - &[&abort_unwinding_calls::AbortUnwindingCalls], - None, - ); - - // Temporary change MirSource to generator's instance so that dump_mir produces more sensible - // filename. 
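// Editor's sketch (not part of the patch): what the generated drop shim
// amounts to at the source level. Dropping the state machine only has to
// destroy what the current variant holds (just the upvars while UNRESUMED);
// the returned and poisoned states fall through to a plain return, mirroring
// the switch built by `insert_switch`. Names here are hypothetical.
enum ToyState { Unresumed { upvar: String }, Suspend0 { saved: Vec<u8> }, Returned, Poisoned }

fn drop_shim(state: &mut Option<ToyState>) {
    // `take` + `drop` stands in for the drop ladder that
    // `elaborate_generator_drops` expands; each arm drops only its own fields.
    match state.take() {
        Some(ToyState::Unresumed { upvar }) => drop(upvar),
        Some(ToyState::Suspend0 { saved }) => drop(saved),
        Some(ToyState::Returned) | Some(ToyState::Poisoned) | None => {}
    }
}

fn main() {
    let mut st = Some(ToyState::Unresumed { upvar: String::from("captured") });
    drop_shim(&mut st);
    assert!(st.is_none());
}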
- body.source.instance = gen_instance; - dump_mir(tcx, false, "generator_drop", &0, &body, |_, _| Ok(())); - body.source.instance = drop_instance; - - body -} - -fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock { - let source_info = SourceInfo::outermost(body.span); - body.basic_blocks_mut().push(BasicBlockData { - statements: Vec::new(), - terminator: Some(Terminator { source_info, kind }), - is_cleanup: false, - }) -} - -fn insert_panic_block<'tcx>( - tcx: TyCtxt<'tcx>, - body: &mut Body<'tcx>, - message: AssertMessage<'tcx>, -) -> BasicBlock { - let assert_block = BasicBlock::new(body.basic_blocks.len()); - let term = TerminatorKind::Assert { - cond: Operand::Constant(Box::new(ConstOperand { - span: body.span, - user_ty: None, - const_: Const::from_bool(tcx, false), - })), - expected: true, - msg: Box::new(message), - target: assert_block, - unwind: UnwindAction::Continue, - }; - - let source_info = SourceInfo::outermost(body.span); - body.basic_blocks_mut().push(BasicBlockData { - statements: Vec::new(), - terminator: Some(Terminator { source_info, kind: term }), - is_cleanup: false, - }); - - assert_block -} - -fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool { - // Returning from a function with an uninhabited return type is undefined behavior. - if body.return_ty().is_privately_uninhabited(tcx, param_env) { - return false; - } - - // If there's a return terminator the function may return. - for block in body.basic_blocks.iter() { - if let TerminatorKind::Return = block.terminator().kind { - return true; - } - } - - // Otherwise the function can't return. - false -} - -fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool { - // Nothing can unwind when landing pads are off. - if tcx.sess.panic_strategy() == PanicStrategy::Abort { - return false; - } - - // Unwinds can only start at certain terminators. - for block in body.basic_blocks.iter() { - match block.terminator().kind { - // These never unwind. - TerminatorKind::Goto { .. } - | TerminatorKind::SwitchInt { .. } - | TerminatorKind::UnwindTerminate(_) - | TerminatorKind::Return - | TerminatorKind::Unreachable - | TerminatorKind::GeneratorDrop - | TerminatorKind::FalseEdge { .. } - | TerminatorKind::FalseUnwind { .. } => {} - - // Resume will *continue* unwinding, but if there's no other unwinding terminator it - // will never be reached. - TerminatorKind::UnwindResume => {} - - TerminatorKind::Yield { .. } => { - unreachable!("`can_unwind` called before generator transform") - } - - // These may unwind. - TerminatorKind::Drop { .. } - | TerminatorKind::Call { .. } - | TerminatorKind::InlineAsm { .. } - | TerminatorKind::Assert { .. } => return true, - } - } - - // If we didn't find an unwinding terminator, the function cannot unwind. 
- false -} - -fn create_generator_resume_function<'tcx>( - tcx: TyCtxt<'tcx>, - transform: TransformVisitor<'tcx>, - body: &mut Body<'tcx>, - can_return: bool, -) { - let can_unwind = can_unwind(tcx, body); - - // Poison the generator when it unwinds - if can_unwind { - let source_info = SourceInfo::outermost(body.span); - let poison_block = body.basic_blocks_mut().push(BasicBlockData { - statements: vec![transform.set_discr(VariantIdx::new(POISONED), source_info)], - terminator: Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }), - is_cleanup: true, - }); - - for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() { - let source_info = block.terminator().source_info; - - if let TerminatorKind::UnwindResume = block.terminator().kind { - // An existing `Resume` terminator is redirected to jump to our dedicated - // "poisoning block" above. - if idx != poison_block { - *block.terminator_mut() = Terminator { - source_info, - kind: TerminatorKind::Goto { target: poison_block }, - }; - } - } else if !block.is_cleanup { - // Any terminators that *can* unwind but don't have an unwind target set are also - // pointed at our poisoning block (unless they're part of the cleanup path). - if let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut() { - *unwind = UnwindAction::Cleanup(poison_block); - } - } - } - } - - let mut cases = create_cases(body, &transform, Operation::Resume); - - use rustc_middle::mir::AssertKind::{ResumedAfterPanic, ResumedAfterReturn}; - - // Jump to the entry point on the unresumed - cases.insert(0, (UNRESUMED, START_BLOCK)); - - // Panic when resumed on the returned or poisoned state - let generator_kind = body.generator_kind().unwrap(); - - if can_unwind { - cases.insert( - 1, - (POISONED, insert_panic_block(tcx, body, ResumedAfterPanic(generator_kind))), - ); - } - - if can_return { - cases.insert( - 1, - (RETURNED, insert_panic_block(tcx, body, ResumedAfterReturn(generator_kind))), - ); - } - - insert_switch(body, cases, &transform, TerminatorKind::Unreachable); - - make_generator_state_argument_indirect(tcx, body); - make_generator_state_argument_pinned(tcx, body); - - // Make sure we remove dead blocks to remove - // unrelated code from the drop part of the function - simplify::remove_dead_blocks(tcx, body); - - pm::run_passes_no_validate(tcx, body, &[&abort_unwinding_calls::AbortUnwindingCalls], None); - - dump_mir(tcx, false, "generator_resume", &0, body, |_, _| Ok(())); -} - -fn insert_clean_drop(body: &mut Body<'_>) -> BasicBlock { - let return_block = insert_term_block(body, TerminatorKind::Return); - - let term = TerminatorKind::Drop { - place: Place::from(SELF_ARG), - target: return_block, - unwind: UnwindAction::Continue, - replace: false, - }; - let source_info = SourceInfo::outermost(body.span); - - // Create a block to destroy an unresumed generators. This can only destroy upvars. - body.basic_blocks_mut().push(BasicBlockData { - statements: Vec::new(), - terminator: Some(Terminator { source_info, kind: term }), - is_cleanup: false, - }) -} - -/// An operation that can be performed on a generator. 
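// Editor's sketch (not part of the patch): the dispatch that `insert_switch`
// places at the entry of the resume function, written as surface Rust. The
// state numbers follow the constants used by the pass (UNRESUMED = 0,
// RETURNED = 1, POISONED = 2, then one state per suspension point); the panic
// arms correspond to `insert_panic_block`. Everything else is hypothetical.
enum Step { Yielded(u32), Complete(&'static str) }

struct Toy { state: u32, saved: u32 }

impl Toy {
    fn resume(&mut self) -> Step {
        match self.state {
            0 => { // UNRESUMED: run up to the first yield
                self.saved = 40;
                self.state = 3;
                Step::Yielded(self.saved)
            }
            1 => panic!("generator resumed after completion"),
            2 => panic!("generator resumed after panicking"),
            3 => { // first (and only) suspend state
                self.state = 1;
                Step::Complete("done")
            }
            _ => unreachable!(),
        }
    }
}

fn main() {
    let mut g = Toy { state: 0, saved: 0 };
    assert!(matches!(g.resume(), Step::Yielded(40)));
    assert!(matches!(g.resume(), Step::Complete("done")));
}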
-#[derive(PartialEq, Copy, Clone)] -enum Operation { - Resume, - Drop, -} - -impl Operation { - fn target_block(self, point: &SuspensionPoint<'_>) -> Option { - match self { - Operation::Resume => Some(point.resume), - Operation::Drop => point.drop, - } - } -} - -fn create_cases<'tcx>( - body: &mut Body<'tcx>, - transform: &TransformVisitor<'tcx>, - operation: Operation, -) -> Vec<(usize, BasicBlock)> { - let source_info = SourceInfo::outermost(body.span); - - transform - .suspension_points - .iter() - .filter_map(|point| { - // Find the target for this suspension point, if applicable - operation.target_block(point).map(|target| { - let mut statements = Vec::new(); - - // Create StorageLive instructions for locals with live storage - for i in 0..(body.local_decls.len()) { - if i == 2 { - // The resume argument is live on function entry. Don't insert a - // `StorageLive`, or the following `Assign` will read from uninitialized - // memory. - continue; - } - - let l = Local::new(i); - let needs_storage_live = point.storage_liveness.contains(l) - && !transform.remap.contains_key(&l) - && !transform.always_live_locals.contains(l); - if needs_storage_live { - statements - .push(Statement { source_info, kind: StatementKind::StorageLive(l) }); - } - } - - if operation == Operation::Resume { - // Move the resume argument to the destination place of the `Yield` terminator - let resume_arg = Local::new(2); // 0 = return, 1 = self - statements.push(Statement { - source_info, - kind: StatementKind::Assign(Box::new(( - point.resume_arg, - Rvalue::Use(Operand::Move(resume_arg.into())), - ))), - }); - } - - // Then jump to the real target - let block = body.basic_blocks_mut().push(BasicBlockData { - statements, - terminator: Some(Terminator { - source_info, - kind: TerminatorKind::Goto { target }, - }), - is_cleanup: false, - }); - - (point.state, block) - }) - }) - .collect() -} - -#[instrument(level = "debug", skip(tcx), ret)] -pub(crate) fn mir_generator_witnesses<'tcx>( - tcx: TyCtxt<'tcx>, - def_id: LocalDefId, -) -> Option> { - let (body, _) = tcx.mir_promoted(def_id); - let body = body.borrow(); - let body = &*body; - - // The first argument is the generator type passed by value - let gen_ty = body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty; - - let movable = match *gen_ty.kind() { - ty::Generator(_, _, movability) => movability == hir::Movability::Movable, - ty::Error(_) => return None, - _ => span_bug!(body.span, "unexpected generator type {}", gen_ty), - }; - - // The witness simply contains all locals live across suspend points. 
- - let always_live_locals = always_storage_live_locals(&body); - let liveness_info = locals_live_across_suspend_points(tcx, body, &always_live_locals, movable); - - // Extract locals which are live across suspension point into `layout` - // `remap` gives a mapping from local indices onto generator struct indices - // `storage_liveness` tells us which locals have live storage at suspension points - let (_, generator_layout, _) = compute_layout(liveness_info, body); - - check_suspend_tys(tcx, &generator_layout, &body); - - Some(generator_layout) -} - -impl<'tcx> MirPass<'tcx> for StateTransform { - fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - let Some(yield_ty) = body.yield_ty() else { - // This only applies to generators - return; - }; - - assert!(body.generator_drop().is_none()); - - // The first argument is the generator type passed by value - let gen_ty = body.local_decls.raw[1].ty; - - // Get the discriminant type and args which typeck computed - let (discr_ty, movable) = match *gen_ty.kind() { - ty::Generator(_, args, movability) => { - let args = args.as_generator(); - (args.discr_ty(tcx), movability == hir::Movability::Movable) - } - _ => { - tcx.sess.delay_span_bug(body.span, format!("unexpected generator type {gen_ty}")); - return; - } - }; - - let is_async_kind = matches!(body.generator_kind(), Some(GeneratorKind::Async(_))); - let (state_adt_ref, state_args) = if is_async_kind { - // Compute Poll - let poll_did = tcx.require_lang_item(LangItem::Poll, None); - let poll_adt_ref = tcx.adt_def(poll_did); - let poll_args = tcx.mk_args(&[body.return_ty().into()]); - (poll_adt_ref, poll_args) - } else { - // Compute GeneratorState - let state_did = tcx.require_lang_item(LangItem::GeneratorState, None); - let state_adt_ref = tcx.adt_def(state_did); - let state_args = tcx.mk_args(&[yield_ty.into(), body.return_ty().into()]); - (state_adt_ref, state_args) - }; - let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args); - - // We rename RETURN_PLACE which has type mir.return_ty to new_ret_local - // RETURN_PLACE then is a fresh unused local with type ret_ty. - let new_ret_local = replace_local(RETURN_PLACE, ret_ty, body, tcx); - - // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies. - if is_async_kind { - transform_async_context(tcx, body); - } - - // We also replace the resume argument and insert an `Assign`. - // This is needed because the resume argument `_2` might be live across a `yield`, in which - // case there is no `Assign` to it that the transform can turn into a store to the generator - // state. After the yield the slot in the generator state would then be uninitialized. - let resume_local = Local::new(2); - let resume_ty = if is_async_kind { - Ty::new_task_context(tcx) - } else { - body.local_decls[resume_local].ty - }; - let new_resume_local = replace_local(resume_local, resume_ty, body, tcx); - - // When first entering the generator, move the resume argument into its new local. 
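// Editor's sketch (not part of the patch): why the resume argument needs its
// own saved slot and the entry-block move inserted above. In this hypothetical
// surface analogue, the value received on the first resume is only used after
// the next suspension, so it must be stored in the state.
enum State { Start, AwaitingAge { name: String }, Done }

struct Greeter { state: State }

impl Greeter {
    // `resume(arg)` analogue: each call supplies the next input.
    fn resume(&mut self, arg: &str) -> Option<String> {
        match std::mem::replace(&mut self.state, State::Done) {
            State::Start => {
                // Store the resume argument across the suspension point.
                self.state = State::AwaitingAge { name: arg.to_string() };
                None // "yield"
            }
            State::AwaitingAge { name } => Some(format!("{name} is {arg}")),
            State::Done => panic!("resumed after completion"),
        }
    }
}

fn main() {
    let mut g = Greeter { state: State::Start };
    assert_eq!(g.resume("Ada"), None);
    assert_eq!(g.resume("36"), Some("Ada is 36".to_string()));
}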
- let source_info = SourceInfo::outermost(body.span); - let stmts = &mut body.basic_blocks_mut()[START_BLOCK].statements; - stmts.insert( - 0, - Statement { - source_info, - kind: StatementKind::Assign(Box::new(( - new_resume_local.into(), - Rvalue::Use(Operand::Move(resume_local.into())), - ))), - }, - ); - - let always_live_locals = always_storage_live_locals(&body); - - let liveness_info = - locals_live_across_suspend_points(tcx, body, &always_live_locals, movable); - - if tcx.sess.opts.unstable_opts.validate_mir { - let mut vis = EnsureGeneratorFieldAssignmentsNeverAlias { - assigned_local: None, - saved_locals: &liveness_info.saved_locals, - storage_conflicts: &liveness_info.storage_conflicts, - }; - - vis.visit_body(body); - } - - // Extract locals which are live across suspension point into `layout` - // `remap` gives a mapping from local indices onto generator struct indices - // `storage_liveness` tells us which locals have live storage at suspension points - let (remap, layout, storage_liveness) = compute_layout(liveness_info, body); - - let can_return = can_return(tcx, body, tcx.param_env(body.source.def_id())); - - // Run the transformation which converts Places from Local to generator struct - // accesses for locals in `remap`. - // It also rewrites `return x` and `yield y` as writing a new generator state and returning - // either GeneratorState::Complete(x) and GeneratorState::Yielded(y), - // or Poll::Ready(x) and Poll::Pending respectively depending on `is_async_kind`. - let mut transform = TransformVisitor { - tcx, - is_async_kind, - state_adt_ref, - state_args, - remap, - storage_liveness, - always_live_locals, - suspension_points: Vec::new(), - new_ret_local, - discr_ty, - }; - transform.visit_body(body); - - // Update our MIR struct to reflect the changes we've made - body.arg_count = 2; // self, resume arg - body.spread_arg = None; - - // The original arguments to the function are no longer arguments, mark them as such. - // Otherwise they'll conflict with our new arguments, which although they don't have - // argument_index set, will get emitted as unnamed arguments. - for var in &mut body.var_debug_info { - var.argument_index = None; - } - - body.generator.as_mut().unwrap().yield_ty = None; - body.generator.as_mut().unwrap().generator_layout = Some(layout); - - // Insert `drop(generator_struct)` which is used to drop upvars for generators in - // the unresumed state. - // This is expanded to a drop ladder in `elaborate_generator_drops`. - let drop_clean = insert_clean_drop(body); - - dump_mir(tcx, false, "generator_pre-elab", &0, body, |_, _| Ok(())); - - // Expand `drop(generator_struct)` to a drop ladder which destroys upvars. - // If any upvars are moved out of, drop elaboration will handle upvar destruction. - // However we need to also elaborate the code generated by `insert_clean_drop`. 
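// Editor's sketch (not part of the patch): the two return types the transform
// can pick for the new RETURN_PLACE, depending on whether the body is an
// async generator. The enums below are local stand-ins for the `Poll` and
// `GeneratorState` lang items mentioned above.
enum GeneratorState<Y, R> { Yielded(Y), Complete(R) }
enum Poll<T> { Ready(T), Pending }

// `yield y` becomes a write of `Yielded(y)` / `Pending`, and `return x`
// becomes a write of `Complete(x)` / `Ready(x)`.
fn as_generator(yielded: u32) -> GeneratorState<u32, &'static str> {
    GeneratorState::Yielded(yielded)
}
fn as_future(done: bool) -> Poll<&'static str> {
    if done { Poll::Ready("finished") } else { Poll::Pending }
}

fn main() {
    assert!(matches!(as_generator(1), GeneratorState::Yielded(1)));
    assert!(matches!(as_future(false), Poll::Pending));
}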
- elaborate_generator_drops(tcx, body); - - dump_mir(tcx, false, "generator_post-transform", &0, body, |_, _| Ok(())); - - // Create a copy of our MIR and use it to create the drop shim for the generator - let drop_shim = create_generator_drop_shim(tcx, &transform, gen_ty, body, drop_clean); - - body.generator.as_mut().unwrap().generator_drop = Some(drop_shim); - - // Create the Generator::resume / Future::poll function - create_generator_resume_function(tcx, transform, body, can_return); - - // Run derefer to fix Derefs that are not in the first place - deref_finder(tcx, body); - } -} - -/// Looks for any assignments between locals (e.g., `_4 = _5`) that will both be converted to fields -/// in the generator state machine but whose storage is not marked as conflicting -/// -/// Validation needs to happen immediately *before* `TransformVisitor` is invoked, not after. -/// -/// This condition would arise when the assignment is the last use of `_5` but the initial -/// definition of `_4` if we weren't extra careful to mark all locals used inside a statement as -/// conflicting. Non-conflicting generator saved locals may be stored at the same location within -/// the generator state machine, which would result in ill-formed MIR: the left-hand and right-hand -/// sides of an assignment may not alias. This caused a miscompilation in [#73137]. -/// -/// [#73137]: https://github.com/rust-lang/rust/issues/73137 -struct EnsureGeneratorFieldAssignmentsNeverAlias<'a> { - saved_locals: &'a GeneratorSavedLocals, - storage_conflicts: &'a BitMatrix, - assigned_local: Option, -} - -impl EnsureGeneratorFieldAssignmentsNeverAlias<'_> { - fn saved_local_for_direct_place(&self, place: Place<'_>) -> Option { - if place.is_indirect() { - return None; - } - - self.saved_locals.get(place.local) - } - - fn check_assigned_place(&mut self, place: Place<'_>, f: impl FnOnce(&mut Self)) { - if let Some(assigned_local) = self.saved_local_for_direct_place(place) { - assert!(self.assigned_local.is_none(), "`check_assigned_place` must not recurse"); - - self.assigned_local = Some(assigned_local); - f(self); - self.assigned_local = None; - } - } -} - -impl<'tcx> Visitor<'tcx> for EnsureGeneratorFieldAssignmentsNeverAlias<'_> { - fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) { - let Some(lhs) = self.assigned_local else { - // This visitor only invokes `visit_place` for the right-hand side of an assignment - // and only after setting `self.assigned_local`. However, the default impl of - // `Visitor::super_body` may call `visit_place` with a `NonUseContext` for places - // with debuginfo. Ignore them here. - assert!(!context.is_use()); - return; - }; - - let Some(rhs) = self.saved_local_for_direct_place(*place) else { return }; - - if !self.storage_conflicts.contains(lhs, rhs) { - bug!( - "Assignment between generator saved locals whose storage is not \ - marked as conflicting: {:?}: {:?} = {:?}", - location, - lhs, - rhs, - ); - } - } - - fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { - match &statement.kind { - StatementKind::Assign(box (lhs, rhs)) => { - self.check_assigned_place(*lhs, |this| this.visit_rvalue(rhs, location)); - } - - StatementKind::FakeRead(..) - | StatementKind::SetDiscriminant { .. } - | StatementKind::Deinit(..) - | StatementKind::StorageLive(_) - | StatementKind::StorageDead(_) - | StatementKind::Retag(..) - | StatementKind::AscribeUserType(..) - | StatementKind::PlaceMention(..) - | StatementKind::Coverage(..) 
- | StatementKind::Intrinsic(..) - | StatementKind::ConstEvalCounter - | StatementKind::Nop => {} - } - } - - fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { - // Checking for aliasing in terminators is probably overkill, but until we have actual - // semantics, we should be conservative here. - match &terminator.kind { - TerminatorKind::Call { - func, - args, - destination, - target: Some(_), - unwind: _, - call_source: _, - fn_span: _, - } => { - self.check_assigned_place(*destination, |this| { - this.visit_operand(func, location); - for arg in args { - this.visit_operand(arg, location); - } - }); - } - - TerminatorKind::Yield { value, resume: _, resume_arg, drop: _ } => { - self.check_assigned_place(*resume_arg, |this| this.visit_operand(value, location)); - } - - // FIXME: Does `asm!` have any aliasing requirements? - TerminatorKind::InlineAsm { .. } => {} - - TerminatorKind::Call { .. } - | TerminatorKind::Goto { .. } - | TerminatorKind::SwitchInt { .. } - | TerminatorKind::UnwindResume - | TerminatorKind::UnwindTerminate(_) - | TerminatorKind::Return - | TerminatorKind::Unreachable - | TerminatorKind::Drop { .. } - | TerminatorKind::Assert { .. } - | TerminatorKind::GeneratorDrop - | TerminatorKind::FalseEdge { .. } - | TerminatorKind::FalseUnwind { .. } => {} - } - } -} - -fn check_suspend_tys<'tcx>(tcx: TyCtxt<'tcx>, layout: &GeneratorLayout<'tcx>, body: &Body<'tcx>) { - let mut linted_tys = FxHashSet::default(); - - // We want a user-facing param-env. - let param_env = tcx.param_env(body.source.def_id()); - - for (variant, yield_source_info) in - layout.variant_fields.iter().zip(&layout.variant_source_info) - { - debug!(?variant); - for &local in variant { - let decl = &layout.field_tys[local]; - debug!(?decl); - - if !decl.ignore_for_traits && linted_tys.insert(decl.ty) { - let Some(hir_id) = decl.source_info.scope.lint_root(&body.source_scopes) else { - continue; - }; - - check_must_not_suspend_ty( - tcx, - decl.ty, - hir_id, - param_env, - SuspendCheckData { - source_span: decl.source_info.span, - yield_span: yield_source_info.span, - plural_len: 1, - ..Default::default() - }, - ); - } - } - } -} - -#[derive(Default)] -struct SuspendCheckData<'a> { - source_span: Span, - yield_span: Span, - descr_pre: &'a str, - descr_post: &'a str, - plural_len: usize, -} - -// Returns whether it emitted a diagnostic or not -// Note that this fn and the proceeding one are based on the code -// for creating must_use diagnostics -// -// Note that this technique was chosen over things like a `Suspend` marker trait -// as it is simpler and has precedent in the compiler -fn check_must_not_suspend_ty<'tcx>( - tcx: TyCtxt<'tcx>, - ty: Ty<'tcx>, - hir_id: hir::HirId, - param_env: ty::ParamEnv<'tcx>, - data: SuspendCheckData<'_>, -) -> bool { - if ty.is_unit() { - return false; - } - - let plural_suffix = pluralize!(data.plural_len); - - debug!("Checking must_not_suspend for {}", ty); - - match *ty.kind() { - ty::Adt(..) if ty.is_box() => { - let boxed_ty = ty.boxed_ty(); - let descr_pre = &format!("{}boxed ", data.descr_pre); - check_must_not_suspend_ty( - tcx, - boxed_ty, - hir_id, - param_env, - SuspendCheckData { descr_pre, ..data }, - ) - } - ty::Adt(def, _) => check_must_not_suspend_def(tcx, def.did(), hir_id, data), - // FIXME: support adding the attribute to TAITs - ty::Alias(ty::Opaque, ty::AliasTy { def_id: def, .. 
}) => { - let mut has_emitted = false; - for &(predicate, _) in tcx.explicit_item_bounds(def).skip_binder() { - // We only look at the `DefId`, so it is safe to skip the binder here. - if let ty::ClauseKind::Trait(ref poly_trait_predicate) = - predicate.kind().skip_binder() - { - let def_id = poly_trait_predicate.trait_ref.def_id; - let descr_pre = &format!("{}implementer{} of ", data.descr_pre, plural_suffix); - if check_must_not_suspend_def( - tcx, - def_id, - hir_id, - SuspendCheckData { descr_pre, ..data }, - ) { - has_emitted = true; - break; - } - } - } - has_emitted - } - ty::Dynamic(binder, _, _) => { - let mut has_emitted = false; - for predicate in binder.iter() { - if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder() { - let def_id = trait_ref.def_id; - let descr_post = &format!(" trait object{}{}", plural_suffix, data.descr_post); - if check_must_not_suspend_def( - tcx, - def_id, - hir_id, - SuspendCheckData { descr_post, ..data }, - ) { - has_emitted = true; - break; - } - } - } - has_emitted - } - ty::Tuple(fields) => { - let mut has_emitted = false; - for (i, ty) in fields.iter().enumerate() { - let descr_post = &format!(" in tuple element {i}"); - if check_must_not_suspend_ty( - tcx, - ty, - hir_id, - param_env, - SuspendCheckData { descr_post, ..data }, - ) { - has_emitted = true; - } - } - has_emitted - } - ty::Array(ty, len) => { - let descr_pre = &format!("{}array{} of ", data.descr_pre, plural_suffix); - check_must_not_suspend_ty( - tcx, - ty, - hir_id, - param_env, - SuspendCheckData { - descr_pre, - plural_len: len.try_eval_target_usize(tcx, param_env).unwrap_or(0) as usize + 1, - ..data - }, - ) - } - // If drop tracking is enabled, we want to look through references, since the referent - // may not be considered live across the await point. - ty::Ref(_region, ty, _mutability) => { - let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix); - check_must_not_suspend_ty( - tcx, - ty, - hir_id, - param_env, - SuspendCheckData { descr_pre, ..data }, - ) - } - _ => false, - } -} - -fn check_must_not_suspend_def( - tcx: TyCtxt<'_>, - def_id: DefId, - hir_id: hir::HirId, - data: SuspendCheckData<'_>, -) -> bool { - if let Some(attr) = tcx.get_attr(def_id, sym::must_not_suspend) { - let reason = attr.value_str().map(|s| errors::MustNotSuspendReason { - span: data.source_span, - reason: s.as_str().to_string(), - }); - tcx.emit_spanned_lint( - rustc_session::lint::builtin::MUST_NOT_SUSPEND, - hir_id, - data.source_span, - errors::MustNotSupend { - tcx, - yield_sp: data.yield_span, - reason, - src_sp: data.source_span, - pre: data.descr_pre, - def_id, - post: data.descr_post, - }, - ); - - true - } else { - false - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/gvn.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/gvn.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/gvn.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/gvn.rs 2023-12-21 16:55:28.000000000 +0000 @@ -52,19 +52,59 @@ //! _a = *_b // _b is &Freeze //! _c = *_b // replaced by _c = _a //! ``` +//! +//! # Determinism of constant propagation +//! +//! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant. +//! The evaluated form is inserted in `evaluated` as an `OpTy` or `None` if evaluation failed. +//! +//! The difficulty is non-deterministic evaluation of MIR constants. Some `Const` can have +//! 
different runtime values each time they are evaluated. This is the case with +//! `Const::Slice` which have a new pointer each time they are evaluated, and constants that +//! contain a fn pointer (`AllocId` pointing to a `GlobalAlloc::Function`) pointing to a different +//! symbol in each codegen unit. +//! +//! Meanwhile, we want to be able to read indirect constants. For instance: +//! ``` +//! static A: &'static &'static u8 = &&63; +//! fn foo() -> u8 { +//! **A // We want to replace by 63. +//! } +//! fn bar() -> u8 { +//! b"abc"[1] // We want to replace by 'b'. +//! } +//! ``` +//! +//! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant +//! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not +//! merge the constants. See `duplicate_slice` test in `gvn.rs`. +//! +//! Second, when writing constants in MIR, we do not write `Const::Slice` or `Const` +//! that contain `AllocId`s. +use rustc_const_eval::interpret::{intern_const_alloc_for_constprop, MemoryKind}; +use rustc_const_eval::interpret::{ImmTy, InterpCx, OpTy, Projectable, Scalar}; use rustc_data_structures::fx::{FxHashMap, FxIndexSet}; use rustc_data_structures::graph::dominators::Dominators; +use rustc_hir::def::DefKind; use rustc_index::bit_set::BitSet; use rustc_index::IndexVec; use rustc_macros::newtype_index; +use rustc_middle::mir::interpret::GlobalAlloc; use rustc_middle::mir::visit::*; use rustc_middle::mir::*; -use rustc_middle::ty::{self, Ty, TyCtxt}; -use rustc_target::abi::{VariantIdx, FIRST_VARIANT}; +use rustc_middle::ty::adjustment::PointerCoercion; +use rustc_middle::ty::layout::LayoutOf; +use rustc_middle::ty::{self, Ty, TyCtxt, TypeAndMut}; +use rustc_span::def_id::DefId; +use rustc_span::DUMMY_SP; +use rustc_target::abi::{self, Abi, Size, VariantIdx, FIRST_VARIANT}; +use std::borrow::Cow; -use crate::ssa::SsaLocals; +use crate::dataflow_const_prop::DummyMachine; +use crate::ssa::{AssignedValue, SsaLocals}; use crate::MirPass; +use either::Either; pub struct GVN; @@ -87,21 +127,28 @@ let dominators = body.basic_blocks.dominators().clone(); let mut state = VnState::new(tcx, param_env, &ssa, &dominators, &body.local_decls); - for arg in body.args_iter() { - if ssa.is_ssa(arg) { - let value = state.new_opaque().unwrap(); - state.assign(arg, value); - } - } - - ssa.for_each_assignment_mut(&mut body.basic_blocks, |local, rvalue, location| { - let value = state.simplify_rvalue(rvalue, location).or_else(|| state.new_opaque()).unwrap(); - // FIXME(#112651) `rvalue` may have a subtype to `local`. We can only mark `local` as - // reusable if we have an exact type match. - if state.local_decls[local].ty == rvalue.ty(state.local_decls, tcx) { + ssa.for_each_assignment_mut( + body.basic_blocks.as_mut_preserves_cfg(), + |local, value, location| { + let value = match value { + // We do not know anything of this assigned value. + AssignedValue::Arg | AssignedValue::Terminator(_) => None, + // Try to get some insight. + AssignedValue::Rvalue(rvalue) => { + let value = state.simplify_rvalue(rvalue, location); + // FIXME(#112651) `rvalue` may have a subtype to `local`. We can only mark `local` as + // reusable if we have an exact type match. + if state.local_decls[local].ty != rvalue.ty(state.local_decls, tcx) { + return; + } + value + } + }; + // `next_opaque` is `Some`, so `new_opaque` must return `Some`. 
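// Editor's sketch (not part of the patch): the value-numbering idea behind
// `Value::Constant { value, disambiguator }`. Deterministic constants share one
// value number; every mention of a non-deterministic constant gets a fresh
// disambiguator so it is never merged. The names and types are stand-ins for
// `VnIndex` and friends.
use std::collections::HashMap;

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct Key { constant: &'static str, disambiguator: usize }

struct Table { values: HashMap<Key, usize>, next_opaque: usize }

impl Table {
    fn insert_constant(&mut self, constant: &'static str, deterministic: bool) -> usize {
        let disambiguator = if deterministic {
            0
        } else {
            self.next_opaque += 1; // every mention becomes a distinct key
            self.next_opaque
        };
        let next = self.values.len();
        *self.values.entry(Key { constant, disambiguator }).or_insert(next)
    }
}

fn main() {
    let mut t = Table { values: HashMap::new(), next_opaque: 0 };
    // `42` is deterministic: both mentions get the same value number.
    assert_eq!(t.insert_constant("42", true), t.insert_constant("42", true));
    // A slice constant gets a new pointer each evaluation: never merged.
    assert_ne!(t.insert_constant("\"abc\"", false), t.insert_constant("\"abc\"", false));
}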
+ let value = value.or_else(|| state.new_opaque()).unwrap(); state.assign(local, value); - } - }); + }, + ); // Stop creating opaques during replacement as it is useless. state.next_opaque = None; @@ -111,22 +158,33 @@ let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb]; state.visit_basic_block_data(bb, data); } - let any_replacement = state.any_replacement; // For each local that is reused (`y` above), we remove its storage statements do avoid any // difficulty. Those locals are SSA, so should be easy to optimize by LLVM without storage // statements. StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body); - - if any_replacement { - crate::simplify::remove_unused_definitions(body); - } } newtype_index! { struct VnIndex {} } +/// Computing the aggregate's type can be quite slow, so we only keep the minimal amount of +/// information to reconstruct it when needed. +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +enum AggregateTy<'tcx> { + /// Invariant: this must not be used for an empty array. + Array, + Tuple, + Def(DefId, ty::GenericArgsRef<'tcx>), +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +enum AddressKind { + Ref(BorrowKind), + Address(Mutability), +} + #[derive(Debug, PartialEq, Eq, Hash)] enum Value<'tcx> { // Root values. @@ -134,15 +192,21 @@ /// The `usize` is a counter incremented by `new_opaque`. Opaque(usize), /// Evaluated or unevaluated constant value. - Constant(Const<'tcx>), + Constant { + value: Const<'tcx>, + /// Some constants do not have a deterministic value. To avoid merging two instances of the + /// same `Const`, we assign them an additional integer index. + disambiguator: usize, + }, /// An aggregate value, either tuple/closure/struct/enum. /// This does not contain unions, as we cannot reason with the value. - Aggregate(Ty<'tcx>, VariantIdx, Vec), + Aggregate(AggregateTy<'tcx>, VariantIdx, Vec), /// This corresponds to a `[value; count]` expression. Repeat(VnIndex, ty::Const<'tcx>), /// The address of a place. Address { place: Place<'tcx>, + kind: AddressKind, /// Give each borrow and pointer a different provenance, so we don't merge them. provenance: usize, }, @@ -170,6 +234,7 @@ struct VnState<'body, 'tcx> { tcx: TyCtxt<'tcx>, + ecx: InterpCx<'tcx, 'tcx, DummyMachine>, param_env: ty::ParamEnv<'tcx>, local_decls: &'body LocalDecls<'tcx>, /// Value stored in each local. @@ -177,13 +242,14 @@ /// First local to be assigned that value. rev_locals: FxHashMap>, values: FxIndexSet>, + /// Values evaluated as constants if possible. + evaluated: IndexVec>>, /// Counter to generate different values. /// This is an option to stop creating opaques during replacement. 
next_opaque: Option, ssa: &'body SsaLocals, dominators: &'body Dominators, reused_locals: BitSet, - any_replacement: bool, } impl<'body, 'tcx> VnState<'body, 'tcx> { @@ -196,23 +262,30 @@ ) -> Self { VnState { tcx, + ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine), param_env, local_decls, locals: IndexVec::from_elem(None, local_decls), rev_locals: FxHashMap::default(), values: FxIndexSet::default(), + evaluated: IndexVec::new(), next_opaque: Some(0), ssa, dominators, reused_locals: BitSet::new_empty(local_decls.len()), - any_replacement: false, } } #[instrument(level = "trace", skip(self), ret)] fn insert(&mut self, value: Value<'tcx>) -> VnIndex { - let (index, _) = self.values.insert_full(value); - VnIndex::from_usize(index) + let (index, new) = self.values.insert_full(value); + let index = VnIndex::from_usize(index); + if new { + let evaluated = self.eval_to_const(index); + let _index = self.evaluated.push(evaluated); + debug_assert_eq!(index, _index); + } + index } /// Create a new `Value` for which we have no information at all, except that it is distinct @@ -227,9 +300,9 @@ /// Create a new `Value::Address` distinct from all the others. #[instrument(level = "trace", skip(self), ret)] - fn new_pointer(&mut self, place: Place<'tcx>) -> Option { + fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> Option { let next_opaque = self.next_opaque.as_mut()?; - let value = Value::Address { place, provenance: *next_opaque }; + let value = Value::Address { place, kind, provenance: *next_opaque }; *next_opaque += 1; Some(self.insert(value)) } @@ -251,6 +324,343 @@ } } + fn insert_constant(&mut self, value: Const<'tcx>) -> Option { + let disambiguator = if value.is_deterministic() { + // The constant is deterministic, no need to disambiguate. + 0 + } else { + // Multiple mentions of this constant will yield different values, + // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`. + let next_opaque = self.next_opaque.as_mut()?; + let disambiguator = *next_opaque; + *next_opaque += 1; + disambiguator + }; + Some(self.insert(Value::Constant { value, disambiguator })) + } + + fn insert_scalar(&mut self, scalar: Scalar, ty: Ty<'tcx>) -> VnIndex { + self.insert_constant(Const::from_scalar(self.tcx, scalar, ty)) + .expect("scalars are deterministic") + } + + #[instrument(level = "trace", skip(self), ret)] + fn eval_to_const(&mut self, value: VnIndex) -> Option> { + use Value::*; + let op = match *self.get(value) { + Opaque(_) => return None, + // Do not bother evaluating repeat expressions. This would uselessly consume memory. + Repeat(..) => return None, + + Constant { ref value, disambiguator: _ } => { + self.ecx.eval_mir_constant(value, None, None).ok()? + } + Aggregate(kind, variant, ref fields) => { + let fields = fields + .iter() + .map(|&f| self.evaluated[f].as_ref()) + .collect::>>()?; + let ty = match kind { + AggregateTy::Array => { + assert!(fields.len() > 0); + Ty::new_array(self.tcx, fields[0].layout.ty, fields.len() as u64) + } + AggregateTy::Tuple => { + Ty::new_tup_from_iter(self.tcx, fields.iter().map(|f| f.layout.ty)) + } + AggregateTy::Def(def_id, args) => { + self.tcx.type_of(def_id).instantiate(self.tcx, args) + } + }; + let variant = if ty.is_enum() { Some(variant) } else { None }; + let ty = self.ecx.layout_of(ty).ok()?; + if ty.is_zst() { + ImmTy::uninit(ty).into() + } else if matches!(ty.abi, Abi::Scalar(..) 
| Abi::ScalarPair(..)) { + let dest = self.ecx.allocate(ty, MemoryKind::Stack).ok()?; + let variant_dest = if let Some(variant) = variant { + self.ecx.project_downcast(&dest, variant).ok()? + } else { + dest.clone() + }; + for (field_index, op) in fields.into_iter().enumerate() { + let field_dest = self.ecx.project_field(&variant_dest, field_index).ok()?; + self.ecx.copy_op(op, &field_dest, /*allow_transmute*/ false).ok()?; + } + self.ecx.write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest).ok()?; + self.ecx.alloc_mark_immutable(dest.ptr().provenance.unwrap()).ok()?; + dest.into() + } else { + return None; + } + } + + Projection(base, elem) => { + let value = self.evaluated[base].as_ref()?; + let elem = match elem { + ProjectionElem::Deref => ProjectionElem::Deref, + ProjectionElem::Downcast(name, read_variant) => { + ProjectionElem::Downcast(name, read_variant) + } + ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, ty), + ProjectionElem::ConstantIndex { offset, min_length, from_end } => { + ProjectionElem::ConstantIndex { offset, min_length, from_end } + } + ProjectionElem::Subslice { from, to, from_end } => { + ProjectionElem::Subslice { from, to, from_end } + } + ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty), + ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty), + // This should have been replaced by a `ConstantIndex` earlier. + ProjectionElem::Index(_) => return None, + }; + self.ecx.project(value, elem).ok()? + } + Address { place, kind, provenance: _ } => { + if !place.is_indirect_first_projection() { + return None; + } + let local = self.locals[place.local]?; + let pointer = self.evaluated[local].as_ref()?; + let mut mplace = self.ecx.deref_pointer(pointer).ok()?; + for proj in place.projection.iter().skip(1) { + // We have no call stack to associate a local with a value, so we cannot interpret indexing. 
+ if matches!(proj, ProjectionElem::Index(_)) { + return None; + } + mplace = self.ecx.project(&mplace, proj).ok()?; + } + let pointer = mplace.to_ref(&self.ecx); + let ty = match kind { + AddressKind::Ref(bk) => Ty::new_ref( + self.tcx, + self.tcx.lifetimes.re_erased, + ty::TypeAndMut { ty: mplace.layout.ty, mutbl: bk.to_mutbl_lossy() }, + ), + AddressKind::Address(mutbl) => { + Ty::new_ptr(self.tcx, TypeAndMut { ty: mplace.layout.ty, mutbl }) + } + }; + let layout = self.ecx.layout_of(ty).ok()?; + ImmTy::from_immediate(pointer, layout).into() + } + + Discriminant(base) => { + let base = self.evaluated[base].as_ref()?; + let variant = self.ecx.read_discriminant(base).ok()?; + let discr_value = + self.ecx.discriminant_for_variant(base.layout.ty, variant).ok()?; + discr_value.into() + } + Len(slice) => { + let slice = self.evaluated[slice].as_ref()?; + let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap(); + let len = slice.len(&self.ecx).ok()?; + let imm = ImmTy::try_from_uint(len, usize_layout)?; + imm.into() + } + NullaryOp(null_op, ty) => { + let layout = self.ecx.layout_of(ty).ok()?; + if let NullOp::SizeOf | NullOp::AlignOf = null_op && layout.is_unsized() { + return None; + } + let val = match null_op { + NullOp::SizeOf => layout.size.bytes(), + NullOp::AlignOf => layout.align.abi.bytes(), + NullOp::OffsetOf(fields) => { + layout.offset_of_subfield(&self.ecx, fields.iter()).bytes() + } + }; + let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap(); + let imm = ImmTy::try_from_uint(val, usize_layout)?; + imm.into() + } + UnaryOp(un_op, operand) => { + let operand = self.evaluated[operand].as_ref()?; + let operand = self.ecx.read_immediate(operand).ok()?; + let (val, _) = self.ecx.overflowing_unary_op(un_op, &operand).ok()?; + val.into() + } + BinaryOp(bin_op, lhs, rhs) => { + let lhs = self.evaluated[lhs].as_ref()?; + let lhs = self.ecx.read_immediate(lhs).ok()?; + let rhs = self.evaluated[rhs].as_ref()?; + let rhs = self.ecx.read_immediate(rhs).ok()?; + let (val, _) = self.ecx.overflowing_binary_op(bin_op, &lhs, &rhs).ok()?; + val.into() + } + CheckedBinaryOp(bin_op, lhs, rhs) => { + let lhs = self.evaluated[lhs].as_ref()?; + let lhs = self.ecx.read_immediate(lhs).ok()?; + let rhs = self.evaluated[rhs].as_ref()?; + let rhs = self.ecx.read_immediate(rhs).ok()?; + let (val, overflowed) = self.ecx.overflowing_binary_op(bin_op, &lhs, &rhs).ok()?; + let tuple = Ty::new_tup_from_iter( + self.tcx, + [val.layout.ty, self.tcx.types.bool].into_iter(), + ); + let tuple = self.ecx.layout_of(tuple).ok()?; + ImmTy::from_scalar_pair(val.to_scalar(), Scalar::from_bool(overflowed), tuple) + .into() + } + Cast { kind, value, from: _, to } => match kind { + CastKind::IntToInt | CastKind::IntToFloat => { + let value = self.evaluated[value].as_ref()?; + let value = self.ecx.read_immediate(value).ok()?; + let to = self.ecx.layout_of(to).ok()?; + let res = self.ecx.int_to_int_or_float(&value, to).ok()?; + res.into() + } + CastKind::FloatToFloat | CastKind::FloatToInt => { + let value = self.evaluated[value].as_ref()?; + let value = self.ecx.read_immediate(value).ok()?; + let to = self.ecx.layout_of(to).ok()?; + let res = self.ecx.float_to_float_or_int(&value, to).ok()?; + res.into() + } + CastKind::Transmute => { + let value = self.evaluated[value].as_ref()?; + let to = self.ecx.layout_of(to).ok()?; + // `offset` for immediates only supports scalar/scalar-pair ABIs, + // so bail out if the target is not one. 
+ if value.as_mplace_or_imm().is_right() { + match (value.layout.abi, to.abi) { + (Abi::Scalar(..), Abi::Scalar(..)) => {} + (Abi::ScalarPair(..), Abi::ScalarPair(..)) => {} + _ => return None, + } + } + value.offset(Size::ZERO, to, &self.ecx).ok()? + } + _ => return None, + }, + }; + Some(op) + } + + fn project( + &mut self, + place: PlaceRef<'tcx>, + value: VnIndex, + proj: PlaceElem<'tcx>, + ) -> Option { + let proj = match proj { + ProjectionElem::Deref => { + let ty = place.ty(self.local_decls, self.tcx).ty; + if let Some(Mutability::Not) = ty.ref_mutability() + && let Some(pointee_ty) = ty.builtin_deref(true) + && pointee_ty.ty.is_freeze(self.tcx, self.param_env) + { + // An immutable borrow `_x` always points to the same value for the + // lifetime of the borrow, so we can merge all instances of `*_x`. + ProjectionElem::Deref + } else { + return None; + } + } + ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index), + ProjectionElem::Field(f, ty) => { + if let Value::Aggregate(_, _, fields) = self.get(value) { + return Some(fields[f.as_usize()]); + } else if let Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant)) = self.get(value) + && let Value::Aggregate(_, written_variant, fields) = self.get(*outer_value) + // This pass is not aware of control-flow, so we do not know whether the + // replacement we are doing is actually reachable. We could be in any arm of + // ``` + // match Some(x) { + // Some(y) => /* stuff */, + // None => /* other */, + // } + // ``` + // + // In surface rust, the current statement would be unreachable. + // + // However, from the reference chapter on enums and RFC 2195, + // accessing the wrong variant is not UB if the enum has repr. + // So it's not impossible for a series of MIR opts to generate + // a downcast to an inactive variant. + && written_variant == read_variant + { + return Some(fields[f.as_usize()]); + } + ProjectionElem::Field(f, ty) + } + ProjectionElem::Index(idx) => { + if let Value::Repeat(inner, _) = self.get(value) { + return Some(*inner); + } + let idx = self.locals[idx]?; + ProjectionElem::Index(idx) + } + ProjectionElem::ConstantIndex { offset, min_length, from_end } => { + match self.get(value) { + Value::Repeat(inner, _) => { + return Some(*inner); + } + Value::Aggregate(AggregateTy::Array, _, operands) => { + let offset = if from_end { + operands.len() - offset as usize + } else { + offset as usize + }; + return operands.get(offset).copied(); + } + _ => {} + }; + ProjectionElem::ConstantIndex { offset, min_length, from_end } + } + ProjectionElem::Subslice { from, to, from_end } => { + ProjectionElem::Subslice { from, to, from_end } + } + ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty), + ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty), + }; + + Some(self.insert(Value::Projection(value, proj))) + } + + /// Simplify the projection chain if we know better. + #[instrument(level = "trace", skip(self))] + fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) { + // If the projection is indirect, we treat the local as a value, so can replace it with + // another local. 
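// Editor's sketch (not part of the patch): the aggregate/field collapse done
// in `project` above. When the base value is a known aggregate whose operands
// already have value numbers, projecting field `k` simply returns the k-th
// operand's value number instead of inserting a new `Projection` node. Plain
// usize indices stand in for `VnIndex`.
#[derive(Clone)]
enum Value { Opaque, Aggregate(Vec<usize> /* field value numbers */) }

fn project_field(values: &[Value], base: usize, field: usize) -> Option<usize> {
    match &values[base] {
        Value::Aggregate(fields) => fields.get(field).copied(),
        Value::Opaque => None, // would fall back to inserting Value::Projection
    }
}

fn main() {
    // v0 and v1 are opaque leaves; v2 = (v0, v1) as an aggregate.
    let values = vec![Value::Opaque, Value::Opaque, Value::Aggregate(vec![0, 1])];
    assert_eq!(project_field(&values, 2, 1), Some(1));
    assert_eq!(project_field(&values, 0, 0), None);
}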
+ if place.is_indirect() + && let Some(base) = self.locals[place.local] + && let Some(new_local) = self.try_as_local(base, location) + { + place.local = new_local; + self.reused_locals.insert(new_local); + } + + let mut projection = Cow::Borrowed(&place.projection[..]); + + for i in 0..projection.len() { + let elem = projection[i]; + if let ProjectionElem::Index(idx) = elem + && let Some(idx) = self.locals[idx] + { + if let Some(offset) = self.evaluated[idx].as_ref() + && let Ok(offset) = self.ecx.read_target_usize(offset) + { + projection.to_mut()[i] = ProjectionElem::ConstantIndex { + offset, + min_length: offset + 1, + from_end: false, + }; + } else if let Some(new_idx) = self.try_as_local(idx, location) { + projection.to_mut()[i] = ProjectionElem::Index(new_idx); + self.reused_locals.insert(new_idx); + } + } + } + + if projection.is_owned() { + place.projection = self.tcx.mk_place_elems(&projection); + } + + trace!(?place); + } + /// Represent the *value* which would be read from `place`, and point `place` to a preexisting /// place with the same value (if that already exists). #[instrument(level = "trace", skip(self), ret)] @@ -259,6 +669,8 @@ place: &mut Place<'tcx>, location: Location, ) -> Option { + self.simplify_place_projection(place, location); + // Invariant: `place` and `place_ref` point to the same value, even if they point to // different memory locations. let mut place_ref = place.as_ref(); @@ -273,57 +685,18 @@ place_ref = PlaceRef { local, projection: &place.projection[index..] }; } - let proj = match proj { - ProjectionElem::Deref => { - let ty = Place::ty_from( - place.local, - &place.projection[..index], - self.local_decls, - self.tcx, - ) - .ty; - if let Some(Mutability::Not) = ty.ref_mutability() - && let Some(pointee_ty) = ty.builtin_deref(true) - && pointee_ty.ty.is_freeze(self.tcx, self.param_env) - { - // An immutable borrow `_x` always points to the same value for the - // lifetime of the borrow, so we can merge all instances of `*_x`. - ProjectionElem::Deref - } else { - return None; - } - } - ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, ty), - ProjectionElem::Index(idx) => { - let idx = self.locals[idx]?; - ProjectionElem::Index(idx) - } - ProjectionElem::ConstantIndex { offset, min_length, from_end } => { - ProjectionElem::ConstantIndex { offset, min_length, from_end } - } - ProjectionElem::Subslice { from, to, from_end } => { - ProjectionElem::Subslice { from, to, from_end } - } - ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index), - ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty), - ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty), - }; - value = self.insert(Value::Projection(value, proj)); + let base = PlaceRef { local: place.local, projection: &place.projection[..index] }; + value = self.project(base, value, proj)?; } - if let Some(local) = self.try_as_local(value, location) - && local != place.local // in case we had no projection to begin with. - { - *place = local.into(); - self.reused_locals.insert(local); - self.any_replacement = true; - } else if place_ref.local != place.local - || place_ref.projection.len() < place.projection.len() - { + if let Some(new_local) = self.try_as_local(value, location) { + place_ref = PlaceRef { local: new_local, projection: &[] }; + } + + if place_ref.local != place.local || place_ref.projection.len() < place.projection.len() { // By the invariant on `place_ref`. 
*place = place_ref.project_deeper(&[], self.tcx); self.reused_locals.insert(place_ref.local); - self.any_replacement = true; } Some(value) @@ -336,12 +709,14 @@ location: Location, ) -> Option { match *operand { - Operand::Constant(ref constant) => Some(self.insert(Value::Constant(constant.const_))), + Operand::Constant(ref mut constant) => { + let const_ = constant.const_.normalize(self.tcx, self.param_env); + self.insert_constant(const_) + } Operand::Copy(ref mut place) | Operand::Move(ref mut place) => { let value = self.simplify_place_value(place, location)?; if let Some(const_) = self.try_as_constant(value) { *operand = Operand::Constant(Box::new(const_)); - self.any_replacement = true; } Some(value) } @@ -370,24 +745,15 @@ Value::Repeat(op, amount) } Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty), - Rvalue::Aggregate(box ref kind, ref mut fields) => { - let variant_index = match *kind { - AggregateKind::Array(..) - | AggregateKind::Tuple - | AggregateKind::Closure(..) - | AggregateKind::Generator(..) => FIRST_VARIANT, - AggregateKind::Adt(_, variant_index, _, _, None) => variant_index, - // Do not track unions. - AggregateKind::Adt(_, _, _, _, Some(_)) => return None, - }; - let fields: Option> = fields - .iter_mut() - .map(|op| self.simplify_operand(op, location).or_else(|| self.new_opaque())) - .collect(); - let ty = rvalue.ty(self.local_decls, self.tcx); - Value::Aggregate(ty, variant_index, fields?) + Rvalue::Aggregate(..) => return self.simplify_aggregate(rvalue, location), + Rvalue::Ref(_, borrow_kind, ref mut place) => { + self.simplify_place_projection(place, location); + return self.new_pointer(*place, AddressKind::Ref(borrow_kind)); + } + Rvalue::AddressOf(mutbl, ref mut place) => { + self.simplify_place_projection(place, location); + return self.new_pointer(*place, AddressKind::Address(mutbl)); } - Rvalue::Ref(.., place) | Rvalue::AddressOf(_, place) => return self.new_pointer(place), // Operations. Rvalue::Len(ref mut place) => { @@ -397,6 +763,14 @@ Rvalue::Cast(kind, ref mut value, to) => { let from = value.ty(self.local_decls, self.tcx); let value = self.simplify_operand(value, location)?; + if let CastKind::PointerCoercion( + PointerCoercion::ReifyFnPointer | PointerCoercion::ClosureFnPointer(_), + ) = kind + { + // Each reification of a generic fn may get a different pointer. + // Do not try to merge them. 
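// Illustrative sketch (not part of the upstream patch): two reifications of the same
// generic function are not guaranteed to yield identical pointers (for instance across
// codegen units), which is why the code below returns an opaque value instead of merging them.
fn generic<T>() {}

fn main() {
    let a = generic::<u8> as fn();
    let b = generic::<u8> as fn();
    // May print either true or false; an optimizer must not assume one outcome.
    println!("{}", a == b);
}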
+ return self.new_opaque(); + } Value::Cast { kind, value, from, to } } Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => { @@ -415,6 +789,9 @@ } Rvalue::Discriminant(ref mut place) => { let place = self.simplify_place_value(place, location)?; + if let Some(discr) = self.simplify_discriminant(place) { + return Some(discr); + } Value::Discriminant(place) } @@ -424,45 +801,182 @@ debug!(?value); Some(self.insert(value)) } + + fn simplify_discriminant(&mut self, place: VnIndex) -> Option { + if let Value::Aggregate(enum_ty, variant, _) = *self.get(place) + && let AggregateTy::Def(enum_did, enum_substs) = enum_ty + && let DefKind::Enum = self.tcx.def_kind(enum_did) + { + let enum_ty = self.tcx.type_of(enum_did).instantiate(self.tcx, enum_substs); + let discr = self.ecx.discriminant_for_variant(enum_ty, variant).ok()?; + return Some(self.insert_scalar(discr.to_scalar(), discr.layout.ty)); + } + + None + } + + fn simplify_aggregate( + &mut self, + rvalue: &mut Rvalue<'tcx>, + location: Location, + ) -> Option { + let Rvalue::Aggregate(box ref kind, ref mut fields) = *rvalue else { bug!() }; + + let tcx = self.tcx; + if fields.is_empty() { + let is_zst = match *kind { + AggregateKind::Array(..) | AggregateKind::Tuple | AggregateKind::Closure(..) => { + true + } + // Only enums can be non-ZST. + AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum, + // Coroutines are never ZST, as they at least contain the implicit states. + AggregateKind::Coroutine(..) => false, + }; + + if is_zst { + let ty = rvalue.ty(self.local_decls, tcx); + return self.insert_constant(Const::zero_sized(ty)); + } + } + + let (ty, variant_index) = match *kind { + AggregateKind::Array(..) => { + assert!(!fields.is_empty()); + (AggregateTy::Array, FIRST_VARIANT) + } + AggregateKind::Tuple => { + assert!(!fields.is_empty()); + (AggregateTy::Tuple, FIRST_VARIANT) + } + AggregateKind::Closure(did, substs) | AggregateKind::Coroutine(did, substs, _) => { + (AggregateTy::Def(did, substs), FIRST_VARIANT) + } + AggregateKind::Adt(did, variant_index, substs, _, None) => { + (AggregateTy::Def(did, substs), variant_index) + } + // Do not track unions. + AggregateKind::Adt(_, _, _, _, Some(_)) => return None, + }; + + let fields: Option> = fields + .iter_mut() + .map(|op| self.simplify_operand(op, location).or_else(|| self.new_opaque())) + .collect(); + let fields = fields?; + + if let AggregateTy::Array = ty && fields.len() > 4 { + let first = fields[0]; + if fields.iter().all(|&v| v == first) { + let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap()); + if let Some(const_) = self.try_as_constant(first) { + *rvalue = Rvalue::Repeat(Operand::Constant(Box::new(const_)), len); + } else if let Some(local) = self.try_as_local(first, location) { + *rvalue = Rvalue::Repeat(Operand::Copy(local.into()), len); + self.reused_locals.insert(local); + } + return Some(self.insert(Value::Repeat(first, len))); + } + } + + Some(self.insert(Value::Aggregate(ty, variant_index, fields))) + } +} + +fn op_to_prop_const<'tcx>( + ecx: &mut InterpCx<'_, 'tcx, DummyMachine>, + op: &OpTy<'tcx>, +) -> Option> { + // Do not attempt to propagate unsized locals. + if op.layout.is_unsized() { + return None; + } + + // This constant is a ZST, just return an empty value. + if op.layout.is_zst() { + return Some(ConstValue::ZeroSized); + } + + // Do not synthetize too large constants. Codegen will just memcpy them, which we'd like to avoid. + if !matches!(op.layout.abi, Abi::Scalar(..) 
| Abi::ScalarPair(..)) { + return None; + } + + // If this constant has scalar ABI, return it as a `ConstValue::Scalar`. + if let Abi::Scalar(abi::Scalar::Initialized { .. }) = op.layout.abi + && let Ok(scalar) = ecx.read_scalar(op) + && scalar.try_to_int().is_ok() + { + return Some(ConstValue::Scalar(scalar)); + } + + // If this constant is already represented as an `Allocation`, + // try putting it into global memory to return it. + if let Either::Left(mplace) = op.as_mplace_or_imm() { + let (size, _align) = ecx.size_and_align_of_mplace(&mplace).ok()??; + + // Do not try interning a value that contains provenance. + // Due to https://github.com/rust-lang/rust/issues/79738, doing so could lead to bugs. + // FIXME: remove this hack once that issue is fixed. + let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).ok()??; + if alloc_ref.has_provenance() { + return None; + } + + let pointer = mplace.ptr().into_pointer_or_addr().ok()?; + let (alloc_id, offset) = pointer.into_parts(); + intern_const_alloc_for_constprop(ecx, alloc_id).ok()?; + if matches!(ecx.tcx.global_alloc(alloc_id), GlobalAlloc::Memory(_)) { + // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything + // by `GlobalAlloc::Memory`, so do fall through to copying if needed. + // FIXME: find a way to treat this more uniformly + // (probably by fixing codegen) + return Some(ConstValue::Indirect { alloc_id, offset }); + } + } + + // Everything failed: create a new allocation to hold the data. + let alloc_id = + ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest, false)).ok()?; + let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO }; + + // Check that we do not leak a pointer. + // Those pointers may lose part of their identity in codegen. + // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed. + if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() { + return Some(value); + } + + None } impl<'tcx> VnState<'_, 'tcx> { /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR. fn try_as_constant(&mut self, index: VnIndex) -> Option> { - if let Value::Constant(const_) = *self.get(index) { - // Some constants may contain pointers. We need to preserve the provenance of these - // pointers, but not all constants guarantee this: - // - valtrees purposefully do not; - // - ConstValue::Slice does not either. - match const_ { - Const::Ty(c) => match c.kind() { - ty::ConstKind::Value(valtree) => match valtree { - // This is just an integer, keep it. - ty::ValTree::Leaf(_) => {} - ty::ValTree::Branch(_) => return None, - }, - ty::ConstKind::Param(..) - | ty::ConstKind::Unevaluated(..) - | ty::ConstKind::Expr(..) => {} - // Should not appear in runtime MIR. - ty::ConstKind::Infer(..) - | ty::ConstKind::Bound(..) - | ty::ConstKind::Placeholder(..) - | ty::ConstKind::Error(..) => bug!(), - }, - Const::Unevaluated(..) => {} - // If the same slice appears twice in the MIR, we cannot guarantee that we will - // give the same `AllocId` to the data. - Const::Val(ConstValue::Slice { .. }, _) => return None, - Const::Val( - ConstValue::ZeroSized | ConstValue::Scalar(_) | ConstValue::Indirect { .. }, - _, - ) => {} - } - Some(ConstOperand { span: rustc_span::DUMMY_SP, user_ty: None, const_ }) - } else { - None + // This was already constant in MIR, do not change it. 
+ if let Value::Constant { value, disambiguator: _ } = *self.get(index) + // If the constant is not deterministic, adding an additional mention of it in MIR will + // not give the same value as the former mention. + && value.is_deterministic() + { + return Some(ConstOperand { span: rustc_span::DUMMY_SP, user_ty: None, const_: value }); } + + let op = self.evaluated[index].as_ref()?; + if op.layout.is_unsized() { + // Do not attempt to propagate unsized locals. + return None; + } + + let value = op_to_prop_const(&mut self.ecx, op)?; + + // Check that we do not leak a pointer. + // Those pointers may lose part of their identity in codegen. + // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed. + assert!(!value.may_have_provenance(self.tcx, op.layout.size)); + + let const_ = Const::Val(value, op.layout.ty); + Some(ConstOperand { span: rustc_span::DUMMY_SP, user_ty: None, const_ }) } /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`, @@ -481,27 +995,32 @@ self.tcx } + fn visit_place(&mut self, place: &mut Place<'tcx>, _: PlaceContext, location: Location) { + self.simplify_place_projection(place, location); + } + fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) { self.simplify_operand(operand, location); } fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, location: Location) { - self.super_statement(stmt, location); if let StatementKind::Assign(box (_, ref mut rvalue)) = stmt.kind // Do not try to simplify a constant, it's already in canonical shape. && !matches!(rvalue, Rvalue::Use(Operand::Constant(_))) - && let Some(value) = self.simplify_rvalue(rvalue, location) { - if let Some(const_) = self.try_as_constant(value) { - *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_))); - self.any_replacement = true; - } else if let Some(local) = self.try_as_local(value, location) - && *rvalue != Rvalue::Use(Operand::Move(local.into())) + if let Some(value) = self.simplify_rvalue(rvalue, location) { - *rvalue = Rvalue::Use(Operand::Copy(local.into())); - self.reused_locals.insert(local); - self.any_replacement = true; + if let Some(const_) = self.try_as_constant(value) { + *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_))); + } else if let Some(local) = self.try_as_local(value, location) + && *rvalue != Rvalue::Use(Operand::Move(local.into())) + { + *rvalue = Rvalue::Use(Operand::Copy(local.into())); + self.reused_locals.insert(local); + } } + } else { + self.super_statement(stmt, location); } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/inline.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/inline.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/inline.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/inline.rs 2023-12-21 16:55:28.000000000 +0000 @@ -14,6 +14,7 @@ use rustc_target::abi::FieldIdx; use rustc_target::spec::abi::Abi; +use crate::cost_checker::CostChecker; use crate::simplify::{remove_dead_blocks, CfgSimplifier}; use crate::util; use crate::MirPass; @@ -22,11 +23,6 @@ pub(crate) mod cycle; -const INSTR_COST: usize = 5; -const CALL_PENALTY: usize = 25; -const LANDINGPAD_PENALTY: usize = 50; -const RESUME_PENALTY: usize = 45; - const TOP_DOWN_DEPTH_LIMIT: usize = 5; pub struct Inline; @@ -63,7 +59,7 @@ if inline(tcx, body) { debug!("running simplify cfg on {:?}", body.source); CfgSimplifier::new(body).simplify(); - 
remove_dead_blocks(tcx, body); + remove_dead_blocks(body); deref_finder(tcx, body); } } @@ -79,10 +75,10 @@ if body.source.promoted.is_some() { return false; } - // Avoid inlining into generators, since their `optimized_mir` is used for layout computation, + // Avoid inlining into coroutines, since their `optimized_mir` is used for layout computation, // which can create a cycle, even when no attempt is made to inline the function in the other // direction. - if body.generator.is_some() { + if body.coroutine.is_some() { return false; } @@ -169,8 +165,11 @@ caller_body: &mut Body<'tcx>, callsite: &CallSite<'tcx>, ) -> Result, &'static str> { + self.check_mir_is_available(caller_body, &callsite.callee)?; + let callee_attrs = self.tcx.codegen_fn_attrs(callsite.callee.def_id()); - self.check_codegen_attributes(callsite, callee_attrs)?; + let cross_crate_inlinable = self.tcx.cross_crate_inlinable(callsite.callee.def_id()); + self.check_codegen_attributes(callsite, callee_attrs, cross_crate_inlinable)?; let terminator = caller_body[callsite.block].terminator.as_ref().unwrap(); let TerminatorKind::Call { args, destination, .. } = &terminator.kind else { bug!() }; @@ -183,9 +182,8 @@ } } - self.check_mir_is_available(caller_body, &callsite.callee)?; let callee_body = try_instance_mir(self.tcx, callsite.callee.def)?; - self.check_mir_body(callsite, callee_body, callee_attrs)?; + self.check_mir_body(callsite, callee_body, callee_attrs, cross_crate_inlinable)?; if !self.tcx.consider_optimizing(|| { format!("Inline {:?} into {:?}", callsite.callee, caller_body.source) @@ -401,6 +399,7 @@ &self, callsite: &CallSite<'tcx>, callee_attrs: &CodegenFnAttrs, + cross_crate_inlinable: bool, ) -> Result<(), &'static str> { if let InlineAttr::Never = callee_attrs.inline { return Err("never inline hint"); @@ -414,7 +413,7 @@ .non_erasable_generics(self.tcx, callsite.callee.def_id()) .next() .is_some(); - if !is_generic && !callee_attrs.requests_inline() { + if !is_generic && !cross_crate_inlinable { return Err("not exported"); } @@ -439,10 +438,13 @@ return Err("incompatible instruction set"); } - for feature in &callee_attrs.target_features { - if !self.codegen_fn_attrs.target_features.contains(feature) { - return Err("incompatible target feature"); - } + if callee_attrs.target_features != self.codegen_fn_attrs.target_features { + // In general it is not correct to inline a callee with target features that are a + // subset of the caller. This is because the callee might contain calls, and the ABI of + // those calls depends on the target features of the surrounding function. By moving a + // `Call` terminator from one MIR body to another with more target features, we might + // change the ABI of that call! 
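// Illustrative sketch (not part of the upstream patch; x86_64-specific, hypothetical names):
// how the call to `helper` passes its `__m256` argument depends on the target features of the
// *enclosing* function, so inlining `with_avx` into a non-AVX caller could change that call's ABI.
#[cfg(target_arch = "x86_64")]
mod avx_abi_sketch {
    use std::arch::x86_64::__m256;

    #[target_feature(enable = "avx")]
    pub unsafe fn helper(v: __m256) -> __m256 {
        v
    }

    #[target_feature(enable = "avx")]
    pub unsafe fn with_avx(v: __m256) -> __m256 {
        // With AVX enabled here, this call can pass `v` in a YMM register.
        helper(v)
    }
}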
+ return Err("incompatible target features"); } Ok(()) @@ -456,10 +458,11 @@ callsite: &CallSite<'tcx>, callee_body: &Body<'tcx>, callee_attrs: &CodegenFnAttrs, + cross_crate_inlinable: bool, ) -> Result<(), &'static str> { let tcx = self.tcx; - let mut threshold = if callee_attrs.requests_inline() { + let mut threshold = if cross_crate_inlinable { self.tcx.sess.opts.unstable_opts.inline_mir_hint_threshold.unwrap_or(100) } else { self.tcx.sess.opts.unstable_opts.inline_mir_threshold.unwrap_or(50) @@ -475,13 +478,8 @@ // FIXME: Give a bonus to functions with only a single caller - let mut checker = CostChecker { - tcx: self.tcx, - param_env: self.param_env, - instance: callsite.callee, - callee_body, - cost: 0, - }; + let mut checker = + CostChecker::new(self.tcx, self.param_env, Some(callsite.callee), callee_body); // Traverse the MIR manually so we can account for the effects of inlining on the CFG. let mut work_list = vec![START_BLOCK]; @@ -503,7 +501,9 @@ self.tcx, ty::EarlyBinder::bind(&place.ty(callee_body, tcx).ty), ); - if ty.needs_drop(tcx, self.param_env) && let UnwindAction::Cleanup(unwind) = unwind { + if ty.needs_drop(tcx, self.param_env) + && let UnwindAction::Cleanup(unwind) = unwind + { work_list.push(unwind); } } else if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set @@ -524,7 +524,7 @@ // That attribute is often applied to very large functions that exceed LLVM's (very // generous) inlining threshold. Such functions are very poor MIR inlining candidates. // Always inlining #[inline(always)] functions in MIR, on net, slows down the compiler. - let cost = checker.cost; + let cost = checker.cost(); if cost <= threshold { debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold); Ok(()) @@ -616,9 +616,7 @@ // If there are any locals without storage markers, give them storage only for the // duration of the call. for local in callee_body.vars_and_temps_iter() { - if !callee_body.local_decls[local].internal - && integrator.always_live_locals.contains(local) - { + if integrator.always_live_locals.contains(local) { let new_local = integrator.map_local(local); caller_body[callsite.block].statements.push(Statement { source_info: callsite.source_info, @@ -641,9 +639,7 @@ n += 1; } for local in callee_body.vars_and_temps_iter().rev() { - if !callee_body.local_decls[local].internal - && integrator.always_live_locals.contains(local) - { + if integrator.always_live_locals.contains(local) { let new_local = integrator.map_local(local); caller_body[block].statements.push(Statement { source_info: callsite.source_info, @@ -801,79 +797,6 @@ } } -/// Verify that the callee body is compatible with the caller. -/// -/// This visitor mostly computes the inlining cost, -/// but also needs to verify that types match because of normalization failure. -struct CostChecker<'b, 'tcx> { - tcx: TyCtxt<'tcx>, - param_env: ParamEnv<'tcx>, - cost: usize, - callee_body: &'b Body<'tcx>, - instance: ty::Instance<'tcx>, -} - -impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> { - fn visit_statement(&mut self, statement: &Statement<'tcx>, _: Location) { - // Don't count StorageLive/StorageDead in the inlining cost. - match statement.kind { - StatementKind::StorageLive(_) - | StatementKind::StorageDead(_) - | StatementKind::Deinit(_) - | StatementKind::Nop => {} - _ => self.cost += INSTR_COST, - } - } - - fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, _: Location) { - let tcx = self.tcx; - match terminator.kind { - TerminatorKind::Drop { ref place, unwind, .. 
} => { - // If the place doesn't actually need dropping, treat it like a regular goto. - let ty = self.instance.instantiate_mir( - tcx, - ty::EarlyBinder::bind(&place.ty(self.callee_body, tcx).ty), - ); - if ty.needs_drop(tcx, self.param_env) { - self.cost += CALL_PENALTY; - if let UnwindAction::Cleanup(_) = unwind { - self.cost += LANDINGPAD_PENALTY; - } - } else { - self.cost += INSTR_COST; - } - } - TerminatorKind::Call { func: Operand::Constant(ref f), unwind, .. } => { - let fn_ty = - self.instance.instantiate_mir(tcx, ty::EarlyBinder::bind(&f.const_.ty())); - self.cost += if let ty::FnDef(def_id, _) = *fn_ty.kind() && tcx.is_intrinsic(def_id) { - // Don't give intrinsics the extra penalty for calls - INSTR_COST - } else { - CALL_PENALTY - }; - if let UnwindAction::Cleanup(_) = unwind { - self.cost += LANDINGPAD_PENALTY; - } - } - TerminatorKind::Assert { unwind, .. } => { - self.cost += CALL_PENALTY; - if let UnwindAction::Cleanup(_) = unwind { - self.cost += LANDINGPAD_PENALTY; - } - } - TerminatorKind::UnwindResume => self.cost += RESUME_PENALTY, - TerminatorKind::InlineAsm { unwind, .. } => { - self.cost += INSTR_COST; - if let UnwindAction::Cleanup(_) = unwind { - self.cost += LANDINGPAD_PENALTY; - } - } - _ => self.cost += INSTR_COST, - } - } -} - /** * Integrator. * @@ -1010,7 +933,7 @@ } match terminator.kind { - TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => bug!(), + TerminatorKind::CoroutineDrop | TerminatorKind::Yield { .. } => bug!(), TerminatorKind::Goto { ref mut target } => { *target = self.map_block(*target); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/instsimplify.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/instsimplify.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/instsimplify.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/instsimplify.rs 2023-12-21 16:55:28.000000000 +0000 @@ -93,7 +93,9 @@ _ => None, }; - if let Some(new) = new && self.should_simplify(source_info, rvalue) { + if let Some(new) = new + && self.should_simplify(source_info, rvalue) + { *rvalue = new; } } @@ -150,7 +152,8 @@ *rvalue = Rvalue::Use(operand.clone()); } else if *kind == CastKind::Transmute { // Transmuting an integer to another integer is just a signedness cast - if let (ty::Int(int), ty::Uint(uint)) | (ty::Uint(uint), ty::Int(int)) = (operand_ty.kind(), cast_ty.kind()) + if let (ty::Int(int), ty::Uint(uint)) | (ty::Uint(uint), ty::Int(int)) = + (operand_ty.kind(), cast_ty.kind()) && int.bit_width() == uint.bit_width() { // The width check isn't strictly necessary, as different widths @@ -172,8 +175,15 @@ for (i, field) in variant.fields.iter().enumerate() { let field_ty = field.ty(self.tcx, args); if field_ty == *cast_ty { - let place = place.project_deeper(&[ProjectionElem::Field(FieldIdx::from_usize(i), *cast_ty)], self.tcx); - let operand = if operand.is_move() { Operand::Move(place) } else { Operand::Copy(place) }; + let place = place.project_deeper( + &[ProjectionElem::Field(FieldIdx::from_usize(i), *cast_ty)], + self.tcx, + ); + let operand = if operand.is_move() { + Operand::Move(place) + } else { + Operand::Copy(place) + }; *rvalue = Rvalue::Use(operand); return; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/jump_threading.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/jump_threading.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/jump_threading.rs 
1970-01-01 00:00:00.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/jump_threading.rs	2023-12-21 16:55:28.000000000 +0000
@@ -0,0 +1,759 @@
+//! A jump threading optimization.
+//!
+//! This optimization seeks to replace join-then-switch control flow patterns by straight jumps
+//!    X = 0                                      X = 0
+//! ------------\      /--------              ------------
+//!    X = 1      X----X SwitchInt(X)     =>       X = 1
+//! ------------/      \--------              ------------
+//!
+//!
+//! We proceed by walking the cfg backwards starting from each `SwitchInt` terminator,
+//! looking for assignments that will turn the `SwitchInt` into a simple `Goto`.
+//!
+//! The algorithm maintains a set of replacement conditions:
+//! - `conditions[place]` contains `Condition { value, polarity: Eq, target }`
+//!   if assigning `value` to `place` turns the `SwitchInt` into `Goto { target }`.
+//! - `conditions[place]` contains `Condition { value, polarity: Ne, target }`
+//!   if assigning anything different from `value` to `place` turns the `SwitchInt`
+//!   into `Goto { target }`.
+//!
+//! In this file, we denote as `place ?= value` the existence of a replacement condition
+//! on `place` with given `value`, irrespective of the polarity and target of that
+//! replacement condition.
+//!
+//! We then walk the CFG backwards transforming the set of conditions.
+//! When we find a fulfilling assignment, we record a `ThreadingOpportunity`.
+//! All `ThreadingOpportunity`s are applied to the body, by duplicating blocks if required.
+//!
+//! The optimization search can be very heavy, as it performs a DFS on MIR starting from
+//! each `SwitchInt` terminator. To manage the complexity, we:
+//! - bound the maximum depth by a constant `MAX_BACKTRACK`;
+//! - we only traverse `Goto` terminators.
+//!
+//! We try to avoid creating irreducible control-flow by not threading through a loop header.
+//!
+//! Likewise, applying the optimisation can create a lot of new MIR, so we bound the instruction
+//! cost by `MAX_COST`.
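// Illustrative sketch (not part of the upstream patch): a source pattern whose MIR has the
// join-then-switch shape described above. Both arms assign a known value to `x`, so the
// `SwitchInt` on `x` can be threaded: each predecessor can jump straight to its final target
// instead of merging first.
fn illustrate(cond: bool) -> u32 {
    let x = if cond { 0u8 } else { 1u8 };
    match x {
        0 => 10,
        _ => 20,
    }
}

fn main() {
    assert_eq!(illustrate(true), 10);
    assert_eq!(illustrate(false), 20);
}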
+ +use rustc_arena::DroplessArena; +use rustc_data_structures::fx::FxHashSet; +use rustc_index::bit_set::BitSet; +use rustc_index::IndexVec; +use rustc_middle::mir::visit::Visitor; +use rustc_middle::mir::*; +use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt}; +use rustc_mir_dataflow::value_analysis::{Map, PlaceIndex, State, TrackElem}; + +use crate::cost_checker::CostChecker; +use crate::MirPass; + +pub struct JumpThreading; + +const MAX_BACKTRACK: usize = 5; +const MAX_COST: usize = 100; +const MAX_PLACES: usize = 100; + +impl<'tcx> MirPass<'tcx> for JumpThreading { + fn is_enabled(&self, sess: &rustc_session::Session) -> bool { + sess.mir_opt_level() >= 4 + } + + #[instrument(skip_all level = "debug")] + fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let def_id = body.source.def_id(); + debug!(?def_id); + + let param_env = tcx.param_env_reveal_all_normalized(def_id); + let map = Map::new(tcx, body, Some(MAX_PLACES)); + let loop_headers = loop_headers(body); + + let arena = DroplessArena::default(); + let mut finder = TOFinder { + tcx, + param_env, + body, + arena: &arena, + map: &map, + loop_headers: &loop_headers, + opportunities: Vec::new(), + }; + + for (bb, bbdata) in body.basic_blocks.iter_enumerated() { + debug!(?bb, term = ?bbdata.terminator()); + if bbdata.is_cleanup || loop_headers.contains(bb) { + continue; + } + let Some((discr, targets)) = bbdata.terminator().kind.as_switch() else { continue }; + let Some(discr) = discr.place() else { continue }; + debug!(?discr, ?bb); + + let discr_ty = discr.ty(body, tcx).ty; + let Ok(discr_layout) = tcx.layout_of(param_env.and(discr_ty)) else { continue }; + + let Some(discr) = finder.map.find(discr.as_ref()) else { continue }; + debug!(?discr); + + let cost = CostChecker::new(tcx, param_env, None, body); + + let mut state = State::new(ConditionSet::default(), &finder.map); + + let conds = if let Some((value, then, else_)) = targets.as_static_if() { + let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { + continue; + }; + arena.alloc_from_iter([ + Condition { value, polarity: Polarity::Eq, target: then }, + Condition { value, polarity: Polarity::Ne, target: else_ }, + ]) + } else { + arena.alloc_from_iter(targets.iter().filter_map(|(value, target)| { + let value = ScalarInt::try_from_uint(value, discr_layout.size)?; + Some(Condition { value, polarity: Polarity::Eq, target }) + })) + }; + let conds = ConditionSet(conds); + state.insert_value_idx(discr, conds, &finder.map); + + finder.find_opportunity(bb, state, cost, 0); + } + + let opportunities = finder.opportunities; + debug!(?opportunities); + if opportunities.is_empty() { + return; + } + + // Verify that we do not thread through a loop header. + for to in opportunities.iter() { + assert!(to.chain.iter().all(|&block| !loop_headers.contains(block))); + } + OpportunitySet::new(body, opportunities).apply(body); + } +} + +#[derive(Debug)] +struct ThreadingOpportunity { + /// The list of `BasicBlock`s from the one that found the opportunity to the `SwitchInt`. + chain: Vec, + /// The `SwitchInt` will be replaced by `Goto { target }`. + target: BasicBlock, +} + +struct TOFinder<'tcx, 'a> { + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + body: &'a Body<'tcx>, + map: &'a Map, + loop_headers: &'a BitSet, + /// We use an arena to avoid cloning the slices when cloning `state`. + arena: &'a DroplessArena, + opportunities: Vec, +} + +/// Represent the following statement. 
If we can prove that the current local is equal/not-equal +/// to `value`, jump to `target`. +#[derive(Copy, Clone, Debug)] +struct Condition { + value: ScalarInt, + polarity: Polarity, + target: BasicBlock, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +enum Polarity { + Ne, + Eq, +} + +impl Condition { + fn matches(&self, value: ScalarInt) -> bool { + (self.value == value) == (self.polarity == Polarity::Eq) + } + + fn inv(mut self) -> Self { + self.polarity = match self.polarity { + Polarity::Eq => Polarity::Ne, + Polarity::Ne => Polarity::Eq, + }; + self + } +} + +#[derive(Copy, Clone, Debug, Default)] +struct ConditionSet<'a>(&'a [Condition]); + +impl<'a> ConditionSet<'a> { + fn iter(self) -> impl Iterator + 'a { + self.0.iter().copied() + } + + fn iter_matches(self, value: ScalarInt) -> impl Iterator + 'a { + self.iter().filter(move |c| c.matches(value)) + } + + fn map(self, arena: &'a DroplessArena, f: impl Fn(Condition) -> Condition) -> ConditionSet<'a> { + ConditionSet(arena.alloc_from_iter(self.iter().map(f))) + } +} + +impl<'tcx, 'a> TOFinder<'tcx, 'a> { + fn is_empty(&self, state: &State>) -> bool { + state.all(|cs| cs.0.is_empty()) + } + + /// Recursion entry point to find threading opportunities. + #[instrument(level = "trace", skip(self, cost), ret)] + fn find_opportunity( + &mut self, + bb: BasicBlock, + mut state: State>, + mut cost: CostChecker<'_, 'tcx>, + depth: usize, + ) { + // Do not thread through loop headers. + if self.loop_headers.contains(bb) { + return; + } + + debug!(cost = ?cost.cost()); + for (statement_index, stmt) in + self.body.basic_blocks[bb].statements.iter().enumerate().rev() + { + if self.is_empty(&state) { + return; + } + + cost.visit_statement(stmt, Location { block: bb, statement_index }); + if cost.cost() > MAX_COST { + return; + } + + // Attempt to turn the `current_condition` on `lhs` into a condition on another place. + self.process_statement(bb, stmt, &mut state); + + // When a statement mutates a place, assignments to that place that happen + // above the mutation cannot fulfill a condition. + // _1 = 5 // Whatever happens here, it won't change the result of a `SwitchInt`. + // _1 = 6 + if let Some((lhs, tail)) = self.mutated_statement(stmt) { + state.flood_with_tail_elem(lhs.as_ref(), tail, self.map, ConditionSet::default()); + } + } + + if self.is_empty(&state) || depth >= MAX_BACKTRACK { + return; + } + + let last_non_rec = self.opportunities.len(); + + let predecessors = &self.body.basic_blocks.predecessors()[bb]; + if let &[pred] = &predecessors[..] && bb != START_BLOCK { + let term = self.body.basic_blocks[pred].terminator(); + match term.kind { + TerminatorKind::SwitchInt { ref discr, ref targets } => { + self.process_switch_int(discr, targets, bb, &mut state); + self.find_opportunity(pred, state, cost, depth + 1); + } + _ => self.recurse_through_terminator(pred, &state, &cost, depth), + } + } else { + for &pred in predecessors { + self.recurse_through_terminator(pred, &state, &cost, depth); + } + } + + let new_tos = &mut self.opportunities[last_non_rec..]; + debug!(?new_tos); + + // Try to deduplicate threading opportunities. + if new_tos.len() > 1 + && new_tos.len() == predecessors.len() + && predecessors + .iter() + .zip(new_tos.iter()) + .all(|(&pred, to)| to.chain == &[pred] && to.target == new_tos[0].target) + { + // All predecessors have a threading opportunity, and they all point to the same block. 
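// Illustrative sketch (not part of the upstream patch): the `Condition::matches` logic defined
// above, restated with plain u128 values so it runs outside the compiler. A condition with
// polarity Eq matches exactly its value; polarity Ne matches everything else.
#[derive(Clone, Copy, PartialEq)]
enum Polarity {
    Eq,
    Ne,
}

struct Condition {
    value: u128,
    polarity: Polarity,
}

impl Condition {
    fn matches(&self, value: u128) -> bool {
        (self.value == value) == (self.polarity == Polarity::Eq)
    }
}

fn main() {
    let eq_zero = Condition { value: 0, polarity: Polarity::Eq };
    let ne_zero = Condition { value: 0, polarity: Polarity::Ne };
    assert!(eq_zero.matches(0) && !eq_zero.matches(1));
    assert!(!ne_zero.matches(0) && ne_zero.matches(1));
}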
+ debug!(?new_tos, "dedup"); + let first = &mut new_tos[0]; + *first = ThreadingOpportunity { chain: vec![bb], target: first.target }; + self.opportunities.truncate(last_non_rec + 1); + return; + } + + for op in self.opportunities[last_non_rec..].iter_mut() { + op.chain.push(bb); + } + } + + /// Extract the mutated place from a statement. + /// + /// This method returns the `Place` so we can flood the state in case of a partial assignment. + /// (_1 as Ok).0 = _5; + /// (_1 as Err).0 = _6; + /// We want to ensure that a `SwitchInt((_1 as Ok).0)` does not see the first assignment, as + /// the value may have been mangled by the second assignment. + /// + /// In case we assign to a discriminant, we return `Some(TrackElem::Discriminant)`, so we can + /// stop at flooding the discriminant, and preserve the variant fields. + /// (_1 as Some).0 = _6; + /// SetDiscriminant(_1, 1); + /// switchInt((_1 as Some).0) + #[instrument(level = "trace", skip(self), ret)] + fn mutated_statement( + &self, + stmt: &Statement<'tcx>, + ) -> Option<(Place<'tcx>, Option)> { + match stmt.kind { + StatementKind::Assign(box (place, _)) + | StatementKind::Deinit(box place) => Some((place, None)), + StatementKind::SetDiscriminant { box place, variant_index: _ } => { + Some((place, Some(TrackElem::Discriminant))) + } + StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => { + Some((Place::from(local), None)) + } + StatementKind::Retag(..) + | StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(..)) + // copy_nonoverlapping takes pointers and mutated the pointed-to value. + | StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(..)) + | StatementKind::AscribeUserType(..) + | StatementKind::Coverage(..) + | StatementKind::FakeRead(..) + | StatementKind::ConstEvalCounter + | StatementKind::PlaceMention(..) + | StatementKind::Nop => None, + } + } + + #[instrument(level = "trace", skip(self))] + fn process_operand( + &mut self, + bb: BasicBlock, + lhs: PlaceIndex, + rhs: &Operand<'tcx>, + state: &mut State>, + ) -> Option { + let register_opportunity = |c: Condition| { + debug!(?bb, ?c.target, "register"); + self.opportunities.push(ThreadingOpportunity { chain: vec![bb], target: c.target }) + }; + + match rhs { + // If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`. + Operand::Constant(constant) => { + let conditions = state.try_get_idx(lhs, self.map)?; + let constant = + constant.const_.normalize(self.tcx, self.param_env).try_to_scalar_int()?; + conditions.iter_matches(constant).for_each(register_opportunity); + } + // Transfer the conditions on the copied rhs. + Operand::Move(rhs) | Operand::Copy(rhs) => { + let rhs = self.map.find(rhs.as_ref())?; + state.insert_place_idx(rhs, lhs, self.map); + } + } + + None + } + + #[instrument(level = "trace", skip(self))] + fn process_statement( + &mut self, + bb: BasicBlock, + stmt: &Statement<'tcx>, + state: &mut State>, + ) -> Option { + let register_opportunity = |c: Condition| { + debug!(?bb, ?c.target, "register"); + self.opportunities.push(ThreadingOpportunity { chain: vec![bb], target: c.target }) + }; + + // Below, `lhs` is the return value of `mutated_statement`, + // the place to which `conditions` apply. 
+ + let discriminant_for_variant = |enum_ty: Ty<'tcx>, variant_index| { + let discr = enum_ty.discriminant_for_variant(self.tcx, variant_index)?; + let discr_layout = self.tcx.layout_of(self.param_env.and(discr.ty)).ok()?; + let scalar = ScalarInt::try_from_uint(discr.val, discr_layout.size)?; + Some(Operand::const_from_scalar( + self.tcx, + discr.ty, + scalar.into(), + rustc_span::DUMMY_SP, + )) + }; + + match &stmt.kind { + // If we expect `discriminant(place) ?= A`, + // we have an opportunity if `variant_index ?= A`. + StatementKind::SetDiscriminant { box place, variant_index } => { + let discr_target = self.map.find_discr(place.as_ref())?; + let enum_ty = place.ty(self.body, self.tcx).ty; + let discr = discriminant_for_variant(enum_ty, *variant_index)?; + self.process_operand(bb, discr_target, &discr, state)?; + } + // If we expect `lhs ?= true`, we have an opportunity if we assume `lhs == true`. + StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume( + Operand::Copy(place) | Operand::Move(place), + )) => { + let conditions = state.try_get(place.as_ref(), self.map)?; + conditions.iter_matches(ScalarInt::TRUE).for_each(register_opportunity); + } + StatementKind::Assign(box (lhs_place, rhs)) => { + if let Some(lhs) = self.map.find(lhs_place.as_ref()) { + match rhs { + Rvalue::Use(operand) => self.process_operand(bb, lhs, operand, state)?, + // Transfer the conditions on the copy rhs. + Rvalue::CopyForDeref(rhs) => { + self.process_operand(bb, lhs, &Operand::Copy(*rhs), state)? + } + Rvalue::Discriminant(rhs) => { + let rhs = self.map.find_discr(rhs.as_ref())?; + state.insert_place_idx(rhs, lhs, self.map); + } + // If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`. + Rvalue::Aggregate(box ref kind, ref operands) => { + let agg_ty = lhs_place.ty(self.body, self.tcx).ty; + let lhs = match kind { + // Do not support unions. + AggregateKind::Adt(.., Some(_)) => return None, + AggregateKind::Adt(_, variant_index, ..) if agg_ty.is_enum() => { + if let Some(discr_target) = self.map.apply(lhs, TrackElem::Discriminant) + && let Some(discr_value) = discriminant_for_variant(agg_ty, *variant_index) + { + self.process_operand(bb, discr_target, &discr_value, state); + } + self.map.apply(lhs, TrackElem::Variant(*variant_index))? + } + _ => lhs, + }; + for (field_index, operand) in operands.iter_enumerated() { + if let Some(field) = + self.map.apply(lhs, TrackElem::Field(field_index)) + { + self.process_operand(bb, field, operand, state); + } + } + } + // Transfer the conditions on the copy rhs, after inversing polarity. + Rvalue::UnaryOp(UnOp::Not, Operand::Move(place) | Operand::Copy(place)) => { + let conditions = state.try_get_idx(lhs, self.map)?; + let place = self.map.find(place.as_ref())?; + let conds = conditions.map(self.arena, Condition::inv); + state.insert_value_idx(place, conds, self.map); + } + // We expect `lhs ?= A`. We found `lhs = Eq(rhs, B)`. + // Create a condition on `rhs ?= B`. 
+ Rvalue::BinaryOp( + op, + box ( + Operand::Move(place) | Operand::Copy(place), + Operand::Constant(value), + ) + | box ( + Operand::Constant(value), + Operand::Move(place) | Operand::Copy(place), + ), + ) => { + let conditions = state.try_get_idx(lhs, self.map)?; + let place = self.map.find(place.as_ref())?; + let equals = match op { + BinOp::Eq => ScalarInt::TRUE, + BinOp::Ne => ScalarInt::FALSE, + _ => return None, + }; + let value = value + .const_ + .normalize(self.tcx, self.param_env) + .try_to_scalar_int()?; + let conds = conditions.map(self.arena, |c| Condition { + value, + polarity: if c.matches(equals) { + Polarity::Eq + } else { + Polarity::Ne + }, + ..c + }); + state.insert_value_idx(place, conds, self.map); + } + + _ => {} + } + } + } + _ => {} + } + + None + } + + #[instrument(level = "trace", skip(self, cost))] + fn recurse_through_terminator( + &mut self, + bb: BasicBlock, + state: &State>, + cost: &CostChecker<'_, 'tcx>, + depth: usize, + ) { + let register_opportunity = |c: Condition| { + debug!(?bb, ?c.target, "register"); + self.opportunities.push(ThreadingOpportunity { chain: vec![bb], target: c.target }) + }; + + let term = self.body.basic_blocks[bb].terminator(); + let place_to_flood = match term.kind { + // We come from a target, so those are not possible. + TerminatorKind::UnwindResume + | TerminatorKind::UnwindTerminate(_) + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::CoroutineDrop => bug!("{term:?} has no terminators"), + // Disallowed during optimizations. + TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::Yield { .. } => bug!("{term:?} invalid"), + // Cannot reason about inline asm. + TerminatorKind::InlineAsm { .. } => return, + // `SwitchInt` is handled specially. + TerminatorKind::SwitchInt { .. } => return, + // We can recurse, no thing particular to do. + TerminatorKind::Goto { .. } => None, + // Flood the overwritten place, and progress through. + TerminatorKind::Drop { place: destination, .. } + | TerminatorKind::Call { destination, .. } => Some(destination), + // Treat as an `assume(cond == expected)`. + TerminatorKind::Assert { ref cond, expected, .. } => { + if let Some(place) = cond.place() + && let Some(conditions) = state.try_get(place.as_ref(), self.map) + { + let expected = if expected { ScalarInt::TRUE } else { ScalarInt::FALSE }; + conditions.iter_matches(expected).for_each(register_opportunity); + } + None + } + }; + + // We can recurse through this terminator. 
+ let mut state = state.clone(); + if let Some(place_to_flood) = place_to_flood { + state.flood_with(place_to_flood.as_ref(), self.map, ConditionSet::default()); + } + self.find_opportunity(bb, state, cost.clone(), depth + 1); + } + + #[instrument(level = "trace", skip(self))] + fn process_switch_int( + &mut self, + discr: &Operand<'tcx>, + targets: &SwitchTargets, + target_bb: BasicBlock, + state: &mut State>, + ) -> Option { + debug_assert_ne!(target_bb, START_BLOCK); + debug_assert_eq!(self.body.basic_blocks.predecessors()[target_bb].len(), 1); + + let discr = discr.place()?; + let discr_ty = discr.ty(self.body, self.tcx).ty; + let discr_layout = self.tcx.layout_of(self.param_env.and(discr_ty)).ok()?; + let conditions = state.try_get(discr.as_ref(), self.map)?; + + if let Some((value, _)) = targets.iter().find(|&(_, target)| target == target_bb) { + let value = ScalarInt::try_from_uint(value, discr_layout.size)?; + debug_assert_eq!(targets.iter().filter(|&(_, target)| target == target_bb).count(), 1); + + // We are inside `target_bb`. Since we have a single predecessor, we know we passed + // through the `SwitchInt` before arriving here. Therefore, we know that + // `discr == value`. If one condition can be fulfilled by `discr == value`, + // that's an opportunity. + for c in conditions.iter_matches(value) { + debug!(?target_bb, ?c.target, "register"); + self.opportunities.push(ThreadingOpportunity { chain: vec![], target: c.target }); + } + } else if let Some((value, _, else_bb)) = targets.as_static_if() + && target_bb == else_bb + { + let value = ScalarInt::try_from_uint(value, discr_layout.size)?; + + // We only know that `discr != value`. That's much weaker information than + // the equality we had in the previous arm. All we can conclude is that + // the replacement condition `discr != value` can be threaded, and nothing else. + for c in conditions.iter() { + if c.value == value && c.polarity == Polarity::Ne { + debug!(?target_bb, ?c.target, "register"); + self.opportunities + .push(ThreadingOpportunity { chain: vec![], target: c.target }); + } + } + } + + None + } +} + +struct OpportunitySet { + opportunities: Vec, + /// For each bb, give the TOs in which it appears. The pair corresponds to the index + /// in `opportunities` and the index in `ThreadingOpportunity::chain`. + involving_tos: IndexVec>, + /// Cache the number of predecessors for each block, as we clear the basic block cache.. + predecessors: IndexVec, +} + +impl OpportunitySet { + fn new(body: &Body<'_>, opportunities: Vec) -> OpportunitySet { + let mut involving_tos = IndexVec::from_elem(Vec::new(), &body.basic_blocks); + for (index, to) in opportunities.iter().enumerate() { + for (ibb, &bb) in to.chain.iter().enumerate() { + involving_tos[bb].push((index, ibb)); + } + involving_tos[to.target].push((index, to.chain.len())); + } + let predecessors = predecessor_count(body); + OpportunitySet { opportunities, involving_tos, predecessors } + } + + /// Apply the opportunities on the graph. + fn apply(&mut self, body: &mut Body<'_>) { + for i in 0..self.opportunities.len() { + self.apply_once(i, body); + } + } + + #[instrument(level = "trace", skip(self, body))] + fn apply_once(&mut self, index: usize, body: &mut Body<'_>) { + debug!(?self.predecessors); + debug!(?self.involving_tos); + + // Check that `predecessors` satisfies its invariant. + debug_assert_eq!(self.predecessors, predecessor_count(body)); + + // Remove the TO from the vector to allow modifying the other ones later. 
+ let op = &mut self.opportunities[index]; + debug!(?op); + let op_chain = std::mem::take(&mut op.chain); + let op_target = op.target; + debug_assert_eq!(op_chain.len(), op_chain.iter().collect::>().len()); + + let Some((current, chain)) = op_chain.split_first() else { return }; + let basic_blocks = body.basic_blocks.as_mut(); + + // Invariant: the control-flow is well-formed at the end of each iteration. + let mut current = *current; + for &succ in chain { + debug!(?current, ?succ); + + // `succ` must be a successor of `current`. If it is not, this means this TO is not + // satisfiable and a previous TO erased this edge, so we bail out. + if basic_blocks[current].terminator().successors().find(|s| *s == succ).is_none() { + debug!("impossible"); + return; + } + + // Fast path: `succ` is only used once, so we can reuse it directly. + if self.predecessors[succ] == 1 { + debug!("single"); + current = succ; + continue; + } + + let new_succ = basic_blocks.push(basic_blocks[succ].clone()); + debug!(?new_succ); + + // Replace `succ` by `new_succ` where it appears. + let mut num_edges = 0; + for s in basic_blocks[current].terminator_mut().successors_mut() { + if *s == succ { + *s = new_succ; + num_edges += 1; + } + } + + // Update predecessors with the new block. + let _new_succ = self.predecessors.push(num_edges); + debug_assert_eq!(new_succ, _new_succ); + self.predecessors[succ] -= num_edges; + self.update_predecessor_count(basic_blocks[new_succ].terminator(), Update::Incr); + + // Replace the `current -> succ` edge by `current -> new_succ` in all the following + // TOs. This is necessary to avoid trying to thread through a non-existing edge. We + // use `involving_tos` here to avoid traversing the full set of TOs on each iteration. + let mut new_involved = Vec::new(); + for &(to_index, in_to_index) in &self.involving_tos[current] { + // That TO has already been applied, do nothing. + if to_index <= index { + continue; + } + + let other_to = &mut self.opportunities[to_index]; + if other_to.chain.get(in_to_index) != Some(¤t) { + continue; + } + let s = other_to.chain.get_mut(in_to_index + 1).unwrap_or(&mut other_to.target); + if *s == succ { + // `other_to` references the `current -> succ` edge, so replace `succ`. + *s = new_succ; + new_involved.push((to_index, in_to_index + 1)); + } + } + + // The TOs that we just updated now reference `new_succ`. Update `involving_tos` + // in case we need to duplicate an edge starting at `new_succ` later. + let _new_succ = self.involving_tos.push(new_involved); + debug_assert_eq!(new_succ, _new_succ); + + current = new_succ; + } + + let current = &mut basic_blocks[current]; + self.update_predecessor_count(current.terminator(), Update::Decr); + current.terminator_mut().kind = TerminatorKind::Goto { target: op_target }; + self.predecessors[op_target] += 1; + } + + fn update_predecessor_count(&mut self, terminator: &Terminator<'_>, incr: Update) { + match incr { + Update::Incr => { + for s in terminator.successors() { + self.predecessors[s] += 1; + } + } + Update::Decr => { + for s in terminator.successors() { + self.predecessors[s] -= 1; + } + } + } + } +} + +fn predecessor_count(body: &Body<'_>) -> IndexVec { + let mut predecessors: IndexVec<_, _> = + body.basic_blocks.predecessors().iter().map(|ps| ps.len()).collect(); + predecessors[START_BLOCK] += 1; // Account for the implicit entry edge. + predecessors +} + +enum Update { + Incr, + Decr, +} + +/// Compute the set of loop headers in the given body. 
We define a loop header as a block which has +/// at least a predecessor which it dominates. This definition is only correct for reducible CFGs. +/// But if the CFG is already irreducible, there is no point in trying much harder. +/// is already irreducible. +fn loop_headers(body: &Body<'_>) -> BitSet { + let mut loop_headers = BitSet::new_empty(body.basic_blocks.len()); + let dominators = body.basic_blocks.dominators(); + // Only visit reachable blocks. + for (bb, bbdata) in traversal::preorder(body) { + for succ in bbdata.terminator().successors() { + if dominators.dominates(succ, bb) { + loop_headers.insert(succ); + } + } + } + loop_headers +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/large_enums.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/large_enums.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/large_enums.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/large_enums.rs 2023-12-21 16:55:28.000000000 +0000 @@ -30,6 +30,9 @@ impl<'tcx> MirPass<'tcx> for EnumSizeOpt { fn is_enabled(&self, sess: &Session) -> bool { + // There are some differences in behavior on wasm and ARM that are not properly + // understood, so we conservatively treat this optimization as unsound: + // https://github.com/rust-lang/rust/pull/85158#issuecomment-1101836457 sess.opts.unstable_opts.unsound_mir_opts || sess.mir_opt_level() >= 3 } fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,6 +2,7 @@ #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] #![feature(box_patterns)] +#![feature(cow_is_borrowed)] #![feature(decl_macro)] #![feature(is_sorted)] #![feature(let_chains)] @@ -20,6 +21,7 @@ #[macro_use] extern crate rustc_middle; +use hir::ConstContext; use required_consts::RequiredConstsVisitor; use rustc_const_eval::util; use rustc_data_structures::fx::FxIndexSet; @@ -61,7 +63,10 @@ mod const_prop; mod const_prop_lint; mod copy_prop; +mod coroutine; +mod cost_checker; mod coverage; +mod cross_crate_inline; mod ctfe_limit; mod dataflow_const_prop; mod dead_store_elimination; @@ -76,10 +81,10 @@ mod errors; mod ffi_unwind_calls; mod function_item_references; -mod generator; mod gvn; pub mod inline; mod instsimplify; +mod jump_threading; mod large_enums; mod lower_intrinsics; mod lower_slice_len; @@ -123,6 +128,7 @@ coverage::query::provide(providers); ffi_unwind_calls::provide(providers); shim::provide(providers); + cross_crate_inline::provide(providers); *providers = Providers { mir_keys, mir_const, @@ -130,7 +136,7 @@ mir_promoted, mir_drops_elaborated_and_const_checked, mir_for_ctfe, - mir_generator_witnesses: generator::mir_generator_witnesses, + mir_coroutine_witnesses: coroutine::mir_coroutine_witnesses, optimized_mir, is_mir_available, is_ctfe_mir_available: |tcx, did| is_mir_available(tcx, did), @@ -162,37 +168,50 @@ && tcx.item_name(def_id) == sym::const_eval_select && tcx.is_intrinsic(def_id) => { - let [tupled_args, called_in_const, called_at_rt]: [_; 3] = std::mem::take(args).try_into().unwrap(); + let [tupled_args, called_in_const, 
called_at_rt]: [_; 3] = + std::mem::take(args).try_into().unwrap(); let ty = tupled_args.ty(&body.local_decls, tcx); let fields = ty.tuple_fields(); let num_args = fields.len(); - let func = if context == hir::Constness::Const { called_in_const } else { called_at_rt }; - let (method, place): (fn(Place<'tcx>) -> Operand<'tcx>, Place<'tcx>) = match tupled_args { - Operand::Constant(_) => { - // there is no good way of extracting a tuple arg from a constant (const generic stuff) - // so we just create a temporary and deconstruct that. - let local = body.local_decls.push(LocalDecl::new(ty, fn_span)); - bb.statements.push(Statement { - source_info: SourceInfo::outermost(fn_span), - kind: StatementKind::Assign(Box::new((local.into(), Rvalue::Use(tupled_args.clone())))), - }); - (Operand::Move, local.into()) - } - Operand::Move(place) => (Operand::Move, place), - Operand::Copy(place) => (Operand::Copy, place), - }; - let place_elems = place.projection; - let arguments = (0..num_args).map(|x| { - let mut place_elems = place_elems.to_vec(); - place_elems.push(ProjectionElem::Field(x.into(), fields[x])); - let projection = tcx.mk_place_elems(&place_elems); - let place = Place { - local: place.local, - projection, + let func = + if context == hir::Constness::Const { called_in_const } else { called_at_rt }; + let (method, place): (fn(Place<'tcx>) -> Operand<'tcx>, Place<'tcx>) = + match tupled_args { + Operand::Constant(_) => { + // there is no good way of extracting a tuple arg from a constant (const generic stuff) + // so we just create a temporary and deconstruct that. + let local = body.local_decls.push(LocalDecl::new(ty, fn_span)); + bb.statements.push(Statement { + source_info: SourceInfo::outermost(fn_span), + kind: StatementKind::Assign(Box::new(( + local.into(), + Rvalue::Use(tupled_args.clone()), + ))), + }); + (Operand::Move, local.into()) + } + Operand::Move(place) => (Operand::Move, place), + Operand::Copy(place) => (Operand::Copy, place), }; - method(place) - }).collect(); - terminator.kind = TerminatorKind::Call { func, args: arguments, destination, target, unwind, call_source: CallSource::Misc, fn_span }; + let place_elems = place.projection; + let arguments = (0..num_args) + .map(|x| { + let mut place_elems = place_elems.to_vec(); + place_elems.push(ProjectionElem::Field(x.into(), fields[x])); + let projection = tcx.mk_place_elems(&place_elems); + let place = Place { local: place.local, projection }; + method(place) + }) + .collect(); + terminator.kind = TerminatorKind::Call { + func, + args: arguments, + destination, + target, + unwind, + call_source: CallSource::Misc, + fn_span, + }; } _ => {} } @@ -234,8 +253,13 @@ let const_kind = tcx.hir().body_const_context(def); // No need to const-check a non-const `fn`. - if const_kind.is_none() { - return Default::default(); + match const_kind { + Some(ConstContext::Const { .. } | ConstContext::Static(_)) + | Some(ConstContext::ConstFn) => {} + None => span_bug!( + tcx.def_span(def), + "`mir_const_qualif` should only be called on const fns and const items" + ), } // N.B., this `borrow()` is guaranteed to be valid (i.e., the value @@ -300,7 +324,21 @@ // Ensure that we compute the `mir_const_qualif` for constants at // this point, before we steal the mir-const result. // Also this means promotion can rely on all const checks having been done. 
- let const_qualifs = tcx.mir_const_qualif(def); + + let const_qualifs = match tcx.def_kind(def) { + DefKind::Fn | DefKind::AssocFn | DefKind::Closure + if tcx.constness(def) == hir::Constness::Const + || tcx.is_const_default_method(def.to_def_id()) => + { + tcx.mir_const_qualif(def) + } + DefKind::AssocConst + | DefKind::Const + | DefKind::Static(_) + | DefKind::InlineConst + | DefKind::AnonConst => tcx.mir_const_qualif(def), + _ => ConstQualifs::default(), + }; let mut body = tcx.mir_const(def).steal(); if let Some(error_reported) = const_qualifs.tainted_by_errors { body.tainted_by_errors = Some(error_reported); @@ -360,15 +398,15 @@ /// mir borrowck *before* doing so in order to ensure that borrowck can be run and doesn't /// end up missing the source MIR due to stealing happening. fn mir_drops_elaborated_and_const_checked(tcx: TyCtxt<'_>, def: LocalDefId) -> &Steal> { - if let DefKind::Generator = tcx.def_kind(def) { - tcx.ensure_with_value().mir_generator_witnesses(def); + if let DefKind::Coroutine = tcx.def_kind(def) { + tcx.ensure_with_value().mir_coroutine_witnesses(def); } let mir_borrowck = tcx.mir_borrowck(def); let is_fn_like = tcx.def_kind(def).is_fn_like(); if is_fn_like { // Do not compute the mir call graph without said call graph actually being used. - if inline::Inline.is_enabled(&tcx.sess) { + if pm::should_run_pass(tcx, &inline::Inline) { tcx.ensure_with_value().mir_inliner_callees(ty::InstanceDef::Item(def.to_def_id())); } } @@ -494,9 +532,9 @@ // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late, // but before optimizations begin. &elaborate_box_derefs::ElaborateBoxDerefs, - &generator::StateTransform, + &coroutine::StateTransform, &add_retag::AddRetag, - &Lint(const_prop_lint::ConstProp), + &Lint(const_prop_lint::ConstPropLint), ]; pm::run_passes_no_validate(tcx, body, passes, Some(MirPhase::Runtime(RuntimePhase::Initial))); } @@ -530,10 +568,11 @@ &[ &check_alignment::CheckAlignment, &lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise actual call will be almost always inlined. Also simple, so can just do first - &unreachable_prop::UnreachablePropagation, + &inline::Inline, + // Substitutions during inlining may introduce switch on enums with uninhabited branches. &uninhabited_enum_branching::UninhabitedEnumBranching, + &unreachable_prop::UnreachablePropagation, &o1(simplify::SimplifyCfg::AfterUninhabitedEnumBranching), - &inline::Inline, &remove_storage_markers::RemoveStorageMarkers, &remove_zsts::RemoveZsts, &normalize_array_len::NormalizeArrayLen, // has to run after `slice::len` lowering @@ -553,11 +592,11 @@ &separate_const_switch::SeparateConstSwitch, &const_prop::ConstProp, &gvn::GVN, + &simplify::SimplifyLocals::AfterGVN, &dataflow_const_prop::DataflowConstProp, - // - // Const-prop runs unconditionally, but doesn't mutate the MIR at mir-opt-level=0. &const_debuginfo::ConstDebugInfo, &o1(simplify_branches::SimplifyConstCondition::AfterConstProp), + &jump_threading::JumpThreading, &early_otherwise_branch::EarlyOtherwiseBranch, &simplify_comparison_integral::SimplifyComparisonIntegral, &dead_store_elimination::DeadStoreElimination, @@ -613,6 +652,15 @@ return body; } + // If `mir_drops_elaborated_and_const_checked` found that the current body has unsatisfiable + // predicates, it will shrink the MIR to a single `unreachable` terminator. + // More generally, if MIR is a lone `unreachable`, there is nothing to optimize. 
+ if let TerminatorKind::Unreachable = body.basic_blocks[START_BLOCK].terminator().kind + && body.basic_blocks[START_BLOCK].statements.is_empty() + { + return body; + } + run_optimization_passes(tcx, &mut body); body diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lower_intrinsics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lower_intrinsics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lower_intrinsics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lower_intrinsics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,9 +2,8 @@ use crate::MirPass; use rustc_middle::mir::*; -use rustc_middle::ty::GenericArgsRef; -use rustc_middle::ty::{self, Ty, TyCtxt}; -use rustc_span::symbol::{sym, Symbol}; +use rustc_middle::ty::{self, TyCtxt}; +use rustc_span::symbol::sym; use rustc_target::abi::{FieldIdx, VariantIdx}; pub struct LowerIntrinsics; @@ -16,12 +15,10 @@ let terminator = block.terminator.as_mut().unwrap(); if let TerminatorKind::Call { func, args, destination, target, .. } = &mut terminator.kind + && let ty::FnDef(def_id, generic_args) = *func.ty(local_decls, tcx).kind() + && tcx.is_intrinsic(def_id) { - let func_ty = func.ty(local_decls, tcx); - let Some((intrinsic_name, generic_args)) = resolve_rust_intrinsic(tcx, func_ty) - else { - continue; - }; + let intrinsic_name = tcx.item_name(def_id); match intrinsic_name { sym::unreachable => { terminator.kind = TerminatorKind::Unreachable; @@ -169,12 +166,16 @@ let [arg] = args.as_slice() else { span_bug!(terminator.source_info.span, "Wrong number of arguments"); }; - let derefed_place = - if let Some(place) = arg.place() && let Some(local) = place.as_local() { - tcx.mk_place_deref(local.into()) - } else { - span_bug!(terminator.source_info.span, "Only passing a local is supported"); - }; + let derefed_place = if let Some(place) = arg.place() + && let Some(local) = place.as_local() + { + tcx.mk_place_deref(local.into()) + } else { + span_bug!( + terminator.source_info.span, + "Only passing a local is supported" + ); + }; // Add new statement at the end of the block that does the read, and patch // up the terminator. 
block.statements.push(Statement { @@ -201,12 +202,16 @@ "Wrong number of arguments for write_via_move intrinsic", ); }; - let derefed_place = - if let Some(place) = ptr.place() && let Some(local) = place.as_local() { - tcx.mk_place_deref(local.into()) - } else { - span_bug!(terminator.source_info.span, "Only passing a local is supported"); - }; + let derefed_place = if let Some(place) = ptr.place() + && let Some(local) = place.as_local() + { + tcx.mk_place_deref(local.into()) + } else { + span_bug!( + terminator.source_info.span, + "Only passing a local is supported" + ); + }; block.statements.push(Statement { source_info: terminator.source_info, kind: StatementKind::Assign(Box::new(( @@ -309,15 +314,3 @@ } } } - -fn resolve_rust_intrinsic<'tcx>( - tcx: TyCtxt<'tcx>, - func_ty: Ty<'tcx>, -) -> Option<(Symbol, GenericArgsRef<'tcx>)> { - if let ty::FnDef(def_id, args) = *func_ty.kind() { - if tcx.is_intrinsic(def_id) { - return Some((tcx.item_name(def_id), args)); - } - } - None -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lower_slice_len.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lower_slice_len.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lower_slice_len.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/lower_slice_len.rs 2023-12-21 16:55:28.000000000 +0000 @@ -34,67 +34,43 @@ } } -struct SliceLenPatchInformation<'tcx> { - add_statement: Statement<'tcx>, - new_terminator_kind: TerminatorKind<'tcx>, -} - fn lower_slice_len_call<'tcx>( tcx: TyCtxt<'tcx>, block: &mut BasicBlockData<'tcx>, local_decls: &IndexSlice>, slice_len_fn_item_def_id: DefId, ) { - let mut patch_found: Option> = None; - let terminator = block.terminator(); - match &terminator.kind { - TerminatorKind::Call { - func, - args, - destination, - target: Some(bb), - call_source: CallSource::Normal, - .. - } => { - // some heuristics for fast rejection - if args.len() != 1 { - return; - } - let Some(arg) = args[0].place() else { return }; - let func_ty = func.ty(local_decls, tcx); - match func_ty.kind() { - ty::FnDef(fn_def_id, _) if fn_def_id == &slice_len_fn_item_def_id => { - // perform modifications - // from something like `_5 = core::slice::::len(move _6) -> bb1` - // into: - // ``` - // _5 = Len(*_6) - // goto bb1 - // ``` - - // make new RValue for Len - let deref_arg = tcx.mk_place_deref(arg); - let r_value = Rvalue::Len(deref_arg); - let len_statement_kind = - StatementKind::Assign(Box::new((*destination, r_value))); - let add_statement = - Statement { kind: len_statement_kind, source_info: terminator.source_info }; + if let TerminatorKind::Call { + func, + args, + destination, + target: Some(bb), + call_source: CallSource::Normal, + .. + } = &terminator.kind + // some heuristics for fast rejection + && let [arg] = &args[..] 
+ && let Some(arg) = arg.place() + && let ty::FnDef(fn_def_id, _) = func.ty(local_decls, tcx).kind() + && *fn_def_id == slice_len_fn_item_def_id + { + // perform modifications from something like: + // _5 = core::slice::::len(move _6) -> bb1 + // into: + // _5 = Len(*_6) + // goto bb1 + + // make new RValue for Len + let deref_arg = tcx.mk_place_deref(arg); + let r_value = Rvalue::Len(deref_arg); + let len_statement_kind = StatementKind::Assign(Box::new((*destination, r_value))); + let add_statement = + Statement { kind: len_statement_kind, source_info: terminator.source_info }; - // modify terminator into simple Goto - let new_terminator_kind = TerminatorKind::Goto { target: *bb }; - - let patch = SliceLenPatchInformation { add_statement, new_terminator_kind }; - - patch_found = Some(patch); - } - _ => {} - } - } - _ => {} - } + // modify terminator into simple Goto + let new_terminator_kind = TerminatorKind::Goto { target: *bb }; - if let Some(SliceLenPatchInformation { add_statement, new_terminator_kind }) = patch_found { block.statements.push(add_statement); block.terminator_mut().kind = new_terminator_kind; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/multiple_return_terminators.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/multiple_return_terminators.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/multiple_return_terminators.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/multiple_return_terminators.rs 2023-12-21 16:55:28.000000000 +0000 @@ -38,6 +38,6 @@ } } - simplify::remove_dead_blocks(tcx, body) + simplify::remove_dead_blocks(body) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/normalize_array_len.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/normalize_array_len.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/normalize_array_len.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/normalize_array_len.rs 2023-12-21 16:55:28.000000000 +0000 @@ -57,7 +57,9 @@ } // The length information is stored in the fat pointer, so we treat `operand` as a value. 
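For context on the `lower_slice_len` rewrite shown just above, which turns a call terminator of the form `_5 = core::slice::<impl [T]>::len(move _6) -> bb1` into a `Len` statement followed by a `goto`: here is a small program whose optimized MIR exercises the pass. The MIR in the comments is paraphrased from the pass's own comment; the exact local numbering and the precise invocation (something like `rustc -O --emit=mir`) should be treated as approximate.

```rust
// Compiling this with optimizations and MIR output enabled shows the effect of
// `LowerSliceLenCalls` (local names vary by compiler version): the call terminator
//     _2 = core::slice::<impl [u8]>::len(move _3) -> bb1
// becomes a plain statement plus an unconditional jump:
//     _2 = Len(*_3);
//     goto -> bb1;
pub fn slice_len(xs: &[u8]) -> usize {
    xs.len()
}

fn main() {
    assert_eq!(slice_len(&[1, 2, 3]), 3);
}
```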
Rvalue::Use(operand) => { - if let Some(rhs) = operand.place() && let Some(rhs) = rhs.as_local() { + if let Some(rhs) = operand.place() + && let Some(rhs) = rhs.as_local() + { slice_lengths[local] = slice_lengths[rhs]; } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/nrvo.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/nrvo.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/nrvo.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/nrvo.rs 2023-12-21 16:55:28.000000000 +0000 @@ -34,7 +34,7 @@ impl<'tcx> MirPass<'tcx> for RenameReturnPlace { fn is_enabled(&self, sess: &rustc_session::Session) -> bool { - // #111005 + // unsound: #111005 sess.mir_opt_level() > 0 && sess.opts.unstable_opts.unsound_mir_opts } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/pass_manager.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/pass_manager.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/pass_manager.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/pass_manager.rs 2023-12-21 16:55:28.000000000 +0000 @@ -83,6 +83,25 @@ run_passes_inner(tcx, body, passes, phase_change, true); } +pub fn should_run_pass<'tcx, P>(tcx: TyCtxt<'tcx>, pass: &P) -> bool +where + P: MirPass<'tcx> + ?Sized, +{ + let name = pass.name(); + + let overridden_passes = &tcx.sess.opts.unstable_opts.mir_enable_passes; + let overridden = + overridden_passes.iter().rev().find(|(s, _)| s == &*name).map(|(_name, polarity)| { + trace!( + pass = %name, + "{} as requested by flag", + if *polarity { "Running" } else { "Not running" }, + ); + *polarity + }); + overridden.unwrap_or_else(|| pass.is_enabled(&tcx.sess)) +} + fn run_passes_inner<'tcx>( tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>, @@ -100,19 +119,9 @@ for pass in passes { let name = pass.name(); - let overridden = overridden_passes.iter().rev().find(|(s, _)| s == &*name).map( - |(_name, polarity)| { - trace!( - pass = %name, - "{} as requested by flag", - if *polarity { "Running" } else { "Not running" }, - ); - *polarity - }, - ); - if !overridden.unwrap_or_else(|| pass.is_enabled(&tcx.sess)) { + if !should_run_pass(tcx, *pass) { continue; - } + }; let dump_enabled = pass.is_mir_dump_enabled(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ref_prop.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ref_prop.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ref_prop.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ref_prop.rs 2023-12-21 16:55:28.000000000 +0000 @@ -210,14 +210,17 @@ // have been visited before. Rvalue::Use(Operand::Copy(place) | Operand::Move(place)) | Rvalue::CopyForDeref(place) => { - if let Some(rhs) = place.as_local() && ssa.is_ssa(rhs) { + if let Some(rhs) = place.as_local() + && ssa.is_ssa(rhs) + { let target = targets[rhs]; // Only see through immutable reference and pointers, as we do not know yet if // mutable references are fully replaced. 
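Earlier in this same stretch of the patch, the per-pass override lookup in `run_passes_inner` is factored out into `should_run_pass`, so that `mir_drops_elaborated_and_const_checked` can ask whether `Inline` will actually run (respecting `-Zmir-enable-passes`) before bothering to compute the inliner call graph. Below is a toy version of that lookup, with plain strings standing in for pass names and a slice of `(name, polarity)` pairs standing in for the parsed flag; these simplified types are inventions for the example, not the rustc internals.

```rust
/// A toy `should_run_pass`: the last matching override wins, and in its absence the
/// pass's own default (its `is_enabled` result in rustc) applies.
fn should_run(name: &str, overrides: &[(&str, bool)], default_enabled: bool) -> bool {
    overrides
        .iter()
        .rev() // later command-line occurrences override earlier ones
        .find(|(n, _)| *n == name)
        .map(|&(_, polarity)| polarity)
        .unwrap_or(default_enabled)
}

fn main() {
    let overrides = [("Inline", false), ("Inline", true)];
    assert!(should_run("Inline", &overrides, false)); // last override enables it
    assert!(!should_run("JumpThreading", &[("JumpThreading", false)], true));
    assert!(should_run("GVN", &overrides, true)); // no override: fall back to the default
}
```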
if !needs_unique && matches!(target, Value::Pointer(..)) { targets[local] = target; } else { - targets[local] = Value::Pointer(tcx.mk_place_deref(rhs.into()), needs_unique); + targets[local] = + Value::Pointer(tcx.mk_place_deref(rhs.into()), needs_unique); } } } @@ -365,7 +368,7 @@ *place = Place::from(target.local).project_deeper(rest, self.tcx); self.any_replacement = true; } else { - break + break; } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs 2023-12-21 16:55:28.000000000 +0000 @@ -69,7 +69,7 @@ | TerminatorKind::FalseUnwind { .. } => { terminator.successors().all(|succ| nop_landing_pads.contains(succ)) } - TerminatorKind::GeneratorDrop + TerminatorKind::CoroutineDrop | TerminatorKind::Yield { .. } | TerminatorKind::Return | TerminatorKind::UnwindTerminate(_) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_uninit_drops.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_uninit_drops.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_uninit_drops.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_uninit_drops.rs 2023-12-21 16:55:28.000000000 +0000 @@ -24,11 +24,8 @@ impl<'tcx> MirPass<'tcx> for RemoveUninitDrops { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let param_env = tcx.param_env(body.source.def_id()); - let Ok(move_data) = MoveData::gather_moves(body, tcx, param_env) else { - // We could continue if there are move errors, but there's not much point since our - // init data isn't complete. - return; - }; + let move_data = + MoveData::gather_moves(&body, tcx, param_env, |ty| ty.needs_drop(tcx, param_env)); let mdpe = MoveDataParamEnv { move_data, param_env }; let mut maybe_inits = MaybeInitializedPlaces::new(tcx, body, &mdpe) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_zsts.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_zsts.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_zsts.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/remove_zsts.rs 2023-12-21 16:55:28.000000000 +0000 @@ -13,8 +13,8 @@ } fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - // Avoid query cycles (generators require optimized MIR for layout). - if tcx.type_of(body.source.def_id()).instantiate_identity().is_generator() { + // Avoid query cycles (coroutines require optimized MIR for layout). 
+ if tcx.type_of(body.source.def_id()).instantiate_identity().is_coroutine() { return; } let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id()); @@ -126,7 +126,10 @@ && let ty = place_for_ty.ty(self.local_decls, self.tcx).ty && self.known_to_be_zst(ty) && self.tcx.consider_optimizing(|| { - format!("RemoveZsts - Place: {:?} SourceInfo: {:?}", place_for_ty, statement.source_info) + format!( + "RemoveZsts - Place: {:?} SourceInfo: {:?}", + place_for_ty, statement.source_info + ) }) { statement.make_nop(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/separate_const_switch.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/separate_const_switch.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/separate_const_switch.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/separate_const_switch.rs 2023-12-21 16:55:28.000000000 +0000 @@ -118,7 +118,7 @@ | TerminatorKind::Return | TerminatorKind::Unreachable | TerminatorKind::InlineAsm { .. } - | TerminatorKind::GeneratorDrop => { + | TerminatorKind::CoroutineDrop => { continue 'predec_iter; } } @@ -169,7 +169,7 @@ | TerminatorKind::UnwindTerminate(_) | TerminatorKind::Return | TerminatorKind::Unreachable - | TerminatorKind::GeneratorDrop + | TerminatorKind::CoroutineDrop | TerminatorKind::Assert { .. } | TerminatorKind::FalseUnwind { .. } | TerminatorKind::Drop { .. } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/shim.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/shim.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/shim.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/shim.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,7 +4,7 @@ use rustc_middle::mir::*; use rustc_middle::query::Providers; use rustc_middle::ty::GenericArgs; -use rustc_middle::ty::{self, EarlyBinder, GeneratorArgs, Ty, TyCtxt}; +use rustc_middle::ty::{self, CoroutineArgs, EarlyBinder, Ty, TyCtxt}; use rustc_target::abi::{FieldIdx, VariantIdx, FIRST_VARIANT}; use rustc_index::{Idx, IndexVec}; @@ -67,18 +67,20 @@ } ty::InstanceDef::DropGlue(def_id, ty) => { - // FIXME(#91576): Drop shims for generators aren't subject to the MIR passes at the end + // FIXME(#91576): Drop shims for coroutines aren't subject to the MIR passes at the end // of this function. Is this intentional? - if let Some(ty::Generator(gen_def_id, args, _)) = ty.map(Ty::kind) { - let body = tcx.optimized_mir(*gen_def_id).generator_drop().unwrap(); + if let Some(ty::Coroutine(coroutine_def_id, args, _)) = ty.map(Ty::kind) { + let body = tcx.optimized_mir(*coroutine_def_id).coroutine_drop().unwrap(); let mut body = EarlyBinder::bind(body.clone()).instantiate(tcx, args); debug!("make_shim({:?}) = {:?}", instance, body); - // Run empty passes to mark phase change and perform validation. 
pm::run_passes( tcx, &mut body, - &[], + &[ + &abort_unwinding_calls::AbortUnwindingCalls, + &add_call_guards::CriticalCallEdges, + ], Some(MirPhase::Runtime(RuntimePhase::Optimized)), ); @@ -171,7 +173,7 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option>) -> Body<'tcx> { debug!("build_drop_shim(def_id={:?}, ty={:?})", def_id, ty); - assert!(!matches!(ty, Some(ty) if ty.is_generator())); + assert!(!matches!(ty, Some(ty) if ty.is_coroutine())); let args = if let Some(ty) = ty { tcx.mk_args(&[ty.into()]) @@ -392,8 +394,8 @@ _ if is_copy => builder.copy_shim(), ty::Closure(_, args) => builder.tuple_like_shim(dest, src, args.as_closure().upvar_tys()), ty::Tuple(..) => builder.tuple_like_shim(dest, src, self_ty.tuple_fields()), - ty::Generator(gen_def_id, args, hir::Movability::Movable) => { - builder.generator_shim(dest, src, *gen_def_id, args.as_generator()) + ty::Coroutine(coroutine_def_id, args, hir::Movability::Movable) => { + builder.coroutine_shim(dest, src, *coroutine_def_id, args.as_coroutine()) } _ => bug!("clone shim for `{:?}` which is not `Copy` and is not an aggregate", self_ty), }; @@ -593,12 +595,12 @@ let _final_cleanup_block = self.clone_fields(dest, src, target, unwind, tys); } - fn generator_shim( + fn coroutine_shim( &mut self, dest: Place<'tcx>, src: Place<'tcx>, - gen_def_id: DefId, - args: GeneratorArgs<'tcx>, + coroutine_def_id: DefId, + args: CoroutineArgs<'tcx>, ) { self.block(vec![], TerminatorKind::Goto { target: self.block_index_offset(3) }, false); let unwind = self.block(vec![], TerminatorKind::UnwindResume, true); @@ -607,8 +609,8 @@ let unwind = self.clone_fields(dest, src, switch, unwind, args.upvar_tys()); let target = self.block(vec![], TerminatorKind::Return, false); let unreachable = self.block(vec![], TerminatorKind::Unreachable, false); - let mut cases = Vec::with_capacity(args.state_tys(gen_def_id, self.tcx).count()); - for (index, state_tys) in args.state_tys(gen_def_id, self.tcx).enumerate() { + let mut cases = Vec::with_capacity(args.state_tys(coroutine_def_id, self.tcx).count()); + for (index, state_tys) in args.state_tys(coroutine_def_id, self.tcx).enumerate() { let variant_index = VariantIdx::new(index); let dest = self.tcx.mk_place_downcast_unnamed(dest, variant_index); let src = self.tcx.mk_place_downcast_unnamed(src, variant_index); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/simplify.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/simplify.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/simplify.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/simplify.rs 2023-12-21 16:55:28.000000000 +0000 @@ -28,10 +28,8 @@ //! return. 
use crate::MirPass; -use rustc_data_structures::fx::{FxHashSet, FxIndexSet}; -use rustc_index::bit_set::BitSet; +use rustc_data_structures::fx::FxIndexSet; use rustc_index::{Idx, IndexSlice, IndexVec}; -use rustc_middle::mir::coverage::*; use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; @@ -68,7 +66,7 @@ pub fn simplify_cfg<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { CfgSimplifier::new(body).simplify(); remove_duplicate_unreachable_blocks(tcx, body); - remove_dead_blocks(tcx, body); + remove_dead_blocks(body); // FIXME: Should probably be moved into some kind of pass manager body.basic_blocks_mut().raw.shrink_to_fit(); @@ -337,7 +335,7 @@ } } -pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { +pub fn remove_dead_blocks(body: &mut Body<'_>) { let reachable = traversal::reachable_as_bitset(body); let num_blocks = body.basic_blocks.len(); if num_blocks == reachable.count() { @@ -345,10 +343,6 @@ } let basic_blocks = body.basic_blocks.as_mut(); - let source_scopes = &body.source_scopes; - if tcx.sess.instrument_coverage() { - save_unreachable_coverage(basic_blocks, source_scopes, &reachable); - } let mut replacements: Vec<_> = (0..num_blocks).map(BasicBlock::new).collect(); let mut orig_index = 0; @@ -370,99 +364,9 @@ } } -/// Some MIR transforms can determine at compile time that a sequences of -/// statements will never be executed, so they can be dropped from the MIR. -/// For example, an `if` or `else` block that is guaranteed to never be executed -/// because its condition can be evaluated at compile time, such as by const -/// evaluation: `if false { ... }`. -/// -/// Those statements are bypassed by redirecting paths in the CFG around the -/// `dead blocks`; but with `-C instrument-coverage`, the dead blocks usually -/// include `Coverage` statements representing the Rust source code regions to -/// be counted at runtime. Without these `Coverage` statements, the regions are -/// lost, and the Rust source code will show no coverage information. -/// -/// What we want to show in a coverage report is the dead code with coverage -/// counts of `0`. To do this, we need to save the code regions, by injecting -/// `Unreachable` coverage statements. These are non-executable statements whose -/// code regions are still recorded in the coverage map, representing regions -/// with `0` executions. -/// -/// If there are no live `Counter` `Coverage` statements remaining, we remove -/// `Coverage` statements along with the dead blocks. Since at least one -/// counter per function is required by LLVM (and necessary, to add the -/// `function_hash` to the counter's call to the LLVM intrinsic -/// `instrprof.increment()`). -/// -/// The `generator::StateTransform` MIR pass and MIR inlining can create -/// atypical conditions, where all live `Counter`s are dropped from the MIR. -/// -/// With MIR inlining we can have coverage counters belonging to different -/// instances in a single body, so the strategy described above is applied to -/// coverage counters from each instance individually. -fn save_unreachable_coverage( - basic_blocks: &mut IndexSlice>, - source_scopes: &IndexSlice>, - reachable: &BitSet, -) { - // Identify instances that still have some live coverage counters left. 
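With the coverage-preserving special case deleted in this hunk, `remove_dead_blocks` no longer needs a `TyCtxt` at all: what remains is reachability from the entry block plus block renumbering. A self-contained model of the reachability half, using `usize` indices and a plain BFS rather than rustc's `traversal::reachable_as_bitset`; the example CFG is made up.

```rust
use std::collections::{BTreeSet, VecDeque};

/// Collect every block reachable from `entry` by following successor edges.
fn reachable_blocks(succs: &[Vec<usize>], entry: usize) -> BTreeSet<usize> {
    let mut seen = BTreeSet::from([entry]);
    let mut queue = VecDeque::from([entry]);
    while let Some(block) = queue.pop_front() {
        for &succ in &succs[block] {
            if seen.insert(succ) {
                queue.push_back(succ);
            }
        }
    }
    seen
}

fn main() {
    // Block 3 jumps into the live component but is never jumped to, so it is dead.
    let cfg = vec![vec![1], vec![2], vec![], vec![2]];
    assert_eq!(reachable_blocks(&cfg, 0), BTreeSet::from([0, 1, 2]));
}
```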
- let mut live = FxHashSet::default(); - for bb in reachable.iter() { - let basic_block = &basic_blocks[bb]; - for statement in &basic_block.statements { - let StatementKind::Coverage(coverage) = &statement.kind else { continue }; - let CoverageKind::Counter { .. } = coverage.kind else { continue }; - let instance = statement.source_info.scope.inlined_instance(source_scopes); - live.insert(instance); - } - } - - for bb in reachable.iter() { - let block = &mut basic_blocks[bb]; - for statement in &mut block.statements { - let StatementKind::Coverage(_) = &statement.kind else { continue }; - let instance = statement.source_info.scope.inlined_instance(source_scopes); - if !live.contains(&instance) { - statement.make_nop(); - } - } - } - - if live.is_empty() { - return; - } - - // Retain coverage for instances that still have some live counters left. - let mut retained_coverage = Vec::new(); - for dead_block in basic_blocks.indices() { - if reachable.contains(dead_block) { - continue; - } - let dead_block = &basic_blocks[dead_block]; - for statement in &dead_block.statements { - let StatementKind::Coverage(coverage) = &statement.kind else { continue }; - let Some(code_region) = &coverage.code_region else { continue }; - let instance = statement.source_info.scope.inlined_instance(source_scopes); - if live.contains(&instance) { - retained_coverage.push((statement.source_info, code_region.clone())); - } - } - } - - let start_block = &mut basic_blocks[START_BLOCK]; - start_block.statements.extend(retained_coverage.into_iter().map( - |(source_info, code_region)| Statement { - source_info, - kind: StatementKind::Coverage(Box::new(Coverage { - kind: CoverageKind::Unreachable, - code_region: Some(code_region), - })), - }, - )); -} - pub enum SimplifyLocals { BeforeConstProp, + AfterGVN, Final, } @@ -470,6 +374,7 @@ fn name(&self) -> &'static str { match &self { SimplifyLocals::BeforeConstProp => "SimplifyLocals-before-const-prop", + SimplifyLocals::AfterGVN => "SimplifyLocals-after-value-numbering", SimplifyLocals::Final => "SimplifyLocals-final", } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/simplify_branches.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/simplify_branches.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/simplify_branches.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/simplify_branches.rs 2023-12-21 16:55:28.000000000 +0000 @@ -16,8 +16,25 @@ } fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + trace!("Running SimplifyConstCondition on {:?}", body.source); let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id()); - for block in body.basic_blocks_mut() { + 'blocks: for block in body.basic_blocks_mut() { + for stmt in block.statements.iter_mut() { + if let StatementKind::Intrinsic(box ref intrinsic) = stmt.kind + && let NonDivergingIntrinsic::Assume(discr) = intrinsic + && let Operand::Constant(ref c) = discr + && let Some(constant) = c.const_.try_eval_bool(tcx, param_env) + { + if constant { + stmt.make_nop(); + } else { + block.statements.clear(); + block.terminator_mut().kind = TerminatorKind::Unreachable; + continue 'blocks; + } + } + } + let terminator = block.terminator_mut(); terminator.kind = match terminator.kind { TerminatorKind::SwitchInt { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/sroa.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/sroa.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/sroa.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/sroa.rs 2023-12-21 16:55:28.000000000 +0000 @@ -7,7 +7,7 @@ use rustc_middle::mir::*; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_mir_dataflow::value_analysis::{excluded_locals, iter_fields}; -use rustc_target::abi::{FieldIdx, ReprFlags, FIRST_VARIANT}; +use rustc_target::abi::{FieldIdx, FIRST_VARIANT}; pub struct ScalarReplacementOfAggregates; @@ -20,8 +20,8 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { debug!(def_id = ?body.source.def_id()); - // Avoid query cycles (generators require optimized MIR for layout). - if tcx.type_of(body.source.def_id()).instantiate_identity().is_generator() { + // Avoid query cycles (coroutines require optimized MIR for layout). + if tcx.type_of(body.source.def_id()).instantiate_identity().is_coroutine() { return; } @@ -66,7 +66,7 @@ return true; } if let ty::Adt(def, _args) = ty.kind() { - if def.repr().flags.contains(ReprFlags::IS_SIMD) { + if def.repr().simd() { // Exclude #[repr(simd)] types so that they are not de-optimized into an array return true; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ssa.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ssa.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ssa.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/ssa.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,7 +5,6 @@ //! As a consequence of rule 2, we consider that borrowed locals are not SSA, even if they are //! `Freeze`, as we do not track that the assignment dominates all uses of the borrow. -use either::Either; use rustc_data_structures::graph::dominators::Dominators; use rustc_index::bit_set::BitSet; use rustc_index::{IndexSlice, IndexVec}; @@ -15,7 +14,7 @@ pub struct SsaLocals { /// Assignments to each local. This defines whether the local is SSA. - assignments: IndexVec>, + assignments: IndexVec>, /// We visit the body in reverse postorder, to ensure each local is assigned before it is used. /// We remember the order in which we saw the assignments to compute the SSA values in a single /// pass. @@ -27,39 +26,10 @@ direct_uses: IndexVec, } -/// We often encounter MIR bodies with 1 or 2 basic blocks. In those cases, it's unnecessary to -/// actually compute dominators, we can just compare block indices because bb0 is always the first -/// block, and in any body all other blocks are always dominated by bb0. -struct SmallDominators<'a> { - inner: Option<&'a Dominators>, -} - -impl SmallDominators<'_> { - fn dominates(&self, first: Location, second: Location) -> bool { - if first.block == second.block { - first.statement_index <= second.statement_index - } else if let Some(inner) = &self.inner { - inner.dominates(first.block, second.block) - } else { - first.block < second.block - } - } - - fn check_dominates(&mut self, set: &mut Set1, loc: Location) { - let assign_dominates = match *set { - Set1::Empty | Set1::Many => false, - Set1::One(LocationExtended::Arg) => true, - Set1::One(LocationExtended::Plain(assign)) => { - self.dominates(assign.successor_within_block(), loc) - } - }; - // We are visiting a use that is not dominated by an assignment. - // Either there is a cycle involved, or we are reading for uninitialized local. - // Bail out. 
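The dominance bookkeeping being reshuffled in this `ssa.rs` hunk encodes the SSA criterion used by `SsaLocals`: a local counts as SSA only if it has exactly one assignment and that assignment dominates every use; anything else collapses the tracked definition to `Set1::Many`. The sketch below models that check for a single basic block, where "dominates" degenerates to "has a strictly smaller statement index", which is exactly the case the removed `SmallDominators` helper special-cased. The `Def` enum and the `(index, is_definition)` event encoding are inventions for the example, not rustc types.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Def {
    None,       // no assignment seen yet
    One(usize), // exactly one assignment, at this statement index
    Many,       // several assignments, or a use not dominated by the assignment
}

/// `events` are (statement_index, is_definition) pairs for one local, in program order
/// within a single basic block.
fn classify(events: &[(usize, bool)]) -> Def {
    let mut def = Def::None;
    for &(idx, is_def) in events {
        match (def, is_def) {
            (Def::None, true) => def = Def::One(idx),
            (Def::One(_), true) => return Def::Many,          // second assignment
            (Def::One(d), false) if d < idx => {}             // use dominated by the def
            _ => return Def::Many,                            // use before any def, etc.
        }
    }
    def
}

fn main() {
    assert_eq!(classify(&[(0, true), (2, false), (5, false)]), Def::One(0));
    assert_eq!(classify(&[(1, false), (2, true)]), Def::Many); // use before def
    assert_eq!(classify(&[(0, true), (3, true)]), Def::Many);  // two defs
}
```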
- if !assign_dominates { - *set = Set1::Many; - } - } +pub enum AssignedValue<'a, 'tcx> { + Arg, + Rvalue(&'a mut Rvalue<'tcx>), + Terminator(&'a mut TerminatorKind<'tcx>), } impl SsaLocals { @@ -67,15 +37,14 @@ let assignment_order = Vec::with_capacity(body.local_decls.len()); let assignments = IndexVec::from_elem(Set1::Empty, &body.local_decls); - let dominators = - if body.basic_blocks.len() > 2 { Some(body.basic_blocks.dominators()) } else { None }; - let dominators = SmallDominators { inner: dominators }; + let dominators = body.basic_blocks.dominators(); let direct_uses = IndexVec::from_elem(0, &body.local_decls); let mut visitor = SsaVisitor { assignments, assignment_order, dominators, direct_uses }; for local in body.args_iter() { - visitor.assignments[local] = Set1::One(LocationExtended::Arg); + visitor.assignments[local] = Set1::One(DefLocation::Argument); + visitor.assignment_order.push(local); } // For SSA assignments, a RPO visit will see the assignment before it sees any use. @@ -131,14 +100,7 @@ location: Location, ) -> bool { match self.assignments[local] { - Set1::One(LocationExtended::Arg) => true, - Set1::One(LocationExtended::Plain(ass)) => { - if ass.block == location.block { - ass.statement_index < location.statement_index - } else { - dominators.dominates(ass.block, location.block) - } - } + Set1::One(def) => def.dominates(location, dominators), _ => false, } } @@ -148,9 +110,9 @@ body: &'a Body<'tcx>, ) -> impl Iterator, Location)> + 'a { self.assignment_order.iter().filter_map(|&local| { - if let Set1::One(LocationExtended::Plain(loc)) = self.assignments[local] { + if let Set1::One(DefLocation::Body(loc)) = self.assignments[local] { + let stmt = body.stmt_at(loc).left()?; // `loc` must point to a direct assignment to `local`. - let Either::Left(stmt) = body.stmt_at(loc) else { bug!() }; let Some((target, rvalue)) = stmt.kind.as_assign() else { bug!() }; assert_eq!(target.as_local(), Some(local)); Some((local, rvalue, loc)) @@ -162,18 +124,33 @@ pub fn for_each_assignment_mut<'tcx>( &self, - basic_blocks: &mut BasicBlocks<'tcx>, - mut f: impl FnMut(Local, &mut Rvalue<'tcx>, Location), + basic_blocks: &mut IndexSlice>, + mut f: impl FnMut(Local, AssignedValue<'_, 'tcx>, Location), ) { for &local in &self.assignment_order { - if let Set1::One(LocationExtended::Plain(loc)) = self.assignments[local] { - // `loc` must point to a direct assignment to `local`. - let bbs = basic_blocks.as_mut_preserves_cfg(); - let bb = &mut bbs[loc.block]; - let stmt = &mut bb.statements[loc.statement_index]; - let StatementKind::Assign(box (target, ref mut rvalue)) = stmt.kind else { bug!() }; - assert_eq!(target.as_local(), Some(local)); - f(local, rvalue, loc) + match self.assignments[local] { + Set1::One(DefLocation::Argument) => f( + local, + AssignedValue::Arg, + Location { block: START_BLOCK, statement_index: 0 }, + ), + Set1::One(DefLocation::Body(loc)) => { + let bb = &mut basic_blocks[loc.block]; + let value = if loc.statement_index < bb.statements.len() { + // `loc` must point to a direct assignment to `local`. 
+ let stmt = &mut bb.statements[loc.statement_index]; + let StatementKind::Assign(box (target, ref mut rvalue)) = stmt.kind else { + bug!() + }; + assert_eq!(target.as_local(), Some(local)); + AssignedValue::Rvalue(rvalue) + } else { + let term = bb.terminator_mut(); + AssignedValue::Terminator(&mut term.kind) + }; + f(local, value, loc) + } + _ => {} } } } @@ -224,19 +201,29 @@ } } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -enum LocationExtended { - Plain(Location), - Arg, -} - struct SsaVisitor<'a> { - dominators: SmallDominators<'a>, - assignments: IndexVec>, + dominators: &'a Dominators, + assignments: IndexVec>, assignment_order: Vec, direct_uses: IndexVec, } +impl SsaVisitor<'_> { + fn check_dominates(&mut self, local: Local, loc: Location) { + let set = &mut self.assignments[local]; + let assign_dominates = match *set { + Set1::Empty | Set1::Many => false, + Set1::One(def) => def.dominates(loc, self.dominators), + }; + // We are visiting a use that is not dominated by an assignment. + // Either there is a cycle involved, or we are reading for uninitialized local. + // Bail out. + if !assign_dominates { + *set = Set1::Many; + } + } +} + impl<'tcx> Visitor<'tcx> for SsaVisitor<'_> { fn visit_local(&mut self, local: Local, ctxt: PlaceContext, loc: Location) { match ctxt { @@ -254,7 +241,7 @@ self.assignments[local] = Set1::Many; } PlaceContext::NonMutatingUse(_) => { - self.dominators.check_dominates(&mut self.assignments[local], loc); + self.check_dominates(local, loc); self.direct_uses[local] += 1; } PlaceContext::NonUse(_) => {} @@ -262,34 +249,34 @@ } fn visit_place(&mut self, place: &Place<'tcx>, ctxt: PlaceContext, loc: Location) { - if place.projection.first() == Some(&PlaceElem::Deref) { - // Do not do anything for storage statements and debuginfo. + let location = match ctxt { + PlaceContext::MutatingUse( + MutatingUseContext::Store | MutatingUseContext::Call | MutatingUseContext::Yield, + ) => Some(DefLocation::Body(loc)), + _ => None, + }; + if let Some(location) = location + && let Some(local) = place.as_local() + { + self.assignments[local].insert(location); + if let Set1::One(_) = self.assignments[local] { + // Only record if SSA-like, to avoid growing the vector needlessly. + self.assignment_order.push(local); + } + } else if place.projection.first() == Some(&PlaceElem::Deref) { + // Do not do anything for debuginfo. if ctxt.is_use() { // Only change the context if it is a real use, not a "use" in debuginfo. let new_ctxt = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy); self.visit_projection(place.as_ref(), new_ctxt, loc); - self.dominators.check_dominates(&mut self.assignments[place.local], loc); + self.check_dominates(place.local, loc); } - return; } else { self.visit_projection(place.as_ref(), ctxt, loc); self.visit_local(place.local, ctxt, loc); } } - - fn visit_assign(&mut self, place: &Place<'tcx>, rvalue: &Rvalue<'tcx>, loc: Location) { - if let Some(local) = place.as_local() { - self.assignments[local].insert(LocationExtended::Plain(loc)); - if let Set1::One(_) = self.assignments[local] { - // Only record if SSA-like, to avoid growing the vector needlessly. - self.assignment_order.push(local); - } - } else { - self.visit_place(place, PlaceContext::MutatingUse(MutatingUseContext::Store), loc); - } - self.visit_rvalue(rvalue, loc); - } } #[instrument(level = "trace", skip(ssa, body))] @@ -356,7 +343,7 @@ #[derive(Debug)] pub(crate) struct StorageLiveLocals { /// Set of "StorageLive" statements for each local. 
- storage_live: IndexVec>, + storage_live: IndexVec>, } impl StorageLiveLocals { @@ -366,13 +353,13 @@ ) -> StorageLiveLocals { let mut storage_live = IndexVec::from_elem(Set1::Empty, &body.local_decls); for local in always_storage_live_locals.iter() { - storage_live[local] = Set1::One(LocationExtended::Arg); + storage_live[local] = Set1::One(DefLocation::Argument); } for (block, bbdata) in body.basic_blocks.iter_enumerated() { for (statement_index, statement) in bbdata.statements.iter().enumerate() { if let StatementKind::StorageLive(local) = statement.kind { storage_live[local] - .insert(LocationExtended::Plain(Location { block, statement_index })); + .insert(DefLocation::Body(Location { block, statement_index })); } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,8 +3,7 @@ use crate::MirPass; use rustc_data_structures::fx::FxHashSet; use rustc_middle::mir::{ - BasicBlockData, Body, Local, Operand, Rvalue, StatementKind, SwitchTargets, Terminator, - TerminatorKind, + BasicBlockData, Body, Local, Operand, Rvalue, StatementKind, Terminator, TerminatorKind, }; use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::ty::{Ty, TyCtxt}; @@ -30,18 +29,16 @@ let terminator = block_data.terminator(); // Only bother checking blocks which terminate by switching on a local. - if let Some(local) = get_discriminant_local(&terminator.kind) { - let stmt_before_term = (!block_data.statements.is_empty()) - .then(|| &block_data.statements[block_data.statements.len() - 1].kind); - - if let Some(StatementKind::Assign(box (l, Rvalue::Discriminant(place)))) = stmt_before_term - { - if l.as_local() == Some(local) { - let ty = place.ty(body, tcx).ty; - if ty.is_enum() { - return Some(ty); - } - } + let local = get_discriminant_local(&terminator.kind)?; + + let stmt_before_term = block_data.statements.last()?; + + if let StatementKind::Assign(box (l, Rvalue::Discriminant(place))) = stmt_before_term.kind + && l.as_local() == Some(local) + { + let ty = place.ty(body, tcx).ty; + if ty.is_enum() { + return Some(ty); } } @@ -72,28 +69,6 @@ } } -/// Ensures that the `otherwise` branch leads to an unreachable bb, returning `None` if so and a new -/// bb to use as the new target if not. 
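The `uninhabited_enum_branching` changes in this hunk drop the per-switch `ensure_otherwise_unreachable` helper: instead of synthesising a fresh block for each rewritten switch, the pass now collects all removable targets up front and points them at one shared `unreachable` block. For orientation, a minimal program containing the kind of statically impossible variant the pass cares about; the enum and function names are made up for the example.

```rust
use std::convert::Infallible;

// `E::B` holds an `Infallible`, so that variant can never be constructed. When MIR switches
// on the discriminant of `E`, the target for `B` can only ever be unreachable, which is what
// UninhabitedEnumBranching exploits by redirecting such targets to a shared unreachable block.
#[allow(dead_code)]
enum E {
    A,
    B(Infallible),
}

fn describe(e: E) -> &'static str {
    match e {
        E::A => "a",
        E::B(never) => match never {}, // statically impossible arm
    }
}

fn main() {
    assert_eq!(describe(E::A), "a");
}
```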
-fn ensure_otherwise_unreachable<'tcx>( - body: &Body<'tcx>, - targets: &SwitchTargets, -) -> Option> { - let otherwise = targets.otherwise(); - let bb = &body.basic_blocks[otherwise]; - if bb.terminator().kind == TerminatorKind::Unreachable - && bb.statements.iter().all(|s| matches!(&s.kind, StatementKind::StorageDead(_))) - { - return None; - } - - let mut new_block = BasicBlockData::new(Some(Terminator { - source_info: bb.terminator().source_info, - kind: TerminatorKind::Unreachable, - })); - new_block.is_cleanup = bb.is_cleanup; - Some(new_block) -} - impl<'tcx> MirPass<'tcx> for UninhabitedEnumBranching { fn is_enabled(&self, sess: &rustc_session::Session) -> bool { sess.mir_opt_level() > 0 @@ -102,13 +77,16 @@ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { trace!("UninhabitedEnumBranching starting for {:?}", body.source); - for bb in body.basic_blocks.indices() { + let mut removable_switchs = Vec::new(); + + for (bb, bb_data) in body.basic_blocks.iter_enumerated() { trace!("processing block {:?}", bb); - let Some(discriminant_ty) = get_switched_on_type(&body.basic_blocks[bb], tcx, body) - else { + if bb_data.is_cleanup { continue; - }; + } + + let Some(discriminant_ty) = get_switched_on_type(&bb_data, tcx, body) else { continue }; let layout = tcx.layout_of( tcx.param_env_reveal_all_normalized(body.source.def_id()).and(discriminant_ty), @@ -122,31 +100,38 @@ trace!("allowed_variants = {:?}", allowed_variants); - if let TerminatorKind::SwitchInt { targets, .. } = - &mut body.basic_blocks_mut()[bb].terminator_mut().kind - { - let mut new_targets = SwitchTargets::new( - targets.iter().filter(|(val, _)| allowed_variants.contains(val)), - targets.otherwise(), - ); - - if new_targets.iter().count() == allowed_variants.len() { - if let Some(updated) = ensure_otherwise_unreachable(body, &new_targets) { - let new_otherwise = body.basic_blocks_mut().push(updated); - *new_targets.all_targets_mut().last_mut().unwrap() = new_otherwise; - } - } + let terminator = bb_data.terminator(); + let TerminatorKind::SwitchInt { targets, .. } = &terminator.kind else { bug!() }; - if let TerminatorKind::SwitchInt { targets, .. } = - &mut body.basic_blocks_mut()[bb].terminator_mut().kind - { - *targets = new_targets; + let mut reachable_count = 0; + for (index, (val, _)) in targets.iter().enumerate() { + if allowed_variants.contains(&val) { + reachable_count += 1; } else { - unreachable!() + removable_switchs.push((bb, index)); } - } else { - unreachable!() } + + if reachable_count == allowed_variants.len() { + removable_switchs.push((bb, targets.iter().count())); + } + } + + if removable_switchs.is_empty() { + return; + } + + let new_block = BasicBlockData::new(Some(Terminator { + source_info: body.basic_blocks[removable_switchs[0].0].terminator().source_info, + kind: TerminatorKind::Unreachable, + })); + let unreachable_block = body.basic_blocks.as_mut().push(new_block); + + for (bb, index) in removable_switchs { + let bb = &mut body.basic_blocks.as_mut()[bb]; + let terminator = bb.terminator_mut(); + let TerminatorKind::SwitchInt { targets, .. 
} = &mut terminator.kind else { bug!() }; + targets.all_targets_mut()[index] = unreachable_block; } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/unreachable_prop.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/unreachable_prop.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/unreachable_prop.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_mir_transform/src/unreachable_prop.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,11 +2,13 @@ //! when all of their successors are unreachable. This is achieved through a //! post-order traversal of the blocks. -use crate::simplify; use crate::MirPass; -use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_data_structures::fx::FxHashSet; +use rustc_middle::mir::interpret::Scalar; +use rustc_middle::mir::patch::MirPatch; use rustc_middle::mir::*; -use rustc_middle::ty::TyCtxt; +use rustc_middle::ty::{self, TyCtxt}; +use rustc_target::abi::Size; pub struct UnreachablePropagation; @@ -21,106 +23,133 @@ } fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let mut patch = MirPatch::new(body); let mut unreachable_blocks = FxHashSet::default(); - let mut replacements = FxHashMap::default(); for (bb, bb_data) in traversal::postorder(body) { let terminator = bb_data.terminator(); - if terminator.kind == TerminatorKind::Unreachable { - unreachable_blocks.insert(bb); - } else { - let is_unreachable = |succ: BasicBlock| unreachable_blocks.contains(&succ); - let terminator_kind_opt = remove_successors(&terminator.kind, is_unreachable); - - if let Some(terminator_kind) = terminator_kind_opt { - if terminator_kind == TerminatorKind::Unreachable { - unreachable_blocks.insert(bb); - } - replacements.insert(bb, terminator_kind); + let is_unreachable = match &terminator.kind { + TerminatorKind::Unreachable => true, + // This will unconditionally run into an unreachable and is therefore unreachable as well. + TerminatorKind::Goto { target } if unreachable_blocks.contains(target) => { + patch.patch_terminator(bb, TerminatorKind::Unreachable); + true + } + // Try to remove unreachable targets from the switch. + TerminatorKind::SwitchInt { .. } => { + remove_successors_from_switch(tcx, bb, &unreachable_blocks, body, &mut patch) } + _ => false, + }; + if is_unreachable { + unreachable_blocks.insert(bb); } } + if !tcx + .consider_optimizing(|| format!("UnreachablePropagation {:?} ", body.source.def_id())) + { + return; + } + + patch.apply(body); + // We do want do keep some unreachable blocks, but make them empty. for bb in unreachable_blocks { - if !tcx.consider_optimizing(|| { - format!("UnreachablePropagation {:?} ", body.source.def_id()) - }) { - break; - } - body.basic_blocks_mut()[bb].statements.clear(); } + } +} - let replaced = !replacements.is_empty(); +/// Return whether the current terminator is fully unreachable. +fn remove_successors_from_switch<'tcx>( + tcx: TyCtxt<'tcx>, + bb: BasicBlock, + unreachable_blocks: &FxHashSet, + body: &Body<'tcx>, + patch: &mut MirPatch<'tcx>, +) -> bool { + let terminator = body.basic_blocks[bb].terminator(); + let TerminatorKind::SwitchInt { discr, targets } = &terminator.kind else { bug!() }; + let source_info = terminator.source_info; + let location = body.terminator_loc(bb); + + let is_unreachable = |bb| unreachable_blocks.contains(&bb); + + // If there are multiple targets, we want to keep information about reachability for codegen. 
+ // For example (see tests/codegen/match-optimizes-away.rs) + // + // pub enum Two { A, B } + // pub fn identity(x: Two) -> Two { + // match x { + // Two::A => Two::A, + // Two::B => Two::B, + // } + // } + // + // This generates a `switchInt() -> [0: 0, 1: 1, otherwise: unreachable]`, which allows us or LLVM to + // turn it into just `x` later. Without the unreachable, such a transformation would be illegal. + // + // In order to preserve this information, we record reachable and unreachable targets as + // `Assume` statements in MIR. + + let discr_ty = discr.ty(body, tcx); + let discr_size = Size::from_bits(match discr_ty.kind() { + ty::Uint(uint) => uint.normalize(tcx.sess.target.pointer_width).bit_width().unwrap(), + ty::Int(int) => int.normalize(tcx.sess.target.pointer_width).bit_width().unwrap(), + ty::Char => 32, + ty::Bool => 1, + other => bug!("unhandled type: {:?}", other), + }); + + let mut add_assumption = |binop, value| { + let local = patch.new_temp(tcx.types.bool, source_info.span); + let value = Operand::Constant(Box::new(ConstOperand { + span: source_info.span, + user_ty: None, + const_: Const::from_scalar(tcx, Scalar::from_uint(value, discr_size), discr_ty), + })); + let cmp = Rvalue::BinaryOp(binop, Box::new((discr.to_copy(), value))); + patch.add_assign(location, local.into(), cmp); - for (bb, terminator_kind) in replacements { - if !tcx.consider_optimizing(|| { - format!("UnreachablePropagation {:?} ", body.source.def_id()) - }) { - break; - } + let assume = NonDivergingIntrinsic::Assume(Operand::Move(local.into())); + patch.add_statement(location, StatementKind::Intrinsic(Box::new(assume))); + }; - body.basic_blocks_mut()[bb].terminator_mut().kind = terminator_kind; - } + let otherwise = targets.otherwise(); + let otherwise_unreachable = is_unreachable(otherwise); - if replaced { - simplify::remove_dead_blocks(tcx, body); + let reachable_iter = targets.iter().filter(|&(value, bb)| { + let is_unreachable = is_unreachable(bb); + // We remove this target from the switch, so record the inequality using `Assume`. + if is_unreachable && !otherwise_unreachable { + add_assumption(BinOp::Ne, value); } - } -} + !is_unreachable + }); -fn remove_successors<'tcx, F>( - terminator_kind: &TerminatorKind<'tcx>, - is_unreachable: F, -) -> Option> -where - F: Fn(BasicBlock) -> bool, -{ - let terminator = match terminator_kind { - // This will unconditionally run into an unreachable and is therefore unreachable as well. - TerminatorKind::Goto { target } if is_unreachable(*target) => TerminatorKind::Unreachable, - TerminatorKind::SwitchInt { targets, discr } => { - let otherwise = targets.otherwise(); - - // If all targets are unreachable, we can be unreachable as well. - if targets.all_targets().iter().all(|bb| is_unreachable(*bb)) { - TerminatorKind::Unreachable - } else if is_unreachable(otherwise) { - // If there are multiple targets, don't delete unreachable branches (like an unreachable otherwise) - // unless otherwise is unreachable, in which case deleting a normal branch causes it to be merged with - // the otherwise, keeping its unreachable. - // This looses information about reachability causing worse codegen. - // For example (see tests/codegen/match-optimizes-away.rs) - // - // pub enum Two { A, B } - // pub fn identity(x: Two) -> Two { - // match x { - // Two::A => Two::A, - // Two::B => Two::B, - // } - // } - // - // This generates a `switchInt() -> [0: 0, 1: 1, otherwise: unreachable]`, which allows us or LLVM to - // turn it into just `x` later. 
Without the unreachable, such a transformation would be illegal. - // If the otherwise branch is unreachable, we can delete all other unreachable targets, as they will - // still point to the unreachable and therefore not lose reachability information. - let reachable_iter = targets.iter().filter(|(_, bb)| !is_unreachable(*bb)); - - let new_targets = SwitchTargets::new(reachable_iter, otherwise); - - // No unreachable branches were removed. - if new_targets.all_targets().len() == targets.all_targets().len() { - return None; - } + let new_targets = SwitchTargets::new(reachable_iter, otherwise); - TerminatorKind::SwitchInt { discr: discr.clone(), targets: new_targets } - } else { - // If the otherwise branch is reachable, we don't want to delete any unreachable branches. - return None; - } + let num_targets = new_targets.all_targets().len(); + let fully_unreachable = num_targets == 1 && otherwise_unreachable; + + let terminator = match (num_targets, otherwise_unreachable) { + // If all targets are unreachable, we can be unreachable as well. + (1, true) => TerminatorKind::Unreachable, + (1, false) => TerminatorKind::Goto { target: otherwise }, + (2, true) => { + // All targets are unreachable except one. Record the equality, and make it a goto. + let (value, target) = new_targets.iter().next().unwrap(); + add_assumption(BinOp::Eq, value); + TerminatorKind::Goto { target } } - _ => return None, + _ if num_targets == targets.all_targets().len() => { + // Nothing has changed. + return false; + } + _ => TerminatorKind::SwitchInt { discr: discr.clone(), targets: new_targets }, }; - Some(terminator) + + patch.patch_terminator(bb, terminator); + fully_unreachable } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,19 +3,18 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] -serde = "1" -serde_json = "1" -tracing = "0.1" +# tidy-alphabetical-start rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } -rustc_hir = { path = "../rustc_hir" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_index = { path = "../rustc_index" } +rustc_hir = { path = "../rustc_hir" } rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } +serde = "1" +serde_json = "1" +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -7,8 +7,6 @@ monomorphize_encountered_error_while_instantiating = the above error was encountered while instantiating `{$formatted_item}` -monomorphize_fatal_error = {$error_message} - monomorphize_large_assignments = moving {$size} bytes .label = value moved from here @@ -29,8 +27,6 @@ monomorphize_unknown_cgu_collection_mode = unknown codegen-item collection mode '{$mode}', 
falling back to 'lazy' mode -monomorphize_unknown_partition_strategy = unknown partitioning strategy - monomorphize_unused_generic_params = item has unused generic parameters monomorphize_written_to_path = the full type name has been written to '{$path}' diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/collector.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/collector.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/collector.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/collector.rs 2023-12-21 16:55:28.000000000 +0000 @@ -173,7 +173,7 @@ use rustc_middle::mir::interpret::{AllocId, ErrorHandled, GlobalAlloc, Scalar}; use rustc_middle::mir::mono::{InstantiationMode, MonoItem}; use rustc_middle::mir::visit::Visitor as MirVisitor; -use rustc_middle::mir::{self, Local, Location}; +use rustc_middle::mir::{self, Location}; use rustc_middle::query::TyCtxtAt; use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCoercion}; use rustc_middle::ty::print::with_no_trimmed_paths; @@ -186,8 +186,9 @@ use rustc_session::config::EntryFnType; use rustc_session::lint::builtin::LARGE_ASSIGNMENTS; use rustc_session::Limit; -use rustc_span::source_map::{dummy_spanned, respan, Span, Spanned, DUMMY_SP}; +use rustc_span::source_map::{dummy_spanned, respan, Spanned}; use rustc_span::symbol::{sym, Ident}; +use rustc_span::{Span, DUMMY_SP}; use rustc_target::abi::Size; use std::path::PathBuf; @@ -432,7 +433,7 @@ hir::InlineAsmOperand::SymFn { anon_const } => { let fn_ty = tcx.typeck_body(anon_const.body).node_type(anon_const.hir_id); - visit_fn_use(tcx, fn_ty, false, *op_sp, &mut used_items, &[]); + visit_fn_use(tcx, fn_ty, false, *op_sp, &mut used_items); } hir::InlineAsmOperand::SymStatic { path: _, def_id } => { let instance = Instance::mono(tcx, *def_id); @@ -593,11 +594,9 @@ instance: Instance<'tcx>, /// Spans for move size lints already emitted. Helps avoid duplicate lints. move_size_spans: Vec, - /// If true, we should temporarily skip move size checks, because we are - /// processing an operand to a `skip_move_check_fns` function call. - skip_move_size_check: bool, + visiting_call_terminator: bool, /// Set of functions for which it is OK to move large data into. - skip_move_check_fns: Vec, + skip_move_check_fns: Option>, } impl<'a, 'tcx> MirUsedCollector<'a, 'tcx> { @@ -613,7 +612,20 @@ ) } - fn check_move_size(&mut self, limit: usize, operand: &mir::Operand<'tcx>, location: Location) { + fn check_operand_move_size(&mut self, operand: &mir::Operand<'tcx>, location: Location) { + let limit = self.tcx.move_size_limit().0; + if limit == 0 { + return; + } + + // This function is called by visit_operand() which visits _all_ + // operands, including TerminatorKind::Call operands. But if + // check_fn_args_move_size() has been called, the operands have already + // been visited. Do not visit them again. 
+ if self.visiting_call_terminator { + return; + } + let limit = Size::from_bytes(limit); let ty = operand.ty(self.body, self.tcx); let ty = self.monomorphize(ty); @@ -651,6 +663,38 @@ ); self.move_size_spans.push(source_info.span); } + + fn check_fn_args_move_size( + &mut self, + callee_ty: Ty<'tcx>, + args: &[mir::Operand<'tcx>], + location: Location, + ) { + let limit = self.tcx.move_size_limit(); + if limit.0 == 0 { + return; + } + + if args.is_empty() { + return; + } + + // Allow large moves into container types that themselves are cheap to move + let ty::FnDef(def_id, _) = *callee_ty.kind() else { + return; + }; + if self + .skip_move_check_fns + .get_or_insert_with(|| build_skip_move_check_fns(self.tcx)) + .contains(&def_id) + { + return; + } + + for arg in args { + self.check_operand_move_size(arg, location); + } + } } impl<'a, 'tcx> MirVisitor<'tcx> for MirUsedCollector<'a, 'tcx> { @@ -696,14 +740,7 @@ ) => { let fn_ty = operand.ty(self.body, self.tcx); let fn_ty = self.monomorphize(fn_ty); - visit_fn_use( - self.tcx, - fn_ty, - false, - span, - &mut self.output, - &self.skip_move_check_fns, - ); + visit_fn_use(self.tcx, fn_ty, false, span, &mut self.output); } mir::Rvalue::Cast( mir::CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(_)), @@ -775,17 +812,11 @@ }; match terminator.kind { - mir::TerminatorKind::Call { ref func, .. } => { + mir::TerminatorKind::Call { ref func, ref args, .. } => { let callee_ty = func.ty(self.body, tcx); let callee_ty = self.monomorphize(callee_ty); - self.skip_move_size_check = visit_fn_use( - self.tcx, - callee_ty, - true, - source, - &mut self.output, - &self.skip_move_check_fns, - ) + self.check_fn_args_move_size(callee_ty, args, location); + visit_fn_use(self.tcx, callee_ty, true, source, &mut self.output) } mir::TerminatorKind::Drop { ref place, .. } => { let ty = place.ty(self.body, self.tcx).ty; @@ -797,7 +828,7 @@ match *op { mir::InlineAsmOperand::SymFn { ref value } => { let fn_ty = self.monomorphize(value.const_.ty()); - visit_fn_use(self.tcx, fn_ty, false, source, &mut self.output, &[]); + visit_fn_use(self.tcx, fn_ty, false, source, &mut self.output); } mir::InlineAsmOperand::SymStatic { def_id } => { let instance = Instance::mono(self.tcx, def_id); @@ -825,7 +856,7 @@ | mir::TerminatorKind::UnwindResume | mir::TerminatorKind::Return | mir::TerminatorKind::Unreachable => {} - mir::TerminatorKind::GeneratorDrop + mir::TerminatorKind::CoroutineDrop | mir::TerminatorKind::Yield { .. } | mir::TerminatorKind::FalseEdge { .. } | mir::TerminatorKind::FalseUnwind { .. } => bug!(), @@ -835,24 +866,14 @@ push_mono_lang_item(self, reason.lang_item()); } + self.visiting_call_terminator = matches!(terminator.kind, mir::TerminatorKind::Call { .. 
}); self.super_terminator(terminator, location); - self.skip_move_size_check = false; + self.visiting_call_terminator = false; } fn visit_operand(&mut self, operand: &mir::Operand<'tcx>, location: Location) { self.super_operand(operand, location); - let move_size_limit = self.tcx.move_size_limit().0; - if move_size_limit > 0 && !self.skip_move_size_check { - self.check_move_size(move_size_limit, operand, location); - } - } - - fn visit_local( - &mut self, - _place_local: Local, - _context: mir::visit::PlaceContext, - _location: Location, - ) { + self.check_operand_move_size(operand, location); } } @@ -873,11 +894,8 @@ is_direct_call: bool, source: Span, output: &mut MonoItems<'tcx>, - skip_move_check_fns: &[DefId], -) -> bool { - let mut skip_move_size_check = false; +) { if let ty::FnDef(def_id, args) = *ty.kind() { - skip_move_size_check = skip_move_check_fns.contains(&def_id); let instance = if is_direct_call { ty::Instance::expect_resolve(tcx, ty::ParamEnv::reveal_all(), def_id, args) } else { @@ -888,7 +906,6 @@ }; visit_instance_use(tcx, instance, is_direct_call, source, output); } - skip_move_size_check } fn visit_instance_use<'tcx>( @@ -1196,7 +1213,7 @@ } fn is_root(&self, def_id: LocalDefId) -> bool { - !item_requires_monomorphization(self.tcx, def_id) + !self.tcx.generics_of(def_id).requires_monomorphization(self.tcx) && match self.mode { MonoItemCollectionMode::Eager => true, MonoItemCollectionMode::Lazy => { @@ -1259,11 +1276,6 @@ } } -fn item_requires_monomorphization(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool { - let generics = tcx.generics_of(def_id); - generics.requires_monomorphization(tcx) -} - #[instrument(level = "debug", skip(tcx, output))] fn create_mono_items_for_default_impls<'tcx>( tcx: TyCtxt<'tcx>, @@ -1370,17 +1382,6 @@ } } -fn add_assoc_fn<'tcx>( - tcx: TyCtxt<'tcx>, - def_id: Option, - fn_ident: Ident, - skip_move_check_fns: &mut Vec, -) { - if let Some(def_id) = def_id.and_then(|def_id| assoc_fn_of_type(tcx, def_id, fn_ident)) { - skip_move_check_fns.push(def_id); - } -} - fn assoc_fn_of_type<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, fn_ident: Ident) -> Option { for impl_def_id in tcx.inherent_impls(def_id) { if let Some(new) = tcx.associated_items(impl_def_id).find_by_name_and_kind( @@ -1395,6 +1396,19 @@ return None; } +fn build_skip_move_check_fns(tcx: TyCtxt<'_>) -> Vec { + let fns = [ + (tcx.lang_items().owned_box(), "new"), + (tcx.get_diagnostic_item(sym::Rc), "new"), + (tcx.get_diagnostic_item(sym::Arc), "new"), + ]; + fns.into_iter() + .filter_map(|(def_id, fn_name)| { + def_id.and_then(|def_id| assoc_fn_of_type(tcx, def_id, Ident::from_str(fn_name))) + }) + .collect::>() +} + /// Scans the MIR in order to find function calls, closures, and drop-glue. #[instrument(skip(tcx, output), level = "debug")] fn collect_used_items<'tcx>( @@ -1404,36 +1418,16 @@ ) { let body = tcx.instance_mir(instance.def); - let mut skip_move_check_fns = vec![]; - if tcx.move_size_limit().0 > 0 { - add_assoc_fn( - tcx, - tcx.lang_items().owned_box(), - Ident::from_str("new"), - &mut skip_move_check_fns, - ); - add_assoc_fn( - tcx, - tcx.get_diagnostic_item(sym::Arc), - Ident::from_str("new"), - &mut skip_move_check_fns, - ); - add_assoc_fn( - tcx, - tcx.get_diagnostic_item(sym::Rc), - Ident::from_str("new"), - &mut skip_move_check_fns, - ); - } - + // Here we rely on the visitor also visiting `required_consts`, so that we evaluate them + // and abort compilation if any of them errors. 
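`skip_move_check_fns` is now an `Option` that is filled in on first use by `build_skip_move_check_fns`, instead of being built eagerly for every body even when no call is ever checked. A minimal sketch of that `Option::get_or_insert_with` memoization pattern, with illustrative names rather than the compiler's own types:

    struct Checker {
        // Built at most once, the first time `should_skip` is called.
        skip_list: Option<Vec<u64>>,
    }

    impl Checker {
        fn build_skip_list() -> Vec<u64> {
            // Stand-in for `build_skip_move_check_fns`.
            vec![1, 2, 3]
        }

        fn should_skip(&mut self, id: u64) -> bool {
            self.skip_list.get_or_insert_with(Self::build_skip_list).contains(&id)
        }
    }

    fn main() {
        let mut c = Checker { skip_list: None };
        assert!(c.should_skip(2));
        assert!(!c.should_skip(7));
    }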
MirUsedCollector { tcx, body: &body, output, instance, move_size_spans: vec![], - skip_move_size_check: false, - skip_move_check_fns, + visiting_call_terminator: false, + skip_move_check_fns: None, } .visit_body(&body); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -76,10 +76,6 @@ } #[derive(Diagnostic)] -#[diag(monomorphize_unknown_partition_strategy)] -pub struct UnknownPartitionStrategy; - -#[derive(Diagnostic)] #[diag(monomorphize_symbol_already_defined)] pub struct SymbolAlreadyDefined { #[primary_span] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/partitioning.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/partitioning.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/partitioning.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/partitioning.rs 2023-12-21 16:55:28.000000000 +0000 @@ -105,7 +105,6 @@ use rustc_hir::definitions::DefPathDataName; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel}; -use rustc_middle::mir; use rustc_middle::mir::mono::{ CodegenUnit, CodegenUnitNameBuilder, InstantiationMode, Linkage, MonoItem, MonoItemData, Visibility, @@ -1279,38 +1278,8 @@ Ok(()) } -fn codegened_and_inlined_items(tcx: TyCtxt<'_>, (): ()) -> &DefIdSet { - let (items, cgus) = tcx.collect_and_partition_mono_items(()); - let mut visited = DefIdSet::default(); - let mut result = items.clone(); - - for cgu in cgus { - for item in cgu.items().keys() { - if let MonoItem::Fn(ref instance) = item { - let did = instance.def_id(); - if !visited.insert(did) { - continue; - } - let body = tcx.instance_mir(instance.def); - for block in body.basic_blocks.iter() { - for statement in &block.statements { - let mir::StatementKind::Coverage(_) = statement.kind else { continue }; - let scope = statement.source_info.scope; - if let Some(inlined) = scope.inlined_instance(&body.source_scopes) { - result.insert(inlined.def_id()); - } - } - } - } - } - } - - tcx.arena.alloc(result) -} - pub fn provide(providers: &mut Providers) { providers.collect_and_partition_mono_items = collect_and_partition_mono_items; - providers.codegened_and_inlined_items = codegened_and_inlined_items; providers.is_codegened_item = |tcx, def_id| { let (all_mono_items, _) = tcx.collect_and_partition_mono_items(()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/polymorphize.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/polymorphize.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/polymorphize.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_monomorphize/src/polymorphize.rs 2023-12-21 16:55:28.000000000 +0000 @@ -131,7 +131,7 @@ unused_parameters: &mut UnusedGenericParams, ) { match tcx.def_kind(def_id) { - DefKind::Closure | DefKind::Generator => { + DefKind::Closure | DefKind::Coroutine => { for param in &generics.params { debug!(?param, "(closure/gen)"); unused_parameters.mark_used(param.index); @@ -227,7 +227,7 @@ impl<'a, 'tcx> MarkUsedGenericParams<'a, 'tcx> { /// 
Invoke `unused_generic_params` on a body contained within the current item (e.g. - /// a closure, generator or constant). + /// a closure, coroutine or constant). #[instrument(level = "debug", skip(self, def_id, args))] fn visit_child_body(&mut self, def_id: DefId, args: GenericArgsRef<'tcx>) { let instance = ty::InstanceDef::Item(def_id); @@ -248,8 +248,8 @@ fn visit_local_decl(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) { if local == Local::from_usize(1) { let def_kind = self.tcx.def_kind(self.def_id); - if matches!(def_kind, DefKind::Closure | DefKind::Generator) { - // Skip visiting the closure/generator that is currently being processed. This only + if matches!(def_kind, DefKind::Closure | DefKind::Coroutine) { + // Skip visiting the closure/coroutine that is currently being processed. This only // happens because the first argument to the closure is a reference to itself and // that will call `visit_args`, resulting in each generic parameter captured being // considered used by default. @@ -319,14 +319,14 @@ } match *ty.kind() { - ty::Closure(def_id, args) | ty::Generator(def_id, args, ..) => { + ty::Closure(def_id, args) | ty::Coroutine(def_id, args, ..) => { debug!(?def_id); - // Avoid cycle errors with generators. + // Avoid cycle errors with coroutines. if def_id == self.def_id { return ControlFlow::Continue(()); } - // Consider any generic parameters used by any closures/generators as used in the + // Consider any generic parameters used by any closures/coroutines as used in the // parent. self.visit_child_body(def_id, args); ControlFlow::Continue(()) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,9 +3,8 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start bitflags = "1.0" rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } @@ -21,3 +20,4 @@ tracing = "0.1" unicode-normalization = "0.1.11" unicode-width = "0.1.4" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -59,7 +59,6 @@ parse_bare_cr_in_raw_string = bare CR not allowed in raw string -parse_binary_float_literal_not_supported = binary float literal is not supported parse_bounds_not_allowed_on_trait_aliases = bounds are not allowed on trait aliases parse_box_not_pat = expected pattern, found {$descr} @@ -226,6 +225,10 @@ parse_expected_statement_after_outer_attr = expected statement after outer attribute +parse_expected_struct_field = expected one of `,`, `:`, or `{"}"}`, found `{$token}` + .label = expected one of `,`, `:`, or `{"}"}` + .ident_label = while parsing this struct field + parse_expected_trait_in_trait_impl_found_type = expected a trait, found type parse_extern_crate_name_with_dashes = crate name using dashes are not valid in `extern crate` statements @@ -267,6 +270,9 @@ *[false] a } `for` parameter list +parse_fn_trait_missing_paren = `Fn` bounds require arguments in parentheses + .add_paren = add the 
missing parentheses + parse_forgot_paren = perhaps you forgot parentheses? parse_found_expr_would_be_stmt = expected expression, found `{$token}` @@ -275,6 +281,9 @@ parse_function_body_equals_expr = function body cannot be `= expression;` .suggestion = surround the expression with `{"{"}` and `{"}"}` instead of `=` and `;` +parse_gen_fn = `gen` functions are not yet implemented + .help = for now you can use `gen {"{}"}` blocks and return `impl Iterator` instead + parse_generic_args_in_pat_require_turbofish_syntax = generic args in patterns require the turbofish syntax parse_generic_parameters_without_angle_brackets = generic parameters without surrounding angle brackets @@ -284,7 +293,6 @@ parse_help_set_edition_cargo = set `edition = "{$edition}"` in `Cargo.toml` parse_help_set_edition_standalone = pass `--edition {$edition}` to `rustc` -parse_hexadecimal_float_literal_not_supported = hexadecimal float literal is not supported parse_if_expression_missing_condition = missing condition for `if` expression .condition_label = expected condition here .block_label = if this block is the condition of the `if` expression, then it must be followed by another block @@ -356,8 +364,6 @@ .label_does_not_annotate_this = the inner doc comment doesn't annotate this {$item} .sugg_change_inner_to_outer = to annotate the {$item}, change the doc comment from inner to outer style -parse_int_literal_too_large = integer literal is too large - parse_invalid_block_macro_segment = cannot use a `block` macro fragment here .label = the `block` fragment is within this context .suggestion = wrap this in another block @@ -382,18 +388,8 @@ .suggestion = remove this keyword parse_invalid_expression_in_let_else = a `{$operator}` expression cannot be directly assigned in `let...else` -parse_invalid_float_literal_suffix = invalid suffix `{$suffix}` for float literal - .label = invalid suffix `{$suffix}` - .help = valid suffixes are `f32` and `f64` - -parse_invalid_float_literal_width = invalid width `{$width}` for float literal - .help = valid widths are 32 and 64 - parse_invalid_identifier_with_leading_number = identifiers cannot start with a number -parse_invalid_int_literal_width = invalid width `{$width}` for integer literal - .help = valid widths are 8, 16, 32, 64 and 128 - parse_invalid_interpolated_expression = invalid interpolated expression parse_invalid_literal_suffix = suffixes on {$kind} literals are invalid @@ -412,14 +408,6 @@ parse_invalid_meta_item = expected unsuffixed literal or identifier, found `{$token}` -parse_invalid_num_literal_base_prefix = invalid base prefix for number literal - .note = base prefixes (`0xff`, `0b1010`, `0o755`) are lowercase - .suggestion = try making the prefix lowercase - -parse_invalid_num_literal_suffix = invalid suffix `{$suffix}` for number literal - .label = invalid suffix `{$suffix}` - .help = the suffix must be one of the numeric types (`u32`, `isize`, `f32`, etc.) 
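Several of the messages added above correspond to new parser recoveries later in this patch. For instance, `parse_fn_trait_missing_paren` targets `Fn`-family bounds written without an argument list; a hypothetical before/after:

    // Rejected, now with a machine-applicable suggestion to add the parentheses:
    //     fn apply<F>(f: F) where F: FnOnce -> () { f() }
    // Accepted once the suggestion is applied:
    fn apply<F>(f: F) where F: FnOnce() -> () {
        f()
    }

    fn main() {
        apply(|| println!("called"));
    }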
- parse_invalid_unicode_escape = invalid unicode character escape .label = invalid escape .help = unicode escape must {$surrogate -> @@ -541,6 +529,9 @@ parse_missing_fn_for_method_definition = missing `fn` for method definition .suggestion = add `fn` here to parse `{$ident}` as a public method +parse_missing_fn_params = missing parameters for function definition + .suggestion = add a parameter list + parse_missing_for_in_trait_impl = missing `for` in a trait impl .suggestion = add `for` here @@ -603,13 +594,6 @@ parse_no_digits_literal = no valid digits found for number -parse_non_item_in_item_list = non-item in item list - .suggestion_use_const_not_let = consider using `const` instead of `let` for associated const - .label_list_start = item list starts here - .label_non_item = non-item starts here - .label_list_end = item list ends here - .suggestion_remove_semicolon = consider removing this semicolon - parse_non_string_abi_literal = non-string ABI literal .suggestion = specify the ABI with a string literal @@ -626,7 +610,6 @@ parse_note_pattern_alternatives_use_single_vert = alternatives in or-patterns are separated with `|`, not `||` -parse_octal_float_literal_not_supported = octal float literal is not supported parse_or_pattern_not_allowed_in_fn_parameters = top-level or-patterns are not allowed in function parameters parse_or_pattern_not_allowed_in_let_binding = top-level or-patterns are not allowed in `let` bindings parse_out_of_range_hex_escape = out of range hex escape diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -431,6 +431,17 @@ } #[derive(Diagnostic)] +#[diag(parse_expected_struct_field)] +pub(crate) struct ExpectedStructField { + #[primary_span] + #[label] + pub span: Span, + pub token: Token, + #[label(parse_ident_label)] + pub ident_span: Span, +} + +#[derive(Diagnostic)] #[diag(parse_outer_attribute_not_allowed_on_if_else)] pub(crate) struct OuterAttributeNotAllowedOnIfElse { #[primary_span] @@ -510,6 +521,14 @@ } #[derive(Diagnostic)] +#[diag(parse_gen_fn)] +#[help] +pub(crate) struct GenFn { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] #[diag(parse_comma_after_base_struct)] #[note] pub(crate) struct CommaAfterBaseStruct { @@ -1359,6 +1378,34 @@ pub for_param_list_exists: bool, } +pub(crate) struct FnTraitMissingParen { + pub span: Span, + pub machine_applicable: bool, +} + +impl AddToDiagnostic for FnTraitMissingParen { + fn add_to_diagnostic_with(self, diag: &mut rustc_errors::Diagnostic, _: F) + where + F: Fn( + &mut rustc_errors::Diagnostic, + rustc_errors::SubdiagnosticMessage, + ) -> rustc_errors::SubdiagnosticMessage, + { + diag.span_label(self.span, crate::fluent_generated::parse_fn_trait_missing_paren); + let applicability = if self.machine_applicable { + Applicability::MachineApplicable + } else { + Applicability::MaybeIncorrect + }; + diag.span_suggestion_short( + self.span.shrink_to_hi(), + crate::fluent_generated::parse_add_paren, + "()", + applicability, + ); + } +} + #[derive(Diagnostic)] #[diag(parse_unexpected_if_with_if)] pub(crate) struct UnexpectedIfWithIf( @@ -1533,6 +1580,14 @@ } #[derive(Diagnostic)] +#[diag(parse_missing_fn_params)] +pub(crate) struct MissingFnParams { + #[primary_span] + #[suggestion(code = "()", 
applicability = "machine-applicable", style = "short")] + pub span: Span, +} + +#[derive(Diagnostic)] #[diag(parse_missing_trait_in_trait_impl)] pub(crate) struct MissingTraitInTraitImpl { #[primary_span] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/diagnostics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/diagnostics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/diagnostics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/diagnostics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -111,9 +111,10 @@ // If there is no suspicious span, give the last properly closed block may help if let Some(parent) = diag_info.matching_block_spans.last() && diag_info.open_braces.last().is_none() - && diag_info.empty_block_spans.iter().all(|&sp| sp != parent.0.to(parent.1)) { - err.span_label(parent.0, "this opening brace..."); - err.span_label(parent.1, "...matches this closing brace"); + && diag_info.empty_block_spans.iter().all(|&sp| sp != parent.0.to(parent.1)) + { + err.span_label(parent.0, "this opening brace..."); + err.span_label(parent.1, "...matches this closing brace"); } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -64,10 +64,10 @@ override_span, nbsp_is_whitespace: false, }; - let (token_trees, unmatched_delims) = + let (stream, res, unmatched_delims) = tokentrees::TokenTreesReader::parse_all_token_trees(string_reader); - match token_trees { - Ok(stream) if unmatched_delims.is_empty() => Ok(stream), + match res { + Ok(()) if unmatched_delims.is_empty() => Ok(stream), _ => { // Return error if there are unmatched delimiters or unclosed delimiters. 
// We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch @@ -79,9 +79,11 @@ err.buffer(&mut buffer); } } - if let Err(err) = token_trees { - // Add unclosing delimiter error - err.buffer(&mut buffer); + if let Err(errs) = res { + // Add unclosing delimiter or diff marker errors + for err in errs { + err.buffer(&mut buffer); + } } Err(buffer) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/tokentrees.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/tokentrees.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/tokentrees.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/lexer/tokentrees.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,7 +5,7 @@ use rustc_ast::token::{self, Delimiter, Token}; use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree}; use rustc_ast_pretty::pprust::token_to_string; -use rustc_errors::{PErr, PResult}; +use rustc_errors::PErr; pub(super) struct TokenTreesReader<'a> { string_reader: StringReader<'a>, @@ -18,36 +18,42 @@ impl<'a> TokenTreesReader<'a> { pub(super) fn parse_all_token_trees( string_reader: StringReader<'a>, - ) -> (PResult<'a, TokenStream>, Vec) { + ) -> (TokenStream, Result<(), Vec>>, Vec) { let mut tt_reader = TokenTreesReader { string_reader, token: Token::dummy(), diag_info: TokenTreeDiagInfo::default(), }; - let res = tt_reader.parse_token_trees(/* is_delimited */ false); - (res, tt_reader.diag_info.unmatched_delims) + let (stream, res) = tt_reader.parse_token_trees(/* is_delimited */ false); + (stream, res, tt_reader.diag_info.unmatched_delims) } // Parse a stream of tokens into a list of `TokenTree`s. - fn parse_token_trees(&mut self, is_delimited: bool) -> PResult<'a, TokenStream> { + fn parse_token_trees( + &mut self, + is_delimited: bool, + ) -> (TokenStream, Result<(), Vec>>) { self.token = self.string_reader.next_token().0; let mut buf = Vec::new(); loop { match self.token.kind { - token::OpenDelim(delim) => buf.push(self.parse_token_tree_open_delim(delim)?), + token::OpenDelim(delim) => { + buf.push(match self.parse_token_tree_open_delim(delim) { + Ok(val) => val, + Err(errs) => return (TokenStream::new(buf), Err(errs)), + }) + } token::CloseDelim(delim) => { - return if is_delimited { - Ok(TokenStream::new(buf)) - } else { - Err(self.close_delim_err(delim)) - }; + return ( + TokenStream::new(buf), + if is_delimited { Ok(()) } else { Err(vec![self.close_delim_err(delim)]) }, + ); } token::Eof => { - return if is_delimited { - Err(self.eof_err()) - } else { - Ok(TokenStream::new(buf)) - }; + return ( + TokenStream::new(buf), + if is_delimited { Err(vec![self.eof_err()]) } else { Ok(()) }, + ); } _ => { // Get the next normal token. 
This might require getting multiple adjacent @@ -55,16 +61,14 @@ let (this_spacing, next_tok) = loop { let (next_tok, is_next_tok_preceded_by_whitespace) = self.string_reader.next_token(); - if !is_next_tok_preceded_by_whitespace { - if let Some(glued) = self.token.glue(&next_tok) { - self.token = glued; - } else { - let this_spacing = - if next_tok.is_op() { Spacing::Joint } else { Spacing::Alone }; - break (this_spacing, next_tok); - } - } else { + if is_next_tok_preceded_by_whitespace { break (Spacing::Alone, next_tok); + } else if let Some(glued) = self.token.glue(&next_tok) { + self.token = glued; + } else { + let this_spacing = + if next_tok.is_punct() { Spacing::Joint } else { Spacing::Alone }; + break (this_spacing, next_tok); } }; let this_tok = std::mem::replace(&mut self.token, next_tok); @@ -99,7 +103,10 @@ err } - fn parse_token_tree_open_delim(&mut self, open_delim: Delimiter) -> PResult<'a, TokenTree> { + fn parse_token_tree_open_delim( + &mut self, + open_delim: Delimiter, + ) -> Result>> { // The span for beginning of the delimited section let pre_span = self.token.span; @@ -108,7 +115,26 @@ // Parse the token trees within the delimiters. // We stop at any delimiter so we can try to recover if the user // uses an incorrect delimiter. - let tts = self.parse_token_trees(/* is_delimited */ true)?; + let (tts, res) = self.parse_token_trees(/* is_delimited */ true); + if let Err(mut errs) = res { + // If there are unclosed delims, see if there are diff markers and if so, point them + // out instead of complaining about the unclosed delims. + let mut parser = crate::stream_to_parser(self.string_reader.sess, tts, None); + let mut diff_errs = vec![]; + while parser.token != token::Eof { + if let Err(diff_err) = parser.err_diff_marker() { + diff_errs.push(diff_err); + } + parser.bump(); + } + if !diff_errs.is_empty() { + errs.iter_mut().for_each(|err| { + err.delay_as_bug(); + }); + return Err(diff_errs); + } + return Err(errs); + } // Expand to cover the entire delimited token tree let delim_span = DelimSpan::from_pair(pre_span, self.token.span); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/diagnostics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/diagnostics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/diagnostics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/diagnostics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -39,7 +39,7 @@ use rustc_session::errors::ExprParenthesesNeeded; use rustc_span::source_map::Spanned; use rustc_span::symbol::{kw, sym, Ident}; -use rustc_span::{Span, SpanSnippetError, Symbol, DUMMY_SP}; +use rustc_span::{BytePos, Span, SpanSnippetError, Symbol, DUMMY_SP}; use std::mem::take; use std::ops::{Deref, DerefMut}; use thin_vec::{thin_vec, ThinVec}; @@ -314,11 +314,10 @@ // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#` let ident_name = ident.name.to_string(); - Some(SuggEscapeIdentifier { - span: ident.span.shrink_to_lo(), - ident_name - }) - } else { None }; + Some(SuggEscapeIdentifier { span: ident.span.shrink_to_lo(), ident_name }) + } else { + None + }; let suggest_remove_comma = if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) { @@ -375,9 +374,11 @@ // and current token should be Ident with the item name (i.e. 
the function name) // if there is a `<` after the fn name, then don't show a suggestion, show help - if !self.look_ahead(1, |t| *t == token::Lt) && - let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) { - err.multipart_suggestion_verbose( + if !self.look_ahead(1, |t| *t == token::Lt) + && let Ok(snippet) = + self.sess.source_map().span_to_snippet(generic.span) + { + err.multipart_suggestion_verbose( format!("place the generic parameter name after the {ident_name} name"), vec![ (self.token.span.shrink_to_hi(), snippet), @@ -385,11 +386,11 @@ ], Applicability::MaybeIncorrect, ); - } else { - err.help(format!( - "place the generic parameter name after the {ident_name} name" - )); - } + } else { + err.help(format!( + "place the generic parameter name after the {ident_name} name" + )); + } } } Err(err) => { @@ -402,7 +403,9 @@ } } - if let Some(recovered_ident) = recovered_ident && recover { + if let Some(recovered_ident) = recovered_ident + && recover + { err.emit(); Ok(recovered_ident) } else { @@ -501,8 +504,10 @@ // Special-case "expected `;`" errors if expected.contains(&TokenType::Token(token::Semi)) { - if self.prev_token == token::Question && self.maybe_recover_from_ternary_operator() { - return Ok(true); + // If the user is trying to write a ternary expression, recover it and + // return an Err to prevent a cascade of irrelevant diagnostics + if self.prev_token == token::Question && let Err(e) = self.maybe_recover_from_ternary_operator() { + return Err(e); } if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP { @@ -617,19 +622,19 @@ } if let TokenKind::Ident(prev, _) = &self.prev_token.kind - && let TokenKind::Ident(cur, _) = &self.token.kind + && let TokenKind::Ident(cur, _) = &self.token.kind { - let concat = Symbol::intern(&format!("{prev}{cur}")); - let ident = Ident::new(concat, DUMMY_SP); - if ident.is_used_keyword() || ident.is_reserved() || ident.is_raw_guess() { - let span = self.prev_token.span.to(self.token.span); - err.span_suggestion_verbose( - span, - format!("consider removing the space to spell keyword `{concat}`"), - concat, - Applicability::MachineApplicable, - ); - } + let concat = Symbol::intern(&format!("{prev}{cur}")); + let ident = Ident::new(concat, DUMMY_SP); + if ident.is_used_keyword() || ident.is_reserved() || ident.is_raw_guess() { + let span = self.prev_token.span.to(self.token.span); + err.span_suggestion_verbose( + span, + format!("consider removing the space to spell keyword `{concat}`"), + concat, + Applicability::MachineApplicable, + ); + } } // `pub` may be used for an item or `pub(crate)` @@ -645,6 +650,26 @@ ); } + if let token::DocComment(kind, style, _) = self.token.kind { + // We have something like `expr //!val` where the user likely meant `expr // !val` + let pos = self.token.span.lo() + BytePos(2); + let span = self.token.span.with_lo(pos).with_hi(pos); + err.span_suggestion_verbose( + span, + format!( + "add a space before {} to write a regular comment", + match (kind, style) { + (token::CommentKind::Line, ast::AttrStyle::Inner) => "`!`", + (token::CommentKind::Block, ast::AttrStyle::Inner) => "`!`", + (token::CommentKind::Line, ast::AttrStyle::Outer) => "the last `/`", + (token::CommentKind::Block, ast::AttrStyle::Outer) => "the last `*`", + }, + ), + " ".to_string(), + Applicability::MachineApplicable, + ); + } + // Add suggestion for a missing closing angle bracket if '>' is included in expected_tokens // there are unclosed angle brackets if self.unmatched_angle_bracket_count > 0 @@ -827,6 +852,65 @@ 
None } + pub(super) fn recover_closure_body( + &mut self, + mut err: DiagnosticBuilder<'a, ErrorGuaranteed>, + before: token::Token, + prev: token::Token, + token: token::Token, + lo: Span, + decl_hi: Span, + ) -> PResult<'a, P> { + err.span_label(lo.to(decl_hi), "while parsing the body of this closure"); + match before.kind { + token::OpenDelim(Delimiter::Brace) + if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => + { + // `{ || () }` should have been `|| { () }` + err.multipart_suggestion( + "you might have meant to open the body of the closure, instead of enclosing \ + the closure in a block", + vec![ + (before.span, String::new()), + (prev.span.shrink_to_hi(), " {".to_string()), + ], + Applicability::MaybeIncorrect, + ); + err.emit(); + self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]); + } + token::OpenDelim(Delimiter::Parenthesis) + if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => + { + // We are within a function call or tuple, we can emit the error + // and recover. + self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis), &token::Comma]); + + err.multipart_suggestion_verbose( + "you might have meant to open the body of the closure", + vec![ + (prev.span.shrink_to_hi(), " {".to_string()), + (self.token.span.shrink_to_lo(), "}".to_string()), + ], + Applicability::MaybeIncorrect, + ); + err.emit(); + } + _ if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => { + // We don't have a heuristic to correctly identify where the block + // should be closed. + err.multipart_suggestion_verbose( + "you might have meant to open the body of the closure", + vec![(prev.span.shrink_to_hi(), " {".to_string())], + Applicability::HasPlaceholders, + ); + return Err(err); + } + _ => return Err(err), + } + Ok(self.mk_expr_err(lo.to(self.token.span))) + } + /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, /// passes through any errors encountered. Used for error recovery. pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) { @@ -1025,8 +1109,7 @@ .emit(); match self.parse_expr() { Ok(_) => { - *expr = - self.mk_expr_err(expr.span.to(self.prev_token.span)); + *expr = self.mk_expr_err(expr.span.to(self.prev_token.span)); return Ok(()); } Err(err) => { @@ -1218,7 +1301,9 @@ return if token::ModSep == self.token.kind { // We have some certainty that this was a bad turbofish at this point. // `foo< bar >::` - if let ExprKind::Binary(o, ..) = inner_op.kind && o.node == BinOpKind::Lt { + if let ExprKind::Binary(o, ..) = inner_op.kind + && o.node == BinOpKind::Lt + { err.suggest_turbofish = Some(op.span.shrink_to_lo()); } else { err.help_turbofish = Some(()); @@ -1248,7 +1333,9 @@ } else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind { // We have high certainty that this was a bad turbofish at this point. // `foo< bar >(` - if let ExprKind::Binary(o, ..) = inner_op.kind && o.node == BinOpKind::Lt { + if let ExprKind::Binary(o, ..) = inner_op.kind + && o.node == BinOpKind::Lt + { err.suggest_turbofish = Some(op.span.shrink_to_lo()); } else { err.help_turbofish = Some(()); @@ -1343,10 +1430,10 @@ /// Rust has no ternary operator (`cond ? then : else`). Parse it and try /// to recover from it if `then` and `else` are valid expressions. Returns - /// whether it was a ternary operator. - pub(super) fn maybe_recover_from_ternary_operator(&mut self) -> bool { + /// an err if this appears to be a ternary expression. 
+ pub(super) fn maybe_recover_from_ternary_operator(&mut self) -> PResult<'a, ()> { if self.prev_token != token::Question { - return false; + return PResult::Ok(()); } let lo = self.prev_token.span.lo(); @@ -1364,20 +1451,18 @@ if self.eat_noexpect(&token::Colon) { match self.parse_expr() { Ok(_) => { - self.sess.emit_err(TernaryOperator { span: self.token.span.with_lo(lo) }); - return true; + return Err(self + .sess + .create_err(TernaryOperator { span: self.token.span.with_lo(lo) })); } Err(err) => { err.cancel(); - self.restore_snapshot(snapshot); } }; } - } else { - self.restore_snapshot(snapshot); - }; - - false + } + self.restore_snapshot(snapshot); + Ok(()) } pub(super) fn maybe_recover_from_bad_type_plus(&mut self, ty: &Ty) -> PResult<'a, ()> { @@ -1826,19 +1911,21 @@ let sm = self.sess.source_map(); let left = begin_par_sp; let right = self.prev_token.span; - let left_snippet = if let Ok(snip) = sm.span_to_prev_source(left) && - !snip.ends_with(' ') { - " ".to_string() - } else { - "".to_string() - }; + let left_snippet = if let Ok(snip) = sm.span_to_prev_source(left) + && !snip.ends_with(' ') + { + " ".to_string() + } else { + "".to_string() + }; - let right_snippet = if let Ok(snip) = sm.span_to_next_source(right) && - !snip.starts_with(' ') { - " ".to_string() - } else { - "".to_string() - }; + let right_snippet = if let Ok(snip) = sm.span_to_next_source(right) + && !snip.starts_with(' ') + { + " ".to_string() + } else { + "".to_string() + }; self.sess.emit_err(ParenthesesInForHead { span: vec![left, right], @@ -2721,8 +2808,15 @@ } pub fn recover_diff_marker(&mut self) { + if let Err(mut err) = self.err_diff_marker() { + err.emit(); + FatalError.raise(); + } + } + + pub fn err_diff_marker(&mut self) -> PResult<'a, ()> { let Some(start) = self.diff_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) else { - return; + return Ok(()); }; let mut spans = Vec::with_capacity(3); spans.push(start); @@ -2769,8 +2863,7 @@ "for an explanation on these markers from the `git` documentation, visit \ ", ); - err.emit(); - FatalError.raise() + Err(err) } /// Parse and throw away a parenthesized comma separated diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/expr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/expr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/expr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/expr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,7 +9,7 @@ use crate::errors; use crate::maybe_recover_from_interpolated_ty_qpath; use ast::mut_visit::{noop_visit_expr, MutVisitor}; -use ast::{Path, PathSegment}; +use ast::{GenBlockKind, Path, PathSegment}; use core::mem; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Token, TokenKind}; @@ -32,10 +32,10 @@ use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded}; use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP; use rustc_session::lint::BuiltinLintDiagnostics; -use rustc_span::source_map::{self, Span, Spanned}; +use rustc_span::source_map::{self, Spanned}; use rustc_span::symbol::kw::PathRoot; use rustc_span::symbol::{kw, sym, Ident, Symbol}; -use rustc_span::{BytePos, Pos}; +use rustc_span::{BytePos, Pos, Span}; use thin_vec::{thin_vec, ThinVec}; /// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression @@ -1007,8 +1007,9 @@ let span = self.token.span; let sm = self.sess.source_map(); let (span, actual) = match (&self.token.kind, 
self.subparser_name) { - (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) => - (span.shrink_to_hi(), actual.into()), + (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) => { + (span.shrink_to_hi(), actual.into()) + } _ => (span, actual), }; self.sess.emit_err(errors::UnexpectedTokenAfterDot { span, actual }); @@ -1440,14 +1441,20 @@ } else if this.token.uninterpolated_span().at_least_rust_2018() { // `Span:.at_least_rust_2018()` is somewhat expensive; don't get it repeatedly. if this.check_keyword(kw::Async) { - if this.is_async_block() { + if this.is_gen_block(kw::Async) { // Check for `async {` and `async move {`. - this.parse_async_block() + this.parse_gen_block() } else { this.parse_expr_closure() } } else if this.eat_keyword(kw::Await) { this.recover_incorrect_await_syntax(lo, this.prev_token.span) + } else if this.token.uninterpolated_span().at_least_rust_2024() { + if this.is_gen_block(kw::Gen) { + this.parse_gen_block() + } else { + this.parse_expr_lit() + } } else { this.parse_expr_lit() } @@ -1550,10 +1557,7 @@ self.sess.emit_err(errors::MacroInvocationWithQualifiedPath(path.span)); } let lo = path.span; - let mac = P(MacCall { - path, - args: self.parse_delim_args()?, - }); + let mac = P(MacCall { path, args: self.parse_delim_args()? }); (lo.to(self.prev_token.span), ExprKind::MacCall(mac)) } else if self.check(&token::OpenDelim(Delimiter::Brace)) && let Some(expr) = self.maybe_parse_struct_expr(&qself, &path) @@ -1592,7 +1596,7 @@ } else if !ate_colon && self.may_recover() && (matches!(self.token.kind, token::CloseDelim(_) | token::Comma) - || self.token.is_op()) + || self.token.is_punct()) { let (lit, _) = self.recover_unclosed_char(label_.ident, Parser::mk_token_lit_char, |self_| { @@ -1771,7 +1775,9 @@ fn parse_expr_break(&mut self) -> PResult<'a, P> { let lo = self.prev_token.span; let mut label = self.eat_label(); - let kind = if self.token == token::Colon && let Some(label) = label.take() { + let kind = if self.token == token::Colon + && let Some(label) = label.take() + { // The value expression can be a labeled loop, see issue #86948, e.g.: // `loop { break 'label: loop { break 'label 42; }; }` let lexpr = self.parse_expr_labeled(label, true)?; @@ -1848,7 +1854,7 @@ let lo = self.prev_token.span; let kind = ExprKind::Yield(self.parse_expr_opt()?); let span = lo.to(self.prev_token.span); - self.sess.gated_spans.gate(sym::generators, span); + self.sess.gated_spans.gate(sym::yield_expr, span); let expr = self.mk_expr(span, kind); self.maybe_recover_from_bad_qpath(expr) } @@ -2209,6 +2215,7 @@ fn parse_expr_closure(&mut self) -> PResult<'a, P> { let lo = self.token.span; + let before = self.prev_token.clone(); let binder = if self.check_keyword(kw::For) { let lo = self.token.span; let lifetime_defs = self.parse_late_bound_lifetime_defs()?; @@ -2239,7 +2246,12 @@ FnRetTy::Default(_) => { let restrictions = self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET; - self.parse_expr_res(restrictions, None)? + let prev = self.prev_token.clone(); + let token = self.token.clone(); + match self.parse_expr_res(restrictions, None) { + Ok(expr) => expr, + Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?, + } } _ => { // If an explicit return type is given, require a block to appear (RFC 968). @@ -2291,13 +2303,14 @@ /// Parses an optional `move` prefix to a closure-like construct. 
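The expression parser above starts dispatching to `parse_gen_block` when it sees a `gen` block on edition 2024, gated behind `gen_blocks`, mirroring the existing `async { .. }` handling. A sketch of the surface syntax this accepts, kept entirely in comments because it needs a nightly toolchain, the then-unstable 2024 edition, and the feature gate, and semantics were still being implemented at this point:

    // #![feature(gen_blocks)]
    // fn evens() -> impl Iterator<Item = u32> {
    //     gen { yield 0; yield 2; yield 4; }
    // }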
fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> { if self.eat_keyword(kw::Move) { + let move_kw_span = self.prev_token.span; // Check for `move async` and recover if self.check_keyword(kw::Async) { let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo); Err(errors::AsyncMoveOrderIncorrect { span: move_async_span } .into_diagnostic(&self.sess.span_diagnostic)) } else { - Ok(CaptureBy::Value) + Ok(CaptureBy::Value { move_kw: move_kw_span }) } } else { Ok(CaptureBy::Ref) @@ -2371,16 +2384,18 @@ let mut recover_block_from_condition = |this: &mut Self| { let block = match &mut cond.kind { ExprKind::Binary(Spanned { span: binop_span, .. }, _, right) - if let ExprKind::Block(_, None) = right.kind => { - self.sess.emit_err(errors::IfExpressionMissingThenBlock { - if_span: lo, - missing_then_block_sub: - errors::IfExpressionMissingThenBlockSub::UnfinishedCondition(cond_span.shrink_to_lo().to(*binop_span)), - let_else_sub: None, - - }); - std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi())) - }, + if let ExprKind::Block(_, None) = right.kind => + { + self.sess.emit_err(errors::IfExpressionMissingThenBlock { + if_span: lo, + missing_then_block_sub: + errors::IfExpressionMissingThenBlockSub::UnfinishedCondition( + cond_span.shrink_to_lo().to(*binop_span), + ), + let_else_sub: None, + }); + std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi())) + } ExprKind::Block(_, None) => { self.sess.emit_err(errors::IfExpressionMissingCondition { if_span: lo.shrink_to_hi(), @@ -2426,10 +2441,26 @@ self.error_on_extra_if(&cond)?; // Parse block, which will always fail, but we can add a nice note to the error self.parse_block().map_err(|mut err| { - err.span_note( - cond_span, - "the `if` expression is missing a block after this condition", - ); + if self.prev_token == token::Semi + && self.token == token::AndAnd + && let maybe_let = self.look_ahead(1, |t| t.clone()) + && maybe_let.is_keyword(kw::Let) + { + err.span_suggestion( + self.prev_token.span, + "consider removing this semicolon to parse the `let` as part of the same chain", + "", + Applicability::MachineApplicable, + ).span_note( + self.token.span.to(maybe_let.span), + "you likely meant to continue parsing the let-chain starting here", + ); + } else { + err.span_note( + cond_span, + "the `if` expression is missing a block after this condition", + ); + } err })? } @@ -2459,10 +2490,16 @@ /// Parses a `let $pat = $expr` pseudo-expression. fn parse_expr_let(&mut self, restrictions: Restrictions) -> PResult<'a, P> { let is_recovered = if !restrictions.contains(Restrictions::ALLOW_LET) { - Some(self.sess.emit_err(errors::ExpectedExpressionFoundLet { + let err = errors::ExpectedExpressionFoundLet { span: self.token.span, reason: ForbiddenLetReason::OtherForbidden, - })) + }; + if self.prev_token.kind == token::BinOp(token::Or) { + // This was part of a closure, the that part of the parser recover. + return Err(err.into_diagnostic(&self.sess.span_diagnostic)); + } else { + Some(self.sess.emit_err(err)) + } } else { None }; @@ -2557,13 +2594,16 @@ } fn error_on_extra_if(&mut self, cond: &P) -> PResult<'a, ()> { - if let ExprKind::Binary(Spanned { span: binop_span, node: binop}, _, right) = &cond.kind && - let BinOpKind::And = binop && - let ExprKind::If(cond, ..) 
= &right.kind { - Err(self.sess.create_err(errors::UnexpectedIfWithIf(binop_span.shrink_to_hi().to(cond.span.shrink_to_lo())))) - } else { - Ok(()) - } + if let ExprKind::Binary(Spanned { span: binop_span, node: binop }, _, right) = &cond.kind + && let BinOpKind::And = binop + && let ExprKind::If(cond, ..) = &right.kind + { + Err(self.sess.create_err(errors::UnexpectedIfWithIf( + binop_span.shrink_to_hi().to(cond.span.shrink_to_lo()), + ))) + } else { + Ok(()) + } } /// Parses `for in ` (`for` token already eaten). @@ -2834,7 +2874,7 @@ )?; let guard = if this.eat_keyword(kw::If) { let if_span = this.prev_token.span; - let mut cond = this.parse_expr_res(Restrictions::ALLOW_LET, None)?; + let mut cond = this.parse_match_guard_condition()?; CondChecker { parser: this, forbid_let_reason: None }.visit_expr(&mut cond); @@ -2860,9 +2900,9 @@ { err.span_suggestion( this.token.span, - "try using a fat arrow here", + "use a fat arrow to start a match arm", "=>", - Applicability::MaybeIncorrect, + Applicability::MachineApplicable, ); err.emit(); this.bump(); @@ -2911,9 +2951,9 @@ .or_else(|mut err| { if this.token == token::FatArrow { if let Ok(expr_lines) = sm.span_to_lines(expr.span) - && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span) - && arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col - && expr_lines.lines.len() == 2 + && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span) + && arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col + && expr_lines.lines.len() == 2 { // We check whether there's any trailing code in the parse span, // if there isn't, we very likely have the following: @@ -2979,6 +3019,33 @@ }) } + fn parse_match_guard_condition(&mut self) -> PResult<'a, P> { + self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, None).map_err( + |mut err| { + if self.prev_token == token::OpenDelim(Delimiter::Brace) { + let sugg_sp = self.prev_token.span.shrink_to_lo(); + // Consume everything within the braces, let's avoid further parse + // errors. + self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore); + let msg = "you might have meant to start a match arm after the match guard"; + if self.eat(&token::CloseDelim(Delimiter::Brace)) { + let applicability = if self.token.kind != token::FatArrow { + // We have high confidence that we indeed didn't have a struct + // literal in the match guard, but rather we had some operation + // that ended in a path, immediately followed by a block that was + // meant to be the match arm. + Applicability::MachineApplicable + } else { + Applicability::MaybeIncorrect + }; + err.span_suggestion_verbose(sugg_sp, msg, "=> ".to_string(), applicability); + } + } + err + }, + ) + } + pub(crate) fn is_builtin(&self) -> bool { self.token.is_keyword(kw::Builtin) && self.look_ahead(1, |t| *t == token::Pound) } @@ -3015,18 +3082,24 @@ && self.token.uninterpolated_span().at_least_rust_2018() } - /// Parses an `async move? {...}` expression. - fn parse_async_block(&mut self) -> PResult<'a, P> { + /// Parses an `async move? {...}` or `gen move? {...}` expression. 
+ fn parse_gen_block(&mut self) -> PResult<'a, P> { let lo = self.token.span; - self.expect_keyword(kw::Async)?; + let kind = if self.eat_keyword(kw::Async) { + GenBlockKind::Async + } else { + assert!(self.eat_keyword(kw::Gen)); + self.sess.gated_spans.gate(sym::gen_blocks, lo.to(self.token.span)); + GenBlockKind::Gen + }; let capture_clause = self.parse_capture_clause()?; let (attrs, body) = self.parse_inner_attrs_and_block()?; - let kind = ExprKind::Async(capture_clause, body); + let kind = ExprKind::Gen(capture_clause, body, kind); Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs)) } - fn is_async_block(&self) -> bool { - self.token.is_keyword(kw::Async) + fn is_gen_block(&self, kw: Symbol) -> bool { + self.token.is_keyword(kw) && (( // `async move {` self.is_keyword_ahead(1, &[kw::Move]) @@ -3049,9 +3122,10 @@ || self.look_ahead(2, |t| t == &token::Colon) && ( // `{ ident: token, ` cannot start a block. - self.look_ahead(4, |t| t == &token::Comma) || - // `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`. - self.look_ahead(3, |t| !t.can_begin_type()) + self.look_ahead(4, |t| t == &token::Comma) + // `{ ident: ` cannot start a block unless it's a type ascription + // `ident: Type`. + || self.look_ahead(3, |t| !t.can_begin_type()) ) ) } @@ -3091,6 +3165,7 @@ let mut fields = ThinVec::new(); let mut base = ast::StructRest::None; let mut recover_async = false; + let in_if_guard = self.restrictions.contains(Restrictions::IN_IF_GUARD); let mut async_block_err = |e: &mut Diagnostic, span: Span| { recover_async = true; @@ -3128,6 +3203,26 @@ e.span_label(pth.span, "while parsing this struct"); } + if let Some((ident, _)) = self.token.ident() + && !self.token.is_reserved_ident() + && self.look_ahead(1, |t| { + AssocOp::from_token(&t).is_some() + || matches!(t.kind, token::OpenDelim(_)) + || t.kind == token::Dot + }) + { + // Looks like they tried to write a shorthand, complex expression. + e.span_suggestion_verbose( + self.token.span.shrink_to_lo(), + "try naming a field", + &format!("{ident}: ",), + Applicability::MaybeIncorrect, + ); + } + if in_if_guard && close_delim == Delimiter::Brace { + return Err(e); + } + if !recover { return Err(e); } @@ -3173,19 +3268,6 @@ ",", Applicability::MachineApplicable, ); - } else if is_shorthand - && (AssocOp::from_token(&self.token).is_some() - || matches!(&self.token.kind, token::OpenDelim(_)) - || self.token.kind == token::Dot) - { - // Looks like they tried to write a shorthand, complex expression. - let ident = parsed_field.expect("is_shorthand implies Some").ident; - e.span_suggestion( - ident.span.shrink_to_lo(), - "try naming a field", - &format!("{ident}: "), - Applicability::HasPlaceholders, - ); } } if !recover { @@ -3288,6 +3370,24 @@ // Check if a colon exists one ahead. This means we're parsing a fieldname. let is_shorthand = !this.look_ahead(1, |t| t == &token::Colon || t == &token::Eq); + // Proactively check whether parsing the field will be incorrect. 
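The struct-literal recoveries in this hunk (the relocated "try naming a field" suggestion above and the proactive `ExpectedStructField` check just below) are aimed at field lists where a shorthand field is followed by something other than `,`, `:`, or `}`. Hypothetical inputs, together with the accepted form:

    // Rejected, with "expected one of `,`, `:`, or `}`" and a "try naming a field" hint:
    //     let _ = Demo { x + 1 };
    // Rejected, with the same expectation message pointing at the stray token:
    //     let _ = Demo { x 1 };
    struct Demo {
        x: i32,
    }

    fn main() {
        let x = 41;
        let _ = Demo { x: x + 1 }; // accepted
    }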
+ let is_wrong = this.token.is_ident() + && !this.token.is_reserved_ident() + && !this.look_ahead(1, |t| { + t == &token::Colon + || t == &token::Eq + || t == &token::Comma + || t == &token::CloseDelim(Delimiter::Brace) + || t == &token::CloseDelim(Delimiter::Parenthesis) + }); + if is_wrong { + return Err(errors::ExpectedStructField { + span: this.look_ahead(1, |t| t.span), + ident_span: this.token.span, + token: this.look_ahead(1, |t| t.clone()), + } + .into_diagnostic(&self.sess.span_diagnostic)); + } let (ident, expr) = if is_shorthand { // Mimic `x: x` for the `x` field shorthand. let ident = this.parse_ident_common(false)?; @@ -3508,8 +3608,7 @@ noop_visit_expr(e, self); self.forbid_let_reason = forbid_let_reason; } - ExprKind::Cast(ref mut op, _) - | ExprKind::Type(ref mut op, _) => { + ExprKind::Cast(ref mut op, _) | ExprKind::Type(ref mut op, _) => { let forbid_let_reason = self.forbid_let_reason; self.forbid_let_reason = Some(OtherForbidden); self.visit_expr(op); @@ -3526,7 +3625,7 @@ | ExprKind::Match(_, _) | ExprKind::Closure(_) | ExprKind::Block(_, _) - | ExprKind::Async(_, _) + | ExprKind::Gen(_, _, _) | ExprKind::TryBlock(_) | ExprKind::Underscore | ExprKind::Path(_, _) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/item.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/item.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/item.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/item.rs 2023-12-21 16:55:28.000000000 +0000 @@ -22,9 +22,9 @@ }; use rustc_span::edit_distance::edit_distance; use rustc_span::edition::Edition; -use rustc_span::source_map::{self, Span}; +use rustc_span::source_map; use rustc_span::symbol::{kw, sym, Ident, Symbol}; -use rustc_span::DUMMY_SP; +use rustc_span::{Span, DUMMY_SP}; use std::fmt::Write; use std::mem; use thin_vec::{thin_vec, ThinVec}; @@ -122,7 +122,9 @@ ) -> PResult<'a, Option> { // Don't use `maybe_whole` so that we have precise control // over when we bump the parser - if let token::Interpolated(nt) = &self.token.kind && let token::NtItem(item) = &**nt { + if let token::Interpolated(nt) = &self.token.kind + && let token::NtItem(item) = &**nt + { let mut item = item.clone(); self.bump(); @@ -623,11 +625,10 @@ // `impl impl Default for Wrapper` // ^^^^^ let extra_impl_kw = ty_first.span.until(bound.span()); - self.sess - .emit_err(errors::ExtraImplKeywordInTraitImpl { - extra_impl_kw, - impl_trait_span: ty_first.span - }); + self.sess.emit_err(errors::ExtraImplKeywordInTraitImpl { + extra_impl_kw, + impl_trait_span: ty_first.span, + }); } else { self.sess.emit_err(errors::ExpectedTraitInTraitImplFoundType { span: ty_first.span, @@ -813,7 +814,12 @@ fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemInfo> { let unsafety = self.parse_unsafety(Case::Sensitive); // Parse optional `auto` prefix. - let is_auto = if self.eat_keyword(kw::Auto) { IsAuto::Yes } else { IsAuto::No }; + let is_auto = if self.eat_keyword(kw::Auto) { + self.sess.gated_spans.gate(sym::auto_traits, self.prev_token.span); + IsAuto::Yes + } else { + IsAuto::No + }; self.expect_keyword(kw::Trait)?; let ident = self.parse_ident()?; @@ -1301,7 +1307,9 @@ // Provide a nice error message if the user placed a where-clause before the item body. // Users may be tempted to write such code if they are still used to the deprecated // where-clause location on type aliases and associated types. See also #89122. 
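Separately, `parse_item_trait` above now records an `auto_traits` gated span when it eats the `auto` keyword, so the unstable syntax is flagged through the parser's own feature-gate bookkeeping. For reference, the nightly-only syntax this guards (hedged illustration):

    // Requires a nightly compiler.
    #![feature(auto_traits)]

    auto trait AssumedMarker {}

    fn main() {}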
- if before_where_clause.has_where_token && let Some(expr) = &expr { + if before_where_clause.has_where_token + && let Some(expr) = &expr + { self.sess.emit_err(errors::WhereClauseBeforeConstBody { span: before_where_clause.span, name: ident.span, @@ -1944,7 +1952,8 @@ let mut err = self.expected_ident_found_err(); if self.eat_keyword_noexpect(kw::Let) && let removal_span = self.prev_token.span.until(self.token.span) - && let Ok(ident) = self.parse_ident_common(false) + && let Ok(ident) = self + .parse_ident_common(false) // Cancel this error, we don't need it. .map_err(|err| err.cancel()) && self.token.kind == TokenKind::Colon @@ -2269,6 +2278,18 @@ err.span_label(ident.span, "while parsing this `fn`"); err.emit(); } else { + // check for typo'd Fn* trait bounds such as + // fn foo() where F: FnOnce -> () {} + if self.token.kind == token::RArrow { + let machine_applicable = [sym::FnOnce, sym::FnMut, sym::Fn] + .into_iter() + .any(|s| self.prev_token.is_ident_named(s)); + + err.subdiagnostic(errors::FnTraitMissingParen { + span: self.prev_token.span, + machine_applicable, + }); + } return Err(err); } } @@ -2288,9 +2309,9 @@ // `pub` is added in case users got confused with the ordering like `async pub fn`, // only if it wasn't preceded by `default` as `default pub` is invalid. let quals: &[Symbol] = if check_pub { - &[kw::Pub, kw::Const, kw::Async, kw::Unsafe, kw::Extern] + &[kw::Pub, kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Extern] } else { - &[kw::Const, kw::Async, kw::Unsafe, kw::Extern] + &[kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Extern] }; self.check_keyword_case(kw::Fn, case) // Definitely an `fn`. // `$qual fn` or `$qual $qual`: @@ -2344,6 +2365,9 @@ let async_start_sp = self.token.span; let asyncness = self.parse_asyncness(case); + let _gen_start_sp = self.token.span; + let genness = self.parse_genness(case); + let unsafe_start_sp = self.token.span; let unsafety = self.parse_unsafety(case); @@ -2359,6 +2383,10 @@ } } + if let Gen::Yes { span, .. } = genness { + self.sess.emit_err(errors::GenFn { span }); + } + if !self.eat_keyword_case(kw::Fn, case) { // It is possible for `expect_one_of` to recover given the contents of // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't @@ -2373,22 +2401,39 @@ Misplaced(Span), } + // We may be able to recover + let mut recover_constness = constness; + let mut recover_asyncness = asyncness; + let mut recover_unsafety = unsafety; // This will allow the machine fix to directly place the keyword in the correct place or to indicate // that the keyword is already present and the second instance should be removed. let wrong_kw = if self.check_keyword(kw::Const) { match constness { Const::Yes(sp) => Some(WrongKw::Duplicated(sp)), - Const::No => Some(WrongKw::Misplaced(async_start_sp)), + Const::No => { + recover_constness = Const::Yes(self.token.span); + Some(WrongKw::Misplaced(async_start_sp)) + } } } else if self.check_keyword(kw::Async) { match asyncness { Async::Yes { span, .. 
} => Some(WrongKw::Duplicated(span)), - Async::No => Some(WrongKw::Misplaced(unsafe_start_sp)), + Async::No => { + recover_asyncness = Async::Yes { + span: self.token.span, + closure_id: DUMMY_NODE_ID, + return_impl_trait_id: DUMMY_NODE_ID, + }; + Some(WrongKw::Misplaced(unsafe_start_sp)) + } } } else if self.check_keyword(kw::Unsafe) { match unsafety { Unsafe::Yes(sp) => Some(WrongKw::Duplicated(sp)), - Unsafe::No => Some(WrongKw::Misplaced(ext_start_sp)), + Unsafe::No => { + recover_unsafety = Unsafe::Yes(self.token.span); + Some(WrongKw::Misplaced(ext_start_sp)) + } } } else { None @@ -2458,6 +2503,23 @@ } } } + + if wrong_kw.is_some() + && self.may_recover() + && self.look_ahead(1, |tok| tok.is_keyword_case(kw::Fn, case)) + { + // Advance past the misplaced keyword and `fn` + self.bump(); + self.bump(); + err.emit(); + return Ok(FnHeader { + constness: recover_constness, + unsafety: recover_unsafety, + asyncness: recover_asyncness, + ext, + }); + } + return Err(err); } } @@ -2483,11 +2545,23 @@ pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec> { let mut first_param = true; // Parse the arguments, starting out with `self` being allowed... + if self.token.kind != TokenKind::OpenDelim(Delimiter::Parenthesis) + // might be typo'd trait impl, handled elsewhere + && !self.token.is_keyword(kw::For) + { + // recover from missing argument list, e.g. `fn main -> () {}` + self.sess + .emit_err(errors::MissingFnParams { span: self.prev_token.span.shrink_to_hi() }); + return Ok(ThinVec::new()); + } + let (mut params, _) = self.parse_paren_comma_seq(|p| { p.recover_diff_marker(); + let snapshot = p.create_snapshot_for_diagnostic(); let param = p.parse_param_general(req_name, first_param).or_else(|mut e| { e.emit(); let lo = p.prev_token.span; + p.restore_snapshot(snapshot); // Skip every token until next possible arg or end. p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]); // Create a placeholder argument for proper arg count (issue #34264). diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,6 +11,7 @@ mod ty; use crate::lexer::UnmatchedDelim; +use ast::Gen; pub use attr_wrapper::AttrWrapper; pub use diagnostics::AttemptLocalParseRecovery; pub(crate) use expr::ForbiddenLetReason; @@ -35,8 +36,8 @@ Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan, }; use rustc_session::parse::ParseSess; -use rustc_span::source_map::{Span, DUMMY_SP}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; +use rustc_span::{Span, DUMMY_SP}; use std::ops::Range; use std::{mem, slice}; use thin_vec::ThinVec; @@ -52,6 +53,7 @@ const NO_STRUCT_LITERAL = 1 << 1; const CONST_EXPR = 1 << 2; const ALLOW_LET = 1 << 3; + const IN_IF_GUARD = 1 << 4; } } @@ -158,8 +160,9 @@ /// appropriately. /// /// See the comments in the `parse_path_segment` function for more details. - unmatched_angle_bracket_count: u32, - max_angle_bracket_count: u32, + unmatched_angle_bracket_count: u16, + max_angle_bracket_count: u16, + angle_bracket_nesting: u16, last_unexpected_token_span: Option, /// If present, this `Parser` is not parsing Rust code but rather a macro call. 
@@ -393,6 +396,7 @@ break_last_token: false, unmatched_angle_bracket_count: 0, max_angle_bracket_count: 0, + angle_bracket_nesting: 0, last_unexpected_token_span: None, subparser_name, capture_state: CaptureState { @@ -557,8 +561,9 @@ } if case == Case::Insensitive - && let Some((ident, /* is_raw */ false)) = self.token.ident() - && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() { + && let Some((ident, /* is_raw */ false)) = self.token.ident() + && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() + { true } else { false @@ -586,12 +591,10 @@ } if case == Case::Insensitive - && let Some((ident, /* is_raw */ false)) = self.token.ident() - && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() { - self.sess.emit_err(errors::KwBadCase { - span: ident.span, - kw: kw.as_str() - }); + && let Some((ident, /* is_raw */ false)) = self.token.ident() + && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() + { + self.sess.emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() }); self.bump(); return true; } @@ -1126,6 +1129,16 @@ } } + /// Parses genness: `gen` or nothing. + fn parse_genness(&mut self, case: Case) -> Gen { + if self.token.span.at_least_rust_2024() && self.eat_keyword_case(kw::Gen, case) { + let span = self.prev_token.uninterpolated_span(); + Gen::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID } + } else { + Gen::No + } + } + /// Parses unsafety: `unsafe` or nothing. fn parse_unsafety(&mut self, case: Case) -> Unsafe { if self.eat_keyword_case(kw::Unsafe, case) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/nonterminal.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/nonterminal.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/nonterminal.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/nonterminal.rs 2023-12-21 16:55:28.000000000 +0000 @@ -115,7 +115,7 @@ Some(item) => NtItem(item), None => { return Err(UnexpectedNonterminal::Item(self.token.span) - .into_diagnostic(&self.sess.span_diagnostic)); + .into_diagnostic(&self.sess.span_diagnostic)); } }, NonterminalKind::Block => { @@ -127,7 +127,7 @@ Some(s) => NtStmt(P(s)), None => { return Err(UnexpectedNonterminal::Statement(self.token.span) - .into_diagnostic(&self.sess.span_diagnostic)); + .into_diagnostic(&self.sess.span_diagnostic)); } }, NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => { @@ -146,19 +146,15 @@ NonterminalKind::Expr => NtExpr(self.parse_expr_force_collect()?), NonterminalKind::Literal => { // The `:literal` matcher does not support attributes - NtLiteral( - self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?, - ) + NtLiteral(self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?) } - NonterminalKind::Ty => NtTy( - self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?, - ), + NonterminalKind::Ty => { + NtTy(self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?) 
+ } // this could be handled like a token, since it is one - NonterminalKind::Ident - if let Some((ident, is_raw)) = get_macro_ident(&self.token) => - { + NonterminalKind::Ident if let Some((ident, is_raw)) = get_macro_ident(&self.token) => { self.bump(); NtIdent(ident, is_raw) } @@ -166,15 +162,17 @@ return Err(UnexpectedNonterminal::Ident { span: self.token.span, token: self.token.clone(), - }.into_diagnostic(&self.sess.span_diagnostic)); + } + .into_diagnostic(&self.sess.span_diagnostic)); + } + NonterminalKind::Path => { + NtPath(P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?)) } - NonterminalKind::Path => NtPath( - P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?), - ), NonterminalKind::Meta => NtMeta(P(self.parse_attr_item(true)?)), - NonterminalKind::Vis => NtVis( - P(self.collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?), - ), + NonterminalKind::Vis => { + NtVis(P(self + .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?)) + } NonterminalKind::Lifetime => { if self.check_lifetime() { NtLifetime(self.expect_lifetime().ident) @@ -182,7 +180,8 @@ return Err(UnexpectedNonterminal::Lifetime { span: self.token.span, token: self.token.clone(), - }.into_diagnostic(&self.sess.span_diagnostic)); + } + .into_diagnostic(&self.sess.span_diagnostic)); } } }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/pat.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/pat.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/pat.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/pat.rs 2023-12-21 16:55:28.000000000 +0000 @@ -20,8 +20,9 @@ use rustc_ast_pretty::pprust; use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult}; use rustc_session::errors::ExprParenthesesNeeded; -use rustc_span::source_map::{respan, Span, Spanned}; +use rustc_span::source_map::{respan, Spanned}; use rustc_span::symbol::{kw, sym, Ident}; +use rustc_span::Span; use thin_vec::{thin_vec, ThinVec}; #[derive(PartialEq, Copy, Clone)] @@ -967,11 +968,12 @@ // check that a comma comes after every field if !ate_comma { - let err = ExpectedCommaAfterPatternField { span: self.token.span } + let mut err = ExpectedCommaAfterPatternField { span: self.token.span } .into_diagnostic(&self.sess.span_diagnostic); if let Some(mut delayed) = delayed_err { delayed.emit(); } + self.recover_misplaced_pattern_modifiers(&fields, &mut err); return Err(err); } ate_comma = false; @@ -1109,6 +1111,37 @@ Ok((fields, etc)) } + /// If the user writes `S { ref field: name }` instead of `S { field: ref name }`, we suggest + /// the correct code. + fn recover_misplaced_pattern_modifiers( + &self, + fields: &ThinVec, + err: &mut DiagnosticBuilder<'a, ErrorGuaranteed>, + ) { + if let Some(last) = fields.iter().last() + && last.is_shorthand + && let PatKind::Ident(binding, ident, None) = last.pat.kind + && binding != BindingAnnotation::NONE + && self.token == token::Colon + // We found `ref mut? ident:`, try to parse a `name,` or `name }`. 
+ && let Some(name_span) = self.look_ahead(1, |t| t.is_ident().then(|| t.span)) + && self.look_ahead(2, |t| { + t == &token::Comma || t == &token::CloseDelim(Delimiter::Brace) + }) + { + let span = last.pat.span.with_hi(ident.span.lo()); + // We have `S { ref field: name }` instead of `S { field: ref name }` + err.multipart_suggestion( + "the pattern modifiers belong after the `:`", + vec![ + (span, String::new()), + (name_span.shrink_to_lo(), binding.prefix_str().to_string()), + ], + Applicability::MachineApplicable, + ); + } + } + /// Recover on `...` or `_` as if it were `..` to avoid further errors. /// See issue #46718. fn recover_bad_dot_dot(&self) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/path.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/path.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/path.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/path.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,8 +10,8 @@ Path, PathSegment, QSelf, }; use rustc_errors::{Applicability, IntoDiagnostic, PResult}; -use rustc_span::source_map::{BytePos, Span}; use rustc_span::symbol::{kw, sym, Ident}; +use rustc_span::{BytePos, Span}; use std::mem; use thin_vec::ThinVec; use tracing::debug; @@ -487,10 +487,24 @@ // Take a snapshot before attempting to parse - we can restore this later. let snapshot = is_first_invocation.then(|| self.clone()); + self.angle_bracket_nesting += 1; debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)"); match self.parse_angle_args(ty_generics) { - Ok(args) => Ok(args), + Ok(args) => { + self.angle_bracket_nesting -= 1; + Ok(args) + } + Err(mut e) if self.angle_bracket_nesting > 10 => { + self.angle_bracket_nesting -= 1; + // When encountering severely malformed code where there are several levels of + // nested unclosed angle args (`f:: 0 => { + self.angle_bracket_nesting -= 1; + // Swap `self` with our backup of the parser state before attempting to parse // generic arguments. let snapshot = mem::replace(self, snapshot.unwrap()); @@ -520,8 +534,8 @@ // Make a span over ${unmatched angle bracket count} characters. // This is safe because `all_angle_brackets` ensures that there are only `<`s, // i.e. no multibyte characters, in this range. 
- let span = - lo.with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count)); + let span = lo + .with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count.into())); self.sess.emit_err(errors::UnmatchedAngle { span, plural: snapshot.unmatched_angle_bracket_count > 1, @@ -531,7 +545,10 @@ self.parse_angle_args(ty_generics) } } - Err(e) => Err(e), + Err(e) => { + self.angle_bracket_nesting -= 1; + Err(e) + } } } @@ -764,7 +781,8 @@ if let ast::TyKind::Slice(inner_ty) | ast::TyKind::Array(inner_ty, _) = &ty.kind && let ast::TyKind::Err = inner_ty.kind && let Some(snapshot) = snapshot - && let Some(expr) = self.recover_unbraced_const_arg_that_can_begin_ty(snapshot) + && let Some(expr) = + self.recover_unbraced_const_arg_that_can_begin_ty(snapshot) { return Ok(Some(self.dummy_const_arg_needs_braces( self.struct_span_err(expr.span, "invalid const generic expression"), @@ -776,12 +794,10 @@ } Err(err) => { if let Some(snapshot) = snapshot - && let Some(expr) = self.recover_unbraced_const_arg_that_can_begin_ty(snapshot) + && let Some(expr) = + self.recover_unbraced_const_arg_that_can_begin_ty(snapshot) { - return Ok(Some(self.dummy_const_arg_needs_braces( - err, - expr.span, - ))); + return Ok(Some(self.dummy_const_arg_needs_braces(err, expr.span))); } // Try to recover from possible `const` arg without braces. return self.recover_const_arg(start, err).map(Some); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/stmt.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/stmt.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/stmt.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/stmt.rs 2023-12-21 16:55:28.000000000 +0000 @@ -20,8 +20,8 @@ use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt}; use rustc_ast::{StmtKind, DUMMY_NODE_ID}; use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult}; -use rustc_span::source_map::{BytePos, Span}; use rustc_span::symbol::{kw, sym, Ident}; +use rustc_span::{BytePos, Span}; use std::borrow::Cow; use std::mem; @@ -52,7 +52,9 @@ // Don't use `maybe_whole` so that we have precise control // over when we bump the parser - if let token::Interpolated(nt) = &self.token.kind && let token::NtStmt(stmt) = &**nt { + if let token::Interpolated(nt) = &self.token.kind + && let token::NtStmt(stmt) = &**nt + { let mut stmt = stmt.clone(); self.bump(); stmt.visit_attrs(|stmt_attrs| { @@ -227,8 +229,9 @@ /// Also error if the previous token was a doc comment. fn error_outer_attrs(&self, attrs: AttrWrapper) { if !attrs.is_empty() - && let attrs = attrs.take_for_recovery(self.sess) - && let attrs @ [.., last] = &*attrs { + && let attrs = attrs.take_for_recovery(self.sess) + && let attrs @ [.., last] = &*attrs + { if last.is_doc_comment() { self.sess.emit_err(errors::DocCommentDoesNotDocumentAnything { span: last.span, @@ -616,34 +619,19 @@ match &mut stmt.kind { // Expression without semicolon. StmtKind::Expr(expr) - if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => { + if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => + { // Just check for errors and recover; do not eat semicolon yet. 
// `expect_one_of` returns PResult<'a, bool /* recovered */> - let expect_result = self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]); + let expect_result = + self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]); let replace_with_err = 'break_recover: { match expect_result { - // Recover from parser, skip type error to avoid extra errors. + // Recover from parser, skip type error to avoid extra errors. Ok(true) => true, Err(mut e) => { - if let TokenKind::DocComment(..) = self.token.kind - && let Ok(snippet) = self.span_to_snippet(self.token.span) - { - let sp = self.token.span; - let marker = &snippet[..3]; - let (comment_marker, doc_comment_marker) = marker.split_at(2); - - e.span_suggestion( - sp.with_hi(sp.lo() + BytePos(marker.len() as u32)), - format!( - "add a space before `{doc_comment_marker}` to use a regular comment", - ), - format!("{comment_marker} {doc_comment_marker}"), - Applicability::MaybeIncorrect, - ); - } - if self.recover_colon_as_semi() { // recover_colon_as_semi has already emitted a nicer error. e.delay_as_bug(); @@ -654,14 +642,19 @@ } match &expr.kind { - ExprKind::Path(None, ast::Path { segments, .. }) if segments.len() == 1 => { + ExprKind::Path(None, ast::Path { segments, .. }) + if segments.len() == 1 => + { if self.token == token::Colon && self.look_ahead(1, |token| { - token.is_whole_block() || matches!( - token.kind, - token::Ident(kw::For | kw::Loop | kw::While, false) - | token::OpenDelim(Delimiter::Brace) - ) + token.is_whole_block() + || matches!( + token.kind, + token::Ident( + kw::For | kw::Loop | kw::While, + false + ) | token::OpenDelim(Delimiter::Brace) + ) }) { let snapshot = self.create_snapshot_for_diagnostic(); @@ -702,9 +695,8 @@ } true - } - Ok(false) => false + Ok(false) => false, } }; @@ -727,7 +719,9 @@ } eat_semi = false; } - StmtKind::Empty | StmtKind::Item(_) | StmtKind::Local(_) | StmtKind::Semi(_) => eat_semi = false, + StmtKind::Empty | StmtKind::Item(_) | StmtKind::Local(_) | StmtKind::Semi(_) => { + eat_semi = false + } } if add_semi_to_stmt || (eat_semi && self.eat(&token::Semi)) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/ty.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/ty.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/ty.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse/src/parser/ty.rs 2023-12-21 16:55:28.000000000 +0000 @@ -18,9 +18,8 @@ TraitObjectSyntax, Ty, TyKind, }; use rustc_errors::{Applicability, PResult}; -use rustc_span::source_map::Span; use rustc_span::symbol::{kw, sym, Ident}; -use rustc_span::Symbol; +use rustc_span::{Span, Symbol}; use thin_vec::{thin_vec, ThinVec}; /// Any `?`, `!`, or `~const` modifiers that appear at the start of a bound. @@ -247,7 +246,7 @@ )?; FnRetTy::Ty(ty) } else { - FnRetTy::Default(self.token.span.shrink_to_lo()) + FnRetTy::Default(self.prev_token.span.shrink_to_hi()) }) } @@ -893,13 +892,15 @@ // to recover from errors, not make more). let path = if self.may_recover() { let (span, message, sugg, path, applicability) = match &ty.kind { - TyKind::Ptr(..) | TyKind::Ref(..) if let TyKind::Path(_, path) = &ty.peel_refs().kind => { + TyKind::Ptr(..) | TyKind::Ref(..) 
+ if let TyKind::Path(_, path) = &ty.peel_refs().kind => + { ( ty.span.until(path.span), "consider removing the indirection", "", path, - Applicability::MaybeIncorrect + Applicability::MaybeIncorrect, ) } TyKind::ImplTrait(_, bounds) @@ -910,10 +911,10 @@ "use the trait bounds directly", "", &tr.trait_ref.path, - Applicability::MachineApplicable + Applicability::MachineApplicable, ) } - _ => return Err(err) + _ => return Err(err), }; err.span_suggestion_verbose(span, message, sugg, applicability); @@ -1027,7 +1028,8 @@ args.into_iter() .filter_map(|arg| { if let ast::AngleBracketedArg::Arg(generic_arg) = arg - && let ast::GenericArg::Lifetime(lifetime) = generic_arg { + && let ast::GenericArg::Lifetime(lifetime) = generic_arg + { Some(lifetime) } else { None diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse_format/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse_format/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse_format/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse_format/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,5 +4,7 @@ edition = "2021" [dependencies] -rustc_lexer = { path = "../rustc_lexer" } +# tidy-alphabetical-start rustc_index = { path = "../rustc_index", default-features = false } +rustc_lexer = { path = "../rustc_lexer" } +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse_format/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse_format/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_parse_format/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_parse_format/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,6 +9,9 @@ html_playground_url = "https://play.rust-lang.org/", test(attr(deny(warnings))) )] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] // We want to be able to build this crate with a stable compiler, so no diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,23 +4,25 @@ edition = "2021" [dependencies] -tracing = "0.1" +# tidy-alphabetical-start itertools = "0.10.1" -rustc_middle = { path = "../rustc_middle" } +rustc_ast = { path = "../rustc_ast" } +rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_attr = { path = "../rustc_attr" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_expand = { path = "../rustc_expand" } -rustc_hir = { path = "../rustc_hir" } +rustc_feature = { path = "../rustc_feature" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } -rustc_session = { path = "../rustc_session" } -rustc_target = { path = "../rustc_target" } +rustc_lexer = { path = "../rustc_lexer" } rustc_macros = { path = "../rustc_macros" } -rustc_ast = { path = "../rustc_ast" } +rustc_middle = { path = "../rustc_middle" } rustc_serialize = { path = "../rustc_serialize" } +rustc_session = { path = 
"../rustc_session" } rustc_span = { path = "../rustc_span" } -rustc_lexer = { path = "../rustc_lexer" } -rustc_ast_pretty = { path = "../rustc_ast_pretty" } -rustc_feature = { path = "../rustc_feature" } +rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -393,20 +393,13 @@ `{$name}` attribute cannot be used at crate level .suggestion = perhaps you meant to use an outer attribute -passes_invalid_deprecation_version = - invalid deprecation version found - .label = invalid deprecation version - .item = the stability attribute annotates this item +passes_invalid_attr_at_crate_level_item = + the inner attribute doesn't annotate this {$kind} passes_invalid_macro_export_arguments = `{$name}` isn't a valid `#[macro_export]` argument passes_invalid_macro_export_arguments_too_many_items = `#[macro_export]` can only take 1 or 0 arguments -passes_invalid_stability = - invalid stability version found - .label = invalid stability version - .item = the stability attribute annotates this item - passes_lang_item_fn_with_target_feature = `{$name}` language item function is not allowed to have `#[target_feature]` .label = `{$name}` language item function is not allowed to have `#[target_feature]` @@ -580,6 +573,8 @@ *[false] {""} } +passes_outside_loop_suggestion = consider labeling this block to be able to break within it + passes_params_not_allowed = referencing function parameters is not allowed in naked functions .help = follow the calling convention in asm block to use parameters @@ -594,9 +589,6 @@ `pass_by_value` attribute should be applied to a struct, enum or type alias .label = is not a struct, enum or type alias -passes_plugin_registrar = - `#[plugin_registrar]` only has an effect on functions - passes_proc_macro_bad_sig = {$kind} has incorrect signature passes_repr_conflicting = @@ -778,6 +770,8 @@ passes_unused_var_remove_field = unused variable: `{$name}` passes_unused_var_remove_field_suggestion = try removing the field +passes_unused_variable_args_in_macro = `{$name}` is captured in macro and introduced a unused variable + passes_unused_variable_try_ignore = unused variable: `{$name}` .suggestion = try ignoring the field @@ -785,6 +779,7 @@ .label = unused variable .suggestion = if this is intentional, prefix it with an underscore + passes_used_compiler_linker = `used(compiler)` and `used(linker)` can't be used together diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/check_attr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/check_attr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/check_attr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/check_attr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -211,7 +211,6 @@ sym::deprecated => self.check_deprecated(hir_id, attr, span, target), sym::macro_use | sym::macro_escape => self.check_macro_use(hir_id, attr, target), sym::path => self.check_generic_attr(hir_id, attr, target, Target::Mod), - sym::plugin_registrar => self.check_plugin_registrar(hir_id, attr, target), sym::macro_export => 
self.check_macro_export(hir_id, attr, target), sym::ignore | sym::should_panic => { self.check_generic_attr(hir_id, attr, target, Target::Fn) @@ -1076,7 +1075,9 @@ ) -> bool { let mut is_valid = true; - if let Some(mi) = attr.meta() && let Some(list) = mi.meta_item_list() { + if let Some(mi) = attr.meta() + && let Some(list) = mi.meta_item_list() + { for meta in list { if let Some(i_meta) = meta.meta_item() { match i_meta.name_or_empty() { @@ -1108,6 +1109,7 @@ | sym::html_root_url | sym::html_no_source | sym::test + | sym::rust_logo if !self.check_attr_crate_level(attr, meta, hir_id) => { is_valid = false; @@ -1132,14 +1134,7 @@ is_valid = false; } - sym::masked - if !self.check_doc_masked( - attr, - meta, - hir_id, - target, - ) => - { + sym::masked if !self.check_doc_masked(attr, meta, hir_id, target) => { is_valid = false; } @@ -1166,6 +1161,18 @@ | sym::plugins | sym::fake_variadic => {} + sym::rust_logo => { + if !self.tcx.features().rustdoc_internals { + feature_err( + &self.tcx.sess.parse_sess, + sym::rustdoc_internals, + meta.span(), + "the `#[doc(rust_logo)]` attribute is used for Rust branding", + ) + .emit(); + } + } + sym::test => { if !self.check_test_attr(meta, hir_id) { is_valid = false; @@ -1179,13 +1186,11 @@ INVALID_DOC_ATTRIBUTES, hir_id, i_meta.span, - errors::DocTestUnknownSpotlight { - path, - span: i_meta.span - } + errors::DocTestUnknownSpotlight { path, span: i_meta.span }, ); - } else if i_meta.has_name(sym::include) && - let Some(value) = i_meta.value_str() { + } else if i_meta.has_name(sym::include) + && let Some(value) = i_meta.value_str() + { let applicability = if list.len() == 1 { Applicability::MachineApplicable } else { @@ -1200,16 +1205,19 @@ errors::DocTestUnknownInclude { path, value: value.to_string(), - inner: match attr.style { AttrStyle::Inner=> "!" , AttrStyle::Outer => "" }, + inner: match attr.style { + AttrStyle::Inner => "!", + AttrStyle::Outer => "", + }, sugg: (attr.meta().unwrap().span, applicability), - } + }, ); } else { self.tcx.emit_spanned_lint( INVALID_DOC_ATTRIBUTES, hir_id, i_meta.span, - errors::DocTestUnknownAny { path } + errors::DocTestUnknownAny { path }, ); } is_valid = false; @@ -2189,8 +2197,9 @@ attr.span, errors::MacroExport::Normal, ); - } else if let Some(meta_item_list) = attr.meta_item_list() && - !meta_item_list.is_empty() { + } else if let Some(meta_item_list) = attr.meta_item_list() + && !meta_item_list.is_empty() + { if meta_item_list.len() > 1 { self.tcx.emit_spanned_lint( INVALID_MACRO_EXPORT_ARGUMENTS, @@ -2227,17 +2236,6 @@ } } - fn check_plugin_registrar(&self, hir_id: HirId, attr: &Attribute, target: Target) { - if target != Target::Fn { - self.tcx.emit_spanned_lint( - UNUSED_ATTRIBUTES, - hir_id, - attr.span, - errors::PluginRegistrar, - ); - } - } - fn check_unused_attribute(&self, hir_id: HirId, attr: &Attribute) { // Warn on useless empty attributes. 
let note = if matches!( @@ -2255,9 +2253,9 @@ { errors::UnusedNote::EmptyList { name: attr.name_or_empty() } } else if matches!( - attr.name_or_empty(), - sym::allow | sym::warn | sym::deny | sym::forbid | sym::expect - ) && let Some(meta) = attr.meta_item_list() + attr.name_or_empty(), + sym::allow | sym::warn | sym::deny | sym::forbid | sym::expect + ) && let Some(meta) = attr.meta_item_list() && meta.len() == 1 && let Some(item) = meta[0].meta_item() && let MetaItemKind::NameValue(_) = &item.kind @@ -2380,7 +2378,7 @@ let errors = ocx.select_all_or_error(); if !errors.is_empty() { - infcx.err_ctxt().report_fulfillment_errors(&errors); + infcx.err_ctxt().report_fulfillment_errors(errors); self.abort.set(true); } } @@ -2524,10 +2522,30 @@ if attr.style == AttrStyle::Inner { for attr_to_check in ATTRS_TO_CHECK { if attr.has_name(*attr_to_check) { + let item = tcx + .hir() + .items() + .map(|id| tcx.hir().item(id)) + .find(|item| !item.span.is_dummy()) // Skip prelude `use`s + .map(|item| errors::ItemFollowingInnerAttr { + span: item.ident.span, + kind: item.kind.descr(), + }); tcx.sess.emit_err(errors::InvalidAttrAtCrateLevel { span: attr.span, - snippet: tcx.sess.source_map().span_to_snippet(attr.span).ok(), + sugg_span: tcx + .sess + .source_map() + .span_to_snippet(attr.span) + .ok() + .filter(|src| src.starts_with("#![")) + .map(|_| { + attr.span + .with_lo(attr.span.lo() + BytePos(1)) + .with_hi(attr.span.lo() + BytePos(2)) + }), name: *attr_to_check, + item, }); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/check_const.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/check_const.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/check_const.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/check_const.rs 2023-12-21 16:55:28.000000000 +0000 @@ -86,7 +86,7 @@ let is_feature_allowed = |feature_gate| { // All features require that the corresponding gate be enabled, // even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`. - if !tcx.features().enabled(feature_gate) { + if !tcx.features().active(feature_gate) { return false; } @@ -134,7 +134,7 @@ let required_gates = required_gates.unwrap_or(&[]); let missing_gates: Vec<_> = - required_gates.iter().copied().filter(|&g| !features.enabled(g)).collect(); + required_gates.iter().copied().filter(|&g| !features.active(g)).collect(); match missing_gates.as_slice() { [] => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/dead.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/dead.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/dead.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/dead.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,7 @@ -// This implements the dead-code warning pass. It follows middle::reachable -// closely. The idea is that all reachable symbols are live, codes called -// from live codes are live, and everything else is dead. +// This implements the dead-code warning pass. +// All reachable symbols are live, code called from live code is live, code with certain lint +// expectations such as `#[expect(unused)]` and `#[expect(dead_code)]` is live, and everything else +// is dead. 
use hir::def_id::{LocalDefIdMap, LocalDefIdSet}; use itertools::Itertools; @@ -192,15 +193,15 @@ if let hir::ExprKind::Assign(lhs, rhs, _) = assign.kind && check_for_self_assign_helper(self.typeck_results(), lhs, rhs) - && !assign.span.from_expansion() + && !assign.span.from_expansion() { - let is_field_assign = matches!(lhs.kind, hir::ExprKind::Field(..)); - self.tcx.emit_spanned_lint( - lint::builtin::DEAD_CODE, - assign.hir_id, - assign.span, - UselessAssignment { is_field_assign, ty: self.typeck_results().expr_ty(lhs) } - ) + let is_field_assign = matches!(lhs.kind, hir::ExprKind::Field(..)); + self.tcx.emit_spanned_lint( + lint::builtin::DEAD_CODE, + assign.hir_id, + assign.span, + UselessAssignment { is_field_assign, ty: self.typeck_results().expr_ty(lhs) }, + ) } } @@ -256,10 +257,10 @@ let mut current_ty = container; - for &index in indices { + for &(variant, field) in indices { match current_ty.kind() { ty::Adt(def, subst) => { - let field = &def.non_enum_variant().fields[index]; + let field = &def.variant(variant).fields[field]; self.insert_def_id(field.did); let field_ty = field.ty(self.tcx, subst); @@ -270,7 +271,7 @@ // but we may need to mark subfields ty::Tuple(tys) => { current_ty = - self.tcx.normalize_erasing_regions(param_env, tys[index.as_usize()]); + self.tcx.normalize_erasing_regions(param_env, tys[field.as_usize()]); } _ => span_bug!(expr.span, "named field access on non-ADT"), } @@ -670,7 +671,8 @@ if matches!(tcx.def_kind(id.owner_id), DefKind::AssocConst | DefKind::AssocFn) { let trait_item = tcx.hir().trait_item(id); if matches!(trait_item.kind, Const(_, Some(_)) | Fn(_, hir::TraitFn::Provided(_))) - && let Some(comes_from_allow) = has_allow_dead_code_or_lang_attr(tcx, trait_item.owner_id.def_id) + && let Some(comes_from_allow) = + has_allow_dead_code_or_lang_attr(tcx, trait_item.owner_id.def_id) { worklist.push((trait_item.owner_id.def_id, comes_from_allow)); } @@ -747,7 +749,7 @@ (symbol_visitor.live_symbols, symbol_visitor.ignored_derived_traits) } -struct DeadVariant { +struct DeadItem { def_id: LocalDefId, name: Symbol, level: lint::Level, @@ -785,7 +787,13 @@ ShouldWarnAboutField::Yes(is_positional) } - fn warn_multiple_dead_codes( + // # Panics + // All `dead_codes` must have the same lint level, otherwise we will intentionally ICE. + // This is because we emit a multi-spanned lint using the lint level of the `dead_codes`'s + // first local def id. + // Prefer calling `Self.warn_dead_code` or `Self.warn_dead_code_grouped_by_lint_level` + // since those methods group by lint level before calling this method. 
+ fn lint_at_single_level( &self, dead_codes: &[LocalDefId], participle: &str, @@ -796,6 +804,15 @@ return; }; let tcx = self.tcx; + + let first_hir_id = tcx.hir().local_def_id_to_hir_id(first_id); + let first_lint_level = tcx.lint_level_at_node(lint::builtin::DEAD_CODE, first_hir_id).0; + assert!(dead_codes.iter().skip(1).all(|id| { + let hir_id = tcx.hir().local_def_id_to_hir_id(*id); + let level = tcx.lint_level_at_node(lint::builtin::DEAD_CODE, hir_id).0; + level == first_lint_level + })); + let names: Vec<_> = dead_codes.iter().map(|&def_id| tcx.item_name(def_id.to_def_id())).collect(); let spans: Vec<_> = dead_codes @@ -876,31 +893,26 @@ } }; - self.tcx.emit_spanned_lint( - lint, - tcx.hir().local_def_id_to_hir_id(first_id), - MultiSpan::from_spans(spans), - diag, - ); + self.tcx.emit_spanned_lint(lint, first_hir_id, MultiSpan::from_spans(spans), diag); } - fn warn_dead_fields_and_variants( + fn warn_multiple( &self, def_id: LocalDefId, participle: &str, - dead_codes: Vec, + dead_codes: Vec, is_positional: bool, ) { let mut dead_codes = dead_codes .iter() .filter(|v| !v.name.as_str().starts_with('_')) - .collect::>(); + .collect::>(); if dead_codes.is_empty() { return; } dead_codes.sort_by_key(|v| v.level); for (_, group) in &dead_codes.into_iter().group_by(|v| v.level) { - self.warn_multiple_dead_codes( + self.lint_at_single_level( &group.map(|v| v.def_id).collect::>(), participle, Some(def_id), @@ -910,7 +922,7 @@ } fn warn_dead_code(&mut self, id: LocalDefId, participle: &str) { - self.warn_multiple_dead_codes(&[id], participle, None, false); + self.lint_at_single_level(&[id], participle, None, false); } fn check_definition(&mut self, def_id: LocalDefId) { @@ -954,17 +966,16 @@ if let hir::ItemKind::Impl(impl_item) = tcx.hir().item(item).kind { let mut dead_items = Vec::new(); for item in impl_item.items { - let did = item.id.owner_id.def_id; - if !visitor.is_live_code(did) { - dead_items.push(did) + let def_id = item.id.owner_id.def_id; + if !visitor.is_live_code(def_id) { + let name = tcx.item_name(def_id.to_def_id()); + let hir_id = tcx.hir().local_def_id_to_hir_id(def_id); + let level = tcx.lint_level_at_node(lint::builtin::DEAD_CODE, hir_id).0; + + dead_items.push(DeadItem { def_id, name, level }) } } - visitor.warn_multiple_dead_codes( - &dead_items, - "used", - Some(item.owner_id.def_id), - false, - ); + visitor.warn_multiple(item.owner_id.def_id, "used", dead_items, false); } if !live_symbols.contains(&item.owner_id.def_id) { @@ -988,7 +999,7 @@ // Record to group diagnostics. 
let hir_id = tcx.hir().local_def_id_to_hir_id(def_id); let level = tcx.lint_level_at_node(lint::builtin::DEAD_CODE, hir_id).0; - dead_variants.push(DeadVariant { def_id, name: variant.name, level }); + dead_variants.push(DeadItem { def_id, name: variant.name, level }); continue; } @@ -1013,21 +1024,16 @@ hir_id, ) .0; - Some(DeadVariant { def_id, name: field.name, level }) + Some(DeadItem { def_id, name: field.name, level }) } else { None } }) .collect(); - visitor.warn_dead_fields_and_variants(def_id, "read", dead_fields, is_positional) + visitor.warn_multiple(def_id, "read", dead_fields, is_positional); } - visitor.warn_dead_fields_and_variants( - item.owner_id.def_id, - "constructed", - dead_variants, - false, - ); + visitor.warn_multiple(item.owner_id.def_id, "constructed", dead_variants, false); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/entry.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/entry.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/entry.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/entry.rs 2023-12-21 16:55:28.000000000 +0000 @@ -52,31 +52,6 @@ configure_main(tcx, &ctxt) } -// Beware, this is duplicated in `librustc_builtin_macros/test_harness.rs` -// (with `ast::Item`), so make sure to keep them in sync. -// A small optimization was added so that hir::Item is fetched only when needed. -// An equivalent optimization was not applied to the duplicated code in test_harness.rs. -fn entry_point_type(ctxt: &EntryContext<'_>, id: ItemId, at_root: bool) -> EntryPointType { - let attrs = ctxt.tcx.hir().attrs(id.hir_id()); - if attr::contains_name(attrs, sym::start) { - EntryPointType::Start - } else if attr::contains_name(attrs, sym::rustc_main) { - EntryPointType::RustcMainAttr - } else { - if let Some(name) = ctxt.tcx.opt_item_name(id.owner_id.to_def_id()) - && name == sym::main { - if at_root { - // This is a top-level function so can be `main`. 
- EntryPointType::MainNamed - } else { - EntryPointType::OtherMain - } - } else { - EntryPointType::None - } - } -} - fn attr_span_by_symbol(ctxt: &EntryContext<'_>, id: ItemId, sym: Symbol) -> Option { let attrs = ctxt.tcx.hir().attrs(id.hir_id()); attr::find_by_name(attrs, sym).map(|attr| attr.span) @@ -85,7 +60,13 @@ fn find_item(id: ItemId, ctxt: &mut EntryContext<'_>) { let at_root = ctxt.tcx.opt_local_parent(id.owner_id.def_id) == Some(CRATE_DEF_ID); - match entry_point_type(ctxt, id, at_root) { + let attrs = ctxt.tcx.hir().attrs(id.hir_id()); + let entry_point_type = rustc_ast::entry::entry_point_type( + attrs, + at_root, + ctxt.tcx.opt_item_name(id.owner_id.to_def_id()), + ); + match entry_point_type { EntryPointType::None => { if let Some(span) = attr_span_by_symbol(ctxt, id, sym::unix_sigpipe) { ctxt.tcx.sess.emit_err(AttrOnlyOnMain { span, attr: sym::unix_sigpipe }); @@ -140,9 +121,13 @@ let def_id = local_def_id.to_def_id(); Some((def_id, EntryFnType::Main { sigpipe: sigpipe(tcx, def_id) })) } else { - if let Some(main_def) = tcx.resolutions(()).main_def && let Some(def_id) = main_def.opt_fn_def_id() { + if let Some(main_def) = tcx.resolutions(()).main_def + && let Some(def_id) = main_def.opt_fn_def_id() + { // non-local main imports are handled below - if let Some(def_id) = def_id.as_local() && matches!(tcx.hir().find_by_def_id(def_id), Some(Node::ForeignItem(_))) { + if let Some(def_id) = def_id.as_local() + && matches!(tcx.hir().find_by_def_id(def_id), Some(Node::ForeignItem(_))) + { tcx.sess.emit_err(ExternMain { span: tcx.def_span(def_id) }); return None; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -717,10 +717,6 @@ TooManyItems, } -#[derive(LintDiagnostic)] -#[diag(passes_plugin_registrar)] -pub struct PluginRegistrar; - #[derive(Subdiagnostic)] pub enum UnusedNote { #[note(passes_unused_empty_lints_note)] @@ -856,8 +852,15 @@ pub struct InvalidAttrAtCrateLevel { pub span: Span, - pub snippet: Option, + pub sugg_span: Option, pub name: Symbol, + pub item: Option, +} + +#[derive(Clone, Copy)] +pub struct ItemFollowingInnerAttr { + pub span: Span, + pub kind: &'static str, } impl IntoDiagnostic<'_> for InvalidAttrAtCrateLevel { @@ -871,15 +874,18 @@ diag.set_arg("name", self.name); // Only emit an error with a suggestion if we can create a string out // of the attribute span - if let Some(src) = self.snippet { - let replacement = src.replace("#!", "#"); + if let Some(span) = self.sugg_span { diag.span_suggestion_verbose( - self.span, + span, fluent::passes_suggestion, - replacement, + String::new(), rustc_errors::Applicability::MachineApplicable, ); } + if let Some(item) = self.item { + diag.set_arg("kind", item.kind); + diag.span_label(item.span, fluent::passes_invalid_attr_at_crate_level_item); + } diag } } @@ -1106,6 +1112,16 @@ pub span: Span, pub name: &'a str, pub is_break: bool, + #[subdiagnostic] + pub suggestion: Option, +} +#[derive(Subdiagnostic)] +#[multipart_suggestion(passes_outside_loop_suggestion, applicability = "maybe-incorrect")] +pub struct OutsideLoopSuggestion { + #[suggestion_part(code = "'block: ")] + pub block_span: Span, + #[suggestion_part(code = " 'block")] + pub break_span: Span, } #[derive(Diagnostic)] @@ -1308,7 +1324,9 @@ 
diag.span_label(self.sp.shrink_to_hi(), note); } - if let Some(main_def) = self.main_def_opt && main_def.opt_fn_def_id().is_none(){ + if let Some(main_def) = self.main_def_opt + && main_def.opt_fn_def_id().is_none() + { // There is something at `crate::main`, but it is not a function definition. diag.span_label(main_def.span, fluent::passes_non_function_main); } @@ -1493,16 +1511,6 @@ } #[derive(Diagnostic)] -#[diag(passes_invalid_stability)] -pub struct InvalidStability { - #[primary_span] - #[label] - pub span: Span, - #[label(passes_item)] - pub item_sp: Span, -} - -#[derive(Diagnostic)] #[diag(passes_cannot_stabilize_deprecated)] pub struct CannotStabilizeDeprecated { #[primary_span] @@ -1513,16 +1521,6 @@ } #[derive(Diagnostic)] -#[diag(passes_invalid_deprecation_version)] -pub struct InvalidDeprecationVersion { - #[primary_span] - #[label] - pub span: Span, - #[label(passes_item)] - pub item_sp: Span, -} - -#[derive(Diagnostic)] #[diag(passes_missing_stability_attr)] pub struct MissingStabilityAttr<'a> { #[primary_span] @@ -1756,15 +1754,24 @@ #[subdiagnostic] pub string_interp: Vec, #[subdiagnostic] - pub sugg: UnusedVariableTryPrefixSugg, + pub sugg: UnusedVariableSugg, + pub name: String, } #[derive(Subdiagnostic)] -#[multipart_suggestion(passes_suggestion, applicability = "machine-applicable")] -pub struct UnusedVariableTryPrefixSugg { - #[suggestion_part(code = "_{name}")] - pub spans: Vec, - pub name: String, +pub enum UnusedVariableSugg { + #[multipart_suggestion(passes_suggestion, applicability = "machine-applicable")] + TryPrefixSugg { + #[suggestion_part(code = "_{name}")] + spans: Vec, + name: String, + }, + #[help(passes_unused_variable_args_in_macro)] + NoSugg { + #[primary_span] + span: Span, + name: String, + }, } pub struct UnusedVariableStringInterp { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/hir_stats.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/hir_stats.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/hir_stats.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/hir_stats.rs 2023-12-21 16:55:28.000000000 +0000 @@ -567,10 +567,10 @@ (self, e, e.kind, Id::None, ast, Expr, ExprKind), [ Array, ConstBlock, Call, MethodCall, Tup, Binary, Unary, Lit, Cast, Type, Let, - If, While, ForLoop, Loop, Match, Closure, Block, Async, Await, TryBlock, Assign, + If, While, ForLoop, Loop, Match, Closure, Block, Await, TryBlock, Assign, AssignOp, Field, Index, Range, Underscore, Path, AddrOf, Break, Continue, Ret, InlineAsm, FormatArgs, OffsetOf, MacCall, Struct, Repeat, Paren, Try, Yield, Yeet, - Become, IncludedBytes, Err + Become, IncludedBytes, Gen, Err ] ); ast_visit::walk_expr(self, e) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lang_items.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lang_items.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lang_items.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lang_items.rs 2023-12-21 16:55:28.000000000 +0000 @@ -149,7 +149,7 @@ // Now check whether the lang_item has the expected number of generic // arguments. 
Generally speaking, binary and indexing operations have // one (for the RHS/index), unary operations have none, the closure - // traits have one for the argument list, generators have one for the + // traits have one for the argument list, coroutines have one for the // resume argument, and ordering/equality relations have one for the RHS // Some other types like Box and various functions like drop_in_place // have minimum requirements. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/layout_test.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/layout_test.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/layout_test.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/layout_test.rs 2023-12-21 16:55:28.000000000 +0000 @@ -57,7 +57,7 @@ ocx.register_obligation(obligation); let errors = ocx.select_all_or_error(); if !errors.is_empty() { - infcx.err_ctxt().report_fulfillment_errors(&errors); + infcx.err_ctxt().report_fulfillment_errors(errors); false } else { // looks WF! diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,6 +6,9 @@ #![allow(rustc::potential_query_instability)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![feature(iter_intersperse)] #![feature(let_chains)] #![feature(map_try_insert)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lib_features.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lib_features.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lib_features.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/lib_features.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,7 +5,7 @@ //! collect them instead. use rustc_ast::Attribute; -use rustc_attr::{rust_version_symbol, VERSION_PLACEHOLDER}; +use rustc_attr::VERSION_PLACEHOLDER; use rustc_hir::intravisit::Visitor; use rustc_middle::hir::nested_filter; use rustc_middle::middle::lib_features::LibFeatures; @@ -56,8 +56,10 @@ } } - if let Some(s) = since && s.as_str() == VERSION_PLACEHOLDER { - since = Some(rust_version_symbol()); + if let Some(s) = since + && s.as_str() == VERSION_PLACEHOLDER + { + since = Some(sym::env_CFG_RELEASE); } if let Some(feature) = feature { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/liveness.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/liveness.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/liveness.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/liveness.rs 2023-12-21 16:55:28.000000000 +0000 @@ -706,7 +706,7 @@ // // When computing the liveness for captured variables we take into // account how variable is captured (ByRef vs ByValue) and what is the - // closure kind (Generator / FnOnce vs Fn / FnMut). + // closure kind (Coroutine / FnOnce vs Fn / FnMut). // // Variables captured by reference are assumed to be used on the exit // from the closure. 
@@ -752,7 +752,7 @@ ty::ClosureKind::FnMut => {} ty::ClosureKind::FnOnce => return succ, }, - ty::Generator(..) => return succ, + ty::Coroutine(..) => return succ, _ => { span_bug!( body.value.span, @@ -1512,13 +1512,15 @@ Some(body), |spans, hir_id, ln, var| { if !self.live_on_entry(ln, var) - && let Some(name) = self.should_warn(var) { - self.ir.tcx.emit_spanned_lint( - lint::builtin::UNUSED_ASSIGNMENTS, - hir_id, - spans, - errors::UnusedAssignPassed { name }, - ); } + && let Some(name) = self.should_warn(var) + { + self.ir.tcx.emit_spanned_lint( + lint::builtin::UNUSED_ASSIGNMENTS, + hir_id, + spans, + errors::UnusedAssignPassed { name }, + ); + } }, ); } @@ -1578,7 +1580,6 @@ opt_body: Option<&hir::Body<'_>>, ) { let first_hir_id = hir_ids_and_spans[0].0; - if let Some(name) = self.should_warn(var).filter(|name| name != "self") { // annoying: for parameters in funcs like `fn(x: i32) // {ret}`, there is only one node, so asking about @@ -1650,11 +1651,29 @@ }, ); } else { + // #117284, when `pat_span` and `ident_span` have different contexts + // we can't provide a good suggestion, instead we pointed out the spans from macro + let from_macro = non_shorthands + .iter() + .find(|(_, pat_span, ident_span)| { + pat_span.ctxt() != ident_span.ctxt() && pat_span.from_expansion() + }) + .map(|(_, pat_span, _)| *pat_span); let non_shorthands = non_shorthands .into_iter() .map(|(_, _, ident_span)| ident_span) .collect::>(); + let suggestions = self.string_interp_suggestions(&name, opt_body); + let sugg = if let Some(span) = from_macro { + errors::UnusedVariableSugg::NoSugg { span, name: name.clone() } + } else { + errors::UnusedVariableSugg::TryPrefixSugg { + spans: non_shorthands, + name: name.clone(), + } + }; + self.ir.tcx.emit_spanned_lint( lint::builtin::UNUSED_VARIABLES, first_hir_id, @@ -1664,10 +1683,8 @@ .collect::>(), errors::UnusedVariableTryPrefix { label: if !suggestions.is_empty() { Some(pat.span) } else { None }, - sugg: errors::UnusedVariableTryPrefixSugg { - spans: non_shorthands, - name, - }, + name, + sugg, string_interp: suggestions, }, ); @@ -1707,13 +1724,14 @@ fn warn_about_dead_assign(&self, spans: Vec, hir_id: HirId, ln: LiveNode, var: Variable) { if !self.live_on_exit(ln, var) - && let Some(name) = self.should_warn(var) { - self.ir.tcx.emit_spanned_lint( - lint::builtin::UNUSED_ASSIGNMENTS, - hir_id, - spans, - errors::UnusedAssign { name }, - ); - } + && let Some(name) = self.should_warn(var) + { + self.ir.tcx.emit_spanned_lint( + lint::builtin::UNUSED_ASSIGNMENTS, + hir_id, + spans, + errors::UnusedAssign { name }, + ); + } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/loops.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/loops.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/loops.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/loops.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,7 @@ use Context::*; use rustc_hir as hir; -use rustc_hir::def_id::LocalModDefId; +use rustc_hir::def_id::{LocalDefId, LocalModDefId}; use rustc_hir::intravisit::{self, Visitor}; use rustc_hir::{Destination, Movability, Node}; use rustc_middle::hir::map::Map; @@ -10,19 +10,21 @@ use rustc_middle::ty::TyCtxt; use rustc_session::Session; use rustc_span::hygiene::DesugaringKind; -use rustc_span::Span; +use rustc_span::{BytePos, Span}; use crate::errors::{ BreakInsideAsyncBlock, BreakInsideClosure, BreakNonLoop, ContinueLabeledBlock, OutsideLoop, - UnlabeledCfInWhileCondition, 
UnlabeledInLabeledBlock, + OutsideLoopSuggestion, UnlabeledCfInWhileCondition, UnlabeledInLabeledBlock, }; #[derive(Clone, Copy, Debug, PartialEq)] enum Context { Normal, + Fn, Loop(hir::LoopSource), Closure(Span), AsyncClosure(Span), + UnlabeledBlock(Span), LabeledBlock, Constant, } @@ -60,6 +62,25 @@ self.with_context(Constant, |v| intravisit::walk_inline_const(v, c)); } + fn visit_fn( + &mut self, + fk: hir::intravisit::FnKind<'hir>, + fd: &'hir hir::FnDecl<'hir>, + b: hir::BodyId, + _: Span, + id: LocalDefId, + ) { + self.with_context(Fn, |v| intravisit::walk_fn(v, fk, fd, b, id)); + } + + fn visit_trait_item(&mut self, trait_item: &'hir hir::TraitItem<'hir>) { + self.with_context(Fn, |v| intravisit::walk_trait_item(v, trait_item)); + } + + fn visit_impl_item(&mut self, impl_item: &'hir hir::ImplItem<'hir>) { + self.with_context(Fn, |v| intravisit::walk_impl_item(v, impl_item)); + } + fn visit_expr(&mut self, e: &'hir hir::Expr<'hir>) { match e.kind { hir::ExprKind::Loop(ref b, _, source, _) => { @@ -83,6 +104,14 @@ hir::ExprKind::Block(ref b, Some(_label)) => { self.with_context(LabeledBlock, |v| v.visit_block(&b)); } + hir::ExprKind::Block(ref b, None) if matches!(self.cx, Fn) => { + self.with_context(Normal, |v| v.visit_block(&b)); + } + hir::ExprKind::Block(ref b, None) + if matches!(self.cx, Normal | Constant | UnlabeledBlock(_)) => + { + self.with_context(UnlabeledBlock(b.span.shrink_to_lo()), |v| v.visit_block(&b)); + } hir::ExprKind::Break(break_label, ref opt_expr) => { if let Some(e) = opt_expr { self.visit_expr(e); @@ -147,7 +176,12 @@ } } - self.require_break_cx("break", e.span); + let sp_lo = e.span.with_lo(e.span.lo() + BytePos("break".len() as u32)); + let label_sp = match break_label.label { + Some(label) => sp_lo.with_hi(label.ident.span.hi()), + None => sp_lo.shrink_to_lo(), + }; + self.require_break_cx("break", e.span, label_sp); } hir::ExprKind::Continue(destination) => { self.require_label_in_labeled_block(e.span, &destination, "continue"); @@ -169,7 +203,7 @@ } Err(_) => {} } - self.require_break_cx("continue", e.span) + self.require_break_cx("continue", e.span, e.span) } _ => intravisit::walk_expr(self, e), } @@ -187,7 +221,8 @@ self.cx = old_cx; } - fn require_break_cx(&self, name: &str, span: Span) { + fn require_break_cx(&self, name: &str, span: Span, break_span: Span) { + let is_break = name == "break"; match self.cx { LabeledBlock | Loop(_) => {} Closure(closure_span) => { @@ -196,8 +231,12 @@ AsyncClosure(closure_span) => { self.sess.emit_err(BreakInsideAsyncBlock { span, closure_span, name }); } - Normal | Constant => { - self.sess.emit_err(OutsideLoop { span, name, is_break: name == "break" }); + UnlabeledBlock(block_span) if is_break && block_span.eq_ctxt(break_span) => { + let suggestion = Some(OutsideLoopSuggestion { block_span, break_span }); + self.sess.emit_err(OutsideLoop { span, name, is_break, suggestion }); + } + Normal | Constant | Fn | UnlabeledBlock(_) => { + self.sess.emit_err(OutsideLoop { span, name, is_break, suggestion: None }); } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/reachable.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/reachable.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/reachable.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/reachable.rs 2023-12-21 16:55:28.000000000 +0000 @@ -18,43 +18,10 @@ use rustc_session::config::CrateType; use rustc_target::spec::abi::Abi; -// Returns true if the given item must be 
inlined because it may be -// monomorphized or it was marked with `#[inline]`. This will only return -// true for functions. -fn item_might_be_inlined(tcx: TyCtxt<'_>, item: &hir::Item<'_>, attrs: &CodegenFnAttrs) -> bool { - if attrs.requests_inline() { - return true; - } - - match item.kind { - hir::ItemKind::Fn(ref sig, ..) if sig.header.is_const() => true, - hir::ItemKind::Impl { .. } | hir::ItemKind::Fn(..) => { - let generics = tcx.generics_of(item.owner_id); - generics.requires_monomorphization(tcx) - } - _ => false, - } -} - -fn method_might_be_inlined( - tcx: TyCtxt<'_>, - impl_item: &hir::ImplItem<'_>, - impl_src: LocalDefId, -) -> bool { - let codegen_fn_attrs = tcx.codegen_fn_attrs(impl_item.hir_id().owner.to_def_id()); - let generics = tcx.generics_of(impl_item.owner_id); - if codegen_fn_attrs.requests_inline() || generics.requires_monomorphization(tcx) { - return true; - } - if let hir::ImplItemKind::Fn(method_sig, _) = &impl_item.kind { - if method_sig.header.is_const() { - return true; - } - } - match tcx.hir().find_by_def_id(impl_src) { - Some(Node::Item(item)) => item_might_be_inlined(tcx, &item, codegen_fn_attrs), - Some(..) | None => span_bug!(impl_item.span, "impl did is not an item"), - } +fn item_might_be_inlined(tcx: TyCtxt<'_>, def_id: DefId) -> bool { + tcx.generics_of(def_id).requires_monomorphization(tcx) + || tcx.cross_crate_inlinable(def_id) + || tcx.is_const_fn(def_id) } // Information needed while computing reachability. @@ -97,7 +64,9 @@ _ => None, }; - if let Some(res) = res && let Some(def_id) = res.opt_def_id().and_then(|el| el.as_local()) { + if let Some(res) = res + && let Some(def_id) = res.opt_def_id().and_then(|el| el.as_local()) + { if self.def_id_represents_local_inlined_item(def_id.to_def_id()) { self.worklist.push(def_id); } else { @@ -148,9 +117,7 @@ match self.tcx.hir().find_by_def_id(def_id) { Some(Node::Item(item)) => match item.kind { - hir::ItemKind::Fn(..) => { - item_might_be_inlined(self.tcx, &item, self.tcx.codegen_fn_attrs(def_id)) - } + hir::ItemKind::Fn(..) => item_might_be_inlined(self.tcx, def_id.into()), _ => false, }, Some(Node::TraitItem(trait_method)) => match trait_method.kind { @@ -162,9 +129,7 @@ Some(Node::ImplItem(impl_item)) => match impl_item.kind { hir::ImplItemKind::Const(..) => true, hir::ImplItemKind::Fn(..) 
=> { - let hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id); - let impl_did = self.tcx.hir().get_parent_item(hir_id); - method_might_be_inlined(self.tcx, impl_item, impl_did.def_id) + item_might_be_inlined(self.tcx, impl_item.hir_id().owner.to_def_id()) } hir::ImplItemKind::Type(_) => false, }, @@ -224,11 +189,7 @@ Node::Item(item) => { match item.kind { hir::ItemKind::Fn(.., body) => { - if item_might_be_inlined( - self.tcx, - &item, - self.tcx.codegen_fn_attrs(item.owner_id), - ) { + if item_might_be_inlined(self.tcx, item.owner_id.into()) { self.visit_nested_body(body); } } @@ -277,8 +238,7 @@ self.visit_nested_body(body); } hir::ImplItemKind::Fn(_, body) => { - let impl_def_id = self.tcx.local_parent(search_item); - if method_might_be_inlined(self.tcx, impl_item, impl_def_id) { + if item_might_be_inlined(self.tcx, impl_item.hir_id().owner.to_def_id()) { self.visit_nested_body(body) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/stability.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/stability.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/stability.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_passes/src/stability.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,8 +3,8 @@ use crate::errors; use rustc_attr::{ - self as attr, rust_version_symbol, ConstStability, Stability, StabilityLevel, Unstable, - UnstableReason, VERSION_PLACEHOLDER, + self as attr, ConstStability, DeprecatedSince, Stability, StabilityLevel, StableSince, + Unstable, UnstableReason, VERSION_PLACEHOLDER, }; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap}; use rustc_hir as hir; @@ -24,8 +24,6 @@ use rustc_span::Span; use rustc_target::spec::abi::Abi; -use std::cmp::Ordering; -use std::iter; use std::mem::replace; use std::num::NonZeroU32; @@ -198,10 +196,8 @@ } } - if let Some((rustc_attr::Deprecation { is_since_rustc_version: true, .. }, span)) = &depr { - if stab.is_none() { - self.tcx.sess.emit_err(errors::DeprecatedAttribute { span: *span }); - } + if let Some((depr, span)) = &depr && depr.is_since_rustc_version() && stab.is_none() { + self.tcx.sess.emit_err(errors::DeprecatedAttribute { span: *span }); } if let Some((body_stab, _span)) = body_stab { @@ -223,40 +219,25 @@ // Check if deprecated_since < stable_since. If it is, // this is *almost surely* an accident. - if let (&Some(dep_since), &attr::Stable { since: stab_since, .. }) = - (&depr.as_ref().and_then(|(d, _)| d.since), &stab.level) + if let ( + &Some(DeprecatedSince::RustcVersion(dep_since)), + &attr::Stable { since: stab_since, .. 
}, + ) = (&depr.as_ref().map(|(d, _)| d.since), &stab.level) { - // Explicit version of iter::order::lt to handle parse errors properly - for (dep_v, stab_v) in - iter::zip(dep_since.as_str().split('.'), stab_since.as_str().split('.')) - { - match stab_v.parse::<u64>() { - Err(_) => { - self.tcx.sess.emit_err(errors::InvalidStability { span, item_sp }); - break; + match stab_since { + StableSince::Current => { + self.tcx.sess.emit_err(errors::CannotStabilizeDeprecated { span, item_sp }); + } + StableSince::Version(stab_since) => { + if dep_since < stab_since { + self.tcx + .sess + .emit_err(errors::CannotStabilizeDeprecated { span, item_sp }); } - Ok(stab_vp) => match dep_v.parse::<u64>() { - Ok(dep_vp) => match dep_vp.cmp(&stab_vp) { - Ordering::Less => { - self.tcx.sess.emit_err(errors::CannotStabilizeDeprecated { - span, - item_sp, - }); - break; - } - Ordering::Equal => continue, - Ordering::Greater => break, - }, - Err(_) => { - if dep_v != "TBD" { - self.tcx.sess.emit_err(errors::InvalidDeprecationVersion { - span, - item_sp, - }); - } - break; - } - }, + } + StableSince::Err => { + // An error already reported. Assume the unparseable stabilization + // version is older than the deprecation version. } } } @@ -998,14 +979,17 @@ all_implications: &FxHashMap<Symbol, Symbol>, ) { for (feature, since) in defined_features { - if let Some(since) = since && let Some(span) = remaining_lib_features.get(&feature) { + if let Some(since) = since + && let Some(span) = remaining_lib_features.get(&feature) + { // Warn if the user has enabled an already-stable lib feature. if let Some(implies) = all_implications.get(&feature) { - unnecessary_partially_stable_feature_lint(tcx, *span, *feature, *implies, *since); + unnecessary_partially_stable_feature_lint( + tcx, *span, *feature, *implies, *since, + ); } else { unnecessary_stable_feature_lint(tcx, *span, *feature, *since); } - } remaining_lib_features.remove(feature); @@ -1106,7 +1090,7 @@ mut since: Symbol, ) { if since.as_str() == VERSION_PLACEHOLDER { - since = rust_version_symbol(); + since = sym::env_CFG_RELEASE; } tcx.emit_spanned_lint( lint::builtin::STABLE_FEATURES,
-rustc_metadata = { path = "../rustc_metadata" } -rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_ast = { path = "../rustc_ast" } -rustc_session = { path = "../rustc_session" } -rustc_span = { path = "../rustc_span" } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/messages.ftl 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -plugin_impl_load_plugin_error = {$msg} - -plugin_impl_malformed_plugin_attribute = malformed `plugin` attribute - .label = malformed attribute diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/errors.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -//! Errors emitted by plugin_impl - -use rustc_macros::Diagnostic; -use rustc_span::Span; - -#[derive(Diagnostic)] -#[diag(plugin_impl_load_plugin_error)] -pub struct LoadPluginError { - #[primary_span] - pub span: Span, - pub msg: String, -} - -#[derive(Diagnostic)] -#[diag(plugin_impl_malformed_plugin_attribute, code = "E0498")] -pub struct MalformedPluginAttribute { - #[primary_span] - #[label] - pub span: Span, -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -//! Infrastructure for compiler plugins. -//! -//! Plugins are a deprecated way to extend the behavior of `rustc` in various ways. -//! -//! See the [`plugin` -//! feature](https://doc.rust-lang.org/nightly/unstable-book/language-features/plugin.html) -//! of the Unstable Book for some examples. - -#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] -#![recursion_limit = "256"] -#![deny(rustc::untranslatable_diagnostic)] -#![deny(rustc::diagnostic_outside_of_impl)] - -use rustc_errors::{DiagnosticMessage, SubdiagnosticMessage}; -use rustc_fluent_macro::fluent_messages; -use rustc_lint::LintStore; - -mod errors; -pub mod load; - -fluent_messages! { "../messages.ftl" } - -/// Structure used to register plugins. -/// -/// A plugin registrar function takes an `&mut Registry` and should call -/// methods to register its plugins. -pub struct Registry<'a> { - /// The `LintStore` allows plugins to register new lints. - pub lint_store: &'a mut LintStore, -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/load.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/load.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/load.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_plugin_impl/src/load.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,78 +0,0 @@ -//! Used by `rustc` when loading a plugin. 
- -use crate::errors::{LoadPluginError, MalformedPluginAttribute}; -use crate::Registry; -use libloading::Library; -use rustc_ast::Attribute; -use rustc_metadata::locator; -use rustc_session::cstore::MetadataLoader; -use rustc_session::Session; -use rustc_span::symbol::{sym, Ident}; - -use std::env; -use std::mem; -use std::path::PathBuf; - -/// Pointer to a registrar function. -type PluginRegistrarFn = fn(&mut Registry<'_>); - -/// Read plugin metadata and dynamically load registrar functions. -pub fn load_plugins( - sess: &Session, - metadata_loader: &dyn MetadataLoader, - attrs: &[Attribute], -) -> Vec<PluginRegistrarFn> { - let mut plugins = Vec::new(); - - for attr in attrs { - if !attr.has_name(sym::plugin) { - continue; - } - - for plugin in attr.meta_item_list().unwrap_or_default() { - match plugin.ident() { - Some(ident) if plugin.is_word() => { - load_plugin(&mut plugins, sess, metadata_loader, ident) - } - _ => { - sess.emit_err(MalformedPluginAttribute { span: plugin.span() }); - } - } - } - } - - plugins -} - -fn load_plugin( - plugins: &mut Vec<PluginRegistrarFn>, - sess: &Session, - metadata_loader: &dyn MetadataLoader, - ident: Ident, -) { - let lib = locator::find_plugin_registrar(sess, metadata_loader, ident.span, ident.name); - let fun = dylink_registrar(lib).unwrap_or_else(|err| { - // This is fatal: there are almost certainly macros we need inside this crate, so - // continuing would spew "macro undefined" errors. - sess.emit_fatal(LoadPluginError { span: ident.span, msg: err.to_string() }); - }); - plugins.push(fun); -} - -/// Dynamically link a registrar function into the compiler process. -fn dylink_registrar(lib_path: PathBuf) -> Result<PluginRegistrarFn, libloading::Error> { - // Make sure the path contains a / or the linker will search for it. - let lib_path = env::current_dir().unwrap().join(&lib_path); - - let lib = unsafe { Library::new(&lib_path) }?; - - let registrar_sym = unsafe { lib.get::<PluginRegistrarFn>(b"__rustc_plugin_registrar") }?; - - // Intentionally leak the dynamic library. We can't ever unload it - // since the library can make things that will live arbitrarily long - // (e.g., an Rc cycle or a thread).
- let registrar_sym = unsafe { registrar_sym.into_raw() }; - mem::forget(lib); - - Ok(*registrar_sym) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_privacy/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_privacy/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_privacy/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_privacy/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,15 +4,17 @@ edition = "2021" [dependencies] +# tidy-alphabetical-start rustc_ast = { path = "../rustc_ast" } rustc_attr = { path = "../rustc_attr" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } -rustc_hir = { path = "../rustc_hir" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } +rustc_hir_analysis = { path = "../rustc_hir_analysis" } rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } -rustc_hir_analysis = { path = "../rustc_hir_analysis" } tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_privacy/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_privacy/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_privacy/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_privacy/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,7 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![feature(associated_type_defaults)] #![feature(rustc_private)] #![feature(try_blocks)] @@ -185,7 +188,7 @@ | ty::Foreign(def_id) | ty::FnDef(def_id, ..) | ty::Closure(def_id, ..) - | ty::Generator(def_id, ..) => { + | ty::Coroutine(def_id, ..) => { self.def_id_visitor.visit_def_id(def_id, "type", &ty)?; if V::SHALLOW { return ControlFlow::Continue(()); @@ -207,22 +210,7 @@ } } } - ty::Alias(ty::Weak, alias) => { - self.def_id_visitor.visit_def_id(alias.def_id, "type alias", &ty); - } - ty::Alias(ty::Projection, proj) => { - if V::SKIP_ASSOC_TYS { - // Visitors searching for minimal visibility/reachability want to - // conservatively approximate associated types like `::Alias` - // as visible/reachable even if both `Type` and `Trait` are private. - // Ideally, associated types should be substituted in the same way as - // free type aliases, but this isn't done yet. - return ControlFlow::Continue(()); - } - // This will also visit args if necessary, so we don't need to recurse. - return self.visit_projection_ty(proj); - } - ty::Alias(ty::Inherent, data) => { + ty::Alias(kind @ (ty::Inherent | ty::Weak | ty::Projection), data) => { if V::SKIP_ASSOC_TYS { // Visitors searching for minimal visibility/reachability want to // conservatively approximate associated types like `Type::Alias` @@ -232,9 +220,14 @@ return ControlFlow::Continue(()); } + let kind = match kind { + ty::Inherent | ty::Projection => "associated type", + ty::Weak => "type alias", + ty::Opaque => unreachable!(), + }; self.def_id_visitor.visit_def_id( data.def_id, - "associated type", + kind, &LazyDefPathStr { def_id: data.def_id, tcx }, )?; @@ -291,7 +284,7 @@ | ty::Param(..) | ty::Bound(..) | ty::Error(_) - | ty::GeneratorWitness(..) => {} + | ty::CoroutineWitness(..) 
=> {} ty::Placeholder(..) | ty::Infer(..) => { bug!("unexpected type: {:?}", ty) } @@ -570,7 +563,8 @@ if !child.reexport_chain.is_empty() && child.vis.is_accessible_from(defining_mod, self.tcx) && let Res::Def(def_kind, def_id) = child.res - && let Some(def_id) = def_id.as_local() { + && let Some(def_id) = def_id.as_local() + { let vis = self.tcx.local_visibility(def_id); self.update_macro_reachable_def(def_id, def_kind, vis, defining_mod, macro_ev); } @@ -662,7 +656,7 @@ | DefKind::GlobalAsm | DefKind::Impl { .. } | DefKind::Closure - | DefKind::Generator => (), + | DefKind::Coroutine => (), } } } @@ -671,7 +665,8 @@ fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) { if self.impl_trait_pass && let hir::ItemKind::OpaqueTy(ref opaque) = item.kind - && !opaque.in_trait { + && !opaque.in_trait + { // FIXME: This is some serious pessimization intended to workaround deficiencies // in the reachability pass (`middle/reachable.rs`). Types are marked as link-time // reachable if they are returned via `impl Trait`, even from private functions. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,28 +3,25 @@ version = "0.0.0" edition = "2021" -[lib] - - [dependencies] +# tidy-alphabetical-start field-offset = "0.3.5" measureme = "10.0.0" +rustc-rayon-core = { version = "0.5.0", optional = true } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } -rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } rustc_query_system = { path = "../rustc_query_system" } -rustc-rayon-core = { version = "0.5.0", optional = true } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } thin-vec = "0.2.12" tracing = "0.1" - -# Not used directly, but included to enable the unstable_offset_of feature -memoffset = { version = "0.9.0", features = ["unstable_offset_of"] } +# tidy-alphabetical-end [features] +# tidy-alphabetical-start rustc_use_parallel_compiler = ["rustc-rayon-core", "rustc_query_system/rustc_use_parallel_compiler"] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,8 @@ //! Support for serializing the dep-graph and reloading it. 
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] // this shouldn't be necessary, but the check for `&mut _` is too naive and denies returning a function pointer that takes a mut ref #![feature(const_mut_refs)] #![feature(const_refs_to_cell)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/src/plumbing.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/src/plumbing.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/src/plumbing.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_impl/src/plumbing.rs 2023-12-21 16:55:28.000000000 +0000 @@ -197,6 +197,9 @@ ([(fatal_cycle) $($rest:tt)*]) => {{ rustc_query_system::HandleCycleError::Fatal }}; + ([(cycle_stash) $($rest:tt)*]) => {{ + rustc_query_system::HandleCycleError::Stash + }}; ([(cycle_delay_bug) $($rest:tt)*]) => {{ rustc_query_system::HandleCycleError::DelayBug }}; @@ -437,8 +440,6 @@ ); if let Some(key) = Q::Key::recover(tcx, &dep_node) { - #[cfg(debug_assertions)] - let _guard = tracing::span!(tracing::Level::TRACE, stringify!($name), ?key).entered(); force_query(query, QueryCtxt::new(tcx), key, dep_node); true } else { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,10 +3,10 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start parking_lot = "0.12" +rustc-rayon-core = { version = "0.5.0", optional = true } rustc_ast = { path = "../rustc_ast" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } @@ -15,7 +15,6 @@ rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } -rustc-rayon-core = { version = "0.5.0", optional = true } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } @@ -24,6 +23,9 @@ smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } thin-vec = "0.2.12" tracing = "0.1" +# tidy-alphabetical-end [features] +# tidy-alphabetical-start rustc_use_parallel_compiler = ["rustc-rayon-core"] +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -15,8 +15,6 @@ query_system_cycle_usage = cycle used when {$usage} -query_system_cycle_which_requires = ...which requires {$desc}... - query_system_increment_compilation = internal compiler error: encountered incremental compilation error with {$dep_node} .help = This is a known issue with the compiler. 
Run {$run_cmd} to allow your project to compile diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/dep_graph/graph.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/dep_graph/graph.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/dep_graph/graph.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/dep_graph/graph.rs 2023-12-21 16:55:28.000000000 +0000 @@ -149,7 +149,6 @@ DepNode { kind: D::DEP_KIND_RED, hash: Fingerprint::ZERO.into() }, EdgesVec::new(), None, - false, ); assert_eq!(red_node_index, DepNodeIndex::FOREVER_RED_NODE); match red_node_prev_index_and_color { @@ -332,7 +331,7 @@ /// - If you need 3+ arguments, use a tuple for the /// `arg` parameter. /// - /// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/incremental-compilation.html + /// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/queries/incremental-compilation.html #[inline(always)] pub fn with_task<Ctxt: HasDepContext<Deps = D>, A: Debug, R>( &self, @@ -373,8 +372,6 @@ let current_fingerprint = hash_result.map(|f| dcx.with_stable_hashing_context(|mut hcx| f(&mut hcx, &result))); - let print_status = cfg!(debug_assertions) && dcx.sess().opts.unstable_opts.dep_tasks; - // Intern the new `DepNode`. let (dep_node_index, prev_and_color) = self.current.intern_node( dcx.profiler(), @@ -382,7 +379,6 @@ key, edges, current_fingerprint, - print_status, ); hashing_timer.finish_with_query_invocation_id(dep_node_index.into()); @@ -589,8 +585,6 @@ cx.with_stable_hashing_context(|mut hcx| hash_result(&mut hcx, result)) }); - let print_status = cfg!(debug_assertions) && cx.sess().opts.unstable_opts.dep_tasks; - // Intern the new `DepNode` with the dependencies up-to-now. let (dep_node_index, prev_and_color) = data.current.intern_node( cx.profiler(), @@ -598,7 +592,6 @@ node, edges, current_fingerprint, - print_status, ); hashing_timer.finish_with_query_invocation_id(dep_node_index.into()); @@ -1219,20 +1212,13 @@ key: DepNode, edges: EdgesVec, fingerprint: Option<Fingerprint>, - print_status: bool, ) -> (DepNodeIndex, Option<(SerializedDepNodeIndex, DepNodeColor)>) { - let print_status = cfg!(debug_assertions) && print_status; - // Get timer for profiling `DepNode` interning let _node_intern_timer = self.node_intern_event_id.map(|eid| profiler.generic_activity_with_event_id(eid)); if let Some(prev_index) = prev_graph.node_to_index_opt(&key) { - let get_dep_node_index = |color, fingerprint| { - if print_status { - eprintln!("[task::{color:}] {key:?}"); - } - + let get_dep_node_index = |fingerprint| { let mut prev_index_to_index = self.prev_index_to_index.lock(); let dep_node_index = match prev_index_to_index[prev_index] { @@ -1256,12 +1242,12 @@ if fingerprint == prev_graph.fingerprint_by_index(prev_index) { // This is a green node: it existed in the previous compilation, // its query was re-executed, and it has the same result as before. - let dep_node_index = get_dep_node_index("green", fingerprint); + let dep_node_index = get_dep_node_index(fingerprint); (dep_node_index, Some((prev_index, DepNodeColor::Green(dep_node_index)))) } else { // This is a red node: it existed in the previous compilation, its query // was re-executed, but it has a different result from before.
- let dep_node_index = get_dep_node_index("red", fingerprint); + let dep_node_index = get_dep_node_index(fingerprint); (dep_node_index, Some((prev_index, DepNodeColor::Red))) } } else { @@ -1269,14 +1255,10 @@ // session, its query was re-executed, but it doesn't compute a result hash // (i.e. it represents a `no_hash` query), so we have no way of determining // whether or not the result was the same as before. - let dep_node_index = get_dep_node_index("unknown", Fingerprint::ZERO); + let dep_node_index = get_dep_node_index(Fingerprint::ZERO); (dep_node_index, Some((prev_index, DepNodeColor::Red))) } } else { - if print_status { - eprintln!("[task::new] {key:?}"); - } - let fingerprint = fingerprint.unwrap_or(Fingerprint::ZERO); // This is a new node: it didn't exist in the previous compilation session. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/error.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/error.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/error.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/error.rs 2023-12-21 16:55:28.000000000 +0000 @@ -15,6 +15,7 @@ Error, Fatal, DelayBug, + Stash, } #[derive(Subdiagnostic)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/query/plumbing.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/query/plumbing.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/query/plumbing.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_query_system/src/query/plumbing.rs 2023-12-21 16:55:28.000000000 +0000 @@ -19,7 +19,7 @@ use rustc_data_structures::sync::Lock; #[cfg(parallel_compiler)] use rustc_data_structures::{outline, sync}; -use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed, FatalError}; +use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed, FatalError, StashKey}; use rustc_span::{Span, DUMMY_SP}; use std::cell::Cell; use std::collections::hash_map::Entry; @@ -133,6 +133,17 @@ let guar = error.delay_as_bug(); query.value_from_cycle_error(*qcx.dep_context(), &cycle_error.cycle, guar) } + Stash => { + let guar = if let Some(root) = cycle_error.cycle.first() + && let Some(span) = root.query.span + { + error.stash(span, StashKey::Cycle); + qcx.dep_context().sess().delay_span_bug(span, "delayed cycle error") + } else { + error.emit() + }; + query.value_from_cycle_error(*qcx.dep_context(), &cycle_error.cycle, guar) + } } } @@ -459,7 +470,9 @@ // Similarly, fingerprint the result to assert that // it doesn't have anything not considered hashable. 
- if cfg!(debug_assertions) && let Some(hash_result) = query.hash_result() { + if cfg!(debug_assertions) + && let Some(hash_result) = query.hash_result() + { qcx.dep_context().with_stable_hashing_context(|mut hcx| { hash_result(&mut hcx, &result); }); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,9 +3,8 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start bitflags = "1.2.1" pulldown-cmark = { version = "0.9.3", default-features = false } rustc_arena = { path = "../rustc_arena" } @@ -28,3 +27,4 @@ smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } thin-vec = "0.2.12" tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -130,9 +130,6 @@ resolve_glob_import_doesnt_reexport = glob import doesn't reexport anything because no candidate is public enough -resolve_help_try_using_local_generic_param = - try using a local generic parameter instead - resolve_ident_bound_more_than_once_in_parameter_list = identifier `{$identifier}` is bound more than once in this parameter list .label = used as parameter more than once @@ -249,9 +246,6 @@ generic parameters cannot use `Self` in their defaults .label = `Self` in generic parameter default -resolve_self_type_implicitly_declared_by_impl = - `Self` type implicitly declared here, by this `impl` - resolve_tool_module_imported = cannot use a tool module through an import .note = the tool module imported here @@ -267,12 +261,6 @@ .label = does not match trait .label_trait_item = item in trait -resolve_try_adding_local_generic_param_on_method = - try adding a local generic parameter in this method instead - -resolve_try_using_local_generic_parameter = - try using a local generic parameter instead - resolve_try_using_similarly_named_label = try using similarly named label diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/build_reduced_graph.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/build_reduced_graph.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/build_reduced_graph.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/build_reduced_graph.rs 2023-12-21 16:55:28.000000000 +0000 @@ -981,7 +981,7 @@ | DefKind::GlobalAsm | DefKind::Closure | DefKind::Impl { .. } - | DefKind::Generator, + | DefKind::Coroutine, _, ) | Res::Local(..) 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/check_unused.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/check_unused.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/check_unused.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/check_unused.rs 2023-12-21 16:55:28.000000000 +0000 @@ -59,7 +59,6 @@ base_use_tree: Option<&'a ast::UseTree>, base_id: ast::NodeId, item_span: Span, - base_use_is_pub: bool, } struct ExternCrateToLint { @@ -146,7 +145,6 @@ // because this means that they were generated in some fashion by the // compiler and we don't need to consider them. ast::ItemKind::Use(..) if item.span.is_dummy() => return, - ast::ItemKind::Use(..) => self.base_use_is_pub = item.vis.kind.is_pub(), ast::ItemKind::ExternCrate(orig_name) => { self.extern_crate_items.push(ExternCrateToLint { id: item.id, @@ -173,7 +171,7 @@ self.base_use_tree = Some(use_tree); } - if self.base_use_is_pub { + if self.r.effective_visibilities.is_exported(self.r.local_def_id(id)) { self.check_import_as_underscore(use_tree, id); return; } @@ -332,13 +330,12 @@ base_use_tree: None, base_id: ast::DUMMY_NODE_ID, item_span: DUMMY_SP, - base_use_is_pub: false, }; visit::walk_crate(&mut visitor, krate); for unused in visitor.unused_imports.values() { let mut fixes = Vec::new(); - let mut spans = match calc_unused_spans(unused, unused.use_tree, unused.use_tree_id) { + let spans = match calc_unused_spans(unused, unused.use_tree, unused.use_tree_id) { UnusedSpanResult::Used => continue, UnusedSpanResult::FlatUnused(span, remove) => { fixes.push((remove, String::new())); @@ -356,20 +353,19 @@ } }; - let len = spans.len(); - spans.sort(); - let ms = MultiSpan::from_spans(spans.clone()); - let mut span_snippets = spans + let ms = MultiSpan::from_spans(spans); + + let mut span_snippets = ms + .primary_spans() .iter() - .filter_map(|s| match tcx.sess.source_map().span_to_snippet(*s) { - Ok(s) => Some(format!("`{s}`")), - _ => None, - }) + .filter_map(|span| tcx.sess.source_map().span_to_snippet(*span).ok()) + .map(|s| format!("`{s}`")) .collect::<Vec<_>>(); span_snippets.sort(); + let msg = format!( "unused import{}{}", - pluralize!(len), + pluralize!(ms.primary_spans().len()), if !span_snippets.is_empty() { format!(": {}", span_snippets.join(", ")) } else { @@ -379,7 +375,7 @@ let fix_msg = if fixes.len() == 1 && fixes[0].0 == unused.item_span { "remove the whole `use` item" - } else if spans.len() > 1 { + } else if ms.primary_spans().len() > 1 { "remove the unused imports" } else { "remove the unused import" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/def_collector.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/def_collector.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/def_collector.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/def_collector.rs 2023-12-21 16:55:28.000000000 +0000 @@ -260,7 +260,7 @@ Async::No => closure_def, } } - ExprKind::Async(_, _) => self.create_def(expr.id, DefPathData::ClosureExpr, expr.span), + ExprKind::Gen(_, _, _) => self.create_def(expr.id, DefPathData::ClosureExpr, expr.span), _ => self.parent_def, }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/diagnostics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/diagnostics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/diagnostics.rs 2023-12-04 19:48:34.000000000 +0000 +++
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/diagnostics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -25,7 +25,7 @@ use rustc_span::source_map::SourceMap; use rustc_span::symbol::{kw, sym, Ident, Symbol}; use rustc_span::{BytePos, Span, SyntaxContext}; -use thin_vec::ThinVec; +use thin_vec::{thin_vec, ThinVec}; use crate::errors::{ AddedMacroUse, ChangeImportBinding, ChangeImportBindingSuggestion, ConsiderAddingADerive, @@ -187,7 +187,9 @@ } else if let Some((span, msg, sugg, appl)) = suggestion { err.span_suggestion_verbose(span, msg, sugg, appl); err.emit(); - } else if let [segment] = path.as_slice() && is_call { + } else if let [segment] = path.as_slice() + && is_call + { err.stash(segment.ident.span, rustc_errors::StashKey::CallIntoMethod); } else { err.emit(); @@ -1145,7 +1147,7 @@ namespace: Namespace, parent_scope: &ParentScope<'a>, start_module: Module<'a>, - crate_name: Ident, + crate_path: ThinVec<ast::PathSegment>, filter_fn: FilterFn, ) -> Vec<ImportSuggestion> where FilterFn: Fn(Res) -> bool, @@ -1161,14 +1163,16 @@ Some(x) => Some(x), } { let in_module_is_extern = !in_module.def_id().is_local(); - // We have to visit module children in deterministic order to avoid - // instabilities in reported imports (#43552). in_module.for_each_child(self, |this, ident, ns, name_binding| { // avoid non-importable candidates if !name_binding.is_importable() { return; } + if ident.name == kw::Underscore { + return; + } + let child_accessible = accessible && this.is_accessible_from(name_binding.vis, parent_scope.module); @@ -1208,12 +1212,14 @@ let res = name_binding.res(); if filter_fn(res) { // create the path - let mut segms = path_segments.clone(); - if lookup_ident.span.at_least_rust_2018() { + let mut segms = if lookup_ident.span.at_least_rust_2018() { // crate-local absolute paths start with `crate::` in edition 2018 // FIXME: may also be stabilized for Rust 2015 (Issues #45477, #44660) - segms.insert(0, ast::PathSegment::from_ident(crate_name)); - } + crate_path.clone() + } else { + ThinVec::new() + }; + segms.append(&mut path_segments.clone()); segms.push(ast::PathSegment::from_ident(ident)); let path = Path { span: name_binding.span, segments: segms, tokens: None }; @@ -1312,18 +1318,18 @@ where FilterFn: Fn(Res) -> bool, { + let crate_path = thin_vec![ast::PathSegment::from_ident(Ident::with_dummy_span(kw::Crate))]; let mut suggestions = self.lookup_import_candidates_from_module( lookup_ident, namespace, parent_scope, self.graph_root, - Ident::with_dummy_span(kw::Crate), + crate_path, &filter_fn, ); if lookup_ident.span.at_least_rust_2018() { - let extern_prelude_names = self.extern_prelude.clone(); - for (ident, _) in extern_prelude_names.into_iter() { + for ident in self.extern_prelude.clone().into_keys() { if ident.span.from_expansion() { // Idents are adjusted to the root context before being // resolved in the extern prelude, so reporting this to the @@ -1334,13 +1340,43 @@ } let crate_id = self.crate_loader(|c| c.maybe_process_path_extern(ident.name)); if let Some(crate_id) = crate_id { - let crate_root = self.expect_module(crate_id.as_def_id()); + let crate_def_id = crate_id.as_def_id(); + let crate_root = self.expect_module(crate_def_id); + + // Check if there's already an item in scope with the same name as the crate. + // If so, we have to disambiguate the potential import suggestions by making + // the paths *global* (i.e., by prefixing them with `::`).
+ let needs_disambiguation = + self.resolutions(parent_scope.module).borrow().iter().any( + |(key, name_resolution)| { + if key.ns == TypeNS + && key.ident == ident + && let Some(binding) = name_resolution.borrow().binding + { + match binding.res() { + // No disambiguation needed if the identically named item we + // found in scope actually refers to the crate in question. + Res::Def(_, def_id) => def_id != crate_def_id, + Res::PrimTy(_) => true, + _ => false, + } + } else { + false + } + }, + ); + let mut crate_path = ThinVec::new(); + if needs_disambiguation { + crate_path.push(ast::PathSegment::path_root(rustc_span::DUMMY_SP)); + } + crate_path.push(ast::PathSegment::from_ident(ident)); + suggestions.extend(self.lookup_import_candidates_from_module( lookup_ident, namespace, parent_scope, crate_root, - ident, + crate_path, &filter_fn, )); } @@ -1505,9 +1541,22 @@ ), ); } + + let (span, sugg, post) = if let SuggestionTarget::SimilarlyNamed = suggestion.target + && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) + && let Some(span) = suggestion.span + && let Some(candidate) = suggestion.candidate.as_str().strip_prefix('_') + && snippet == candidate + { + // When the suggested binding change would be from `x` to `_x`, suggest changing the + // original binding definition instead. (#60164) + (span, snippet, ", consider changing it") + } else { + (span, suggestion.candidate.to_string(), "") + }; let msg = match suggestion.target { SuggestionTarget::SimilarlyNamed => format!( - "{} {} with a similar name exists", + "{} {} with a similar name exists{post}", suggestion.res.article(), suggestion.res.descr() ), @@ -1515,7 +1564,7 @@ format!("maybe you meant this {}", suggestion.res.descr()) } }; - err.span_suggestion(span, msg, suggestion.candidate, Applicability::MaybeIncorrect); + err.span_suggestion(span, msg, sugg, Applicability::MaybeIncorrect); true } @@ -1686,7 +1735,9 @@ non_exhaustive = Some(attr.span); } else if let Some(span) = ctor_fields_span { err.span_label(span, "a constructor is private if any of the fields is private"); - if let Res::Def(_, d) = res && let Some(fields) = self.field_visibility_spans.get(&d) { + if let Res::Def(_, d) = res + && let Some(fields) = self.field_visibility_spans.get(&d) + { err.multipart_suggestion_verbose( format!( "consider making the field{} publicly accessible", @@ -1735,7 +1786,9 @@ } // Final step in the import chain, point out if the ADT is `non_exhaustive` // which is probably why this privacy violation occurred. - if next_binding.is_none() && let Some(span) = non_exhaustive { + if next_binding.is_none() + && let Some(span) = non_exhaustive + { note_span.push_span_label( span, "cannot be constructed because it is `#[non_exhaustive]`", @@ -1842,7 +1895,8 @@ parent_scope, None, ignore_binding, - ).ok() + ) + .ok() } else if let Some(ribs) = ribs && let Some(TypeNS | ValueNS) = opt_ns { @@ -1866,7 +1920,8 @@ None, false, ignore_binding, - ).ok() + ) + .ok() }; if let Some(binding) = binding { let mut found = |what| { @@ -2228,7 +2283,9 @@ // Add the import to the start, with a `{` if required. let start_point = source_map.start_point(after_crate_name); - if is_definitely_crate && let Ok(start_snippet) = source_map.span_to_snippet(start_point) { + if is_definitely_crate + && let Ok(start_snippet) = source_map.span_to_snippet(start_point) + { corrections.push(( start_point, if has_nested { @@ -2243,7 +2300,8 @@ // Add a `};` to the end if nested, matching the `{` added at the start. 
if !has_nested { - corrections.push((source_map.end_point(after_crate_name), "};".to_string())); + corrections + .push((source_map.end_point(after_crate_name), "};".to_string())); } } else { // If the root import is module-relative, add the import separately @@ -2526,7 +2584,7 @@ candidates.iter().for_each(|c| { (if c.accessible { &mut accessible_path_strings } else { &mut inaccessible_path_strings }) - .push((path_names_to_string(&c.path), c.descr, c.did, &c.note, c.via_import)) + .push((pprust::path_to_string(&c.path), c.descr, c.did, &c.note, c.via_import)) }); // we want consistent results across executions, but candidates are produced @@ -2582,7 +2640,13 @@ for candidate in &mut accessible_path_strings { // produce an additional newline to separate the new use statement // from the directly following item. - let additional_newline = if let FoundUse::No = found_use && let DiagnosticMode::Normal = mode { "\n" } else { "" }; + let additional_newline = if let FoundUse::No = found_use + && let DiagnosticMode::Normal = mode + { + "\n" + } else { + "" + }; candidate.0 = format!("{add_use}{}{append}{trailing}{additional_newline}", &candidate.0); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/effective_visibilities.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/effective_visibilities.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/effective_visibilities.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/effective_visibilities.rs 2023-12-21 16:55:28.000000000 +0000 @@ -147,7 +147,8 @@ warn_ambiguity |= nested_binding.warn_ambiguity; } if !is_ambiguity(binding, warn_ambiguity) - && let Some(def_id) = binding.res().opt_def_id().and_then(|id| id.as_local()) { + && let Some(def_id) = binding.res().opt_def_id().and_then(|id| id.as_local()) + { self.update_def(def_id, binding.vis.expect_local(), parent_id); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/ident.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/ident.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/ident.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/ident.rs 2023-12-21 16:55:28.000000000 +0000 @@ -896,7 +896,8 @@ if !restricted_shadowing && binding.expansion != LocalExpnId::ROOT { if let NameBindingKind::Import { import, .. } = binding.kind - && matches!(import.kind, ImportKind::MacroExport) { + && matches!(import.kind, ImportKind::MacroExport) + { self.macro_expanded_macro_export_errors.insert((path_span, binding.span)); } } @@ -928,9 +929,10 @@ if !self.is_accessible_from(import_vis, parent_scope.module) { continue; } - if let Some(ignored) = ignore_binding && - let NameBindingKind::Import { import, .. } = ignored.kind && - import == *single_import { + if let Some(ignored) = ignore_binding + && let NameBindingKind::Import { import, .. } = ignored.kind + && import == *single_import + { // Ignore not just the binding itself, but if it has a shadowed_glob, // ignore that, too, because this loop is supposed to only process // named imports. @@ -1081,7 +1083,7 @@ for rib in ribs { match rib.kind { RibKind::Normal - | RibKind::ClosureOrAsync + | RibKind::FnOrCoroutine | RibKind::Module(..) | RibKind::MacroDefinition(..) 
| RibKind::ForwardGenericParamBan => { @@ -1154,7 +1156,7 @@ for rib in ribs { let has_generic_params: HasGenericParams = match rib.kind { RibKind::Normal - | RibKind::ClosureOrAsync + | RibKind::FnOrCoroutine | RibKind::Module(..) | RibKind::MacroDefinition(..) | RibKind::InlineAsmSym @@ -1238,7 +1240,7 @@ for rib in ribs { let has_generic_params = match rib.kind { RibKind::Normal - | RibKind::ClosureOrAsync + | RibKind::FnOrCoroutine | RibKind::Module(..) | RibKind::MacroDefinition(..) | RibKind::InlineAsmSym @@ -1440,7 +1442,9 @@ finalize, ignore_binding, ) - } else if let Some(ribs) = ribs && let Some(TypeNS | ValueNS) = opt_ns { + } else if let Some(ribs) = ribs + && let Some(TypeNS | ValueNS) = opt_ns + { match self.resolve_ident_in_lexical_scope( ident, ns, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/imports.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/imports.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/imports.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/imports.rs 2023-12-21 16:55:28.000000000 +0000 @@ -313,26 +313,20 @@ (true, true) => { // FIXME: remove `!binding.is_ambiguity()` after delete the warning ambiguity. if !binding.is_ambiguity() - && let NameBindingKind::Import { import: old_import, .. } = old_binding.kind + && let NameBindingKind::Import { import: old_import, .. } = + old_binding.kind && let NameBindingKind::Import { import, .. } = binding.kind - && old_import == import { + && old_import == import + { // We should replace the `old_binding` with `binding` regardless // of whether they has same resolution or not when they are // imported from the same glob-import statement. resolution.binding = Some(binding); } else if res != old_binding.res() { let binding = if warn_ambiguity { - this.warn_ambiguity( - AmbiguityKind::GlobVsGlob, - old_binding, - binding, - ) + this.warn_ambiguity(AmbiguityKind::GlobVsGlob, old_binding, binding) } else { - this.ambiguity( - AmbiguityKind::GlobVsGlob, - old_binding, - binding, - ) + this.ambiguity(AmbiguityKind::GlobVsGlob, old_binding, binding) }; resolution.binding = Some(binding); } else if !old_binding.vis.is_at_least(binding.vis, this.tcx) { @@ -434,7 +428,9 @@ let t = f(self, resolution); - if let Some(binding) = resolution.binding() && old_binding != Some(binding) { + if let Some(binding) = resolution.binding() + && old_binding != Some(binding) + { (binding, t, warn_ambiguity || old_binding.is_some()) } else { return t; @@ -637,7 +633,8 @@ if binding.res() != Res::Err && glob_binding.res() != Res::Err - && let NameBindingKind::Import { import: glob_import, .. } = glob_binding.kind + && let NameBindingKind::Import { import: glob_import, .. } = + glob_binding.kind && let Some(binding_id) = binding_id && let Some(glob_import_id) = glob_import.id() && let glob_import_def_id = self.local_def_id(glob_import_id) @@ -738,11 +735,11 @@ match &import.kind { ImportKind::Single { source, .. 
} => { if let Some(ModuleOrUniformRoot::Module(module)) = import.imported_module.get() - && let Some(module) = module.opt_def_id() + && let Some(module) = module.opt_def_id() { self.find_cfg_stripped(&mut diag, &source.name, module) } - }, + } _ => {} } } @@ -989,10 +986,15 @@ } } if !is_prelude - && let Some(max_vis) = max_vis.get() - && !max_vis.is_at_least(import.expect_vis(), self.tcx) + && let Some(max_vis) = max_vis.get() + && !max_vis.is_at_least(import.expect_vis(), self.tcx) { - self.lint_buffer.buffer_lint(UNUSED_IMPORTS, id, import.span, fluent::resolve_glob_import_doesnt_reexport); + self.lint_buffer.buffer_lint( + UNUSED_IMPORTS, + id, + import.span, + fluent::resolve_glob_import_doesnt_reexport, + ); } return None; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/late/diagnostics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/late/diagnostics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/late/diagnostics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/late/diagnostics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -229,8 +229,7 @@ i.ident.name == item_str.name // Don't suggest if the item is in Fn signature arguments (#112590). && !sig.span.contains(item_span) - }) - { + }) { let sp = item_span.shrink_to_lo(); // Account for `Foo { field }` when suggesting `self.field` so we result on @@ -241,7 +240,9 @@ } _ => None, }; - let pre = if let Some(field) = field && field.is_shorthand { + let pre = if let Some(field) = field + && field.is_shorthand + { format!("{item_ident}: ") } else { String::new() @@ -254,13 +255,14 @@ } _ => matches!( source, - PathSource::Expr(Some(Expr { kind: ExprKind::Call(..), ..})), + PathSource::Expr(Some(Expr { kind: ExprKind::Call(..), .. })), ), }; match &item.kind { AssocItemKind::Fn(fn_) - if (!sig.decl.has_self() || !is_call) && fn_.sig.decl.has_self() => { + if (!sig.decl.has_self() || !is_call) && fn_.sig.decl.has_self() => + { // Ensure that we only suggest `self.` if `self` is available, // you can't call `fn foo(&self)` from `fn bar()` (#115992). // We also want to mention that the method exists. 
@@ -270,19 +272,16 @@ )); None } - AssocItemKind::Fn(fn_) - if !fn_.sig.decl.has_self() && !is_call => { + AssocItemKind::Fn(fn_) if !fn_.sig.decl.has_self() && !is_call => { span_label = Some(( item.ident.span, "an associated function by that name is available on `Self` here", )); None } - AssocItemKind::Fn(fn_) if fn_.sig.decl.has_self() => Some(( - sp, - "consider using the method on `Self`", - format!("{pre}self."), - )), + AssocItemKind::Fn(fn_) if fn_.sig.decl.has_self() => { + Some((sp, "consider using the method on `Self`", format!("{pre}self."))) + } AssocItemKind::Fn(_) => Some(( sp, "consider using the associated function on `Self`", @@ -293,7 +292,7 @@ "consider using the associated constant on `Self`", format!("{pre}Self::"), )), - _ => None + _ => None, } } else { None @@ -379,8 +378,8 @@ prefix_path: &[Segment], following_seg: Option<&Segment>, ) -> Vec { - if let Some(segment) = prefix_path.last() && - let Some(following_seg) = following_seg + if let Some(segment) = prefix_path.last() + && let Some(following_seg) = following_seg { let candidates = self.r.lookup_import_candidates( segment.ident, @@ -392,12 +391,16 @@ candidates .into_iter() .filter(|candidate| { - if let Some(def_id) = candidate.did && - let Some(module) = self.r.get_module(def_id) { - Some(def_id) != self.parent_scope.module.opt_def_id() && - self.r.resolutions(module).borrow().iter().any(|(key, _r)| { - key.ident.name == following_seg.ident.name - }) + if let Some(def_id) = candidate.did + && let Some(module) = self.r.get_module(def_id) + { + Some(def_id) != self.parent_scope.module.opt_def_id() + && self + .r + .resolutions(module) + .borrow() + .iter() + .any(|(key, _r)| key.ident.name == following_seg.ident.name) } else { false } @@ -747,11 +750,15 @@ } // Try to find in last block rib - if let Some(rib) = &self.last_block_rib && let RibKind::Normal = rib.kind { + if let Some(rib) = &self.last_block_rib + && let RibKind::Normal = rib.kind + { for (ident, &res) in &rib.bindings { - if let Res::Local(_) = res && path.len() == 1 && - ident.span.eq_ctxt(path[0].ident.span) && - ident.name == path[0].ident.name { + if let Res::Local(_) = res + && path.len() == 1 + && ident.span.eq_ctxt(path[0].ident.span) + && ident.name == path[0].ident.name + { err.span_help( ident.span, format!("the binding `{path_str}` is available in a different scope in the same function"), @@ -867,9 +874,7 @@ // (could be in a different file) or introduced in the same file as the typo // (could belong to a different crate) if let TypoCandidate::Shadowed(res, Some(sugg_span)) = typo_sugg - && res - .opt_def_id() - .is_some_and(|id| id.is_local() || is_in_same_file(span, sugg_span)) + && res.opt_def_id().is_some_and(|id| id.is_local() || is_in_same_file(span, sugg_span)) { err.span_label( sugg_span, @@ -1074,12 +1079,11 @@ && trait_ref.path.span == span && let PathSource::Trait(_) = source && let Some(Res::Def(DefKind::Struct | DefKind::Enum | DefKind::Union, _)) = res - && let Ok(self_ty_str) = - self.r.tcx.sess.source_map().span_to_snippet(self_ty.span) + && let Ok(self_ty_str) = self.r.tcx.sess.source_map().span_to_snippet(self_ty.span) && let Ok(trait_ref_str) = self.r.tcx.sess.source_map().span_to_snippet(trait_ref.path.span) { - err.multipart_suggestion( + err.multipart_suggestion( "`impl` items mention the trait being implemented first and the type it is being implemented for second", vec![(trait_ref.path.span, self_ty_str), (self_ty.span, trait_ref_str)], Applicability::MaybeIncorrect, @@ -1106,12 +1110,10 @@ source: 
PathSource<'_>, span: Span, ) -> bool { - if let PathSource::Expr(_) = source && - let Some(Expr { - span: expr_span, - kind: ExprKind::Assign(lhs, _, _), - .. - }) = self.diagnostic_metadata.in_if_condition { + if let PathSource::Expr(_) = source + && let Some(Expr { span: expr_span, kind: ExprKind::Assign(lhs, _, _), .. }) = + self.diagnostic_metadata.in_if_condition + { // Icky heuristic so we don't suggest: // `if (i + 2) = 2` => `if let (i + 2) = 2` (approximately pattern) // `if 2 = i` => `if let 2 = i` (lhs needs to contain error span) @@ -1240,31 +1242,32 @@ let mut has_self_arg = None; if let PathSource::Expr(Some(parent)) = source && let ExprKind::Call(_, args) = &parent.kind - && !args.is_empty() { - let mut expr_kind = &args[0].kind; - loop { - match expr_kind { - ExprKind::Path(_, arg_name) if arg_name.segments.len() == 1 => { - if arg_name.segments[0].ident.name == kw::SelfLower { - let call_span = parent.span; - let tail_args_span = if args.len() > 1 { - Some(Span::new( - args[1].span.lo(), - args.last().unwrap().span.hi(), - call_span.ctxt(), - None, - )) - } else { - None - }; - has_self_arg = Some((call_span, tail_args_span)); - } - break; + && !args.is_empty() + { + let mut expr_kind = &args[0].kind; + loop { + match expr_kind { + ExprKind::Path(_, arg_name) if arg_name.segments.len() == 1 => { + if arg_name.segments[0].ident.name == kw::SelfLower { + let call_span = parent.span; + let tail_args_span = if args.len() > 1 { + Some(Span::new( + args[1].span.lo(), + args.last().unwrap().span.hi(), + call_span.ctxt(), + None, + )) + } else { + None + }; + has_self_arg = Some((call_span, tail_args_span)); } - ExprKind::AddrOf(_, _, expr) => expr_kind = &expr.kind, - _ => break, + break; } + ExprKind::AddrOf(_, _, expr) => expr_kind = &expr.kind, + _ => break, } + } } has_self_arg } @@ -1321,8 +1324,8 @@ ); true } else if kind == DefKind::Struct - && let Some(lhs_source_span) = lhs_span.find_ancestor_inside(expr.span) - && let Ok(snippet) = self.r.tcx.sess.source_map().span_to_snippet(lhs_source_span) + && let Some(lhs_source_span) = lhs_span.find_ancestor_inside(expr.span) + && let Ok(snippet) = self.r.tcx.sess.source_map().span_to_snippet(lhs_source_span) { // The LHS is a type that originates from a macro call. // We have to add angle brackets around it. @@ -1427,7 +1430,13 @@ .map(|(idx, new)| (new, old_fields.get(idx))) .map(|(new, old)| { let new = new.to_ident_string(); - if let Some(Some(old)) = old && new != *old { format!("{new}: {old}") } else { new } + if let Some(Some(old)) = old + && new != *old + { + format!("{new}: {old}") + } else { + new + } }) .collect::>() } else { @@ -1561,7 +1570,26 @@ err.set_primary_message( "cannot initialize a tuple struct which contains private fields", ); - + if !def_id.is_local() + && self + .r + .tcx + .inherent_impls(def_id) + .iter() + .flat_map(|impl_def_id| { + self.r.tcx.provided_trait_methods(*impl_def_id) + }) + .any(|assoc| !assoc.fn_has_self_parameter && assoc.name == sym::new) + { + // FIXME: look for associated functions with Self return type, + // instead of relying only on the name and lack of self receiver. + err.span_suggestion_verbose( + span.shrink_to_hi(), + "you might have meant to use the `new` associated function", + "::new".to_string(), + Applicability::MaybeIncorrect, + ); + } // Use spans of the tuple struct definition. 
self.r.field_def_ids(def_id).map(|field_ids| { field_ids @@ -1813,7 +1841,9 @@ } } - if let RibKind::MacroDefinition(def) = rib.kind && def == self.r.macro_def(ctxt) { + if let RibKind::MacroDefinition(def) = rib.kind + && def == self.r.macro_def(ctxt) + { // If an invocation of this macro created `ident`, give up on `ident` // and switch to `ident`'s source from the macro definition. ctxt.remove_mark(); @@ -1934,18 +1964,20 @@ // try to give a suggestion for this pattern: `name = blah`, which is common in other languages // suggest `let name = blah` to introduce a new binding fn let_binding_suggestion(&mut self, err: &mut Diagnostic, ident_span: Span) -> bool { - if let Some(Expr { kind: ExprKind::Assign(lhs, .. ), .. }) = self.diagnostic_metadata.in_assignment && - let ast::ExprKind::Path(None, _) = lhs.kind { - if !ident_span.from_expansion() { - err.span_suggestion_verbose( - ident_span.shrink_to_lo(), - "you might have meant to introduce a new binding", - "let ".to_string(), - Applicability::MaybeIncorrect, - ); - return true; - } + if let Some(Expr { kind: ExprKind::Assign(lhs, ..), .. }) = + self.diagnostic_metadata.in_assignment + && let ast::ExprKind::Path(None, _) = lhs.kind + { + if !ident_span.from_expansion() { + err.span_suggestion_verbose( + ident_span.shrink_to_lo(), + "you might have meant to introduce a new binding", + "let ".to_string(), + Applicability::MaybeIncorrect, + ); + return true; } + } false } @@ -2406,7 +2438,10 @@ continue; } - if !span.can_be_used_for_suggestions() && suggest_note && let Some(name) = name { + if !span.can_be_used_for_suggestions() + && suggest_note + && let Some(name) = name + { suggest_note = false; // Avoid displaying the same help multiple times. err.span_label( span, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/late.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/late.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/late.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/late.rs 2023-12-21 16:55:28.000000000 +0000 @@ -41,9 +41,6 @@ type IdentMap<T> = FxHashMap<Ident, T>; -/// Map from the name in a pattern to its binding mode. -type BindingMap = IdentMap<BindingInfo>; use diagnostics::{ ElisionFnParameter, LifetimeElisionCandidate, MissingLifetime, MissingLifetimeKind, }; @@ -180,8 +177,8 @@ /// upvars). AssocItem, - /// We passed through a closure. Disallow labels. - ClosureOrAsync, + /// We passed through a function, closure or coroutine signature. Disallow labels. + FnOrCoroutine, /// We passed through an item scope. Disallow upvars. Item(HasGenericParams), @@ -218,7 +215,7 @@ pub(crate) fn contains_params(&self) -> bool { match self { RibKind::Normal - | RibKind::ClosureOrAsync + | RibKind::FnOrCoroutine | RibKind::ConstantItem(..) | RibKind::Module(_) | RibKind::MacroDefinition(_) @@ -234,7 +231,7 @@ RibKind::Normal | RibKind::MacroDefinition(..) => false, RibKind::AssocItem - | RibKind::ClosureOrAsync + | RibKind::FnOrCoroutine | RibKind::Item(..) | RibKind::ConstantItem(..) | RibKind::Module(..) @@ -739,7 +736,8 @@ // Check whether we should interpret this as a bare trait object. if qself.is_none() && let Some(partial_res) = self.r.partial_res_map.get(&ty.id) - && let Some(Res::Def(DefKind::Trait | DefKind::TraitAlias, _)) = partial_res.full_res() + && let Some(Res::Def(DefKind::Trait | DefKind::TraitAlias, _)) = + partial_res.full_res() { // This path is actually a bare trait object.
In case of a bare `Fn`-trait // object with anonymous lifetimes, we need this rib to correctly place the @@ -926,9 +924,9 @@ debug!("(resolving function) entering function"); // Create a value rib for the function. - self.with_rib(ValueNS, RibKind::ClosureOrAsync, |this| { + self.with_rib(ValueNS, RibKind::FnOrCoroutine, |this| { // Create a label rib for the function. - this.with_label_rib(RibKind::ClosureOrAsync, |this| { + this.with_label_rib(RibKind::FnOrCoroutine, |this| { match fn_kind { FnKind::Fn(_, _, sig, _, generics, body) => { this.visit_generics(generics); @@ -2049,7 +2047,9 @@ if lifetime_count != 0 { parameter_info.push(ElisionFnParameter { index, - ident: if let Some(pat) = pat && let PatKind::Ident(_, ident, _) = pat.kind { + ident: if let Some(pat) = pat + && let PatKind::Ident(_, ident, _) = pat.kind + { Some(ident) } else { None @@ -2143,7 +2143,9 @@ impl<'a> Visitor<'a> for SelfVisitor<'_, '_, '_> { fn visit_ty(&mut self, ty: &'a Ty) { trace!("SelfVisitor considering ty={:?}", ty); - if let TyKind::Ref(lt, ref mt) = ty.kind && self.is_self_ty(&mt.ty) { + if let TyKind::Ref(lt, ref mt) = ty.kind + && self.is_self_ty(&mt.ty) + { let lt_id = if let Some(lt) = lt { lt.id } else { @@ -3164,8 +3166,8 @@ /// this is done hygienically. This could arise for a macro /// that expands into an or-pattern where one 'x' was from the /// user and one 'x' came from the macro. - fn binding_mode_map(&mut self, pat: &Pat) -> BindingMap { - let mut binding_map = FxHashMap::default(); + fn binding_mode_map(&mut self, pat: &Pat) -> FxIndexMap { + let mut binding_map = FxIndexMap::default(); pat.walk(&mut |pat| { match pat.kind { @@ -3200,22 +3202,28 @@ /// Checks that all of the arms in an or-pattern have exactly the /// same set of bindings, with the same binding modes for each. - fn check_consistent_bindings(&mut self, pats: &[P]) -> Vec { - let mut missing_vars = FxHashMap::default(); - let mut inconsistent_vars = FxHashMap::default(); + fn check_consistent_bindings( + &mut self, + pats: &[P], + ) -> Vec> { + // pats is consistent. + let mut missing_vars = FxIndexMap::default(); + let mut inconsistent_vars = FxIndexMap::default(); // 1) Compute the binding maps of all arms. let maps = pats.iter().map(|pat| self.binding_mode_map(pat)).collect::>(); // 2) Record any missing bindings or binding mode inconsistencies. - for (map_outer, pat_outer) in pats.iter().enumerate().map(|(idx, pat)| (&maps[idx], pat)) { + for (map_outer, pat_outer) in maps.iter().zip(pats.iter()) { // Check against all arms except for the same pattern which is always self-consistent. - let inners = pats + let inners = maps .iter() - .enumerate() + .zip(pats.iter()) .filter(|(_, pat)| pat.id != pat_outer.id) - .flat_map(|(idx, _)| maps[idx].iter()) - .map(|(key, binding)| (key.name, map_outer.get(&key), binding)); + .flat_map(|(map, _)| map) + .map(|(key, binding)| (key.name, map_outer.get(key), binding)); + + let inners = inners.collect::>(); for (name, info, &binding_inner) in inners { match info { @@ -3244,10 +3252,7 @@ } // 3) Report all missing variables we found. - let mut missing_vars = missing_vars.into_iter().collect::>(); - missing_vars.sort_by_key(|&(sym, ref _err)| sym); - - for (name, mut v) in missing_vars.into_iter() { + for (name, mut v) in missing_vars { if inconsistent_vars.contains_key(&name) { v.could_be_path = false; } @@ -3258,10 +3263,8 @@ } // 4) Report all inconsistencies in binding modes we found. 
- let mut inconsistent_vars = inconsistent_vars.iter().collect::>(); - inconsistent_vars.sort(); for (name, v) in inconsistent_vars { - self.report_error(v.0, ResolutionError::VariableBoundWithDifferentMode(*name, v.1)); + self.report_error(v.0, ResolutionError::VariableBoundWithDifferentMode(name, v.1)); } // 5) Finally bubble up all the binding maps. @@ -3604,7 +3607,9 @@ sugg.to_string(), Applicability::MaybeIncorrect, )) - } else if res.is_none() && let PathSource::Type | PathSource::Expr(_) = source { + } else if res.is_none() + && let PathSource::Type | PathSource::Expr(_) = source + { this.suggest_adding_generic_parameter(path, source) } else { None @@ -4006,7 +4011,7 @@ "unnecessary qualification", lint::BuiltinLintDiagnostics::UnusedQualifications { removal_span: finalize.path_span.until(last_segment.ident.span), - } + }, ) } } @@ -4059,13 +4064,13 @@ (block.could_be_bare_literal, &block.stmts[..]) && let ExprKind::Type(..) = expr.kind { - self.diagnostic_metadata.current_block_could_be_bare_struct_literal = - Some(block.span); + self.diagnostic_metadata.current_block_could_be_bare_struct_literal = Some(block.span); } // Descend into the block. for stmt in &block.stmts { if let StmtKind::Item(ref item) = stmt.kind - && let ItemKind::MacroDef(..) = item.kind { + && let ItemKind::MacroDef(..) = item.kind + { num_macro_definition_ribs += 1; let res = self.r.local_def_id(item.id).to_def_id(); self.ribs[ValueNS].push(Rib::new(RibKind::MacroDefinition(res))); @@ -4282,7 +4287,7 @@ .. }) => { self.with_rib(ValueNS, RibKind::Normal, |this| { - this.with_label_rib(RibKind::ClosureOrAsync, |this| { + this.with_label_rib(RibKind::FnOrCoroutine, |this| { // Resolve arguments: this.resolve_params(&fn_decl.inputs); // No need to resolve return type -- @@ -4299,7 +4304,7 @@ }) }); } - // For closures, ClosureOrAsyncRibKind is added in visit_fn + // For closures, RibKind::FnOrCoroutine is added in visit_fn ExprKind::Closure(box ast::Closure { binder: ClosureBinder::For { ref generic_params, span }, .. @@ -4316,8 +4321,8 @@ ); } ExprKind::Closure(..) => visit::walk_expr(self, expr), - ExprKind::Async(..) => { - self.with_label_rib(RibKind::ClosureOrAsync, |this| visit::walk_expr(this, expr)); + ExprKind::Gen(..) => { + self.with_label_rib(RibKind::FnOrCoroutine, |this| visit::walk_expr(this, expr)); } ExprKind::Repeat(ref elem, ref ct) => { self.visit_expr(elem); @@ -4421,7 +4426,11 @@ && let Some(def_id) = res.opt_def_id() && !def_id.is_local() && self.r.tcx.crate_types().contains(&CrateType::ProcMacro) - && matches!(self.r.tcx.sess.opts.resolve_doc_links, ResolveDocLinks::ExportedMetadata) { + && matches!( + self.r.tcx.sess.opts.resolve_doc_links, + ResolveDocLinks::ExportedMetadata + ) + { // Encoding foreign def ids in proc macro crate metadata will ICE. return None; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -7,6 +7,8 @@ //! Type-relative name resolution (methods, fields, associated items) happens in `rustc_hir_analysis`. 
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature(assert_matches)] #![feature(box_patterns)] #![feature(extract_if)] @@ -824,8 +826,10 @@ matches!(import.kind, ImportKind::ExternCrate { .. }) } NameBindingKind::Module(module) - if let ModuleKind::Def(DefKind::Mod, def_id, _) = module.kind - => def_id.is_crate_root(), + if let ModuleKind::Def(DefKind::Mod, def_id, _) = module.kind => + { + def_id.is_crate_root() + } _ => false, } } @@ -1074,8 +1078,8 @@ /// Also includes of list of each fields visibility struct_constructors: LocalDefIdMap<(Res, ty::Visibility, Vec>)>, - /// Features enabled for this crate. - active_features: FxHashSet, + /// Features declared for this crate. + declared_features: FxHashSet, lint_buffer: LintBuffer, @@ -1417,12 +1421,7 @@ multi_segment_macro_resolutions: Default::default(), builtin_attrs: Default::default(), containers_deriving_copy: Default::default(), - active_features: features - .declared_lib_features - .iter() - .map(|(feat, ..)| *feat) - .chain(features.declared_lang_features.iter().map(|(feat, ..)| *feat)) - .collect(), + declared_features: features.declared_features.clone(), lint_buffer: LintBuffer::default(), next_node_id: CRATE_NODE_ID, node_id_to_def_id, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/macros.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/macros.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/macros.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/macros.rs 2023-12-21 16:55:28.000000000 +0000 @@ -748,33 +748,45 @@ } path_res @ (PathResult::NonModule(..) | PathResult::Failed { .. }) => { let mut suggestion = None; - let (span, label, module) = if let PathResult::Failed { span, label, module, .. } = path_res { - // try to suggest if it's not a macro, maybe a function - if let PathResult::NonModule(partial_res) = self.maybe_resolve_path(&path, Some(ValueNS), &parent_scope) - && partial_res.unresolved_segments() == 0 { - let sm = self.tcx.sess.source_map(); - let exclamation_span = sm.next_point(span); - suggestion = Some(( - vec![(exclamation_span, "".to_string())], - format!("{} is not a macro, but a {}, try to remove `!`", Segment::names_to_string(&path), partial_res.base_res().descr()), - Applicability::MaybeIncorrect + let (span, label, module) = + if let PathResult::Failed { span, label, module, .. 
} = path_res { + // try to suggest if it's not a macro, maybe a function + if let PathResult::NonModule(partial_res) = + self.maybe_resolve_path(&path, Some(ValueNS), &parent_scope) + && partial_res.unresolved_segments() == 0 + { + let sm = self.tcx.sess.source_map(); + let exclamation_span = sm.next_point(span); + suggestion = Some(( + vec![(exclamation_span, "".to_string())], + format!( + "{} is not a macro, but a {}, try to remove `!`", + Segment::names_to_string(&path), + partial_res.base_res().descr() + ), + Applicability::MaybeIncorrect, )); - } - (span, label, module) - } else { - ( - path_span, - format!( - "partially resolved path in {} {}", - kind.article(), - kind.descr() - ), - None, - ) - }; + } + (span, label, module) + } else { + ( + path_span, + format!( + "partially resolved path in {} {}", + kind.article(), + kind.descr() + ), + None, + ) + }; self.report_error( span, - ResolutionError::FailedToResolve { last_segment: path.last().map(|segment| segment.ident.name), label, suggestion, module }, + ResolutionError::FailedToResolve { + last_segment: path.last().map(|segment| segment.ident.name), + label, + suggestion, + module, + }, ); } PathResult::Module(..) | PathResult::Indeterminate => unreachable!(), @@ -854,7 +866,7 @@ let feature = stability.feature; let is_allowed = |feature| { - self.active_features.contains(&feature) || span.allows_unstable(feature) + self.declared_features.contains(&feature) || span.allows_unstable(feature) }; let allowed_by_implication = implied_by.is_some_and(|feature| is_allowed(feature)); if !is_allowed(feature) && !allowed_by_implication { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/rustdoc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/rustdoc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/rustdoc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_resolve/src/rustdoc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -346,7 +346,9 @@ for attr in attrs { if attr.has_name(sym::rustc_doc_primitive) { return true; - } else if attr.has_name(sym::doc) && let Some(items) = attr.meta_item_list() { + } else if attr.has_name(sym::doc) + && let Some(items) = attr.meta_item_list() + { for item in items { if item.has_name(sym::keyword) { return true; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,10 +4,14 @@ edition = "2021" [dependencies] +# tidy-alphabetical-start indexmap = "2.0.0" smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } thin-vec = "0.2.12" +# tidy-alphabetical-end [dev-dependencies] +# tidy-alphabetical-start rustc_macros = { path = "../rustc_macros" } tempfile = "3.2" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/collection_impls.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/collection_impls.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/collection_impls.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/collection_impls.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,279 +0,0 @@ -//! 
Implementations of serialization for structures found in liballoc - -use crate::{Decodable, Decoder, Encodable, Encoder}; -use smallvec::{Array, SmallVec}; -use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet, LinkedList, VecDeque}; -use std::hash::{BuildHasher, Hash}; -use std::rc::Rc; -use std::sync::Arc; -use thin_vec::ThinVec; - -impl>> Encodable for SmallVec { - fn encode(&self, s: &mut S) { - let slice: &[A::Item] = self; - slice.encode(s); - } -} - -impl>> Decodable for SmallVec { - fn decode(d: &mut D) -> SmallVec { - let len = d.read_usize(); - (0..len).map(|_| Decodable::decode(d)).collect() - } -} - -impl> Encodable for ThinVec { - fn encode(&self, s: &mut S) { - self.as_slice().encode(s); - } -} - -impl> Decodable for ThinVec { - fn decode(d: &mut D) -> ThinVec { - let len = d.read_usize(); - (0..len).map(|_| Decodable::decode(d)).collect() - } -} - -impl> Encodable for LinkedList { - fn encode(&self, s: &mut S) { - s.emit_usize(self.len()); - for e in self.iter() { - e.encode(s); - } - } -} - -impl> Decodable for LinkedList { - fn decode(d: &mut D) -> LinkedList { - let len = d.read_usize(); - (0..len).map(|_| Decodable::decode(d)).collect() - } -} - -impl> Encodable for VecDeque { - fn encode(&self, s: &mut S) { - s.emit_usize(self.len()); - for e in self.iter() { - e.encode(s); - } - } -} - -impl> Decodable for VecDeque { - fn decode(d: &mut D) -> VecDeque { - let len = d.read_usize(); - (0..len).map(|_| Decodable::decode(d)).collect() - } -} - -impl Encodable for BTreeMap -where - K: Encodable + PartialEq + Ord, - V: Encodable, -{ - fn encode(&self, e: &mut S) { - e.emit_usize(self.len()); - for (key, val) in self.iter() { - key.encode(e); - val.encode(e); - } - } -} - -impl Decodable for BTreeMap -where - K: Decodable + PartialEq + Ord, - V: Decodable, -{ - fn decode(d: &mut D) -> BTreeMap { - let len = d.read_usize(); - let mut map = BTreeMap::new(); - for _ in 0..len { - let key = Decodable::decode(d); - let val = Decodable::decode(d); - map.insert(key, val); - } - map - } -} - -impl Encodable for BTreeSet -where - T: Encodable + PartialEq + Ord, -{ - fn encode(&self, s: &mut S) { - s.emit_usize(self.len()); - for e in self.iter() { - e.encode(s); - } - } -} - -impl Decodable for BTreeSet -where - T: Decodable + PartialEq + Ord, -{ - fn decode(d: &mut D) -> BTreeSet { - let len = d.read_usize(); - let mut set = BTreeSet::new(); - for _ in 0..len { - set.insert(Decodable::decode(d)); - } - set - } -} - -impl Encodable for HashMap -where - K: Encodable + Eq, - V: Encodable, - S: BuildHasher, -{ - fn encode(&self, e: &mut E) { - e.emit_usize(self.len()); - for (key, val) in self.iter() { - key.encode(e); - val.encode(e); - } - } -} - -impl Decodable for HashMap -where - K: Decodable + Hash + Eq, - V: Decodable, - S: BuildHasher + Default, -{ - fn decode(d: &mut D) -> HashMap { - let len = d.read_usize(); - let state = Default::default(); - let mut map = HashMap::with_capacity_and_hasher(len, state); - for _ in 0..len { - let key = Decodable::decode(d); - let val = Decodable::decode(d); - map.insert(key, val); - } - map - } -} - -impl Encodable for HashSet -where - T: Encodable + Eq, - S: BuildHasher, -{ - fn encode(&self, s: &mut E) { - s.emit_usize(self.len()); - for e in self.iter() { - e.encode(s); - } - } -} - -impl Decodable for HashSet -where - T: Decodable + Hash + Eq, - S: BuildHasher + Default, -{ - fn decode(d: &mut D) -> HashSet { - let len = d.read_usize(); - let state = Default::default(); - let mut set = HashSet::with_capacity_and_hasher(len, state); - 
for _ in 0..len { - set.insert(Decodable::decode(d)); - } - set - } -} - -impl Encodable for indexmap::IndexMap -where - K: Encodable + Hash + Eq, - V: Encodable, - S: BuildHasher, -{ - fn encode(&self, e: &mut E) { - e.emit_usize(self.len()); - for (key, val) in self.iter() { - key.encode(e); - val.encode(e); - } - } -} - -impl Decodable for indexmap::IndexMap -where - K: Decodable + Hash + Eq, - V: Decodable, - S: BuildHasher + Default, -{ - fn decode(d: &mut D) -> indexmap::IndexMap { - let len = d.read_usize(); - let state = Default::default(); - let mut map = indexmap::IndexMap::with_capacity_and_hasher(len, state); - for _ in 0..len { - let key = Decodable::decode(d); - let val = Decodable::decode(d); - map.insert(key, val); - } - map - } -} - -impl Encodable for indexmap::IndexSet -where - T: Encodable + Hash + Eq, - S: BuildHasher, -{ - fn encode(&self, s: &mut E) { - s.emit_usize(self.len()); - for e in self.iter() { - e.encode(s); - } - } -} - -impl Decodable for indexmap::IndexSet -where - T: Decodable + Hash + Eq, - S: BuildHasher + Default, -{ - fn decode(d: &mut D) -> indexmap::IndexSet { - let len = d.read_usize(); - let state = Default::default(); - let mut set = indexmap::IndexSet::with_capacity_and_hasher(len, state); - for _ in 0..len { - set.insert(Decodable::decode(d)); - } - set - } -} - -impl> Encodable for Rc<[T]> { - fn encode(&self, s: &mut E) { - let slice: &[T] = self; - slice.encode(s); - } -} - -impl> Decodable for Rc<[T]> { - fn decode(d: &mut D) -> Rc<[T]> { - let vec: Vec = Decodable::decode(d); - vec.into() - } -} - -impl> Encodable for Arc<[T]> { - fn encode(&self, s: &mut E) { - let slice: &[T] = self; - slice.encode(s); - } -} - -impl> Decodable for Arc<[T]> { - fn decode(d: &mut D) -> Arc<[T]> { - let vec: Vec = Decodable::decode(d); - vec.into() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,25 +1,22 @@ //! Support code for encoding and decoding types. -/* -Core encoding and decoding interfaces. 
-*/ - #![doc( html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", html_playground_url = "https://play.rust-lang.org/", test(attr(allow(unused_variables), deny(warnings))) )] -#![feature(never_type)] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![feature(allocator_api)] #![feature(associated_type_bounds)] -#![feature(min_specialization)] +#![feature(const_option)] #![feature(core_intrinsics)] -#![feature(maybe_uninit_slice)] -#![feature(new_uninit)] -#![feature(allocator_api)] +#![feature(inline_const)] +#![feature(min_specialization)] +#![feature(never_type)] #![feature(ptr_sub_ptr)] #![feature(slice_first_last_chunk)] -#![feature(inline_const)] -#![feature(const_option)] #![cfg_attr(test, feature(test))] #![allow(rustc::internal)] #![deny(rustc::untranslatable_diagnostic)] @@ -27,7 +24,6 @@ pub use self::serialize::{Decodable, Decoder, Encodable, Encoder}; -mod collection_impls; mod serialize; pub mod leb128; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/serialize.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/serialize.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/serialize.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_serialize/src/serialize.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,12 +1,15 @@ //! Support code for encoding and decoding types. -use std::alloc::Allocator; +use smallvec::{Array, SmallVec}; use std::borrow::Cow; use std::cell::{Cell, RefCell}; +use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet, VecDeque}; +use std::hash::{BuildHasher, Hash}; use std::marker::PhantomData; use std::path; use std::rc::Rc; use std::sync::Arc; +use thin_vec::ThinVec; /// A byte that [cannot occur in UTF8 sequences][utf8]. Used to mark the end of a string. /// This way we can skip validation and still be relatively sure that deserialization @@ -273,9 +276,9 @@ } } -impl> Decodable for Box<[T], A> { - fn decode(d: &mut D) -> Box<[T], A> { - let v: Vec = Decodable::decode(d); +impl> Decodable for Box<[T]> { + fn decode(d: &mut D) -> Box<[T]> { + let v: Vec = Decodable::decode(d); v.into_boxed_slice() } } @@ -303,33 +306,20 @@ impl> Encodable for Vec { fn encode(&self, s: &mut S) { - let slice: &[T] = self; - slice.encode(s); + self.as_slice().encode(s); } } -impl, A: Allocator + Default> Decodable for Vec { - default fn decode(d: &mut D) -> Vec { +impl> Decodable for Vec { + default fn decode(d: &mut D) -> Vec { let len = d.read_usize(); - let allocator = A::default(); - // SAFETY: we set the capacity in advance, only write elements, and - // only set the length at the end once the writing has succeeded. 
- let mut vec = Vec::with_capacity_in(len, allocator); - unsafe { - let ptr: *mut T = vec.as_mut_ptr(); - for i in 0..len { - std::ptr::write(ptr.add(i), Decodable::decode(d)); - } - vec.set_len(len); - } - vec + (0..len).map(|_| Decodable::decode(d)).collect() } } impl, const N: usize> Encodable for [T; N] { fn encode(&self, s: &mut S) { - let slice: &[T] = self; - slice.encode(s); + self.as_slice().encode(s); } } @@ -497,15 +487,233 @@ } } -impl, A: Allocator + Default> Encodable for Box { +impl> Encodable for Box { fn encode(&self, s: &mut S) { (**self).encode(s) } } -impl> Decodable for Box { - fn decode(d: &mut D) -> Box { - let allocator = A::default(); - Box::new_in(Decodable::decode(d), allocator) +impl> Decodable for Box { + fn decode(d: &mut D) -> Box { + Box::new(Decodable::decode(d)) + } +} + +impl>> Encodable for SmallVec { + fn encode(&self, s: &mut S) { + self.as_slice().encode(s); + } +} + +impl>> Decodable for SmallVec { + fn decode(d: &mut D) -> SmallVec { + let len = d.read_usize(); + (0..len).map(|_| Decodable::decode(d)).collect() + } +} + +impl> Encodable for ThinVec { + fn encode(&self, s: &mut S) { + self.as_slice().encode(s); + } +} + +impl> Decodable for ThinVec { + fn decode(d: &mut D) -> ThinVec { + let len = d.read_usize(); + (0..len).map(|_| Decodable::decode(d)).collect() + } +} + +impl> Encodable for VecDeque { + fn encode(&self, s: &mut S) { + s.emit_usize(self.len()); + for e in self.iter() { + e.encode(s); + } + } +} + +impl> Decodable for VecDeque { + fn decode(d: &mut D) -> VecDeque { + let len = d.read_usize(); + (0..len).map(|_| Decodable::decode(d)).collect() + } +} + +impl Encodable for BTreeMap +where + K: Encodable + PartialEq + Ord, + V: Encodable, +{ + fn encode(&self, e: &mut S) { + e.emit_usize(self.len()); + for (key, val) in self.iter() { + key.encode(e); + val.encode(e); + } + } +} + +impl Decodable for BTreeMap +where + K: Decodable + PartialEq + Ord, + V: Decodable, +{ + fn decode(d: &mut D) -> BTreeMap { + let len = d.read_usize(); + (0..len).map(|_| (Decodable::decode(d), Decodable::decode(d))).collect() + } +} + +impl Encodable for BTreeSet +where + T: Encodable + PartialEq + Ord, +{ + fn encode(&self, s: &mut S) { + s.emit_usize(self.len()); + for e in self.iter() { + e.encode(s); + } + } +} + +impl Decodable for BTreeSet +where + T: Decodable + PartialEq + Ord, +{ + fn decode(d: &mut D) -> BTreeSet { + let len = d.read_usize(); + (0..len).map(|_| Decodable::decode(d)).collect() + } +} + +impl Encodable for HashMap +where + K: Encodable + Eq, + V: Encodable, + S: BuildHasher, +{ + fn encode(&self, e: &mut E) { + e.emit_usize(self.len()); + for (key, val) in self.iter() { + key.encode(e); + val.encode(e); + } + } +} + +impl Decodable for HashMap +where + K: Decodable + Hash + Eq, + V: Decodable, + S: BuildHasher + Default, +{ + fn decode(d: &mut D) -> HashMap { + let len = d.read_usize(); + (0..len).map(|_| (Decodable::decode(d), Decodable::decode(d))).collect() + } +} + +impl Encodable for HashSet +where + T: Encodable + Eq, + S: BuildHasher, +{ + fn encode(&self, s: &mut E) { + s.emit_usize(self.len()); + for e in self.iter() { + e.encode(s); + } + } +} + +impl Decodable for HashSet +where + T: Decodable + Hash + Eq, + S: BuildHasher + Default, +{ + fn decode(d: &mut D) -> HashSet { + let len = d.read_usize(); + (0..len).map(|_| Decodable::decode(d)).collect() + } +} + +impl Encodable for indexmap::IndexMap +where + K: Encodable + Hash + Eq, + V: Encodable, + S: BuildHasher, +{ + fn encode(&self, e: &mut E) { + 
e.emit_usize(self.len()); + for (key, val) in self.iter() { + key.encode(e); + val.encode(e); + } + } +} + +impl Decodable for indexmap::IndexMap +where + K: Decodable + Hash + Eq, + V: Decodable, + S: BuildHasher + Default, +{ + fn decode(d: &mut D) -> indexmap::IndexMap { + let len = d.read_usize(); + (0..len).map(|_| (Decodable::decode(d), Decodable::decode(d))).collect() + } +} + +impl Encodable for indexmap::IndexSet +where + T: Encodable + Hash + Eq, + S: BuildHasher, +{ + fn encode(&self, s: &mut E) { + s.emit_usize(self.len()); + for e in self.iter() { + e.encode(s); + } + } +} + +impl Decodable for indexmap::IndexSet +where + T: Decodable + Hash + Eq, + S: BuildHasher + Default, +{ + fn decode(d: &mut D) -> indexmap::IndexSet { + let len = d.read_usize(); + (0..len).map(|_| Decodable::decode(d)).collect() + } +} + +impl> Encodable for Rc<[T]> { + fn encode(&self, s: &mut E) { + let slice: &[T] = self; + slice.encode(s); + } +} + +impl> Decodable for Rc<[T]> { + fn decode(d: &mut D) -> Rc<[T]> { + let vec: Vec = Decodable::decode(d); + vec.into() + } +} + +impl> Encodable for Arc<[T]> { + fn encode(&self, s: &mut E) { + let slice: &[T] = self; + slice.encode(s); + } +} + +impl> Decodable for Arc<[T]> { + fn decode(d: &mut D) -> Arc<[T]> { + let vec: Vec = Decodable::decode(d); + vec.into() } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,26 +4,30 @@ edition = "2021" [dependencies] +# tidy-alphabetical-start bitflags = "1.2.1" getopts = "0.2" -rustc_macros = { path = "../rustc_macros" } -tracing = "0.1" +rustc_ast = { path = "../rustc_ast" } +rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_feature = { path = "../rustc_feature" } -rustc_hir = { path = "../rustc_hir" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_target = { path = "../rustc_target" } -rustc_serialize = { path = "../rustc_serialize" } -rustc_data_structures = { path = "../rustc_data_structures" } -rustc_span = { path = "../rustc_span" } rustc_fs_util = { path = "../rustc_fs_util" } -rustc_ast = { path = "../rustc_ast" } +rustc_hir = { path = "../rustc_hir" } rustc_lint_defs = { path = "../rustc_lint_defs" } +rustc_macros = { path = "../rustc_macros" } +rustc_serialize = { path = "../rustc_serialize" } +rustc_span = { path = "../rustc_span" } +rustc_target = { path = "../rustc_target" } smallvec = "1.8.1" termize = "0.1.1" +tracing = "0.1" +# tidy-alphabetical-end [target.'cfg(unix)'.dependencies] +# tidy-alphabetical-start libc = "0.2" +# tidy-alphabetical-end [target.'cfg(windows)'.dependencies.windows] version = "0.48.0" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -5,9 +5,6 @@ session_cannot_mix_and_match_sanitizers = `-Zsanitizer={$first}` is incompatible with `-Zsanitizer={$second}` -session_cgu_not_recorded = - CGU-reuse for `{$cgu_user_name}` is (mangled: `{$cgu_name}`) was not recorded - 
session_cli_feature_diagnostic_help = add `-Zcrate-attr="feature({$feature})"` to the command-line options to enable @@ -25,8 +22,6 @@ session_feature_diagnostic_help = add `#![feature({$feature})]` to the crate attributes to enable -session_feature_gate_error = {$explain} - session_file_is_not_writeable = output file {$file} is not writeable -- check its permissions session_file_write_fail = failed to write `{$path}` due to error `{$err}` @@ -36,12 +31,6 @@ session_incompatible_linker_flavor = linker flavor `{$flavor}` is incompatible with the current target .note = compatible flavors are: {$compatible_list} -session_incorrect_cgu_reuse_type = - CGU-reuse for `{$cgu_user_name}` is `{$actual_reuse}` but should be {$at_least -> - [one] {"at least "} - *[other] {""} - }`{$expected_reuse}` - session_instrumentation_not_supported = {$us} instrumentation is not supported for this target session_int_literal_too_large = integer literal is too large diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/cgu_reuse_tracker.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/cgu_reuse_tracker.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/cgu_reuse_tracker.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/cgu_reuse_tracker.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,136 +0,0 @@ -//! Some facilities for tracking how codegen-units are reused during incremental -//! compilation. This is used for incremental compilation tests and debug -//! output. - -use crate::errors::{CguNotRecorded, IncorrectCguReuseType}; -use crate::Session; -use rustc_data_structures::fx::FxHashMap; -use rustc_errors::{DiagnosticArgValue, IntoDiagnosticArg}; -use rustc_span::{Span, Symbol}; -use std::borrow::Cow; -use std::fmt::{self}; -use std::sync::{Arc, Mutex}; - -#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] -pub enum CguReuse { - No, - PreLto, - PostLto, -} - -impl fmt::Display for CguReuse { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - CguReuse::No => write!(f, "No"), - CguReuse::PreLto => write!(f, "PreLto "), - CguReuse::PostLto => write!(f, "PostLto "), - } - } -} - -impl IntoDiagnosticArg for CguReuse { - fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> { - DiagnosticArgValue::Str(Cow::Owned(self.to_string())) - } -} - -#[derive(Copy, Clone, Debug, PartialEq)] -pub enum ComparisonKind { - Exact, - AtLeast, -} - -struct TrackerData { - actual_reuse: FxHashMap, - expected_reuse: FxHashMap, -} - -// Span does not implement `Send`, so we can't just store it in the shared -// `TrackerData` object. Instead of splitting up `TrackerData` into shared and -// non-shared parts (which would be complicated), we just mark the `Span` here -// explicitly as `Send`. That's safe because the span data here is only ever -// accessed from the main thread. 
-struct SendSpan(Span); -unsafe impl Send for SendSpan {} - -#[derive(Clone)] -pub struct CguReuseTracker { - data: Option>>, -} - -impl CguReuseTracker { - pub fn new() -> CguReuseTracker { - let data = - TrackerData { actual_reuse: Default::default(), expected_reuse: Default::default() }; - - CguReuseTracker { data: Some(Arc::new(Mutex::new(data))) } - } - - pub fn new_disabled() -> CguReuseTracker { - CguReuseTracker { data: None } - } - - pub fn set_actual_reuse(&self, cgu_name: &str, kind: CguReuse) { - if let Some(ref data) = self.data { - debug!("set_actual_reuse({cgu_name:?}, {kind:?})"); - - let prev_reuse = data.lock().unwrap().actual_reuse.insert(cgu_name.to_string(), kind); - - if let Some(prev_reuse) = prev_reuse { - // The only time it is legal to overwrite reuse state is when - // we discover during ThinLTO that we can actually reuse the - // post-LTO version of a CGU. - assert_eq!(prev_reuse, CguReuse::PreLto); - } - } - } - - pub fn set_expectation( - &self, - cgu_name: Symbol, - cgu_user_name: &str, - error_span: Span, - expected_reuse: CguReuse, - comparison_kind: ComparisonKind, - ) { - if let Some(ref data) = self.data { - debug!("set_expectation({cgu_name:?}, {expected_reuse:?}, {comparison_kind:?})"); - let mut data = data.lock().unwrap(); - - data.expected_reuse.insert( - cgu_name.to_string(), - (cgu_user_name.to_string(), SendSpan(error_span), expected_reuse, comparison_kind), - ); - } - } - - pub fn check_expected_reuse(&self, sess: &Session) { - if let Some(ref data) = self.data { - let data = data.lock().unwrap(); - - for (cgu_name, &(ref cgu_user_name, ref error_span, expected_reuse, comparison_kind)) in - &data.expected_reuse - { - if let Some(&actual_reuse) = data.actual_reuse.get(cgu_name) { - let (error, at_least) = match comparison_kind { - ComparisonKind::Exact => (expected_reuse != actual_reuse, false), - ComparisonKind::AtLeast => (actual_reuse < expected_reuse, true), - }; - - if error { - let at_least = if at_least { 1 } else { 0 }; - IncorrectCguReuseType { - span: error_span.0, - cgu_user_name, - actual_reuse, - expected_reuse, - at_least, - }; - } - } else { - sess.emit_fatal(CguNotRecorded { cgu_user_name, cgu_name }); - } - } - } - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/code_stats.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/code_stats.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/code_stats.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/code_stats.rs 2023-12-21 16:55:28.000000000 +0000 @@ -24,7 +24,7 @@ pub enum FieldKind { AdtField, Upvar, - GeneratorLocal, + CoroutineLocal, } impl std::fmt::Display for FieldKind { @@ -32,7 +32,7 @@ match self { FieldKind::AdtField => write!(w, "field"), FieldKind::Upvar => write!(w, "upvar"), - FieldKind::GeneratorLocal => write!(w, "local"), + FieldKind::CoroutineLocal => write!(w, "local"), } } } @@ -52,7 +52,7 @@ Union, Enum, Closure, - Generator, + Coroutine, } #[derive(PartialEq, Eq, Hash, Debug)] @@ -105,9 +105,9 @@ // Sort variants so the largest ones are shown first. A stable sort is // used here so that source code order is preserved for all variants // that have the same size. - // Except for Generators, whose variants are already sorted according to - // their yield points in `variant_info_for_generator`. - if kind != DataTypeKind::Generator { + // Except for Coroutines, whose variants are already sorted according to + // their yield points in `variant_info_for_coroutine`. 
+ if kind != DataTypeKind::Coroutine { variants.sort_by_key(|info| cmp::Reverse(info.size)); } let info = TypeSizeInfo { @@ -160,7 +160,7 @@ let struct_like = match kind { DataTypeKind::Struct | DataTypeKind::Closure => true, - DataTypeKind::Enum | DataTypeKind::Union | DataTypeKind::Generator => false, + DataTypeKind::Enum | DataTypeKind::Union | DataTypeKind::Coroutine => false, }; for (i, variant_info) in variants.into_iter().enumerate() { let VariantInfo { ref name, kind: _, align: _, size, ref fields } = *variant_info; @@ -226,7 +226,7 @@ } } - pub fn print_vtable_sizes(&self, crate_name: &str) { + pub fn print_vtable_sizes(&self, crate_name: Symbol) { let mut infos = std::mem::take(&mut *self.vtable_sizes.lock()).into_values().collect::>(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/config.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/config.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/config.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/config.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,19 +9,18 @@ use crate::{lint, HashStableContext}; use crate::{EarlyErrorHandler, Session}; -use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet}; use rustc_data_structures::stable_hasher::{StableOrd, ToStableHashKey}; use rustc_target::abi::Align; +use rustc_target::spec::LinkSelfContainedComponents; use rustc_target::spec::{PanicStrategy, RelocModel, SanitizerSet, SplitDebuginfo}; use rustc_target::spec::{Target, TargetTriple, TargetWarnings, TARGETS}; -use crate::parse::{CrateCheckConfig, CrateConfig}; use rustc_feature::UnstableFeatures; use rustc_span::edition::{Edition, DEFAULT_EDITION, EDITION_NAME_LIST, LATEST_STABLE_EDITION}; -use rustc_span::source_map::{FileName, FilePathMapping}; +use rustc_span::source_map::FilePathMapping; use rustc_span::symbol::{sym, Symbol}; -use rustc_span::RealFileName; -use rustc_span::SourceFileHashAlgorithm; +use rustc_span::{FileName, FileNameDisplayPreference, RealFileName, SourceFileHashAlgorithm}; use rustc_errors::emitter::HumanReadableErrorType; use rustc_errors::{ColorConfig, DiagnosticArgValue, HandlerFlags, IntoDiagnosticArg}; @@ -168,6 +167,9 @@ pub enum InstrumentCoverage { /// Default `-C instrument-coverage` or `-C instrument-coverage=statement` All, + /// Additionally, instrument branches and output branch coverage. + /// `-Zunstable-options -C instrument-coverage=branch` + Branch, /// `-Zunstable-options -C instrument-coverage=except-unused-generics` ExceptUnusedGenerics, /// `-Zunstable-options -C instrument-coverage=except-unused-functions` @@ -232,63 +234,35 @@ /// Used for compatibility with the existing opt-in and target inference. pub explicitly_set: Option, - /// The components that are enabled. - components: LinkSelfContainedComponents, -} - -bitflags::bitflags! { - #[derive(Default)] - /// The `-C link-self-contained` components that can individually be enabled or disabled. - pub struct LinkSelfContainedComponents: u8 { - /// CRT objects (e.g. on `windows-gnu`, `musl`, `wasi` targets) - const CRT_OBJECTS = 1 << 0; - /// libc static library (e.g. on `musl`, `wasi` targets) - const LIBC = 1 << 1; - /// libgcc/libunwind (e.g. on `windows-gnu`, `fuchsia`, `fortanix`, `gnullvm` targets) - const UNWIND = 1 << 2; - /// Linker, dlltool, and their necessary libraries (e.g. 
on `windows-gnu` and for `rust-lld`) - const LINKER = 1 << 3; - /// Sanitizer runtime libraries - const SANITIZERS = 1 << 4; - /// Other MinGW libs and Windows import libs - const MINGW = 1 << 5; - } -} - -impl FromStr for LinkSelfContainedComponents { - type Err = (); - - fn from_str(s: &str) -> Result { - Ok(match s { - "crto" => LinkSelfContainedComponents::CRT_OBJECTS, - "libc" => LinkSelfContainedComponents::LIBC, - "unwind" => LinkSelfContainedComponents::UNWIND, - "linker" => LinkSelfContainedComponents::LINKER, - "sanitizers" => LinkSelfContainedComponents::SANITIZERS, - "mingw" => LinkSelfContainedComponents::MINGW, - _ => return Err(()), - }) - } + /// The components that are enabled on the CLI, using the `+component` syntax or one of the + /// `true` shorcuts. + enabled_components: LinkSelfContainedComponents, + + /// The components that are disabled on the CLI, using the `-component` syntax or one of the + /// `false` shortcuts. + disabled_components: LinkSelfContainedComponents, } impl LinkSelfContained { /// Incorporates an enabled or disabled component as specified on the CLI, if possible. /// For example: `+linker`, and `-crto`. - pub(crate) fn handle_cli_component(&mut self, component: &str) -> Result<(), ()> { + pub(crate) fn handle_cli_component(&mut self, component: &str) -> Option<()> { // Note that for example `-Cself-contained=y -Cself-contained=-linker` is not an explicit // set of all values like `y` or `n` used to be. Therefore, if this flag had previously been // set in bulk with its historical values, then manually setting a component clears that // `explicitly_set` state. if let Some(component_to_enable) = component.strip_prefix('+') { self.explicitly_set = None; - self.components.insert(component_to_enable.parse()?); - Ok(()) + self.enabled_components + .insert(LinkSelfContainedComponents::from_str(component_to_enable)?); + Some(()) } else if let Some(component_to_disable) = component.strip_prefix('-') { self.explicitly_set = None; - self.components.remove(component_to_disable.parse()?); - Ok(()) + self.disabled_components + .insert(LinkSelfContainedComponents::from_str(component_to_disable)?); + Some(()) } else { - Err(()) + None } } @@ -296,11 +270,14 @@ /// purposes. pub(crate) fn set_all_explicitly(&mut self, enabled: bool) { self.explicitly_set = Some(enabled); - self.components = if enabled { - LinkSelfContainedComponents::all() + + if enabled { + self.enabled_components = LinkSelfContainedComponents::all(); + self.disabled_components = LinkSelfContainedComponents::empty(); } else { - LinkSelfContainedComponents::empty() - }; + self.enabled_components = LinkSelfContainedComponents::empty(); + self.disabled_components = LinkSelfContainedComponents::all(); + } } /// Helper creating a fully enabled `LinkSelfContained` instance. Used in tests. @@ -314,13 +291,32 @@ /// components was set individually. This would also require the `-Zunstable-options` flag, to /// be allowed. fn are_unstable_variants_set(&self) -> bool { - let any_component_set = !self.components.is_empty(); + let any_component_set = + !self.enabled_components.is_empty() || !self.disabled_components.is_empty(); self.explicitly_set.is_none() && any_component_set } - /// Returns whether the self-contained linker component is enabled. - pub fn linker(&self) -> bool { - self.components.contains(LinkSelfContainedComponents::LINKER) + /// Returns whether the self-contained linker component was enabled on the CLI, using the + /// `-C link-self-contained=+linker` syntax, or one of the `true` shorcuts. 
+ pub fn is_linker_enabled(&self) -> bool { + self.enabled_components.contains(LinkSelfContainedComponents::LINKER) + } + + /// Returns whether the self-contained linker component was disabled on the CLI, using the + /// `-C link-self-contained=-linker` syntax, or one of the `false` shorcuts. + pub fn is_linker_disabled(&self) -> bool { + self.disabled_components.contains(LinkSelfContainedComponents::LINKER) + } + + /// Returns CLI inconsistencies to emit errors: individual components were both enabled and + /// disabled. + fn check_consistency(&self) -> Option { + if self.explicitly_set.is_some() { + None + } else { + let common = self.enabled_components.intersection(self.disabled_components); + if common.is_empty() { None } else { Some(common) } + } } } @@ -813,7 +809,6 @@ FileName::Anon(_) => None, FileName::MacroExpansion(_) => None, FileName::ProcMacroSourceCode(_) => None, - FileName::CfgSpec(_) => None, FileName::CliCrateAttr(_) => None, FileName::Custom(_) => None, FileName::DocTest(path, _) => Some(path), @@ -1024,6 +1019,32 @@ } } +bitflags::bitflags! { + /// Scopes used to determined if it need to apply to --remap-path-prefix + pub struct RemapPathScopeComponents: u8 { + /// Apply remappings to the expansion of std::file!() macro + const MACRO = 1 << 0; + /// Apply remappings to printed compiler diagnostics + const DIAGNOSTICS = 1 << 1; + /// Apply remappings to debug information only when they are written to + /// compiled executables or libraries, but not when they are in split + /// debuginfo files + const UNSPLIT_DEBUGINFO = 1 << 2; + /// Apply remappings to debug information only when they are written to + /// split debug information files, but not in compiled executables or + /// libraries + const SPLIT_DEBUGINFO = 1 << 3; + /// Apply remappings to the paths pointing to split debug information + /// files. Does nothing when these files are not generated. + const SPLIT_DEBUGINFO_PATH = 1 << 4; + + /// An alias for macro,unsplit-debuginfo,split-debuginfo-path. This + /// ensures all paths in compiled executables or libraries are remapped + /// but not elsewhere. + const OBJECT = Self::MACRO.bits | Self::UNSPLIT_DEBUGINFO.bits | Self::SPLIT_DEBUGINFO_PATH.bits; + } +} + pub fn host_triple() -> &'static str { // Get the host triple out of the build environment. This ensures that our // idea of the host triple is the same as for the set of libraries we've @@ -1036,6 +1057,22 @@ (option_env!("CFG_COMPILER_HOST_TRIPLE")).expect("CFG_COMPILER_HOST_TRIPLE") } +fn file_path_mapping( + remap_path_prefix: Vec<(PathBuf, PathBuf)>, + unstable_opts: &UnstableOptions, +) -> FilePathMapping { + FilePathMapping::new( + remap_path_prefix.clone(), + if unstable_opts.remap_path_scope.contains(RemapPathScopeComponents::DIAGNOSTICS) + && !remap_path_prefix.is_empty() + { + FileNameDisplayPreference::Remapped + } else { + FileNameDisplayPreference::Local + }, + ) +} + impl Default for Options { fn default() -> Options { Options { @@ -1053,6 +1090,7 @@ target_triple: TargetTriple::from_triple(host_triple()), test: false, incremental: None, + untracked_state_hash: Default::default(), unstable_opts: Default::default(), prints: Vec::new(), cg: Default::default(), @@ -1090,7 +1128,7 @@ } pub fn file_path_mapping(&self) -> FilePathMapping { - FilePathMapping::new(self.remap_path_prefix.clone()) + file_path_mapping(self.remap_path_prefix.clone(), &self.unstable_opts) } /// Returns `true` if there will be an output file generated. 
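Editorial note, not part of the diff: the hunks above rework `-C link-self-contained` so that `+component` / `-component` flags are tracked in separate enabled and disabled sets, with a consistency check over their intersection. The following is a minimal, self-contained sketch of that bookkeeping, using a plain u8 bitmask and a tiny parser as stand-ins for `LinkSelfContainedComponents`; the constant names and `main` are invented for illustration only.

const LINKER: u8 = 1 << 0;
const CRT_OBJECTS: u8 = 1 << 1;

#[derive(Default)]
struct SelfContained {
    enabled: u8,
    disabled: u8,
}

impl SelfContained {
    // `+component` goes into the enabled set, `-component` into the disabled set,
    // mirroring the reworked `handle_cli_component` shown in the diff.
    fn handle_cli_component(&mut self, component: &str) -> Option<()> {
        let parse = |name: &str| match name {
            "linker" => Some(LINKER),
            "crto" => Some(CRT_OBJECTS),
            _ => None,
        };
        if let Some(name) = component.strip_prefix('+') {
            self.enabled |= parse(name)?;
            Some(())
        } else if let Some(name) = component.strip_prefix('-') {
            self.disabled |= parse(name)?;
            Some(())
        } else {
            None
        }
    }

    // A component present in both sets is an inconsistency, analogous to the
    // new `check_consistency` helper that intersects the two component sets.
    fn check_consistency(&self) -> Option<u8> {
        let common = self.enabled & self.disabled;
        if common == 0 { None } else { Some(common) }
    }
}

fn main() {
    let mut sc = SelfContained::default();
    sc.handle_cli_component("+linker").unwrap();
    sc.handle_cli_component("-linker").unwrap();
    assert!(sc.check_consistency().is_some());
    println!("`+linker` and `-linker` together are reported as inconsistent");
}

In the real code the error path feeds the intersection into an early error listing the offending component names; the sketch only returns the overlapping bits.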
@@ -1208,8 +1246,8 @@ CrateType::Rlib } -fn default_configuration(sess: &Session) -> CrateConfig { - // NOTE: This should be kept in sync with `CrateCheckConfig::fill_well_known` below. +fn default_configuration(sess: &Session) -> Cfg { + // NOTE: This should be kept in sync with `CheckCfg::fill_well_known` below. let end = &sess.target.endian; let arch = &sess.target.arch; let wordsz = sess.target.pointer_width.to_string(); @@ -1225,7 +1263,7 @@ sess.emit_fatal(err); }); - let mut ret = CrateConfig::default(); + let mut ret = Cfg::default(); ret.reserve(7); // the minimum number of insertions // Target bindings. ret.insert((sym::target_os, Some(Symbol::intern(os)))); @@ -1318,55 +1356,22 @@ ret } -/// Converts the crate `cfg!` configuration from `String` to `Symbol`. -/// `rustc_interface::interface::Config` accepts this in the compiler configuration, -/// but the symbol interner is not yet set up then, so we must convert it later. -pub fn to_crate_config(cfg: FxHashSet<(String, Option)>) -> CrateConfig { - cfg.into_iter().map(|(a, b)| (Symbol::intern(&a), b.map(|b| Symbol::intern(&b)))).collect() -} - -/// The parsed `--check-cfg` options -pub struct CheckCfg { +/// The parsed `--cfg` options that define the compilation environment of the +/// crate, used to drive conditional compilation. +/// +/// An `FxIndexSet` is used to ensure deterministic ordering of error messages +/// relating to `--cfg`. +pub type Cfg = FxIndexSet<(Symbol, Option)>; + +/// The parsed `--check-cfg` options. +#[derive(Default)] +pub struct CheckCfg { /// Is well known names activated pub exhaustive_names: bool, /// Is well known values activated pub exhaustive_values: bool, /// All the expected values for a config name - pub expecteds: FxHashMap>, -} - -impl Default for CheckCfg { - fn default() -> Self { - CheckCfg { - exhaustive_names: false, - exhaustive_values: false, - expecteds: FxHashMap::default(), - } - } -} - -impl CheckCfg { - fn map_data(self, f: impl Fn(T) -> O) -> CheckCfg { - CheckCfg { - exhaustive_names: self.exhaustive_names, - exhaustive_values: self.exhaustive_values, - expecteds: self - .expecteds - .into_iter() - .map(|(name, values)| { - ( - f(name), - match values { - ExpectedValues::Some(values) => ExpectedValues::Some( - values.into_iter().map(|b| b.map(|b| f(b))).collect(), - ), - ExpectedValues::Any => ExpectedValues::Any, - }, - ) - }) - .collect(), - } - } + pub expecteds: FxHashMap>, } pub enum ExpectedValues { @@ -1401,14 +1406,7 @@ } } -/// Converts the crate `--check-cfg` options from `String` to `Symbol`. -/// `rustc_interface::interface::Config` accepts this in the compiler configuration, -/// but the symbol interner is not yet set up then, so we must convert it later. -pub fn to_crate_check_config(cfg: CheckCfg) -> CrateCheckConfig { - cfg.map_data(|s| Symbol::intern(&s)) -} - -impl CrateCheckConfig { +impl CheckCfg { pub fn fill_well_known(&mut self, current_target: &Target) { if !self.exhaustive_values && !self.exhaustive_names { return; @@ -1548,7 +1546,7 @@ } } -pub fn build_configuration(sess: &Session, mut user_cfg: CrateConfig) -> CrateConfig { +pub fn build_configuration(sess: &Session, mut user_cfg: Cfg) -> Cfg { // Combine the configuration requested by the session (command line) with // some default and generated configuration items. 
let default_cfg = default_configuration(sess); @@ -2479,7 +2477,7 @@ let mut error = handler.early_struct_error(format!( "crate name `{name}` passed to `--extern` is not a valid ASCII identifier" )); - let adjusted_name = name.replace("-", "_"); + let adjusted_name = name.replace('-', "_"); if crate::utils::is_ascii_ident(&adjusted_name) { error.help(format!( "consider replacing the dashes with underscores: `{adjusted_name}`" @@ -2675,53 +2673,40 @@ ); } - // Handle both `-Z symbol-mangling-version` and `-C symbol-mangling-version`; the latter takes - // precedence. - match (cg.symbol_mangling_version, unstable_opts.symbol_mangling_version) { - (Some(smv_c), Some(smv_z)) if smv_c != smv_z => { - handler.early_error( - "incompatible values passed for `-C symbol-mangling-version` \ - and `-Z symbol-mangling-version`", - ); - } - (Some(SymbolManglingVersion::V0), _) => {} - (Some(_), _) if !unstable_opts.unstable_options => { - handler - .early_error("`-C symbol-mangling-version=legacy` requires `-Z unstable-options`"); - } - (None, None) => {} - (None, smv) => { - handler.early_warn( - "`-Z symbol-mangling-version` is deprecated; use `-C symbol-mangling-version`", - ); - cg.symbol_mangling_version = smv; + // Check for unstable values of `-C symbol-mangling-version`. + // This is what prevents them from being used on stable compilers. + match cg.symbol_mangling_version { + // Stable values: + None | Some(SymbolManglingVersion::V0) => {} + // Unstable values: + Some(SymbolManglingVersion::Legacy) => { + if !unstable_opts.unstable_options { + handler.early_error( + "`-C symbol-mangling-version=legacy` requires `-Z unstable-options`", + ); + } } - _ => {} } - // Handle both `-Z instrument-coverage` and `-C instrument-coverage`; the latter takes - // precedence. - match (cg.instrument_coverage, unstable_opts.instrument_coverage) { - (Some(ic_c), Some(ic_z)) if ic_c != ic_z => { - handler.early_error( - "incompatible values passed for `-C instrument-coverage` \ - and `-Z instrument-coverage`", - ); - } - (Some(InstrumentCoverage::Off | InstrumentCoverage::All), _) => {} - (Some(_), _) if !unstable_opts.unstable_options => { - handler.early_error("`-C instrument-coverage=except-*` requires `-Z unstable-options`"); - } - (None, None) => {} - (None, ic) => { - handler - .early_warn("`-Z instrument-coverage` is deprecated; use `-C instrument-coverage`"); - cg.instrument_coverage = ic; + // Check for unstable values of `-C instrument-coverage`. + // This is what prevents them from being used on stable compilers. 
+ match cg.instrument_coverage { + // Stable values: + InstrumentCoverage::All | InstrumentCoverage::Off => {} + // Unstable values: + InstrumentCoverage::Branch + | InstrumentCoverage::ExceptUnusedFunctions + | InstrumentCoverage::ExceptUnusedGenerics => { + if !unstable_opts.unstable_options { + handler.early_error( + "`-C instrument-coverage=branch` and `-C instrument-coverage=except-*` \ + require `-Z unstable-options`", + ); + } } - _ => {} } - if cg.instrument_coverage.is_some() && cg.instrument_coverage != Some(InstrumentCoverage::Off) { + if cg.instrument_coverage != InstrumentCoverage::Off { if cg.profile_generate.enabled() || cg.profile_use.is_some() { handler.early_error( "option `-C instrument-coverage` is not compatible with either `-C profile-use` \ @@ -2759,9 +2744,8 @@ } // For testing purposes, until we have more feedback about these options: ensure `-Z - // unstable-options` is required when using the unstable `-C link-self-contained` options, like - // `-C link-self-contained=+linker`, and when using the unstable `-C linker-flavor` options, like - // `-C linker-flavor=gnu-lld-cc`. + // unstable-options` is required when using the unstable `-C link-self-contained` and `-C + // linker-flavor` options. if !nightly_options::is_unstable_enabled(matches) { let uses_unstable_self_contained_option = cg.link_self_contained.are_unstable_variants_set(); @@ -2783,6 +2767,19 @@ } } + // Check `-C link-self-contained` for consistency: individual components cannot be both enabled + // and disabled at the same time. + if let Some(erroneous_components) = cg.link_self_contained.check_consistency() { + let names: String = erroneous_components + .into_iter() + .map(|c| c.as_str().unwrap()) + .intersperse(", ") + .collect(); + handler.early_error(format!( + "some `-C link-self-contained` components were both enabled and disabled: {names}" + )); + } + let prints = collect_print_requests(handler, &mut cg, &mut unstable_opts, matches); let cg = cg; @@ -2860,7 +2857,7 @@ handler.early_error(format!("Current directory is invalid: {e}")); }); - let remap = FilePathMapping::new(remap_path_prefix.clone()); + let remap = file_path_mapping(remap_path_prefix.clone(), &unstable_opts); let (path, remapped) = remap.map_prefix(&working_dir); let working_dir = if remapped { RealFileName::Remapped { virtual_name: path.into_owned(), local_path: Some(working_dir) } @@ -2883,6 +2880,7 @@ target_triple, test, incremental, + untracked_state_hash: Default::default(), unstable_opts, prints, cg, @@ -2919,8 +2917,8 @@ "expanded" => Source(PpSourceMode::Expanded), "expanded,identified" => Source(PpSourceMode::ExpandedIdentified), "expanded,hygiene" => Source(PpSourceMode::ExpandedHygiene), - "ast-tree" => AstTree(PpAstTreeMode::Normal), - "ast-tree,expanded" => AstTree(PpAstTreeMode::Expanded), + "ast-tree" => AstTree, + "ast-tree,expanded" => AstTreeExpanded, "hir" => Hir(PpHirMode::Normal), "hir,identified" => Hir(PpHirMode::Identified), "hir,typed" => Hir(PpHirMode::Typed), @@ -3078,14 +3076,6 @@ } #[derive(Copy, Clone, PartialEq, Debug)] -pub enum PpAstTreeMode { - /// `-Zunpretty=ast` - Normal, - /// `-Zunpretty=ast,expanded` - Expanded, -} - -#[derive(Copy, Clone, PartialEq, Debug)] pub enum PpHirMode { /// `-Zunpretty=hir` Normal, @@ -3100,7 +3090,10 @@ /// Options that print the source code, i.e. 
/// `-Zunpretty=normal` and `-Zunpretty=expanded` Source(PpSourceMode), - AstTree(PpAstTreeMode), + /// `-Zunpretty=ast-tree` + AstTree, + /// `-Zunpretty=ast-tree,expanded` + AstTreeExpanded, /// Options that print the HIR, i.e. `-Zunpretty=hir` Hir(PpHirMode), /// `-Zunpretty=hir-tree` @@ -3120,10 +3113,10 @@ use PpMode::*; use PpSourceMode::*; match *self { - Source(Normal | Identified) | AstTree(PpAstTreeMode::Normal) => false, + Source(Normal | Identified) | AstTree => false, Source(Expanded | ExpandedIdentified | ExpandedHygiene) - | AstTree(PpAstTreeMode::Expanded) + | AstTreeExpanded | Hir(_) | HirTree | ThirTree @@ -3135,7 +3128,7 @@ pub fn needs_hir(&self) -> bool { use PpMode::*; match *self { - Source(_) | AstTree(_) => false, + Source(_) | AstTree | AstTreeExpanded => false, Hir(_) | HirTree | ThirTree | ThirFlat | Mir | MirCFG => true, } @@ -3143,7 +3136,7 @@ pub fn needs_analysis(&self) -> bool { use PpMode::*; - matches!(*self, Mir | MirCFG | ThirTree | ThirFlat) + matches!(*self, Hir(PpHirMode::Typed) | Mir | MirCFG | ThirTree | ThirFlat) } } @@ -3168,14 +3161,15 @@ pub(crate) mod dep_tracking { use super::{ BranchProtection, CFGuard, CFProtection, CrateType, DebugInfo, DebugInfoCompression, - ErrorOutputType, InstrumentCoverage, InstrumentXRay, LdImpl, LinkerPluginLto, + ErrorOutputType, InliningThreshold, InstrumentCoverage, InstrumentXRay, LinkerPluginLto, LocationDetail, LtoCli, OomStrategy, OptLevel, OutFileName, OutputType, OutputTypes, - Passes, ResolveDocLinks, SourceFileHashAlgorithm, SplitDwarfKind, SwitchWithOptPath, - SymbolManglingVersion, TraitSolver, TrimmedDefPaths, + Polonius, RemapPathScopeComponents, ResolveDocLinks, SourceFileHashAlgorithm, + SplitDwarfKind, SwitchWithOptPath, SymbolManglingVersion, TraitSolver, TrimmedDefPaths, }; use crate::lint; use crate::options::WasiExecModel; - use crate::utils::{NativeLib, NativeLibKind}; + use crate::utils::NativeLib; + use rustc_data_structures::stable_hasher::Hash64; use rustc_errors::LanguageIdentifier; use rustc_feature::UnstableFeatures; use rustc_span::edition::Edition; @@ -3231,6 +3225,7 @@ usize, NonZeroUsize, u64, + Hash64, String, PathBuf, lint::Level, @@ -3245,14 +3240,12 @@ MergeFunctions, PanicStrategy, RelroLevel, - Passes, OptLevel, LtoCli, DebugInfo, DebugInfoCompression, UnstableFeatures, NativeLib, - NativeLibKind, SanitizerSet, CFGuard, CFProtection, @@ -3265,9 +3258,9 @@ StackProtector, SwitchWithOptPath, SymbolManglingVersion, + RemapPathScopeComponents, SourceFileHashAlgorithm, TrimmedDefPaths, - Option, OutFileName, OutputType, RealFileName, @@ -3276,6 +3269,8 @@ OomStrategy, LanguageIdentifier, TraitSolver, + Polonius, + InliningThreshold, ); impl DepTrackingHash for (T1, T2) @@ -3414,3 +3409,43 @@ } } } + +/// `-Zpolonius` values, enabling the borrow checker polonius analysis, and which version: legacy, +/// or future prototype. +#[derive(Clone, Copy, PartialEq, Hash, Debug, Default)] +pub enum Polonius { + /// The default value: disabled. + #[default] + Off, + + /// Legacy version, using datalog and the `polonius-engine` crate. Historical value for `-Zpolonius`. + Legacy, + + /// In-tree prototype, extending the NLL infrastructure. 
+ Next, +} + +impl Polonius { + /// Returns whether the legacy version of polonius is enabled + pub fn is_legacy_enabled(&self) -> bool { + matches!(self, Polonius::Legacy) + } + + /// Returns whether the "next" version of polonius is enabled + pub fn is_next_enabled(&self) -> bool { + matches!(self, Polonius::Next) + } +} + +#[derive(Clone, Copy, PartialEq, Hash, Debug)] +pub enum InliningThreshold { + Always, + Sometimes(usize), + Never, +} + +impl Default for InliningThreshold { + fn default() -> Self { + Self::Sometimes(100) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,5 @@ use std::num::NonZeroU32; -use crate::cgu_reuse_tracker::CguReuse; use crate::parse::ParseSess; use rustc_ast::token; use rustc_ast::util::literal::LitError; @@ -9,24 +8,6 @@ use rustc_span::{BytePos, Span, Symbol}; use rustc_target::spec::{SplitDebuginfo, StackProtector, TargetTriple}; -#[derive(Diagnostic)] -#[diag(session_incorrect_cgu_reuse_type)] -pub struct IncorrectCguReuseType<'a> { - #[primary_span] - pub span: Span, - pub cgu_user_name: &'a str, - pub actual_reuse: CguReuse, - pub expected_reuse: CguReuse, - pub at_least: u8, -} - -#[derive(Diagnostic)] -#[diag(session_cgu_not_recorded)] -pub struct CguNotRecorded<'a> { - pub cgu_user_name: &'a str, - pub cgu_name: &'a str, -} - pub struct FeatureGateError { pub span: MultiSpan, pub explain: DiagnosticMessage, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,6 +6,7 @@ #![feature(option_get_or_insert_default)] #![feature(rustc_attrs)] #![feature(map_many_mut)] +#![feature(iter_intersperse)] #![recursion_limit = "256"] #![allow(rustc::potential_query_instability)] #![deny(rustc::untranslatable_diagnostic)] @@ -22,7 +23,6 @@ use rustc_errors::{DiagnosticMessage, SubdiagnosticMessage}; use rustc_fluent_macro::fluent_messages; -pub mod cgu_reuse_tracker; pub mod utils; pub use lint::{declare_lint, declare_lint_pass, declare_tool_lint, impl_lint_pass}; pub use rustc_lint_defs as lint; @@ -43,6 +43,9 @@ pub use getopts; +mod version; +pub use version::RustcVersion; + fluent_messages! { "../messages.ftl" } /// Requirements for a `StableHashingContext` to be used in this crate. 
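
Editor's note: the config.rs hunks above replace the old boolean `-Zpolonius` flag with a three-state `Polonius` enum (`Off`/`Legacy`/`Next`) plus the `parse_polonius` helper shown further below in options.rs. The following standalone sketch illustrates that value mapping for readers; it is a simplified rendering, not the in-tree code — the real parser writes into a `&mut Polonius` slot and returns `bool`, whereas this version returns an `Option` for brevity, and the type/function names here are only stand-ins.

    // Minimal sketch of the new `-Zpolonius[=value]` handling (assumed simplification).
    #[derive(Clone, Copy, PartialEq, Debug, Default)]
    enum Polonius {
        /// Default: polonius-based borrow checking is disabled.
        #[default]
        Off,
        /// Datalog-based implementation; the historical meaning of bare `-Zpolonius`.
        Legacy,
        /// In-tree prototype built on the NLL infrastructure (`-Zpolonius=next`).
        Next,
    }

    /// Maps the CLI value of `-Zpolonius[=...]` to a variant.
    /// `None` means the flag was passed without a value, which keeps the legacy meaning.
    fn parse_polonius(value: Option<&str>) -> Option<Polonius> {
        match value {
            None | Some("legacy") => Some(Polonius::Legacy),
            Some("next") => Some(Polonius::Next),
            // Unknown value: reject, mirroring the `false` return of the real parser.
            _ => None,
        }
    }

    fn main() {
        assert_eq!(parse_polonius(None), Some(Polonius::Legacy));
        assert_eq!(parse_polonius(Some("next")), Some(Polonius::Next));
        assert_eq!(parse_polonius(Some("bogus")), None);
        println!("default when the flag is absent: {:?}", Polonius::default()); // Off
    }

In the actual diff, the `is_legacy_enabled`/`is_next_enabled` helpers added alongside the enum let borrow-check code branch on the selected engine without matching on the enum at every call site.
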
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/options.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/options.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/options.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/options.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,6 +4,7 @@ use crate::utils::NativeLib; use crate::{lint, EarlyErrorHandler}; use rustc_data_structures::profiling::TimePassesFormat; +use rustc_data_structures::stable_hasher::Hash64; use rustc_errors::ColorConfig; use rustc_errors::{LanguageIdentifier, TerminalUrl}; use rustc_target::spec::{CodeModel, LinkerFlavorCli, MergeFunctions, PanicStrategy, SanitizerSet}; @@ -158,6 +159,10 @@ /// directory to store intermediate results. incremental: Option [UNTRACKED], assert_incr_state: Option [UNTRACKED], + /// Set by the `Config::hash_untracked_state` callback for custom + /// drivers to invalidate the incremental cache + #[rustc_lint_opt_deny_field_access("should only be used via `Config::hash_untracked_state`")] + untracked_state_hash: Hash64 [TRACKED_NO_CRATE_HASH], unstable_opts: UnstableOptions [SUBSTRUCT], prints: Vec [UNTRACKED], @@ -289,7 +294,7 @@ // JUSTIFICATION: defn of the suggested wrapper fn #[allow(rustc::bad_opt_access)] pub fn instrument_coverage(&self) -> InstrumentCoverage { - self.instrument_coverage.unwrap_or(InstrumentCoverage::Off) + self.instrument_coverage } } @@ -384,7 +389,7 @@ pub const parse_mir_spanview: &str = "`statement` (default), `terminator`, or `block`"; pub const parse_dump_mono_stats: &str = "`markdown` (default) or `json`"; pub const parse_instrument_coverage: &str = - "`all` (default), `except-unused-generics`, `except-unused-functions`, or `off`"; + "`all` (default), `branch`, `except-unused-generics`, `except-unused-functions`, or `off`"; pub const parse_instrument_xray: &str = "either a boolean (`yes`, `no`, `on`, `off`, etc), or a comma separated list of settings: `always` or `never` (mutually exclusive), `ignore-loops`, `instruction-threshold=N`, `skip-entry`, `skip-exit`"; pub const parse_unpretty: &str = "`string` or `string=string`"; pub const parse_treat_err_as_bug: &str = "either no value or a non-negative number"; @@ -412,9 +417,9 @@ "one of supported split-debuginfo modes (`off`, `packed`, or `unpacked`)"; pub const parse_split_dwarf_kind: &str = "one of supported split dwarf modes (`split` or `single`)"; - pub const parse_gcc_ld: &str = "one of: no value, `lld`"; pub const parse_link_self_contained: &str = "one of: `y`, `yes`, `on`, `n`, `no`, `off`, or a list of enabled (`+` prefix) and disabled (`-` prefix) \ components: `crto`, `libc`, `unwind`, `linker`, `sanitizers`, `mingw`"; + pub const parse_polonius: &str = "either no value or `legacy` (the default), or `next`"; pub const parse_stack_protector: &str = "one of (`none` (default), `basic`, `strong`, or `all`)"; pub const parse_branch_protection: &str = @@ -422,6 +427,9 @@ pub const parse_proc_macro_execution_strategy: &str = "one of supported execution strategies (`same-thread`, or `cross-thread`)"; pub const parse_dump_solver_proof_tree: &str = "one of: `always`, `on-request`, `on-error`"; + pub const parse_remap_path_scope: &str = "comma separated list of scopes: `macro`, `diagnostics`, `unsplit-debuginfo`, `split-debuginfo`, `split-debuginfo-path`, `object`, `all`"; + pub const parse_inlining_threshold: &str = + "either a boolean (`yes`, `no`, `on`, `off`, etc), or a non-negative number"; } mod parse { @@ -472,6 
+480,21 @@ } } + /// Parses whether polonius is enabled, and if so, which version. + pub(crate) fn parse_polonius(slot: &mut Polonius, v: Option<&str>) -> bool { + match v { + Some("legacy") | None => { + *slot = Polonius::Legacy; + true + } + Some("next") => { + *slot = Polonius::Next; + true + } + _ => false, + } + } + /// Use this for any string option that has a static default. pub(crate) fn parse_string(slot: &mut String, v: Option<&str>) -> bool { match v { @@ -892,24 +915,25 @@ } pub(crate) fn parse_instrument_coverage( - slot: &mut Option, + slot: &mut InstrumentCoverage, v: Option<&str>, ) -> bool { if v.is_some() { - let mut bool_arg = None; - if parse_opt_bool(&mut bool_arg, v) { - *slot = bool_arg.unwrap().then_some(InstrumentCoverage::All); + let mut bool_arg = false; + if parse_bool(&mut bool_arg, v) { + *slot = if bool_arg { InstrumentCoverage::All } else { InstrumentCoverage::Off }; return true; } } let Some(v) = v else { - *slot = Some(InstrumentCoverage::All); + *slot = InstrumentCoverage::All; return true; }; - *slot = Some(match v { + *slot = match v { "all" => InstrumentCoverage::All, + "branch" => InstrumentCoverage::Branch, "except-unused-generics" | "except_unused_generics" => { InstrumentCoverage::ExceptUnusedGenerics } @@ -918,7 +942,7 @@ } "off" | "no" | "n" | "false" | "0" => InstrumentCoverage::Off, _ => return false, - }); + }; true } @@ -1075,6 +1099,30 @@ true } + pub(crate) fn parse_remap_path_scope( + slot: &mut RemapPathScopeComponents, + v: Option<&str>, + ) -> bool { + if let Some(v) = v { + *slot = RemapPathScopeComponents::empty(); + for s in v.split(',') { + *slot |= match s { + "macro" => RemapPathScopeComponents::MACRO, + "diagnostics" => RemapPathScopeComponents::DIAGNOSTICS, + "unsplit-debuginfo" => RemapPathScopeComponents::UNSPLIT_DEBUGINFO, + "split-debuginfo" => RemapPathScopeComponents::SPLIT_DEBUGINFO, + "split-debuginfo-path" => RemapPathScopeComponents::SPLIT_DEBUGINFO_PATH, + "object" => RemapPathScopeComponents::OBJECT, + "all" => RemapPathScopeComponents::all(), + _ => return false, + } + } + true + } else { + false + } + } + pub(crate) fn parse_relocation_model(slot: &mut Option, v: Option<&str>) -> bool { match v.and_then(|s| RelocModel::from_str(s).ok()) { Some(relocation_model) => *slot = Some(relocation_model), @@ -1166,7 +1214,7 @@ // 2. Parse a list of enabled and disabled components. for comp in s.split(',') { - if slot.handle_cli_component(comp).is_err() { + if slot.handle_cli_component(comp).is_none() { return false; } } @@ -1202,15 +1250,6 @@ true } - pub(crate) fn parse_gcc_ld(slot: &mut Option, v: Option<&str>) -> bool { - match v { - None => *slot = None, - Some("lld") => *slot = Some(LdImpl::Lld), - _ => return false, - } - true - } - pub(crate) fn parse_stack_protector(slot: &mut StackProtector, v: Option<&str>) -> bool { match v.and_then(|s| StackProtector::from_str(s).ok()) { Some(ssp) => *slot = ssp, @@ -1273,6 +1312,26 @@ }; true } + + pub(crate) fn parse_inlining_threshold(slot: &mut InliningThreshold, v: Option<&str>) -> bool { + match v { + Some("always" | "yes") => { + *slot = InliningThreshold::Always; + } + Some("never") => { + *slot = InliningThreshold::Never; + } + Some(v) => { + if let Ok(threshold) = v.parse() { + *slot = InliningThreshold::Sometimes(threshold); + } else { + return false; + } + } + None => return false, + } + true + } } options! 
{ @@ -1315,11 +1374,12 @@ inline_threshold: Option = (None, parse_opt_number, [TRACKED], "set the threshold for inlining a function"), #[rustc_lint_opt_deny_field_access("use `Session::instrument_coverage` instead of this field")] - instrument_coverage: Option = (None, parse_instrument_coverage, [TRACKED], + instrument_coverage: InstrumentCoverage = (InstrumentCoverage::Off, parse_instrument_coverage, [TRACKED], "instrument the generated code to support LLVM source-based code coverage \ reports (note, the compiler build config must include `profiler = true`); \ implies `-C symbol-mangling-version=v0`. Optional values are: `=all` (implicit value) + `=branch` `=except-unused-generics` `=except-unused-functions` `=off` (default)"), @@ -1441,6 +1501,8 @@ "combine CGUs into a single one"), crate_attr: Vec = (Vec::new(), parse_string_push, [TRACKED], "inject the given attribute in the crate"), + cross_crate_inline_threshold: InliningThreshold = (InliningThreshold::Sometimes(100), parse_inlining_threshold, [TRACKED], + "threshold to allow cross crate inlining of functions"), debug_info_for_profiling: bool = (false, parse_bool, [TRACKED], "emit discriminators and other data necessary for AutoFDO"), debug_macros: bool = (false, parse_bool, [TRACKED], @@ -1452,9 +1514,6 @@ dep_info_omit_d_target: bool = (false, parse_bool, [TRACKED], "in dep-info output, omit targets for tracking dependencies of the dep-info files \ themselves (default: no)"), - dep_tasks: bool = (false, parse_bool, [UNTRACKED], - "print tasks that execute and the color their dep node gets (requires debug build) \ - (default: no)"), dont_buffer_diagnostics: bool = (false, parse_bool, [UNTRACKED], "emit diagnostics rather than buffering (breaks NLL error downgrading, sorting) \ (default: no)"), @@ -1492,8 +1551,6 @@ dump_solver_proof_tree: DumpSolverProofTree = (DumpSolverProofTree::Never, parse_dump_solver_proof_tree, [UNTRACKED], "dump a proof tree for every goal evaluated by the new trait solver. If the flag is specified without any options after it then it defaults to `always`. 
If the flag is not specified at all it defaults to `on-request`."), - dump_solver_proof_tree_use_cache: Option = (None, parse_opt_bool, [UNTRACKED], - "determines whether dumped proof trees use the global cache"), dwarf_version: Option = (None, parse_opt_number, [TRACKED], "version of DWARF debug information to emit (default: 2 or 4, depending on platform)"), dylib_lto: bool = (false, parse_bool, [UNTRACKED], @@ -1521,7 +1578,6 @@ "whether each function should go in its own section"), future_incompat_test: bool = (false, parse_bool, [UNTRACKED], "forces all lints to be future incompatible, used for internal testing (default: no)"), - gcc_ld: Option = (None, parse_gcc_ld, [TRACKED], "implementation of ld used by cc"), graphviz_dark_mode: bool = (false, parse_bool, [UNTRACKED], "use dark-themed colors in graphviz output (default: no)"), graphviz_font: String = ("Courier, monospace".to_string(), parse_string, [UNTRACKED], @@ -1554,15 +1610,6 @@ "a default MIR inlining threshold (default: 50)"), input_stats: bool = (false, parse_bool, [UNTRACKED], "gather statistics about the input (default: no)"), - #[rustc_lint_opt_deny_field_access("use `Session::instrument_coverage` instead of this field")] - instrument_coverage: Option = (None, parse_instrument_coverage, [TRACKED], - "instrument the generated code to support LLVM source-based code coverage \ - reports (note, the compiler build config must include `profiler = true`); \ - implies `-C symbol-mangling-version=v0`. Optional values are: - `=all` (implicit value) - `=except-unused-generics` - `=except-unused-functions` - `=off` (default)"), instrument_mcount: bool = (false, parse_bool, [TRACKED], "insert function instrument code for mcount-based tracing (default: no)"), instrument_xray: Option = (None, parse_instrument_xray, [TRACKED], @@ -1610,9 +1657,10 @@ "emit Retagging MIR statements, interpreted e.g., by miri; implies -Zmir-opt-level=0 \ (default: no)"), mir_enable_passes: Vec<(String, bool)> = (Vec::new(), parse_list_with_polarity, [TRACKED], - "use like `-Zmir-enable-passes=+DestinationPropagation,-InstSimplify`. Forces the specified passes to be \ - enabled, overriding all other checks. Passes that are not specified are enabled or \ - disabled by other flags as usual."), + "use like `-Zmir-enable-passes=+DestinationPropagation,-InstSimplify`. Forces the \ + specified passes to be enabled, overriding all other checks. In particular, this will \ + enable unsound (known-buggy and hence usually disabled) passes without further warning! 
\ + Passes that are not specified are enabled or disabled by other flags as usual."), mir_include_spans: bool = (false, parse_bool, [UNTRACKED], "use line numbers relative to the function in mir pretty printing"), mir_keep_place_mention: bool = (false, parse_bool, [TRACKED], @@ -1669,7 +1717,7 @@ "whether to use the PLT when calling into shared libraries; only has effect for PIC code on systems with ELF binaries (default: PLT is disabled if full relro is enabled on x86_64)"), - polonius: bool = (false, parse_bool, [TRACKED], + polonius: Polonius = (Polonius::default(), parse_polonius, [TRACKED], "enable polonius-based borrow-checker (default: no)"), polymorphize: bool = (false, parse_bool, [TRACKED], "perform polymorphization analysis"), @@ -1720,6 +1768,8 @@ "choose which RELRO level to use"), remap_cwd_prefix: Option = (None, parse_opt_pathbuf, [TRACKED], "remap paths under the current working directory to this path prefix"), + remap_path_scope: RemapPathScopeComponents = (RemapPathScopeComponents::all(), parse_remap_path_scope, [TRACKED], + "remap path scope (default: all)"), remark_dir: Option = (None, parse_opt_pathbuf, [UNTRACKED], "directory into which to write optimization remarks (if not specified, they will be \ written to standard error output)"), @@ -1791,11 +1841,6 @@ "prefer dynamic linking to static linking for staticlibs (default: no)"), strict_init_checks: bool = (false, parse_bool, [TRACKED], "control if mem::uninitialized and mem::zeroed panic on more UB"), - strip: Strip = (Strip::None, parse_strip, [UNTRACKED], - "tell the linker which information to strip (`none` (default), `debuginfo` or `symbols`)"), - symbol_mangling_version: Option = (None, - parse_symbol_mangling_version, [TRACKED], - "which mangling version to use for symbol names ('legacy' (default) or 'v0')"), #[rustc_lint_opt_deny_field_access("use `Session::teach` instead of this field")] teach: bool = (false, parse_bool, [TRACKED], "show extended diagnostic help (default: no)"), @@ -1869,6 +1914,7 @@ `hir` (the HIR), `hir,identified`, `hir,typed` (HIR with types for each node), `hir-tree` (dump the raw HIR), + `thir-tree`, `thir-flat`, `mir` (the MIR), or `mir-cfg` (graphviz formatted MIR)"), unsound_mir_opts: bool = (false, parse_bool, [TRACKED], "enable unsound and buggy MIR optimizations (default: no)"), @@ -1906,8 +1952,3 @@ Command, Reactor, } - -#[derive(Clone, Copy, Hash)] -pub enum LdImpl { - Lld, -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/output.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/output.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/output.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/output.rs 2023-12-21 16:55:28.000000000 +0000 @@ -186,10 +186,14 @@ return true; } } - if let CrateType::ProcMacro | CrateType::Dylib = crate_type && sess.target.only_cdylib { + if let CrateType::ProcMacro | CrateType::Dylib = crate_type + && sess.target.only_cdylib + { return true; } - if let CrateType::Executable = crate_type && !sess.target.executables { + if let CrateType::Executable = crate_type + && !sess.target.executables + { return true; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/parse.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/parse.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/parse.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/parse.rs 2023-12-21 
16:55:28.000000000 +0000 @@ -1,7 +1,7 @@ //! Contains `ParseSess` which holds state living beyond what one `Parser` might. //! It also serves as an input to the parser itself. -use crate::config::CheckCfg; +use crate::config::{Cfg, CheckCfg}; use crate::errors::{ CliFeatureDiagnosticHelp, FeatureDiagnosticForIssue, FeatureDiagnosticHelp, FeatureGateError, }; @@ -9,7 +9,7 @@ builtin::UNSTABLE_SYNTAX_PRE_EXPANSION, BufferedEarlyLint, BuiltinLintDiagnostics, Lint, LintId, }; use rustc_ast::node_id::NodeId; -use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet}; +use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::sync::{AppendOnlyVec, Lock, Lrc}; use rustc_errors::{emitter::SilentEmitter, Handler}; use rustc_errors::{ @@ -25,11 +25,6 @@ use rustc_ast::attr::AttrIdGenerator; use std::str; -/// The set of keys (and, optionally, values) that define the compilation -/// environment of the crate, used to drive conditional compilation. -pub type CrateConfig = FxIndexSet<(Symbol, Option)>; -pub type CrateCheckConfig = CheckCfg; - /// Collected spans during parsing for places where a certain feature was /// used and should be feature gated accordingly in `check_crate`. #[derive(Default)] @@ -39,7 +34,7 @@ impl GatedSpans { /// Feature gate the given `span` under the given `feature` - /// which is same `Symbol` used in `active.rs`. + /// which is same `Symbol` used in `unstable.rs`. pub fn gate(&self, feature: Symbol, span: Span) { self.spans.borrow_mut().entry(feature).or_default().push(span); } @@ -78,7 +73,7 @@ } /// Construct a diagnostic for a language feature error due to the given `span`. -/// The `feature`'s `Symbol` is the one you used in `active.rs` and `rustc_span::symbols`. +/// The `feature`'s `Symbol` is the one you used in `unstable.rs` and `rustc_span::symbols`. #[track_caller] pub fn feature_err( sess: &ParseSess, @@ -193,8 +188,8 @@ pub struct ParseSess { pub span_diagnostic: Handler, pub unstable_features: UnstableFeatures, - pub config: CrateConfig, - pub check_config: CrateCheckConfig, + pub config: Cfg, + pub check_config: CheckCfg, pub edition: Edition, /// Places where raw identifiers were used. This is used to avoid complaining about idents /// clashing with keywords in new editions. 
@@ -237,8 +232,8 @@ Self { span_diagnostic: handler, unstable_features: UnstableFeatures::from_environment(None), - config: FxIndexSet::default(), - check_config: CrateCheckConfig::default(), + config: Cfg::default(), + check_config: CheckCfg::default(), edition: ExpnId::root().expn_data().edition, raw_identifier_spans: Default::default(), bad_unicode_identifiers: Lock::new(Default::default()), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/session.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/session.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/session.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/session.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,8 +1,8 @@ -use crate::cgu_reuse_tracker::CguReuseTracker; use crate::code_stats::CodeStats; pub use crate::code_stats::{DataTypeKind, FieldInfo, FieldKind, SizeKind, VariantInfo}; use crate::config::{ - self, CrateType, InstrumentCoverage, OptLevel, OutFileName, OutputType, SwitchWithOptPath, + self, CrateType, InstrumentCoverage, OptLevel, OutFileName, OutputType, + RemapPathScopeComponents, SwitchWithOptPath, }; use crate::config::{ErrorOutputType, Input}; use crate::errors; @@ -31,8 +31,8 @@ use rustc_macros::HashStable_Generic; pub use rustc_span::def_id::StableCrateId; use rustc_span::edition::Edition; -use rustc_span::source_map::{FileLoader, RealFileLoader, SourceMap, Span}; -use rustc_span::{SourceFileHashAlgorithm, Symbol}; +use rustc_span::source_map::{FileLoader, RealFileLoader, SourceMap}; +use rustc_span::{SourceFileHashAlgorithm, Span, Symbol}; use rustc_target::asm::InlineAsmArch; use rustc_target::spec::{CodeModel, PanicStrategy, RelocModel, RelroLevel}; use rustc_target::spec::{ @@ -45,7 +45,7 @@ use std::ops::{Div, Mul}; use std::path::{Path, PathBuf}; use std::str::FromStr; -use std::sync::Arc; +use std::sync::{atomic::AtomicBool, Arc}; use std::time::Duration; pub struct OptimizationFuel { @@ -153,9 +153,6 @@ pub io: CompilerIO, incr_comp_session: OneThread>, - /// Used for incremental compilation tests. Will only be populated if - /// `-Zquery-dep-graph` is specified. - pub cgu_reuse_tracker: CguReuseTracker, /// Used by `-Z self-profile`. pub prof: SelfProfilerRef, @@ -205,6 +202,12 @@ /// The version of the rustc process, possibly including a commit hash and description. pub cfg_version: &'static str, + /// The inner atomic value is set to true when a feature marked as `internal` is + /// enabled. Makes it so that "please report a bug" is hidden, as ICEs with + /// internal features are wontfix, and they are usually the cause of the ICEs. + /// None signifies that this is not tracked. + pub using_internal_features: Arc, + /// All commandline args used to invoke the compiler, with @file args fully expanded. /// This will only be used within debug info, e.g. 
in the pdb file on windows /// This is mainly useful for other tools that reads that debuginfo to figure out @@ -258,7 +261,11 @@ pub fn local_crate_source_file(&self) -> Option { let path = self.io.input.opt_path()?; - Some(self.opts.file_path_mapping().map_prefix(path).0.into_owned()) + if self.should_prefer_remapped_for_codegen() { + Some(self.opts.file_path_mapping().map_prefix(path).0.into_owned()) + } else { + Some(path.to_path_buf()) + } } fn check_miri_unleashed_features(&self) { @@ -701,6 +708,10 @@ self.opts.cg.instrument_coverage() != InstrumentCoverage::Off } + pub fn instrument_coverage_branch(&self) -> bool { + self.opts.cg.instrument_coverage() == InstrumentCoverage::Branch + } + pub fn instrument_coverage_except_unused_generics(&self) -> bool { self.opts.cg.instrument_coverage() == InstrumentCoverage::ExceptUnusedGenerics } @@ -1247,6 +1258,53 @@ pub fn link_dead_code(&self) -> bool { self.opts.cg.link_dead_code.unwrap_or(false) } + + pub fn should_prefer_remapped_for_codegen(&self) -> bool { + // bail out, if any of the requested crate types aren't: + // "compiled executables or libraries" + for crate_type in &self.opts.crate_types { + match crate_type { + CrateType::Executable + | CrateType::Dylib + | CrateType::Rlib + | CrateType::Staticlib + | CrateType::Cdylib => continue, + CrateType::ProcMacro => return false, + } + } + + let has_split_debuginfo = match self.split_debuginfo() { + SplitDebuginfo::Off => false, + SplitDebuginfo::Packed => true, + SplitDebuginfo::Unpacked => true, + }; + + let remap_path_scopes = &self.opts.unstable_opts.remap_path_scope; + let mut prefer_remapped = false; + + if remap_path_scopes.contains(RemapPathScopeComponents::UNSPLIT_DEBUGINFO) { + prefer_remapped |= !has_split_debuginfo; + } + + if remap_path_scopes.contains(RemapPathScopeComponents::SPLIT_DEBUGINFO) { + prefer_remapped |= has_split_debuginfo; + } + + prefer_remapped + } + + pub fn should_prefer_remapped_for_split_debuginfo_paths(&self) -> bool { + let has_split_debuginfo = match self.split_debuginfo() { + SplitDebuginfo::Off => false, + SplitDebuginfo::Packed | SplitDebuginfo::Unpacked => true, + }; + + self.opts + .unstable_opts + .remap_path_scope + .contains(RemapPathScopeComponents::SPLIT_DEBUGINFO_PATH) + && has_split_debuginfo + } } // JUSTIFICATION: part of session construction @@ -1337,6 +1395,7 @@ target_override: Option, cfg_version: &'static str, ice_file: Option, + using_internal_features: Arc, expanded_args: Vec, ) -> Session { // FIXME: This is not general enough to make the warning lint completely override @@ -1431,12 +1490,6 @@ }); let print_fuel = AtomicU64::new(0); - let cgu_reuse_tracker = if sopts.unstable_opts.query_dep_graph { - CguReuseTracker::new() - } else { - CguReuseTracker::new_disabled() - }; - let prof = SelfProfilerRef::new( self_profiler, sopts.unstable_opts.time_passes.then(|| sopts.unstable_opts.time_passes_format), @@ -1461,7 +1514,6 @@ sysroot, io, incr_comp_session: OneThread::new(RefCell::new(IncrCompSession::NotInitialized)), - cgu_reuse_tracker, prof, perf_stats: PerfStats { symbol_hash_time: Lock::new(Duration::from_secs(0)), @@ -1480,6 +1532,7 @@ target_features: Default::default(), unstable_target_features: Default::default(), cfg_version, + using_internal_features, expanded_args, }; @@ -1763,3 +1816,53 @@ }; emitter } + +pub trait RemapFileNameExt { + type Output<'a> + where + Self: 'a; + + fn for_scope(&self, sess: &Session, scopes: RemapPathScopeComponents) -> Self::Output<'_>; + + fn for_codegen(&self, sess: &Session) -> 
Self::Output<'_>; +} + +impl RemapFileNameExt for rustc_span::FileName { + type Output<'a> = rustc_span::FileNameDisplay<'a>; + + fn for_scope(&self, sess: &Session, scopes: RemapPathScopeComponents) -> Self::Output<'_> { + if sess.opts.unstable_opts.remap_path_scope.contains(scopes) { + self.prefer_remapped_unconditionaly() + } else { + self.prefer_local() + } + } + + fn for_codegen(&self, sess: &Session) -> Self::Output<'_> { + if sess.should_prefer_remapped_for_codegen() { + self.prefer_remapped_unconditionaly() + } else { + self.prefer_local() + } + } +} + +impl RemapFileNameExt for rustc_span::RealFileName { + type Output<'a> = &'a Path; + + fn for_scope(&self, sess: &Session, scopes: RemapPathScopeComponents) -> Self::Output<'_> { + if sess.opts.unstable_opts.remap_path_scope.contains(scopes) { + self.remapped_path_if_available() + } else { + self.local_path_if_available() + } + } + + fn for_codegen(&self, sess: &Session) -> Self::Output<'_> { + if sess.should_prefer_remapped_for_codegen() { + self.remapped_path_if_available() + } else { + self.local_path_if_available() + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/utils.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/utils.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/utils.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/utils.rs 2023-12-21 16:55:28.000000000 +0000 @@ -161,7 +161,9 @@ pub(crate) fn is_ascii_ident(string: &str) -> bool { let mut chars = string.chars(); - if let Some(start) = chars.next() && (start.is_ascii_alphabetic() || start == '_') { + if let Some(start) = chars.next() + && (start.is_ascii_alphabetic() || start == '_') + { chars.all(|char| char.is_ascii_alphanumeric() || char == '_') } else { false diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/version.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/version.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/version.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_session/src/version.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use std::fmt::{self, Display}; + +#[derive(Encodable, Decodable, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(HashStable_Generic)] +pub struct RustcVersion { + pub major: u16, + pub minor: u16, + pub patch: u16, +} + +impl RustcVersion { + pub const CURRENT: Self = current_rustc_version!(env!("CFG_RELEASE")); +} + +impl Display for RustcVersion { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(formatter, "{}.{}.{}", self.major, self.minor, self.patch) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,14 +4,13 @@ edition = "2021" [dependencies] +# tidy-alphabetical-start +rustc_data_structures = { path = "../rustc_data_structures" } rustc_hir = { path = "../rustc_hir" } rustc_middle = { path = "../rustc_middle" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } -rustc_driver = { path = "../rustc_driver" } -rustc_interface = { path = "../rustc_interface" } -rustc_session = {path = "../rustc_session" } -tracing = "0.1" +scoped-tls = 
"1.0" stable_mir = {path = "../stable_mir" } - -[features] +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,10 +10,10 @@ html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", test(attr(allow(unused_variables), deny(warnings))) )] -#![feature(rustc_private)] -#![feature(ptr_metadata)] -#![feature(type_alias_impl_trait)] // Used to define opaque types. -#![feature(intra_doc_pointers)] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] +#![allow(rustc::usage_of_ty_tykind)] pub mod rustc_internal; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_internal/internal.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_internal/internal.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_internal/internal.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_internal/internal.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,67 @@ +//! Module containing the translation from stable mir constructs to the rustc counterpart. +//! +//! This module will only include a few constructs to allow users to invoke internal rustc APIs +//! due to incomplete stable coverage. + +// Prefer importing stable_mir over internal rustc constructs to make this file more readable. +use crate::rustc_smir::Tables; +use rustc_middle::ty::{self as rustc_ty, Ty as InternalTy}; +use stable_mir::ty::{Const, GenericArgKind, GenericArgs, Region, Ty}; +use stable_mir::DefId; + +use super::RustcInternal; + +impl<'tcx> RustcInternal<'tcx> for DefId { + type T = rustc_span::def_id::DefId; + fn internal(&self, tables: &mut Tables<'tcx>) -> Self::T { + tables.def_ids[*self] + } +} + +impl<'tcx> RustcInternal<'tcx> for GenericArgs { + type T = rustc_ty::GenericArgsRef<'tcx>; + fn internal(&self, tables: &mut Tables<'tcx>) -> Self::T { + tables.tcx.mk_args_from_iter(self.0.iter().map(|arg| arg.internal(tables))) + } +} + +impl<'tcx> RustcInternal<'tcx> for GenericArgKind { + type T = rustc_ty::GenericArg<'tcx>; + fn internal(&self, tables: &mut Tables<'tcx>) -> Self::T { + match self { + GenericArgKind::Lifetime(reg) => reg.internal(tables).into(), + GenericArgKind::Type(ty) => ty.internal(tables).into(), + GenericArgKind::Const(cnst) => ty_const(cnst, tables).into(), + } + } +} + +impl<'tcx> RustcInternal<'tcx> for Region { + type T = rustc_ty::Region<'tcx>; + fn internal(&self, _tables: &mut Tables<'tcx>) -> Self::T { + todo!() + } +} + +impl<'tcx> RustcInternal<'tcx> for Ty { + type T = InternalTy<'tcx>; + fn internal(&self, tables: &mut Tables<'tcx>) -> Self::T { + tables.types[*self] + } +} + +fn ty_const<'tcx>(constant: &Const, tables: &mut Tables<'tcx>) -> rustc_ty::Const<'tcx> { + match constant.internal(tables) { + rustc_middle::mir::Const::Ty(c) => c, + cnst => { + panic!("Trying to convert constant `{constant:?}` to type constant, but found {cnst:?}") + } + } +} + +impl<'tcx> RustcInternal<'tcx> for Const { + type T = rustc_middle::mir::Const<'tcx>; + fn internal(&self, tables: &mut Tables<'tcx>) -> Self::T { + tables.constants[self.id] + } +} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_internal/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_internal/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_internal/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_internal/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,24 +3,38 @@ //! For that, we define APIs that will temporarily be public to 3P that exposes rustc internal APIs //! until stable MIR is complete. -use std::ops::{ControlFlow, Index}; - -use crate::rustc_internal; -use crate::rustc_smir::Tables; -use rustc_driver::{Callbacks, Compilation, RunCompiler}; -use rustc_interface::{interface, Queries}; +use crate::rustc_smir::{Stable, Tables, TablesWrapper}; +use rustc_data_structures::fx; +use rustc_data_structures::fx::FxIndexMap; use rustc_middle::mir::interpret::AllocId; +use rustc_middle::ty; use rustc_middle::ty::TyCtxt; -pub use rustc_span::def_id::{CrateNum, DefId}; +use rustc_span::def_id::{CrateNum, DefId}; use rustc_span::Span; -use stable_mir::CompilerError; +use scoped_tls::scoped_thread_local; +use stable_mir::ty::IndexedVal; +use std::cell::Cell; +use std::cell::RefCell; +use std::fmt::Debug; +use std::hash::Hash; +use std::ops::Index; + +mod internal; + +pub fn stable<'tcx, S: Stable<'tcx>>(item: &S) -> S::T { + with_tables(|tables| item.stable(tables)) +} + +pub fn internal<'tcx, S: RustcInternal<'tcx>>(item: &S) -> S::T { + with_tables(|tables| item.internal(tables)) +} impl<'tcx> Index for Tables<'tcx> { type Output = DefId; #[inline(always)] fn index(&self, index: stable_mir::DefId) -> &Self::Output { - &self.def_ids[index.0] + &self.def_ids[index] } } @@ -29,7 +43,7 @@ #[inline(always)] fn index(&self, index: stable_mir::ty::Span) -> &Self::Output { - &self.spans[index.0] + &self.spans[index] } } @@ -54,8 +68,8 @@ stable_mir::ty::ClosureDef(self.create_def_id(did)) } - pub fn generator_def(&mut self, did: DefId) -> stable_mir::ty::GeneratorDef { - stable_mir::ty::GeneratorDef(self.create_def_id(did)) + pub fn coroutine_def(&mut self, did: DefId) -> stable_mir::ty::CoroutineDef { + stable_mir::ty::CoroutineDef(self.create_def_id(did)) } pub fn alias_def(&mut self, did: DefId) -> stable_mir::ty::AliasDef { @@ -94,37 +108,27 @@ stable_mir::ty::Prov(self.create_alloc_id(aid)) } - fn create_def_id(&mut self, did: DefId) -> stable_mir::DefId { - // FIXME: this becomes inefficient when we have too many ids - for (i, &d) in self.def_ids.iter().enumerate() { - if d == did { - return stable_mir::DefId(i); - } - } - let id = self.def_ids.len(); - self.def_ids.push(did); - stable_mir::DefId(id) + pub(crate) fn create_def_id(&mut self, did: DefId) -> stable_mir::DefId { + self.def_ids.create_or_fetch(did) } fn create_alloc_id(&mut self, aid: AllocId) -> stable_mir::AllocId { - // FIXME: this becomes inefficient when we have too many ids - if let Some(i) = self.alloc_ids.iter().position(|a| *a == aid) { - return stable_mir::AllocId(i); - }; - let id = self.def_ids.len(); - self.alloc_ids.push(aid); - stable_mir::AllocId(id) + self.alloc_ids.create_or_fetch(aid) } pub(crate) fn create_span(&mut self, span: Span) -> stable_mir::ty::Span { - for (i, &sp) in self.spans.iter().enumerate() { - if sp == span { - return stable_mir::ty::Span(i); - } - } - let id = self.spans.len(); - self.spans.push(span); - stable_mir::ty::Span(id) + self.spans.create_or_fetch(span) + } + + pub(crate) fn instance_def( + &mut self, + instance: ty::Instance<'tcx>, + ) -> 
stable_mir::mir::mono::InstanceDef { + self.instances.create_or_fetch(instance) + } + + pub(crate) fn static_def(&mut self, did: DefId) -> stable_mir::mir::mono::StaticDef { + stable_mir::mir::mono::StaticDef(self.create_def_id(did)) } } @@ -132,68 +136,157 @@ item.id.into() } +// A thread local variable that stores a pointer to the tables mapping between TyCtxt +// datastructures and stable MIR datastructures +scoped_thread_local! (static TLV: Cell<*const ()>); + +pub(crate) fn init<'tcx>(tables: &TablesWrapper<'tcx>, f: impl FnOnce()) { + assert!(!TLV.is_set()); + let ptr = tables as *const _ as *const (); + TLV.set(&Cell::new(ptr), || { + f(); + }); +} + +/// Loads the current context and calls a function with it. +/// Do not nest these, as that will ICE. +pub(crate) fn with_tables<'tcx, R>(f: impl FnOnce(&mut Tables<'tcx>) -> R) -> R { + assert!(TLV.is_set()); + TLV.with(|tlv| { + let ptr = tlv.get(); + assert!(!ptr.is_null()); + let wrapper = ptr as *const TablesWrapper<'tcx>; + let mut tables = unsafe { (*wrapper).0.borrow_mut() }; + f(&mut *tables) + }) +} + pub fn run(tcx: TyCtxt<'_>, f: impl FnOnce()) { - stable_mir::run( - Tables { tcx, def_ids: vec![], alloc_ids: vec![], spans: vec![], types: vec![] }, - f, - ); + let tables = TablesWrapper(RefCell::new(Tables { + tcx, + def_ids: IndexMap::default(), + alloc_ids: IndexMap::default(), + spans: IndexMap::default(), + types: IndexMap::default(), + instances: IndexMap::default(), + constants: IndexMap::default(), + })); + stable_mir::run(&tables, || init(&tables, f)); } -pub struct StableMir -where - B: Send, - C: Send, -{ - args: Vec, - callback: fn(TyCtxt<'_>) -> ControlFlow, - result: Option>, +#[macro_export] +macro_rules! run { + ($args:expr, $callback:expr) => { + run!($args, tcx, $callback) + }; + ($args:expr, $tcx:ident, $callback:expr) => {{ + use rustc_driver::{Callbacks, Compilation, RunCompiler}; + use rustc_interface::{interface, Queries}; + use stable_mir::CompilerError; + use std::ops::ControlFlow; + + pub struct StableMir + where + B: Send, + C: Send, + { + args: Vec, + callback: fn(TyCtxt<'_>) -> ControlFlow, + result: Option>, + } + + impl StableMir + where + B: Send, + C: Send, + { + /// Creates a new `StableMir` instance, with given test_function and arguments. + pub fn new(args: Vec, callback: fn(TyCtxt<'_>) -> ControlFlow) -> Self { + StableMir { args, callback, result: None } + } + + /// Runs the compiler against given target and tests it with `test_function` + pub fn run(&mut self) -> Result> { + let compiler_result = rustc_driver::catch_fatal_errors(|| { + RunCompiler::new(&self.args.clone(), self).run() + }); + match (compiler_result, self.result.take()) { + (Ok(Ok(())), Some(ControlFlow::Continue(value))) => Ok(value), + (Ok(Ok(())), Some(ControlFlow::Break(value))) => { + Err(CompilerError::Interrupted(value)) + } + (Ok(Ok(_)), None) => Err(CompilerError::Skipped), + (Ok(Err(_)), _) => Err(CompilerError::CompilationFailed), + (Err(_), _) => Err(CompilerError::ICE), + } + } + } + + impl Callbacks for StableMir + where + B: Send, + C: Send, + { + /// Called after analysis. 
Return value instructs the compiler whether to + /// continue the compilation afterwards (defaults to `Compilation::Continue`) + fn after_analysis<'tcx>( + &mut self, + _compiler: &interface::Compiler, + queries: &'tcx Queries<'tcx>, + ) -> Compilation { + queries.global_ctxt().unwrap().enter(|tcx| { + rustc_internal::run(tcx, || { + self.result = Some((self.callback)(tcx)); + }); + if self.result.as_ref().is_some_and(|val| val.is_continue()) { + Compilation::Continue + } else { + Compilation::Stop + } + }) + } + } + + StableMir::new($args, |$tcx| $callback).run() + }}; } -impl StableMir -where - B: Send, - C: Send, -{ - /// Creates a new `StableMir` instance, with given test_function and arguments. - pub fn new(args: Vec, callback: fn(TyCtxt<'_>) -> ControlFlow) -> Self { - StableMir { args, callback, result: None } +/// Simmilar to rustc's `FxIndexMap`, `IndexMap` with extra +/// safety features added. +pub struct IndexMap { + index_map: fx::FxIndexMap, +} + +impl Default for IndexMap { + fn default() -> Self { + Self { index_map: FxIndexMap::default() } } +} - /// Runs the compiler against given target and tests it with `test_function` - pub fn run(&mut self) -> Result> { - let compiler_result = - rustc_driver::catch_fatal_errors(|| RunCompiler::new(&self.args.clone(), self).run()); - match (compiler_result, self.result.take()) { - (Ok(Ok(())), Some(ControlFlow::Continue(value))) => Ok(value), - (Ok(Ok(())), Some(ControlFlow::Break(value))) => Err(CompilerError::Interrupted(value)), - (Ok(Ok(_)), None) => Err(CompilerError::Skipped), - (Ok(Err(_)), _) => Err(CompilerError::CompilationFailed), - (Err(_), _) => Err(CompilerError::ICE), - } +impl IndexMap { + pub fn create_or_fetch(&mut self, key: K) -> V { + let len = self.index_map.len(); + let v = self.index_map.entry(key).or_insert(V::to_val(len)); + *v } } -impl Callbacks for StableMir -where - B: Send, - C: Send, +impl Index + for IndexMap { - /// Called after analysis. Return value instructs the compiler whether to - /// continue the compilation afterwards (defaults to `Compilation::Continue`) - fn after_analysis<'tcx>( - &mut self, - _compiler: &interface::Compiler, - queries: &'tcx Queries<'tcx>, - ) -> Compilation { - queries.global_ctxt().unwrap().enter(|tcx| { - rustc_internal::run(tcx, || { - self.result = Some((self.callback)(tcx)); - }); - if self.result.as_ref().is_some_and(|val| val.is_continue()) { - Compilation::Continue - } else { - Compilation::Stop - } - }) + type Output = K; + + fn index(&self, index: V) -> &Self::Output { + let (k, v) = self.index_map.get_index(index.to_index()).unwrap(); + assert_eq!(*v, index, "Provided value doesn't match with indexed value"); + k } } + +/// Trait used to translate a stable construct to its rustc counterpart. +/// +/// This is basically a mirror of [crate::rustc_smir::Stable]. +pub trait RustcInternal<'tcx> { + type T; + fn internal(&self, tables: &mut Tables<'tcx>) -> Self::T; +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_smir/builder.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_smir/builder.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_smir/builder.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_smir/builder.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,55 @@ +//! Logic required to produce a monomorphic stable body. +//! +//! We first retrieve and monomorphize the rustc body representation, i.e., we generate a +//! 
monomorphic body using internal representation. +//! After that, we convert the internal representation into a stable one. +use crate::rustc_smir::{Stable, Tables}; +use rustc_middle::mir; +use rustc_middle::mir::visit::MutVisitor; +use rustc_middle::ty::{self, Ty, TyCtxt}; + +/// Builds a monomorphic body for a given instance. +pub struct BodyBuilder<'tcx> { + tcx: TyCtxt<'tcx>, + instance: ty::Instance<'tcx>, +} + +impl<'tcx> BodyBuilder<'tcx> { + pub fn new(tcx: TyCtxt<'tcx>, instance: ty::Instance<'tcx>) -> Self { + BodyBuilder { tcx, instance } + } + + pub fn build(mut self, tables: &mut Tables<'tcx>) -> stable_mir::mir::Body { + let mut body = self.tcx.instance_mir(self.instance.def).clone(); + let generics = self.tcx.generics_of(self.instance.def_id()); + if generics.requires_monomorphization(self.tcx) { + self.visit_body(&mut body); + } + body.stable(tables) + } + + fn monomorphize(&self, value: T) -> T + where + T: ty::TypeFoldable>, + { + self.instance.instantiate_mir_and_normalize_erasing_regions( + self.tcx, + ty::ParamEnv::reveal_all(), + ty::EarlyBinder::bind(value), + ) + } +} + +impl<'tcx> MutVisitor<'tcx> for BodyBuilder<'tcx> { + fn visit_ty_const(&mut self, ct: &mut ty::Const<'tcx>, _location: mir::Location) { + *ct = self.monomorphize(*ct); + } + + fn visit_ty(&mut self, ty: &mut Ty<'tcx>, _: mir::visit::TyContext) { + *ty = self.monomorphize(*ty); + } + + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_smir/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_smir/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_smir/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_smir/src/rustc_smir/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -7,200 +7,269 @@ //! //! For now, we are developing everything inside `rustc`, thus, we keep this module private. 
+use crate::rustc_internal::{IndexMap, RustcInternal}; use crate::rustc_smir::hir::def::DefKind; use crate::rustc_smir::stable_mir::ty::{BoundRegion, EarlyBoundRegion, Region}; use rustc_hir as hir; use rustc_middle::mir; use rustc_middle::mir::interpret::{alloc_range, AllocId}; -use rustc_middle::ty::{self, Ty, TyCtxt, Variance}; +use rustc_middle::mir::mono::MonoItem; +use rustc_middle::ty::{self, Instance, ParamEnv, Ty, TyCtxt, Variance}; use rustc_span::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc_target::abi::FieldIdx; -use stable_mir::mir::{CopyNonOverlapping, UserTypeProjection, VariantIdx}; -use stable_mir::ty::{FloatTy, GenericParamDef, IntTy, Movability, RigidTy, Span, TyKind, UintTy}; -use stable_mir::{self, opaque, Context}; +use stable_mir::mir::mono::InstanceDef; +use stable_mir::mir::{Body, CopyNonOverlapping, Statement, UserTypeProjection, VariantIdx}; +use stable_mir::ty::{ + Const, ConstId, ConstantKind, FloatTy, GenericParamDef, IntTy, LineInfo, Movability, RigidTy, + Span, TyKind, UintTy, +}; +use stable_mir::{self, opaque, Context, Filename}; +use std::cell::RefCell; use tracing::debug; mod alloc; +mod builder; -impl<'tcx> Context for Tables<'tcx> { +impl<'tcx> Context for TablesWrapper<'tcx> { fn local_crate(&self) -> stable_mir::Crate { - smir_crate(self.tcx, LOCAL_CRATE) + let tables = self.0.borrow(); + smir_crate(tables.tcx, LOCAL_CRATE) } fn external_crates(&self) -> Vec { - self.tcx.crates(()).iter().map(|crate_num| smir_crate(self.tcx, *crate_num)).collect() + let tables = self.0.borrow(); + tables.tcx.crates(()).iter().map(|crate_num| smir_crate(tables.tcx, *crate_num)).collect() } - fn find_crate(&self, name: &str) -> Option { - [LOCAL_CRATE].iter().chain(self.tcx.crates(()).iter()).find_map(|crate_num| { - let crate_name = self.tcx.crate_name(*crate_num).to_string(); - (name == crate_name).then(|| smir_crate(self.tcx, *crate_num)) - }) + fn find_crates(&self, name: &str) -> Vec { + let tables = self.0.borrow(); + let crates: Vec = [LOCAL_CRATE] + .iter() + .chain(tables.tcx.crates(()).iter()) + .map(|crate_num| { + let crate_name = tables.tcx.crate_name(*crate_num).to_string(); + (name == crate_name).then(|| smir_crate(tables.tcx, *crate_num)) + }) + .into_iter() + .filter_map(|c| c) + .collect(); + crates } fn name_of_def_id(&self, def_id: stable_mir::DefId) -> String { - self.tcx.def_path_str(self[def_id]) + let tables = self.0.borrow(); + tables.tcx.def_path_str(tables[def_id]) + } + + fn span_to_string(&self, span: stable_mir::ty::Span) -> String { + let tables = self.0.borrow(); + tables.tcx.sess.source_map().span_to_diagnostic_string(tables[span]) + } + + fn get_filename(&self, span: &Span) -> Filename { + let tables = self.0.borrow(); + opaque( + &tables + .tcx + .sess + .source_map() + .span_to_filename(tables[*span]) + .display(rustc_span::FileNameDisplayPreference::Local) + .to_string(), + ) } - fn print_span(&self, span: stable_mir::ty::Span) -> String { - self.tcx.sess.source_map().span_to_diagnostic_string(self[span]) + fn get_lines(&self, span: &Span) -> LineInfo { + let tables = self.0.borrow(); + let lines = &tables.tcx.sess.source_map().span_to_location_info(tables[*span]); + LineInfo { start_line: lines.1, start_col: lines.2, end_line: lines.3, end_col: lines.4 } } - fn def_kind(&mut self, def_id: stable_mir::DefId) -> stable_mir::DefKind { - self.tcx.def_kind(self[def_id]).stable(self) + fn def_kind(&self, def_id: stable_mir::DefId) -> stable_mir::DefKind { + let mut tables = self.0.borrow_mut(); + 
tables.tcx.def_kind(tables[def_id]).stable(&mut *tables) } - fn span_of_an_item(&mut self, def_id: stable_mir::DefId) -> Span { - self.tcx.def_span(self[def_id]).stable(self) + fn span_of_an_item(&self, def_id: stable_mir::DefId) -> Span { + let mut tables = self.0.borrow_mut(); + tables.tcx.def_span(tables[def_id]).stable(&mut *tables) } - fn all_local_items(&mut self) -> stable_mir::CrateItems { - self.tcx.mir_keys(()).iter().map(|item| self.crate_item(item.to_def_id())).collect() + fn all_local_items(&self) -> stable_mir::CrateItems { + let mut tables = self.0.borrow_mut(); + tables.tcx.mir_keys(()).iter().map(|item| tables.crate_item(item.to_def_id())).collect() } - fn entry_fn(&mut self) -> Option { - Some(self.crate_item(self.tcx.entry_fn(())?.0)) + fn entry_fn(&self) -> Option { + let mut tables = self.0.borrow_mut(); + let tcx = tables.tcx; + Some(tables.crate_item(tcx.entry_fn(())?.0)) } - fn all_trait_decls(&mut self) -> stable_mir::TraitDecls { - self.tcx + fn all_trait_decls(&self) -> stable_mir::TraitDecls { + let mut tables = self.0.borrow_mut(); + tables + .tcx .traits(LOCAL_CRATE) .iter() - .map(|trait_def_id| self.trait_def(*trait_def_id)) + .map(|trait_def_id| tables.trait_def(*trait_def_id)) .collect() } - fn trait_decl(&mut self, trait_def: &stable_mir::ty::TraitDef) -> stable_mir::ty::TraitDecl { - let def_id = self[trait_def.0]; - let trait_def = self.tcx.trait_def(def_id); - trait_def.stable(self) + fn trait_decl(&self, trait_def: &stable_mir::ty::TraitDef) -> stable_mir::ty::TraitDecl { + let mut tables = self.0.borrow_mut(); + let def_id = tables[trait_def.0]; + let trait_def = tables.tcx.trait_def(def_id); + trait_def.stable(&mut *tables) } - fn all_trait_impls(&mut self) -> stable_mir::ImplTraitDecls { - self.tcx + fn all_trait_impls(&self) -> stable_mir::ImplTraitDecls { + let mut tables = self.0.borrow_mut(); + tables + .tcx .trait_impls_in_crate(LOCAL_CRATE) .iter() - .map(|impl_def_id| self.impl_def(*impl_def_id)) + .map(|impl_def_id| tables.impl_def(*impl_def_id)) .collect() } - fn trait_impl(&mut self, impl_def: &stable_mir::ty::ImplDef) -> stable_mir::ty::ImplTrait { - let def_id = self[impl_def.0]; - let impl_trait = self.tcx.impl_trait_ref(def_id).unwrap(); - impl_trait.stable(self) - } - - fn mir_body(&mut self, item: stable_mir::DefId) -> stable_mir::mir::Body { - let def_id = self[item]; - let mir = self.tcx.instance_mir(ty::InstanceDef::Item(def_id)); - stable_mir::mir::Body { - blocks: mir - .basic_blocks - .iter() - .map(|block| stable_mir::mir::BasicBlock { - terminator: block.terminator().stable(self), - statements: block - .statements - .iter() - .map(|statement| statement.stable(self)) - .collect(), - }) - .collect(), - locals: mir.local_decls.iter().map(|decl| self.intern_ty(decl.ty)).collect(), - } + fn trait_impl(&self, impl_def: &stable_mir::ty::ImplDef) -> stable_mir::ty::ImplTrait { + let mut tables = self.0.borrow_mut(); + let def_id = tables[impl_def.0]; + let impl_trait = tables.tcx.impl_trait_ref(def_id).unwrap(); + impl_trait.stable(&mut *tables) } - fn ty_kind(&mut self, ty: stable_mir::ty::Ty) -> TyKind { - self.types[ty.0].clone().stable(self) + fn mir_body(&self, item: stable_mir::DefId) -> stable_mir::mir::Body { + let mut tables = self.0.borrow_mut(); + let def_id = tables[item]; + tables.tcx.instance_mir(ty::InstanceDef::Item(def_id)).stable(&mut tables) } - fn mk_ty(&mut self, kind: TyKind) -> stable_mir::ty::Ty { - let n = self.types.len(); - self.types.push(MaybeStable::Stable(kind)); - stable_mir::ty::Ty(n) + fn 
ty_kind(&self, ty: stable_mir::ty::Ty) -> TyKind { + let mut tables = self.0.borrow_mut(); + tables.types[ty].kind().stable(&mut *tables) } - fn generics_of(&mut self, def_id: stable_mir::DefId) -> stable_mir::ty::Generics { - let def_id = self[def_id]; - let generics = self.tcx.generics_of(def_id); - generics.stable(self) + fn generics_of(&self, def_id: stable_mir::DefId) -> stable_mir::ty::Generics { + let mut tables = self.0.borrow_mut(); + let def_id = tables[def_id]; + let generics = tables.tcx.generics_of(def_id); + generics.stable(&mut *tables) } - fn predicates_of(&mut self, def_id: stable_mir::DefId) -> stable_mir::ty::GenericPredicates { - let def_id = self[def_id]; - let ty::GenericPredicates { parent, predicates } = self.tcx.predicates_of(def_id); + fn predicates_of(&self, def_id: stable_mir::DefId) -> stable_mir::ty::GenericPredicates { + let mut tables = self.0.borrow_mut(); + let def_id = tables[def_id]; + let ty::GenericPredicates { parent, predicates } = tables.tcx.predicates_of(def_id); stable_mir::ty::GenericPredicates { - parent: parent.map(|did| self.trait_def(did)), + parent: parent.map(|did| tables.trait_def(did)), predicates: predicates .iter() .map(|(clause, span)| { - (clause.as_predicate().kind().skip_binder().stable(self), span.stable(self)) + ( + clause.as_predicate().kind().skip_binder().stable(&mut *tables), + span.stable(&mut *tables), + ) }) .collect(), } } fn explicit_predicates_of( - &mut self, + &self, def_id: stable_mir::DefId, ) -> stable_mir::ty::GenericPredicates { - let def_id = self[def_id]; - let ty::GenericPredicates { parent, predicates } = self.tcx.explicit_predicates_of(def_id); + let mut tables = self.0.borrow_mut(); + let def_id = tables[def_id]; + let ty::GenericPredicates { parent, predicates } = + tables.tcx.explicit_predicates_of(def_id); stable_mir::ty::GenericPredicates { - parent: parent.map(|did| self.trait_def(did)), + parent: parent.map(|did| tables.trait_def(did)), predicates: predicates .iter() .map(|(clause, span)| { - (clause.as_predicate().kind().skip_binder().stable(self), span.stable(self)) + ( + clause.as_predicate().kind().skip_binder().stable(&mut *tables), + span.stable(&mut *tables), + ) }) .collect(), } } -} -#[derive(Clone)] -pub enum MaybeStable { - Stable(S), - Rustc(R), -} + fn instance_body(&self, def: InstanceDef) -> Body { + let mut tables = self.0.borrow_mut(); + let instance = tables.instances[def]; + builder::BodyBuilder::new(tables.tcx, instance).build(&mut *tables) + } -impl<'tcx, S, R> MaybeStable { - fn stable(self, tables: &mut Tables<'tcx>) -> S - where - R: Stable<'tcx, T = S>, - { - match self { - MaybeStable::Stable(s) => s, - MaybeStable::Rustc(r) => r.stable(tables), - } + fn instance_ty(&self, def: InstanceDef) -> stable_mir::ty::Ty { + let mut tables = self.0.borrow_mut(); + let instance = tables.instances[def]; + instance.ty(tables.tcx, ParamEnv::empty()).stable(&mut *tables) } -} -impl PartialEq for MaybeStable { - fn eq(&self, other: &R) -> bool { - match self { - MaybeStable::Stable(_) => false, - MaybeStable::Rustc(r) => r == other, + fn instance_def_id(&self, def: InstanceDef) -> stable_mir::DefId { + let mut tables = self.0.borrow_mut(); + let def_id = tables.instances[def].def_id(); + tables.create_def_id(def_id) + } + + fn instance_mangled_name(&self, def: InstanceDef) -> String { + let tables = self.0.borrow_mut(); + let instance = tables.instances[def]; + tables.tcx.symbol_name(instance).name.to_string() + } + + fn mono_instance(&self, item: stable_mir::CrateItem) -> 
stable_mir::mir::mono::Instance { + let mut tables = self.0.borrow_mut(); + let def_id = tables[item.0]; + Instance::mono(tables.tcx, def_id).stable(&mut *tables) + } + + fn requires_monomorphization(&self, def_id: stable_mir::DefId) -> bool { + let tables = self.0.borrow(); + let def_id = tables[def_id]; + let generics = tables.tcx.generics_of(def_id); + let result = generics.requires_monomorphization(tables.tcx); + result + } + + fn resolve_instance( + &self, + def: stable_mir::ty::FnDef, + args: &stable_mir::ty::GenericArgs, + ) -> Option { + let mut tables = self.0.borrow_mut(); + let def_id = def.0.internal(&mut *tables); + let args_ref = args.internal(&mut *tables); + match Instance::resolve(tables.tcx, ParamEnv::reveal_all(), def_id, args_ref) { + Ok(Some(instance)) => Some(instance.stable(&mut *tables)), + Ok(None) | Err(_) => None, } } } +pub(crate) struct TablesWrapper<'tcx>(pub(crate) RefCell>); + pub struct Tables<'tcx> { - pub tcx: TyCtxt<'tcx>, - pub def_ids: Vec, - pub alloc_ids: Vec, - pub spans: Vec, - pub types: Vec>>, + pub(crate) tcx: TyCtxt<'tcx>, + pub(crate) def_ids: IndexMap, + pub(crate) alloc_ids: IndexMap, + pub(crate) spans: IndexMap, + pub(crate) types: IndexMap, stable_mir::ty::Ty>, + pub(crate) instances: IndexMap, InstanceDef>, + pub(crate) constants: IndexMap, ConstId>, } impl<'tcx> Tables<'tcx> { fn intern_ty(&mut self, ty: Ty<'tcx>) -> stable_mir::ty::Ty { - if let Some(id) = self.types.iter().position(|t| *t == ty) { - return stable_mir::ty::Ty(id); - } - let id = self.types.len(); - self.types.push(MaybeStable::Rustc(ty)); - stable_mir::ty::Ty(id) + self.types.create_or_fetch(ty) + } + + fn intern_const(&mut self, constant: mir::Const<'tcx>) -> ConstId { + self.constants.create_or_fetch(constant) } } @@ -213,51 +282,102 @@ } /// Trait used to convert between an internal MIR type to a Stable MIR type. -pub(crate) trait Stable<'tcx> { +pub trait Stable<'tcx> { /// The stable representation of the type implementing Stable. type T; /// Converts an object to the equivalent Stable MIR representation. 
fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T; } +impl<'tcx> Stable<'tcx> for mir::Body<'tcx> { + type T = stable_mir::mir::Body; + + fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { + stable_mir::mir::Body::new( + self.basic_blocks + .iter() + .map(|block| stable_mir::mir::BasicBlock { + terminator: block.terminator().stable(tables), + statements: block + .statements + .iter() + .map(|statement| statement.stable(tables)) + .collect(), + }) + .collect(), + self.local_decls + .iter() + .map(|decl| stable_mir::mir::LocalDecl { + ty: decl.ty.stable(tables), + span: decl.source_info.span.stable(tables), + }) + .collect(), + self.arg_count, + ) + } +} + impl<'tcx> Stable<'tcx> for mir::Statement<'tcx> { type T = stable_mir::mir::Statement; fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { - use rustc_middle::mir::StatementKind::*; - match &self.kind { - Assign(assign) => { - stable_mir::mir::Statement::Assign(assign.0.stable(tables), assign.1.stable(tables)) - } - FakeRead(fake_read_place) => stable_mir::mir::Statement::FakeRead( - fake_read_place.0.stable(tables), - fake_read_place.1.stable(tables), + Statement { kind: self.kind.stable(tables), span: self.source_info.span.stable(tables) } + } +} + +impl<'tcx> Stable<'tcx> for mir::StatementKind<'tcx> { + type T = stable_mir::mir::StatementKind; + fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { + match self { + mir::StatementKind::Assign(assign) => stable_mir::mir::StatementKind::Assign( + assign.0.stable(tables), + assign.1.stable(tables), ), - SetDiscriminant { place: plc, variant_index: idx } => { - stable_mir::mir::Statement::SetDiscriminant { - place: plc.as_ref().stable(tables), - variant_index: idx.stable(tables), + mir::StatementKind::FakeRead(fake_read_place) => { + stable_mir::mir::StatementKind::FakeRead( + fake_read_place.0.stable(tables), + fake_read_place.1.stable(tables), + ) + } + mir::StatementKind::SetDiscriminant { place, variant_index } => { + stable_mir::mir::StatementKind::SetDiscriminant { + place: place.as_ref().stable(tables), + variant_index: variant_index.stable(tables), } } - Deinit(place) => stable_mir::mir::Statement::Deinit(place.stable(tables)), - StorageLive(place) => stable_mir::mir::Statement::StorageLive(place.stable(tables)), - StorageDead(place) => stable_mir::mir::Statement::StorageDead(place.stable(tables)), - Retag(retag, place) => { - stable_mir::mir::Statement::Retag(retag.stable(tables), place.stable(tables)) - } - PlaceMention(place) => stable_mir::mir::Statement::PlaceMention(place.stable(tables)), - AscribeUserType(place_projection, variance) => { - stable_mir::mir::Statement::AscribeUserType { + mir::StatementKind::Deinit(place) => { + stable_mir::mir::StatementKind::Deinit(place.stable(tables)) + } + + mir::StatementKind::StorageLive(place) => { + stable_mir::mir::StatementKind::StorageLive(place.stable(tables)) + } + + mir::StatementKind::StorageDead(place) => { + stable_mir::mir::StatementKind::StorageDead(place.stable(tables)) + } + mir::StatementKind::Retag(retag, place) => { + stable_mir::mir::StatementKind::Retag(retag.stable(tables), place.stable(tables)) + } + mir::StatementKind::PlaceMention(place) => { + stable_mir::mir::StatementKind::PlaceMention(place.stable(tables)) + } + mir::StatementKind::AscribeUserType(place_projection, variance) => { + stable_mir::mir::StatementKind::AscribeUserType { place: place_projection.as_ref().0.stable(tables), projections: place_projection.as_ref().1.stable(tables), variance: variance.stable(tables), } } - 
Coverage(coverage) => stable_mir::mir::Statement::Coverage(opaque(coverage)), - Intrinsic(intrinstic) => { - stable_mir::mir::Statement::Intrinsic(intrinstic.stable(tables)) + mir::StatementKind::Coverage(coverage) => { + stable_mir::mir::StatementKind::Coverage(opaque(coverage)) + } + mir::StatementKind::Intrinsic(intrinstic) => { + stable_mir::mir::StatementKind::Intrinsic(intrinstic.stable(tables)) + } + mir::StatementKind::ConstEvalCounter => { + stable_mir::mir::StatementKind::ConstEvalCounter } - ConstEvalCounter => stable_mir::mir::Statement::ConstEvalCounter, - Nop => stable_mir::mir::Statement::Nop, + mir::StatementKind::Nop => stable_mir::mir::StatementKind::Nop, } } } @@ -287,7 +407,7 @@ Cast(cast_kind, op, ty) => stable_mir::mir::Rvalue::Cast( cast_kind.stable(tables), op.stable(tables), - tables.intern_ty(*ty), + ty.stable(tables), ), BinaryOp(bin_op, ops) => stable_mir::mir::Rvalue::BinaryOp( bin_op.stable(tables), @@ -300,7 +420,7 @@ ops.1.stable(tables), ), NullaryOp(null_op, ty) => { - stable_mir::mir::Rvalue::NullaryOp(null_op.stable(tables), tables.intern_ty(*ty)) + stable_mir::mir::Rvalue::NullaryOp(null_op.stable(tables), ty.stable(tables)) } UnaryOp(un_op, op) => { stable_mir::mir::Rvalue::UnaryOp(un_op.stable(tables), op.stable(tables)) @@ -311,7 +431,7 @@ stable_mir::mir::Rvalue::Aggregate(agg_kind.stable(tables), operands) } ShallowInitBox(op, ty) => { - stable_mir::mir::Rvalue::ShallowInitBox(op.stable(tables), tables.intern_ty(*ty)) + stable_mir::mir::Rvalue::ShallowInitBox(op.stable(tables), ty.stable(tables)) } CopyForDeref(place) => stable_mir::mir::Rvalue::CopyForDeref(place.stable(tables)), } @@ -455,7 +575,7 @@ fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { use stable_mir::ty::TermKind; match self { - ty::TermKind::Ty(ty) => TermKind::Type(tables.intern_ty(*ty)), + ty::TermKind::Ty(ty) => TermKind::Type(ty.stable(tables)), ty::TermKind::Const(cnst) => { let cnst = cnst.stable(tables); TermKind::Const(cnst) @@ -529,6 +649,13 @@ } } +impl<'tcx> Stable<'tcx> for (rustc_target::abi::VariantIdx, FieldIdx) { + type T = (usize, usize); + fn stable(&self, _: &mut Tables<'tcx>) -> Self::T { + (self.0.as_usize(), self.1.as_usize()) + } +} + impl<'tcx> Stable<'tcx> for mir::Operand<'tcx> { type T = stable_mir::mir::Operand; fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { @@ -673,11 +800,11 @@ AssertKind::RemainderByZero(op) => { stable_mir::mir::AssertMessage::RemainderByZero(op.stable(tables)) } - AssertKind::ResumedAfterReturn(generator) => { - stable_mir::mir::AssertMessage::ResumedAfterReturn(generator.stable(tables)) + AssertKind::ResumedAfterReturn(coroutine) => { + stable_mir::mir::AssertMessage::ResumedAfterReturn(coroutine.stable(tables)) } - AssertKind::ResumedAfterPanic(generator) => { - stable_mir::mir::AssertMessage::ResumedAfterPanic(generator.stable(tables)) + AssertKind::ResumedAfterPanic(coroutine) => { + stable_mir::mir::AssertMessage::ResumedAfterPanic(coroutine.stable(tables)) } AssertKind::MisalignedPointerDereference { required, found } => { stable_mir::mir::AssertMessage::MisalignedPointerDereference { @@ -736,7 +863,7 @@ fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { match self { mir::AggregateKind::Array(ty) => { - stable_mir::mir::AggregateKind::Array(tables.intern_ty(*ty)) + stable_mir::mir::AggregateKind::Array(ty.stable(tables)) } mir::AggregateKind::Tuple => stable_mir::mir::AggregateKind::Tuple, mir::AggregateKind::Adt(def_id, var_idx, generic_arg, user_ty_index, field_idx) => { @@ -754,9 +881,9 @@ 
generic_arg.stable(tables), ) } - mir::AggregateKind::Generator(def_id, generic_arg, movability) => { - stable_mir::mir::AggregateKind::Generator( - tables.generator_def(*def_id), + mir::AggregateKind::Coroutine(def_id, generic_arg, movability) => { + stable_mir::mir::AggregateKind::Coroutine( + tables.coroutine_def(*def_id), generic_arg.stable(tables), movability.stable(tables), ) @@ -765,20 +892,30 @@ } } -impl<'tcx> Stable<'tcx> for rustc_hir::GeneratorKind { - type T = stable_mir::mir::GeneratorKind; +impl<'tcx> Stable<'tcx> for rustc_hir::CoroutineSource { + type T = stable_mir::mir::CoroutineSource; fn stable(&self, _: &mut Tables<'tcx>) -> Self::T { - use rustc_hir::{AsyncGeneratorKind, GeneratorKind}; + use rustc_hir::CoroutineSource; match self { - GeneratorKind::Async(async_gen) => { - let async_gen = match async_gen { - AsyncGeneratorKind::Block => stable_mir::mir::AsyncGeneratorKind::Block, - AsyncGeneratorKind::Closure => stable_mir::mir::AsyncGeneratorKind::Closure, - AsyncGeneratorKind::Fn => stable_mir::mir::AsyncGeneratorKind::Fn, - }; - stable_mir::mir::GeneratorKind::Async(async_gen) + CoroutineSource::Block => stable_mir::mir::CoroutineSource::Block, + CoroutineSource::Closure => stable_mir::mir::CoroutineSource::Closure, + CoroutineSource::Fn => stable_mir::mir::CoroutineSource::Fn, + } + } +} + +impl<'tcx> Stable<'tcx> for rustc_hir::CoroutineKind { + type T = stable_mir::mir::CoroutineKind; + fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { + use rustc_hir::CoroutineKind; + match self { + CoroutineKind::Async(source) => { + stable_mir::mir::CoroutineKind::Async(source.stable(tables)) } - GeneratorKind::Gen => stable_mir::mir::GeneratorKind::Gen, + CoroutineKind::Gen(source) => { + stable_mir::mir::CoroutineKind::Gen(source.stable(tables)) + } + CoroutineKind::Coroutine => stable_mir::mir::CoroutineKind::Coroutine, } } } @@ -806,11 +943,20 @@ impl<'tcx> Stable<'tcx> for mir::Terminator<'tcx> { type T = stable_mir::mir::Terminator; fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { - use rustc_middle::mir::TerminatorKind::*; use stable_mir::mir::Terminator; - match &self.kind { - Goto { target } => Terminator::Goto { target: target.as_usize() }, - SwitchInt { discr, targets } => Terminator::SwitchInt { + Terminator { kind: self.kind.stable(tables), span: self.source_info.span.stable(tables) } + } +} + +impl<'tcx> Stable<'tcx> for mir::TerminatorKind<'tcx> { + type T = stable_mir::mir::TerminatorKind; + fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { + use stable_mir::mir::TerminatorKind; + match self { + mir::TerminatorKind::Goto { target } => { + TerminatorKind::Goto { target: target.as_usize() } + } + mir::TerminatorKind::SwitchInt { discr, targets } => TerminatorKind::SwitchInt { discr: discr.stable(tables), targets: targets .iter() @@ -821,42 +967,60 @@ .collect(), otherwise: targets.otherwise().as_usize(), }, - UnwindResume => Terminator::Resume, - UnwindTerminate(_) => Terminator::Abort, - Return => Terminator::Return, - Unreachable => Terminator::Unreachable, - Drop { place, target, unwind, replace: _ } => Terminator::Drop { - place: place.stable(tables), - target: target.as_usize(), - unwind: unwind.stable(tables), - }, - Call { func, args, destination, target, unwind, call_source: _, fn_span: _ } => { - Terminator::Call { - func: func.stable(tables), - args: args.iter().map(|arg| arg.stable(tables)).collect(), - destination: destination.stable(tables), - target: target.map(|t| t.as_usize()), + mir::TerminatorKind::UnwindResume => 
TerminatorKind::Resume, + mir::TerminatorKind::UnwindTerminate(_) => TerminatorKind::Abort, + mir::TerminatorKind::Return => TerminatorKind::Return, + mir::TerminatorKind::Unreachable => TerminatorKind::Unreachable, + mir::TerminatorKind::Drop { place, target, unwind, replace: _ } => { + TerminatorKind::Drop { + place: place.stable(tables), + target: target.as_usize(), unwind: unwind.stable(tables), } } - Assert { cond, expected, msg, target, unwind } => Terminator::Assert { - cond: cond.stable(tables), - expected: *expected, - msg: msg.stable(tables), - target: target.as_usize(), + mir::TerminatorKind::Call { + func, + args, + destination, + target, + unwind, + call_source: _, + fn_span: _, + } => TerminatorKind::Call { + func: func.stable(tables), + args: args.iter().map(|arg| arg.stable(tables)).collect(), + destination: destination.stable(tables), + target: target.map(|t| t.as_usize()), unwind: unwind.stable(tables), }, - InlineAsm { template, operands, options, line_spans, destination, unwind } => { - Terminator::InlineAsm { - template: format!("{template:?}"), - operands: operands.iter().map(|operand| operand.stable(tables)).collect(), - options: format!("{options:?}"), - line_spans: format!("{line_spans:?}"), - destination: destination.map(|d| d.as_usize()), + mir::TerminatorKind::Assert { cond, expected, msg, target, unwind } => { + TerminatorKind::Assert { + cond: cond.stable(tables), + expected: *expected, + msg: msg.stable(tables), + target: target.as_usize(), unwind: unwind.stable(tables), } } - Yield { .. } | GeneratorDrop | FalseEdge { .. } | FalseUnwind { .. } => unreachable!(), + mir::TerminatorKind::InlineAsm { + template, + operands, + options, + line_spans, + destination, + unwind, + } => TerminatorKind::InlineAsm { + template: format!("{template:?}"), + operands: operands.iter().map(|operand| operand.stable(tables)).collect(), + options: format!("{options:?}"), + line_spans: format!("{line_spans:?}"), + destination: destination.map(|d| d.as_usize()), + unwind: unwind.stable(tables), + }, + mir::TerminatorKind::Yield { .. } + | mir::TerminatorKind::CoroutineDrop + | mir::TerminatorKind::FalseEdge { .. } + | mir::TerminatorKind::FalseUnwind { .. 
} => unreachable!(), } } } @@ -877,7 +1041,7 @@ use stable_mir::ty::GenericArgKind; match self { ty::GenericArgKind::Lifetime(region) => GenericArgKind::Lifetime(region.stable(tables)), - ty::GenericArgKind::Type(ty) => GenericArgKind::Type(tables.intern_ty(*ty)), + ty::GenericArgKind::Type(ty) => GenericArgKind::Type(ty.stable(tables)), ty::GenericArgKind::Const(cnst) => GenericArgKind::Const(cnst.stable(tables)), } } @@ -923,11 +1087,7 @@ use stable_mir::ty::{Abi, FnSig}; FnSig { - inputs_and_output: self - .inputs_and_output - .iter() - .map(|ty| tables.intern_ty(ty)) - .collect(), + inputs_and_output: self.inputs_and_output.iter().map(|ty| ty.stable(tables)).collect(), c_variadic: self.c_variadic, unsafety: self.unsafety.stable(tables), abi: match self.abi { @@ -1065,9 +1225,16 @@ } impl<'tcx> Stable<'tcx> for Ty<'tcx> { + type T = stable_mir::ty::Ty; + fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { + tables.intern_ty(*self) + } +} + +impl<'tcx> Stable<'tcx> for ty::TyKind<'tcx> { type T = stable_mir::ty::TyKind; fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { - match self.kind() { + match self { ty::Bool => TyKind::RigidTy(RigidTy::Bool), ty::Char => TyKind::RigidTy(RigidTy::Char), ty::Int(int_ty) => TyKind::RigidTy(RigidTy::Int(int_ty.stable(tables))), @@ -1080,15 +1247,15 @@ ty::Foreign(def_id) => TyKind::RigidTy(RigidTy::Foreign(tables.foreign_def(*def_id))), ty::Str => TyKind::RigidTy(RigidTy::Str), ty::Array(ty, constant) => { - TyKind::RigidTy(RigidTy::Array(tables.intern_ty(*ty), constant.stable(tables))) + TyKind::RigidTy(RigidTy::Array(ty.stable(tables), constant.stable(tables))) } - ty::Slice(ty) => TyKind::RigidTy(RigidTy::Slice(tables.intern_ty(*ty))), + ty::Slice(ty) => TyKind::RigidTy(RigidTy::Slice(ty.stable(tables))), ty::RawPtr(ty::TypeAndMut { ty, mutbl }) => { - TyKind::RigidTy(RigidTy::RawPtr(tables.intern_ty(*ty), mutbl.stable(tables))) + TyKind::RigidTy(RigidTy::RawPtr(ty.stable(tables), mutbl.stable(tables))) } ty::Ref(region, ty, mutbl) => TyKind::RigidTy(RigidTy::Ref( region.stable(tables), - tables.intern_ty(*ty), + ty.stable(tables), mutbl.stable(tables), )), ty::FnDef(def_id, generic_args) => { @@ -1109,15 +1276,15 @@ tables.closure_def(*def_id), generic_args.stable(tables), )), - ty::Generator(def_id, generic_args, movability) => TyKind::RigidTy(RigidTy::Generator( - tables.generator_def(*def_id), + ty::Coroutine(def_id, generic_args, movability) => TyKind::RigidTy(RigidTy::Coroutine( + tables.coroutine_def(*def_id), generic_args.stable(tables), movability.stable(tables), )), ty::Never => TyKind::RigidTy(RigidTy::Never), - ty::Tuple(fields) => TyKind::RigidTy(RigidTy::Tuple( - fields.iter().map(|ty| tables.intern_ty(ty)).collect(), - )), + ty::Tuple(fields) => { + TyKind::RigidTy(RigidTy::Tuple(fields.iter().map(|ty| ty.stable(tables)).collect())) + } ty::Alias(alias_kind, alias_ty) => { TyKind::Alias(alias_kind.stable(tables), alias_ty.stable(tables)) } @@ -1125,7 +1292,7 @@ ty::Bound(debruijn_idx, bound_ty) => { TyKind::Bound(debruijn_idx.as_usize(), bound_ty.stable(tables)) } - ty::Placeholder(..) | ty::GeneratorWitness(..) | ty::Infer(_) | ty::Error(_) => { + ty::Placeholder(..) | ty::CoroutineWitness(..) 
| ty::Infer(_) | ty::Error(_) => { unreachable!(); } } @@ -1136,32 +1303,36 @@ type T = stable_mir::ty::Const; fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { - stable_mir::ty::Const { - literal: match self.kind() { - ty::Value(val) => { - let const_val = tables.tcx.valtree_to_const_val((self.ty(), val)); + let kind = match self.kind() { + ty::Value(val) => { + let const_val = tables.tcx.valtree_to_const_val((self.ty(), val)); + if matches!(const_val, mir::ConstValue::ZeroSized) { + ConstantKind::ZeroSized + } else { stable_mir::ty::ConstantKind::Allocated(alloc::new_allocation( self.ty(), const_val, tables, )) } - ty::ParamCt(param) => stable_mir::ty::ConstantKind::Param(param.stable(tables)), - ty::ErrorCt(_) => unreachable!(), - ty::InferCt(_) => unreachable!(), - ty::BoundCt(_, _) => unimplemented!(), - ty::PlaceholderCt(_) => unimplemented!(), - ty::Unevaluated(uv) => { - stable_mir::ty::ConstantKind::Unevaluated(stable_mir::ty::UnevaluatedConst { - def: tables.const_def(uv.def), - args: uv.args.stable(tables), - promoted: None, - }) - } - ty::ExprCt(_) => unimplemented!(), - }, - ty: tables.intern_ty(self.ty()), - } + } + ty::ParamCt(param) => stable_mir::ty::ConstantKind::Param(param.stable(tables)), + ty::ErrorCt(_) => unreachable!(), + ty::InferCt(_) => unreachable!(), + ty::BoundCt(_, _) => unimplemented!(), + ty::PlaceholderCt(_) => unimplemented!(), + ty::Unevaluated(uv) => { + stable_mir::ty::ConstantKind::Unevaluated(stable_mir::ty::UnevaluatedConst { + def: tables.const_def(uv.def), + args: uv.args.stable(tables), + promoted: None, + }) + } + ty::ExprCt(_) => unimplemented!(), + }; + let ty = self.ty().stable(tables); + let id = tables.intern_const(mir::Const::Ty(*self)); + Const::new(kind, ty, id) } } @@ -1246,22 +1417,28 @@ fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { match *self { mir::Const::Ty(c) => c.stable(tables), - mir::Const::Unevaluated(unev_const, ty) => stable_mir::ty::Const { - literal: stable_mir::ty::ConstantKind::Unevaluated( - stable_mir::ty::UnevaluatedConst { + mir::Const::Unevaluated(unev_const, ty) => { + let kind = + stable_mir::ty::ConstantKind::Unevaluated(stable_mir::ty::UnevaluatedConst { def: tables.const_def(unev_const.def), args: unev_const.args.stable(tables), promoted: unev_const.promoted.map(|u| u.as_u32()), - }, - ), - ty: tables.intern_ty(ty), - }, - mir::Const::Val(val, ty) => stable_mir::ty::Const { - literal: stable_mir::ty::ConstantKind::Allocated(alloc::new_allocation( - ty, val, tables, - )), - ty: tables.intern_ty(ty), - }, + }); + let ty = ty.stable(tables); + let id = tables.intern_const(*self); + Const::new(kind, ty, id) + } + mir::Const::Val(val, ty) if matches!(val, mir::ConstValue::ZeroSized) => { + let ty = ty.stable(tables); + let id = tables.intern_const(*self); + Const::new(ConstantKind::ZeroSized, ty, id) + } + mir::Const::Val(val, ty) => { + let kind = ConstantKind::Allocated(alloc::new_allocation(ty, val, tables)); + let ty = ty.stable(tables); + let id = tables.intern_const(*self); + Const::new(kind, ty, id) + } } } } @@ -1375,30 +1552,32 @@ type T = stable_mir::ty::ClauseKind; fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { - use ty::ClauseKind::*; + use ty::ClauseKind; match *self { - Trait(trait_object) => stable_mir::ty::ClauseKind::Trait(trait_object.stable(tables)), - RegionOutlives(region_outlives) => { + ClauseKind::Trait(trait_object) => { + stable_mir::ty::ClauseKind::Trait(trait_object.stable(tables)) + } + ClauseKind::RegionOutlives(region_outlives) => { 
stable_mir::ty::ClauseKind::RegionOutlives(region_outlives.stable(tables)) } - TypeOutlives(type_outlives) => { + ClauseKind::TypeOutlives(type_outlives) => { let ty::OutlivesPredicate::<_, _>(a, b) = type_outlives; stable_mir::ty::ClauseKind::TypeOutlives(stable_mir::ty::OutlivesPredicate( - tables.intern_ty(a), + a.stable(tables), b.stable(tables), )) } - Projection(projection_predicate) => { + ClauseKind::Projection(projection_predicate) => { stable_mir::ty::ClauseKind::Projection(projection_predicate.stable(tables)) } - ConstArgHasType(const_, ty) => stable_mir::ty::ClauseKind::ConstArgHasType( + ClauseKind::ConstArgHasType(const_, ty) => stable_mir::ty::ClauseKind::ConstArgHasType( const_.stable(tables), - tables.intern_ty(ty), + ty.stable(tables), ), - WellFormed(generic_arg) => { + ClauseKind::WellFormed(generic_arg) => { stable_mir::ty::ClauseKind::WellFormed(generic_arg.unpack().stable(tables)) } - ConstEvaluatable(const_) => { + ClauseKind::ConstEvaluatable(const_) => { stable_mir::ty::ClauseKind::ConstEvaluatable(const_.stable(tables)) } } @@ -1423,7 +1602,7 @@ fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { let ty::SubtypePredicate { a, b, a_is_expected: _ } = self; - stable_mir::ty::SubtypePredicate { a: tables.intern_ty(*a), b: tables.intern_ty(*b) } + stable_mir::ty::SubtypePredicate { a: a.stable(tables), b: b.stable(tables) } } } @@ -1432,7 +1611,7 @@ fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { let ty::CoercePredicate { a, b } = self; - stable_mir::ty::CoercePredicate { a: tables.intern_ty(*a), b: tables.intern_ty(*b) } + stable_mir::ty::CoercePredicate { a: a.stable(tables), b: b.stable(tables) } } } @@ -1553,3 +1732,38 @@ opaque(self) } } + +impl<'tcx> Stable<'tcx> for ty::Instance<'tcx> { + type T = stable_mir::mir::mono::Instance; + + fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { + let def = tables.instance_def(*self); + let kind = match self.def { + ty::InstanceDef::Item(..) => stable_mir::mir::mono::InstanceKind::Item, + ty::InstanceDef::Intrinsic(..) => stable_mir::mir::mono::InstanceKind::Intrinsic, + ty::InstanceDef::Virtual(..) => stable_mir::mir::mono::InstanceKind::Virtual, + ty::InstanceDef::VTableShim(..) + | ty::InstanceDef::ReifyShim(..) + | ty::InstanceDef::FnPtrAddrShim(..) + | ty::InstanceDef::ClosureOnceShim { .. } + | ty::InstanceDef::ThreadLocalShim(..) + | ty::InstanceDef::DropGlue(..) + | ty::InstanceDef::CloneShim(..) + | ty::InstanceDef::FnPtrShim(..) 
=> stable_mir::mir::mono::InstanceKind::Shim, + }; + stable_mir::mir::mono::Instance { def, kind } + } +} + +impl<'tcx> Stable<'tcx> for MonoItem<'tcx> { + type T = stable_mir::mir::mono::MonoItem; + + fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { + use stable_mir::mir::mono::MonoItem as StableMonoItem; + match self { + MonoItem::Fn(instance) => StableMonoItem::Fn(instance.stable(tables)), + MonoItem::Static(def_id) => StableMonoItem::Static(tables.static_def(*def_id)), + MonoItem::GlobalAsm(item_id) => StableMonoItem::GlobalAsm(opaque(item_id)), + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,19 +3,18 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] -rustc_serialize = { path = "../rustc_serialize" } -rustc_macros = { path = "../rustc_macros" } +# tidy-alphabetical-start +indexmap = { version = "2.0.0" } +md5 = { package = "md-5", version = "0.10.0" } +rustc_arena = { path = "../rustc_arena" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_index = { path = "../rustc_index" } -rustc_arena = { path = "../rustc_arena" } +rustc_macros = { path = "../rustc_macros" } +rustc_serialize = { path = "../rustc_serialize" } scoped-tls = "1.0" -unicode-width = "0.1.4" -cfg-if = "1.0" -tracing = "0.1" sha1 = "0.10.0" sha2 = "0.10.1" -md5 = { package = "md-5", version = "0.10.0" } -indexmap = { version = "2.0.0" } +tracing = "0.1" +unicode-width = "0.1.4" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/analyze_source_file.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/analyze_source_file.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/analyze_source_file.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/analyze_source_file.rs 2023-12-21 16:55:28.000000000 +0000 @@ -33,8 +33,8 @@ (lines, multi_byte_chars, non_narrow_chars) } -cfg_if::cfg_if! { - if #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] { +cfg_match! { + cfg(any(target_arch = "x86", target_arch = "x86_64")) => { fn analyze_source_file_dispatch(src: &str, lines: &mut Vec, multi_byte_chars: &mut Vec, @@ -172,8 +172,8 @@ non_narrow_chars); } } - } else { - + } + _ => { // The target (or compiler version) does not support SSE2 ... fn analyze_source_file_dispatch(src: &str, lines: &mut Vec, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/fatal_error.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/fatal_error.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/fatal_error.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/fatal_error.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,13 +1,11 @@ -/// Used as a return value to signify a fatal error occurred. (It is also -/// used as the argument to panic at the moment, but that will eventually -/// not be true.) +/// Used as a return value to signify a fatal error occurred. #[derive(Copy, Clone, Debug)] #[must_use] pub struct FatalError; -pub struct FatalErrorMarker; +pub use rustc_data_structures::FatalErrorMarker; -// Don't implement Send on FatalError. This makes it impossible to panic!(FatalError). +// Don't implement Send on FatalError. 
This makes it impossible to `panic_any!(FatalError)`. // We don't want to invoke the panic handler and print a backtrace for fatal errors. impl !Send for FatalError {} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/hygiene.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/hygiene.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/hygiene.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/hygiene.rs 2023-12-21 16:55:28.000000000 +0000 @@ -24,16 +24,13 @@ // because getting it wrong can lead to nested `HygieneData::with` calls that // trigger runtime aborts. (Fortunately these are obvious and easy to fix.) +use crate::def_id::{CrateNum, DefId, StableCrateId, CRATE_DEF_ID, LOCAL_CRATE}; use crate::edition::Edition; use crate::symbol::{kw, sym, Symbol}; -use crate::with_session_globals; -use crate::{HashStableContext, Span, DUMMY_SP}; - -use crate::def_id::{CrateNum, DefId, StableCrateId, CRATE_DEF_ID, LOCAL_CRATE}; +use crate::{with_session_globals, HashStableContext, Span, DUMMY_SP}; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_data_structures::stable_hasher::HashingControls; -use rustc_data_structures::stable_hasher::{Hash64, HashStable, StableHasher}; +use rustc_data_structures::stable_hasher::{Hash64, HashStable, HashingControls, StableHasher}; use rustc_data_structures::sync::{Lock, Lrc, WorkerLocal}; use rustc_data_structures::unhash::UnhashMap; use rustc_index::IndexVec; @@ -130,7 +127,7 @@ /// Returns the crate-local part of the [ExpnHash]. /// - /// Used for tests. + /// Used for assertions. #[inline] pub fn local_hash(self) -> Hash64 { self.0.split().1 @@ -173,7 +170,7 @@ pub const ROOT: LocalExpnId = LocalExpnId::from_u32(0); #[inline] - pub fn from_raw(idx: ExpnIndex) -> LocalExpnId { + fn from_raw(idx: ExpnIndex) -> LocalExpnId { LocalExpnId::from_u32(idx.as_u32()) } @@ -205,11 +202,6 @@ } #[inline] - pub fn expn_hash(self) -> ExpnHash { - HygieneData::with(|data| data.local_expn_hash(self)) - } - - #[inline] pub fn expn_data(self) -> ExpnData { HygieneData::with(|data| data.local_expn_data(self).clone()) } @@ -239,13 +231,6 @@ self.to_expn_id().is_descendant_of(ancestor.to_expn_id()) } - /// `expn_id.outer_expn_is_descendant_of(ctxt)` is equivalent to but faster than - /// `expn_id.is_descendant_of(ctxt.outer_expn())`. - #[inline] - pub fn outer_expn_is_descendant_of(self, ctxt: SyntaxContext) -> bool { - self.to_expn_id().outer_expn_is_descendant_of(ctxt) - } - /// Returns span for the macro which originally caused this expansion to happen. /// /// Stops backtracing at include! boundary. @@ -253,12 +238,6 @@ pub fn expansion_cause(self) -> Option { self.to_expn_id().expansion_cause() } - - #[inline] - #[track_caller] - pub fn parent(self) -> LocalExpnId { - self.expn_data().parent.as_local().unwrap() - } } impl ExpnId { @@ -333,7 +312,7 @@ } #[derive(Debug)] -pub struct HygieneData { +pub(crate) struct HygieneData { /// Each expansion should have an associated expansion data, but sometimes there's a delay /// between creation of an expansion ID and obtaining its data (e.g. macros are collected /// first and then resolved later), so we use an `Option` here. 
@@ -384,16 +363,11 @@ } } - pub fn with T>(f: F) -> T { + fn with T>(f: F) -> T { with_session_globals(|session_globals| f(&mut session_globals.hygiene_data.borrow_mut())) } #[inline] - fn local_expn_hash(&self, expn_id: LocalExpnId) -> ExpnHash { - self.local_expn_hashes[expn_id] - } - - #[inline] fn expn_hash(&self, expn_id: ExpnId) -> ExpnHash { match expn_id.as_local() { Some(expn_id) => self.local_expn_hashes[expn_id], @@ -746,7 +720,7 @@ } /// Like `SyntaxContext::adjust`, but also normalizes `self` to macros 2.0. - pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option { + pub(crate) fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option { HygieneData::with(|data| { *self = data.normalize_to_macros_2_0(*self); data.adjust(self, expn_id) @@ -779,7 +753,11 @@ /// ``` /// This returns `None` if the context cannot be glob-adjusted. /// Otherwise, it returns the scope to use when privacy checking (see `adjust` for details). - pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option> { + pub(crate) fn glob_adjust( + &mut self, + expn_id: ExpnId, + glob_span: Span, + ) -> Option> { HygieneData::with(|data| { let mut scope = None; let mut glob_ctxt = data.normalize_to_macros_2_0(glob_span.ctxt()); @@ -803,7 +781,7 @@ /// assert!(self.glob_adjust(expansion, glob_ctxt) == Some(privacy_checking_scope)); /// } /// ``` - pub fn reverse_glob_adjust( + pub(crate) fn reverse_glob_adjust( &mut self, expn_id: ExpnId, glob_span: Span, @@ -858,11 +836,11 @@ } #[inline] - pub fn outer_mark(self) -> (ExpnId, Transparency) { + fn outer_mark(self) -> (ExpnId, Transparency) { HygieneData::with(|data| data.outer_mark(self)) } - pub fn dollar_crate_name(self) -> Symbol { + pub(crate) fn dollar_crate_name(self) -> Symbol { HygieneData::with(|data| data.syntax_context_data[self.0 as usize].dollar_crate_name) } @@ -961,12 +939,12 @@ /// The normal module (`mod`) in which the expanded macro was defined. pub parent_module: Option, /// Suppresses the `unsafe_code` lint for code produced by this macro. - pub allow_internal_unsafe: bool, + pub(crate) allow_internal_unsafe: bool, /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`) for this macro. pub local_inner_macros: bool, /// Should debuginfo for the macro be collapsed to the outermost expansion site (in other /// words, was the macro definition annotated with `#[collapse_debuginfo]`)? - pub collapse_debuginfo: bool, + pub(crate) collapse_debuginfo: bool, } impl !PartialEq for ExpnData {} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,28 +4,36 @@ //! //! - the *span*, represented by [`SpanData`] and related types; //! - source code as represented by a [`SourceMap`]; and -//! - interned strings, represented by [`Symbol`]s, with some common symbols available statically in the [`sym`] module. +//! - interned strings, represented by [`Symbol`]s, with some common symbols available statically +//! in the [`sym`] module. //! -//! Unlike most compilers, the span contains not only the position in the source code, but also various other metadata, -//! such as the edition and macro hygiene. This metadata is stored in [`SyntaxContext`] and [`ExpnData`]. +//! 
Unlike most compilers, the span contains not only the position in the source code, but also +//! various other metadata, such as the edition and macro hygiene. This metadata is stored in +//! [`SyntaxContext`] and [`ExpnData`]. //! //! ## Note //! //! This API is completely unstable and subject to change. +// tidy-alphabetical-start +#![allow(internal_features)] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![deny(rustc::diagnostic_outside_of_impl)] +#![deny(rustc::untranslatable_diagnostic)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![feature(array_windows)] +#![feature(cfg_match)] +#![feature(core_io_borrowed_buf)] #![feature(if_let_guard)] -#![feature(negative_impls)] -#![feature(min_specialization)] -#![feature(rustc_attrs)] #![feature(let_chains)] -#![feature(round_char_boundary)] -#![feature(read_buf)] +#![feature(min_specialization)] +#![feature(negative_impls)] #![feature(new_uninit)] -#![deny(rustc::untranslatable_diagnostic)] -#![deny(rustc::diagnostic_outside_of_impl)] -#![allow(internal_features)] +#![feature(read_buf)] +#![feature(round_char_boundary)] +#![feature(rustc_attrs)] +// tidy-alphabetical-end #[macro_use] extern crate rustc_macros; @@ -112,7 +120,6 @@ } } -#[inline] pub fn create_session_globals_then(edition: Edition, f: impl FnOnce() -> R) -> R { assert!( !SESSION_GLOBALS.is_set(), @@ -123,7 +130,6 @@ SESSION_GLOBALS.set(&session_globals, f) } -#[inline] pub fn set_session_globals_then(session_globals: &SessionGlobals, f: impl FnOnce() -> R) -> R { assert!( !SESSION_GLOBALS.is_set(), @@ -133,7 +139,6 @@ SESSION_GLOBALS.set(session_globals, f) } -#[inline] pub fn create_default_session_if_not_set_then(f: F) -> R where F: FnOnce(&SessionGlobals) -> R, @@ -141,7 +146,6 @@ create_session_if_not_set_then(edition::DEFAULT_EDITION, f) } -#[inline] pub fn create_session_if_not_set_then(edition: Edition, f: F) -> R where F: FnOnce(&SessionGlobals) -> R, @@ -154,7 +158,6 @@ } } -#[inline] pub fn with_session_globals(f: F) -> R where F: FnOnce(&SessionGlobals) -> R, @@ -162,7 +165,6 @@ SESSION_GLOBALS.with(f) } -#[inline] pub fn create_default_session_globals_then(f: impl FnOnce() -> R) -> R { create_session_globals_then(edition::DEFAULT_EDITION, f) } @@ -174,8 +176,7 @@ // FIXME: We should use this enum or something like it to get rid of the // use of magic `/rust/1.x/...` paths across the board. -#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd)] -#[derive(Decodable)] +#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Decodable)] pub enum RealFileName { LocalPath(PathBuf), /// For remapped paths (namely paths into libstd that have been mapped @@ -212,8 +213,8 @@ RealFileName::Remapped { ref local_path, ref virtual_name } => encoder .emit_enum_variant(1, |encoder| { - // For privacy and build reproducibility, we must not embed host-dependant path in artifacts - // if they have been remapped by --remap-path-prefix + // For privacy and build reproducibility, we must not embed host-dependant path + // in artifacts if they have been remapped by --remap-path-prefix assert!(local_path.is_none()); local_path.encode(encoder); virtual_name.encode(encoder); @@ -280,8 +281,7 @@ } /// Differentiates between real files and common virtual files. -#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash)] -#[derive(Decodable, Encodable)] +#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, Decodable, Encodable)] pub enum FileName { Real(RealFileName), /// Call to `quote!`. 
@@ -292,8 +292,6 @@ // FIXME(jseyfried) MacroExpansion(Hash64), ProcMacroSourceCode(Hash64), - /// Strings provided as `--cfg [cfgspec]` stored in a `crate_cfg`. - CfgSpec(Hash64), /// Strings provided as crate attributes in the CLI. CliCrateAttr(Hash64), /// Custom sources for explicit parser calls from plugins and drivers. @@ -305,7 +303,6 @@ impl From for FileName { fn from(p: PathBuf) -> Self { - assert!(!p.to_string_lossy().ends_with('>')); FileName::Real(RealFileName::LocalPath(p)) } } @@ -339,7 +336,6 @@ MacroExpansion(_) => write!(fmt, ""), Anon(_) => write!(fmt, ""), ProcMacroSourceCode(_) => write!(fmt, ""), - CfgSpec(_) => write!(fmt, ""), CliCrateAttr(_) => write!(fmt, ""), Custom(ref s) => write!(fmt, "<{s}>"), DocTest(ref path, _) => write!(fmt, "{}", path.display()), @@ -365,7 +361,6 @@ Anon(_) | MacroExpansion(_) | ProcMacroSourceCode(_) - | CfgSpec(_) | CliCrateAttr(_) | Custom(_) | QuoteExpansion(_) @@ -374,7 +369,7 @@ } } - pub fn prefer_remapped(&self) -> FileNameDisplay<'_> { + pub fn prefer_remapped_unconditionaly(&self) -> FileNameDisplay<'_> { FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Remapped } } @@ -955,7 +950,7 @@ /// Produces a span with the same location as `self` and context produced by a macro with the /// given ID and transparency, assuming that macro was defined directly and not produced by /// some other macro (which is the case for built-in and procedural macros). - pub fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { + fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { self.with_ctxt(SyntaxContext::root().apply_mark(expn_id, transparency)) } @@ -1530,7 +1525,8 @@ }) } - /// This converts the `lines` field to contain `SourceFileLines::Lines` if needed and freezes it. + /// This converts the `lines` field to contain `SourceFileLines::Lines` if needed and freezes + /// it. fn convert_diffs_to_lines_frozen(&self) { let mut guard = if let Some(guard) = self.lines.try_write() { guard } else { return }; @@ -2248,7 +2244,10 @@ /// Useful type to use with `Result<>` indicate that an error has already /// been reported to the user, so no need to continue checking. -#[derive(Clone, Copy, Debug, Encodable, Decodable, Hash, PartialEq, Eq, PartialOrd, Ord)] +/// +/// The `()` field is necessary: it is non-`pub`, which means values of this +/// type cannot be constructed outside of this crate. 
+#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] #[derive(HashStable_Generic)] pub struct ErrorGuaranteed(()); @@ -2260,3 +2259,21 @@ ErrorGuaranteed(()) } } + +impl Encodable for ErrorGuaranteed { + #[inline] + fn encode(&self, _e: &mut E) { + panic!( + "should never serialize an `ErrorGuaranteed`, as we do not write metadata or \ + incremental caches in case errors occurred" + ) + } +} +impl Decodable for ErrorGuaranteed { + #[inline] + fn decode(_d: &mut D) -> ErrorGuaranteed { + panic!( + "`ErrorGuaranteed` should never have been serialized to metadata or incremental caches" + ) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/source_map/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/source_map/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/source_map/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/source_map/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -351,7 +351,10 @@ fn path_prefix_remapping() { // Relative to relative { - let mapping = &FilePathMapping::new(vec![(path("abc/def"), path("foo"))]); + let mapping = &FilePathMapping::new( + vec![(path("abc/def"), path("foo"))], + FileNameDisplayPreference::Remapped, + ); assert_eq!(map_path_prefix(mapping, "abc/def/src/main.rs"), path_str("foo/src/main.rs")); assert_eq!(map_path_prefix(mapping, "abc/def"), path_str("foo")); @@ -359,7 +362,10 @@ // Relative to absolute { - let mapping = &FilePathMapping::new(vec![(path("abc/def"), path("/foo"))]); + let mapping = &FilePathMapping::new( + vec![(path("abc/def"), path("/foo"))], + FileNameDisplayPreference::Remapped, + ); assert_eq!(map_path_prefix(mapping, "abc/def/src/main.rs"), path_str("/foo/src/main.rs")); assert_eq!(map_path_prefix(mapping, "abc/def"), path_str("/foo")); @@ -367,7 +373,10 @@ // Absolute to relative { - let mapping = &FilePathMapping::new(vec![(path("/abc/def"), path("foo"))]); + let mapping = &FilePathMapping::new( + vec![(path("/abc/def"), path("foo"))], + FileNameDisplayPreference::Remapped, + ); assert_eq!(map_path_prefix(mapping, "/abc/def/src/main.rs"), path_str("foo/src/main.rs")); assert_eq!(map_path_prefix(mapping, "/abc/def"), path_str("foo")); @@ -375,7 +384,10 @@ // Absolute to absolute { - let mapping = &FilePathMapping::new(vec![(path("/abc/def"), path("/foo"))]); + let mapping = &FilePathMapping::new( + vec![(path("/abc/def"), path("/foo"))], + FileNameDisplayPreference::Remapped, + ); assert_eq!(map_path_prefix(mapping, "/abc/def/src/main.rs"), path_str("/foo/src/main.rs")); assert_eq!(map_path_prefix(mapping, "/abc/def"), path_str("/foo")); @@ -385,8 +397,10 @@ #[test] fn path_prefix_remapping_expand_to_absolute() { // "virtual" working directory is relative path - let mapping = - &FilePathMapping::new(vec![(path("/foo"), path("FOO")), (path("/bar"), path("BAR"))]); + let mapping = &FilePathMapping::new( + vec![(path("/foo"), path("FOO")), (path("/bar"), path("BAR"))], + FileNameDisplayPreference::Remapped, + ); let working_directory = path("/foo"); let working_directory = RealFileName::Remapped { local_path: Some(working_directory.clone()), @@ -487,8 +501,10 @@ fn path_prefix_remapping_reverse() { // Ignores options without alphanumeric chars. 
{ - let mapping = - &FilePathMapping::new(vec![(path("abc"), path("/")), (path("def"), path("."))]); + let mapping = &FilePathMapping::new( + vec![(path("abc"), path("/")), (path("def"), path("."))], + FileNameDisplayPreference::Remapped, + ); assert_eq!(reverse_map_prefix(mapping, "/hello.rs"), None); assert_eq!(reverse_map_prefix(mapping, "./hello.rs"), None); @@ -496,20 +512,20 @@ // Returns `None` if multiple options match. { - let mapping = &FilePathMapping::new(vec![ - (path("abc"), path("/redacted")), - (path("def"), path("/redacted")), - ]); + let mapping = &FilePathMapping::new( + vec![(path("abc"), path("/redacted")), (path("def"), path("/redacted"))], + FileNameDisplayPreference::Remapped, + ); assert_eq!(reverse_map_prefix(mapping, "/redacted/hello.rs"), None); } // Distinct reverse mappings. { - let mapping = &FilePathMapping::new(vec![ - (path("abc"), path("/redacted")), - (path("def/ghi"), path("/fake/dir")), - ]); + let mapping = &FilePathMapping::new( + vec![(path("abc"), path("/redacted")), (path("def/ghi"), path("/fake/dir"))], + FileNameDisplayPreference::Remapped, + ); assert_eq!( reverse_map_prefix(mapping, "/redacted/path/hello.rs"), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/source_map.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/source_map.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/source_map.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/source_map.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,21 +9,16 @@ //! within the `SourceMap`, which upon request can be converted to line and column //! information, source code snippets, etc. -pub use crate::hygiene::{ExpnData, ExpnKind}; -pub use crate::*; - +use crate::*; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::{Hash128, Hash64, StableHasher}; use rustc_data_structures::sync::{IntoDynSyncSend, Lrc, MappedReadGuard, ReadGuard, RwLock}; use std::cmp; +use std::fs; use std::hash::Hash; +use std::io::{self, BorrowedBuf, Read}; use std::path::{self, Path, PathBuf}; -use std::fs; -use std::io; -use std::io::BorrowedBuf; -use std::io::Read; - #[cfg(test)] mod tests; @@ -41,7 +36,7 @@ } } -pub mod monotonic { +mod monotonic { use std::ops::{Deref, DerefMut}; /// A `MonotonicVec` is a `Vec` which can only be grown. 
@@ -51,18 +46,14 @@ // field is inaccessible pub struct MonotonicVec(Vec); impl MonotonicVec { - pub fn new(val: Vec) -> MonotonicVec { - MonotonicVec(val) - } - - pub fn push(&mut self, val: T) { + pub(super) fn push(&mut self, val: T) { self.0.push(val); } } impl Default for MonotonicVec { fn default() -> Self { - MonotonicVec::new(vec![]) + MonotonicVec(vec![]) } } @@ -207,7 +198,7 @@ // #[derive(Default)] -pub(super) struct SourceMapFiles { +struct SourceMapFiles { source_files: monotonic::MonotonicVec>, stable_id_to_source_file: FxHashMap>, } @@ -466,33 +457,6 @@ self.span_to_string(sp, FileNameDisplayPreference::Remapped) } - /// Format the span location suitable for pretty printing annotations with relative line numbers - pub fn span_to_relative_line_string(&self, sp: Span, relative_to: Span) -> String { - if self.files.borrow().source_files.is_empty() || sp.is_dummy() || relative_to.is_dummy() { - return "no-location".to_string(); - } - - let lo = self.lookup_char_pos(sp.lo()); - let hi = self.lookup_char_pos(sp.hi()); - let offset = self.lookup_char_pos(relative_to.lo()); - - if lo.file.name != offset.file.name || !relative_to.contains(sp) { - return self.span_to_embeddable_string(sp); - } - - let lo_line = lo.line.saturating_sub(offset.line); - let hi_line = hi.line.saturating_sub(offset.line); - - format!( - "{}:+{}:{}: +{}:{}", - lo.file.name.display(FileNameDisplayPreference::Remapped), - lo_line, - lo.col.to_usize() + 1, - hi_line, - hi.col.to_usize() + 1, - ) - } - /// Format the span location to be printed in diagnostics. Must not be emitted /// to build artifacts as this may leak local file paths. Use span_to_embeddable_string /// for string suitable for embedding. @@ -1124,16 +1088,13 @@ impl FilePathMapping { pub fn empty() -> FilePathMapping { - FilePathMapping::new(Vec::new()) + FilePathMapping::new(Vec::new(), FileNameDisplayPreference::Local) } - pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping { - let filename_display_for_diagnostics = if mapping.is_empty() { - FileNameDisplayPreference::Local - } else { - FileNameDisplayPreference::Remapped - }; - + pub fn new( + mapping: Vec<(PathBuf, PathBuf)>, + filename_display_for_diagnostics: FileNameDisplayPreference, + ) -> FilePathMapping { FilePathMapping { mapping, filename_display_for_diagnostics } } @@ -1287,6 +1248,27 @@ } } + /// Expand a relative path to an absolute path **without** remapping taken into account. + /// + /// The resulting `RealFileName` will have its `virtual_path` portion erased if + /// possible (i.e. if there's also a remapped path). + pub fn to_local_embeddable_absolute_path( + &self, + file_path: RealFileName, + working_directory: &RealFileName, + ) -> RealFileName { + let file_path = file_path.local_path_if_available(); + if file_path.is_absolute() { + // No remapping has applied to this path and it is absolute, + // so the working directory cannot influence it either, so + // we are done. + return RealFileName::LocalPath(file_path.to_path_buf()); + } + debug_assert!(file_path.is_relative()); + let working_directory = working_directory.local_path_if_available(); + RealFileName::LocalPath(Path::new(working_directory).join(file_path)) + } + /// Attempts to (heuristically) reverse a prefix mapping. 
/// /// Returns [`Some`] if there is exactly one mapping where the "to" part is diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/span_encoding.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/span_encoding.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/span_encoding.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/span_encoding.rs 2023-12-21 16:55:28.000000000 +0000 @@ -126,7 +126,7 @@ return Span { lo_or_index: lo2, len_with_tag_or_marker: PARENT_TAG | len as u16, - ctxt_or_parent_or_marker: parent2 as u16 + ctxt_or_parent_or_marker: parent2 as u16, }; } } @@ -212,6 +212,7 @@ /// This function is used as a fast path when decoding the full `SpanData` is not necessary. /// It's a cut-down version of `data_untracked`. + #[cfg_attr(not(test), rustc_diagnostic_item = "SpanCtxt")] #[inline] pub fn ctxt(self) -> SyntaxContext { if self.len_with_tag_or_marker != BASE_LEN_INTERNED_MARKER { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/symbol/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/symbol/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/symbol/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/symbol/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,7 +4,7 @@ #[test] fn interner_tests() { - let i = Interner::default(); + let i = Interner::prefill(&[]); // first one is zero: assert_eq!(i.intern("dog"), Symbol::new(0)); // re-use gets the same entry: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/symbol.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/symbol.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/symbol.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_span/src/symbol.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,7 +3,7 @@ //! type, and vice versa. use rustc_arena::DroplessArena; -use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::FxIndexSet; use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey}; use rustc_data_structures::sync::Lock; use rustc_macros::HashStable_Generic; @@ -20,8 +20,8 @@ // The proc macro code for this is in `compiler/rustc_macros/src/symbols.rs`. symbols! { - // After modifying this list adjust `is_special`, `is_used_keyword`/`is_unused_keyword`, - // this should be rarely necessary though if the keywords are kept in alphabetic order. + // If you modify this list, adjust `is_special` and `is_used_keyword`/`is_unused_keyword`. + // But this should rarely be necessary if the keywords are kept in alphabetic order. Keywords { // Special reserved identifiers used internally for elided lifetimes, // unnamed method parameters, crate root module, error recovery etc. 
@@ -98,6 +98,7 @@ Builtin: "builtin", Catch: "catch", Default: "default", + Gen: "gen", MacroRules: "macro_rules", Raw: "raw", Union: "union", @@ -129,9 +130,11 @@ Alignment, Any, Arc, + ArcWeak, Argument, ArgumentMethods, Arguments, + ArrayIntoIter, AsMut, AsRef, AssertParamIsClone, @@ -164,6 +167,7 @@ Capture, Center, Clone, + Command, ConstParamTy, Context, Continue, @@ -171,6 +175,7 @@ Count, Cow, Debug, + DebugStruct, Decodable, Decoder, DecorateLint, @@ -189,11 +194,16 @@ Error, File, FileType, + Fn, + FnMut, + FnOnce, FormatSpec, Formatter, From, FromIterator, FromResidual, + FsOpenOptions, + FsPermissions, Future, FutureOutput, FxHashMap, @@ -207,16 +217,23 @@ Implied, IndexOutput, Input, + Instant, Into, IntoDiagnostic, IntoFuture, IntoIterator, + IoLines, IoRead, + IoSeek, IoWrite, IpAddr, IrTyKind, Is, + Item, ItemContext, + IterEmpty, + IterOnce, + IterPeekable, Iterator, IteratorItem, Layout, @@ -227,6 +244,7 @@ Mutex, MutexGuard, N, + NonNull, NonZeroI128, NonZeroI16, NonZeroI32, @@ -259,15 +277,19 @@ ProcMacro, ProceduralMasqueradeDummyType, Range, + RangeBounds, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive, Rc, + RcWeak, Ready, Receiver, RefCell, + RefCellRef, + RefCellRefMut, Relaxed, Release, Result, @@ -284,7 +306,9 @@ Send, SeqCst, SliceIndex, + SliceIter, Some, + SpanCtxt, String, StructuralEq, StructuralPartialEq, @@ -295,6 +319,7 @@ ToOwned, ToString, TokenStream, + Trait, Try, TryCaptureGeneric, TryCapturePrintable, @@ -561,6 +586,7 @@ constant, constructor, context, + convert_identity, copy, copy_closures, copy_nonoverlapping, @@ -571,6 +597,10 @@ core_panic_2015_macro, core_panic_2021_macro, core_panic_macro, + coroutine, + coroutine_clone, + coroutine_state, + coroutines, cosf32, cosf64, count, @@ -616,6 +646,7 @@ declare_lint_pass, decode, default_alloc_error_handler, + default_fn, default_lib_allocator, default_method_body_is_const, default_type_parameter_fallback, @@ -628,6 +659,7 @@ deref, deref_method, deref_mut, + deref_mut_method, deref_target, derive, derive_const, @@ -687,6 +719,7 @@ encode, end, env, + env_CFG_RELEASE: env!("CFG_RELEASE"), eprint_macro, eprintln_macro, eq, @@ -777,22 +810,24 @@ from_desugaring, from_fn, from_iter, + from_iter_fn, from_output, from_residual, from_size_align_unchecked, + from_str_method, from_usize, from_yeet, + fs_create_dir, fsub_fast, fundamental, future, future_trait, gdb_script_file, ge, + gen_blocks, gen_future, gen_kill, - generator, generator_clone, - generator_state, generators, generic_arg_infer, generic_assert, @@ -862,12 +897,16 @@ inline_const_pat, inout, instruction_set, - integer_: "integer", + integer_: "integer", // underscore to avoid clashing with the function `sym::integer` below integral, into_future, into_iter, intra_doc_pointers, intrinsics, + intrinsics_unaligned_volatile_load, + intrinsics_unaligned_volatile_store, + io_stderr, + io_stdout, irrefutable_let_patterns, isa_attribute, isize, @@ -879,6 +918,7 @@ iter, iter_mut, iter_repeat, + iterator, iterator_collect_fn, kcfi, keyword, @@ -926,6 +966,7 @@ log_syntax, logf32, logf64, + loongarch_target_feature, loop_break_value, lt, macro_at_most_once_rep, @@ -962,6 +1003,7 @@ mem_replace, mem_size_of, mem_size_of_val, + mem_swap, mem_uninitialized, mem_variant_count, mem_zeroed, @@ -1075,6 +1117,7 @@ off, offset, offset_of, + offset_of_enum, omit_gdb_pretty_printer_section, on, on_unimplemented, @@ -1091,6 +1134,7 @@ options, or, or_patterns, + ord_cmp_method, other, out, overflow_checks, @@ -1104,7 +1148,6 @@ panic_abort, 
panic_bounds_check, panic_cannot_unwind, - panic_display, panic_fmt, panic_handler, panic_impl, @@ -1172,6 +1215,7 @@ proc_macro_mod, proc_macro_non_items, proc_macro_path_invoc, + process_exit, profiler_builtins, profiler_runtime, ptr, @@ -1179,6 +1223,10 @@ ptr_cast_const, ptr_cast_mut, ptr_const_is_null, + ptr_copy, + ptr_copy_nonoverlapping, + ptr_drop_in_place, + ptr_eq, ptr_from_ref, ptr_guaranteed_cmp, ptr_is_null, @@ -1187,8 +1235,17 @@ ptr_null_mut, ptr_offset_from, ptr_offset_from_unsigned, + ptr_read, + ptr_read_unaligned, + ptr_read_volatile, + ptr_replace, + ptr_slice_from_raw_parts, + ptr_slice_from_raw_parts_mut, + ptr_swap, + ptr_swap_nonoverlapping, ptr_unique, ptr_write, + ptr_write_bytes, ptr_write_unaligned, ptr_write_volatile, pub_macro_rules, @@ -1284,6 +1341,7 @@ rust_cold_cc, rust_eh_catch_typeinfo, rust_eh_personality, + rust_logo, rustc, rustc_abi, rustc_allocator, @@ -1299,6 +1357,7 @@ rustc_coherence_is_core, rustc_coinductive, rustc_confusables, + rustc_const_panic_str, rustc_const_stable, rustc_const_unstable, rustc_conversion_suggestion, @@ -1321,6 +1380,7 @@ rustc_evaluate_where_clauses, rustc_expected_cgu_reuse, rustc_has_incoherent_inherent_impls, + rustc_hidden_type_of_opaques, rustc_host, rustc_if_this_changed, rustc_inherit_overflow_checks, @@ -1478,6 +1538,8 @@ sized, skip, slice, + slice_from_raw_parts, + slice_from_raw_parts_mut, slice_len_fn, slice_patterns, slicing_syntax, @@ -1565,7 +1627,9 @@ thumb2, thumb_mode: "thumb-mode", tmm_reg, + to_owned_method, to_string, + to_string_method, to_vec, todo_macro, tool_attributes, @@ -1588,6 +1652,7 @@ try_blocks, try_capture, try_from, + try_from_fn, try_into, try_trait_v2, tt, @@ -1722,6 +1787,7 @@ xmm_reg, yeet_desugar_details, yeet_expr, + yield_expr, ymm_reg, zmm_reg, } @@ -2015,43 +2081,33 @@ } } -#[derive(Default)] pub(crate) struct Interner(Lock); // The `&'static str`s in this type actually point into the arena. // -// The `FxHashMap`+`Vec` pair could be replaced by `FxIndexSet`, but #75278 -// found that to regress performance up to 2% in some cases. This might be -// revisited after further improvements to `indexmap`. -// // This type is private to prevent accidentally constructing more than one // `Interner` on the same thread, which makes it easy to mix up `Symbol`s // between `Interner`s. -#[derive(Default)] struct InternerInner { arena: DroplessArena, - names: FxHashMap<&'static str, Symbol>, - strings: Vec<&'static str>, + strings: FxIndexSet<&'static str>, } impl Interner { fn prefill(init: &[&'static str]) -> Self { Interner(Lock::new(InternerInner { - strings: init.into(), - names: init.iter().copied().zip((0..).map(Symbol::new)).collect(), - ..Default::default() + arena: Default::default(), + strings: init.iter().copied().collect(), })) } #[inline] fn intern(&self, string: &str) -> Symbol { let mut inner = self.0.lock(); - if let Some(&name) = inner.names.get(string) { - return name; + if let Some(idx) = inner.strings.get_index_of(string) { + return Symbol::new(idx as u32); } - let name = Symbol::new(inner.strings.len() as u32); - // SAFETY: we convert from `&str` to `&[u8]`, clone it into the arena, // and immediately convert the clone back to `&[u8]`, all because there // is no `inner.arena.alloc_str()` method. This is clearly safe. @@ -2061,20 +2117,21 @@ // SAFETY: we can extend the arena allocation to `'static` because we // only access these while the arena is still alive. 
let string: &'static str = unsafe { &*(string as *const str) }; - inner.strings.push(string); // This second hash table lookup can be avoided by using `RawEntryMut`, // but this code path isn't hot enough for it to be worth it. See // #91445 for details. - inner.names.insert(string, name); - name + let (idx, is_new) = inner.strings.insert_full(string); + debug_assert!(is_new); // due to the get_index_of check above + + Symbol::new(idx as u32) } /// Get the symbol as a string. /// /// [`Symbol::as_str()`] should be used in preference to this function. fn get(&self, symbol: Symbol) -> &str { - self.0.lock().strings[symbol.0.as_usize()] + self.0.lock().strings.get_index(symbol.0.as_usize()).unwrap() } } @@ -2132,8 +2189,9 @@ self >= kw::Abstract && self <= kw::Yield } - fn is_unused_keyword_conditional(self, edition: impl FnOnce() -> Edition) -> bool { - self == kw::Try && edition() >= Edition::Edition2018 + fn is_unused_keyword_conditional(self, edition: impl Copy + FnOnce() -> Edition) -> bool { + self == kw::Try && edition().at_least_rust_2018() + || self == kw::Gen && edition().at_least_rust_2024() } pub fn is_reserved(self, edition: impl Copy + FnOnce() -> Edition) -> bool { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,21 +3,18 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start bitflags = "1.2.1" -tracing = "0.1" punycode = "0.4.0" rustc-demangle = "0.1.21" -twox-hash = "1.6.3" - -rustc_span = { path = "../rustc_span" } -rustc_middle = { path = "../rustc_middle" } -rustc_hir = { path = "../rustc_hir" } -rustc_fluent_macro = { path = "../rustc_fluent_macro" } -rustc_target = { path = "../rustc_target" } rustc_data_structures = { path = "../rustc_data_structures" } -rustc_session = { path = "../rustc_session" } -rustc_macros = { path = "../rustc_macros" } rustc_errors = { path = "../rustc_errors" } +rustc_hir = { path = "../rustc_hir" } +rustc_middle = { path = "../rustc_middle" } +rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } +rustc_target = { path = "../rustc_target" } +tracing = "0.1" +twox-hash = "1.6.3" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/messages.ftl 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -symbol_mangling_test_output = {$kind}({$content}) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/errors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/errors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/errors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/errors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,18 +1,32 @@ //! Errors emitted by symbol_mangling. 
-use rustc_errors::{DiagnosticArgValue, IntoDiagnosticArg}; -use rustc_macros::Diagnostic; +use rustc_errors::{ErrorGuaranteed, IntoDiagnostic}; use rustc_span::Span; +use std::fmt; -#[derive(Diagnostic)] -#[diag(symbol_mangling_test_output)] pub struct TestOutput { - #[primary_span] pub span: Span, pub kind: Kind, pub content: String, } +// This diagnostic doesn't need translation because (a) it doesn't contain any +// natural language, and (b) it's only used in tests. So we construct it +// manually and avoid the fluent machinery. +impl IntoDiagnostic<'_> for TestOutput { + fn into_diagnostic( + self, + handler: &'_ rustc_errors::Handler, + ) -> rustc_errors::DiagnosticBuilder<'_, ErrorGuaranteed> { + let TestOutput { span, kind, content } = self; + + #[allow(rustc::untranslatable_diagnostic)] + let mut diag = handler.struct_err(format!("{kind}({content})")); + diag.set_span(span); + diag + } +} + pub enum Kind { SymbolName, Demangling, @@ -20,15 +34,13 @@ DefPath, } -impl IntoDiagnosticArg for Kind { - fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> { - let kind = match self { - Kind::SymbolName => "symbol-name", - Kind::Demangling => "demangling", - Kind::DemanglingAlt => "demangling-alt", - Kind::DefPath => "def-path", +impl fmt::Display for Kind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Kind::SymbolName => write!(f, "symbol-name"), + Kind::Demangling => write!(f, "demangling"), + Kind::DemanglingAlt => write!(f, "demangling-alt"), + Kind::DefPath => write!(f, "def-path"), } - .into(); - DiagnosticArgValue::Str(kind) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/legacy.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/legacy.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/legacy.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/legacy.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,7 @@ use rustc_data_structures::stable_hasher::{Hash64, HashStable, StableHasher}; use rustc_hir::def_id::CrateNum; use rustc_hir::definitions::{DefPathData, DisambiguatedDefPathData}; -use rustc_middle::ty::print::{PrettyPrinter, Print, Printer}; +use rustc_middle::ty::print::{PrettyPrinter, Print, PrintError, Printer}; use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeVisitableExt}; use rustc_middle::ty::{GenericArg, GenericArgKind}; use rustc_middle::util::common::record_time; @@ -199,46 +199,38 @@ // `PrettyPrinter` aka pretty printing of e.g. types in paths, // symbol names should have their own printing machinery. -impl<'tcx> Printer<'tcx> for &mut SymbolPrinter<'tcx> { - type Error = fmt::Error; - - type Path = Self; - type Region = Self; - type Type = Self; - type DynExistential = Self; - type Const = Self; - +impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } - fn print_region(self, _region: ty::Region<'_>) -> Result { - Ok(self) + fn print_region(&mut self, _region: ty::Region<'_>) -> Result<(), PrintError> { + Ok(()) } - fn print_type(mut self, ty: Ty<'tcx>) -> Result { + fn print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError> { match *ty.kind() { // Print all nominal types as paths (unlike `pretty_print_type`). ty::FnDef(def_id, args) | ty::Alias(ty::Projection | ty::Opaque, ty::AliasTy { def_id, args, .. 
}) | ty::Closure(def_id, args) - | ty::Generator(def_id, args, _) => self.print_def_path(def_id, args), + | ty::Coroutine(def_id, args, _) => self.print_def_path(def_id, args), // The `pretty_print_type` formatting of array size depends on // -Zverbose flag, so we cannot reuse it here. ty::Array(ty, size) => { self.write_str("[")?; - self = self.print_type(ty)?; + self.print_type(ty)?; self.write_str("; ")?; if let Some(size) = size.try_to_target_usize(self.tcx()) { write!(self, "{size}")? } else if let ty::ConstKind::Param(param) = size.kind() { - self = param.print(self)? + param.print(self)? } else { self.write_str("_")? } self.write_str("]")?; - Ok(self) + Ok(()) } ty::Alias(ty::Inherent, _) => panic!("unexpected inherent projection"), @@ -248,21 +240,21 @@ } fn print_dyn_existential( - mut self, + &mut self, predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>, - ) -> Result<Self::DynExistential, Self::Error> { + ) -> Result<(), PrintError> { let mut first = true; for p in predicates { if !first { write!(self, "+")?; } first = false; - self = p.print(self)?; + p.print(self)?; } - Ok(self) + Ok(()) } - fn print_const(self, ct: ty::Const<'tcx>) -> Result<Self::Const, Self::Error> { + fn print_const(&mut self, ct: ty::Const<'tcx>) -> Result<(), PrintError> { // only print integers match (ct.kind(), ct.ty().kind()) { (ty::ConstKind::Value(ty::ValTree::Leaf(scalar)), ty::Int(_) | ty::Uint(_)) => { @@ -277,22 +269,22 @@ } _ => self.write_str("_")?, } - Ok(self) + Ok(()) } - fn path_crate(self, cnum: CrateNum) -> Result<Self::Path, Self::Error> { + fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.write_str(self.tcx.crate_name(cnum).as_str())?; - Ok(self) + Ok(()) } fn path_qualified( - self, + &mut self, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, - ) -> Result<Self::Path, Self::Error> { + ) -> Result<(), PrintError> { // Similar to `pretty_path_qualified`, but for the other // types that are printed as paths (see `print_type` above). match self_ty.kind() { - ty::FnDef(..) | ty::Alias(..) | ty::Closure(..) | ty::Generator(..) + ty::FnDef(..) | ty::Alias(..) | ty::Closure(..) | ty::Coroutine(..) if trait_ref.is_none() => { self.print_type(self_ty) @@ -303,15 +295,15 @@ } fn path_append_impl( - self, - print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, _disambiguated_data: &DisambiguatedDefPathData, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, - ) -> Result<Self::Path, Self::Error> { + ) -> Result<(), PrintError> { self.pretty_path_append_impl( - |mut cx| { - cx = print_prefix(cx)?; + |cx| { + print_prefix(cx)?; if cx.keep_within_component { // HACK(eddyb) print the path similarly to how `FmtPrinter` prints it. @@ -320,22 +312,22 @@ cx.path.finalize_pending_component(); } - Ok(cx) + Ok(()) }, self_ty, trait_ref, ) } fn path_append( - mut self, - print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, - ) -> Result<Self::Path, Self::Error> { - self = print_prefix(self)?; + ) -> Result<(), PrintError> { + print_prefix(self)?; // Skip `::{{extern}}` blocks and `::{{constructor}}` on tuple/unit structs.
if let DefPathData::ForeignMod | DefPathData::Ctor = disambiguated_data.data { - return Ok(self); + return Ok(()); } if self.keep_within_component { @@ -347,14 +339,14 @@ write!(self, "{}", disambiguated_data.data)?; - Ok(self) + Ok(()) } fn path_generic_args( - mut self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, args: &[GenericArg<'tcx>], - ) -> Result { - self = print_prefix(self)?; + ) -> Result<(), PrintError> { + print_prefix(self)?; let args = args.iter().cloned().filter(|arg| !matches!(arg.unpack(), GenericArgKind::Lifetime(_))); @@ -362,42 +354,42 @@ if args.clone().next().is_some() { self.generic_delimiters(|cx| cx.comma_sep(args)) } else { - Ok(self) + Ok(()) } } } -impl<'tcx> PrettyPrinter<'tcx> for &mut SymbolPrinter<'tcx> { +impl<'tcx> PrettyPrinter<'tcx> for SymbolPrinter<'tcx> { fn should_print_region(&self, _region: ty::Region<'_>) -> bool { false } - fn comma_sep(mut self, mut elems: impl Iterator) -> Result + fn comma_sep(&mut self, mut elems: impl Iterator) -> Result<(), PrintError> where - T: Print<'tcx, Self, Output = Self, Error = Self::Error>, + T: Print<'tcx, Self>, { if let Some(first) = elems.next() { - self = first.print(self)?; + first.print(self)?; for elem in elems { self.write_str(",")?; - self = elem.print(self)?; + elem.print(self)?; } } - Ok(self) + Ok(()) } fn generic_delimiters( - mut self, - f: impl FnOnce(Self) -> Result, - ) -> Result { + &mut self, + f: impl FnOnce(&mut Self) -> Result<(), PrintError>, + ) -> Result<(), PrintError> { write!(self, "<")?; let kept_within_component = mem::replace(&mut self.keep_within_component, true); - self = f(self)?; + f(self)?; self.keep_within_component = kept_within_component; write!(self, ">")?; - Ok(self) + Ok(()) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -88,6 +88,9 @@ //! DefPaths which are much more robust in the face of changes to the code base. #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![feature(never_type)] #![recursion_limit = "256"] #![allow(rustc::potential_query_instability)] @@ -100,8 +103,6 @@ #[macro_use] extern crate tracing; -use rustc_errors::{DiagnosticMessage, SubdiagnosticMessage}; -use rustc_fluent_macro::fluent_messages; use rustc_hir::def::DefKind; use rustc_hir::def_id::{CrateNum, LOCAL_CRATE}; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; @@ -118,8 +119,6 @@ pub mod test; pub mod typeid; -fluent_messages! { "../messages.ftl" } - /// This function computes the symbol name for the given `instance` and the /// given instantiating crate. That is, if you know that instance X is /// instantiated in crate Y, this is the symbol name this instance would have. @@ -235,7 +234,7 @@ // and we want to be sure to avoid any symbol conflicts here. let is_globally_shared_function = matches!( tcx.def_kind(instance.def_id()), - DefKind::Fn | DefKind::AssocFn | DefKind::Closure | DefKind::Generator | DefKind::Ctor(..) 
+ DefKind::Fn | DefKind::AssocFn | DefKind::Closure | DefKind::Coroutine | DefKind::Ctor(..) ) && matches!( MonoItem::Fn(instance).instantiation_mode(tcx), InstantiationMode::GloballyShared { may_conflict: true } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -639,7 +639,7 @@ typeid.push_str(&s); } - ty::Generator(def_id, args, ..) => { + ty::Coroutine(def_id, args, ..) => { // u[IE], where is , // as vendor extended type. let mut s = String::new(); @@ -648,7 +648,7 @@ // Encode parent args only s.push_str(&encode_args( tcx, - tcx.mk_args(args.as_generator().parent_args()), + tcx.mk_args(args.as_coroutine().parent_args()), dict, options, )); @@ -719,7 +719,7 @@ ty::Alias(..) | ty::Bound(..) | ty::Error(..) - | ty::GeneratorWitness(..) + | ty::CoroutineWitness(..) | ty::Infer(..) | ty::Placeholder(..) => { bug!("encode_ty: unexpected `{:?}`", ty.kind()); @@ -778,7 +778,7 @@ | ty::Str | ty::Never | ty::Foreign(..) - | ty::GeneratorWitness(..) => {} + | ty::CoroutineWitness(..) => {} ty::Bool => { if options.contains(EncodeTyOptions::NORMALIZE_INTEGERS) { @@ -892,8 +892,8 @@ ty = Ty::new_closure(tcx, *def_id, transform_args(tcx, args, options)); } - ty::Generator(def_id, args, movability) => { - ty = Ty::new_generator(tcx, *def_id, transform_args(tcx, args, options), *movability); + ty::Coroutine(def_id, args, movability) => { + ty = Ty::new_coroutine(tcx, *def_id, transform_args(tcx, args, options), *movability); } ty::Ref(region, ty0, ..) 
=> { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/v0.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/v0.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/v0.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_symbol_mangling/src/v0.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,7 +6,7 @@ use rustc_hir::def_id::{CrateNum, DefId}; use rustc_hir::definitions::{DefPathData, DisambiguatedDefPathData}; use rustc_middle::ty::layout::IntegerExt; -use rustc_middle::ty::print::{Print, Printer}; +use rustc_middle::ty::print::{Print, PrintError, Printer}; use rustc_middle::ty::{ self, EarlyBinder, FloatTy, Instance, IntTy, Ty, TyCtxt, TypeVisitable, TypeVisitableExt, UintTy, @@ -30,7 +30,7 @@ let args = tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), instance.args); let prefix = "_R"; - let mut cx = &mut SymbolMangler { + let mut cx: SymbolMangler<'_> = SymbolMangler { tcx, start_offset: prefix.len(), paths: FxHashMap::default(), @@ -49,13 +49,13 @@ _ => None, }; - cx = if let Some(shim_kind) = shim_kind { + if let Some(shim_kind) = shim_kind { cx.path_append_ns(|cx| cx.print_def_path(def_id, args), 'S', 0, shim_kind).unwrap() } else { cx.print_def_path(def_id, args).unwrap() }; if let Some(instantiating_crate) = instantiating_crate { - cx = cx.print_def_path(instantiating_crate.as_def_id(), &[]).unwrap(); + cx.print_def_path(instantiating_crate.as_def_id(), &[]).unwrap(); } std::mem::take(&mut cx.out) } @@ -65,7 +65,7 @@ trait_ref: ty::PolyExistentialTraitRef<'tcx>, ) -> String { // FIXME(flip1995): See comment in `mangle_typeid_for_fnabi`. - let mut cx = &mut SymbolMangler { + let mut cx = SymbolMangler { tcx, start_offset: 0, paths: FxHashMap::default(), @@ -74,7 +74,7 @@ binders: vec![], out: String::new(), }; - cx = cx.print_def_path(trait_ref.def_id(), &[]).unwrap(); + cx.print_def_path(trait_ref.def_id(), &[]).unwrap(); std::mem::take(&mut cx.out) } @@ -179,32 +179,32 @@ self.push(ident); } - fn path_append_ns<'a>( - mut self: &'a mut Self, - print_prefix: impl FnOnce(&'a mut Self) -> Result<&'a mut Self, !>, + fn path_append_ns( + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, ns: char, disambiguator: u64, name: &str, - ) -> Result<&'a mut Self, !> { + ) -> Result<(), PrintError> { self.push("N"); self.out.push(ns); - self = print_prefix(self)?; + print_prefix(self)?; self.push_disambiguator(disambiguator as u64); self.push_ident(name); - Ok(self) + Ok(()) } - fn print_backref(&mut self, i: usize) -> Result<&mut Self, !> { + fn print_backref(&mut self, i: usize) -> Result<(), PrintError> { self.push("B"); self.push_integer_62((i - self.start_offset) as u64); - Ok(self) + Ok(()) } - fn in_binder<'a, T>( - mut self: &'a mut Self, + fn in_binder( + &mut self, value: &ty::Binder<'tcx, T>, - print_value: impl FnOnce(&'a mut Self, &T) -> Result<&'a mut Self, !>, - ) -> Result<&'a mut Self, !> + print_value: impl FnOnce(&mut Self, &T) -> Result<(), PrintError>, + ) -> Result<(), PrintError> where T: TypeVisitable>, { @@ -222,53 +222,45 @@ lifetime_depths.end += lifetimes; self.binders.push(BinderLevel { lifetime_depths }); - self = print_value(self, value.as_ref().skip_binder())?; + print_value(self, value.as_ref().skip_binder())?; self.binders.pop(); - Ok(self) + Ok(()) } } -impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> { - type Error = !; - - type Path = Self; - type Region = Self; - type Type = Self; - type DynExistential = Self; - type 
Const = Self; - +impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } fn print_def_path( - mut self, + &mut self, def_id: DefId, args: &'tcx [GenericArg<'tcx>], - ) -> Result { + ) -> Result<(), PrintError> { if let Some(&i) = self.paths.get(&(def_id, args)) { return self.print_backref(i); } let start = self.out.len(); - self = self.default_print_def_path(def_id, args)?; + self.default_print_def_path(def_id, args)?; // Only cache paths that do not refer to an enclosing // binder (which would change depending on context). if !args.iter().any(|k| k.has_escaping_bound_vars()) { self.paths.insert((def_id, args), start); } - Ok(self) + Ok(()) } fn print_impl_path( - mut self, + &mut self, impl_def_id: DefId, args: &'tcx [GenericArg<'tcx>], mut self_ty: Ty<'tcx>, mut impl_trait_ref: Option>, - ) -> Result { + ) -> Result<(), PrintError> { let key = self.tcx.def_key(impl_def_id); let parent_def_id = DefId { index: key.parent.unwrap(), ..impl_def_id }; @@ -296,7 +288,7 @@ // Encode impl generic params if the substitutions contain parameters (implying // polymorphization is enabled) and this isn't an inherent impl. if impl_trait_ref.is_some() && args.iter().any(|a| a.has_non_region_param()) { - self = self.path_generic_args( + self.path_generic_args( |this| { this.path_append_ns( |cx| cx.print_def_path(parent_def_id, &[]), @@ -309,19 +301,19 @@ )?; } else { self.push_disambiguator(key.disambiguated_data.disambiguator as u64); - self = self.print_def_path(parent_def_id, &[])?; + self.print_def_path(parent_def_id, &[])?; } - self = self_ty.print(self)?; + self_ty.print(self)?; if let Some(trait_ref) = impl_trait_ref { - self = self.print_def_path(trait_ref.def_id, trait_ref.args)?; + self.print_def_path(trait_ref.def_id, trait_ref.args)?; } - Ok(self) + Ok(()) } - fn print_region(self, region: ty::Region<'_>) -> Result { + fn print_region(&mut self, region: ty::Region<'_>) -> Result<(), PrintError> { let i = match *region { // Erased lifetimes use the index 0, for a // shorter mangling of `L_`. @@ -340,10 +332,10 @@ }; self.push("L"); self.push_integer_62(i as u64); - Ok(self) + Ok(()) } - fn print_type(mut self, ty: Ty<'tcx>) -> Result { + fn print_type(&mut self, ty: Ty<'tcx>) -> Result<(), PrintError> { // Basic types, never cached (single-character). let basic_type = match ty.kind() { ty::Bool => "b", @@ -373,7 +365,7 @@ }; if !basic_type.is_empty() { self.push(basic_type); - return Ok(self); + return Ok(()); } if let Some(&i) = self.types.get(&ty) { @@ -399,9 +391,9 @@ hir::Mutability::Mut => "Q", }); if !r.is_erased() { - self = r.print(self)?; + r.print(self)?; } - self = ty.print(self)?; + ty.print(self)?; } ty::RawPtr(mt) => { @@ -409,23 +401,23 @@ hir::Mutability::Not => "P", hir::Mutability::Mut => "O", }); - self = mt.ty.print(self)?; + mt.ty.print(self)?; } ty::Array(ty, len) => { self.push("A"); - self = ty.print(self)?; - self = self.print_const(len)?; + ty.print(self)?; + self.print_const(len)?; } ty::Slice(ty) => { self.push("S"); - self = ty.print(self)?; + ty.print(self)?; } ty::Tuple(tys) => { self.push("T"); for ty in tys.iter() { - self = ty.print(self)?; + ty.print(self)?; } self.push("E"); } @@ -435,16 +427,16 @@ | ty::FnDef(def_id, args) | ty::Alias(ty::Projection | ty::Opaque, ty::AliasTy { def_id, args, .. 
}) | ty::Closure(def_id, args) - | ty::Generator(def_id, args, _) => { - self = self.print_def_path(def_id, args)?; + | ty::Coroutine(def_id, args, _) => { + self.print_def_path(def_id, args)?; } ty::Foreign(def_id) => { - self = self.print_def_path(def_id, &[])?; + self.print_def_path(def_id, &[])?; } ty::FnPtr(sig) => { self.push("F"); - self = self.in_binder(&sig, |mut cx, sig| { + self.in_binder(&sig, |cx, sig| { if sig.unsafety == hir::Unsafety::Unsafe { cx.push("U"); } @@ -462,7 +454,7 @@ } } for &ty in sig.inputs() { - cx = ty.print(cx)?; + ty.print(cx)?; } if sig.c_variadic { cx.push("v"); @@ -478,13 +470,13 @@ // FIXME(dyn-star): need to update v0 mangling docs ty::DynStar => "D*", }); - self = self.print_dyn_existential(predicates)?; - self = r.print(self)?; + self.print_dyn_existential(predicates)?; + r.print(self)?; } ty::Alias(ty::Inherent, _) => bug!("symbol_names: unexpected inherent projection"), ty::Alias(ty::Weak, _) => bug!("symbol_names: unexpected weak projection"), - ty::GeneratorWitness(..) => bug!("symbol_names: unexpected `GeneratorWitness`"), + ty::CoroutineWitness(..) => bug!("symbol_names: unexpected `CoroutineWitness`"), } // Only cache types that do not refer to an enclosing @@ -492,13 +484,13 @@ if !ty.has_escaping_bound_vars() { self.types.insert(ty, start); } - Ok(self) + Ok(()) } fn print_dyn_existential( - mut self, + &mut self, predicates: &'tcx ty::List>, - ) -> Result { + ) -> Result<(), PrintError> { // Okay, so this is a bit tricky. Imagine we have a trait object like // `dyn for<'a> Foo<'a, Bar = &'a ()>`. When we mangle this, the // output looks really close to the syntax, where the `Bar = &'a ()` bit @@ -525,7 +517,7 @@ // [ [{}]] [{}] // Since any predicates after the first one shouldn't change the binders, // just put them all in the binders of the first. - self = self.in_binder(&predicates[0], |mut cx, _| { + self.in_binder(&predicates[0], |cx, _| { for predicate in predicates.iter() { // It would be nice to be able to validate bound vars here, but // projections can actually include bound vars from super traits @@ -536,30 +528,30 @@ // Use a type that can't appear in defaults of type parameters. let dummy_self = Ty::new_fresh(cx.tcx, 0); let trait_ref = trait_ref.with_self_ty(cx.tcx, dummy_self); - cx = cx.print_def_path(trait_ref.def_id, trait_ref.args)?; + cx.print_def_path(trait_ref.def_id, trait_ref.args)?; } ty::ExistentialPredicate::Projection(projection) => { let name = cx.tcx.associated_item(projection.def_id).name; cx.push("p"); cx.push_ident(name.as_str()); - cx = match projection.term.unpack() { + match projection.term.unpack() { ty::TermKind::Ty(ty) => ty.print(cx), ty::TermKind::Const(c) => c.print(cx), }?; } ty::ExistentialPredicate::AutoTrait(def_id) => { - cx = cx.print_def_path(*def_id, &[])?; + cx.print_def_path(*def_id, &[])?; } } } - Ok(cx) + Ok(()) })?; self.push("E"); - Ok(self) + Ok(()) } - fn print_const(mut self, ct: ty::Const<'tcx>) -> Result { + fn print_const(&mut self, ct: ty::Const<'tcx>) -> Result<(), PrintError> { // We only mangle a typed value if the const can be evaluated. let ct = ct.normalize(self.tcx, ty::ParamEnv::reveal_all()); match ct.kind() { @@ -578,12 +570,13 @@ | ty::ConstKind::Error(_) => { // Never cached (single-character). 
self.push("p"); - return Ok(self); + return Ok(()); } } if let Some(&i) = self.consts.get(&ct) { - return self.print_backref(i); + self.print_backref(i)?; + return Ok(()); } let start = self.out.len(); @@ -591,7 +584,7 @@ match ty.kind() { ty::Uint(_) | ty::Int(_) | ty::Bool | ty::Char => { - self = ty.print(self)?; + ty.print(self)?; let mut bits = ct.eval_bits(self.tcx, ty::ParamEnv::reveal_all()); @@ -653,7 +646,7 @@ .ty; // FIXME(const_generics): add an assert that we only do this for valtrees. let dereferenced_const = self.tcx.mk_ct_from_kind(ct.kind(), pointee_ty); - self = dereferenced_const.print(self)?; + dereferenced_const.print(self)?; } } } @@ -662,22 +655,22 @@ let contents = self.tcx.destructure_const(ct); let fields = contents.fields.iter().copied(); - let print_field_list = |mut this: Self| { + let print_field_list = |this: &mut Self| { for field in fields.clone() { - this = field.print(this)?; + field.print(this)?; } this.push("E"); - Ok(this) + Ok(()) }; match *ct.ty().kind() { ty::Array(..) | ty::Slice(_) => { self.push("A"); - self = print_field_list(self)?; + print_field_list(self)?; } ty::Tuple(..) => { self.push("T"); - self = print_field_list(self)?; + print_field_list(self)?; } ty::Adt(def, args) => { let variant_idx = @@ -685,7 +678,7 @@ let variant_def = &def.variant(variant_idx); self.push("V"); - self = self.print_def_path(variant_def.def_id, args)?; + self.print_def_path(variant_def.def_id, args)?; match variant_def.ctor_kind() { Some(CtorKind::Const) => { @@ -693,7 +686,7 @@ } Some(CtorKind::Fn) => { self.push("T"); - self = print_field_list(self)?; + print_field_list(self)?; } None => { self.push("S"); @@ -709,7 +702,7 @@ ); self.push_ident(field_name.unwrap_or(kw::Empty).as_str()); - self = field.print(self)?; + field.print(self)?; } self.push("E"); } @@ -728,47 +721,47 @@ if !ct.has_escaping_bound_vars() { self.consts.insert(ct, start); } - Ok(self) + Ok(()) } - fn path_crate(self, cnum: CrateNum) -> Result { + fn path_crate(&mut self, cnum: CrateNum) -> Result<(), PrintError> { self.push("C"); let stable_crate_id = self.tcx.def_path_hash(cnum.as_def_id()).stable_crate_id(); self.push_disambiguator(stable_crate_id.as_u64()); let name = self.tcx.crate_name(cnum); self.push_ident(name.as_str()); - Ok(self) + Ok(()) } fn path_qualified( - mut self, + &mut self, self_ty: Ty<'tcx>, trait_ref: Option>, - ) -> Result { + ) -> Result<(), PrintError> { assert!(trait_ref.is_some()); let trait_ref = trait_ref.unwrap(); self.push("Y"); - self = self_ty.print(self)?; + self_ty.print(self)?; self.print_def_path(trait_ref.def_id, trait_ref.args) } fn path_append_impl( - self, - _: impl FnOnce(Self) -> Result, + &mut self, + _: impl FnOnce(&mut Self) -> Result<(), PrintError>, _: &DisambiguatedDefPathData, _: Ty<'tcx>, _: Option>, - ) -> Result { + ) -> Result<(), PrintError> { // Inlined into `print_impl_path` unreachable!() } fn path_append( - self, - print_prefix: impl FnOnce(Self) -> Result, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, disambiguated_data: &DisambiguatedDefPathData, - ) -> Result { + ) -> Result<(), PrintError> { let ns = match disambiguated_data.data { // Extern block segments can be skipped, names from extern blocks // are effectively living in their parent modules. 
@@ -805,10 +798,10 @@ } fn path_generic_args( - mut self, - print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>, + &mut self, + print_prefix: impl FnOnce(&mut Self) -> Result<(), PrintError>, args: &[GenericArg<'tcx>], - ) -> Result<Self::Path, Self::Error> { + ) -> Result<(), PrintError> { // Don't print any regions if they're all erased. let print_regions = args.iter().any(|arg| match arg.unpack() { GenericArgKind::Lifetime(r) => !r.is_erased(), @@ -824,23 +817,23 @@ } self.push("I"); - self = print_prefix(self)?; + print_prefix(self)?; for arg in args { match arg.unpack() { GenericArgKind::Lifetime(lt) => { - self = lt.print(self)?; + lt.print(self)?; } GenericArgKind::Type(ty) => { - self = ty.print(self)?; + ty.print(self)?; } GenericArgKind::Const(c) => { self.push("K"); - self = c.print(self)?; + c.print(self)?; } } } self.push("E"); - Ok(self) + Ok(()) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,18 +4,23 @@ edition = "2021" [dependencies] +# tidy-alphabetical-start bitflags = "1.2.1" -tracing = "0.1" -serde_json = "1.0.59" -rustc_fs_util = { path = "../rustc_fs_util" } rustc_abi = { path = "../rustc_abi" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_feature = { path = "../rustc_feature" } +rustc_fs_util = { path = "../rustc_fs_util" } +rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } rustc_serialize = { path = "../rustc_serialize" } rustc_span = { path = "../rustc_span" } +serde_json = "1.0.59" +tracing = "0.1" +# tidy-alphabetical-end [dependencies.object] -version = "0.32.0" +# tidy-alphabetical-start default-features = false -features = ["elf"] +features = ["elf", "macho"] +version = "0.32.0" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/call/csky.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/call/csky.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/call/csky.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/call/csky.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,17 +1,39 @@ -// See https://github.com/llvm/llvm-project/blob/d85b94bf0080dcd780656c0f5e6342800720eba9/llvm/lib/Target/CSKY/CSKYCallingConv.td -use crate::abi::call::{ArgAbi, FnAbi}; +// Reference: CSKY ABI Manual +// https://occ-oss-prod.oss-cn-hangzhou.aliyuncs.com/resource//1695027452256/T-HEAD_800_Series_ABI_Standards_Manual.pdf +// +// Reference: Clang CSKY lowering code +// https://github.com/llvm/llvm-project/blob/4a074f32a6914f2a8d7215d78758c24942dddc3d/clang/lib/CodeGen/Targets/CSKY.cpp#L76-L162 -fn classify_ret<Ty>(ret: &mut ArgAbi<'_, Ty>) { - if ret.layout.is_aggregate() || ret.layout.size.bits() > 64 { - ret.make_indirect(); +use crate::abi::call::{ArgAbi, FnAbi, Reg, Uniform}; + +fn classify_ret<Ty>(arg: &mut ArgAbi<'_, Ty>) { + // For return type, aggregate which <= 2*XLen will be returned in registers. + // Otherwise, aggregate will be returned indirectly.
+ if arg.layout.is_aggregate() { + let total = arg.layout.size; + if total.bits() > 64 { + arg.make_indirect(); + } else if total.bits() > 32 { + arg.cast_to(Uniform { unit: Reg::i32(), total }); + } else { + arg.cast_to(Reg::i32()); + } } else { - ret.extend_integer_width_to(32); + arg.extend_integer_width_to(32); } } fn classify_arg(arg: &mut ArgAbi<'_, Ty>) { - if arg.layout.is_aggregate() || arg.layout.size.bits() > 64 { - arg.make_indirect(); + // For argument type, the first 4*XLen parts of aggregate will be passed + // in registers, and the rest will be passed in stack. + // So we can coerce to integers directly and let backend handle it correctly. + if arg.layout.is_aggregate() { + let total = arg.layout.size; + if total.bits() > 32 { + arg.cast_to(Uniform { unit: Reg::i32(), total }); + } else { + arg.cast_to(Reg::i32()); + } } else { arg.extend_integer_width_to(32); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/call/x86.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/call/x86.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/call/x86.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/call/x86.rs 2023-12-21 16:55:28.000000000 +0000 @@ -72,7 +72,8 @@ // - For backwards compatibility, arguments with natural alignment > 4 are still passed // on stack (via `byval`). For example, this includes `double`, `int64_t`, // and structs containing them, provided they lack an explicit alignment attribute. - assert!(arg.layout.align.abi >= max_repr_align, + assert!( + arg.layout.align.abi >= max_repr_align, "abi alignment {:?} less than requested alignment {max_repr_align:?}", arg.layout.align.abi, ); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/abi/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,4 @@ +use rustc_data_structures::intern::Interned; pub use Integer::*; pub use Primitive::*; @@ -18,6 +19,111 @@ } } +rustc_index::newtype_index! { + /// The *source-order* index of a field in a variant. + /// + /// This is how most code after type checking refers to fields, rather than + /// using names (as names have hygiene complications and more complex lookup). + /// + /// Particularly for `repr(Rust)` types, this may not be the same as *layout* order. + /// (It is for `repr(C)` `struct`s, however.) + /// + /// For example, in the following types, + /// ```rust + /// # enum Never {} + /// # #[repr(u16)] + /// enum Demo1 { + /// Variant0 { a: Never, b: i32 } = 100, + /// Variant1 { c: u8, d: u64 } = 10, + /// } + /// struct Demo2 { e: u8, f: u16, g: u8 } + /// ``` + /// `b` is `FieldIdx(1)` in `VariantIdx(0)`, + /// `d` is `FieldIdx(1)` in `VariantIdx(1)`, and + /// `f` is `FieldIdx(1)` in `VariantIdx(0)`. + #[derive(HashStable_Generic)] + pub struct FieldIdx {} +} + +rustc_index::newtype_index! { + /// The *source-order* index of a variant in a type. + /// + /// For enums, these are always `0..variant_count`, regardless of any + /// custom discriminants that may have been defined, and including any + /// variants that may end up uninhabited due to field types. 
(Some of the + /// variants may not be present in a monomorphized ABI [`Variants`], but + /// those skipped variants are always counted when determining the *index*.) + /// + /// `struct`s, `tuples`, and `unions`s are considered to have a single variant + /// with variant index zero, aka [`FIRST_VARIANT`]. + #[derive(HashStable_Generic)] + pub struct VariantIdx { + /// Equivalent to `VariantIdx(0)`. + const FIRST_VARIANT = 0; + } +} +#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable_Generic)] +#[rustc_pass_by_value] +pub struct Layout<'a>(pub Interned<'a, LayoutS>); + +impl<'a> fmt::Debug for Layout<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // See comment on `::fmt` above. + self.0.0.fmt(f) + } +} + +impl<'a> Deref for Layout<'a> { + type Target = &'a LayoutS; + fn deref(&self) -> &&'a LayoutS { + &self.0.0 + } +} + +impl<'a> Layout<'a> { + pub fn fields(self) -> &'a FieldsShape { + &self.0.0.fields + } + + pub fn variants(self) -> &'a Variants { + &self.0.0.variants + } + + pub fn abi(self) -> Abi { + self.0.0.abi + } + + pub fn largest_niche(self) -> Option { + self.0.0.largest_niche + } + + pub fn align(self) -> AbiAndPrefAlign { + self.0.0.align + } + + pub fn size(self) -> Size { + self.0.0.size + } + + pub fn max_repr_align(self) -> Option { + self.0.0.max_repr_align + } + + pub fn unadjusted_abi_align(self) -> Align { + self.0.0.unadjusted_abi_align + } + + /// Whether the layout is from a type that implements [`std::marker::PointerLike`]. + /// + /// Currently, that means that the type is pointer-sized, pointer-aligned, + /// and has a scalar ABI. + pub fn is_pointer_like(self, data_layout: &TargetDataLayout) -> bool { + self.size() == data_layout.pointer_size + && self.align().abi == data_layout.pointer_align.abi + && matches!(self.abi(), Abi::Scalar(..)) + } +} + /// The layout of a type, alongside the type itself. /// Provides various type traversal APIs (e.g., recursing into fields). 
/// @@ -42,8 +148,8 @@ } impl<'a, Ty> Deref for TyAndLayout<'a, Ty> { - type Target = &'a LayoutS; - fn deref(&self) -> &&'a LayoutS { + type Target = &'a LayoutS; + fn deref(&self) -> &&'a LayoutS { &self.layout.0.0 } } @@ -144,14 +250,17 @@ Ty::is_transparent(self) } - pub fn offset_of_subfield(self, cx: &C, indices: impl Iterator) -> Size + pub fn offset_of_subfield(self, cx: &C, indices: I) -> Size where Ty: TyAbiInterface<'a, C>, + I: Iterator, { let mut layout = self; let mut offset = Size::ZERO; - for index in indices { + for (variant, field) in indices { + layout = layout.for_variant(cx, variant); + let index = field.index(); offset += layout.fields.offset(index); layout = layout.field(cx, index); assert!( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/asm/csky.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/asm/csky.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/asm/csky.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/asm/csky.rs 2023-12-21 16:55:28.000000000 +0000 @@ -64,9 +64,9 @@ r20: reg = ["r20","t4"],// feature high-register r21: reg = ["r21","t5"],// feature high-register r22: reg = ["r22","t6"],// feature high-register - r23: reg = ["r23","t7", "fp"],// feature high-register - r24: reg = ["r24","t8", "sop"],// feature high-register - r25: reg = ["r25","t9","tp", "bsp"],// feature high-register + r23: reg = ["r23","t7"],// feature high-register + r24: reg = ["r24","t8"],// feature high-register + r25: reg = ["r25","t9"],// feature high-register f0: freg = ["fr0","vr0"], f1: freg = ["fr1","vr1"], f2: freg = ["fr2","vr2"], diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,6 +8,8 @@ //! LLVM. #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature(assert_matches)] #![feature(associated_type_bounds)] #![feature(exhaustive_patterns)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_darwin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_darwin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_darwin.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_darwin.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -use super::apple_base::{macos_llvm_target, opts, Arch}; -use crate::spec::{FramePointer, SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::Arm64; - let mut base = opts("macos", arch); - base.cpu = "apple-m1".into(); - base.max_atomic_width = Some(128); - - // FIXME: The leak sanitizer currently fails the tests, see #88132. - base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::THREAD; - - Target { - // Clang automatically chooses a more specific target based on - // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work - // correctly, we do too. 
- llvm_target: macos_llvm_target(arch).into(), - pointer_width: 64, - data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), - arch: arch.target_arch(), - options: TargetOptions { - mcount: "\u{1}mcount".into(), - frame_pointer: FramePointer::NonLeaf, - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,37 +0,0 @@ -use super::apple_base::{ios_llvm_target, opts, Arch}; -use crate::spec::{FramePointer, SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::Arm64; - let mut base = opts("ios", arch); - base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::THREAD; - - Target { - // Clang automatically chooses a more specific target based on - // IPHONEOS_DEPLOYMENT_TARGET. - // This is required for the target to pick the right - // MACH-O commands, so we do too. - llvm_target: ios_llvm_target(arch).into(), - pointer_width: 64, - data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), - arch: arch.target_arch(), - options: TargetOptions { - features: "+neon,+fp-armv8,+apple-a7".into(), - max_atomic_width: Some(128), - forces_embed_bitcode: true, - frame_pointer: FramePointer::NonLeaf, - // Taken from a clang build on Xcode 11.4.1. - // These arguments are not actually invoked - they just have - // to look right to pass App Store validation. - bitcode_llvm_cmdline: "-triple\0\ - arm64-apple-ios11.0.0\0\ - -emit-obj\0\ - -disable-llvm-passes\0\ - -target-abi\0\ - darwinpcs\0\ - -Os\0" - .into(), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -use super::apple_base::{opts, Arch}; -use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - let llvm_target = "arm64-apple-ios14.0-macabi"; - - let arch = Arch::Arm64_macabi; - let mut base = opts("ios", arch); - base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-target", llvm_target]); - base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::LEAK | SanitizerSet::THREAD; - - Target { - llvm_target: llvm_target.into(), - pointer_width: 64, - data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), - arch: arch.target_arch(), - options: TargetOptions { - features: "+neon,+fp-armv8,+apple-a12".into(), - max_atomic_width: Some(128), - forces_embed_bitcode: true, - frame_pointer: FramePointer::NonLeaf, - // Taken from a clang build on Xcode 11.4.1. - // These arguments are not actually invoked - they just have - // to look right to pass App Store validation. 
- bitcode_llvm_cmdline: "-triple\0\ - arm64-apple-ios-macabi\0\ - -emit-obj\0\ - -disable-llvm-passes\0\ - -Os\0" - .into(), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios_sim.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios_sim.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios_sim.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_ios_sim.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,37 +0,0 @@ -use super::apple_base::{ios_sim_llvm_target, opts, Arch}; -use crate::spec::{FramePointer, SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::Arm64_sim; - let mut base = opts("ios", arch); - base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::THREAD; - - Target { - // Clang automatically chooses a more specific target based on - // IPHONEOS_DEPLOYMENT_TARGET. - // This is required for the simulator target to pick the right - // MACH-O commands, so we do too. - llvm_target: ios_sim_llvm_target(arch).into(), - pointer_width: 64, - data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), - arch: arch.target_arch(), - options: TargetOptions { - features: "+neon,+fp-armv8,+apple-a7".into(), - max_atomic_width: Some(128), - forces_embed_bitcode: true, - frame_pointer: FramePointer::NonLeaf, - // Taken from a clang build on Xcode 11.4.1. - // These arguments are not actually invoked - they just have - // to look right to pass App Store validation. - bitcode_llvm_cmdline: "-triple\0\ - arm64-apple-ios14.0-simulator\0\ - -emit-obj\0\ - -disable-llvm-passes\0\ - -target-abi\0\ - darwinpcs\0\ - -Os\0" - .into(), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_tvos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_tvos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_tvos.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_tvos.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use super::apple_base::{opts, tvos_llvm_target, Arch}; -use crate::spec::{FramePointer, Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::Arm64; - Target { - llvm_target: tvos_llvm_target(arch).into(), - pointer_width: 64, - data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), - arch: arch.target_arch(), - options: TargetOptions { - features: "+neon,+fp-armv8,+apple-a7".into(), - max_atomic_width: Some(128), - forces_embed_bitcode: true, - frame_pointer: FramePointer::NonLeaf, - ..opts("tvos", arch) - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_watchos_sim.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_watchos_sim.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_watchos_sim.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_apple_watchos_sim.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -use super::apple_base::{opts, watchos_sim_llvm_target, Arch}; -use crate::spec::{FramePointer, Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::Arm64_sim; - Target { - // Clang automatically chooses a more specific target based on - // WATCHOS_DEPLOYMENT_TARGET. 
- // This is required for the simulator target to pick the right - // MACH-O commands, so we do too. - llvm_target: watchos_sim_llvm_target(arch).into(), - pointer_width: 64, - data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), - arch: arch.target_arch(), - options: TargetOptions { - features: "+neon,+fp-armv8,+apple-a7".into(), - max_atomic_width: Some(128), - forces_embed_bitcode: true, - frame_pointer: FramePointer::NonLeaf, - // Taken from a clang build on Xcode 11.4.1. - // These arguments are not actually invoked - they just have - // to look right to pass App Store validation. - bitcode_llvm_cmdline: "-triple\0\ - arm64-apple-watchos5.0-simulator\0\ - -emit-obj\0\ - -disable-llvm-passes\0\ - -target-abi\0\ - darwinpcs\0\ - -Os\0" - .into(), - ..opts("watchos", arch) - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "aarch64_be-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - features: "+v8a,+outline-atomics".into(), - max_atomic_width: Some(128), - mcount: "\u{1}_mcount".into(), - endian: Endian::Big, - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu_ilp32.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu_ilp32.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu_ilp32.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu_ilp32.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); - base.max_atomic_width = Some(128); - - Target { - llvm_target: "aarch64_be-unknown-linux-gnu_ilp32".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - abi: "ilp32".into(), - features: "+v8a,+outline-atomics".into(), - mcount: "\u{1}_mcount".into(), - endian: Endian::Big, - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_netbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_be_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "aarch64_be-unknown-netbsd".into(), - pointer_width: 64, - data_layout: 
"E-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - mcount: "__mcount".into(), - max_atomic_width: Some(128), - endian: Endian::Big, - ..super::netbsd_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_fuchsia.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_fuchsia.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_fuchsia.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_fuchsia.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -pub use crate::spec::aarch64_unknown_fuchsia::target; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_kmc_solid_asp3.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_kmc_solid_asp3.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_kmc_solid_asp3.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_kmc_solid_asp3.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use super::{RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - let base = super::solid_base::opts("asp3"); - Target { - llvm_target: "aarch64-unknown-none".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - linker: Some("aarch64-kmc-elf-gcc".into()), - features: "+v8a,+neon,+fp-armv8".into(), - relocation_model: RelocModel::Static, - disable_redzone: true, - max_atomic_width: Some(128), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_linux_android.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_linux_android.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_linux_android.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_linux_android.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use crate::spec::{SanitizerSet, Target, TargetOptions}; - -// See https://developer.android.com/ndk/guides/abis.html#arm64-v8a -// for target ABI requirements. - -pub fn target() -> Target { - Target { - llvm_target: "aarch64-linux-android".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - max_atomic_width: Some(128), - // As documented in https://developer.android.com/ndk/guides/cpu-features.html - // the neon (ASIMD) and FP must exist on all android aarch64 targets. 
- features: "+v8a,+neon,+fp-armv8".into(), - supported_sanitizers: SanitizerSet::CFI - | SanitizerSet::HWADDRESS - | SanitizerSet::MEMTAG - | SanitizerSet::SHADOWCALLSTACK - | SanitizerSet::ADDRESS, - supports_xray: true, - ..super::android_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use super::{Cc, LinkerFlavor, Lld, PanicStrategy, RelroLevel, Target, TargetOptions}; - -const LINKER_SCRIPT: &str = include_str!("./aarch64_nintendo_switch_freestanding_linker_script.ld"); - -/// A base target for Nintendo Switch devices using a pure LLVM toolchain. -pub fn target() -> Target { - Target { - llvm_target: "aarch64-unknown-none".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - features: "+v8a".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - link_script: Some(LINKER_SCRIPT.into()), - os: "horizon".into(), - max_atomic_width: Some(128), - panic_strategy: PanicStrategy::Abort, - position_independent_executables: true, - dynamic_linking: true, - relro_level: RelroLevel::Off, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding_linker_script.ld rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding_linker_script.ld --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding_linker_script.ld 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding_linker_script.ld 1970-01-01 00:00:00.000000000 +0000 @@ -1,78 +0,0 @@ -OUTPUT_FORMAT(elf64-littleaarch64) -OUTPUT_ARCH(aarch64) -ENTRY(_start) - -PHDRS -{ - text PT_LOAD FLAGS(5); - rodata PT_LOAD FLAGS(4); - data PT_LOAD FLAGS(6); - bss PT_LOAD FLAGS(6); - dynamic PT_DYNAMIC; -} - -SECTIONS -{ - . = 0; - - .text : ALIGN(0x1000) { - HIDDEN(__text_start = .); - KEEP(*(.text.jmp)) - - . = 0x80; - - *(.text .text.*) - *(.plt .plt.*) - } - - /* Read-only sections */ - - . = ALIGN(0x1000); - - .module_name : { *(.module_name) } :rodata - - .rodata : { *(.rodata .rodata.*) } :rodata - .hash : { *(.hash) } - .dynsym : { *(.dynsym .dynsym.*) } - .dynstr : { *(.dynstr .dynstr.*) } - .rela.dyn : { *(.rela.dyn) } - - .eh_frame : { - HIDDEN(__eh_frame_start = .); - *(.eh_frame .eh_frame.*) - HIDDEN(__eh_frame_end = .); - } - - .eh_frame_hdr : { - HIDDEN(__eh_frame_hdr_start = .); - *(.eh_frame_hdr .eh_frame_hdr.*) - HIDDEN(__eh_frame_hdr_end = .); - } - - /* Read-write sections */ - - . = ALIGN(0x1000); - - .data : { - *(.data .data.*) - *(.got .got.*) - *(.got.plt .got.plt.*) - } :data - - .dynamic : { - HIDDEN(__dynamic_start = .); - *(.dynamic) - } - - /* BSS section */ - - . = ALIGN(0x1000); - - .bss : { - HIDDEN(__bss_start = .); - *(.bss .bss.*) - *(COMMON) - . 
= ALIGN(8); - HIDDEN(__bss_end = .); - } :bss -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_pc_windows_gnullvm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_pc_windows_gnullvm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_pc_windows_gnullvm.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_pc_windows_gnullvm.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::windows_gnullvm_base::opts(); - base.max_atomic_width = Some(128); - base.features = "+v8a,+neon,+fp-armv8".into(); - base.linker = Some("aarch64-w64-mingw32-clang".into()); - - Target { - llvm_target: "aarch64-pc-windows-gnu".into(), - pointer_width: 64, - data_layout: "e-m:w-p:64:64-i32:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_pc_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_pc_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_pc_windows_msvc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_pc_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::windows_msvc_base::opts(); - base.max_atomic_width = Some(128); - base.features = "+v8a,+neon,+fp-armv8".into(); - - Target { - llvm_target: "aarch64-pc-windows-msvc".into(), - pointer_width: 64, - data_layout: "e-m:w-p:64:64-i32:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_freebsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "aarch64-unknown-freebsd".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - features: "+v8a".into(), - max_atomic_width: Some(128), - supported_sanitizers: SanitizerSet::ADDRESS - | SanitizerSet::CFI - | SanitizerSet::MEMORY - | SanitizerSet::THREAD, - ..super::freebsd_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_fuchsia.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_fuchsia.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_fuchsia.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_fuchsia.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "aarch64-unknown-fuchsia".into(), - pointer_width: 64, - data_layout: 
"e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - features: "+v8a".into(), - max_atomic_width: Some(128), - supported_sanitizers: SanitizerSet::ADDRESS - | SanitizerSet::CFI - | SanitizerSet::SHADOWCALLSTACK, - ..super::fuchsia_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_hermit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_hermit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_hermit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_hermit.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "aarch64-unknown-hermit".into(), - pointer_width: 64, - arch: "aarch64".into(), - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - options: TargetOptions { - features: "+v8a,+strict-align,+neon,+fp-armv8".into(), - max_atomic_width: Some(128), - ..super::hermit_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -use crate::spec::{SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "aarch64-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - features: "+v8a,+outline-atomics".into(), - mcount: "\u{1}_mcount".into(), - max_atomic_width: Some(128), - supported_sanitizers: SanitizerSet::ADDRESS - | SanitizerSet::CFI - | SanitizerSet::LEAK - | SanitizerSet::MEMORY - | SanitizerSet::MEMTAG - | SanitizerSet::THREAD - | SanitizerSet::HWADDRESS, - supports_xray: true, - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu_ilp32.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu_ilp32.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu_ilp32.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu_ilp32.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "aarch64-unknown-linux-gnu_ilp32".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - abi: "ilp32".into(), - features: "+v8a,+outline-atomics".into(), - max_atomic_width: Some(128), - mcount: "\u{1}_mcount".into(), - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -use crate::spec::{SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.max_atomic_width = Some(128); - base.supports_xray = true; - base.features = "+v8a".into(); - base.supported_sanitizers = SanitizerSet::ADDRESS - | SanitizerSet::CFI - | SanitizerSet::LEAK - | SanitizerSet::MEMORY - | SanitizerSet::THREAD; - - Target { - llvm_target: "aarch64-unknown-linux-musl".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { mcount: "\u{1}_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_ohos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_ohos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_ohos.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_linux_ohos.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,28 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -use super::SanitizerSet; - -pub fn target() -> Target { - let mut base = super::linux_ohos_base::opts(); - base.max_atomic_width = Some(128); - - Target { - // LLVM 15 doesn't support OpenHarmony yet, use a linux target instead. - llvm_target: "aarch64-unknown-linux-musl".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - features: "+reserve-x18".into(), - mcount: "\u{1}_mcount".into(), - supported_sanitizers: SanitizerSet::ADDRESS - | SanitizerSet::CFI - | SanitizerSet::LEAK - | SanitizerSet::MEMORY - | SanitizerSet::MEMTAG - | SanitizerSet::THREAD - | SanitizerSet::HWADDRESS, - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_netbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "aarch64-unknown-netbsd".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - features: "+v8a".into(), - mcount: "__mcount".into(), - max_atomic_width: Some(128), - ..super::netbsd_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_none.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -// Generic AArch64 target for bare-metal code - 
Floating point enabled -// -// Can be used in conjunction with the `target-feature` and -// `target-cpu` compiler flags to opt-in more hardware-specific -// features. -// -// For example, `-C target-cpu=cortex-a53`. - -use super::{ - Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, SanitizerSet, Target, TargetOptions, -}; - -pub fn target() -> Target { - let opts = TargetOptions { - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - features: "+v8a,+strict-align,+neon,+fp-armv8".into(), - supported_sanitizers: SanitizerSet::KCFI | SanitizerSet::KERNELADDRESS, - relocation_model: RelocModel::Static, - disable_redzone: true, - max_atomic_width: Some(128), - panic_strategy: PanicStrategy::Abort, - ..Default::default() - }; - Target { - llvm_target: "aarch64-unknown-none".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: opts, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_none_softfloat.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_none_softfloat.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_none_softfloat.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_none_softfloat.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -// Generic AArch64 target for bare-metal code - Floating point disabled -// -// Can be used in conjunction with the `target-feature` and -// `target-cpu` compiler flags to opt-in more hardware-specific -// features. -// -// For example, `-C target-cpu=cortex-a53`. - -use super::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - let opts = TargetOptions { - abi: "softfloat".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - features: "+v8a,+strict-align,-neon,-fp-armv8".into(), - relocation_model: RelocModel::Static, - disable_redzone: true, - max_atomic_width: Some(128), - panic_strategy: PanicStrategy::Abort, - ..Default::default() - }; - Target { - llvm_target: "aarch64-unknown-none".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: opts, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -use super::nto_qnx_base; -use crate::spec::{Cc, LinkerFlavor, Lld, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "aarch64-unknown-unknown".into(), - pointer_width: 64, - // from: https://llvm.org/docs/LangRef.html#data-layout - // e = little endian - // m:e = ELF mangling: Private symbols get a .L prefix - // i8:8:32 = 8-bit-integer, minimum_alignment=8, preferred_alignment=32 - // i16:16:32 = 16-bit-integer, minimum_alignment=16, preferred_alignment=32 - // i64:64 = 64-bit-integer, minimum_alignment=64, preferred_alignment=64 - // i128:128 = 128-bit-integer, minimum_alignment=128, 
preferred_alignment=128 - // n32:64 = 32 and 64 are native integer widths; Elements of this set are considered to support most general arithmetic operations efficiently. - // S128 = 128 bits are the natural alignment of the stack in bits. - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - features: "+v8a".into(), - max_atomic_width: Some(128), - pre_link_args: TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - &["-Vgcc_ntoaarch64le_cxx"], - ), - env: "nto71".into(), - ..nto_qnx_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_openbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "aarch64-unknown-openbsd".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - features: "+v8a".into(), - max_atomic_width: Some(128), - ..super::openbsd_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_redox.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_redox.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_redox.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_redox.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::redox_base::opts(); - base.max_atomic_width = Some(128); - base.features = "+v8a".into(); - - Target { - llvm_target: "aarch64-unknown-redox".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_teeos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_teeos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_teeos.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_teeos.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::teeos_base::opts(); - base.features = "+strict-align,+neon,+fp-armv8".into(); - base.max_atomic_width = Some(128); - base.linker = Some("aarch64-linux-gnu-ld".into()); - - Target { - llvm_target: "aarch64-unknown-none".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_uefi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_uefi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_uefi.rs 2023-12-04 19:48:34.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_unknown_uefi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -// This defines the aarch64 target for UEFI systems as described in the UEFI specification. See the -// uefi-base module for generic UEFI options. - -use super::uefi_msvc_base; -use crate::spec::{LinkerFlavor, Lld, Target}; - -pub fn target() -> Target { - let mut base = uefi_msvc_base::opts(); - - base.max_atomic_width = Some(128); - base.add_pre_link_args(LinkerFlavor::Msvc(Lld::No), &["/machine:arm64"]); - base.features = "+v8a".into(); - - Target { - llvm_target: "aarch64-unknown-windows".into(), - pointer_width: 64, - data_layout: "e-m:w-p:64:64-i32:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_uwp_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_uwp_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_uwp_windows_msvc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_uwp_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::windows_uwp_msvc_base::opts(); - base.max_atomic_width = Some(128); - base.features = "+v8a".into(); - - Target { - llvm_target: "aarch64-pc-windows-msvc".into(), - pointer_width: 64, - data_layout: "e-m:w-p:64:64-i32:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_wrs_vxworks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_wrs_vxworks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_wrs_vxworks.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aarch64_wrs_vxworks.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "aarch64-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - features: "+v8a".into(), - max_atomic_width: Some(128), - ..super::vxworks_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/abi/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/abi/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/abi/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/abi/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,341 @@ +use std::fmt; + +use rustc_macros::HashStable_Generic; +use rustc_span::symbol::sym; +use rustc_span::{Span, Symbol}; + +#[cfg(test)] +mod tests; + +#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Debug)] +#[derive(HashStable_Generic, Encodable, Decodable)] +pub enum Abi { + // Some of the ABIs come first because every time we add a new ABI, we have to re-bless all the + // hashing tests. These are used in many places, so giving them stable values reduces test + // churn. The specific values are meaningless. 
+ Rust, + C { + unwind: bool, + }, + Cdecl { + unwind: bool, + }, + Stdcall { + unwind: bool, + }, + Fastcall { + unwind: bool, + }, + Vectorcall { + unwind: bool, + }, + Thiscall { + unwind: bool, + }, + Aapcs { + unwind: bool, + }, + Win64 { + unwind: bool, + }, + SysV64 { + unwind: bool, + }, + PtxKernel, + Msp430Interrupt, + X86Interrupt, + AmdGpuKernel, + EfiApi, + AvrInterrupt, + AvrNonBlockingInterrupt, + CCmseNonSecureCall, + Wasm, + System { + unwind: bool, + }, + RustIntrinsic, + RustCall, + PlatformIntrinsic, + Unadjusted, + /// For things unlikely to be called, where reducing register pressure in + /// `extern "Rust"` callers is worth paying extra cost in the callee. + /// Stronger than just `#[cold]` because `fn` pointers might be incompatible. + RustCold, + RiscvInterruptM, + RiscvInterruptS, +} + +impl Abi { + pub fn supports_varargs(self) -> bool { + // * C and Cdecl obviously support varargs. + // * C can be based on Aapcs, SysV64 or Win64, so they must support varargs. + // * EfiApi is based on Win64 or C, so it also supports it. + // + // * Stdcall does not, because it would be impossible for the callee to clean + // up the arguments. (callee doesn't know how many arguments are there) + // * Same for Fastcall, Vectorcall and Thiscall. + // * System can become Stdcall, so is also a no-no. + // * Other calling conventions are related to hardware or the compiler itself. + match self { + Self::C { .. } + | Self::Cdecl { .. } + | Self::Aapcs { .. } + | Self::Win64 { .. } + | Self::SysV64 { .. } + | Self::EfiApi => true, + _ => false, + } + } +} + +#[derive(Copy, Clone)] +pub struct AbiData { + abi: Abi, + + /// Name of this ABI as we like it called. + name: &'static str, +} + +#[allow(non_upper_case_globals)] +const AbiDatas: &[AbiData] = &[ + AbiData { abi: Abi::Rust, name: "Rust" }, + AbiData { abi: Abi::C { unwind: false }, name: "C" }, + AbiData { abi: Abi::C { unwind: true }, name: "C-unwind" }, + AbiData { abi: Abi::Cdecl { unwind: false }, name: "cdecl" }, + AbiData { abi: Abi::Cdecl { unwind: true }, name: "cdecl-unwind" }, + AbiData { abi: Abi::Stdcall { unwind: false }, name: "stdcall" }, + AbiData { abi: Abi::Stdcall { unwind: true }, name: "stdcall-unwind" }, + AbiData { abi: Abi::Fastcall { unwind: false }, name: "fastcall" }, + AbiData { abi: Abi::Fastcall { unwind: true }, name: "fastcall-unwind" }, + AbiData { abi: Abi::Vectorcall { unwind: false }, name: "vectorcall" }, + AbiData { abi: Abi::Vectorcall { unwind: true }, name: "vectorcall-unwind" }, + AbiData { abi: Abi::Thiscall { unwind: false }, name: "thiscall" }, + AbiData { abi: Abi::Thiscall { unwind: true }, name: "thiscall-unwind" }, + AbiData { abi: Abi::Aapcs { unwind: false }, name: "aapcs" }, + AbiData { abi: Abi::Aapcs { unwind: true }, name: "aapcs-unwind" }, + AbiData { abi: Abi::Win64 { unwind: false }, name: "win64" }, + AbiData { abi: Abi::Win64 { unwind: true }, name: "win64-unwind" }, + AbiData { abi: Abi::SysV64 { unwind: false }, name: "sysv64" }, + AbiData { abi: Abi::SysV64 { unwind: true }, name: "sysv64-unwind" }, + AbiData { abi: Abi::PtxKernel, name: "ptx-kernel" }, + AbiData { abi: Abi::Msp430Interrupt, name: "msp430-interrupt" }, + AbiData { abi: Abi::X86Interrupt, name: "x86-interrupt" }, + AbiData { abi: Abi::AmdGpuKernel, name: "amdgpu-kernel" }, + AbiData { abi: Abi::EfiApi, name: "efiapi" }, + AbiData { abi: Abi::AvrInterrupt, name: "avr-interrupt" }, + AbiData { abi: Abi::AvrNonBlockingInterrupt, name: "avr-non-blocking-interrupt" }, + AbiData { abi: 
Abi::CCmseNonSecureCall, name: "C-cmse-nonsecure-call" }, + AbiData { abi: Abi::Wasm, name: "wasm" }, + AbiData { abi: Abi::System { unwind: false }, name: "system" }, + AbiData { abi: Abi::System { unwind: true }, name: "system-unwind" }, + AbiData { abi: Abi::RustIntrinsic, name: "rust-intrinsic" }, + AbiData { abi: Abi::RustCall, name: "rust-call" }, + AbiData { abi: Abi::PlatformIntrinsic, name: "platform-intrinsic" }, + AbiData { abi: Abi::Unadjusted, name: "unadjusted" }, + AbiData { abi: Abi::RustCold, name: "rust-cold" }, + AbiData { abi: Abi::RiscvInterruptM, name: "riscv-interrupt-m" }, + AbiData { abi: Abi::RiscvInterruptS, name: "riscv-interrupt-s" }, +]; + +#[derive(Copy, Clone, Debug)] +pub enum AbiUnsupported { + Unrecognized, + Reason { explain: &'static str }, +} + +/// Returns the ABI with the given name (if any). +pub fn lookup(name: &str) -> Result<Abi, AbiUnsupported> { + AbiDatas.iter().find(|abi_data| name == abi_data.name).map(|&x| x.abi).ok_or_else(|| match name { + "riscv-interrupt" => AbiUnsupported::Reason { + explain: "please use one of riscv-interrupt-m or riscv-interrupt-s for machine- or supervisor-level interrupts, respectively", + }, + "riscv-interrupt-u" => AbiUnsupported::Reason { + explain: "user-mode interrupt handlers have been removed from LLVM pending standardization, see: https://reviews.llvm.org/D149314", + }, + + _ => AbiUnsupported::Unrecognized, + + }) +} + +pub fn all_names() -> Vec<&'static str> { + AbiDatas.iter().map(|d| d.name).collect() +} + +pub fn enabled_names(features: &rustc_feature::Features, span: Span) -> Vec<&'static str> { + AbiDatas + .iter() + .map(|d| d.name) + .filter(|name| is_enabled(features, span, name).is_ok()) + .collect() +} + +pub enum AbiDisabled { + Unstable { feature: Symbol, explain: &'static str }, + Unrecognized, +} + +pub fn is_enabled( + features: &rustc_feature::Features, + span: Span, + name: &str, +) -> Result<(), AbiDisabled> { + let s = is_stable(name); + if let Err(AbiDisabled::Unstable { feature, ..
}) = s { + if features.active(feature) || span.allows_unstable(feature) { + return Ok(()); + } + } + s +} + +pub fn is_stable(name: &str) -> Result<(), AbiDisabled> { + match name { + // Stable + "Rust" | "C" | "C-unwind" | "cdecl" | "cdecl-unwind" | "stdcall" | "stdcall-unwind" + | "fastcall" | "fastcall-unwind" | "aapcs" | "aapcs-unwind" | "win64" | "win64-unwind" + | "sysv64" | "sysv64-unwind" | "system" | "system-unwind" | "efiapi" | "thiscall" + | "thiscall-unwind" => Ok(()), + "rust-intrinsic" => Err(AbiDisabled::Unstable { + feature: sym::intrinsics, + explain: "intrinsics are subject to change", + }), + "platform-intrinsic" => Err(AbiDisabled::Unstable { + feature: sym::platform_intrinsics, + explain: "platform intrinsics are experimental and possibly buggy", + }), + "vectorcall" => Err(AbiDisabled::Unstable { + feature: sym::abi_vectorcall, + explain: "vectorcall is experimental and subject to change", + }), + "vectorcall-unwind" => Err(AbiDisabled::Unstable { + feature: sym::abi_vectorcall, + explain: "vectorcall-unwind ABI is experimental and subject to change", + }), + "rust-call" => Err(AbiDisabled::Unstable { + feature: sym::unboxed_closures, + explain: "rust-call ABI is subject to change", + }), + "rust-cold" => Err(AbiDisabled::Unstable { + feature: sym::rust_cold_cc, + explain: "rust-cold is experimental and subject to change", + }), + "ptx-kernel" => Err(AbiDisabled::Unstable { + feature: sym::abi_ptx, + explain: "PTX ABIs are experimental and subject to change", + }), + "unadjusted" => Err(AbiDisabled::Unstable { + feature: sym::abi_unadjusted, + explain: "unadjusted ABI is an implementation detail and perma-unstable", + }), + "msp430-interrupt" => Err(AbiDisabled::Unstable { + feature: sym::abi_msp430_interrupt, + explain: "msp430-interrupt ABI is experimental and subject to change", + }), + "x86-interrupt" => Err(AbiDisabled::Unstable { + feature: sym::abi_x86_interrupt, + explain: "x86-interrupt ABI is experimental and subject to change", + }), + "amdgpu-kernel" => Err(AbiDisabled::Unstable { + feature: sym::abi_amdgpu_kernel, + explain: "amdgpu-kernel ABI is experimental and subject to change", + }), + "avr-interrupt" | "avr-non-blocking-interrupt" => Err(AbiDisabled::Unstable { + feature: sym::abi_avr_interrupt, + explain: "avr-interrupt and avr-non-blocking-interrupt ABIs are experimental and subject to change", + }), + "riscv-interrupt-m" | "riscv-interrupt-s" => Err(AbiDisabled::Unstable { + feature: sym::abi_riscv_interrupt, + explain: "riscv-interrupt ABIs are experimental and subject to change", + }), + "C-cmse-nonsecure-call" => Err(AbiDisabled::Unstable { + feature: sym::abi_c_cmse_nonsecure_call, + explain: "C-cmse-nonsecure-call ABI is experimental and subject to change", + }), + "wasm" => Err(AbiDisabled::Unstable { + feature: sym::wasm_abi, + explain: "wasm ABI is experimental and subject to change", + }), + _ => Err(AbiDisabled::Unrecognized), + } +} + +impl Abi { + /// Default ABI chosen for `extern fn` declarations without an explicit ABI. + pub const FALLBACK: Abi = Abi::C { unwind: false }; + + #[inline] + pub fn index(self) -> usize { + // N.B., this ordering MUST match the AbiDatas array above. + // (This is ensured by the test indices_are_correct().) 
+ use Abi::*; + let i = match self { + // Cross-platform ABIs + Rust => 0, + C { unwind: false } => 1, + C { unwind: true } => 2, + // Platform-specific ABIs + Cdecl { unwind: false } => 3, + Cdecl { unwind: true } => 4, + Stdcall { unwind: false } => 5, + Stdcall { unwind: true } => 6, + Fastcall { unwind: false } => 7, + Fastcall { unwind: true } => 8, + Vectorcall { unwind: false } => 9, + Vectorcall { unwind: true } => 10, + Thiscall { unwind: false } => 11, + Thiscall { unwind: true } => 12, + Aapcs { unwind: false } => 13, + Aapcs { unwind: true } => 14, + Win64 { unwind: false } => 15, + Win64 { unwind: true } => 16, + SysV64 { unwind: false } => 17, + SysV64 { unwind: true } => 18, + PtxKernel => 19, + Msp430Interrupt => 20, + X86Interrupt => 21, + AmdGpuKernel => 22, + EfiApi => 23, + AvrInterrupt => 24, + AvrNonBlockingInterrupt => 25, + CCmseNonSecureCall => 26, + Wasm => 27, + // Cross-platform ABIs + System { unwind: false } => 28, + System { unwind: true } => 29, + RustIntrinsic => 30, + RustCall => 31, + PlatformIntrinsic => 32, + Unadjusted => 33, + RustCold => 34, + RiscvInterruptM => 35, + RiscvInterruptS => 36, + }; + debug_assert!( + AbiDatas + .iter() + .enumerate() + .find(|(_, AbiData { abi, .. })| *abi == self) + .map(|(index, _)| index) + .expect("abi variant has associated data") + == i, + "Abi index did not match `AbiDatas` ordering" + ); + i + } + + #[inline] + pub fn data(self) -> &'static AbiData { + &AbiDatas[self.index()] + } + + pub fn name(self) -> &'static str { + self.data().name + } +} + +impl fmt::Display for Abi { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "\"{}\"", self.name()) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/abi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/abi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/abi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/abi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,341 +0,0 @@ -use std::fmt; - -use rustc_macros::HashStable_Generic; -use rustc_span::symbol::sym; -use rustc_span::{Span, Symbol}; - -#[cfg(test)] -mod tests; - -#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Debug)] -#[derive(HashStable_Generic, Encodable, Decodable)] -pub enum Abi { - // Some of the ABIs come first because every time we add a new ABI, we have to re-bless all the - // hashing tests. These are used in many places, so giving them stable values reduces test - // churn. The specific values are meaningless. - Rust, - C { - unwind: bool, - }, - Cdecl { - unwind: bool, - }, - Stdcall { - unwind: bool, - }, - Fastcall { - unwind: bool, - }, - Vectorcall { - unwind: bool, - }, - Thiscall { - unwind: bool, - }, - Aapcs { - unwind: bool, - }, - Win64 { - unwind: bool, - }, - SysV64 { - unwind: bool, - }, - PtxKernel, - Msp430Interrupt, - X86Interrupt, - AmdGpuKernel, - EfiApi, - AvrInterrupt, - AvrNonBlockingInterrupt, - CCmseNonSecureCall, - Wasm, - System { - unwind: bool, - }, - RustIntrinsic, - RustCall, - PlatformIntrinsic, - Unadjusted, - /// For things unlikely to be called, where reducing register pressure in - /// `extern "Rust"` callers is worth paying extra cost in the callee. - /// Stronger than just `#[cold]` because `fn` pointers might be incompatible. - RustCold, - RiscvInterruptM, - RiscvInterruptS, -} - -impl Abi { - pub fn supports_varargs(self) -> bool { - // * C and Cdecl obviously support varargs. 
- // * C can be based on Aapcs, SysV64 or Win64, so they must support varargs. - // * EfiApi is based on Win64 or C, so it also supports it. - // - // * Stdcall does not, because it would be impossible for the callee to clean - // up the arguments. (callee doesn't know how many arguments are there) - // * Same for Fastcall, Vectorcall and Thiscall. - // * System can become Stdcall, so is also a no-no. - // * Other calling conventions are related to hardware or the compiler itself. - match self { - Self::C { .. } - | Self::Cdecl { .. } - | Self::Aapcs { .. } - | Self::Win64 { .. } - | Self::SysV64 { .. } - | Self::EfiApi => true, - _ => false, - } - } -} - -#[derive(Copy, Clone)] -pub struct AbiData { - abi: Abi, - - /// Name of this ABI as we like it called. - name: &'static str, -} - -#[allow(non_upper_case_globals)] -const AbiDatas: &[AbiData] = &[ - AbiData { abi: Abi::Rust, name: "Rust" }, - AbiData { abi: Abi::C { unwind: false }, name: "C" }, - AbiData { abi: Abi::C { unwind: true }, name: "C-unwind" }, - AbiData { abi: Abi::Cdecl { unwind: false }, name: "cdecl" }, - AbiData { abi: Abi::Cdecl { unwind: true }, name: "cdecl-unwind" }, - AbiData { abi: Abi::Stdcall { unwind: false }, name: "stdcall" }, - AbiData { abi: Abi::Stdcall { unwind: true }, name: "stdcall-unwind" }, - AbiData { abi: Abi::Fastcall { unwind: false }, name: "fastcall" }, - AbiData { abi: Abi::Fastcall { unwind: true }, name: "fastcall-unwind" }, - AbiData { abi: Abi::Vectorcall { unwind: false }, name: "vectorcall" }, - AbiData { abi: Abi::Vectorcall { unwind: true }, name: "vectorcall-unwind" }, - AbiData { abi: Abi::Thiscall { unwind: false }, name: "thiscall" }, - AbiData { abi: Abi::Thiscall { unwind: true }, name: "thiscall-unwind" }, - AbiData { abi: Abi::Aapcs { unwind: false }, name: "aapcs" }, - AbiData { abi: Abi::Aapcs { unwind: true }, name: "aapcs-unwind" }, - AbiData { abi: Abi::Win64 { unwind: false }, name: "win64" }, - AbiData { abi: Abi::Win64 { unwind: true }, name: "win64-unwind" }, - AbiData { abi: Abi::SysV64 { unwind: false }, name: "sysv64" }, - AbiData { abi: Abi::SysV64 { unwind: true }, name: "sysv64-unwind" }, - AbiData { abi: Abi::PtxKernel, name: "ptx-kernel" }, - AbiData { abi: Abi::Msp430Interrupt, name: "msp430-interrupt" }, - AbiData { abi: Abi::X86Interrupt, name: "x86-interrupt" }, - AbiData { abi: Abi::AmdGpuKernel, name: "amdgpu-kernel" }, - AbiData { abi: Abi::EfiApi, name: "efiapi" }, - AbiData { abi: Abi::AvrInterrupt, name: "avr-interrupt" }, - AbiData { abi: Abi::AvrNonBlockingInterrupt, name: "avr-non-blocking-interrupt" }, - AbiData { abi: Abi::CCmseNonSecureCall, name: "C-cmse-nonsecure-call" }, - AbiData { abi: Abi::Wasm, name: "wasm" }, - AbiData { abi: Abi::System { unwind: false }, name: "system" }, - AbiData { abi: Abi::System { unwind: true }, name: "system-unwind" }, - AbiData { abi: Abi::RustIntrinsic, name: "rust-intrinsic" }, - AbiData { abi: Abi::RustCall, name: "rust-call" }, - AbiData { abi: Abi::PlatformIntrinsic, name: "platform-intrinsic" }, - AbiData { abi: Abi::Unadjusted, name: "unadjusted" }, - AbiData { abi: Abi::RustCold, name: "rust-cold" }, - AbiData { abi: Abi::RiscvInterruptM, name: "riscv-interrupt-m" }, - AbiData { abi: Abi::RiscvInterruptS, name: "riscv-interrupt-s" }, -]; - -#[derive(Copy, Clone, Debug)] -pub enum AbiUnsupported { - Unrecognized, - Reason { explain: &'static str }, -} - -/// Returns the ABI with the given name (if any). 
-pub fn lookup(name: &str) -> Result<Abi, AbiUnsupported> { - AbiDatas.iter().find(|abi_data| name == abi_data.name).map(|&x| x.abi).ok_or_else(|| match name { - "riscv-interrupt" => AbiUnsupported::Reason { - explain: "please use one of riscv-interrupt-m or riscv-interrupt-s for machine- or supervisor-level interrupts, respectively", - }, - "riscv-interrupt-u" => AbiUnsupported::Reason { - explain: "user-mode interrupt handlers have been removed from LLVM pending standardization, see: https://reviews.llvm.org/D149314", - }, - - _ => AbiUnsupported::Unrecognized, - - }) -} - -pub fn all_names() -> Vec<&'static str> { - AbiDatas.iter().map(|d| d.name).collect() -} - -pub fn enabled_names(features: &rustc_feature::Features, span: Span) -> Vec<&'static str> { - AbiDatas - .iter() - .map(|d| d.name) - .filter(|name| is_enabled(features, span, name).is_ok()) - .collect() -} - -pub enum AbiDisabled { - Unstable { feature: Symbol, explain: &'static str }, - Unrecognized, -} - -pub fn is_enabled( - features: &rustc_feature::Features, - span: Span, - name: &str, -) -> Result<(), AbiDisabled> { - let s = is_stable(name); - if let Err(AbiDisabled::Unstable { feature, .. }) = s { - if features.enabled(feature) || span.allows_unstable(feature) { - return Ok(()); - } - } - s -} - -pub fn is_stable(name: &str) -> Result<(), AbiDisabled> { - match name { - // Stable - "Rust" | "C" | "C-unwind" | "cdecl" | "cdecl-unwind" | "stdcall" | "stdcall-unwind" - | "fastcall" | "fastcall-unwind" | "aapcs" | "aapcs-unwind" | "win64" | "win64-unwind" - | "sysv64" | "sysv64-unwind" | "system" | "system-unwind" | "efiapi" | "thiscall" - | "thiscall-unwind" => Ok(()), - "rust-intrinsic" => Err(AbiDisabled::Unstable { - feature: sym::intrinsics, - explain: "intrinsics are subject to change", - }), - "platform-intrinsic" => Err(AbiDisabled::Unstable { - feature: sym::platform_intrinsics, - explain: "platform intrinsics are experimental and possibly buggy", - }), - "vectorcall" => Err(AbiDisabled::Unstable { - feature: sym::abi_vectorcall, - explain: "vectorcall is experimental and subject to change", - }), - "vectorcall-unwind" => Err(AbiDisabled::Unstable { - feature: sym::abi_vectorcall, - explain: "vectorcall-unwind ABI is experimental and subject to change", - }), - "rust-call" => Err(AbiDisabled::Unstable { - feature: sym::unboxed_closures, - explain: "rust-call ABI is subject to change", - }), - "rust-cold" => Err(AbiDisabled::Unstable { - feature: sym::rust_cold_cc, - explain: "rust-cold is experimental and subject to change", - }), - "ptx-kernel" => Err(AbiDisabled::Unstable { - feature: sym::abi_ptx, - explain: "PTX ABIs are experimental and subject to change", - }), - "unadjusted" => Err(AbiDisabled::Unstable { - feature: sym::abi_unadjusted, - explain: "unadjusted ABI is an implementation detail and perma-unstable", - }), - "msp430-interrupt" => Err(AbiDisabled::Unstable { - feature: sym::abi_msp430_interrupt, - explain: "msp430-interrupt ABI is experimental and subject to change", - }), - "x86-interrupt" => Err(AbiDisabled::Unstable { - feature: sym::abi_x86_interrupt, - explain: "x86-interrupt ABI is experimental and subject to change", - }), - "amdgpu-kernel" => Err(AbiDisabled::Unstable { - feature: sym::abi_amdgpu_kernel, - explain: "amdgpu-kernel ABI is experimental and subject to change", - }), - "avr-interrupt" | "avr-non-blocking-interrupt" => Err(AbiDisabled::Unstable { - feature: sym::abi_avr_interrupt, - explain: "avr-interrupt and avr-non-blocking-interrupt ABIs are experimental and subject to change", - }), - 
"riscv-interrupt-m" | "riscv-interrupt-s" => Err(AbiDisabled::Unstable { - feature: sym::abi_riscv_interrupt, - explain: "riscv-interrupt ABIs are experimental and subject to change", - }), - "C-cmse-nonsecure-call" => Err(AbiDisabled::Unstable { - feature: sym::abi_c_cmse_nonsecure_call, - explain: "C-cmse-nonsecure-call ABI is experimental and subject to change", - }), - "wasm" => Err(AbiDisabled::Unstable { - feature: sym::wasm_abi, - explain: "wasm ABI is experimental and subject to change", - }), - _ => Err(AbiDisabled::Unrecognized), - } -} - -impl Abi { - /// Default ABI chosen for `extern fn` declarations without an explicit ABI. - pub const FALLBACK: Abi = Abi::C { unwind: false }; - - #[inline] - pub fn index(self) -> usize { - // N.B., this ordering MUST match the AbiDatas array above. - // (This is ensured by the test indices_are_correct().) - use Abi::*; - let i = match self { - // Cross-platform ABIs - Rust => 0, - C { unwind: false } => 1, - C { unwind: true } => 2, - // Platform-specific ABIs - Cdecl { unwind: false } => 3, - Cdecl { unwind: true } => 4, - Stdcall { unwind: false } => 5, - Stdcall { unwind: true } => 6, - Fastcall { unwind: false } => 7, - Fastcall { unwind: true } => 8, - Vectorcall { unwind: false } => 9, - Vectorcall { unwind: true } => 10, - Thiscall { unwind: false } => 11, - Thiscall { unwind: true } => 12, - Aapcs { unwind: false } => 13, - Aapcs { unwind: true } => 14, - Win64 { unwind: false } => 15, - Win64 { unwind: true } => 16, - SysV64 { unwind: false } => 17, - SysV64 { unwind: true } => 18, - PtxKernel => 19, - Msp430Interrupt => 20, - X86Interrupt => 21, - AmdGpuKernel => 22, - EfiApi => 23, - AvrInterrupt => 24, - AvrNonBlockingInterrupt => 25, - CCmseNonSecureCall => 26, - Wasm => 27, - // Cross-platform ABIs - System { unwind: false } => 28, - System { unwind: true } => 29, - RustIntrinsic => 30, - RustCall => 31, - PlatformIntrinsic => 32, - Unadjusted => 33, - RustCold => 34, - RiscvInterruptM => 35, - RiscvInterruptS => 36, - }; - debug_assert!( - AbiDatas - .iter() - .enumerate() - .find(|(_, AbiData { abi, .. 
})| *abi == self) - .map(|(index, _)| index) - .expect("abi variant has associated data") - == i, - "Abi index did not match `AbiDatas` ordering" - ); - i - } - - #[inline] - pub fn data(self) -> &'static AbiData { - &AbiDatas[self.index()] - } - - pub fn name(self) -> &'static str { - self.data().name - } -} - -impl fmt::Display for Abi { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "\"{}\"", self.name()) - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aix_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aix_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aix_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/aix_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{crt_objects, cvs, Cc, CodeModel, LinkOutputKind, LinkerFlavor, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - abi: "vec-extabi".into(), - code_model: Some(CodeModel::Small), - cpu: "pwr7".into(), - os: "aix".into(), - vendor: "ibm".into(), - dynamic_linking: true, - endian: Endian::Big, - executables: true, - archive_format: "aix_big".into(), - families: cvs!["unix"], - has_rpath: false, - has_thread_local: true, - crt_static_respected: true, - linker_flavor: LinkerFlavor::Unix(Cc::No), - linker: Some("ld".into()), - eh_frame_header: false, - is_like_aix: true, - default_dwarf_version: 3, - function_sections: true, - pre_link_objects: crt_objects::new(&[ - (LinkOutputKind::DynamicNoPicExe, &["/usr/lib/crt0_64.o", "/usr/lib/crti_64.o"]), - (LinkOutputKind::DynamicPicExe, &["/usr/lib/crt0_64.o", "/usr/lib/crti_64.o"]), - ]), - dll_suffix: ".a".into(), - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/android_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/android_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/android_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/android_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -use crate::spec::{SanitizerSet, TargetOptions}; - -pub fn opts() -> TargetOptions { - let mut base = super::linux_base::opts(); - base.os = "android".into(); - base.is_like_android = true; - base.default_dwarf_version = 2; - base.has_thread_local = false; - base.supported_sanitizers = SanitizerSet::ADDRESS; - // This is for backward compatibility, see https://github.com/rust-lang/rust/issues/49867 - // for context. (At that time, there was no `-C force-unwind-tables`, so the only solution - // was to always emit `uwtable`). 
- base.default_uwtable = true; - base.crt_static_respected = true; - base -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/apple/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/apple/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/apple/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/apple/tests.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,38 +0,0 @@ -use crate::spec::{ - aarch64_apple_darwin, aarch64_apple_ios_sim, aarch64_apple_watchos_sim, i686_apple_darwin, - x86_64_apple_darwin, x86_64_apple_ios, x86_64_apple_tvos, x86_64_apple_watchos_sim, -}; - -#[test] -fn simulator_targets_set_abi() { - let all_sim_targets = [ - x86_64_apple_ios::target(), - x86_64_apple_tvos::target(), - x86_64_apple_watchos_sim::target(), - aarch64_apple_ios_sim::target(), - // Note: There is currently no ARM64 tvOS simulator target - aarch64_apple_watchos_sim::target(), - ]; - - for target in all_sim_targets { - assert_eq!(target.abi, "sim") - } -} - -#[test] -fn macos_link_environment_unmodified() { - let all_macos_targets = [ - aarch64_apple_darwin::target(), - i686_apple_darwin::target(), - x86_64_apple_darwin::target(), - ]; - - for target in all_macos_targets { - // macOS targets should only remove information for cross-compiling, but never - // for the host. - assert_eq!( - target.link_env_remove, - crate::spec::cvs!["IPHONEOS_DEPLOYMENT_TARGET", "TVOS_DEPLOYMENT_TARGET"], - ); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/apple_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/apple_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/apple_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/apple_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,356 +0,0 @@ -use std::{borrow::Cow, env}; - -use crate::spec::{cvs, Cc, DebuginfoKind, FramePointer, LinkArgs}; -use crate::spec::{LinkerFlavor, Lld, SplitDebuginfo, StaticCow, Target, TargetOptions}; - -#[cfg(test)] -#[path = "apple/tests.rs"] -mod tests; - -use Arch::*; -#[allow(non_camel_case_types)] -#[derive(Copy, Clone)] -pub enum Arch { - Armv7k, - Armv7s, - Arm64, - Arm64_32, - I386, - I686, - X86_64, - X86_64h, - X86_64_sim, - X86_64_macabi, - Arm64_macabi, - Arm64_sim, -} - -impl Arch { - pub fn target_name(self) -> &'static str { - match self { - Armv7k => "armv7k", - Armv7s => "armv7s", - Arm64 | Arm64_macabi | Arm64_sim => "arm64", - Arm64_32 => "arm64_32", - I386 => "i386", - I686 => "i686", - X86_64 | X86_64_sim | X86_64_macabi => "x86_64", - X86_64h => "x86_64h", - } - } - - pub fn target_arch(self) -> Cow<'static, str> { - Cow::Borrowed(match self { - Armv7k | Armv7s => "arm", - Arm64 | Arm64_32 | Arm64_macabi | Arm64_sim => "aarch64", - I386 | I686 => "x86", - X86_64 | X86_64_sim | X86_64_macabi | X86_64h => "x86_64", - }) - } - - fn target_abi(self) -> &'static str { - match self { - Armv7k | Armv7s | Arm64 | Arm64_32 | I386 | I686 | X86_64 | X86_64h => "", - X86_64_macabi | Arm64_macabi => "macabi", - // x86_64-apple-ios is a simulator target, even though it isn't - // declared that way in the target like the other ones... - Arm64_sim | X86_64_sim => "sim", - } - } - - fn target_cpu(self) -> &'static str { - match self { - Armv7k => "cortex-a8", - Armv7s => "swift", // iOS 10 is only supported on iPhone 5 or higher. 
- Arm64 => "apple-a7", - Arm64_32 => "apple-s4", - // Only macOS 10.12+ is supported, which means - // all x86_64/x86 CPUs must be running at least penryn - // https://github.com/llvm/llvm-project/blob/01f924d0e37a5deae51df0d77e10a15b63aa0c0f/clang/lib/Driver/ToolChains/Arch/X86.cpp#L79-L82 - I386 | I686 => "penryn", - X86_64 | X86_64_sim => "penryn", - X86_64_macabi => "penryn", - // Note: `core-avx2` is slightly more advanced than `x86_64h`, see - // comments (and disabled features) in `x86_64h_apple_darwin` for - // details. It is a higher baseline then `penryn` however. - X86_64h => "core-avx2", - Arm64_macabi => "apple-a12", - Arm64_sim => "apple-a12", - } - } -} - -fn pre_link_args(os: &'static str, arch: Arch, abi: &'static str) -> LinkArgs { - let platform_name: StaticCow<str> = match abi { - "sim" => format!("{os}-simulator").into(), - "macabi" => "mac-catalyst".into(), - _ => os.into(), - }; - - let platform_version: StaticCow<str> = match os { - "ios" => ios_lld_platform_version(), - "tvos" => tvos_lld_platform_version(), - "watchos" => watchos_lld_platform_version(), - "macos" => macos_lld_platform_version(arch), - _ => unreachable!(), - } - .into(); - - let arch = arch.target_name(); - - let mut args = TargetOptions::link_args( - LinkerFlavor::Darwin(Cc::No, Lld::No), - &["-arch", arch, "-platform_version"], - ); - super::add_link_args_iter( - &mut args, - LinkerFlavor::Darwin(Cc::No, Lld::No), - [platform_name, platform_version.clone(), platform_version].into_iter(), - ); - if abi != "macabi" { - super::add_link_args(&mut args, LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-arch", arch]); - } - - args -} - -pub fn opts(os: &'static str, arch: Arch) -> TargetOptions { - let abi = arch.target_abi(); - - TargetOptions { - abi: abi.into(), - os: os.into(), - cpu: arch.target_cpu().into(), - link_env_remove: link_env_remove(arch, os), - vendor: "apple".into(), - linker_flavor: LinkerFlavor::Darwin(Cc::Yes, Lld::No), - // macOS has -dead_strip, which doesn't rely on function_sections - function_sections: false, - dynamic_linking: true, - pre_link_args: pre_link_args(os, arch, abi), - families: cvs!["unix"], - is_like_osx: true, - // LLVM notes that macOS 10.11+ and iOS 9+ default - // to v4, so we do the same. - // https://github.com/llvm/llvm-project/blob/378778a0d10c2f8d5df8ceff81f95b6002984a4b/clang/lib/Driver/ToolChains/Darwin.cpp#L1203 - default_dwarf_version: 4, - frame_pointer: FramePointer::Always, - has_rpath: true, - dll_suffix: ".dylib".into(), - archive_format: "darwin".into(), - // Thread locals became available with iOS 8 and macOS 10.7, - // and both are far below our minimum. - has_thread_local: true, - abi_return_struct_as_int: true, - emit_debug_gdb_scripts: false, - eh_frame_header: false, - - debuginfo_kind: DebuginfoKind::DwarfDsym, - // The historical default for macOS targets is to run `dsymutil` which - // generates a packed version of debuginfo split from the main file. - split_debuginfo: SplitDebuginfo::Packed, - supported_split_debuginfo: Cow::Borrowed(&[ - SplitDebuginfo::Packed, - SplitDebuginfo::Unpacked, - SplitDebuginfo::Off, - ]), - - // This environment variable is pretty magical but is intended for - // producing deterministic builds. This was first discovered to be used - // by the `ar` tool as a way to control whether or not mtime entries in - // the archive headers were set to zero or not. It appears that - // eventually the linker got updated to do the same thing and now reads - // this environment variable too in recent versions. 
- // - // For some more info see the commentary on #47086 - link_env: Cow::Borrowed(&[(Cow::Borrowed("ZERO_AR_DATE"), Cow::Borrowed("1"))]), - - ..Default::default() - } -} - -pub fn sdk_version(platform: u32) -> Option<(u32, u32)> { - // NOTE: These values are from an arbitrary point in time but shouldn't make it into the final - // binary since the final link command will have the current SDK version passed to it. - match platform { - object::macho::PLATFORM_MACOS => Some((13, 1)), - object::macho::PLATFORM_IOS - | object::macho::PLATFORM_IOSSIMULATOR - | object::macho::PLATFORM_TVOS - | object::macho::PLATFORM_TVOSSIMULATOR - | object::macho::PLATFORM_MACCATALYST => Some((16, 2)), - object::macho::PLATFORM_WATCHOS | object::macho::PLATFORM_WATCHOSSIMULATOR => Some((9, 1)), - _ => None, - } -} - -pub fn platform(target: &Target) -> Option<u32> { - Some(match (&*target.os, &*target.abi) { - ("macos", _) => object::macho::PLATFORM_MACOS, - ("ios", "macabi") => object::macho::PLATFORM_MACCATALYST, - ("ios", "sim") => object::macho::PLATFORM_IOSSIMULATOR, - ("ios", _) => object::macho::PLATFORM_IOS, - ("watchos", "sim") => object::macho::PLATFORM_WATCHOSSIMULATOR, - ("watchos", _) => object::macho::PLATFORM_WATCHOS, - ("tvos", "sim") => object::macho::PLATFORM_TVOSSIMULATOR, - ("tvos", _) => object::macho::PLATFORM_TVOS, - _ => return None, - }) -} - -pub fn deployment_target(target: &Target) -> Option<(u32, u32)> { - let (major, minor) = match &*target.os { - "macos" => { - // This does not need to be specific. It just needs to handle x86 vs M1. - let arch = if target.arch == "x86" || target.arch == "x86_64" { X86_64 } else { Arm64 }; - macos_deployment_target(arch) - } - "ios" => match &*target.abi { - "macabi" => mac_catalyst_deployment_target(), - _ => ios_deployment_target(), - }, - "watchos" => watchos_deployment_target(), - "tvos" => tvos_deployment_target(), - _ => return None, - }; - - Some((major, minor)) -} - -fn from_set_deployment_target(var_name: &str) -> Option<(u32, u32)> { - let deployment_target = env::var(var_name).ok()?; - let (unparsed_major, unparsed_minor) = deployment_target.split_once('.')?; - let (major, minor) = (unparsed_major.parse().ok()?, unparsed_minor.parse().ok()?); - - Some((major, minor)) -} - -fn macos_default_deployment_target(arch: Arch) -> (u32, u32) { - match arch { - // Note: Arm64_sim is not included since macOS has no simulator. - Arm64 | Arm64_macabi => (11, 0), - _ => (10, 12), - } -} - -fn macos_deployment_target(arch: Arch) -> (u32, u32) { - // If you are looking for the default deployment target, prefer `rustc --print deployment-target`. - from_set_deployment_target("MACOSX_DEPLOYMENT_TARGET") - .unwrap_or_else(|| macos_default_deployment_target(arch)) -} - -fn macos_lld_platform_version(arch: Arch) -> String { - let (major, minor) = macos_deployment_target(arch); - format!("{major}.{minor}") -} - -pub fn macos_llvm_target(arch: Arch) -> String { - let (major, minor) = macos_deployment_target(arch); - format!("{}-apple-macosx{}.{}.0", arch.target_name(), major, minor) -} - -fn link_env_remove(arch: Arch, os: &'static str) -> StaticCow<[StaticCow<str>]> { - // Apple platforms only officially support macOS as a host for any compilation. - // - // If building for macOS, we go ahead and remove any erroneous environment state - // that's only applicable to cross-OS compilation. Always leave anything for the - // host OS alone though. 
- if os == "macos" { - let mut env_remove = Vec::with_capacity(2); - // Remove the `SDKROOT` environment variable if it's clearly set for the wrong platform, which - // may occur when we're linking a custom build script while targeting iOS for example. - if let Ok(sdkroot) = env::var("SDKROOT") { - if sdkroot.contains("iPhoneOS.platform") - || sdkroot.contains("iPhoneSimulator.platform") - || sdkroot.contains("AppleTVOS.platform") - || sdkroot.contains("AppleTVSimulator.platform") - || sdkroot.contains("WatchOS.platform") - || sdkroot.contains("WatchSimulator.platform") - { - env_remove.push("SDKROOT".into()) - } - } - // Additionally, `IPHONEOS_DEPLOYMENT_TARGET` must not be set when using the Xcode linker at - // "/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld", - // although this is apparently ignored when using the linker at "/usr/bin/ld". - env_remove.push("IPHONEOS_DEPLOYMENT_TARGET".into()); - env_remove.push("TVOS_DEPLOYMENT_TARGET".into()); - env_remove.into() - } else { - // Otherwise if cross-compiling for a different OS/SDK, remove any part - // of the linking environment that's wrong and reversed. - match arch { - Armv7k | Armv7s | Arm64 | Arm64_32 | I386 | I686 | X86_64 | X86_64_sim | X86_64h - | Arm64_sim => { - cvs!["MACOSX_DEPLOYMENT_TARGET"] - } - X86_64_macabi | Arm64_macabi => cvs!["IPHONEOS_DEPLOYMENT_TARGET"], - } - } -} - -fn ios_deployment_target() -> (u32, u32) { - // If you are looking for the default deployment target, prefer `rustc --print deployment-target`. - from_set_deployment_target("IPHONEOS_DEPLOYMENT_TARGET").unwrap_or((10, 0)) -} - -fn mac_catalyst_deployment_target() -> (u32, u32) { - // If you are looking for the default deployment target, prefer `rustc --print deployment-target`. - from_set_deployment_target("IPHONEOS_DEPLOYMENT_TARGET").unwrap_or((14, 0)) -} - -pub fn ios_llvm_target(arch: Arch) -> String { - // Modern iOS tooling extracts information about deployment target - // from LC_BUILD_VERSION. This load command will only be emitted when - // we build with a version specific `llvm_target`, with the version - // set high enough. Luckily one LC_BUILD_VERSION is enough, for Xcode - // to pick it up (since std and core are still built with the fallback - // of version 7.0 and hence emit the old LC_IPHONE_MIN_VERSION). - let (major, minor) = ios_deployment_target(); - format!("{}-apple-ios{}.{}.0", arch.target_name(), major, minor) -} - -fn ios_lld_platform_version() -> String { - let (major, minor) = ios_deployment_target(); - format!("{major}.{minor}") -} - -pub fn ios_sim_llvm_target(arch: Arch) -> String { - let (major, minor) = ios_deployment_target(); - format!("{}-apple-ios{}.{}.0-simulator", arch.target_name(), major, minor) -} - -fn tvos_deployment_target() -> (u32, u32) { - // If you are looking for the default deployment target, prefer `rustc --print deployment-target`. 
- from_set_deployment_target("TVOS_DEPLOYMENT_TARGET").unwrap_or((10, 0)) -} - -fn tvos_lld_platform_version() -> String { - let (major, minor) = tvos_deployment_target(); - format!("{major}.{minor}") -} - -pub fn tvos_llvm_target(arch: Arch) -> String { - let (major, minor) = tvos_deployment_target(); - format!("{}-apple-tvos{}.{}.0", arch.target_name(), major, minor) -} - -pub fn tvos_sim_llvm_target(arch: Arch) -> String { - let (major, minor) = tvos_deployment_target(); - format!("{}-apple-tvos{}.{}.0-simulator", arch.target_name(), major, minor) -} - -fn watchos_deployment_target() -> (u32, u32) { - // If you are looking for the default deployment target, prefer `rustc --print deployment-target`. - from_set_deployment_target("WATCHOS_DEPLOYMENT_TARGET").unwrap_or((5, 0)) -} - -fn watchos_lld_platform_version() -> String { - let (major, minor) = watchos_deployment_target(); - format!("{major}.{minor}") -} - -pub fn watchos_sim_llvm_target(arch: Arch) -> String { - let (major, minor) = watchos_deployment_target(); - format!("{}-apple-watchos{}.{}.0-simulator", arch.target_name(), major, minor) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -use super::apple_base::{opts, Arch}; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - let base = opts("watchos", Arch::Arm64_32); - Target { - llvm_target: "arm64_32-apple-watchos".into(), - pointer_width: 32, - data_layout: "e-m:o-p:32:32-i64:64-i128:128-n32:64-S128".into(), - arch: "aarch64".into(), - options: TargetOptions { - features: "+v8a,+neon,+fp-armv8,+apple-a7".into(), - max_atomic_width: Some(128), - forces_embed_bitcode: true, - dynamic_linking: false, - position_independent_executables: true, - // These arguments are not actually invoked - they just have - // to look right to pass App Store validation. 
- bitcode_llvm_cmdline: "-triple\0\ - arm64_32-apple-watchos5.0.0\0\ - -emit-obj\0\ - -disable-llvm-passes\0\ - -target-abi\0\ - darwinpcs\0\ - -Os\0" - .into(), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_linux_androideabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_linux_androideabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_linux_androideabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_linux_androideabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "arm-linux-androideabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - // https://developer.android.com/ndk/guides/abis.html#armeabi - features: "+strict-align,+v5te".into(), - supported_sanitizers: SanitizerSet::ADDRESS, - max_atomic_width: Some(32), - ..super::android_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_gnueabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_gnueabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_gnueabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_gnueabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "arm-unknown-linux-gnueabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - features: "+strict-align,+v6".into(), - max_atomic_width: Some(64), - mcount: "\u{1}__gnu_mcount_nc".into(), - llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_gnueabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_gnueabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_gnueabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_gnueabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "arm-unknown-linux-gnueabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabihf".into(), - features: "+strict-align,+v6,+vfp2,-d32".into(), - max_atomic_width: Some(64), - mcount: "\u{1}__gnu_mcount_nc".into(), - llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_musleabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_musleabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_musleabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_musleabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - // It's important we use "gnueabi" and not "musleabi" here. LLVM uses it - // to determine the calling convention and float ABI, and it doesn't - // support the "musleabi" value. - llvm_target: "arm-unknown-linux-gnueabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - // Most of these settings are copied from the arm_unknown_linux_gnueabi - // target. - features: "+strict-align,+v6".into(), - max_atomic_width: Some(64), - mcount: "\u{1}mcount".into(), - ..super::linux_musl_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_musleabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_musleabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_musleabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/arm_unknown_linux_musleabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - // It's important we use "gnueabihf" and not "musleabihf" here. LLVM - // uses it to determine the calling convention and float ABI, and it - // doesn't support the "musleabihf" value. - llvm_target: "arm-unknown-linux-gnueabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabihf".into(), - // Most of these settings are copied from the arm_unknown_linux_gnueabihf - // target. 
- features: "+strict-align,+v6,+vfp2,-d32".into(), - max_atomic_width: Some(64), - mcount: "\u{1}mcount".into(), - ..super::linux_musl_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armeb_unknown_linux_gnueabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armeb_unknown_linux_gnueabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armeb_unknown_linux_gnueabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armeb_unknown_linux_gnueabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armeb-unknown-linux-gnueabi".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - features: "+strict-align,+v8,+crc".into(), - endian: Endian::Big, - max_atomic_width: Some(64), - mcount: "\u{1}__gnu_mcount_nc".into(), - llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armebv7r_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armebv7r_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armebv7r_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armebv7r_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -// Targets the Big endian Cortex-R4/R5 processor (ARMv7-R) - -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armebv7r-none-eabi".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - endian: Endian::Big, - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - relocation_model: RelocModel::Static, - panic_strategy: PanicStrategy::Abort, - max_atomic_width: Some(64), - emit_debug_gdb_scripts: false, - // GCC defaults to 8 for arm-none here. 
- c_enum_min_bits: Some(8), - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armebv7r_none_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armebv7r_none_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armebv7r_none_eabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armebv7r_none_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -// Targets the Cortex-R4F/R5F processor (ARMv7-R) - -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armebv7r-none-eabihf".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabihf".into(), - endian: Endian::Big, - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - relocation_model: RelocModel::Static, - panic_strategy: PanicStrategy::Abort, - features: "+vfp3,-d32,-fp16".into(), - max_atomic_width: Some(64), - emit_debug_gdb_scripts: false, - // GCC defaults to 8 for arm-none here. - c_enum_min_bits: Some(8), - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv4t_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv4t_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv4t_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv4t_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,49 +0,0 @@ -//! Targets the ARMv4T, with code as `a32` code by default. -//! -//! Primarily of use for the GBA, but usable with other devices too. -//! -//! Please ping @Lokathor if changes are needed. -//! -//! **Important:** This target profile **does not** specify a linker script. You -//! just get the default link script when you build a binary for this target. -//! The default link script is very likely wrong, so you should use -//! `-Clink-arg=-Tmy_script.ld` to override that with a correct linker script. - -use crate::spec::{cvs, Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv4t-none-eabi".into(), - pointer_width: 32, - arch: "arm".into(), - /* Data layout args are '-' separated: - * little endian - * stack is 64-bit aligned (EABI) - * pointers are 32-bit - * i64 must be 64-bit aligned (EABI) - * mangle names with ELF style - * native integers are 32-bit - * All other elements are default - */ - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - options: TargetOptions { - abi: "eabi".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - asm_args: cvs!["-mthumb-interwork", "-march=armv4t", "-mlittle-endian",], - // Force-enable 32-bit atomics, which allows the use of atomic load/store only. - // The resulting atomics are ABI incompatible with atomics backed by libatomic. - features: "+soft-float,+strict-align,+atomics-32".into(), - main_needs_argc_argv: false, - atomic_cas: false, - has_thumb_interworking: true, - relocation_model: RelocModel::Static, - panic_strategy: PanicStrategy::Abort, - // From thumb_base, rust-lang/rust#44993. 
- emit_debug_gdb_scripts: false, - // From thumb_base, GCC gives enums a minimum of 8 bits on no-os targets. - c_enum_min_bits: Some(8), - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv4t_unknown_linux_gnueabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv4t_unknown_linux_gnueabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv4t_unknown_linux_gnueabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv4t_unknown_linux_gnueabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv4t-unknown-linux-gnueabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - features: "+soft-float,+strict-align".into(), - // Atomic operations provided by compiler-builtins - max_atomic_width: Some(32), - mcount: "\u{1}__gnu_mcount_nc".into(), - llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), - has_thumb_interworking: true, - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,41 +0,0 @@ -//! Targets the ARMv5TE, with code as `a32` code by default. - -use crate::spec::{cvs, FramePointer, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv5te-none-eabi".into(), - pointer_width: 32, - arch: "arm".into(), - /* Data layout args are '-' separated: - * little endian - * stack is 64-bit aligned (EABI) - * pointers are 32-bit - * i64 must be 64-bit aligned (EABI) - * mangle names with ELF style - * native integers are 32-bit - * All other elements are default - */ - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - - options: TargetOptions { - abi: "eabi".into(), - // extra args passed to the external assembler (assuming `arm-none-eabi-as`): - // * activate t32/a32 interworking - // * use arch ARMv5TE - // * use little-endian - asm_args: cvs!["-mthumb-interwork", "-march=armv5te", "-mlittle-endian",], - // minimum extra features, these cannot be disabled via -C - // Also force-enable 32-bit atomics, which allows the use of atomic load/store only. - // The resulting atomics are ABI incompatible with atomics backed by libatomic. 
- features: "+soft-float,+strict-align,+atomics-32".into(), - frame_pointer: FramePointer::MayOmit, - main_needs_argc_argv: false, - // don't have atomic compare-and-swap - atomic_cas: false, - has_thumb_interworking: true, - - ..super::thumb_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_gnueabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_gnueabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_gnueabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_gnueabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv5te-unknown-linux-gnueabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - features: "+soft-float,+strict-align".into(), - // Atomic operations provided by compiler-builtins - max_atomic_width: Some(32), - mcount: "\u{1}__gnu_mcount_nc".into(), - has_thumb_interworking: true, - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_musleabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_musleabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_musleabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_musleabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - // FIXME: this comment below does not seem applicable? - // It's important we use "gnueabihf" and not "musleabihf" here. LLVM - // uses it to determine the calling convention and float ABI, and LLVM - // doesn't support the "musleabihf" value. 
- llvm_target: "armv5te-unknown-linux-gnueabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - features: "+soft-float,+strict-align".into(), - // Atomic operations provided by compiler-builtins - max_atomic_width: Some(32), - mcount: "\u{1}mcount".into(), - has_thumb_interworking: true, - ..super::linux_musl_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_uclibceabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_uclibceabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_uclibceabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv5te_unknown_linux_uclibceabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv5te-unknown-linux-uclibcgnueabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - features: "+soft-float,+strict-align".into(), - // Atomic operations provided by compiler-builtins - max_atomic_width: Some(32), - mcount: "\u{1}__gnu_mcount_nc".into(), - has_thumb_interworking: true, - ..super::linux_uclibc_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6_unknown_freebsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv6-unknown-freebsd-gnueabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabihf".into(), - // FIXME: change env to "gnu" when cfg_target_abi becomes stable - env: "gnueabihf".into(), - features: "+v6,+vfp2,-d32".into(), - max_atomic_width: Some(64), - mcount: "\u{1}__gnu_mcount_nc".into(), - llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), - ..super::freebsd_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6_unknown_netbsd_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6_unknown_netbsd_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6_unknown_netbsd_eabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6_unknown_netbsd_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv6-unknown-netbsdelf-eabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabihf".into(), - // FIXME: remove env when cfg_target_abi becomes stable - env: "eabihf".into(), - features: "+v6,+vfp2,-d32".into(), - max_atomic_width: 
Some(64), - mcount: "__mcount".into(), - ..super::netbsd_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6k_nintendo_3ds.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6k_nintendo_3ds.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6k_nintendo_3ds.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv6k_nintendo_3ds.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,36 +0,0 @@ -use crate::spec::{cvs, Cc, LinkerFlavor, Lld, RelocModel, Target, TargetOptions}; - -/// A base target for Nintendo 3DS devices using the devkitARM toolchain. -/// -/// Requires the devkitARM toolchain for 3DS targets on the host system. - -pub fn target() -> Target { - let pre_link_args = TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - &["-specs=3dsx.specs", "-mtune=mpcore", "-mfloat-abi=hard", "-mtp=soft"], - ); - - Target { - llvm_target: "armv6k-none-eabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - os: "horizon".into(), - env: "newlib".into(), - vendor: "nintendo".into(), - abi: "eabihf".into(), - cpu: "mpcore".into(), - families: cvs!["unix"], - linker: Some("arm-none-eabi-gcc".into()), - relocation_model: RelocModel::Static, - features: "+vfp2".into(), - pre_link_args, - exe_suffix: ".elf".into(), - no_default_libraries: false, - has_thread_local: true, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_linux_androideabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_linux_androideabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_linux_androideabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_linux_androideabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, SanitizerSet, Target, TargetOptions}; - -// This target if is for the baseline of the Android v7a ABI -// in thumb mode. It's named armv7-* instead of thumbv7-* -// for historical reasons. See the thumbv7neon variant for -// enabling NEON. - -// See https://developer.android.com/ndk/guides/abis.html#v7a -// for target ABI requirements. 
- -pub fn target() -> Target { - let mut base = super::android_base::opts(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-march=armv7-a"]); - Target { - llvm_target: "armv7-none-linux-android".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - features: "+v7,+thumb-mode,+thumb2,+vfp3,-d32,-neon".into(), - supported_sanitizers: SanitizerSet::ADDRESS, - max_atomic_width: Some(64), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_sony_vita_newlibeabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_sony_vita_newlibeabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_sony_vita_newlibeabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_sony_vita_newlibeabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,41 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{cvs, Cc, LinkerFlavor, Lld, RelocModel, Target, TargetOptions}; - -/// A base target for PlayStation Vita devices using the VITASDK toolchain (using newlib). -/// -/// Requires the VITASDK toolchain on the host system. - -pub fn target() -> Target { - let pre_link_args = TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - &["-Wl,-q", "-Wl,--pic-veneer"], - ); - - Target { - llvm_target: "thumbv7a-vita-eabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - os: "vita".into(), - endian: Endian::Little, - c_int_width: "32".into(), - env: "newlib".into(), - vendor: "sony".into(), - abi: "eabihf".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::Yes, Lld::No), - no_default_libraries: false, - cpu: "cortex-a9".into(), - families: cvs!["unix"], - linker: Some("arm-vita-eabi-gcc".into()), - relocation_model: RelocModel::Static, - features: "+v7,+neon,+vfp3,+thumb2,+thumb-mode".into(), - pre_link_args, - exe_suffix: ".elf".into(), - has_thumb_interworking: true, - max_atomic_width: Some(64), - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_freebsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv7-unknown-freebsd-gnueabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabihf".into(), - // FIXME: change env to "gnu" when cfg_target_abi becomes stable - env: "gnueabihf".into(), - features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), - max_atomic_width: Some(64), - mcount: "\u{1}__gnu_mcount_nc".into(), - ..super::freebsd_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_gnueabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_gnueabi.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_gnueabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_gnueabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -// This target is for glibc Linux on ARMv7 without thumb-mode, NEON or -// hardfloat. - -pub fn target() -> Target { - Target { - llvm_target: "armv7-unknown-linux-gnueabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - features: "+v7,+thumb2,+soft-float,-neon".into(), - max_atomic_width: Some(64), - mcount: "\u{1}__gnu_mcount_nc".into(), - llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_gnueabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_gnueabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_gnueabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_gnueabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -// This target is for glibc Linux on ARMv7 without NEON or -// thumb-mode. See the thumbv7neon variant for enabling both. - -pub fn target() -> Target { - Target { - llvm_target: "armv7-unknown-linux-gnueabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabihf".into(), - // Info about features at https://wiki.debian.org/ArmHardFloatPort - features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), - max_atomic_width: Some(64), - mcount: "\u{1}__gnu_mcount_nc".into(), - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_musleabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_musleabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_musleabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_musleabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -// This target is for musl Linux on ARMv7 without thumb-mode, NEON or -// hardfloat. - -pub fn target() -> Target { - // Most of these settings are copied from the armv7_unknown_linux_gnueabi - // target. - Target { - // It's important we use "gnueabi" and not "musleabi" here. LLVM uses it - // to determine the calling convention and float ABI, and it doesn't - // support the "musleabi" value. 
- llvm_target: "armv7-unknown-linux-gnueabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - abi: "eabi".into(), - features: "+v7,+thumb2,+soft-float,-neon".into(), - max_atomic_width: Some(64), - mcount: "\u{1}mcount".into(), - ..super::linux_musl_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_musleabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_musleabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_musleabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_musleabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -// This target is for musl Linux on ARMv7 without thumb-mode or NEON. - -pub fn target() -> Target { - Target { - // It's important we use "gnueabihf" and not "musleabihf" here. LLVM - // uses it to determine the calling convention and float ABI, and LLVM - // doesn't support the "musleabihf" value. - llvm_target: "armv7-unknown-linux-gnueabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - // Most of these settings are copied from the armv7_unknown_linux_gnueabihf - // target. - options: TargetOptions { - abi: "eabihf".into(), - features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), - max_atomic_width: Some(64), - mcount: "\u{1}mcount".into(), - ..super::linux_musl_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_ohos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_ohos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_ohos.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_ohos.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -// This target is for OpenHarmony on ARMv7 Linux with thumb-mode, but no NEON or -// hardfloat. - -pub fn target() -> Target { - // Most of these settings are copied from the armv7_unknown_linux_musleabi - // target. - Target { - // LLVM 15 doesn't support OpenHarmony yet, use a linux target instead. - llvm_target: "armv7-unknown-linux-gnueabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - abi: "eabi".into(), - features: "+v7,+thumb2,+soft-float,-neon".into(), - max_atomic_width: Some(64), - mcount: "\u{1}mcount".into(), - ..super::linux_ohos_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_uclibceabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_uclibceabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_uclibceabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_uclibceabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -// This target is for uclibc Linux on ARMv7 without NEON, -// thumb-mode or hardfloat. 
- -pub fn target() -> Target { - let base = super::linux_uclibc_base::opts(); - Target { - llvm_target: "armv7-unknown-linux-gnueabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - features: "+v7,+thumb2,+soft-float,-neon".into(), - cpu: "generic".into(), - max_atomic_width: Some(64), - mcount: "_mcount".into(), - abi: "eabi".into(), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_uclibceabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_uclibceabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_uclibceabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_linux_uclibceabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -// This target is for uclibc Linux on ARMv7 without NEON or -// thumb-mode. See the thumbv7neon variant for enabling both. - -pub fn target() -> Target { - let base = super::linux_uclibc_base::opts(); - Target { - llvm_target: "armv7-unknown-linux-gnueabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - // Info about features at https://wiki.debian.org/ArmHardFloatPort - features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), - cpu: "generic".into(), - max_atomic_width: Some(64), - mcount: "_mcount".into(), - abi: "eabihf".into(), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_netbsd_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_netbsd_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_netbsd_eabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_unknown_netbsd_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv7-unknown-netbsdelf-eabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabihf".into(), - // FIXME: remove env when cfg_target_abi becomes stable - env: "eabihf".into(), - features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), - max_atomic_width: Some(64), - mcount: "__mcount".into(), - ..super::netbsd_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_wrs_vxworks_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_wrs_vxworks_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_wrs_vxworks_eabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7_wrs_vxworks_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv7-unknown-linux-gnueabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabihf".into(), - // Info about features at 
https://wiki.debian.org/ArmHardFloatPort - features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), - max_atomic_width: Some(64), - ..super::vxworks_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_kmc_solid_asp3_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_kmc_solid_asp3_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_kmc_solid_asp3_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_kmc_solid_asp3_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use super::{RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - let base = super::solid_base::opts("asp3"); - Target { - llvm_target: "armv7a-none-eabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - linker: Some("arm-kmc-eabi-gcc".into()), - features: "+v7,+soft-float,+thumb2,-neon".into(), - relocation_model: RelocModel::Static, - disable_redzone: true, - max_atomic_width: Some(64), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_kmc_solid_asp3_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_kmc_solid_asp3_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_kmc_solid_asp3_eabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_kmc_solid_asp3_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use super::{RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - let base = super::solid_base::opts("asp3"); - Target { - llvm_target: "armv7a-none-eabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - linker: Some("arm-kmc-eabi-gcc".into()), - features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), - relocation_model: RelocModel::Static, - disable_redzone: true, - max_atomic_width: Some(64), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,40 +0,0 @@ -// Generic ARMv7-A target for bare-metal code - floating point disabled -// -// This is basically the `armv7-unknown-linux-gnueabi` target with some changes -// (listed below) to bring it closer to the bare-metal `thumb` & `aarch64` -// targets: -// -// - `TargetOptions.features`: added `+strict-align`. rationale: unaligned -// memory access is disabled on boot on these cores -// - linker changed to LLD. rationale: C is not strictly needed to build -// bare-metal binaries (the `gcc` linker has the advantage that it knows where C -// libraries and crt*.o are but it's not much of an advantage here); LLD is also -// faster -// - `panic_strategy` set to `abort`. rationale: matches `thumb` targets -// - `relocation-model` set to `static`; also no PIE, no relro and no dynamic -// linking. 
rationale: matches `thumb` targets - -use super::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - let opts = TargetOptions { - abi: "eabi".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - features: "+v7,+thumb2,+soft-float,-neon,+strict-align".into(), - relocation_model: RelocModel::Static, - disable_redzone: true, - max_atomic_width: Some(64), - panic_strategy: PanicStrategy::Abort, - emit_debug_gdb_scripts: false, - c_enum_min_bits: Some(8), - ..Default::default() - }; - Target { - llvm_target: "armv7a-none-eabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: opts, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_none_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_none_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_none_eabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7a_none_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -// Generic ARMv7-A target for bare-metal code - floating point enabled (assumes -// FPU is present and emits FPU instructions) -// -// This is basically the `armv7-unknown-linux-gnueabihf` target with some -// changes (list in `armv7a_none_eabi.rs`) to bring it closer to the bare-metal -// `thumb` & `aarch64` targets. - -use super::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - let opts = TargetOptions { - abi: "eabihf".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - features: "+v7,+vfp3,-d32,+thumb2,-neon,+strict-align".into(), - relocation_model: RelocModel::Static, - disable_redzone: true, - max_atomic_width: Some(64), - panic_strategy: PanicStrategy::Abort, - emit_debug_gdb_scripts: false, - // GCC defaults to 8 for arm-none here. - c_enum_min_bits: Some(8), - ..Default::default() - }; - Target { - llvm_target: "armv7a-none-eabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: opts, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7k_apple_watchos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7k_apple_watchos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7k_apple_watchos.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7k_apple_watchos.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -use super::apple_base::{opts, Arch}; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::Armv7k; - Target { - llvm_target: "armv7k-apple-watchos".into(), - pointer_width: 32, - data_layout: "e-m:o-p:32:32-Fi8-i64:64-a:0:32-n32-S128".into(), - arch: arch.target_arch(), - options: TargetOptions { - features: "+v7,+vfp4,+neon".into(), - max_atomic_width: Some(64), - forces_embed_bitcode: true, - dynamic_linking: false, - position_independent_executables: true, - // These arguments are not actually invoked - they just have - // to look right to pass App Store validation. 
- bitcode_llvm_cmdline: "-triple\0\ - armv7k-apple-watchos3.0.0\0\ - -emit-obj\0\ - -disable-llvm-passes\0\ - -target-abi\0\ - darwinpcs\0\ - -Os\0" - .into(), - ..opts("watchos", arch) - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7r_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7r_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7r_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7r_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -// Targets the Little-endian Cortex-R4/R5 processor (ARMv7-R) - -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv7r-none-eabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - abi: "eabi".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - relocation_model: RelocModel::Static, - panic_strategy: PanicStrategy::Abort, - max_atomic_width: Some(64), - emit_debug_gdb_scripts: false, - // GCC defaults to 8 for arm-none here. - c_enum_min_bits: Some(8), - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7r_none_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7r_none_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7r_none_eabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7r_none_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -// Targets the Little-endian Cortex-R4F/R5F processor (ARMv7-R) - -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "armv7r-none-eabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - abi: "eabihf".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - relocation_model: RelocModel::Static, - panic_strategy: PanicStrategy::Abort, - features: "+vfp3,-d32,-fp16".into(), - max_atomic_width: Some(64), - emit_debug_gdb_scripts: false, - // GCC defaults to 8 for arm-none here. 
- c_enum_min_bits: Some(8), - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7s_apple_ios.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7s_apple_ios.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7s_apple_ios.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/armv7s_apple_ios.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use super::apple_base::{ios_llvm_target, opts, Arch}; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::Armv7s; - Target { - llvm_target: ios_llvm_target(arch).into(), - pointer_width: 32, - data_layout: "e-m:o-p:32:32-Fi8-f64:32:64-v64:32:64-v128:32:128-a:0:32-n32-S32".into(), - arch: arch.target_arch(), - options: TargetOptions { - features: "+v7,+vfp4,+neon".into(), - max_atomic_width: Some(64), - ..opts("ios", arch) - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/asmjs_unknown_emscripten.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/asmjs_unknown_emscripten.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/asmjs_unknown_emscripten.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/asmjs_unknown_emscripten.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -use super::{wasm32_unknown_emscripten, LinkerFlavor, Target}; - -pub fn target() -> Target { - let mut target = wasm32_unknown_emscripten::target(); - target.add_post_link_args(LinkerFlavor::EmCc, &["-sWASM=0", "--memory-init-file", "0"]); - target -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/avr_gnu_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/avr_gnu_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/avr_gnu_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/avr_gnu_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,368 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, RelocModel, Target, TargetOptions}; -use object::elf; - -/// A base target for AVR devices using the GNU toolchain. -/// -/// Requires GNU avr-gcc and avr-binutils on the host system. -/// FIXME: Remove the second parameter when const string concatenation is possible. -pub fn target(target_cpu: &'static str, mmcu: &'static str) -> Target { - Target { - arch: "avr".into(), - data_layout: "e-P1-p:16:8-i8:8-i16:8-i32:8-i64:8-f32:8-f64:8-n8-a:8".into(), - llvm_target: "avr-unknown-unknown".into(), - pointer_width: 16, - options: TargetOptions { - c_int_width: "16".into(), - cpu: target_cpu.into(), - exe_suffix: ".elf".into(), - - linker: Some("avr-gcc".into()), - eh_frame_header: false, - pre_link_args: TargetOptions::link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &[mmcu]), - late_link_args: TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - &["-lgcc"], - ), - max_atomic_width: Some(16), - atomic_cas: false, - relocation_model: RelocModel::Static, - ..TargetOptions::default() - }, - } -} - -/// Resolve the value of the EF_AVR_ARCH field for AVR ELF files, given the -/// name of the target CPU / MCU. -/// -/// In ELF files using the AVR architecture, the lower 7 bits of the e_flags -/// field is a code that identifies the "ISA revision" of the object code. 
-/// -/// This flag is generally set by AVR compilers in their output ELF files, -/// and linkers like avr-ld check this flag in all of their input files to -/// make sure they are compiled with the same ISA revision. -pub fn ef_avr_arch(target_cpu: &str) -> u32 { - // Adapted from llvm-project/llvm/lib/target/AVR/AVRDevices.td - match target_cpu { - // Generic MCUs - "avr1" => elf::EF_AVR_ARCH_AVR1, - "avr2" => elf::EF_AVR_ARCH_AVR2, - "avr25" => elf::EF_AVR_ARCH_AVR25, - "avr3" => elf::EF_AVR_ARCH_AVR3, - "avr31" => elf::EF_AVR_ARCH_AVR31, - "avr35" => elf::EF_AVR_ARCH_AVR35, - "avr4" => elf::EF_AVR_ARCH_AVR4, - "avr5" => elf::EF_AVR_ARCH_AVR5, - "avr51" => elf::EF_AVR_ARCH_AVR51, - "avr6" => elf::EF_AVR_ARCH_AVR6, - "avrxmega1" => elf::EF_AVR_ARCH_XMEGA1, - "avrxmega2" => elf::EF_AVR_ARCH_XMEGA2, - "avrxmega3" => elf::EF_AVR_ARCH_XMEGA3, - "avrxmega4" => elf::EF_AVR_ARCH_XMEGA4, - "avrxmega5" => elf::EF_AVR_ARCH_XMEGA5, - "avrxmega6" => elf::EF_AVR_ARCH_XMEGA6, - "avrxmega7" => elf::EF_AVR_ARCH_XMEGA7, - "avrtiny" => elf::EF_AVR_ARCH_AVRTINY, - - // Specific MCUs - "at90s1200" => elf::EF_AVR_ARCH_AVR1, - "attiny11" => elf::EF_AVR_ARCH_AVR1, - "attiny12" => elf::EF_AVR_ARCH_AVR1, - "attiny15" => elf::EF_AVR_ARCH_AVR1, - "attiny28" => elf::EF_AVR_ARCH_AVR1, - "at90s2313" => elf::EF_AVR_ARCH_AVR2, - "at90s2323" => elf::EF_AVR_ARCH_AVR2, - "at90s2333" => elf::EF_AVR_ARCH_AVR2, - "at90s2343" => elf::EF_AVR_ARCH_AVR2, - "attiny22" => elf::EF_AVR_ARCH_AVR2, - "attiny26" => elf::EF_AVR_ARCH_AVR2, - "at86rf401" => elf::EF_AVR_ARCH_AVR25, - "at90s4414" => elf::EF_AVR_ARCH_AVR2, - "at90s4433" => elf::EF_AVR_ARCH_AVR2, - "at90s4434" => elf::EF_AVR_ARCH_AVR2, - "at90s8515" => elf::EF_AVR_ARCH_AVR2, - "at90c8534" => elf::EF_AVR_ARCH_AVR2, - "at90s8535" => elf::EF_AVR_ARCH_AVR2, - "ata5272" => elf::EF_AVR_ARCH_AVR25, - "ata6616c" => elf::EF_AVR_ARCH_AVR25, - "attiny13" => elf::EF_AVR_ARCH_AVR25, - "attiny13a" => elf::EF_AVR_ARCH_AVR25, - "attiny2313" => elf::EF_AVR_ARCH_AVR25, - "attiny2313a" => elf::EF_AVR_ARCH_AVR25, - "attiny24" => elf::EF_AVR_ARCH_AVR25, - "attiny24a" => elf::EF_AVR_ARCH_AVR25, - "attiny4313" => elf::EF_AVR_ARCH_AVR25, - "attiny44" => elf::EF_AVR_ARCH_AVR25, - "attiny44a" => elf::EF_AVR_ARCH_AVR25, - "attiny84" => elf::EF_AVR_ARCH_AVR25, - "attiny84a" => elf::EF_AVR_ARCH_AVR25, - "attiny25" => elf::EF_AVR_ARCH_AVR25, - "attiny45" => elf::EF_AVR_ARCH_AVR25, - "attiny85" => elf::EF_AVR_ARCH_AVR25, - "attiny261" => elf::EF_AVR_ARCH_AVR25, - "attiny261a" => elf::EF_AVR_ARCH_AVR25, - "attiny441" => elf::EF_AVR_ARCH_AVR25, - "attiny461" => elf::EF_AVR_ARCH_AVR25, - "attiny461a" => elf::EF_AVR_ARCH_AVR25, - "attiny841" => elf::EF_AVR_ARCH_AVR25, - "attiny861" => elf::EF_AVR_ARCH_AVR25, - "attiny861a" => elf::EF_AVR_ARCH_AVR25, - "attiny87" => elf::EF_AVR_ARCH_AVR25, - "attiny43u" => elf::EF_AVR_ARCH_AVR25, - "attiny48" => elf::EF_AVR_ARCH_AVR25, - "attiny88" => elf::EF_AVR_ARCH_AVR25, - "attiny828" => elf::EF_AVR_ARCH_AVR25, - "at43usb355" => elf::EF_AVR_ARCH_AVR3, - "at76c711" => elf::EF_AVR_ARCH_AVR3, - "atmega103" => elf::EF_AVR_ARCH_AVR31, - "at43usb320" => elf::EF_AVR_ARCH_AVR31, - "attiny167" => elf::EF_AVR_ARCH_AVR35, - "at90usb82" => elf::EF_AVR_ARCH_AVR35, - "at90usb162" => elf::EF_AVR_ARCH_AVR35, - "ata5505" => elf::EF_AVR_ARCH_AVR35, - "ata6617c" => elf::EF_AVR_ARCH_AVR35, - "ata664251" => elf::EF_AVR_ARCH_AVR35, - "atmega8u2" => elf::EF_AVR_ARCH_AVR35, - "atmega16u2" => elf::EF_AVR_ARCH_AVR35, - "atmega32u2" => elf::EF_AVR_ARCH_AVR35, - "attiny1634" => elf::EF_AVR_ARCH_AVR35, 
- "atmega8" => elf::EF_AVR_ARCH_AVR4, - "ata6289" => elf::EF_AVR_ARCH_AVR4, - "atmega8a" => elf::EF_AVR_ARCH_AVR4, - "ata6285" => elf::EF_AVR_ARCH_AVR4, - "ata6286" => elf::EF_AVR_ARCH_AVR4, - "ata6612c" => elf::EF_AVR_ARCH_AVR4, - "atmega48" => elf::EF_AVR_ARCH_AVR4, - "atmega48a" => elf::EF_AVR_ARCH_AVR4, - "atmega48pa" => elf::EF_AVR_ARCH_AVR4, - "atmega48pb" => elf::EF_AVR_ARCH_AVR4, - "atmega48p" => elf::EF_AVR_ARCH_AVR4, - "atmega88" => elf::EF_AVR_ARCH_AVR4, - "atmega88a" => elf::EF_AVR_ARCH_AVR4, - "atmega88p" => elf::EF_AVR_ARCH_AVR4, - "atmega88pa" => elf::EF_AVR_ARCH_AVR4, - "atmega88pb" => elf::EF_AVR_ARCH_AVR4, - "atmega8515" => elf::EF_AVR_ARCH_AVR4, - "atmega8535" => elf::EF_AVR_ARCH_AVR4, - "atmega8hva" => elf::EF_AVR_ARCH_AVR4, - "at90pwm1" => elf::EF_AVR_ARCH_AVR4, - "at90pwm2" => elf::EF_AVR_ARCH_AVR4, - "at90pwm2b" => elf::EF_AVR_ARCH_AVR4, - "at90pwm3" => elf::EF_AVR_ARCH_AVR4, - "at90pwm3b" => elf::EF_AVR_ARCH_AVR4, - "at90pwm81" => elf::EF_AVR_ARCH_AVR4, - "ata5702m322" => elf::EF_AVR_ARCH_AVR5, - "ata5782" => elf::EF_AVR_ARCH_AVR5, - "ata5790" => elf::EF_AVR_ARCH_AVR5, - "ata5790n" => elf::EF_AVR_ARCH_AVR5, - "ata5791" => elf::EF_AVR_ARCH_AVR5, - "ata5795" => elf::EF_AVR_ARCH_AVR5, - "ata5831" => elf::EF_AVR_ARCH_AVR5, - "ata6613c" => elf::EF_AVR_ARCH_AVR5, - "ata6614q" => elf::EF_AVR_ARCH_AVR5, - "ata8210" => elf::EF_AVR_ARCH_AVR5, - "ata8510" => elf::EF_AVR_ARCH_AVR5, - "atmega16" => elf::EF_AVR_ARCH_AVR5, - "atmega16a" => elf::EF_AVR_ARCH_AVR5, - "atmega161" => elf::EF_AVR_ARCH_AVR5, - "atmega162" => elf::EF_AVR_ARCH_AVR5, - "atmega163" => elf::EF_AVR_ARCH_AVR5, - "atmega164a" => elf::EF_AVR_ARCH_AVR5, - "atmega164p" => elf::EF_AVR_ARCH_AVR5, - "atmega164pa" => elf::EF_AVR_ARCH_AVR5, - "atmega165" => elf::EF_AVR_ARCH_AVR5, - "atmega165a" => elf::EF_AVR_ARCH_AVR5, - "atmega165p" => elf::EF_AVR_ARCH_AVR5, - "atmega165pa" => elf::EF_AVR_ARCH_AVR5, - "atmega168" => elf::EF_AVR_ARCH_AVR5, - "atmega168a" => elf::EF_AVR_ARCH_AVR5, - "atmega168p" => elf::EF_AVR_ARCH_AVR5, - "atmega168pa" => elf::EF_AVR_ARCH_AVR5, - "atmega168pb" => elf::EF_AVR_ARCH_AVR5, - "atmega169" => elf::EF_AVR_ARCH_AVR5, - "atmega169a" => elf::EF_AVR_ARCH_AVR5, - "atmega169p" => elf::EF_AVR_ARCH_AVR5, - "atmega169pa" => elf::EF_AVR_ARCH_AVR5, - "atmega32" => elf::EF_AVR_ARCH_AVR5, - "atmega32a" => elf::EF_AVR_ARCH_AVR5, - "atmega323" => elf::EF_AVR_ARCH_AVR5, - "atmega324a" => elf::EF_AVR_ARCH_AVR5, - "atmega324p" => elf::EF_AVR_ARCH_AVR5, - "atmega324pa" => elf::EF_AVR_ARCH_AVR5, - "atmega324pb" => elf::EF_AVR_ARCH_AVR5, - "atmega325" => elf::EF_AVR_ARCH_AVR5, - "atmega325a" => elf::EF_AVR_ARCH_AVR5, - "atmega325p" => elf::EF_AVR_ARCH_AVR5, - "atmega325pa" => elf::EF_AVR_ARCH_AVR5, - "atmega3250" => elf::EF_AVR_ARCH_AVR5, - "atmega3250a" => elf::EF_AVR_ARCH_AVR5, - "atmega3250p" => elf::EF_AVR_ARCH_AVR5, - "atmega3250pa" => elf::EF_AVR_ARCH_AVR5, - "atmega328" => elf::EF_AVR_ARCH_AVR5, - "atmega328p" => elf::EF_AVR_ARCH_AVR5, - "atmega328pb" => elf::EF_AVR_ARCH_AVR5, - "atmega329" => elf::EF_AVR_ARCH_AVR5, - "atmega329a" => elf::EF_AVR_ARCH_AVR5, - "atmega329p" => elf::EF_AVR_ARCH_AVR5, - "atmega329pa" => elf::EF_AVR_ARCH_AVR5, - "atmega3290" => elf::EF_AVR_ARCH_AVR5, - "atmega3290a" => elf::EF_AVR_ARCH_AVR5, - "atmega3290p" => elf::EF_AVR_ARCH_AVR5, - "atmega3290pa" => elf::EF_AVR_ARCH_AVR5, - "atmega406" => elf::EF_AVR_ARCH_AVR5, - "atmega64" => elf::EF_AVR_ARCH_AVR5, - "atmega64a" => elf::EF_AVR_ARCH_AVR5, - "atmega640" => elf::EF_AVR_ARCH_AVR5, - "atmega644" => elf::EF_AVR_ARCH_AVR5, - 
"atmega644a" => elf::EF_AVR_ARCH_AVR5, - "atmega644p" => elf::EF_AVR_ARCH_AVR5, - "atmega644pa" => elf::EF_AVR_ARCH_AVR5, - "atmega645" => elf::EF_AVR_ARCH_AVR5, - "atmega645a" => elf::EF_AVR_ARCH_AVR5, - "atmega645p" => elf::EF_AVR_ARCH_AVR5, - "atmega649" => elf::EF_AVR_ARCH_AVR5, - "atmega649a" => elf::EF_AVR_ARCH_AVR5, - "atmega649p" => elf::EF_AVR_ARCH_AVR5, - "atmega6450" => elf::EF_AVR_ARCH_AVR5, - "atmega6450a" => elf::EF_AVR_ARCH_AVR5, - "atmega6450p" => elf::EF_AVR_ARCH_AVR5, - "atmega6490" => elf::EF_AVR_ARCH_AVR5, - "atmega6490a" => elf::EF_AVR_ARCH_AVR5, - "atmega6490p" => elf::EF_AVR_ARCH_AVR5, - "atmega64rfr2" => elf::EF_AVR_ARCH_AVR5, - "atmega644rfr2" => elf::EF_AVR_ARCH_AVR5, - "atmega16hva" => elf::EF_AVR_ARCH_AVR5, - "atmega16hva2" => elf::EF_AVR_ARCH_AVR5, - "atmega16hvb" => elf::EF_AVR_ARCH_AVR5, - "atmega16hvbrevb" => elf::EF_AVR_ARCH_AVR5, - "atmega32hvb" => elf::EF_AVR_ARCH_AVR5, - "atmega32hvbrevb" => elf::EF_AVR_ARCH_AVR5, - "atmega64hve" => elf::EF_AVR_ARCH_AVR5, - "atmega64hve2" => elf::EF_AVR_ARCH_AVR5, - "at90can32" => elf::EF_AVR_ARCH_AVR5, - "at90can64" => elf::EF_AVR_ARCH_AVR5, - "at90pwm161" => elf::EF_AVR_ARCH_AVR5, - "at90pwm216" => elf::EF_AVR_ARCH_AVR5, - "at90pwm316" => elf::EF_AVR_ARCH_AVR5, - "atmega32c1" => elf::EF_AVR_ARCH_AVR5, - "atmega64c1" => elf::EF_AVR_ARCH_AVR5, - "atmega16m1" => elf::EF_AVR_ARCH_AVR5, - "atmega32m1" => elf::EF_AVR_ARCH_AVR5, - "atmega64m1" => elf::EF_AVR_ARCH_AVR5, - "atmega16u4" => elf::EF_AVR_ARCH_AVR5, - "atmega32u4" => elf::EF_AVR_ARCH_AVR5, - "atmega32u6" => elf::EF_AVR_ARCH_AVR5, - "at90usb646" => elf::EF_AVR_ARCH_AVR5, - "at90usb647" => elf::EF_AVR_ARCH_AVR5, - "at90scr100" => elf::EF_AVR_ARCH_AVR5, - "at94k" => elf::EF_AVR_ARCH_AVR5, - "m3000" => elf::EF_AVR_ARCH_AVR5, - "atmega128" => elf::EF_AVR_ARCH_AVR51, - "atmega128a" => elf::EF_AVR_ARCH_AVR51, - "atmega1280" => elf::EF_AVR_ARCH_AVR51, - "atmega1281" => elf::EF_AVR_ARCH_AVR51, - "atmega1284" => elf::EF_AVR_ARCH_AVR51, - "atmega1284p" => elf::EF_AVR_ARCH_AVR51, - "atmega128rfa1" => elf::EF_AVR_ARCH_AVR51, - "atmega128rfr2" => elf::EF_AVR_ARCH_AVR51, - "atmega1284rfr2" => elf::EF_AVR_ARCH_AVR51, - "at90can128" => elf::EF_AVR_ARCH_AVR51, - "at90usb1286" => elf::EF_AVR_ARCH_AVR51, - "at90usb1287" => elf::EF_AVR_ARCH_AVR51, - "atmega2560" => elf::EF_AVR_ARCH_AVR6, - "atmega2561" => elf::EF_AVR_ARCH_AVR6, - "atmega256rfr2" => elf::EF_AVR_ARCH_AVR6, - "atmega2564rfr2" => elf::EF_AVR_ARCH_AVR6, - "atxmega16a4" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega16a4u" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega16c4" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega16d4" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega32a4" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega32a4u" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega32c3" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega32c4" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega32d3" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega32d4" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega32e5" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega16e5" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega8e5" => elf::EF_AVR_ARCH_XMEGA2, - "atxmega64a3" => elf::EF_AVR_ARCH_XMEGA4, - "atxmega64a3u" => elf::EF_AVR_ARCH_XMEGA4, - "atxmega64a4u" => elf::EF_AVR_ARCH_XMEGA4, - "atxmega64b1" => elf::EF_AVR_ARCH_XMEGA4, - "atxmega64b3" => elf::EF_AVR_ARCH_XMEGA4, - "atxmega64c3" => elf::EF_AVR_ARCH_XMEGA4, - "atxmega64d3" => elf::EF_AVR_ARCH_XMEGA4, - "atxmega64d4" => elf::EF_AVR_ARCH_XMEGA4, - "atxmega64a1" => elf::EF_AVR_ARCH_XMEGA5, - "atxmega64a1u" => elf::EF_AVR_ARCH_XMEGA5, - "atxmega128a3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega128a3u" => 
elf::EF_AVR_ARCH_XMEGA6, - "atxmega128b1" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega128b3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega128c3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega128d3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega128d4" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega192a3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega192a3u" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega192c3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega192d3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega256a3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega256a3u" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega256a3b" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega256a3bu" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega256c3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega256d3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega384c3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega384d3" => elf::EF_AVR_ARCH_XMEGA6, - "atxmega128a1" => elf::EF_AVR_ARCH_XMEGA7, - "atxmega128a1u" => elf::EF_AVR_ARCH_XMEGA7, - "atxmega128a4u" => elf::EF_AVR_ARCH_XMEGA7, - "attiny4" => elf::EF_AVR_ARCH_AVRTINY, - "attiny5" => elf::EF_AVR_ARCH_AVRTINY, - "attiny9" => elf::EF_AVR_ARCH_AVRTINY, - "attiny10" => elf::EF_AVR_ARCH_AVRTINY, - "attiny20" => elf::EF_AVR_ARCH_AVRTINY, - "attiny40" => elf::EF_AVR_ARCH_AVRTINY, - "attiny102" => elf::EF_AVR_ARCH_AVRTINY, - "attiny104" => elf::EF_AVR_ARCH_AVRTINY, - "attiny202" => elf::EF_AVR_ARCH_XMEGA3, - "attiny402" => elf::EF_AVR_ARCH_XMEGA3, - "attiny204" => elf::EF_AVR_ARCH_XMEGA3, - "attiny404" => elf::EF_AVR_ARCH_XMEGA3, - "attiny804" => elf::EF_AVR_ARCH_XMEGA3, - "attiny1604" => elf::EF_AVR_ARCH_XMEGA3, - "attiny406" => elf::EF_AVR_ARCH_XMEGA3, - "attiny806" => elf::EF_AVR_ARCH_XMEGA3, - "attiny1606" => elf::EF_AVR_ARCH_XMEGA3, - "attiny807" => elf::EF_AVR_ARCH_XMEGA3, - "attiny1607" => elf::EF_AVR_ARCH_XMEGA3, - "attiny212" => elf::EF_AVR_ARCH_XMEGA3, - "attiny412" => elf::EF_AVR_ARCH_XMEGA3, - "attiny214" => elf::EF_AVR_ARCH_XMEGA3, - "attiny414" => elf::EF_AVR_ARCH_XMEGA3, - "attiny814" => elf::EF_AVR_ARCH_XMEGA3, - "attiny1614" => elf::EF_AVR_ARCH_XMEGA3, - "attiny416" => elf::EF_AVR_ARCH_XMEGA3, - "attiny816" => elf::EF_AVR_ARCH_XMEGA3, - "attiny1616" => elf::EF_AVR_ARCH_XMEGA3, - "attiny3216" => elf::EF_AVR_ARCH_XMEGA3, - "attiny417" => elf::EF_AVR_ARCH_XMEGA3, - "attiny817" => elf::EF_AVR_ARCH_XMEGA3, - "attiny1617" => elf::EF_AVR_ARCH_XMEGA3, - "attiny3217" => elf::EF_AVR_ARCH_XMEGA3, - "attiny1624" => elf::EF_AVR_ARCH_XMEGA3, - "attiny1626" => elf::EF_AVR_ARCH_XMEGA3, - "attiny1627" => elf::EF_AVR_ARCH_XMEGA3, - "atmega808" => elf::EF_AVR_ARCH_XMEGA3, - "atmega809" => elf::EF_AVR_ARCH_XMEGA3, - "atmega1608" => elf::EF_AVR_ARCH_XMEGA3, - "atmega1609" => elf::EF_AVR_ARCH_XMEGA3, - "atmega3208" => elf::EF_AVR_ARCH_XMEGA3, - "atmega3209" => elf::EF_AVR_ARCH_XMEGA3, - "atmega4808" => elf::EF_AVR_ARCH_XMEGA3, - "atmega4809" => elf::EF_AVR_ARCH_XMEGA3, - - // Unknown target CPU => Unspecified/generic code - _ => 0, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/avr_unknown_gnu_atmega328.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/avr_unknown_gnu_atmega328.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/avr_unknown_gnu_atmega328.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/avr_unknown_gnu_atmega328.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - super::avr_gnu_base::target("atmega328", "-mmcu=atmega328") -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/aix.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/aix.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/aix.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/aix.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,32 @@ +use crate::abi::Endian; +use crate::spec::{crt_objects, cvs, Cc, CodeModel, LinkOutputKind, LinkerFlavor, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + abi: "vec-extabi".into(), + code_model: Some(CodeModel::Small), + cpu: "pwr7".into(), + os: "aix".into(), + vendor: "ibm".into(), + dynamic_linking: true, + endian: Endian::Big, + executables: true, + archive_format: "aix_big".into(), + families: cvs!["unix"], + has_rpath: false, + has_thread_local: true, + crt_static_respected: true, + linker_flavor: LinkerFlavor::Unix(Cc::No), + linker: Some("ld".into()), + eh_frame_header: false, + is_like_aix: true, + default_dwarf_version: 3, + function_sections: true, + pre_link_objects: crt_objects::new(&[ + (LinkOutputKind::DynamicNoPicExe, &["/usr/lib/crt0_64.o", "/usr/lib/crti_64.o"]), + (LinkOutputKind::DynamicPicExe, &["/usr/lib/crt0_64.o", "/usr/lib/crti_64.o"]), + ]), + dll_suffix: ".a".into(), + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/android.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/android.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/android.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/android.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +use crate::spec::{base, SanitizerSet, TargetOptions}; + +pub fn opts() -> TargetOptions { + let mut base = base::linux::opts(); + base.os = "android".into(); + base.is_like_android = true; + base.default_dwarf_version = 2; + base.has_thread_local = false; + base.supported_sanitizers = SanitizerSet::ADDRESS; + // This is for backward compatibility, see https://github.com/rust-lang/rust/issues/49867 + // for context. (At that time, there was no `-C force-unwind-tables`, so the only solution + // was to always emit `uwtable`). 
+ base.default_uwtable = true; + base.crt_static_respected = true; + base +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/apple/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/apple/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/apple/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/apple/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,356 @@ +use std::{borrow::Cow, env}; + +use crate::spec::{add_link_args, add_link_args_iter}; +use crate::spec::{cvs, Cc, DebuginfoKind, FramePointer, LinkArgs}; +use crate::spec::{LinkerFlavor, Lld, SplitDebuginfo, StaticCow, Target, TargetOptions}; + +#[cfg(test)] +mod tests; + +use Arch::*; +#[allow(non_camel_case_types)] +#[derive(Copy, Clone)] +pub enum Arch { + Armv7k, + Armv7s, + Arm64, + Arm64_32, + I386, + I686, + X86_64, + X86_64h, + X86_64_sim, + X86_64_macabi, + Arm64_macabi, + Arm64_sim, +} + +impl Arch { + pub fn target_name(self) -> &'static str { + match self { + Armv7k => "armv7k", + Armv7s => "armv7s", + Arm64 | Arm64_macabi | Arm64_sim => "arm64", + Arm64_32 => "arm64_32", + I386 => "i386", + I686 => "i686", + X86_64 | X86_64_sim | X86_64_macabi => "x86_64", + X86_64h => "x86_64h", + } + } + + pub fn target_arch(self) -> Cow<'static, str> { + Cow::Borrowed(match self { + Armv7k | Armv7s => "arm", + Arm64 | Arm64_32 | Arm64_macabi | Arm64_sim => "aarch64", + I386 | I686 => "x86", + X86_64 | X86_64_sim | X86_64_macabi | X86_64h => "x86_64", + }) + } + + fn target_abi(self) -> &'static str { + match self { + Armv7k | Armv7s | Arm64 | Arm64_32 | I386 | I686 | X86_64 | X86_64h => "", + X86_64_macabi | Arm64_macabi => "macabi", + // x86_64-apple-ios is a simulator target, even though it isn't + // declared that way in the target like the other ones... + Arm64_sim | X86_64_sim => "sim", + } + } + + fn target_cpu(self) -> &'static str { + match self { + Armv7k => "cortex-a8", + Armv7s => "swift", // iOS 10 is only supported on iPhone 5 or higher. + Arm64 => "apple-a7", + Arm64_32 => "apple-s4", + // Only macOS 10.12+ is supported, which means + // all x86_64/x86 CPUs must be running at least penryn + // https://github.com/llvm/llvm-project/blob/01f924d0e37a5deae51df0d77e10a15b63aa0c0f/clang/lib/Driver/ToolChains/Arch/X86.cpp#L79-L82 + I386 | I686 => "penryn", + X86_64 | X86_64_sim => "penryn", + X86_64_macabi => "penryn", + // Note: `core-avx2` is slightly more advanced than `x86_64h`, see + // comments (and disabled features) in `x86_64h_apple_darwin` for + // details. It is a higher baseline then `penryn` however. 
+ X86_64h => "core-avx2", + Arm64_macabi => "apple-a12", + Arm64_sim => "apple-a12", + } + } +} + +fn pre_link_args(os: &'static str, arch: Arch, abi: &'static str) -> LinkArgs { + let platform_name: StaticCow = match abi { + "sim" => format!("{os}-simulator").into(), + "macabi" => "mac-catalyst".into(), + _ => os.into(), + }; + + let platform_version: StaticCow = match os { + "ios" => ios_lld_platform_version(), + "tvos" => tvos_lld_platform_version(), + "watchos" => watchos_lld_platform_version(), + "macos" => macos_lld_platform_version(arch), + _ => unreachable!(), + } + .into(); + + let arch = arch.target_name(); + + let mut args = TargetOptions::link_args( + LinkerFlavor::Darwin(Cc::No, Lld::No), + &["-arch", arch, "-platform_version"], + ); + add_link_args_iter( + &mut args, + LinkerFlavor::Darwin(Cc::No, Lld::No), + [platform_name, platform_version.clone(), platform_version].into_iter(), + ); + if abi != "macabi" { + add_link_args(&mut args, LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-arch", arch]); + } + + args +} + +pub fn opts(os: &'static str, arch: Arch) -> TargetOptions { + let abi = arch.target_abi(); + + TargetOptions { + abi: abi.into(), + os: os.into(), + cpu: arch.target_cpu().into(), + link_env_remove: link_env_remove(arch, os), + vendor: "apple".into(), + linker_flavor: LinkerFlavor::Darwin(Cc::Yes, Lld::No), + // macOS has -dead_strip, which doesn't rely on function_sections + function_sections: false, + dynamic_linking: true, + pre_link_args: pre_link_args(os, arch, abi), + families: cvs!["unix"], + is_like_osx: true, + // LLVM notes that macOS 10.11+ and iOS 9+ default + // to v4, so we do the same. + // https://github.com/llvm/llvm-project/blob/378778a0d10c2f8d5df8ceff81f95b6002984a4b/clang/lib/Driver/ToolChains/Darwin.cpp#L1203 + default_dwarf_version: 4, + frame_pointer: FramePointer::Always, + has_rpath: true, + dll_suffix: ".dylib".into(), + archive_format: "darwin".into(), + // Thread locals became available with iOS 8 and macOS 10.7, + // and both are far below our minimum. + has_thread_local: true, + abi_return_struct_as_int: true, + emit_debug_gdb_scripts: false, + eh_frame_header: false, + + debuginfo_kind: DebuginfoKind::DwarfDsym, + // The historical default for macOS targets is to run `dsymutil` which + // generates a packed version of debuginfo split from the main file. + split_debuginfo: SplitDebuginfo::Packed, + supported_split_debuginfo: Cow::Borrowed(&[ + SplitDebuginfo::Packed, + SplitDebuginfo::Unpacked, + SplitDebuginfo::Off, + ]), + + // This environment variable is pretty magical but is intended for + // producing deterministic builds. This was first discovered to be used + // by the `ar` tool as a way to control whether or not mtime entries in + // the archive headers were set to zero or not. It appears that + // eventually the linker got updated to do the same thing and now reads + // this environment variable too in recent versions. + // + // For some more info see the commentary on #47086 + link_env: Cow::Borrowed(&[(Cow::Borrowed("ZERO_AR_DATE"), Cow::Borrowed("1"))]), + + ..Default::default() + } +} + +pub fn sdk_version(platform: u32) -> Option<(u32, u32)> { + // NOTE: These values are from an arbitrary point in time but shouldn't make it into the final + // binary since the final link command will have the current SDK version passed to it. 
+ match platform { + object::macho::PLATFORM_MACOS => Some((13, 1)), + object::macho::PLATFORM_IOS + | object::macho::PLATFORM_IOSSIMULATOR + | object::macho::PLATFORM_TVOS + | object::macho::PLATFORM_TVOSSIMULATOR + | object::macho::PLATFORM_MACCATALYST => Some((16, 2)), + object::macho::PLATFORM_WATCHOS | object::macho::PLATFORM_WATCHOSSIMULATOR => Some((9, 1)), + _ => None, + } +} + +pub fn platform(target: &Target) -> Option { + Some(match (&*target.os, &*target.abi) { + ("macos", _) => object::macho::PLATFORM_MACOS, + ("ios", "macabi") => object::macho::PLATFORM_MACCATALYST, + ("ios", "sim") => object::macho::PLATFORM_IOSSIMULATOR, + ("ios", _) => object::macho::PLATFORM_IOS, + ("watchos", "sim") => object::macho::PLATFORM_WATCHOSSIMULATOR, + ("watchos", _) => object::macho::PLATFORM_WATCHOS, + ("tvos", "sim") => object::macho::PLATFORM_TVOSSIMULATOR, + ("tvos", _) => object::macho::PLATFORM_TVOS, + _ => return None, + }) +} + +pub fn deployment_target(target: &Target) -> Option<(u32, u32)> { + let (major, minor) = match &*target.os { + "macos" => { + // This does not need to be specific. It just needs to handle x86 vs M1. + let arch = if target.arch == "x86" || target.arch == "x86_64" { X86_64 } else { Arm64 }; + macos_deployment_target(arch) + } + "ios" => match &*target.abi { + "macabi" => mac_catalyst_deployment_target(), + _ => ios_deployment_target(), + }, + "watchos" => watchos_deployment_target(), + "tvos" => tvos_deployment_target(), + _ => return None, + }; + + Some((major, minor)) +} + +fn from_set_deployment_target(var_name: &str) -> Option<(u32, u32)> { + let deployment_target = env::var(var_name).ok()?; + let (unparsed_major, unparsed_minor) = deployment_target.split_once('.')?; + let (major, minor) = (unparsed_major.parse().ok()?, unparsed_minor.parse().ok()?); + + Some((major, minor)) +} + +fn macos_default_deployment_target(arch: Arch) -> (u32, u32) { + match arch { + // Note: Arm64_sim is not included since macOS has no simulator. + Arm64 | Arm64_macabi => (11, 0), + _ => (10, 12), + } +} + +fn macos_deployment_target(arch: Arch) -> (u32, u32) { + // If you are looking for the default deployment target, prefer `rustc --print deployment-target`. + from_set_deployment_target("MACOSX_DEPLOYMENT_TARGET") + .unwrap_or_else(|| macos_default_deployment_target(arch)) +} + +fn macos_lld_platform_version(arch: Arch) -> String { + let (major, minor) = macos_deployment_target(arch); + format!("{major}.{minor}") +} + +pub fn macos_llvm_target(arch: Arch) -> String { + let (major, minor) = macos_deployment_target(arch); + format!("{}-apple-macosx{}.{}.0", arch.target_name(), major, minor) +} + +fn link_env_remove(arch: Arch, os: &'static str) -> StaticCow<[StaticCow]> { + // Apple platforms only officially support macOS as a host for any compilation. + // + // If building for macOS, we go ahead and remove any erroneous environment state + // that's only applicable to cross-OS compilation. Always leave anything for the + // host OS alone though. + if os == "macos" { + let mut env_remove = Vec::with_capacity(2); + // Remove the `SDKROOT` environment variable if it's clearly set for the wrong platform, which + // may occur when we're linking a custom build script while targeting iOS for example. 
+ if let Ok(sdkroot) = env::var("SDKROOT") { + if sdkroot.contains("iPhoneOS.platform") + || sdkroot.contains("iPhoneSimulator.platform") + || sdkroot.contains("AppleTVOS.platform") + || sdkroot.contains("AppleTVSimulator.platform") + || sdkroot.contains("WatchOS.platform") + || sdkroot.contains("WatchSimulator.platform") + { + env_remove.push("SDKROOT".into()) + } + } + // Additionally, `IPHONEOS_DEPLOYMENT_TARGET` must not be set when using the Xcode linker at + // "/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld", + // although this is apparently ignored when using the linker at "/usr/bin/ld". + env_remove.push("IPHONEOS_DEPLOYMENT_TARGET".into()); + env_remove.push("TVOS_DEPLOYMENT_TARGET".into()); + env_remove.into() + } else { + // Otherwise if cross-compiling for a different OS/SDK, remove any part + // of the linking environment that's wrong and reversed. + match arch { + Armv7k | Armv7s | Arm64 | Arm64_32 | I386 | I686 | X86_64 | X86_64_sim | X86_64h + | Arm64_sim => { + cvs!["MACOSX_DEPLOYMENT_TARGET"] + } + X86_64_macabi | Arm64_macabi => cvs!["IPHONEOS_DEPLOYMENT_TARGET"], + } + } +} + +fn ios_deployment_target() -> (u32, u32) { + // If you are looking for the default deployment target, prefer `rustc --print deployment-target`. + from_set_deployment_target("IPHONEOS_DEPLOYMENT_TARGET").unwrap_or((10, 0)) +} + +fn mac_catalyst_deployment_target() -> (u32, u32) { + // If you are looking for the default deployment target, prefer `rustc --print deployment-target`. + from_set_deployment_target("IPHONEOS_DEPLOYMENT_TARGET").unwrap_or((14, 0)) +} + +pub fn ios_llvm_target(arch: Arch) -> String { + // Modern iOS tooling extracts information about deployment target + // from LC_BUILD_VERSION. This load command will only be emitted when + // we build with a version specific `llvm_target`, with the version + // set high enough. Luckily one LC_BUILD_VERSION is enough, for Xcode + // to pick it up (since std and core are still built with the fallback + // of version 7.0 and hence emit the old LC_IPHONE_MIN_VERSION). + let (major, minor) = ios_deployment_target(); + format!("{}-apple-ios{}.{}.0", arch.target_name(), major, minor) +} + +fn ios_lld_platform_version() -> String { + let (major, minor) = ios_deployment_target(); + format!("{major}.{minor}") +} + +pub fn ios_sim_llvm_target(arch: Arch) -> String { + let (major, minor) = ios_deployment_target(); + format!("{}-apple-ios{}.{}.0-simulator", arch.target_name(), major, minor) +} + +fn tvos_deployment_target() -> (u32, u32) { + // If you are looking for the default deployment target, prefer `rustc --print deployment-target`. + from_set_deployment_target("TVOS_DEPLOYMENT_TARGET").unwrap_or((10, 0)) +} + +fn tvos_lld_platform_version() -> String { + let (major, minor) = tvos_deployment_target(); + format!("{major}.{minor}") +} + +pub fn tvos_llvm_target(arch: Arch) -> String { + let (major, minor) = tvos_deployment_target(); + format!("{}-apple-tvos{}.{}.0", arch.target_name(), major, minor) +} + +pub fn tvos_sim_llvm_target(arch: Arch) -> String { + let (major, minor) = tvos_deployment_target(); + format!("{}-apple-tvos{}.{}.0-simulator", arch.target_name(), major, minor) +} + +fn watchos_deployment_target() -> (u32, u32) { + // If you are looking for the default deployment target, prefer `rustc --print deployment-target`. 
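As a minimal standalone sketch of the pattern used by from_set_deployment_target and macos_llvm_target above: the deployment target is read from an environment variable as a "MAJOR.MINOR" string, a per-architecture default is used as a fallback, and the two numbers are spliced into the versioned LLVM triple. The variable name and the (10, 12) fallback below are only illustrative, not taken from this patch.

use std::env;

// Parse "MAJOR.MINOR" from an environment variable, falling back to a default,
// in the same spirit as from_set_deployment_target/macos_deployment_target.
fn deployment_target(var: &str, default: (u32, u32)) -> (u32, u32) {
    env::var(var)
        .ok()
        .and_then(|raw| {
            let (major, minor) = raw.split_once('.')?;
            Some((major.parse().ok()?, minor.parse().ok()?))
        })
        .unwrap_or(default)
}

fn main() {
    let (major, minor) = deployment_target("MACOSX_DEPLOYMENT_TARGET", (10, 12));
    // The same two numbers end up in the versioned LLVM triple, e.g.
    // "x86_64-apple-macosx10.12.0".
    println!("x86_64-apple-macosx{major}.{minor}.0");
}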
+ from_set_deployment_target("WATCHOS_DEPLOYMENT_TARGET").unwrap_or((5, 0)) +} + +fn watchos_lld_platform_version() -> String { + let (major, minor) = watchos_deployment_target(); + format!("{major}.{minor}") +} + +pub fn watchos_sim_llvm_target(arch: Arch) -> String { + let (major, minor) = watchos_deployment_target(); + format!("{}-apple-watchos{}.{}.0-simulator", arch.target_name(), major, minor) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/apple/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/apple/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/apple/tests.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/apple/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,38 @@ +use crate::spec::targets::{ + aarch64_apple_darwin, aarch64_apple_ios_sim, aarch64_apple_watchos_sim, i686_apple_darwin, + x86_64_apple_darwin, x86_64_apple_ios, x86_64_apple_tvos, x86_64_apple_watchos_sim, +}; + +#[test] +fn simulator_targets_set_abi() { + let all_sim_targets = [ + x86_64_apple_ios::target(), + x86_64_apple_tvos::target(), + x86_64_apple_watchos_sim::target(), + aarch64_apple_ios_sim::target(), + // Note: There is currently no ARM64 tvOS simulator target + aarch64_apple_watchos_sim::target(), + ]; + + for target in all_sim_targets { + assert_eq!(target.abi, "sim") + } +} + +#[test] +fn macos_link_environment_unmodified() { + let all_macos_targets = [ + aarch64_apple_darwin::target(), + i686_apple_darwin::target(), + x86_64_apple_darwin::target(), + ]; + + for target in all_macos_targets { + // macOS targets should only remove information for cross-compiling, but never + // for the host. + assert_eq!( + target.link_env_remove, + crate::spec::cvs!["IPHONEOS_DEPLOYMENT_TARGET", "TVOS_DEPLOYMENT_TARGET"], + ); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/avr_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/avr_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/avr_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/avr_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,368 @@ +use crate::spec::{Cc, LinkerFlavor, Lld, RelocModel, Target, TargetOptions}; +use object::elf; + +/// A base target for AVR devices using the GNU toolchain. +/// +/// Requires GNU avr-gcc and avr-binutils on the host system. +/// FIXME: Remove the second parameter when const string concatenation is possible. +pub fn target(target_cpu: &'static str, mmcu: &'static str) -> Target { + Target { + arch: "avr".into(), + data_layout: "e-P1-p:16:8-i8:8-i16:8-i32:8-i64:8-f32:8-f64:8-n8-a:8".into(), + llvm_target: "avr-unknown-unknown".into(), + pointer_width: 16, + options: TargetOptions { + c_int_width: "16".into(), + cpu: target_cpu.into(), + exe_suffix: ".elf".into(), + + linker: Some("avr-gcc".into()), + eh_frame_header: false, + pre_link_args: TargetOptions::link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &[mmcu]), + late_link_args: TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + &["-lgcc"], + ), + max_atomic_width: Some(16), + atomic_cas: false, + relocation_model: RelocModel::Static, + ..TargetOptions::default() + }, + } +} + +/// Resolve the value of the EF_AVR_ARCH field for AVR ELF files, given the +/// name of the target CPU / MCU. 
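The ef_avr_arch helper being introduced here maps a target CPU name to one of the EF_AVR_ARCH_* codes that end up in the low seven bits of the ELF e_flags word. As a hedged sketch of how a consumer could read that code back out of an existing e_flags value — the 0x80 bit and the value 5 for avr5 follow the usual binutils encoding and are used purely for illustration:

// Extract the AVR ISA-revision code from an ELF e_flags value by masking the
// low seven bits, mirroring the encoding ef_avr_arch produces.
fn avr_isa_revision(e_flags: u32) -> u32 {
    e_flags & 0x7f
}

fn main() {
    // Illustrative value: an object built for avr5 (code 5) with an unrelated
    // high flag bit (0x80) also set.
    let e_flags = 0x80 | 5;
    assert_eq!(avr_isa_revision(e_flags), 5);
}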
+/// +/// In ELF files using the AVR architecture, the lower 7 bits of the e_flags +/// field is a code that identifies the "ISA revision" of the object code. +/// +/// This flag is generally set by AVR compilers in their output ELF files, +/// and linkers like avr-ld check this flag in all of their input files to +/// make sure they are compiled with the same ISA revision. +pub fn ef_avr_arch(target_cpu: &str) -> u32 { + // Adapted from llvm-project/llvm/lib/target/AVR/AVRDevices.td + match target_cpu { + // Generic MCUs + "avr1" => elf::EF_AVR_ARCH_AVR1, + "avr2" => elf::EF_AVR_ARCH_AVR2, + "avr25" => elf::EF_AVR_ARCH_AVR25, + "avr3" => elf::EF_AVR_ARCH_AVR3, + "avr31" => elf::EF_AVR_ARCH_AVR31, + "avr35" => elf::EF_AVR_ARCH_AVR35, + "avr4" => elf::EF_AVR_ARCH_AVR4, + "avr5" => elf::EF_AVR_ARCH_AVR5, + "avr51" => elf::EF_AVR_ARCH_AVR51, + "avr6" => elf::EF_AVR_ARCH_AVR6, + "avrxmega1" => elf::EF_AVR_ARCH_XMEGA1, + "avrxmega2" => elf::EF_AVR_ARCH_XMEGA2, + "avrxmega3" => elf::EF_AVR_ARCH_XMEGA3, + "avrxmega4" => elf::EF_AVR_ARCH_XMEGA4, + "avrxmega5" => elf::EF_AVR_ARCH_XMEGA5, + "avrxmega6" => elf::EF_AVR_ARCH_XMEGA6, + "avrxmega7" => elf::EF_AVR_ARCH_XMEGA7, + "avrtiny" => elf::EF_AVR_ARCH_AVRTINY, + + // Specific MCUs + "at90s1200" => elf::EF_AVR_ARCH_AVR1, + "attiny11" => elf::EF_AVR_ARCH_AVR1, + "attiny12" => elf::EF_AVR_ARCH_AVR1, + "attiny15" => elf::EF_AVR_ARCH_AVR1, + "attiny28" => elf::EF_AVR_ARCH_AVR1, + "at90s2313" => elf::EF_AVR_ARCH_AVR2, + "at90s2323" => elf::EF_AVR_ARCH_AVR2, + "at90s2333" => elf::EF_AVR_ARCH_AVR2, + "at90s2343" => elf::EF_AVR_ARCH_AVR2, + "attiny22" => elf::EF_AVR_ARCH_AVR2, + "attiny26" => elf::EF_AVR_ARCH_AVR2, + "at86rf401" => elf::EF_AVR_ARCH_AVR25, + "at90s4414" => elf::EF_AVR_ARCH_AVR2, + "at90s4433" => elf::EF_AVR_ARCH_AVR2, + "at90s4434" => elf::EF_AVR_ARCH_AVR2, + "at90s8515" => elf::EF_AVR_ARCH_AVR2, + "at90c8534" => elf::EF_AVR_ARCH_AVR2, + "at90s8535" => elf::EF_AVR_ARCH_AVR2, + "ata5272" => elf::EF_AVR_ARCH_AVR25, + "ata6616c" => elf::EF_AVR_ARCH_AVR25, + "attiny13" => elf::EF_AVR_ARCH_AVR25, + "attiny13a" => elf::EF_AVR_ARCH_AVR25, + "attiny2313" => elf::EF_AVR_ARCH_AVR25, + "attiny2313a" => elf::EF_AVR_ARCH_AVR25, + "attiny24" => elf::EF_AVR_ARCH_AVR25, + "attiny24a" => elf::EF_AVR_ARCH_AVR25, + "attiny4313" => elf::EF_AVR_ARCH_AVR25, + "attiny44" => elf::EF_AVR_ARCH_AVR25, + "attiny44a" => elf::EF_AVR_ARCH_AVR25, + "attiny84" => elf::EF_AVR_ARCH_AVR25, + "attiny84a" => elf::EF_AVR_ARCH_AVR25, + "attiny25" => elf::EF_AVR_ARCH_AVR25, + "attiny45" => elf::EF_AVR_ARCH_AVR25, + "attiny85" => elf::EF_AVR_ARCH_AVR25, + "attiny261" => elf::EF_AVR_ARCH_AVR25, + "attiny261a" => elf::EF_AVR_ARCH_AVR25, + "attiny441" => elf::EF_AVR_ARCH_AVR25, + "attiny461" => elf::EF_AVR_ARCH_AVR25, + "attiny461a" => elf::EF_AVR_ARCH_AVR25, + "attiny841" => elf::EF_AVR_ARCH_AVR25, + "attiny861" => elf::EF_AVR_ARCH_AVR25, + "attiny861a" => elf::EF_AVR_ARCH_AVR25, + "attiny87" => elf::EF_AVR_ARCH_AVR25, + "attiny43u" => elf::EF_AVR_ARCH_AVR25, + "attiny48" => elf::EF_AVR_ARCH_AVR25, + "attiny88" => elf::EF_AVR_ARCH_AVR25, + "attiny828" => elf::EF_AVR_ARCH_AVR25, + "at43usb355" => elf::EF_AVR_ARCH_AVR3, + "at76c711" => elf::EF_AVR_ARCH_AVR3, + "atmega103" => elf::EF_AVR_ARCH_AVR31, + "at43usb320" => elf::EF_AVR_ARCH_AVR31, + "attiny167" => elf::EF_AVR_ARCH_AVR35, + "at90usb82" => elf::EF_AVR_ARCH_AVR35, + "at90usb162" => elf::EF_AVR_ARCH_AVR35, + "ata5505" => elf::EF_AVR_ARCH_AVR35, + "ata6617c" => elf::EF_AVR_ARCH_AVR35, + "ata664251" => elf::EF_AVR_ARCH_AVR35, + 
"atmega8u2" => elf::EF_AVR_ARCH_AVR35, + "atmega16u2" => elf::EF_AVR_ARCH_AVR35, + "atmega32u2" => elf::EF_AVR_ARCH_AVR35, + "attiny1634" => elf::EF_AVR_ARCH_AVR35, + "atmega8" => elf::EF_AVR_ARCH_AVR4, + "ata6289" => elf::EF_AVR_ARCH_AVR4, + "atmega8a" => elf::EF_AVR_ARCH_AVR4, + "ata6285" => elf::EF_AVR_ARCH_AVR4, + "ata6286" => elf::EF_AVR_ARCH_AVR4, + "ata6612c" => elf::EF_AVR_ARCH_AVR4, + "atmega48" => elf::EF_AVR_ARCH_AVR4, + "atmega48a" => elf::EF_AVR_ARCH_AVR4, + "atmega48pa" => elf::EF_AVR_ARCH_AVR4, + "atmega48pb" => elf::EF_AVR_ARCH_AVR4, + "atmega48p" => elf::EF_AVR_ARCH_AVR4, + "atmega88" => elf::EF_AVR_ARCH_AVR4, + "atmega88a" => elf::EF_AVR_ARCH_AVR4, + "atmega88p" => elf::EF_AVR_ARCH_AVR4, + "atmega88pa" => elf::EF_AVR_ARCH_AVR4, + "atmega88pb" => elf::EF_AVR_ARCH_AVR4, + "atmega8515" => elf::EF_AVR_ARCH_AVR4, + "atmega8535" => elf::EF_AVR_ARCH_AVR4, + "atmega8hva" => elf::EF_AVR_ARCH_AVR4, + "at90pwm1" => elf::EF_AVR_ARCH_AVR4, + "at90pwm2" => elf::EF_AVR_ARCH_AVR4, + "at90pwm2b" => elf::EF_AVR_ARCH_AVR4, + "at90pwm3" => elf::EF_AVR_ARCH_AVR4, + "at90pwm3b" => elf::EF_AVR_ARCH_AVR4, + "at90pwm81" => elf::EF_AVR_ARCH_AVR4, + "ata5702m322" => elf::EF_AVR_ARCH_AVR5, + "ata5782" => elf::EF_AVR_ARCH_AVR5, + "ata5790" => elf::EF_AVR_ARCH_AVR5, + "ata5790n" => elf::EF_AVR_ARCH_AVR5, + "ata5791" => elf::EF_AVR_ARCH_AVR5, + "ata5795" => elf::EF_AVR_ARCH_AVR5, + "ata5831" => elf::EF_AVR_ARCH_AVR5, + "ata6613c" => elf::EF_AVR_ARCH_AVR5, + "ata6614q" => elf::EF_AVR_ARCH_AVR5, + "ata8210" => elf::EF_AVR_ARCH_AVR5, + "ata8510" => elf::EF_AVR_ARCH_AVR5, + "atmega16" => elf::EF_AVR_ARCH_AVR5, + "atmega16a" => elf::EF_AVR_ARCH_AVR5, + "atmega161" => elf::EF_AVR_ARCH_AVR5, + "atmega162" => elf::EF_AVR_ARCH_AVR5, + "atmega163" => elf::EF_AVR_ARCH_AVR5, + "atmega164a" => elf::EF_AVR_ARCH_AVR5, + "atmega164p" => elf::EF_AVR_ARCH_AVR5, + "atmega164pa" => elf::EF_AVR_ARCH_AVR5, + "atmega165" => elf::EF_AVR_ARCH_AVR5, + "atmega165a" => elf::EF_AVR_ARCH_AVR5, + "atmega165p" => elf::EF_AVR_ARCH_AVR5, + "atmega165pa" => elf::EF_AVR_ARCH_AVR5, + "atmega168" => elf::EF_AVR_ARCH_AVR5, + "atmega168a" => elf::EF_AVR_ARCH_AVR5, + "atmega168p" => elf::EF_AVR_ARCH_AVR5, + "atmega168pa" => elf::EF_AVR_ARCH_AVR5, + "atmega168pb" => elf::EF_AVR_ARCH_AVR5, + "atmega169" => elf::EF_AVR_ARCH_AVR5, + "atmega169a" => elf::EF_AVR_ARCH_AVR5, + "atmega169p" => elf::EF_AVR_ARCH_AVR5, + "atmega169pa" => elf::EF_AVR_ARCH_AVR5, + "atmega32" => elf::EF_AVR_ARCH_AVR5, + "atmega32a" => elf::EF_AVR_ARCH_AVR5, + "atmega323" => elf::EF_AVR_ARCH_AVR5, + "atmega324a" => elf::EF_AVR_ARCH_AVR5, + "atmega324p" => elf::EF_AVR_ARCH_AVR5, + "atmega324pa" => elf::EF_AVR_ARCH_AVR5, + "atmega324pb" => elf::EF_AVR_ARCH_AVR5, + "atmega325" => elf::EF_AVR_ARCH_AVR5, + "atmega325a" => elf::EF_AVR_ARCH_AVR5, + "atmega325p" => elf::EF_AVR_ARCH_AVR5, + "atmega325pa" => elf::EF_AVR_ARCH_AVR5, + "atmega3250" => elf::EF_AVR_ARCH_AVR5, + "atmega3250a" => elf::EF_AVR_ARCH_AVR5, + "atmega3250p" => elf::EF_AVR_ARCH_AVR5, + "atmega3250pa" => elf::EF_AVR_ARCH_AVR5, + "atmega328" => elf::EF_AVR_ARCH_AVR5, + "atmega328p" => elf::EF_AVR_ARCH_AVR5, + "atmega328pb" => elf::EF_AVR_ARCH_AVR5, + "atmega329" => elf::EF_AVR_ARCH_AVR5, + "atmega329a" => elf::EF_AVR_ARCH_AVR5, + "atmega329p" => elf::EF_AVR_ARCH_AVR5, + "atmega329pa" => elf::EF_AVR_ARCH_AVR5, + "atmega3290" => elf::EF_AVR_ARCH_AVR5, + "atmega3290a" => elf::EF_AVR_ARCH_AVR5, + "atmega3290p" => elf::EF_AVR_ARCH_AVR5, + "atmega3290pa" => elf::EF_AVR_ARCH_AVR5, + "atmega406" => elf::EF_AVR_ARCH_AVR5, + 
"atmega64" => elf::EF_AVR_ARCH_AVR5, + "atmega64a" => elf::EF_AVR_ARCH_AVR5, + "atmega640" => elf::EF_AVR_ARCH_AVR5, + "atmega644" => elf::EF_AVR_ARCH_AVR5, + "atmega644a" => elf::EF_AVR_ARCH_AVR5, + "atmega644p" => elf::EF_AVR_ARCH_AVR5, + "atmega644pa" => elf::EF_AVR_ARCH_AVR5, + "atmega645" => elf::EF_AVR_ARCH_AVR5, + "atmega645a" => elf::EF_AVR_ARCH_AVR5, + "atmega645p" => elf::EF_AVR_ARCH_AVR5, + "atmega649" => elf::EF_AVR_ARCH_AVR5, + "atmega649a" => elf::EF_AVR_ARCH_AVR5, + "atmega649p" => elf::EF_AVR_ARCH_AVR5, + "atmega6450" => elf::EF_AVR_ARCH_AVR5, + "atmega6450a" => elf::EF_AVR_ARCH_AVR5, + "atmega6450p" => elf::EF_AVR_ARCH_AVR5, + "atmega6490" => elf::EF_AVR_ARCH_AVR5, + "atmega6490a" => elf::EF_AVR_ARCH_AVR5, + "atmega6490p" => elf::EF_AVR_ARCH_AVR5, + "atmega64rfr2" => elf::EF_AVR_ARCH_AVR5, + "atmega644rfr2" => elf::EF_AVR_ARCH_AVR5, + "atmega16hva" => elf::EF_AVR_ARCH_AVR5, + "atmega16hva2" => elf::EF_AVR_ARCH_AVR5, + "atmega16hvb" => elf::EF_AVR_ARCH_AVR5, + "atmega16hvbrevb" => elf::EF_AVR_ARCH_AVR5, + "atmega32hvb" => elf::EF_AVR_ARCH_AVR5, + "atmega32hvbrevb" => elf::EF_AVR_ARCH_AVR5, + "atmega64hve" => elf::EF_AVR_ARCH_AVR5, + "atmega64hve2" => elf::EF_AVR_ARCH_AVR5, + "at90can32" => elf::EF_AVR_ARCH_AVR5, + "at90can64" => elf::EF_AVR_ARCH_AVR5, + "at90pwm161" => elf::EF_AVR_ARCH_AVR5, + "at90pwm216" => elf::EF_AVR_ARCH_AVR5, + "at90pwm316" => elf::EF_AVR_ARCH_AVR5, + "atmega32c1" => elf::EF_AVR_ARCH_AVR5, + "atmega64c1" => elf::EF_AVR_ARCH_AVR5, + "atmega16m1" => elf::EF_AVR_ARCH_AVR5, + "atmega32m1" => elf::EF_AVR_ARCH_AVR5, + "atmega64m1" => elf::EF_AVR_ARCH_AVR5, + "atmega16u4" => elf::EF_AVR_ARCH_AVR5, + "atmega32u4" => elf::EF_AVR_ARCH_AVR5, + "atmega32u6" => elf::EF_AVR_ARCH_AVR5, + "at90usb646" => elf::EF_AVR_ARCH_AVR5, + "at90usb647" => elf::EF_AVR_ARCH_AVR5, + "at90scr100" => elf::EF_AVR_ARCH_AVR5, + "at94k" => elf::EF_AVR_ARCH_AVR5, + "m3000" => elf::EF_AVR_ARCH_AVR5, + "atmega128" => elf::EF_AVR_ARCH_AVR51, + "atmega128a" => elf::EF_AVR_ARCH_AVR51, + "atmega1280" => elf::EF_AVR_ARCH_AVR51, + "atmega1281" => elf::EF_AVR_ARCH_AVR51, + "atmega1284" => elf::EF_AVR_ARCH_AVR51, + "atmega1284p" => elf::EF_AVR_ARCH_AVR51, + "atmega128rfa1" => elf::EF_AVR_ARCH_AVR51, + "atmega128rfr2" => elf::EF_AVR_ARCH_AVR51, + "atmega1284rfr2" => elf::EF_AVR_ARCH_AVR51, + "at90can128" => elf::EF_AVR_ARCH_AVR51, + "at90usb1286" => elf::EF_AVR_ARCH_AVR51, + "at90usb1287" => elf::EF_AVR_ARCH_AVR51, + "atmega2560" => elf::EF_AVR_ARCH_AVR6, + "atmega2561" => elf::EF_AVR_ARCH_AVR6, + "atmega256rfr2" => elf::EF_AVR_ARCH_AVR6, + "atmega2564rfr2" => elf::EF_AVR_ARCH_AVR6, + "atxmega16a4" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega16a4u" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega16c4" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega16d4" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega32a4" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega32a4u" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega32c3" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega32c4" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega32d3" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega32d4" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega32e5" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega16e5" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega8e5" => elf::EF_AVR_ARCH_XMEGA2, + "atxmega64a3" => elf::EF_AVR_ARCH_XMEGA4, + "atxmega64a3u" => elf::EF_AVR_ARCH_XMEGA4, + "atxmega64a4u" => elf::EF_AVR_ARCH_XMEGA4, + "atxmega64b1" => elf::EF_AVR_ARCH_XMEGA4, + "atxmega64b3" => elf::EF_AVR_ARCH_XMEGA4, + "atxmega64c3" => elf::EF_AVR_ARCH_XMEGA4, + "atxmega64d3" => elf::EF_AVR_ARCH_XMEGA4, + "atxmega64d4" => elf::EF_AVR_ARCH_XMEGA4, + 
"atxmega64a1" => elf::EF_AVR_ARCH_XMEGA5, + "atxmega64a1u" => elf::EF_AVR_ARCH_XMEGA5, + "atxmega128a3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega128a3u" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega128b1" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega128b3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega128c3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega128d3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega128d4" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega192a3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega192a3u" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega192c3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega192d3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega256a3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega256a3u" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega256a3b" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega256a3bu" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega256c3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega256d3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega384c3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega384d3" => elf::EF_AVR_ARCH_XMEGA6, + "atxmega128a1" => elf::EF_AVR_ARCH_XMEGA7, + "atxmega128a1u" => elf::EF_AVR_ARCH_XMEGA7, + "atxmega128a4u" => elf::EF_AVR_ARCH_XMEGA7, + "attiny4" => elf::EF_AVR_ARCH_AVRTINY, + "attiny5" => elf::EF_AVR_ARCH_AVRTINY, + "attiny9" => elf::EF_AVR_ARCH_AVRTINY, + "attiny10" => elf::EF_AVR_ARCH_AVRTINY, + "attiny20" => elf::EF_AVR_ARCH_AVRTINY, + "attiny40" => elf::EF_AVR_ARCH_AVRTINY, + "attiny102" => elf::EF_AVR_ARCH_AVRTINY, + "attiny104" => elf::EF_AVR_ARCH_AVRTINY, + "attiny202" => elf::EF_AVR_ARCH_XMEGA3, + "attiny402" => elf::EF_AVR_ARCH_XMEGA3, + "attiny204" => elf::EF_AVR_ARCH_XMEGA3, + "attiny404" => elf::EF_AVR_ARCH_XMEGA3, + "attiny804" => elf::EF_AVR_ARCH_XMEGA3, + "attiny1604" => elf::EF_AVR_ARCH_XMEGA3, + "attiny406" => elf::EF_AVR_ARCH_XMEGA3, + "attiny806" => elf::EF_AVR_ARCH_XMEGA3, + "attiny1606" => elf::EF_AVR_ARCH_XMEGA3, + "attiny807" => elf::EF_AVR_ARCH_XMEGA3, + "attiny1607" => elf::EF_AVR_ARCH_XMEGA3, + "attiny212" => elf::EF_AVR_ARCH_XMEGA3, + "attiny412" => elf::EF_AVR_ARCH_XMEGA3, + "attiny214" => elf::EF_AVR_ARCH_XMEGA3, + "attiny414" => elf::EF_AVR_ARCH_XMEGA3, + "attiny814" => elf::EF_AVR_ARCH_XMEGA3, + "attiny1614" => elf::EF_AVR_ARCH_XMEGA3, + "attiny416" => elf::EF_AVR_ARCH_XMEGA3, + "attiny816" => elf::EF_AVR_ARCH_XMEGA3, + "attiny1616" => elf::EF_AVR_ARCH_XMEGA3, + "attiny3216" => elf::EF_AVR_ARCH_XMEGA3, + "attiny417" => elf::EF_AVR_ARCH_XMEGA3, + "attiny817" => elf::EF_AVR_ARCH_XMEGA3, + "attiny1617" => elf::EF_AVR_ARCH_XMEGA3, + "attiny3217" => elf::EF_AVR_ARCH_XMEGA3, + "attiny1624" => elf::EF_AVR_ARCH_XMEGA3, + "attiny1626" => elf::EF_AVR_ARCH_XMEGA3, + "attiny1627" => elf::EF_AVR_ARCH_XMEGA3, + "atmega808" => elf::EF_AVR_ARCH_XMEGA3, + "atmega809" => elf::EF_AVR_ARCH_XMEGA3, + "atmega1608" => elf::EF_AVR_ARCH_XMEGA3, + "atmega1609" => elf::EF_AVR_ARCH_XMEGA3, + "atmega3208" => elf::EF_AVR_ARCH_XMEGA3, + "atmega3209" => elf::EF_AVR_ARCH_XMEGA3, + "atmega4808" => elf::EF_AVR_ARCH_XMEGA3, + "atmega4809" => elf::EF_AVR_ARCH_XMEGA3, + + // Unknown target CPU => Unspecified/generic code + _ => 0, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/bpf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/bpf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/bpf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/bpf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,29 @@ +use crate::abi::Endian; +use crate::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, TargetOptions}; + +pub fn opts(endian: 
Endian) -> TargetOptions { + TargetOptions { + allow_asm: true, + endian, + linker_flavor: LinkerFlavor::Bpf, + atomic_cas: false, + dynamic_linking: true, + no_builtins: true, + panic_strategy: PanicStrategy::Abort, + position_independent_executables: true, + // Disable MergeFunctions since: + // - older kernels don't support bpf-to-bpf calls + // - on newer kernels, userspace still needs to relocate before calling + // BPF_PROG_LOAD and not all BPF libraries do that yet + merge_functions: MergeFunctions::Disabled, + obj_is_bitcode: true, + requires_lto: false, + singlethread: true, + // When targeting the `v3` cpu in llvm, 32-bit atomics are also supported. + // But making this value change based on the target cpu can be mostly confusing + // and would require a bit of a refactor. + min_atomic_width: Some(64), + max_atomic_width: Some(64), + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/dragonfly.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/dragonfly.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/dragonfly.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/dragonfly.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,14 @@ +use crate::spec::{cvs, RelroLevel, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "dragonfly".into(), + dynamic_linking: true, + families: cvs!["unix"], + has_rpath: true, + position_independent_executables: true, + relro_level: RelroLevel::Full, + default_dwarf_version: 2, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/freebsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/freebsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +use crate::spec::{cvs, RelroLevel, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "freebsd".into(), + dynamic_linking: true, + families: cvs!["unix"], + has_rpath: true, + position_independent_executables: true, + relro_level: RelroLevel::Full, + abi_return_struct_as_int: true, + default_dwarf_version: 2, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/fuchsia.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/fuchsia.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/fuchsia.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/fuchsia.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,43 @@ +use crate::spec::{crt_objects, cvs, Cc, LinkOutputKind, LinkerFlavor, Lld, TargetOptions}; + +pub fn opts() -> TargetOptions { + // This mirrors the linker options provided by clang. We presume lld for + // now. When using clang as the linker it will supply these options for us, + // so we only list them for ld/lld. 
+ // + // https://github.com/llvm/llvm-project/blob/db9322b2066c55254e7691efeab863f43bfcc084/clang/lib/Driver/ToolChains/Fuchsia.cpp#L31 + let pre_link_args = TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::No, Lld::No), + &[ + "--build-id", + "--hash-style=gnu", + "-z", + "max-page-size=4096", + "-z", + "now", + "-z", + "rodynamic", + "-z", + "separate-loadable-segments", + "--pack-dyn-relocs=relr", + ], + ); + + TargetOptions { + os: "fuchsia".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + dynamic_linking: true, + families: cvs!["unix"], + pre_link_args, + pre_link_objects: crt_objects::new(&[ + (LinkOutputKind::DynamicNoPicExe, &["Scrt1.o"]), + (LinkOutputKind::DynamicPicExe, &["Scrt1.o"]), + (LinkOutputKind::StaticNoPicExe, &["Scrt1.o"]), + (LinkOutputKind::StaticPicExe, &["Scrt1.o"]), + ]), + position_independent_executables: true, + has_thread_local: true, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/haiku.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/haiku.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/haiku.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/haiku.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,11 @@ +use crate::spec::{cvs, RelroLevel, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "haiku".into(), + dynamic_linking: true, + families: cvs!["unix"], + relro_level: RelroLevel::Full, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hermit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hermit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hermit.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hermit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, TargetOptions, TlsModel}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "hermit".into(), + linker: Some("rust-lld".into()), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + tls_model: TlsModel::InitialExec, + position_independent_executables: true, + static_position_independent_executables: true, + has_thread_local: true, + panic_strategy: PanicStrategy::Abort, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hurd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hurd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hurd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hurd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +use crate::spec::{cvs, RelroLevel, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "hurd".into(), + dynamic_linking: true, + families: cvs!["unix"], + has_rpath: true, + position_independent_executables: true, + relro_level: RelroLevel::Full, + has_thread_local: true, + crt_static_respected: true, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hurd_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hurd_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hurd_gnu.rs 1970-01-01 
00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/hurd_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,5 @@ +use crate::spec::{base, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { env: "gnu".into(), ..base::hurd::opts() } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/illumos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/illumos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/illumos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/illumos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,59 @@ +use crate::spec::{cvs, Cc, FramePointer, LinkerFlavor, TargetOptions}; + +pub fn opts() -> TargetOptions { + let late_link_args = TargetOptions::link_args( + LinkerFlavor::Unix(Cc::Yes), + &[ + // The illumos libc contains a stack unwinding implementation, as + // does libgcc_s. The latter implementation includes several + // additional symbols that are not always in base libc. To force + // the consistent use of just one unwinder, we ensure libc appears + // after libgcc_s in the NEEDED list for the resultant binary by + // ignoring any attempts to add it as a dynamic dependency until the + // very end. + // FIXME: This should be replaced by a more complete and generic + // mechanism for controlling the order of library arguments passed + // to the linker. + "-lc", + // LLVM will insert calls to the stack protector functions + // "__stack_chk_fail" and "__stack_chk_guard" into code in native + // object files. Some platforms include these symbols directly in + // libc, but at least historically these have been provided in + // libssp.so on illumos and Solaris systems. + "-lssp", + ], + ); + + TargetOptions { + os: "illumos".into(), + dynamic_linking: true, + has_rpath: true, + families: cvs!["unix"], + is_like_solaris: true, + linker_flavor: LinkerFlavor::Unix(Cc::Yes), + limit_rdylib_exports: false, // Linker doesn't support this + frame_pointer: FramePointer::Always, + eh_frame_header: false, + late_link_args, + + // While we support ELF TLS, rust requires a way to register + // cleanup handlers (in C, this would be something along the lines of: + // void register_callback(void (*fn)(void *), void *arg); + // (see src/libstd/sys/unix/fast_thread_local.rs) that is currently + // missing in illumos. For now at least, we must fallback to using + // pthread_{get,set}specific. + //has_thread_local: true, + + // FIXME: Currently, rust is invoking cc to link, which ends up + // causing these to get included twice. We should eventually transition + // to having rustc invoke ld directly, in which case these will need to + // be uncommented. + // + // We want XPG6 behavior from libc and libm. 
See standards(5) + //pre_link_objects_exe: vec![ + // "/usr/lib/amd64/values-Xc.o".into(), + // "/usr/lib/amd64/values-xpg6.o".into(), + //], + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/l4re.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/l4re.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/l4re.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/l4re.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,14 @@ +use crate::spec::{cvs, Cc, LinkerFlavor, PanicStrategy, RelocModel, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "l4re".into(), + env: "uclibc".into(), + linker_flavor: LinkerFlavor::Unix(Cc::No), + panic_strategy: PanicStrategy::Abort, + linker: Some("l4-bender".into()), + families: cvs!["unix"], + relocation_model: RelocModel::Static, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::spec::{cvs, RelroLevel, SplitDebuginfo, TargetOptions}; +use std::borrow::Cow; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "linux".into(), + dynamic_linking: true, + families: cvs!["unix"], + has_rpath: true, + position_independent_executables: true, + relro_level: RelroLevel::Full, + has_thread_local: true, + crt_static_respected: true, + supported_split_debuginfo: Cow::Borrowed(&[ + SplitDebuginfo::Packed, + SplitDebuginfo::Unpacked, + SplitDebuginfo::Off, + ]), + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,5 @@ +use crate::spec::{base, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { env: "gnu".into(), ..base::linux::opts() } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +use crate::spec::crt_objects; +use crate::spec::{base, LinkSelfContainedDefault, TargetOptions}; + +pub fn opts() -> TargetOptions { + let mut base = base::linux::opts(); + + base.env = "musl".into(); + base.pre_link_objects_self_contained = crt_objects::pre_musl_self_contained(); + base.post_link_objects_self_contained = crt_objects::post_musl_self_contained(); + base.link_self_contained = LinkSelfContainedDefault::InferredForMusl; + + // These targets statically link libc by default + base.crt_static_default = true; + + base +} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_ohos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_ohos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_ohos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_ohos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,12 @@ +use crate::spec::{base, TargetOptions}; + +pub fn opts() -> TargetOptions { + let mut base = base::linux::opts(); + + base.env = "ohos".into(); + base.crt_static_default = false; + base.force_emulated_tls = true; + base.has_thread_local = false; + + base +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_uclibc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_uclibc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_uclibc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/linux_uclibc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,5 @@ +use crate::spec::{base, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { env: "uclibc".into(), ..base::linux::opts() } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,37 @@ +pub(crate) mod aix; +pub(crate) mod android; +pub(crate) mod apple; +pub(crate) mod avr_gnu; +pub(crate) mod bpf; +pub(crate) mod dragonfly; +pub(crate) mod freebsd; +pub(crate) mod fuchsia; +pub(crate) mod haiku; +pub(crate) mod hermit; +pub(crate) mod hurd; +pub(crate) mod hurd_gnu; +pub(crate) mod illumos; +pub(crate) mod l4re; +pub(crate) mod linux; +pub(crate) mod linux_gnu; +pub(crate) mod linux_musl; +pub(crate) mod linux_ohos; +pub(crate) mod linux_uclibc; +pub(crate) mod msvc; +pub(crate) mod netbsd; +pub(crate) mod nto_qnx; +pub(crate) mod openbsd; +pub(crate) mod redox; +pub(crate) mod solaris; +pub(crate) mod solid; +pub(crate) mod teeos; +pub(crate) mod thumb; +pub(crate) mod uefi_msvc; +pub(crate) mod unikraft_linux_musl; +pub(crate) mod vxworks; +pub(crate) mod wasm; +pub(crate) mod windows_gnu; +pub(crate) mod windows_gnullvm; +pub(crate) mod windows_msvc; +pub(crate) mod windows_uwp_gnu; +pub(crate) mod windows_uwp_msvc; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::spec::{DebuginfoKind, LinkerFlavor, Lld, SplitDebuginfo, TargetOptions}; +use std::borrow::Cow; + +pub fn opts() -> TargetOptions { + // Suppress the verbose logo and authorship debugging output, which would needlessly + // clog any log files. 
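The linux_gnu, linux_musl, linux_ohos and linux_uclibc base modules above all start from base::linux::opts() and override a handful of fields, either with struct-update syntax or by mutating the base value. A small self-contained sketch of that layering pattern, using a stand-in Options struct rather than the real TargetOptions:

#[derive(Default)]
struct Options {
    os: String,
    env: String,
    crt_static_default: bool,
}

// Shared base, as in base::linux::opts().
fn linux() -> Options {
    Options { os: "linux".into(), ..Default::default() }
}

// linux_gnu-style: struct-update syntax over the shared base.
fn linux_gnu() -> Options {
    Options { env: "gnu".into(), ..linux() }
}

// linux_musl-style: take the base and mutate only the fields that differ.
fn linux_musl() -> Options {
    let mut base = linux();
    base.env = "musl".into();
    base.crt_static_default = true;
    base
}

fn main() {
    assert_eq!(linux_gnu().os, "linux");
    assert!(linux_musl().crt_static_default);
}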
+ let pre_link_args = TargetOptions::link_args(LinkerFlavor::Msvc(Lld::No), &["/NOLOGO"]); + + TargetOptions { + linker_flavor: LinkerFlavor::Msvc(Lld::No), + dll_tls_export: false, + is_like_windows: true, + is_like_msvc: true, + pre_link_args, + abi_return_struct_as_int: true, + emit_debug_gdb_scripts: false, + + // Currently this is the only supported method of debuginfo on MSVC + // where `*.pdb` files show up next to the final artifact. + split_debuginfo: SplitDebuginfo::Packed, + supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Packed]), + debuginfo_kind: DebuginfoKind::Pdb, + + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/netbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/netbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +use crate::spec::{cvs, RelroLevel, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "netbsd".into(), + dynamic_linking: true, + families: cvs!["unix"], + no_default_libraries: false, + has_rpath: true, + position_independent_executables: true, + relro_level: RelroLevel::Full, + use_ctors_section: true, + default_dwarf_version: 2, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/nto_qnx.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/nto_qnx.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/nto_qnx.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/nto_qnx.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{cvs, RelroLevel, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + crt_static_respected: true, + dynamic_linking: true, + executables: true, + families: cvs!["unix"], + has_rpath: true, + has_thread_local: false, + linker: Some("qcc".into()), + os: "nto".into(), + position_independent_executables: true, + static_position_independent_executables: true, + relro_level: RelroLevel::Full, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/openbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/openbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +use crate::spec::{cvs, FramePointer, RelroLevel, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "openbsd".into(), + dynamic_linking: true, + families: cvs!["unix"], + has_rpath: true, + abi_return_struct_as_int: true, + position_independent_executables: true, + frame_pointer: FramePointer::Always, // FIXME 43575: should be MayOmit... 
+ relro_level: RelroLevel::Full, + default_dwarf_version: 2, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/redox.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/redox.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/redox.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/redox.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::spec::{cvs, RelroLevel, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "redox".into(), + env: "relibc".into(), + dynamic_linking: true, + families: cvs!["unix"], + has_rpath: true, + position_independent_executables: true, + relro_level: RelroLevel::Full, + has_thread_local: true, + crt_static_default: true, + crt_static_respected: true, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/solaris.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/solaris.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/solaris.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/solaris.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +use crate::spec::{cvs, Cc, LinkerFlavor, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "solaris".into(), + dynamic_linking: true, + has_rpath: true, + families: cvs!["unix"], + is_like_solaris: true, + linker_flavor: LinkerFlavor::Unix(Cc::Yes), + limit_rdylib_exports: false, // Linker doesn't support this + eh_frame_header: false, + + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/solid.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/solid.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/solid.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/solid.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,12 @@ +use crate::spec::{FramePointer, TargetOptions}; + +pub fn opts(kernel: &str) -> TargetOptions { + TargetOptions { + os: format!("solid_{kernel}").into(), + vendor: "kmc".into(), + executables: false, + frame_pointer: FramePointer::NonLeaf, + has_thread_local: true, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/teeos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/teeos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/teeos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/teeos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,28 @@ +use crate::spec::{add_link_args, Cc, LinkerFlavor, Lld, PanicStrategy, RelroLevel, TargetOptions}; + +pub fn opts() -> TargetOptions { + let lld_args = &["-zmax-page-size=4096", "-znow", "-ztext", "--execute-only"]; + let cc_args = &["-Wl,-zmax-page-size=4096", "-Wl,-znow", "-Wl,-ztext", "-mexecute-only"]; + + let mut pre_link_args = TargetOptions::link_args(LinkerFlavor::Gnu(Cc::No, Lld::No), lld_args); + add_link_args(&mut pre_link_args, LinkerFlavor::Gnu(Cc::Yes, Lld::No), cc_args); + + TargetOptions { + os: "teeos".into(), + vendor: "unknown".into(), + dynamic_linking: true, + linker_flavor: LinkerFlavor::Gnu(Cc::Yes, Lld::No), + // rpath hardcodes -Wl, so it can't be used 
together with ld.lld. + // C TAs also don't support rpath, so this is fine. + has_rpath: false, + // Note: Setting has_thread_local to true causes an error when + // loading / dyn-linking the TA + has_thread_local: false, + position_independent_executables: true, + relro_level: RelroLevel::Full, + crt_static_respected: true, + pre_link_args, + panic_strategy: PanicStrategy::Abort, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/thumb.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/thumb.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/thumb.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/thumb.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,59 @@ +// These `thumbv*` targets cover the ARM Cortex-M family of processors which are widely used in +// microcontrollers. Namely, all these processors: +// +// - Cortex-M0 +// - Cortex-M0+ +// - Cortex-M1 +// - Cortex-M3 +// - Cortex-M4(F) +// - Cortex-M7(F) +// - Cortex-M23 +// - Cortex-M33 +// +// We have opted for these instead of one target per processor (e.g., `cortex-m0`, `cortex-m3`, +// etc) because the differences between some processors like the cortex-m0 and cortex-m1 are almost +// nonexistent from the POV of codegen so it doesn't make sense to have separate targets for them. +// And if differences exist between two processors under the same target, rustc flags can be used to +// optimize for one processor or the other. +// +// Also, we have not chosen a single target (`arm-none-eabi`) like GCC does because this makes +// difficult to integrate Rust code and C code. Targeting the Cortex-M4 requires different gcc flags +// than the ones you would use for the Cortex-M0 and with a single target it'd be impossible to +// differentiate one processor from the other. +// +// About arm vs thumb in the name. The Cortex-M devices only support the Thumb instruction set, +// which is more compact (higher code density), and not the ARM instruction set. That's why LLVM +// triples use thumb instead of arm. We follow suit because having thumb in the name let us +// differentiate these targets from our other `arm(v7)-*-*-gnueabi(hf)` targets in the context of +// build scripts / gcc flags. + +use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, PanicStrategy, RelocModel, TargetOptions}; + +pub fn opts() -> TargetOptions { + // See rust-lang/rfcs#1645 for a discussion about these defaults + TargetOptions { + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + // In most cases, LLD is good enough + linker: Some("rust-lld".into()), + // Because these devices have very little resources having an unwinder is too onerous so we + // default to "abort" because the "unwind" strategy is very rare. + panic_strategy: PanicStrategy::Abort, + // Similarly, one almost always never wants to use relocatable code because of the extra + // costs it involves. + relocation_model: RelocModel::Static, + // When this section is added a volatile load to its start address is also generated. This + // volatile load is a footgun as it can end up loading an invalid memory address, depending + // on how the user set up their linker scripts. This section adds pretty printer for stuff + // like std::Vec, which is not that used in no-std context, so it's best to left it out + // until we figure a way to add the pretty printers without requiring a volatile load cf. + // rust-lang/rust#44993. 
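// --- Editorial sketch (not part of this patch) -----------------------------
// The thumb base above encodes the Cortex-M defaults (rust-lld, panic=abort,
// static relocation, preserved LR, 8-bit minimum enum size). A concrete
// thumbv* target would layer CPU-specific details on top of it roughly as
// below. The llvm_target, data_layout and max_atomic_width values are
// best-effort assumptions for a Cortex-M3 class device, not copied from this
// diff.
use crate::spec::{base, Target, TargetOptions};

pub fn example_thumbv7m_target() -> Target {
    Target {
        llvm_target: "thumbv7m-none-eabi".into(),
        pointer_width: 32,
        data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(),
        arch: "arm".into(),
        options: TargetOptions {
            // Cortex-M3 has 32-bit atomics but nothing wider.
            max_atomic_width: Some(32),
            ..base::thumb::opts()
        },
    }
}
// ----------------------------------------------------------------------------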
+ emit_debug_gdb_scripts: false, + // LLVM is eager to trash the link register when calling `noreturn` functions, which + // breaks debugging. Preserve LR by default to prevent that from happening. + frame_pointer: FramePointer::Always, + // ARM supports multiple ABIs for enums, the linux one matches the default of 32 here + // but any arm-none or thumb-none target will be defaulted to 8 on GCC. + c_enum_min_bits: Some(8), + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/uefi_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/uefi_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/uefi_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/uefi_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,52 @@ +// This defines a base target-configuration for native UEFI systems. The UEFI specification has +// quite detailed sections on the ABI of all the supported target architectures. In almost all +// cases it simply follows what Microsoft Windows does. Hence, whenever in doubt, see the MSDN +// documentation. +// UEFI uses COFF/PE32+ format for binaries. All binaries must be statically linked. No dynamic +// linker is supported. As native to COFF, binaries are position-dependent, but will be relocated +// by the loader if the pre-chosen memory location is already in use. +// UEFI forbids running code on anything but the boot-CPU. No interrupts are allowed other than +// the timer-interrupt. Device-drivers are required to use polling-based models. Furthermore, all +// code runs in the same environment, no process separation is supported. + +use crate::spec::{base, LinkerFlavor, Lld, PanicStrategy, StackProbeType, TargetOptions}; + +pub fn opts() -> TargetOptions { + let mut base = base::msvc::opts(); + + base.add_pre_link_args( + LinkerFlavor::Msvc(Lld::No), + &[ + // Non-standard subsystems have no default entry-point in PE+ files. We have to define + // one. "efi_main" seems to be a common choice amongst other implementations and the + // spec. + "/entry:efi_main", + // COFF images have a "Subsystem" field in their header, which defines what kind of + // program it is. UEFI has 3 fields reserved, which are EFI_APPLICATION, + // EFI_BOOT_SERVICE_DRIVER, and EFI_RUNTIME_DRIVER. We default to EFI_APPLICATION, + // which is very likely the most common option. Individual projects can override this + // with custom linker flags. + // The subsystem-type only has minor effects on the application. It defines the memory + // regions the application is loaded into (runtime-drivers need to be put into + // reserved areas), as well as whether a return from the entry-point is treated as + // exit (default for applications). + "/subsystem:efi_application", + ], + ); + + TargetOptions { + os: "uefi".into(), + linker_flavor: LinkerFlavor::Msvc(Lld::Yes), + disable_redzone: true, + exe_suffix: ".efi".into(), + allows_weak_linkage: false, + panic_strategy: PanicStrategy::Abort, + // LLVM does not emit inline assembly because the LLVM target does not get considered as… + // "Windows". 
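// --- Editorial sketch (not part of this patch) -----------------------------
// The UEFI base above notes that projects can override the default
// "/subsystem:efi_application" with their own linker flags. At the target-spec
// level the same override could be expressed as below, assuming the linker
// lets a later /subsystem flag take precedence; the efi_runtime_driver value
// and the function name are assumptions used purely for illustration.
use crate::spec::{base, LinkerFlavor, Lld, TargetOptions};

pub fn example_efi_runtime_driver_opts() -> TargetOptions {
    let mut opts = base::uefi_msvc::opts();
    opts.add_pre_link_args(LinkerFlavor::Msvc(Lld::No), &["/subsystem:efi_runtime_driver"]);
    opts
}
// ----------------------------------------------------------------------------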
+ stack_probes: StackProbeType::Call, + singlethread: true, + linker: Some("rust-lld".into()), + entry_name: "efi_main".into(), + ..base + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/unikraft_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/unikraft_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/unikraft_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/unikraft_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +use crate::spec::{cvs, PanicStrategy, RelocModel, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "linux".into(), + env: "musl".into(), + vendor: "unikraft".into(), + linker: Some("kraftld".into()), + relocation_model: RelocModel::Static, + families: cvs!["unix"], + has_thread_local: true, + panic_strategy: PanicStrategy::Abort, + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/vxworks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/vxworks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/vxworks.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/vxworks.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::spec::{cvs, TargetOptions}; + +pub fn opts() -> TargetOptions { + TargetOptions { + os: "vxworks".into(), + env: "gnu".into(), + vendor: "wrs".into(), + linker: Some("wr-c++".into()), + exe_suffix: ".vxe".into(), + dynamic_linking: true, + families: cvs!["unix"], + has_rpath: true, + has_thread_local: true, + crt_static_default: true, + crt_static_respected: true, + crt_static_allows_dylibs: true, + // VxWorks needs to implement this to support profiling + mcount: "_mcount".into(), + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/wasm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/wasm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/wasm.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/wasm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,135 @@ +use crate::spec::{ + add_link_args, cvs, Cc, LinkSelfContainedDefault, LinkerFlavor, PanicStrategy, RelocModel, + TargetOptions, TlsModel, +}; + +pub fn options() -> TargetOptions { + macro_rules! args { + ($prefix:literal) => { + &[ + // By default LLD only gives us one page of stack (64k) which is a + // little small. Default to a larger stack closer to other PC platforms + // (1MB) and users can always inject their own link-args to override this. + concat!($prefix, "-z"), + concat!($prefix, "stack-size=1048576"), + // By default LLD's memory layout is: + // + // 1. First, a blank page + // 2. Next, all static data + // 3. Finally, the main stack (which grows down) + // + // This has the unfortunate consequence that on stack overflows you + // corrupt static data and can cause some exceedingly weird bugs. To + // help detect this a little sooner we instead request that the stack is + // placed before static data. + // + // This means that we'll generate slightly larger binaries as references + // to static data will take more bytes in the ULEB128 encoding, but + // stack overflow will be guaranteed to trap as it underflows instead of + // corrupting static data. 
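// --- Editorial sketch (not part of this patch) -----------------------------
// A self-contained demonstration of the concat!-with-prefix trick that the
// args!() macro being defined here relies on: the same flag list is spelled
// once, then emitted bare for wasm-ld (prefix "") and wrapped for a C compiler
// driver (prefix "-Wl,"). Only the first three flags are reproduced.
macro_rules! demo_args {
    ($prefix:literal) => {
        &[
            concat!($prefix, "-z"),
            concat!($prefix, "stack-size=1048576"),
            concat!($prefix, "--stack-first"),
        ]
    };
}

const LLD_ARGS: &[&str] = demo_args!(""); // ["-z", "stack-size=1048576", "--stack-first"]
const CC_ARGS: &[&str] = demo_args!("-Wl,"); // ["-Wl,-z", "-Wl,stack-size=1048576", "-Wl,--stack-first"]
// ----------------------------------------------------------------------------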
+ concat!($prefix, "--stack-first"), + // FIXME we probably shouldn't pass this but instead pass an explicit list + // of symbols we'll allow to be undefined. We don't currently have a + // mechanism of knowing, however, which symbols are intended to be imported + // from the environment and which are intended to be imported from other + // objects linked elsewhere. This is a coarse approximation but is sure to + // hide some bugs and frustrate someone at some point, so we should ideally + // work towards a world where we can explicitly list symbols that are + // supposed to be imported and have all other symbols generate errors if + // they remain undefined. + concat!($prefix, "--allow-undefined"), + // Rust code should never have warnings, and warnings are often + // indicative of bugs, let's prevent them. + concat!($prefix, "--fatal-warnings"), + // LLD only implements C++-like demangling, which doesn't match our own + // mangling scheme. Tell LLD to not demangle anything and leave it up to + // us to demangle these symbols later. Currently rustc does not perform + // further demangling, but tools like twiggy and wasm-bindgen are intended + // to do so. + concat!($prefix, "--no-demangle"), + ] + }; + } + + let mut pre_link_args = TargetOptions::link_args(LinkerFlavor::WasmLld(Cc::No), args!("")); + add_link_args(&mut pre_link_args, LinkerFlavor::WasmLld(Cc::Yes), args!("-Wl,")); + + TargetOptions { + is_like_wasm: true, + families: cvs!["wasm"], + + // we allow dynamic linking, but only cdylibs. Basically we allow a + // final library artifact that exports some symbols (a wasm module) but + // we don't allow intermediate `dylib` crate types + dynamic_linking: true, + only_cdylib: true, + + // relatively self-explanatory! + exe_suffix: ".wasm".into(), + dll_prefix: "".into(), + dll_suffix: ".wasm".into(), + eh_frame_header: false, + + max_atomic_width: Some(64), + + // Unwinding doesn't work right now, so the whole target unconditionally + // defaults to panic=abort. Note that this is guaranteed to change in + // the future once unwinding is implemented. Don't rely on this as we're + // basically guaranteed to change it once WebAssembly supports + // exceptions. + panic_strategy: PanicStrategy::Abort, + + // Wasm doesn't have atomics yet, so tell LLVM that we're in a single + // threaded model which will legalize atomics to normal operations. + singlethread: true, + + // no dynamic linking, no need for default visibility! + default_hidden_visibility: true, + + // Symbol visibility takes care of this for the WebAssembly. + // Additionally the only known linker, LLD, doesn't support the script + // arguments just yet + limit_rdylib_exports: false, + + // we use the LLD shipped with the Rust toolchain by default + linker: Some("rust-lld".into()), + linker_flavor: LinkerFlavor::WasmLld(Cc::No), + + pre_link_args, + + // FIXME: Figure out cases in which WASM needs to link with a native toolchain. + // + // rust-lang/rust#104137: cannot blindly remove this without putting in + // some other way to compensate for lack of `-nostartfiles` in linker + // invocation. + link_self_contained: LinkSelfContainedDefault::True, + + // This has no effect in LLVM 8 or prior, but in LLVM 9 and later when + // PIC code is implemented this has quite a drastic effect if it stays + // at the default, `pic`. 
In an effort to keep wasm binaries as minimal + // as possible we're defaulting to `static` for now, but the hope is + // that eventually we can ship a `pic`-compatible standard library which + // works with `static` as well (or works with some method of generating + // non-relative calls and such later on). + relocation_model: RelocModel::Static, + + // When the atomics feature is activated then these two keys matter, + // otherwise they're basically ignored by the standard library. In this + // mode, however, the `#[thread_local]` attribute works (i.e. + // `has_thread_local`) and we need to get it to work by specifying + // `local-exec` as that's all that's implemented in LLVM today for wasm. + has_thread_local: true, + tls_model: TlsModel::LocalExec, + + // gdb scripts don't work on wasm blobs + emit_debug_gdb_scripts: false, + + // There's more discussion of this at + // https://bugs.llvm.org/show_bug.cgi?id=52442 but the general result is + // that this isn't useful for wasm and has tricky issues with + // representation, so this is disabled. + generate_arange_section: false, + + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,108 @@ +use crate::spec::LinkSelfContainedDefault; +use crate::spec::{add_link_args, crt_objects}; +use crate::spec::{cvs, Cc, DebuginfoKind, LinkerFlavor, Lld, SplitDebuginfo, TargetOptions}; +use std::borrow::Cow; + +pub fn opts() -> TargetOptions { + let mut pre_link_args = TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::No, Lld::No), + &[ + // Enable ASLR + "--dynamicbase", + // ASLR will rebase it anyway so leaving that option enabled only leads to confusion + "--disable-auto-image-base", + ], + ); + add_link_args( + &mut pre_link_args, + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + &[ + // Tell GCC to avoid linker plugins, because we are not bundling + // them with Windows installer, and Rust does its own LTO anyways. + "-fno-use-linker-plugin", + "-Wl,--dynamicbase", + "-Wl,--disable-auto-image-base", + ], + ); + + // Order of `late_link_args*` was found through trial and error to work with various + // mingw-w64 versions (not tested on the CI). It's expected to change from time to time. + let mingw_libs = &[ + "-lmsvcrt", + "-lmingwex", + "-lmingw32", + "-lgcc", // alas, mingw* libraries above depend on libgcc + // mingw's msvcrt is a weird hybrid import library and static library. + // And it seems that the linker fails to use import symbols from msvcrt + // that are required from functions in msvcrt in certain cases. For example + // `_fmode` that is used by an implementation of `__p__fmode` in x86_64. + // The library is purposely listed twice to fix that. + // + // See https://github.com/rust-lang/rust/pull/47483 for some more details. + "-lmsvcrt", + "-luser32", + "-lkernel32", + ]; + let mut late_link_args = + TargetOptions::link_args(LinkerFlavor::Gnu(Cc::No, Lld::No), mingw_libs); + add_link_args(&mut late_link_args, LinkerFlavor::Gnu(Cc::Yes, Lld::No), mingw_libs); + // If any of our crates are dynamically linked then we need to use + // the shared libgcc_s-dw2-1.dll. This is required to support + // unwinding across DLL boundaries. 
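// --- Editorial sketch (not part of this patch) -----------------------------
// One plausible re-expression of the removed i686_pc_windows_gnu.rs (its old
// form appears further down in this diff) against the new `base::windows_gnu`
// path. All field values are copied from that removed file; only the module
// path and function name here are illustrative.
use crate::spec::{base, Cc, FramePointer, LinkerFlavor, Lld, Target};

pub fn example_i686_pc_windows_gnu_target() -> Target {
    let mut base = base::windows_gnu::opts();
    base.cpu = "pentium4".into();
    base.max_atomic_width = Some(64);
    base.frame_pointer = FramePointer::Always; // Required for backtraces
    base.linker = Some("i686-w64-mingw32-gcc".into());
    // Mark binaries as compatible with the larger 4GiB address space available
    // to x86 Windows binaries on x86_64.
    base.add_pre_link_args(
        LinkerFlavor::Gnu(Cc::No, Lld::No),
        &["-m", "i386pe", "--large-address-aware"],
    );
    base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-Wl,--large-address-aware"]);

    Target {
        llvm_target: "i686-pc-windows-gnu".into(),
        pointer_width: 32,
        data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\
            i64:64-f80:32-n8:16:32-a:0:32-S32"
            .into(),
        arch: "x86".into(),
        options: base,
    }
}
// ----------------------------------------------------------------------------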
+ let dynamic_unwind_libs = &["-lgcc_s"]; + let mut late_link_args_dynamic = + TargetOptions::link_args(LinkerFlavor::Gnu(Cc::No, Lld::No), dynamic_unwind_libs); + add_link_args( + &mut late_link_args_dynamic, + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + dynamic_unwind_libs, + ); + // If all of our crates are statically linked then we can get away + // with statically linking the libgcc unwinding code. This allows + // binaries to be redistributed without the libgcc_s-dw2-1.dll + // dependency, but unfortunately break unwinding across DLL + // boundaries when unwinding across FFI boundaries. + let static_unwind_libs = &["-lgcc_eh", "-l:libpthread.a"]; + let mut late_link_args_static = + TargetOptions::link_args(LinkerFlavor::Gnu(Cc::No, Lld::No), static_unwind_libs); + add_link_args( + &mut late_link_args_static, + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + static_unwind_libs, + ); + + TargetOptions { + os: "windows".into(), + env: "gnu".into(), + vendor: "pc".into(), + // FIXME(#13846) this should be enabled for windows + function_sections: false, + linker: Some("gcc".into()), + dynamic_linking: true, + dll_tls_export: false, + dll_prefix: "".into(), + dll_suffix: ".dll".into(), + exe_suffix: ".exe".into(), + families: cvs!["windows"], + is_like_windows: true, + allows_weak_linkage: false, + pre_link_args, + pre_link_objects: crt_objects::pre_mingw(), + post_link_objects: crt_objects::post_mingw(), + pre_link_objects_self_contained: crt_objects::pre_mingw_self_contained(), + post_link_objects_self_contained: crt_objects::post_mingw_self_contained(), + link_self_contained: LinkSelfContainedDefault::InferredForMingw, + late_link_args, + late_link_args_dynamic, + late_link_args_static, + abi_return_struct_as_int: true, + emit_debug_gdb_scripts: false, + requires_uwtable: true, + eh_frame_header: false, + // FIXME(davidtwco): Support Split DWARF on Windows GNU - may require LLVM changes to + // output DWO, despite using DWARF, doesn't use ELF.. + debuginfo_kind: DebuginfoKind::Pdb, + supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Off]), + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_gnullvm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_gnullvm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_gnullvm.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_gnullvm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,47 @@ +use crate::spec::{cvs, Cc, DebuginfoKind, LinkerFlavor, Lld, SplitDebuginfo, TargetOptions}; +use std::borrow::Cow; + +pub fn opts() -> TargetOptions { + // We cannot use `-nodefaultlibs` because compiler-rt has to be passed + // as a path since it's not added to linker search path by the default. + // There were attempts to make it behave like libgcc (so one can just use -l) + // but LLVM maintainers rejected it: https://reviews.llvm.org/D51440 + let pre_link_args = TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + &["-nolibc", "--unwindlib=none"], + ); + // Order of `late_link_args*` does not matter with LLD. 
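// --- Editorial sketch (not part of this patch) -----------------------------
// A minimal, hypothetical consumer of the gnullvm base above. The cpu and
// linker strings are assumptions (modelled on the i686 gnullvm target removed
// later in this diff, which uses "i686-w64-mingw32-clang"); nothing here is
// copied from an actual x86_64 target file.
use crate::spec::{base, TargetOptions};

pub fn example_windows_gnullvm_opts() -> TargetOptions {
    let mut opts = base::windows_gnullvm::opts();
    opts.cpu = "x86-64".into();
    opts.max_atomic_width = Some(64);
    opts.linker = Some("x86_64-w64-mingw32-clang".into());
    opts
}
// ----------------------------------------------------------------------------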
+ let late_link_args = TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + &["-lmingw32", "-lmingwex", "-lmsvcrt", "-lkernel32", "-luser32"], + ); + + TargetOptions { + os: "windows".into(), + env: "gnu".into(), + vendor: "pc".into(), + abi: "llvm".into(), + linker: Some("clang".into()), + dynamic_linking: true, + dll_tls_export: false, + dll_prefix: "".into(), + dll_suffix: ".dll".into(), + exe_suffix: ".exe".into(), + families: cvs!["windows"], + is_like_windows: true, + allows_weak_linkage: false, + pre_link_args, + late_link_args, + abi_return_struct_as_int: true, + emit_debug_gdb_scripts: false, + requires_uwtable: true, + eh_frame_header: false, + no_default_libraries: false, + has_thread_local: true, + // FIXME(davidtwco): Support Split DWARF on Windows GNU - may require LLVM changes to + // output DWO, despite using DWARF, doesn't use ELF.. + debuginfo_kind: DebuginfoKind::Pdb, + supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Off]), + ..Default::default() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,34 @@ +use crate::spec::{base, cvs, TargetOptions}; + +pub fn opts() -> TargetOptions { + let base = base::msvc::opts(); + + TargetOptions { + os: "windows".into(), + env: "msvc".into(), + vendor: "pc".into(), + dynamic_linking: true, + dll_prefix: "".into(), + dll_suffix: ".dll".into(), + exe_suffix: ".exe".into(), + staticlib_prefix: "".into(), + staticlib_suffix: ".lib".into(), + families: cvs!["windows"], + crt_static_allows_dylibs: true, + crt_static_respected: true, + requires_uwtable: true, + // Currently we don't pass the /NODEFAULTLIB flag to the linker on MSVC + // as there's been trouble in the past of linking the C++ standard + // library required by LLVM. This likely needs to happen one day, but + // in general Windows is also a more controlled environment than + // Unix, so it's not necessarily as critical that this be implemented. + // + // Note that there are also some licensing worries about statically + // linking some libraries which require a specific agreement, so it may + // not ever be possible for us to pass this flag. + no_default_libraries: false, + has_thread_local: true, + + ..base + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_uwp_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_uwp_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_uwp_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_uwp_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,35 @@ +use crate::spec::{add_link_args, base, Cc, LinkArgs, LinkerFlavor, Lld, TargetOptions}; + +pub fn opts() -> TargetOptions { + let base = base::windows_gnu::opts(); + + // FIXME: This should be updated for the exception machinery changes from #67502 + // and inherit from `windows_gnu_base`, at least partially. 
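// --- Editorial sketch (not part of this patch) -----------------------------
// Just below, this UWP file resets late_link_args_dynamic and
// late_link_args_static to LinkArgs::new() until the exception-machinery FIXME
// is resolved. Mechanically, repopulating such a map later would look like
// this; the "-lunwind" library and the function name are assumptions used
// purely for illustration.
use crate::spec::{add_link_args, Cc, LinkArgs, LinkerFlavor, Lld};

fn example_uwp_dynamic_unwind_args() -> LinkArgs {
    let mut args = LinkArgs::new();
    add_link_args(&mut args, LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-lunwind"]);
    args
}
// ----------------------------------------------------------------------------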
+ let mingw_libs = &[ + "-lwinstorecompat", + "-lruntimeobject", + "-lsynchronization", + "-lvcruntime140_app", + "-lucrt", + "-lwindowsapp", + "-lmingwex", + "-lmingw32", + ]; + let mut late_link_args = + TargetOptions::link_args(LinkerFlavor::Gnu(Cc::No, Lld::No), mingw_libs); + add_link_args(&mut late_link_args, LinkerFlavor::Gnu(Cc::Yes, Lld::No), mingw_libs); + // Reset the flags back to empty until the FIXME above is addressed. + let late_link_args_dynamic = LinkArgs::new(); + let late_link_args_static = LinkArgs::new(); + + TargetOptions { + abi: "uwp".into(), + vendor: "uwp".into(), + limit_rdylib_exports: false, + late_link_args, + late_link_args_dynamic, + late_link_args_static, + + ..base + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_uwp_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_uwp_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_uwp_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/base/windows_uwp_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,11 @@ +use crate::spec::{base, LinkerFlavor, Lld, TargetOptions}; + +pub fn opts() -> TargetOptions { + let mut opts = base::windows_msvc::opts(); + + opts.abi = "uwp".into(); + opts.vendor = "uwp".into(); + opts.add_pre_link_args(LinkerFlavor::Msvc(Lld::No), &["/APPCONTAINER", "mincore.lib"]); + + opts +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpf_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpf_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpf_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpf_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, TargetOptions}; - -pub fn opts(endian: Endian) -> TargetOptions { - TargetOptions { - allow_asm: true, - endian, - linker_flavor: LinkerFlavor::Bpf, - atomic_cas: false, - dynamic_linking: true, - no_builtins: true, - panic_strategy: PanicStrategy::Abort, - position_independent_executables: true, - // Disable MergeFunctions since: - // - older kernels don't support bpf-to-bpf calls - // - on newer kernels, userspace still needs to relocate before calling - // BPF_PROG_LOAD and not all BPF libraries do that yet - merge_functions: MergeFunctions::Disabled, - obj_is_bitcode: true, - requires_lto: false, - singlethread: true, - // When targeting the `v3` cpu in llvm, 32-bit atomics are also supported. - // But making this value change based on the target cpu can be mostly confusing - // and would require a bit of a refactor. 
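// --- Editorial sketch (not part of this patch) -----------------------------
// The old bpf_base module being removed here has a counterpart in the new
// `base::bpf` module listed in base/mod.rs above. One plausible re-expression
// of the removed bpfel_unknown_none.rs (shown just below), assuming
// base::bpf::opts() keeps the same Endian parameter; the field values are
// copied from that removed file.
use crate::abi::Endian;
use crate::spec::{base, Target};

pub fn example_bpfel_target() -> Target {
    Target {
        llvm_target: "bpfel".into(),
        data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(),
        pointer_width: 64,
        arch: "bpf".into(),
        options: base::bpf::opts(Endian::Little),
    }
}
// ----------------------------------------------------------------------------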
- min_atomic_width: Some(64), - max_atomic_width: Some(64), - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpfeb_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpfeb_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpfeb_unknown_none.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpfeb_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -use crate::spec::Target; -use crate::{abi::Endian, spec::bpf_base}; - -pub fn target() -> Target { - Target { - llvm_target: "bpfeb".into(), - data_layout: "E-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - pointer_width: 64, - arch: "bpf".into(), - options: bpf_base::opts(Endian::Big), - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpfel_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpfel_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpfel_unknown_none.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/bpfel_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -use crate::spec::Target; -use crate::{abi::Endian, spec::bpf_base}; - -pub fn target() -> Target { - Target { - llvm_target: "bpfel".into(), - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - pointer_width: 64, - arch: "bpf".into(), - options: bpf_base::opts(Endian::Little), - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/crt_objects.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/crt_objects.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/crt_objects.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/crt_objects.rs 2023-12-21 16:55:28.000000000 +0000 @@ -40,11 +40,9 @@ //! but not gcc's. As a result rustc cannot link with C++ static libraries (#36710) //! when linking in self-contained mode. -use crate::json::{Json, ToJson}; use crate::spec::LinkOutputKind; use std::borrow::Cow; use std::collections::BTreeMap; -use std::str::FromStr; pub type CrtObjects = BTreeMap>>; @@ -123,39 +121,3 @@ pub(super) fn post_wasi_self_contained() -> CrtObjects { new(&[]) } - -/// Which logic to use to determine whether to use self-contained linking mode -/// if `-Clink-self-contained` is not specified explicitly. 
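// --- Editorial sketch (not part of this patch) -----------------------------
// The LinkSelfContainedDefault enum removed from crt_objects.rs here is
// consumed by the new base files earlier in this diff through a plain
// `crate::spec` import (see base/windows_gnu.rs and base/wasm.rs above, which
// also show the new InferredForMingw variant in place of the old Mingw one).
// A minimal usage sketch with that import path:
use crate::spec::{LinkSelfContainedDefault, TargetOptions};

fn example_self_contained_default() -> TargetOptions {
    TargetOptions {
        // Matches the wasm base above, which always links self-contained.
        link_self_contained: LinkSelfContainedDefault::True,
        ..Default::default()
    }
}
// ----------------------------------------------------------------------------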
-#[derive(Clone, Copy, PartialEq, Hash, Debug)] -pub enum LinkSelfContainedDefault { - False, - True, - Musl, - Mingw, -} - -impl FromStr for LinkSelfContainedDefault { - type Err = (); - - fn from_str(s: &str) -> Result { - Ok(match s { - "false" => LinkSelfContainedDefault::False, - "true" | "wasm" => LinkSelfContainedDefault::True, - "musl" => LinkSelfContainedDefault::Musl, - "mingw" => LinkSelfContainedDefault::Mingw, - _ => return Err(()), - }) - } -} - -impl ToJson for LinkSelfContainedDefault { - fn to_json(&self) -> Json { - match *self { - LinkSelfContainedDefault::False => "false", - LinkSelfContainedDefault::True => "true", - LinkSelfContainedDefault::Musl => "musl", - LinkSelfContainedDefault::Mingw => "mingw", - } - .to_json() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/csky_unknown_linux_gnuabiv2.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/csky_unknown_linux_gnuabiv2.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/csky_unknown_linux_gnuabiv2.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/csky_unknown_linux_gnuabiv2.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, Target, TargetOptions}; - -// This target is for glibc Linux on Csky - -pub fn target() -> Target { - Target { - //https://github.com/llvm/llvm-project/blob/8b76aea8d8b1b71f6220bc2845abc749f18a19b7/clang/lib/Basic/Targets/CSKY.h - llvm_target: "csky-unknown-linux-gnuabiv2".into(), - pointer_width: 32, - data_layout: "e-m:e-S32-p:32:32-i32:32:32-i64:32:32-f32:32:32-f64:32:32-v64:32:32-v128:32:32-a:0:32-Fi32-n32".into(), - arch: "csky".into(), - options: TargetOptions { - abi: "abiv2".into(), - features: "+2e3,+3e7,+7e10,+cache,+dsp1e2,+dspe60,+e1,+e2,+edsp,+elrw,+hard-tp,+high-registers,+hwdiv,+mp,+mp1e2,+nvic,+trust".into(), - late_link_args_static: TargetOptions::link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-l:libatomic.a"]), - max_atomic_width: Some(32), - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/dragonfly_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/dragonfly_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/dragonfly_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/dragonfly_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -use crate::spec::{cvs, RelroLevel, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "dragonfly".into(), - dynamic_linking: true, - families: cvs!["unix"], - has_rpath: true, - position_independent_executables: true, - relro_level: RelroLevel::Full, - default_dwarf_version: 2, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/freebsd_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/freebsd_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/freebsd_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/freebsd_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use crate::spec::{cvs, RelroLevel, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "freebsd".into(), - dynamic_linking: true, - families: cvs!["unix"], - has_rpath: true, - position_independent_executables: true, - relro_level: 
RelroLevel::Full, - abi_return_struct_as_int: true, - default_dwarf_version: 2, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/fuchsia_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/fuchsia_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/fuchsia_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/fuchsia_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,43 +0,0 @@ -use crate::spec::{crt_objects, cvs, Cc, LinkOutputKind, LinkerFlavor, Lld, TargetOptions}; - -pub fn opts() -> TargetOptions { - // This mirrors the linker options provided by clang. We presume lld for - // now. When using clang as the linker it will supply these options for us, - // so we only list them for ld/lld. - // - // https://github.com/llvm/llvm-project/blob/db9322b2066c55254e7691efeab863f43bfcc084/clang/lib/Driver/ToolChains/Fuchsia.cpp#L31 - let pre_link_args = TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::No, Lld::No), - &[ - "--build-id", - "--hash-style=gnu", - "-z", - "max-page-size=4096", - "-z", - "now", - "-z", - "rodynamic", - "-z", - "separate-loadable-segments", - "--pack-dyn-relocs=relr", - ], - ); - - TargetOptions { - os: "fuchsia".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - dynamic_linking: true, - families: cvs!["unix"], - pre_link_args, - pre_link_objects: crt_objects::new(&[ - (LinkOutputKind::DynamicNoPicExe, &["Scrt1.o"]), - (LinkOutputKind::DynamicPicExe, &["Scrt1.o"]), - (LinkOutputKind::StaticNoPicExe, &["Scrt1.o"]), - (LinkOutputKind::StaticPicExe, &["Scrt1.o"]), - ]), - position_independent_executables: true, - has_thread_local: true, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/haiku_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/haiku_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/haiku_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/haiku_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -use crate::spec::{cvs, RelroLevel, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "haiku".into(), - dynamic_linking: true, - families: cvs!["unix"], - relro_level: RelroLevel::Full, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hermit_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hermit_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hermit_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hermit_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, TargetOptions, TlsModel}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "hermit".into(), - linker: Some("rust-lld".into()), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - tls_model: TlsModel::InitialExec, - position_independent_executables: true, - static_position_independent_executables: true, - has_thread_local: true, - panic_strategy: PanicStrategy::Abort, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hexagon_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hexagon_unknown_linux_musl.rs 
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hexagon_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hexagon_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Target}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.cpu = "hexagonv60".into(); - base.max_atomic_width = Some(32); - // FIXME: HVX length defaults are per-CPU - base.features = "-small-data,+hvx-length128b".into(); - - base.crt_static_default = false; - base.has_rpath = true; - base.linker_flavor = LinkerFlavor::Unix(Cc::Yes); - - base.c_enum_min_bits = Some(8); - - Target { - llvm_target: "hexagon-unknown-linux-musl".into(), - pointer_width: 32, - data_layout: concat!( - "e-m:e-p:32:32:32-a:0-n16:32-i64:64:64-i32:32", - ":32-i16:16:16-i1:8:8-f32:32:32-f64:64:64-v32", - ":32:32-v64:64:64-v512:512:512-v1024:1024:1024-v2048", - ":2048:2048" - ) - .into(), - arch: "hexagon".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hurd_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hurd_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hurd_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hurd_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use crate::spec::{cvs, RelroLevel, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "hurd".into(), - dynamic_linking: true, - families: cvs!["unix"], - has_rpath: true, - position_independent_executables: true, - relro_level: RelroLevel::Full, - has_thread_local: true, - crt_static_respected: true, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hurd_gnu_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hurd_gnu_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hurd_gnu_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/hurd_gnu_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -use crate::spec::TargetOptions; - -pub fn opts() -> TargetOptions { - TargetOptions { env: "gnu".into(), ..super::hurd_base::opts() } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i386_apple_ios.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i386_apple_ios.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i386_apple_ios.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i386_apple_ios.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -use super::apple_base::{ios_sim_llvm_target, opts, Arch}; -use crate::spec::{StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::I386; - Target { - // Clang automatically chooses a more specific target based on - // IPHONEOS_DEPLOYMENT_TARGET. - // This is required for the target to pick the right - // MACH-O commands, so we do too. 
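// --- Editorial sketch (not part of this patch) -----------------------------
// One plausible re-expression of the removed i386_apple_ios.rs against the new
// `base::apple` module listed in base/mod.rs above. The field values are
// copied from the removed body that continues just below; whether the helper
// names (ios_sim_llvm_target, Arch, opts) survive unchanged under base::apple
// is an assumption.
use crate::spec::base::apple::{ios_sim_llvm_target, opts, Arch};
use crate::spec::{StackProbeType, Target, TargetOptions};

pub fn example_i386_apple_ios_target() -> Target {
    let arch = Arch::I386;
    Target {
        llvm_target: ios_sim_llvm_target(arch).into(),
        pointer_width: 32,
        data_layout: "e-m:o-p:32:32-p270:32:32-p271:32:32-p272:64:64-\
            f64:32:64-f80:128-n8:16:32-S128"
            .into(),
        arch: arch.target_arch(),
        options: TargetOptions {
            max_atomic_width: Some(64),
            stack_probes: StackProbeType::X86,
            ..opts("ios", arch)
        },
    }
}
// ----------------------------------------------------------------------------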
- llvm_target: ios_sim_llvm_target(arch).into(), - pointer_width: 32, - data_layout: "e-m:o-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:128-n8:16:32-S128" - .into(), - arch: arch.target_arch(), - options: TargetOptions { - max_atomic_width: Some(64), - stack_probes: StackProbeType::X86, - ..opts("ios", arch) - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i386_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i386_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i386_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i386_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::i686_unknown_linux_gnu::target(); - base.cpu = "i386".into(); - base.llvm_target = "i386-unknown-linux-gnu".into(); - base -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i486_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i486_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i486_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i486_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::i686_unknown_linux_gnu::target(); - base.cpu = "i486".into(); - base.llvm_target = "i486-unknown-linux-gnu".into(); - base -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_pc_nto_qnx700.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_pc_nto_qnx700.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_pc_nto_qnx700.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_pc_nto_qnx700.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -use super::nto_qnx_base; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "i586-pc-unknown".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:32-n8:16:32-S128" - .into(), - arch: "x86".into(), - options: TargetOptions { - cpu: "pentium4".into(), - max_atomic_width: Some(64), - pre_link_args: TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - &["-Vgcc_ntox86_cxx"], - ), - env: "nto70".into(), - stack_probes: StackProbeType::X86, - ..nto_qnx_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_pc_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_pc_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_pc_windows_msvc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_pc_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::i686_pc_windows_msvc::target(); - base.cpu = "pentium".into(); - base.llvm_target = "i586-pc-windows-msvc".into(); - base -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_unknown_linux_gnu.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::i686_unknown_linux_gnu::target(); - base.cpu = "pentium".into(); - base.llvm_target = "i586-unknown-linux-gnu".into(); - base -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i586_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::i686_unknown_linux_musl::target(); - base.cpu = "pentium".into(); - base.llvm_target = "i586-unknown-linux-musl".into(); - base -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_apple_darwin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_apple_darwin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_apple_darwin.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_apple_darwin.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -use super::apple_base::{macos_llvm_target, opts, Arch}; -use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - // ld64 only understands i386 and not i686 - let arch = Arch::I386; - let mut base = opts("macos", arch); - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-m32"]); - base.stack_probes = StackProbeType::X86; - base.frame_pointer = FramePointer::Always; - - Target { - // Clang automatically chooses a more specific target based on - // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work - // correctly, we do too. - // - // While ld64 doesn't understand i686, LLVM does. - llvm_target: macos_llvm_target(Arch::I686).into(), - pointer_width: 32, - data_layout: "e-m:o-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:128-n8:16:32-S128" - .into(), - arch: arch.target_arch(), - options: TargetOptions { mcount: "\u{1}mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_linux_android.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_linux_android.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_linux_android.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_linux_android.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -use crate::spec::{SanitizerSet, StackProbeType, Target, TargetOptions}; - -// See https://developer.android.com/ndk/guides/abis.html#x86 -// for target ABI requirements. 
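// --- Editorial sketch (not part of this patch) -----------------------------
// One plausible re-expression of the removed i686_linux_android.rs against the
// new `base::android` module path; the field values are copied from the
// removed body that continues just below, and only the module path and
// function name here are illustrative.
use crate::spec::{base, SanitizerSet, StackProbeType, Target, TargetOptions};

pub fn example_i686_linux_android_target() -> Target {
    let mut base = base::android::opts();
    base.max_atomic_width = Some(64);
    // https://developer.android.com/ndk/guides/abis.html#x86
    base.cpu = "pentiumpro".into();
    base.features = "+mmx,+sse,+sse2,+sse3,+ssse3".into();
    base.stack_probes = StackProbeType::X86;

    Target {
        llvm_target: "i686-linux-android".into(),
        pointer_width: 32,
        data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\
            f64:32:64-f80:32-n8:16:32-S128"
            .into(),
        arch: "x86".into(),
        options: TargetOptions { supported_sanitizers: SanitizerSet::ADDRESS, ..base },
    }
}
// ----------------------------------------------------------------------------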
- -pub fn target() -> Target { - let mut base = super::android_base::opts(); - - base.max_atomic_width = Some(64); - - // https://developer.android.com/ndk/guides/abis.html#x86 - base.cpu = "pentiumpro".into(); - base.features = "+mmx,+sse,+sse2,+sse3,+ssse3".into(); - base.stack_probes = StackProbeType::X86; - - Target { - llvm_target: "i686-linux-android".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:32-n8:16:32-S128" - .into(), - arch: "x86".into(), - options: TargetOptions { supported_sanitizers: SanitizerSet::ADDRESS, ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, Target}; - -pub fn target() -> Target { - let mut base = super::windows_gnu_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - base.frame_pointer = FramePointer::Always; // Required for backtraces - base.linker = Some("i686-w64-mingw32-gcc".into()); - - // Mark all dynamic libraries and executables as compatible with the larger 4GiB address - // space available to x86 Windows binaries on x86_64. - base.add_pre_link_args( - LinkerFlavor::Gnu(Cc::No, Lld::No), - &["-m", "i386pe", "--large-address-aware"], - ); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-Wl,--large-address-aware"]); - - Target { - llvm_target: "i686-pc-windows-gnu".into(), - pointer_width: 32, - data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - i64:64-f80:32-n8:16:32-a:0:32-S32" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_gnullvm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_gnullvm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_gnullvm.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_gnullvm.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, Target}; - -pub fn target() -> Target { - let mut base = super::windows_gnullvm_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - base.frame_pointer = FramePointer::Always; // Required for backtraces - base.linker = Some("i686-w64-mingw32-clang".into()); - - // Mark all dynamic libraries and executables as compatible with the larger 4GiB address - // space available to x86 Windows binaries on x86_64. 
- base.add_pre_link_args( - LinkerFlavor::Gnu(Cc::No, Lld::No), - &["-m", "i386pe", "--large-address-aware"], - ); - - Target { - llvm_target: "i686-pc-windows-gnu".into(), - pointer_width: 32, - data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - i64:64-f80:32-n8:16:32-a:0:32-S32" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_msvc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_pc_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -use crate::spec::{LinkerFlavor, Lld, Target}; - -pub fn target() -> Target { - let mut base = super::windows_msvc_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - - base.add_pre_link_args( - LinkerFlavor::Msvc(Lld::No), - &[ - // Mark all dynamic libraries and executables as compatible with the larger 4GiB address - // space available to x86 Windows binaries on x86_64. - "/LARGEADDRESSAWARE", - // Ensure the linker will only produce an image if it can also produce a table of - // the image's safe exception handlers. - // https://docs.microsoft.com/en-us/cpp/build/reference/safeseh-image-has-safe-exception-handlers - "/SAFESEH", - ], - ); - // Workaround for #95429 - base.has_thread_local = false; - - Target { - llvm_target: "i686-pc-windows-msvc".into(), - pointer_width: 32, - data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - i64:64-f80:128-n8:16:32-a:0:32-S32" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_freebsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::freebsd_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32", "-Wl,-znotext"]); - base.stack_probes = StackProbeType::X86; - - Target { - llvm_target: "i686-unknown-freebsd".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:32-n8:16:32-S128" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_haiku.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_haiku.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_haiku.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_haiku.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::haiku_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), 
&["-m32"]); - base.stack_probes = StackProbeType::X86; - - Target { - llvm_target: "i686-unknown-haiku".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:32-n8:16:32-S128" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_hurd_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_hurd_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_hurd_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_hurd_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::hurd_gnu_base::opts(); - base.cpu = "pentiumpro".into(); - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); - base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) }; - - Target { - llvm_target: "i686-unknown-hurd-gnu".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:32-n8:16:32-S128" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - base.supported_sanitizers = SanitizerSet::ADDRESS; - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); - base.stack_probes = StackProbeType::X86; - - Target { - llvm_target: "i686-unknown-linux-gnu".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:32-n8:16:32-S128" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,33 +0,0 @@ -use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32", "-Wl,-melf_i386"]); - base.stack_probes = StackProbeType::X86; - - // The unwinder used by i686-unknown-linux-musl, the LLVM libunwind - // implementation, apparently relies on frame pointers existing... somehow. 
- // It's not clear to me why nor where this dependency is introduced, but the - // test suite does not pass with frame pointers eliminated and it passes - // with frame pointers present. - // - // If you think that this is no longer necessary, then please feel free to - // ignore! If it still passes the test suite and the bots then sounds good - // to me. - // - // This may or may not be related to this bug: - // https://llvm.org/bugs/show_bug.cgi?id=30879 - base.frame_pointer = FramePointer::Always; - - Target { - llvm_target: "i686-unknown-linux-musl".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:32-n8:16:32-S128" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_netbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::netbsd_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); - base.stack_probes = StackProbeType::X86; - - Target { - llvm_target: "i686-unknown-netbsdelf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:32-n8:16:32-S128" - .into(), - arch: "x86".into(), - options: TargetOptions { mcount: "__mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_openbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::openbsd_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32", "-fuse-ld=lld"]); - base.stack_probes = StackProbeType::X86; - - Target { - llvm_target: "i686-unknown-openbsd".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:32-n8:16:32-S128" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_uefi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_uefi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_uefi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_unknown_uefi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,89 +0,0 @@ -// This defines the ia32 target for UEFI systems as described in the UEFI specification. See the -// uefi-base module for generic UEFI options. 
On ia32 systems -// UEFI systems always run in protected-mode, have the interrupt-controller pre-configured and -// force a single-CPU execution. -// The cdecl ABI is used. It differs from the stdcall or fastcall ABI. -// "i686-unknown-windows" is used to get the minimal subset of windows-specific features. - -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::uefi_msvc_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - - // We disable MMX and SSE for now, even though UEFI allows using them. Problem is, you have to - // enable these CPU features explicitly before their first use, otherwise their instructions - // will trigger an exception. Rust does not inject any code that enables AVX/MMX/SSE - // instruction sets, so this must be done by the firmware. However, existing firmware is known - // to leave these uninitialized, thus triggering exceptions if we make use of them. Which is - // why we avoid them and instead use soft-floats. This is also what GRUB and friends did so - // far. - // If you initialize FP units yourself, you can override these flags with custom linker - // arguments, thus giving you access to full MMX/SSE acceleration. - base.features = "-mmx,-sse,+soft-float".into(); - - // Use -GNU here, because of the reason below: - // Background and Problem: - // If we use i686-unknown-windows, the LLVM IA32 MSVC generates compiler intrinsic - // _alldiv, _aulldiv, _allrem, _aullrem, _allmul, which will cause undefined symbol. - // A real issue is __aulldiv() is referred by __udivdi3() - udivmod_inner!(), from - // https://github.com/rust-lang-nursery/compiler-builtins. - // As result, rust-lld generates link error finally. - // Root-cause: - // In rust\src\llvm-project\llvm\lib\Target\X86\X86ISelLowering.cpp, - // we have below code to use MSVC intrinsics. It assumes MSVC target - // will link MSVC library. But that is NOT true in UEFI environment. - // UEFI does not link any MSVC or GCC standard library. - // if (Subtarget.isTargetKnownWindowsMSVC() || - // Subtarget.isTargetWindowsItanium()) { - // // Setup Windows compiler runtime calls. - // setLibcallName(RTLIB::SDIV_I64, "_alldiv"); - // setLibcallName(RTLIB::UDIV_I64, "_aulldiv"); - // setLibcallName(RTLIB::SREM_I64, "_allrem"); - // setLibcallName(RTLIB::UREM_I64, "_aullrem"); - // setLibcallName(RTLIB::MUL_I64, "_allmul"); - // setLibcallCallingConv(RTLIB::SDIV_I64, CallingConv::X86_StdCall); - // setLibcallCallingConv(RTLIB::UDIV_I64, CallingConv::X86_StdCall); - // setLibcallCallingConv(RTLIB::SREM_I64, CallingConv::X86_StdCall); - // setLibcallCallingConv(RTLIB::UREM_I64, CallingConv::X86_StdCall); - // setLibcallCallingConv(RTLIB::MUL_I64, CallingConv::X86_StdCall); - // } - // The compiler intrinsics should be implemented by compiler-builtins. - // Unfortunately, compiler-builtins has not provided those intrinsics yet. Such as: - // i386/divdi3.S - // i386/lshrdi3.S - // i386/moddi3.S - // i386/muldi3.S - // i386/udivdi3.S - // i386/umoddi3.S - // Possible solution: - // 1. Eliminate Intrinsics generation. - // 1.1 Choose different target to bypass isTargetKnownWindowsMSVC(). - // 1.2 Remove the "Setup Windows compiler runtime calls" in LLVM - // 2. Implement Intrinsics. - // We evaluated all options. - // #2 is hard because we need implement the intrinsics (_aulldiv) generated - // from the other intrinsics (__udivdi3) implementation with the same - // functionality (udivmod_inner). If we let _aulldiv() call udivmod_inner!(), - // then we are in loop. 
We may have to find another way to implement udivmod_inner!(). - // #1.2 may break the existing usage. - // #1.1 seems the simplest solution today. - // The IA32 -gnu calling convention is same as the one defined in UEFI specification. - // It uses cdecl, EAX/ECX/EDX as volatile register, and EAX/EDX as return value. - // We also checked the LLVM X86TargetLowering, the differences between -gnu and -msvc - // is fmodf(f32), longjmp() and TLS. None of them impacts the UEFI code. - // As a result, we choose -gnu for i686 version before those intrinsics are implemented in - // compiler-builtins. After compiler-builtins implements all required intrinsics, we may - // remove -gnu and use the default one. - Target { - llvm_target: "i686-unknown-windows-gnu".into(), - pointer_width: 32, - data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - i64:64-f80:32-n8:16:32-a:0:32-S32" - .into(), - arch: "x86".into(), - - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_uwp_windows_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_uwp_windows_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_uwp_windows_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_uwp_windows_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, Target}; - -pub fn target() -> Target { - let mut base = super::windows_uwp_gnu_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - base.frame_pointer = FramePointer::Always; // Required for backtraces - - // Mark all dynamic libraries and executables as compatible with the larger 4GiB address - // space available to x86 Windows binaries on x86_64. 
- base.add_pre_link_args( - LinkerFlavor::Gnu(Cc::No, Lld::No), - &["-m", "i386pe", "--large-address-aware"], - ); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-Wl,--large-address-aware"]); - - Target { - llvm_target: "i686-pc-windows-gnu".into(), - pointer_width: 32, - data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - i64:64-f80:32-n8:16:32-a:0:32-S32" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_uwp_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_uwp_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_uwp_windows_msvc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_uwp_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::windows_uwp_msvc_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - - Target { - llvm_target: "i686-pc-windows-msvc".into(), - pointer_width: 32, - data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - i64:64-f80:128-n8:16:32-a:0:32-S32" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_wrs_vxworks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_wrs_vxworks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_wrs_vxworks.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/i686_wrs_vxworks.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::vxworks_base::opts(); - base.cpu = "pentium4".into(); - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); - base.stack_probes = StackProbeType::X86; - - Target { - llvm_target: "i686-unknown-linux-gnu".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - f64:32:64-f80:32-n8:16:32-S128" - .into(), - arch: "x86".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/illumos_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/illumos_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/illumos_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/illumos_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -use crate::spec::{cvs, Cc, FramePointer, LinkerFlavor, TargetOptions}; - -pub fn opts() -> TargetOptions { - let late_link_args = TargetOptions::link_args( - LinkerFlavor::Unix(Cc::Yes), - &[ - // The illumos libc contains a stack unwinding implementation, as - // does libgcc_s. The latter implementation includes several - // additional symbols that are not always in base libc. To force - // the consistent use of just one unwinder, we ensure libc appears - // after libgcc_s in the NEEDED list for the resultant binary by - // ignoring any attempts to add it as a dynamic dependency until the - // very end. - // FIXME: This should be replaced by a more complete and generic - // mechanism for controlling the order of library arguments passed - // to the linker. 
- "-lc", - // LLVM will insert calls to the stack protector functions - // "__stack_chk_fail" and "__stack_chk_guard" into code in native - // object files. Some platforms include these symbols directly in - // libc, but at least historically these have been provided in - // libssp.so on illumos and Solaris systems. - "-lssp", - ], - ); - - TargetOptions { - os: "illumos".into(), - dynamic_linking: true, - has_rpath: true, - families: cvs!["unix"], - is_like_solaris: true, - linker_flavor: LinkerFlavor::Unix(Cc::Yes), - limit_rdylib_exports: false, // Linker doesn't support this - frame_pointer: FramePointer::Always, - eh_frame_header: false, - late_link_args, - - // While we support ELF TLS, rust requires a way to register - // cleanup handlers (in C, this would be something along the lines of: - // void register_callback(void (*fn)(void *), void *arg); - // (see src/libstd/sys/unix/fast_thread_local.rs) that is currently - // missing in illumos. For now at least, we must fallback to using - // pthread_{get,set}specific. - //has_thread_local: true, - - // FIXME: Currently, rust is invoking cc to link, which ends up - // causing these to get included twice. We should eventually transition - // to having rustc invoke ld directly, in which case these will need to - // be uncommented. - // - // We want XPG6 behavior from libc and libm. See standards(5) - //pre_link_objects_exe: vec![ - // "/usr/lib/amd64/values-Xc.o".into(), - // "/usr/lib/amd64/values-xpg6.o".into(), - //], - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/l4re_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/l4re_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/l4re_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/l4re_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -use crate::spec::{cvs, Cc, LinkerFlavor, PanicStrategy, RelocModel, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "l4re".into(), - env: "uclibc".into(), - linker_flavor: LinkerFlavor::Unix(Cc::No), - panic_strategy: PanicStrategy::Abort, - linker: Some("l4-bender".into()), - families: cvs!["unix"], - relocation_model: RelocModel::Static, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -use crate::spec::{cvs, RelroLevel, SplitDebuginfo, TargetOptions}; -use std::borrow::Cow; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "linux".into(), - dynamic_linking: true, - families: cvs!["unix"], - has_rpath: true, - position_independent_executables: true, - relro_level: RelroLevel::Full, - has_thread_local: true, - crt_static_respected: true, - supported_split_debuginfo: Cow::Borrowed(&[ - SplitDebuginfo::Packed, - SplitDebuginfo::Unpacked, - SplitDebuginfo::Off, - ]), - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_gnu_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_gnu_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_gnu_base.rs 2023-12-04 
19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_gnu_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -use crate::spec::TargetOptions; - -pub fn opts() -> TargetOptions { - TargetOptions { env: "gnu".into(), ..super::linux_base::opts() } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_musl_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_musl_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_musl_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_musl_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -use crate::spec::crt_objects::{self, LinkSelfContainedDefault}; -use crate::spec::TargetOptions; - -pub fn opts() -> TargetOptions { - let mut base = super::linux_base::opts(); - - base.env = "musl".into(); - base.pre_link_objects_self_contained = crt_objects::pre_musl_self_contained(); - base.post_link_objects_self_contained = crt_objects::post_musl_self_contained(); - base.link_self_contained = LinkSelfContainedDefault::Musl; - - // These targets statically link libc by default - base.crt_static_default = true; - - base -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_ohos_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_ohos_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_ohos_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_ohos_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -use crate::spec::TargetOptions; - -pub fn opts() -> TargetOptions { - let mut base = super::linux_base::opts(); - - base.env = "ohos".into(); - base.crt_static_default = false; - base.force_emulated_tls = true; - base.has_thread_local = false; - - base -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_uclibc_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_uclibc_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_uclibc_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/linux_uclibc_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -use crate::spec::TargetOptions; - -pub fn opts() -> TargetOptions { - TargetOptions { env: "uclibc".into(), ..super::linux_base::opts() } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "loongarch64-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(), - arch: "loongarch64".into(), - options: TargetOptions { - cpu: "generic".into(), - features: "+f,+d".into(), - llvm_abiname: "lp64d".into(), - max_atomic_width: Some(64), - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_none.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -use super::{Cc, CodeModel, LinkerFlavor, Lld, PanicStrategy, RelocModel}; -use super::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "loongarch64-unknown-none".into(), - pointer_width: 64, - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(), - arch: "loongarch64".into(), - options: TargetOptions { - cpu: "generic".into(), - features: "+f,+d".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - llvm_abiname: "lp64d".into(), - max_atomic_width: Some(64), - relocation_model: RelocModel::Static, - panic_strategy: PanicStrategy::Abort, - code_model: Some(CodeModel::Small), - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_none_softfloat.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_none_softfloat.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_none_softfloat.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/loongarch64_unknown_none_softfloat.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -use super::{Cc, CodeModel, LinkerFlavor, Lld, PanicStrategy, RelocModel}; -use super::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "loongarch64-unknown-none".into(), - pointer_width: 64, - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(), - arch: "loongarch64".into(), - options: TargetOptions { - cpu: "generic".into(), - features: "-f,-d".into(), - abi: "softfloat".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - llvm_abiname: "lp64s".into(), - max_atomic_width: Some(64), - relocation_model: RelocModel::Static, - panic_strategy: PanicStrategy::Abort, - code_model: Some(CodeModel::Small), - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/m68k_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/m68k_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/m68k_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/m68k_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); - base.cpu = "M68020".into(); - base.max_atomic_width = Some(32); - - Target { - llvm_target: "m68k-unknown-linux-gnu".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:16:32-i8:8:8-i16:16:16-i32:16:32-n8:16:32-a:0:16-S16".into(), - arch: "m68k".into(), - options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_openwrt_linux_musl.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_openwrt_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_openwrt_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_openwrt_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -/// A target tuple for OpenWrt MIPS64 targets -/// -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.cpu = "mips64r2".into(); - base.features = "+mips64r2,+soft-float".into(); - base.max_atomic_width = Some(64); - base.crt_static_default = false; - - Target { - // LLVM doesn't recognize "muslabi64" yet. - llvm_target: "mips64-unknown-linux-musl".into(), - pointer_width: 64, - data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), - arch: "mips64".into(), - options: TargetOptions { - abi: "abi64".into(), - endian: Endian::Big, - mcount: "_mcount".into(), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_unknown_linux_gnuabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_unknown_linux_gnuabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_unknown_linux_gnuabi64.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_unknown_linux_gnuabi64.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mips64-unknown-linux-gnuabi64".into(), - pointer_width: 64, - data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), - arch: "mips64".into(), - options: TargetOptions { - abi: "abi64".into(), - endian: Endian::Big, - // NOTE(mips64r2) matches C toolchain - cpu: "mips64r2".into(), - features: "+mips64r2,+xgot".into(), - max_atomic_width: Some(64), - mcount: "_mcount".into(), - - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_unknown_linux_muslabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_unknown_linux_muslabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_unknown_linux_muslabi64.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64_unknown_linux_muslabi64.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.cpu = "mips64r2".into(); - base.features = "+mips64r2".into(); - base.max_atomic_width = Some(64); - Target { - // LLVM doesn't recognize "muslabi64" yet. 
- llvm_target: "mips64-unknown-linux-musl".into(), - pointer_width: 64, - data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), - arch: "mips64".into(), - options: TargetOptions { - abi: "abi64".into(), - endian: Endian::Big, - mcount: "_mcount".into(), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64el_unknown_linux_gnuabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64el_unknown_linux_gnuabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64el_unknown_linux_gnuabi64.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64el_unknown_linux_gnuabi64.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mips64el-unknown-linux-gnuabi64".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), - arch: "mips64".into(), - options: TargetOptions { - abi: "abi64".into(), - // NOTE(mips64r2) matches C toolchain - cpu: "mips64r2".into(), - features: "+mips64r2,+xgot".into(), - max_atomic_width: Some(64), - mcount: "_mcount".into(), - - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64el_unknown_linux_muslabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64el_unknown_linux_muslabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64el_unknown_linux_muslabi64.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips64el_unknown_linux_muslabi64.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.cpu = "mips64r2".into(); - base.features = "+mips64r2".into(); - base.max_atomic_width = Some(64); - Target { - // LLVM doesn't recognize "muslabi64" yet. 
- llvm_target: "mips64el-unknown-linux-musl".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), - arch: "mips64".into(), - options: TargetOptions { abi: "abi64".into(), mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mips-unknown-linux-gnu".into(), - pointer_width: 32, - data_layout: "E-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), - arch: "mips".into(), - options: TargetOptions { - endian: Endian::Big, - cpu: "mips32r2".into(), - features: "+mips32r2,+fpxx,+nooddspreg".into(), - max_atomic_width: Some(32), - mcount: "_mcount".into(), - - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.cpu = "mips32r2".into(); - base.features = "+mips32r2,+soft-float".into(); - base.max_atomic_width = Some(32); - base.crt_static_default = false; - Target { - llvm_target: "mips-unknown-linux-musl".into(), - pointer_width: 32, - data_layout: "E-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), - arch: "mips".into(), - options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_uclibc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_uclibc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_uclibc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mips_unknown_linux_uclibc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mips-unknown-linux-uclibc".into(), - pointer_width: 32, - data_layout: "E-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), - arch: "mips".into(), - options: TargetOptions { - endian: Endian::Big, - cpu: "mips32r2".into(), - features: "+mips32r2,+soft-float".into(), - max_atomic_width: Some(32), - mcount: "_mcount".into(), - - ..super::linux_uclibc_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psp.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psp.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psp.rs 2023-12-04 19:48:34.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psp.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,36 +0,0 @@ -use crate::spec::{cvs, Cc, LinkerFlavor, Lld, RelocModel, Target, TargetOptions}; - -// The PSP has custom linker requirements. -const LINKER_SCRIPT: &str = include_str!("./mipsel_sony_psp_linker_script.ld"); - -pub fn target() -> Target { - let pre_link_args = TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::No, Lld::No), - &["--emit-relocs", "--nmagic"], - ); - - Target { - llvm_target: "mipsel-sony-psp".into(), - pointer_width: 32, - data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), - arch: "mips".into(), - - options: TargetOptions { - os: "psp".into(), - vendor: "sony".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - cpu: "mips2".into(), - linker: Some("rust-lld".into()), - relocation_model: RelocModel::Static, - - // PSP FPU only supports single precision floats. - features: "+single-float".into(), - - // PSP does not support trap-on-condition instructions. - llvm_args: cvs!["-mno-check-zero-division"], - pre_link_args, - link_script: Some(LINKER_SCRIPT.into()), - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psp_linker_script.ld rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psp_linker_script.ld --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psp_linker_script.ld 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psp_linker_script.ld 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -ENTRY(module_start) -SECTIONS -{ - /* PRX format requires text to begin at 0 */ - .text 0 : { *(.text .text.*) } - - /* Sort stubs for convenient ordering */ - .sceStub.text : { *(.sceStub.text) *(SORT(.sceStub.text.*)) } - - /* PSP import library stub sections. Bundles together `.lib.stub.entry.*` - * sections for better `--gc-sections` support. */ - .lib.stub.top : { *(.lib.stub.top) } - .lib.stub : { *(.lib.stub) *(.lib.stub.entry.*) } - .lib.stub.btm : { *(.lib.stub.btm) } - - /* Keep these sections around, even though they may appear unused to the linker */ - .lib.ent.top : { KEEP(*(.lib.ent.top)) } - .lib.ent : { KEEP(*(.lib.ent)) } - .lib.ent.btm : { KEEP(*(.lib.ent.btm)) } - - .eh_frame_hdr : { *(.eh_frame_hdr) } - - /* Add symbols for LLVM's libunwind */ - __eh_frame_hdr_start = SIZEOF(.eh_frame_hdr) > 0 ? ADDR(.eh_frame_hdr) : 0; - __eh_frame_hdr_end = SIZEOF(.eh_frame_hdr) > 0 ? . 
: 0; - .eh_frame : - { - __eh_frame_start = .; - KEEP(*(.eh_frame)) - __eh_frame_end = .; - } - - /* These are explicitly listed to avoid being merged into .rodata */ - .rodata.sceResident : { *(.rodata.sceResident) *(.rodata.sceResident.*) } - .rodata.sceModuleInfo : { *(.rodata.sceModuleInfo) } - /* Sort NIDs for convenient ordering */ - .rodata.sceNid : { *(.rodata.sceNid) *(SORT(.rodata.sceNid.*)) } - - .rodata : { *(.rodata .rodata.*) } - .data : { *(.data .data.*) } - .gcc_except_table : { *(.gcc_except_table .gcc_except_table.*) } - .bss : { *(.bss .bss.*) } - - /DISCARD/ : { *(.rel.sceStub.text .MIPS.abiflags .reginfo) } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psx.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psx.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psx.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_sony_psx.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,37 +0,0 @@ -use crate::spec::{cvs, Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mipsel-sony-psx".into(), - pointer_width: 32, - data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), - arch: "mips".into(), - - options: TargetOptions { - os: "none".into(), - env: "psx".into(), - vendor: "sony".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - cpu: "mips1".into(), - executables: true, - linker: Some("rust-lld".into()), - relocation_model: RelocModel::Static, - exe_suffix: ".exe".into(), - - // PSX doesn't natively support floats. - features: "+soft-float".into(), - - // This should be 16 bits, but LLVM incorrectly tries emitting MIPS-II SYNC instructions - // for atomic loads and stores. This crashes rustc so we have to disable the Atomic* API - // until this is fixed upstream. See https://reviews.llvm.org/D122427#3420144 for more - // info. - max_atomic_width: Some(0), - - // PSX does not support trap-on-condition instructions. 
- llvm_args: cvs!["-mno-check-zero-division"], - llvm_abiname: "o32".into(), - panic_strategy: PanicStrategy::Abort, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mipsel-unknown-linux-gnu".into(), - pointer_width: 32, - data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), - arch: "mips".into(), - - options: TargetOptions { - cpu: "mips32r2".into(), - features: "+mips32r2,+fpxx,+nooddspreg".into(), - max_atomic_width: Some(32), - mcount: "_mcount".into(), - - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.cpu = "mips32r2".into(); - base.features = "+mips32r2,+soft-float".into(); - base.max_atomic_width = Some(32); - base.crt_static_default = false; - Target { - llvm_target: "mipsel-unknown-linux-musl".into(), - pointer_width: 32, - data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), - arch: "mips".into(), - options: TargetOptions { mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_uclibc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_uclibc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_uclibc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_linux_uclibc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mipsel-unknown-linux-uclibc".into(), - pointer_width: 32, - data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), - arch: "mips".into(), - - options: TargetOptions { - cpu: "mips32r2".into(), - features: "+mips32r2,+soft-float".into(), - max_atomic_width: Some(32), - mcount: "_mcount".into(), - - ..super::linux_uclibc_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_none.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsel_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -//! Bare MIPS32r2, little endian, softfloat, O32 calling convention -//! 
-//! Can be used for MIPS M4K core (e.g. on PIC32MX devices) - -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mipsel-unknown-none".into(), - pointer_width: 32, - data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), - arch: "mips".into(), - - options: TargetOptions { - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - cpu: "mips32r2".into(), - features: "+mips32r2,+soft-float,+noabicalls".into(), - max_atomic_width: Some(32), - linker: Some("rust-lld".into()), - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - emit_debug_gdb_scripts: false, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa32r6_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa32r6_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa32r6_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa32r6_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mipsisa32r6-unknown-linux-gnu".into(), - pointer_width: 32, - data_layout: "E-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), - arch: "mips32r6".into(), - options: TargetOptions { - endian: Endian::Big, - cpu: "mips32r6".into(), - features: "+mips32r6".into(), - max_atomic_width: Some(32), - mcount: "_mcount".into(), - - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa32r6el_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa32r6el_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa32r6el_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa32r6el_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mipsisa32r6el-unknown-linux-gnu".into(), - pointer_width: 32, - data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), - arch: "mips32r6".into(), - - options: TargetOptions { - cpu: "mips32r6".into(), - features: "+mips32r6".into(), - max_atomic_width: Some(32), - mcount: "_mcount".into(), - - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa64r6_unknown_linux_gnuabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa64r6_unknown_linux_gnuabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa64r6_unknown_linux_gnuabi64.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa64r6_unknown_linux_gnuabi64.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mipsisa64r6-unknown-linux-gnuabi64".into(), - pointer_width: 64, - data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), - arch: "mips64r6".into(), - options: TargetOptions { - abi: "abi64".into(), - endian: Endian::Big, - 
// NOTE(mips64r6) matches C toolchain - cpu: "mips64r6".into(), - features: "+mips64r6".into(), - max_atomic_width: Some(64), - mcount: "_mcount".into(), - - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa64r6el_unknown_linux_gnuabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa64r6el_unknown_linux_gnuabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa64r6el_unknown_linux_gnuabi64.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mipsisa64r6el_unknown_linux_gnuabi64.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "mipsisa64r6el-unknown-linux-gnuabi64".into(), - pointer_width: 64, - data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), - arch: "mips64r6".into(), - options: TargetOptions { - abi: "abi64".into(), - // NOTE(mips64r6) matches C toolchain - cpu: "mips64r6".into(), - features: "+mips64r6".into(), - max_atomic_width: Some(64), - mcount: "_mcount".into(), - - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -38,7 +38,7 @@ use crate::abi::{Endian, Integer, Size, TargetDataLayout, TargetDataLayoutErrors}; use crate::json::{Json, ToJson}; use crate::spec::abi::{lookup as lookup_abi, Abi}; -use crate::spec::crt_objects::{CrtObjects, LinkSelfContainedDefault}; +use crate::spec::crt_objects::CrtObjects; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_fs_util::try_canonicalize; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; @@ -57,47 +57,11 @@ pub mod abi; pub mod crt_objects; -mod aix_base; -mod android_base; -mod apple_base; -pub use apple_base::deployment_target as current_apple_deployment_target; -pub use apple_base::platform as current_apple_platform; -pub use apple_base::sdk_version as current_apple_sdk_version; -mod avr_gnu_base; -pub use avr_gnu_base::ef_avr_arch; -mod bpf_base; -mod dragonfly_base; -mod freebsd_base; -mod fuchsia_base; -mod haiku_base; -mod hermit_base; -mod hurd_base; -mod hurd_gnu_base; -mod illumos_base; -mod l4re_base; -mod linux_base; -mod linux_gnu_base; -mod linux_musl_base; -mod linux_ohos_base; -mod linux_uclibc_base; -mod msvc_base; -mod netbsd_base; -mod nto_qnx_base; -mod openbsd_base; -mod redox_base; -mod solaris_base; -mod solid_base; -mod teeos_base; -mod thumb_base; -mod uefi_msvc_base; -mod unikraft_linux_musl_base; -mod vxworks_base; -mod wasm_base; -mod windows_gnu_base; -mod windows_gnullvm_base; -mod windows_msvc_base; -mod windows_uwp_gnu_base; -mod windows_uwp_msvc_base; +mod base; +pub use base::apple::deployment_target as current_apple_deployment_target; +pub use base::apple::platform as current_apple_platform; +pub use base::apple::sdk_version as current_apple_sdk_version; +pub use base::avr_gnu::ef_avr_arch; /// Linker is called through a C/C++ compiler. 
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)] @@ -164,11 +128,11 @@ /// Linker flavors available externally through command line (`-Clinker-flavor`) /// or json target specifications. -/// FIXME: This set has accumulated historically, bring it more in line with the internal -/// linker flavors (`LinkerFlavor`). +/// This set has accumulated historically, and contains both (stable and unstable) legacy values, as +/// well as modern ones matching the internal linker flavors (`LinkerFlavor`). #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)] pub enum LinkerFlavorCli { - // New (unstable) flavors, with direct counterparts in `LinkerFlavor`. + // Modern (unstable) flavors, with direct counterparts in `LinkerFlavor`. Gnu(Cc, Lld), Darwin(Cc, Lld), WasmLld(Cc), @@ -179,13 +143,11 @@ Bpf, Ptx, - // Below: the legacy stable values. + // Legacy stable values Gcc, Ld, Lld(LldFlavor), Em, - BpfLinker, - PtxLinker, } impl LinkerFlavorCli { @@ -199,9 +161,7 @@ | LinkerFlavorCli::Msvc(Lld::Yes) | LinkerFlavorCli::EmCc | LinkerFlavorCli::Bpf - | LinkerFlavorCli::Ptx - | LinkerFlavorCli::BpfLinker - | LinkerFlavorCli::PtxLinker => true, + | LinkerFlavorCli::Ptx => true, LinkerFlavorCli::Gcc | LinkerFlavorCli::Ld | LinkerFlavorCli::Lld(..) @@ -279,11 +239,10 @@ LinkerFlavorCli::Lld(LldFlavor::Wasm) => LinkerFlavor::WasmLld(Cc::No), LinkerFlavorCli::Lld(LldFlavor::Link) => LinkerFlavor::Msvc(Lld::Yes), LinkerFlavorCli::Em => LinkerFlavor::EmCc, - LinkerFlavorCli::BpfLinker => LinkerFlavor::Bpf, - LinkerFlavorCli::PtxLinker => LinkerFlavor::Ptx, } } + /// Returns the corresponding backwards-compatible CLI flavor. fn to_cli(self) -> LinkerFlavorCli { match self { LinkerFlavor::Gnu(Cc::Yes, _) @@ -299,8 +258,22 @@ LinkerFlavor::Msvc(Lld::Yes) => LinkerFlavorCli::Lld(LldFlavor::Link), LinkerFlavor::Msvc(..) => LinkerFlavorCli::Msvc(Lld::No), LinkerFlavor::EmCc => LinkerFlavorCli::Em, - LinkerFlavor::Bpf => LinkerFlavorCli::BpfLinker, - LinkerFlavor::Ptx => LinkerFlavorCli::PtxLinker, + LinkerFlavor::Bpf => LinkerFlavorCli::Bpf, + LinkerFlavor::Ptx => LinkerFlavorCli::Ptx, + } + } + + /// Returns the modern CLI flavor that is the counterpart of this flavor. 
+ fn to_cli_counterpart(self) -> LinkerFlavorCli { + match self { + LinkerFlavor::Gnu(cc, lld) => LinkerFlavorCli::Gnu(cc, lld), + LinkerFlavor::Darwin(cc, lld) => LinkerFlavorCli::Darwin(cc, lld), + LinkerFlavor::WasmLld(cc) => LinkerFlavorCli::WasmLld(cc), + LinkerFlavor::Unix(cc) => LinkerFlavorCli::Unix(cc), + LinkerFlavor::Msvc(lld) => LinkerFlavorCli::Msvc(lld), + LinkerFlavor::EmCc => LinkerFlavorCli::EmCc, + LinkerFlavor::Bpf => LinkerFlavorCli::Bpf, + LinkerFlavor::Ptx => LinkerFlavorCli::Ptx, } } @@ -320,7 +293,6 @@ LinkerFlavorCli::Ld => (Some(Cc::No), Some(Lld::No)), LinkerFlavorCli::Lld(_) => (Some(Cc::No), Some(Lld::Yes)), LinkerFlavorCli::Em => (Some(Cc::Yes), Some(Lld::Yes)), - LinkerFlavorCli::BpfLinker | LinkerFlavorCli::PtxLinker => (None, None), } } @@ -511,7 +483,7 @@ (LinkerFlavorCli::Bpf) "bpf" (LinkerFlavorCli::Ptx) "ptx" - // Below: legacy stable values + // Legacy stable flavors (LinkerFlavorCli::Gcc) "gcc" (LinkerFlavorCli::Ld) "ld" (LinkerFlavorCli::Lld(LldFlavor::Ld)) "ld.lld" @@ -519,8 +491,6 @@ (LinkerFlavorCli::Lld(LldFlavor::Link)) "lld-link" (LinkerFlavorCli::Lld(LldFlavor::Wasm)) "wasm-ld" (LinkerFlavorCli::Em) "em" - (LinkerFlavorCli::BpfLinker) "bpf-linker" - (LinkerFlavorCli::PtxLinker) "ptx-linker" } impl ToJson for LinkerFlavorCli { @@ -529,6 +499,203 @@ } } +/// The different `-Clink-self-contained` options that can be specified in a target spec: +/// - enabling or disabling in bulk +/// - some target-specific pieces of inference to determine whether to use self-contained linking +/// if `-Clink-self-contained` is not specified explicitly (e.g. on musl/mingw) +/// - explicitly enabling some of the self-contained linking components, e.g. the linker component +/// to use `rust-lld` +#[derive(Clone, Copy, PartialEq, Debug)] +pub enum LinkSelfContainedDefault { + /// The target spec explicitly enables self-contained linking. + True, + + /// The target spec explicitly disables self-contained linking. + False, + + /// The target spec requests that the self-contained mode is inferred, in the context of musl. + InferredForMusl, + + /// The target spec requests that the self-contained mode is inferred, in the context of mingw. + InferredForMingw, + + /// The target spec explicitly enables a list of self-contained linking components: e.g. for + /// targets opting into a subset of components like the CLI's `-C link-self-contained=+linker`. + WithComponents(LinkSelfContainedComponents), +} + +/// Parses a backwards-compatible `-Clink-self-contained` option string, without components. +impl FromStr for LinkSelfContainedDefault { + type Err = (); + + fn from_str(s: &str) -> Result { + Ok(match s { + "false" => LinkSelfContainedDefault::False, + "true" | "wasm" => LinkSelfContainedDefault::True, + "musl" => LinkSelfContainedDefault::InferredForMusl, + "mingw" => LinkSelfContainedDefault::InferredForMingw, + _ => return Err(()), + }) + } +} + +impl ToJson for LinkSelfContainedDefault { + fn to_json(&self) -> Json { + match *self { + LinkSelfContainedDefault::WithComponents(components) => { + // Serialize the components in a json object's `components` field, to prepare for a + // future where `crt-objects-fallback` is removed from the json specs and + // incorporated as a field here. 
+ let mut map = BTreeMap::new(); + map.insert("components", components); + map.to_json() + } + + // Stable backwards-compatible values + LinkSelfContainedDefault::True => "true".to_json(), + LinkSelfContainedDefault::False => "false".to_json(), + LinkSelfContainedDefault::InferredForMusl => "musl".to_json(), + LinkSelfContainedDefault::InferredForMingw => "mingw".to_json(), + } + } +} + +impl LinkSelfContainedDefault { + /// Returns whether the target spec has self-contained linking explicitly disabled. Used to emit + /// errors if the user then enables it on the CLI. + pub fn is_disabled(self) -> bool { + self == LinkSelfContainedDefault::False + } + + /// Returns whether the target spec explictly requests self-contained linking, i.e. not via + /// inference. + pub fn is_linker_enabled(self) -> bool { + match self { + LinkSelfContainedDefault::True => true, + LinkSelfContainedDefault::False => false, + LinkSelfContainedDefault::WithComponents(c) => { + c.contains(LinkSelfContainedComponents::LINKER) + } + _ => false, + } + } + + /// Returns the key to use when serializing the setting to json: + /// - individual components in a `link-self-contained` object value + /// - the other variants as a backwards-compatible `crt-objects-fallback` string + fn json_key(self) -> &'static str { + match self { + LinkSelfContainedDefault::WithComponents(_) => "link-self-contained", + _ => "crt-objects-fallback", + } + } +} + +bitflags::bitflags! { + #[derive(Default)] + /// The `-C link-self-contained` components that can individually be enabled or disabled. + pub struct LinkSelfContainedComponents: u8 { + /// CRT objects (e.g. on `windows-gnu`, `musl`, `wasi` targets) + const CRT_OBJECTS = 1 << 0; + /// libc static library (e.g. on `musl`, `wasi` targets) + const LIBC = 1 << 1; + /// libgcc/libunwind (e.g. on `windows-gnu`, `fuchsia`, `fortanix`, `gnullvm` targets) + const UNWIND = 1 << 2; + /// Linker, dlltool, and their necessary libraries (e.g. on `windows-gnu` and for `rust-lld`) + const LINKER = 1 << 3; + /// Sanitizer runtime libraries + const SANITIZERS = 1 << 4; + /// Other MinGW libs and Windows import libs + const MINGW = 1 << 5; + } +} + +impl LinkSelfContainedComponents { + /// Parses a single `-Clink-self-contained` well-known component, not a set of flags. + pub fn from_str(s: &str) -> Option { + Some(match s { + "crto" => LinkSelfContainedComponents::CRT_OBJECTS, + "libc" => LinkSelfContainedComponents::LIBC, + "unwind" => LinkSelfContainedComponents::UNWIND, + "linker" => LinkSelfContainedComponents::LINKER, + "sanitizers" => LinkSelfContainedComponents::SANITIZERS, + "mingw" => LinkSelfContainedComponents::MINGW, + _ => return None, + }) + } + + /// Return the component's name. + /// + /// Returns `None` if the bitflags aren't a singular component (but a mix of multiple flags). + pub fn as_str(self) -> Option<&'static str> { + Some(match self { + LinkSelfContainedComponents::CRT_OBJECTS => "crto", + LinkSelfContainedComponents::LIBC => "libc", + LinkSelfContainedComponents::UNWIND => "unwind", + LinkSelfContainedComponents::LINKER => "linker", + LinkSelfContainedComponents::SANITIZERS => "sanitizers", + LinkSelfContainedComponents::MINGW => "mingw", + _ => return None, + }) + } + + /// Returns an array of all the components. 
+ fn all_components() -> [LinkSelfContainedComponents; 6] { + [ + LinkSelfContainedComponents::CRT_OBJECTS, + LinkSelfContainedComponents::LIBC, + LinkSelfContainedComponents::UNWIND, + LinkSelfContainedComponents::LINKER, + LinkSelfContainedComponents::SANITIZERS, + LinkSelfContainedComponents::MINGW, + ] + } + + /// Returns whether at least a component is enabled. + pub fn are_any_components_enabled(self) -> bool { + !self.is_empty() + } + + /// Returns whether `LinkSelfContainedComponents::LINKER` is enabled. + pub fn is_linker_enabled(self) -> bool { + self.contains(LinkSelfContainedComponents::LINKER) + } + + /// Returns whether `LinkSelfContainedComponents::CRT_OBJECTS` is enabled. + pub fn is_crt_objects_enabled(self) -> bool { + self.contains(LinkSelfContainedComponents::CRT_OBJECTS) + } +} + +impl IntoIterator for LinkSelfContainedComponents { + type Item = LinkSelfContainedComponents; + type IntoIter = std::vec::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + LinkSelfContainedComponents::all_components() + .into_iter() + .filter(|&s| self.contains(s)) + .collect::>() + .into_iter() + } +} + +impl ToJson for LinkSelfContainedComponents { + fn to_json(&self) -> Json { + let components: Vec<_> = Self::all_components() + .into_iter() + .filter(|c| self.contains(*c)) + .map(|c| { + // We can unwrap because we're iterating over all the known singular components, + // not an actual set of flags where `as_str` can fail. + c.as_str().unwrap().to_owned() + }) + .collect(); + + components.to_json() + } +} + #[derive(Clone, Copy, Debug, PartialEq, Hash, Encodable, Decodable, HashStable_Generic)] pub enum PanicStrategy { Unwind, @@ -1241,14 +1408,16 @@ macro_rules! supported_targets { ( $(($triple:literal, $module:ident),)+ ) => { - $(mod $module;)+ + mod targets { + $(pub(crate) mod $module;)+ + } /// List of supported targets pub const TARGETS: &[&str] = &[$($triple),+]; fn load_builtin(target: &str) -> Option { let mut t = match target { - $( $triple => $module::target(), )+ + $( $triple => targets::$module::target(), )+ _ => return None, }; t.is_builtin = true; @@ -1264,7 +1433,7 @@ $( #[test] // `#[test]` fn $module() { - tests_impl::test_target(super::$module::target()); + tests_impl::test_target(crate::spec::targets::$module::target()); } )+ } @@ -1279,6 +1448,7 @@ ("loongarch64-unknown-linux-gnu", loongarch64_unknown_linux_gnu), ("m68k-unknown-linux-gnu", m68k_unknown_linux_gnu), ("csky-unknown-linux-gnuabiv2", csky_unknown_linux_gnuabiv2), + ("csky-unknown-linux-gnuabiv2hf", csky_unknown_linux_gnuabiv2hf), ("mips-unknown-linux-gnu", mips_unknown_linux_gnu), ("mips64-unknown-linux-gnuabi64", mips64_unknown_linux_gnuabi64), ("mips64el-unknown-linux-gnuabi64", mips64el_unknown_linux_gnuabi64), @@ -1360,7 +1530,9 @@ ("aarch64_be-unknown-netbsd", aarch64_be_unknown_netbsd), ("armv6-unknown-netbsd-eabihf", armv6_unknown_netbsd_eabihf), ("armv7-unknown-netbsd-eabihf", armv7_unknown_netbsd_eabihf), + ("i586-unknown-netbsd", i586_unknown_netbsd), ("i686-unknown-netbsd", i686_unknown_netbsd), + ("mipsel-unknown-netbsd", mipsel_unknown_netbsd), ("powerpc-unknown-netbsd", powerpc_unknown_netbsd), ("riscv64gc-unknown-netbsd", riscv64gc_unknown_netbsd), ("sparc64-unknown-netbsd", sparc64_unknown_netbsd), @@ -1399,6 +1571,7 @@ ("aarch64-apple-ios-macabi", aarch64_apple_ios_macabi), ("aarch64-apple-ios-sim", aarch64_apple_ios_sim), ("aarch64-apple-tvos", aarch64_apple_tvos), + ("aarch64-apple-tvos-sim", aarch64_apple_tvos_sim), ("x86_64-apple-tvos", x86_64_apple_tvos), 
("armv7k-apple-watchos", armv7k_apple_watchos), @@ -1704,6 +1877,8 @@ /// Same as `(pre|post)_link_objects`, but when self-contained linking mode is enabled. pub pre_link_objects_self_contained: CrtObjects, pub post_link_objects_self_contained: CrtObjects, + /// Behavior for the self-contained linking mode: inferred for some targets, or explicitly + /// enabled (in bulk, or with individual components). pub link_self_contained: LinkSelfContainedDefault, /// Linker arguments that are passed *before* any user-defined libraries. @@ -2104,7 +2279,7 @@ } fn update_to_cli(&mut self) { - self.linker_flavor_json = self.linker_flavor.to_cli(); + self.linker_flavor_json = self.linker_flavor.to_cli_counterpart(); self.lld_flavor_json = self.linker_flavor.lld_flavor(); self.linker_is_gnu_json = self.linker_flavor.is_gnu(); for (args, args_json) in [ @@ -2114,8 +2289,10 @@ (&self.late_link_args_static, &mut self.late_link_args_static_json), (&self.post_link_args, &mut self.post_link_args_json), ] { - *args_json = - args.iter().map(|(flavor, args)| (flavor.to_cli(), args.clone())).collect(); + *args_json = args + .iter() + .map(|(flavor, args)| (flavor.to_cli_counterpart(), args.clone())) + .collect(); } } } @@ -2658,8 +2835,43 @@ } Ok::<(), String>(()) } ); - - ($key_name:ident = $json_name:expr, link_self_contained) => ( { + ($key_name:ident, link_self_contained_components) => ( { + // Skeleton of what needs to be parsed: + // + // ``` + // $name: { + // "components": [ + // + // ] + // } + // ``` + let name = (stringify!($key_name)).replace("_", "-"); + if let Some(o) = obj.remove(&name) { + if let Some(o) = o.as_object() { + let component_array = o.get("components") + .ok_or_else(|| format!("{name}: expected a \ + JSON object with a `components` field."))?; + let component_array = component_array.as_array() + .ok_or_else(|| format!("{name}.components: expected a JSON array"))?; + let mut components = LinkSelfContainedComponents::empty(); + for s in component_array { + components |= match s.as_str() { + Some(s) => { + LinkSelfContainedComponents::from_str(s) + .ok_or_else(|| format!("unknown \ + `-Clink-self-contained` component: {s}"))? 
+                            },
+                            _ => return Err(format!("not a string: {:?}", s)),
+                        };
+                    }
+                    base.$key_name = LinkSelfContainedDefault::WithComponents(components);
+                } else {
+                    incorrect_type.push(name)
+                }
+            }
+            Ok::<(), String>(())
+        } );
+        ($key_name:ident = $json_name:expr, link_self_contained_backwards_compatible) => ( {
+            let name = $json_name;
+            obj.remove(name).and_then(|o| o.as_str().and_then(|s| {
                 match s.parse::<LinkSelfContainedDefault>() {
@@ -2812,7 +3024,13 @@
         key!(post_link_objects = "post-link-objects", link_objects);
         key!(pre_link_objects_self_contained = "pre-link-objects-fallback", link_objects);
         key!(post_link_objects_self_contained = "post-link-objects-fallback", link_objects);
-        key!(link_self_contained = "crt-objects-fallback", link_self_contained)?;
+        // Deserializes the backwards-compatible variants of `-Clink-self-contained`
+        key!(
+            link_self_contained = "crt-objects-fallback",
+            link_self_contained_backwards_compatible
+        )?;
+        // Deserializes the components variant of `-Clink-self-contained`
+        key!(link_self_contained, link_self_contained_components)?;
         key!(pre_link_args_json = "pre-link-args", link_args);
         key!(late_link_args_json = "late-link-args", link_args);
         key!(late_link_args_dynamic_json = "late-link-args-dynamic", link_args);
@@ -3068,7 +3286,6 @@
         target_option_val!(post_link_objects);
         target_option_val!(pre_link_objects_self_contained, "pre-link-objects-fallback");
         target_option_val!(post_link_objects_self_contained, "post-link-objects-fallback");
-        target_option_val!(link_self_contained, "crt-objects-fallback");
         target_option_val!(link_args - pre_link_args_json, "pre-link-args");
         target_option_val!(link_args - late_link_args_json, "late-link-args");
         target_option_val!(link_args - late_link_args_dynamic_json, "late-link-args-dynamic");
@@ -3165,6 +3382,10 @@
             d.insert("default-adjusted-cabi".into(), Abi::name(abi).to_json());
         }
 
+        // Serializing `-Clink-self-contained` needs a dynamic key to support the
+        // backwards-compatible variants.
+        d.insert(self.link_self_contained.json_key().into(), self.link_self_contained.to_json());
+
         Json::Object(d)
     }
 }
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/msp430_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/msp430_none_elf.rs
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/msp430_none_elf.rs	2023-12-04 19:48:34.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/msp430_none_elf.rs	1970-01-01 00:00:00.000000000 +0000
@@ -1,59 +0,0 @@
-use crate::spec::{cvs, Cc, LinkerFlavor, PanicStrategy, RelocModel, Target, TargetOptions};
-
-pub fn target() -> Target {
-    Target {
-        llvm_target: "msp430-none-elf".into(),
-        pointer_width: 16,
-        data_layout: "e-m:e-p:16:16-i32:16-i64:16-f32:16-f64:16-a:8-n8:16-S16".into(),
-        arch: "msp430".into(),
-
-        options: TargetOptions {
-            c_int_width: "16".into(),
-
-            // The LLVM backend currently can't generate object files. To
-            // workaround this LLVM generates assembly files which then we feed
-            // to gcc to get object files. For this reason we have a hard
-            // dependency on this specific gcc.
-            asm_args: cvs!["-mcpu=msp430"],
-            linker: Some("msp430-elf-gcc".into()),
-            linker_flavor: LinkerFlavor::Unix(Cc::Yes),
-
-            // There are no atomic CAS instructions available in the MSP430
-            // instruction set, and the LLVM backend doesn't currently support
-            // compiler fences so the Atomic* API is missing on this target.
- // When the LLVM backend gains support for compile fences uncomment - // the `singlethread: true` line and set `max_atomic_width` to - // `Some(16)`. - max_atomic_width: Some(0), - atomic_cas: false, - // singlethread: true, - - // Because these devices have very little resources having an - // unwinder is too onerous so we default to "abort" because the - // "unwind" strategy is very rare. - panic_strategy: PanicStrategy::Abort, - - // Similarly, one almost always never wants to use relocatable - // code because of the extra costs it involves. - relocation_model: RelocModel::Static, - - // Right now we invoke an external assembler and this isn't - // compatible with multiple codegen units, and plus we probably - // don't want to invoke that many gcc instances. - default_codegen_units: Some(1), - - // Since MSP430 doesn't meaningfully support faulting on illegal - // instructions, LLVM generates a call to abort() function instead - // of a trap instruction. Such calls are 4 bytes long, and that is - // too much overhead for such small target. - trap_unreachable: false, - - // See the thumb_base.rs file for an explanation of this value - emit_debug_gdb_scripts: false, - - eh_frame_header: false, - - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/msvc_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/msvc_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/msvc_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/msvc_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use crate::spec::{DebuginfoKind, LinkerFlavor, Lld, SplitDebuginfo, TargetOptions}; -use std::borrow::Cow; - -pub fn opts() -> TargetOptions { - // Suppress the verbose logo and authorship debugging output, which would needlessly - // clog any log files. - let pre_link_args = TargetOptions::link_args(LinkerFlavor::Msvc(Lld::No), &["/NOLOGO"]); - - TargetOptions { - linker_flavor: LinkerFlavor::Msvc(Lld::No), - dll_tls_export: false, - is_like_windows: true, - is_like_msvc: true, - pre_link_args, - abi_return_struct_as_int: true, - emit_debug_gdb_scripts: false, - - // Currently this is the only supported method of debuginfo on MSVC - // where `*.pdb` files show up next to the final artifact. 
- split_debuginfo: SplitDebuginfo::Packed, - supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Packed]), - debuginfo_kind: DebuginfoKind::Pdb, - - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/netbsd_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/netbsd_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/netbsd_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/netbsd_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -use crate::spec::{cvs, RelroLevel, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "netbsd".into(), - dynamic_linking: true, - families: cvs!["unix"], - no_default_libraries: false, - has_rpath: true, - position_independent_executables: true, - relro_level: RelroLevel::Full, - use_ctors_section: true, - default_dwarf_version: 2, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/nto_qnx_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/nto_qnx_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/nto_qnx_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/nto_qnx_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{cvs, RelroLevel, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - crt_static_respected: true, - dynamic_linking: true, - executables: true, - families: cvs!["unix"], - has_rpath: true, - has_thread_local: false, - linker: Some("qcc".into()), - os: "nto".into(), - position_independent_executables: true, - static_position_independent_executables: true, - relro_level: RelroLevel::Full, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/nvptx64_nvidia_cuda.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/nvptx64_nvidia_cuda.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/nvptx64_nvidia_cuda.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/nvptx64_nvidia_cuda.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,52 +0,0 @@ -use crate::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - arch: "nvptx64".into(), - data_layout: "e-i64:64-i128:128-v16:16-v32:32-n16:32:64".into(), - llvm_target: "nvptx64-nvidia-cuda".into(), - pointer_width: 64, - - options: TargetOptions { - os: "cuda".into(), - vendor: "nvidia".into(), - linker_flavor: LinkerFlavor::Ptx, - // The linker can be installed from `crates.io`. - linker: Some("rust-ptx-linker".into()), - - // With `ptx-linker` approach, it can be later overridden via link flags. - cpu: "sm_30".into(), - - // FIXME: create tests for the atomics. - max_atomic_width: Some(64), - - // Unwinding on CUDA is neither feasible nor useful. - panic_strategy: PanicStrategy::Abort, - - // Needed to use `dylib` and `bin` crate types and the linker. - dynamic_linking: true, - - // Avoid using dylib because it contain metadata not supported - // by LLVM NVPTX backend. - only_cdylib: true, - - // Let the `ptx-linker` to handle LLVM lowering into MC / assembly. - obj_is_bitcode: true, - - // Convenient and predicable naming scheme. 
- dll_prefix: "".into(), - dll_suffix: ".ptx".into(), - exe_suffix: ".ptx".into(), - - // Disable MergeFunctions LLVM optimisation pass because it can - // produce kernel functions that call other kernel functions. - // This behavior is not supported by PTX ISA. - merge_functions: MergeFunctions::Disabled, - - // The LLVM backend does not support stack canaries for this target - supports_stack_protector: false, - - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/openbsd_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/openbsd_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/openbsd_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/openbsd_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -use crate::spec::{cvs, FramePointer, RelroLevel, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "openbsd".into(), - dynamic_linking: true, - families: cvs!["unix"], - has_rpath: true, - abi_return_struct_as_int: true, - position_independent_executables: true, - frame_pointer: FramePointer::Always, // FIXME 43575: should be MayOmit... - relro_level: RelroLevel::Full, - default_dwarf_version: 2, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_ibm_aix.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_ibm_aix.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_ibm_aix.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_ibm_aix.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Target}; - -pub fn target() -> Target { - let mut base = super::aix_base::opts(); - base.max_atomic_width = Some(64); - base.add_pre_link_args( - LinkerFlavor::Unix(Cc::No), - &["-b64", "-bpT:0x100000000", "-bpD:0x110000000", "-bcdtors:all:0:s"], - ); - - Target { - llvm_target: "powerpc64-ibm-aix".into(), - pointer_width: 64, - data_layout: "E-m:a-Fi64-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), - arch: "powerpc64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_freebsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::freebsd_base::opts(); - base.cpu = "ppc64".into(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.max_atomic_width = Some(64); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc64-unknown-freebsd".into(), - pointer_width: 64, - data_layout: "E-m:e-Fn32-i64:64-n32:64".into(), - arch: "powerpc64".into(), - options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_linux_gnu.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); - base.cpu = "ppc64".into(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.max_atomic_width = Some(64); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc64-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "E-m:e-Fi64-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), - arch: "powerpc64".into(), - options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.cpu = "ppc64".into(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.max_atomic_width = Some(64); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc64-unknown-linux-musl".into(), - pointer_width: 64, - data_layout: "E-m:e-Fi64-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), - arch: "powerpc64".into(), - options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_openbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::openbsd_base::opts(); - base.cpu = "ppc64".into(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.max_atomic_width = Some(64); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc64-unknown-openbsd".into(), - pointer_width: 64, - data_layout: "E-m:e-Fn32-i64:64-n32:64".into(), - arch: "powerpc64".into(), - options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_wrs_vxworks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_wrs_vxworks.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_wrs_vxworks.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64_wrs_vxworks.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::vxworks_base::opts(); - base.cpu = "ppc64".into(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.max_atomic_width = Some(64); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc64-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "E-m:e-Fi64-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), - arch: "powerpc64".into(), - options: TargetOptions { endian: Endian::Big, ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_freebsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::freebsd_base::opts(); - base.cpu = "ppc64le".into(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.max_atomic_width = Some(64); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc64le-unknown-freebsd".into(), - pointer_width: 64, - data_layout: "e-m:e-Fn32-i64:64-n32:64".into(), - arch: "powerpc64".into(), - options: TargetOptions { mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); - base.cpu = "ppc64le".into(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.max_atomic_width = Some(64); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc64le-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "e-m:e-Fn32-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), - arch: "powerpc64".into(), - options: TargetOptions { mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc64le_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ 
-1,17 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.cpu = "ppc64le".into(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.max_atomic_width = Some(64); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc64le-unknown-linux-musl".into(), - pointer_width: 64, - data_layout: "e-m:e-Fn32-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), - arch: "powerpc64".into(), - options: TargetOptions { mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_freebsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::freebsd_base::opts(); - // Extra hint to linker that we are generating secure-PLT code. - base.add_pre_link_args( - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - &["-m32", "--target=powerpc-unknown-freebsd13.0"], - ); - base.max_atomic_width = Some(32); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc-unknown-freebsd13.0".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), - arch: "powerpc".into(), - options: TargetOptions { - endian: Endian::Big, - features: "+secure-plt".into(), - mcount: "_mcount".into(), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); - base.max_atomic_width = Some(32); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc-unknown-linux-gnu".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), - arch: "powerpc".into(), - options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_gnuspe.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_gnuspe.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_gnuspe.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_gnuspe.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { 
- let mut base = super::linux_gnu_base::opts(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-mspe"]); - base.max_atomic_width = Some(32); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc-unknown-linux-gnuspe".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), - arch: "powerpc".into(), - options: TargetOptions { - abi: "spe".into(), - endian: Endian::Big, - mcount: "_mcount".into(), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); - base.max_atomic_width = Some(32); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc-unknown-linux-musl".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), - arch: "powerpc".into(), - options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_netbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::netbsd_base::opts(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); - base.max_atomic_width = Some(32); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc-unknown-netbsd".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), - arch: "powerpc".into(), - options: TargetOptions { endian: Endian::Big, mcount: "__mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_openbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::openbsd_base::opts(); - base.endian = Endian::Big; - base.max_atomic_width = Some(32); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc-unknown-openbsd".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), - arch: "powerpc".into(), - options: base, - } -} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_wrs_vxworks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_wrs_vxworks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_wrs_vxworks.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_wrs_vxworks.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::vxworks_base::opts(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32", "--secure-plt"]); - base.max_atomic_width = Some(32); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc-unknown-linux-gnu".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), - arch: "powerpc".into(), - options: TargetOptions { endian: Endian::Big, features: "+secure-plt".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_wrs_vxworks_spe.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_wrs_vxworks_spe.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_wrs_vxworks_spe.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/powerpc_wrs_vxworks_spe.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::vxworks_base::opts(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-mspe", "--secure-plt"]); - base.max_atomic_width = Some(32); - base.stack_probes = StackProbeType::Inline; - - Target { - llvm_target: "powerpc-unknown-linux-gnuspe".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), - arch: "powerpc".into(), - options: TargetOptions { - abi: "spe".into(), - endian: Endian::Big, - // feature msync would disable instruction 'fsync' which is not supported by fsl_p1p2 - features: "+secure-plt,+msync".into(), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/redox_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/redox_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/redox_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/redox_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::spec::{cvs, RelroLevel, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "redox".into(), - env: "relibc".into(), - dynamic_linking: true, - families: cvs!["unix"], - has_rpath: true, - position_independent_executables: true, - relro_level: RelroLevel::Full, - has_thread_local: true, - crt_static_default: true, - crt_static_respected: true, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32gc_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32gc_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32gc_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32gc_unknown_linux_gnu.rs 1970-01-01 
00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{CodeModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "riscv32-unknown-linux-gnu".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), - arch: "riscv32".into(), - options: TargetOptions { - code_model: Some(CodeModel::Medium), - cpu: "generic-rv32".into(), - features: "+m,+a,+f,+d,+c".into(), - llvm_abiname: "ilp32d".into(), - max_atomic_width: Some(32), - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32gc_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32gc_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32gc_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32gc_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{CodeModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "riscv32-unknown-linux-musl".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), - arch: "riscv32".into(), - options: TargetOptions { - code_model: Some(CodeModel::Medium), - cpu: "generic-rv32".into(), - features: "+m,+a,+f,+d,+c".into(), - llvm_abiname: "ilp32d".into(), - max_atomic_width: Some(32), - ..super::linux_musl_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32i_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32i_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32i_unknown_none_elf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32i_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), - llvm_target: "riscv32".into(), - pointer_width: 32, - arch: "riscv32".into(), - - options: TargetOptions { - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - cpu: "generic-rv32".into(), - max_atomic_width: Some(0), - atomic_cas: false, - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - emit_debug_gdb_scripts: false, - eh_frame_header: false, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32im_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32im_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32im_unknown_none_elf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32im_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), - llvm_target: "riscv32".into(), - pointer_width: 32, - arch: "riscv32".into(), - - options: TargetOptions { - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - cpu: "generic-rv32".into(), - max_atomic_width: 
Some(0), - atomic_cas: false, - features: "+m".into(), - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - emit_debug_gdb_scripts: false, - eh_frame_header: false, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_esp_espidf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_esp_espidf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_esp_espidf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_esp_espidf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,31 +0,0 @@ -use crate::spec::{cvs, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), - llvm_target: "riscv32".into(), - pointer_width: 32, - arch: "riscv32".into(), - - options: TargetOptions { - families: cvs!["unix"], - os: "espidf".into(), - env: "newlib".into(), - vendor: "espressif".into(), - linker: Some("riscv32-esp-elf-gcc".into()), - cpu: "generic-rv32".into(), - - // As RiscV32IMAC architecture does natively support atomics, - // automatically enable the support for the Rust STD library. - max_atomic_width: Some(64), - atomic_cas: true, - - features: "+m,+a,+c".into(), - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - emit_debug_gdb_scripts: false, - eh_frame_header: false, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_unknown_none_elf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), - llvm_target: "riscv32".into(), - pointer_width: 32, - arch: "riscv32".into(), - - options: TargetOptions { - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - cpu: "generic-rv32".into(), - max_atomic_width: Some(32), - features: "+m,+a,+c".into(), - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - emit_debug_gdb_scripts: false, - eh_frame_header: false, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_unknown_xous_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_unknown_xous_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_unknown_xous_elf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imac_unknown_xous_elf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), - llvm_target: "riscv32".into(), - pointer_width: 32, - arch: "riscv32".into(), - - options: TargetOptions { - os: "xous".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: 
Some("rust-lld".into()), - cpu: "generic-rv32".into(), - max_atomic_width: Some(32), - features: "+m,+a,+c".into(), - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imc_esp_espidf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imc_esp_espidf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imc_esp_espidf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imc_esp_espidf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -use crate::spec::{cvs, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), - llvm_target: "riscv32".into(), - pointer_width: 32, - arch: "riscv32".into(), - - options: TargetOptions { - families: cvs!["unix"], - os: "espidf".into(), - env: "newlib".into(), - vendor: "espressif".into(), - linker: Some("riscv32-esp-elf-gcc".into()), - cpu: "generic-rv32".into(), - - // While the RiscV32IMC architecture does not natively support atomics, ESP-IDF does support - // the __atomic* and __sync* GCC builtins, so setting `max_atomic_width` to `Some(64)` - // and `atomic_cas` to `true` will cause the compiler to emit libcalls to these builtins. - // - // Support for atomics is necessary for the Rust STD library, which is supported by the ESP-IDF framework. - max_atomic_width: Some(64), - atomic_cas: true, - - features: "+m,+c".into(), - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - emit_debug_gdb_scripts: false, - eh_frame_header: false, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imc_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imc_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imc_unknown_none_elf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv32imc_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), - llvm_target: "riscv32".into(), - pointer_width: 32, - arch: "riscv32".into(), - - options: TargetOptions { - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - cpu: "generic-rv32".into(), - max_atomic_width: Some(0), - atomic_cas: false, - features: "+m,+c".into(), - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - emit_debug_gdb_scripts: false, - eh_frame_header: false, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64_linux_android.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64_linux_android.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64_linux_android.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64_linux_android.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{CodeModel, SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: 
"riscv64-linux-android".into(), - pointer_width: 64, - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - arch: "riscv64".into(), - options: TargetOptions { - code_model: Some(CodeModel::Medium), - cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c,+Zba,+Zbb,+Zbs".into(), - llvm_abiname: "lp64d".into(), - supported_sanitizers: SanitizerSet::ADDRESS, - max_atomic_width: Some(64), - ..super::android_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_freebsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{CodeModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "riscv64-unknown-freebsd".into(), - pointer_width: 64, - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - arch: "riscv64".into(), - options: TargetOptions { - code_model: Some(CodeModel::Medium), - cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), - llvm_abiname: "lp64d".into(), - max_atomic_width: Some(64), - ..super::freebsd_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_fuchsia.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_fuchsia.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_fuchsia.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_fuchsia.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{CodeModel, SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "riscv64-unknown-fuchsia".into(), - pointer_width: 64, - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - arch: "riscv64".into(), - options: TargetOptions { - code_model: Some(CodeModel::Medium), - cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), - llvm_abiname: "lp64d".into(), - max_atomic_width: Some(64), - supported_sanitizers: SanitizerSet::SHADOWCALLSTACK, - ..super::fuchsia_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_hermit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_hermit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_hermit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_hermit.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::spec::{CodeModel, RelocModel, Target, TargetOptions, TlsModel}; - -pub fn target() -> Target { - Target { - llvm_target: "riscv64-unknown-hermit".into(), - pointer_width: 64, - arch: "riscv64".into(), - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - options: TargetOptions { - cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), - relocation_model: RelocModel::Pic, - code_model: Some(CodeModel::Medium), - tls_model: TlsModel::LocalExec, - max_atomic_width: Some(64), - llvm_abiname: "lp64d".into(), - ..super::hermit_base::opts() - }, - } -} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{CodeModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "riscv64-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - arch: "riscv64".into(), - options: TargetOptions { - code_model: Some(CodeModel::Medium), - cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), - llvm_abiname: "lp64d".into(), - max_atomic_width: Some(64), - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{CodeModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "riscv64-unknown-linux-musl".into(), - pointer_width: 64, - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - arch: "riscv64".into(), - options: TargetOptions { - code_model: Some(CodeModel::Medium), - cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), - llvm_abiname: "lp64d".into(), - max_atomic_width: Some(64), - ..super::linux_musl_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_netbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{CodeModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "riscv64-unknown-netbsd".into(), - pointer_width: 64, - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - arch: "riscv64".into(), - options: TargetOptions { - code_model: Some(CodeModel::Medium), - cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), - llvm_abiname: "lp64d".into(), - max_atomic_width: Some(64), - mcount: "__mcount".into(), - ..super::netbsd_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_none_elf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -use crate::spec::{Cc, CodeModel, LinkerFlavor, Lld, PanicStrategy}; -use crate::spec::{RelocModel, Target, 
TargetOptions}; - -use super::SanitizerSet; - -pub fn target() -> Target { - Target { - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - llvm_target: "riscv64".into(), - pointer_width: 64, - arch: "riscv64".into(), - - options: TargetOptions { - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - llvm_abiname: "lp64d".into(), - cpu: "generic-rv64".into(), - max_atomic_width: Some(64), - features: "+m,+a,+f,+d,+c".into(), - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - code_model: Some(CodeModel::Medium), - emit_debug_gdb_scripts: false, - eh_frame_header: false, - supported_sanitizers: SanitizerSet::KERNELADDRESS, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_openbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64gc_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{CodeModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "riscv64-unknown-openbsd".into(), - pointer_width: 64, - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - arch: "riscv64".into(), - options: TargetOptions { - code_model: Some(CodeModel::Medium), - cpu: "generic-rv64".into(), - features: "+m,+a,+f,+d,+c".into(), - llvm_abiname: "lp64d".into(), - max_atomic_width: Some(64), - ..super::openbsd_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64imac_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64imac_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64imac_unknown_none_elf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/riscv64imac_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use crate::spec::{Cc, CodeModel, LinkerFlavor, Lld, PanicStrategy}; -use crate::spec::{RelocModel, SanitizerSet, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), - llvm_target: "riscv64".into(), - pointer_width: 64, - arch: "riscv64".into(), - - options: TargetOptions { - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - cpu: "generic-rv64".into(), - max_atomic_width: Some(64), - features: "+m,+a,+c".into(), - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - code_model: Some(CodeModel::Medium), - emit_debug_gdb_scripts: false, - eh_frame_header: false, - supported_sanitizers: SanitizerSet::KERNELADDRESS, - ..Default::default() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/s390x_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/s390x_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/s390x_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/s390x_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{SanitizerSet, StackProbeType, 
Target}; - -pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); - base.endian = Endian::Big; - // z10 is the oldest CPU supported by LLVM - base.cpu = "z10".into(); - // FIXME: The ABI implementation in cabi_s390x.rs is for now hard-coded to assume the no-vector - // ABI. Pass the -vector feature string to LLVM to respect this assumption. On LLVM < 16, we - // also strip v128 from the data_layout below to match the older LLVM's expectation. - base.features = "-vector".into(); - base.max_atomic_width = Some(64); - base.min_global_align = Some(16); - base.stack_probes = StackProbeType::Inline; - base.supported_sanitizers = - SanitizerSet::ADDRESS | SanitizerSet::LEAK | SanitizerSet::MEMORY | SanitizerSet::THREAD; - - Target { - llvm_target: "s390x-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "E-m:e-i1:8:16-i8:8:16-i64:64-f128:64-v128:64-a:8:16-n32:64".into(), - arch: "s390x".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/s390x_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/s390x_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/s390x_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/s390x_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{SanitizerSet, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.endian = Endian::Big; - // z10 is the oldest CPU supported by LLVM - base.cpu = "z10".into(); - // FIXME: The ABI implementation in cabi_s390x.rs is for now hard-coded to assume the no-vector - // ABI. Pass the -vector feature string to LLVM to respect this assumption. On LLVM < 16, we - // also strip v128 from the data_layout below to match the older LLVM's expectation. 
- base.features = "-vector".into(); - base.max_atomic_width = Some(64); - base.min_global_align = Some(16); - base.static_position_independent_executables = true; - base.stack_probes = StackProbeType::Inline; - base.supported_sanitizers = - SanitizerSet::ADDRESS | SanitizerSet::LEAK | SanitizerSet::MEMORY | SanitizerSet::THREAD; - - Target { - llvm_target: "s390x-unknown-linux-musl".into(), - pointer_width: 64, - data_layout: "E-m:e-i1:8:16-i8:8:16-i64:64-f128:64-v128:64-a:8:16-n32:64".into(), - arch: "s390x".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/solaris_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/solaris_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/solaris_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/solaris_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -use crate::spec::{cvs, Cc, LinkerFlavor, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "solaris".into(), - dynamic_linking: true, - has_rpath: true, - families: cvs!["unix"], - is_like_solaris: true, - linker_flavor: LinkerFlavor::Unix(Cc::Yes), - limit_rdylib_exports: false, // Linker doesn't support this - eh_frame_header: false, - - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/solid_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/solid_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/solid_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/solid_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -use super::FramePointer; -use crate::spec::TargetOptions; - -pub fn opts(kernel: &str) -> TargetOptions { - TargetOptions { - os: format!("solid_{kernel}").into(), - vendor: "kmc".into(), - executables: false, - frame_pointer: FramePointer::NonLeaf, - has_thread_local: true, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::abi::Endian; -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); - base.endian = Endian::Big; - base.cpu = "v9".into(); - base.max_atomic_width = Some(64); - - Target { - llvm_target: "sparc64-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "E-m:e-i64:64-n32:64-S128".into(), - arch: "sparc64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_netbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, Target, TargetOptions}; - -pub fn target() -> Target { 
- let mut base = super::netbsd_base::opts(); - base.cpu = "v9".into(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.max_atomic_width = Some(64); - - Target { - llvm_target: "sparc64-unknown-netbsd".into(), - pointer_width: 64, - data_layout: "E-m:e-i64:64-n32:64-S128".into(), - arch: "sparc64".into(), - options: TargetOptions { endian: Endian::Big, mcount: "__mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_openbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc64_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, Target}; - -pub fn target() -> Target { - let mut base = super::openbsd_base::opts(); - base.endian = Endian::Big; - base.cpu = "v9".into(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.max_atomic_width = Some(64); - - Target { - llvm_target: "sparc64-unknown-openbsd".into(), - pointer_width: 64, - data_layout: "E-m:e-i64:64-n32:64-S128".into(), - arch: "sparc64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, Target}; - -pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); - base.endian = Endian::Big; - base.cpu = "v9".into(); - base.max_atomic_width = Some(32); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-mv8plus"]); - - Target { - llvm_target: "sparc-unknown-linux-gnu".into(), - pointer_width: 32, - data_layout: "E-m:e-p:32:32-i64:64-f128:64-n32-S64".into(), - arch: "sparc".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc_unknown_none_elf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparc_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - let options = TargetOptions { - linker_flavor: LinkerFlavor::Gnu(Cc::Yes, Lld::No), - linker: Some("sparc-elf-gcc".into()), - endian: Endian::Big, - cpu: "v7".into(), - abi: "elf".into(), - max_atomic_width: Some(32), - atomic_cas: true, - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - no_default_libraries: false, - emit_debug_gdb_scripts: false, - eh_frame_header: false, - ..Default::default() - }; - Target { - data_layout: "E-m:e-p:32:32-i64:64-f128:64-n32-S64".into(), - llvm_target: "sparc-unknown-none-elf".into(), 
- pointer_width: 32, - arch: "sparc".into(), - options, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparcv9_sun_solaris.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparcv9_sun_solaris.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparcv9_sun_solaris.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/sparcv9_sun_solaris.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -use crate::abi::Endian; -use crate::spec::{Cc, LinkerFlavor, Target}; - -pub fn target() -> Target { - let mut base = super::solaris_base::opts(); - base.endian = Endian::Big; - base.add_pre_link_args(LinkerFlavor::Unix(Cc::Yes), &["-m64"]); - // llvm calls this "v9" - base.cpu = "v9".into(); - base.vendor = "sun".into(); - base.max_atomic_width = Some(64); - - Target { - llvm_target: "sparcv9-sun-solaris".into(), - pointer_width: 64, - data_layout: "E-m:e-i64:64-n32:64-S128".into(), - // Use "sparc64" instead of "sparcv9" here, since the former is already - // used widely in the source base. If we ever needed ABI - // differentiation from the sparc64, we could, but that would probably - // just be confusing. - arch: "sparc64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_darwin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_darwin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_darwin.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_darwin.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +use crate::spec::base::apple::{macos_llvm_target, opts, Arch}; +use crate::spec::{FramePointer, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::Arm64; + let mut base = opts("macos", arch); + base.cpu = "apple-m1".into(); + base.max_atomic_width = Some(128); + + // FIXME: The leak sanitizer currently fails the tests, see #88132. + base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::THREAD; + + Target { + // Clang automatically chooses a more specific target based on + // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work + // correctly, we do too. 
+ llvm_target: macos_llvm_target(arch).into(), + pointer_width: 64, + data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), + arch: arch.target_arch(), + options: TargetOptions { + mcount: "\u{1}mcount".into(), + frame_pointer: FramePointer::NonLeaf, + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,37 @@ +use crate::spec::base::apple::{ios_llvm_target, opts, Arch}; +use crate::spec::{FramePointer, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::Arm64; + let mut base = opts("ios", arch); + base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::THREAD; + + Target { + // Clang automatically chooses a more specific target based on + // IPHONEOS_DEPLOYMENT_TARGET. + // This is required for the target to pick the right + // MACH-O commands, so we do too. + llvm_target: ios_llvm_target(arch).into(), + pointer_width: 64, + data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), + arch: arch.target_arch(), + options: TargetOptions { + features: "+neon,+fp-armv8,+apple-a7".into(), + max_atomic_width: Some(128), + forces_embed_bitcode: true, + frame_pointer: FramePointer::NonLeaf, + // Taken from a clang build on Xcode 11.4.1. + // These arguments are not actually invoked - they just have + // to look right to pass App Store validation. + bitcode_llvm_cmdline: "-triple\0\ + arm64-apple-ios11.0.0\0\ + -emit-obj\0\ + -disable-llvm-passes\0\ + -target-abi\0\ + darwinpcs\0\ + -Os\0" + .into(), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios_macabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios_macabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios_macabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios_macabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,34 @@ +use crate::spec::base::apple::{opts, Arch}; +use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + let llvm_target = "arm64-apple-ios14.0-macabi"; + + let arch = Arch::Arm64_macabi; + let mut base = opts("ios", arch); + base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-target", llvm_target]); + base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::LEAK | SanitizerSet::THREAD; + + Target { + llvm_target: llvm_target.into(), + pointer_width: 64, + data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), + arch: arch.target_arch(), + options: TargetOptions { + features: "+neon,+fp-armv8,+apple-a12".into(), + max_atomic_width: Some(128), + forces_embed_bitcode: true, + frame_pointer: FramePointer::NonLeaf, + // Taken from a clang build on Xcode 11.4.1. + // These arguments are not actually invoked - they just have + // to look right to pass App Store validation. 
+ bitcode_llvm_cmdline: "-triple\0\ + arm64-apple-ios-macabi\0\ + -emit-obj\0\ + -disable-llvm-passes\0\ + -Os\0" + .into(), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios_sim.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios_sim.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios_sim.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_ios_sim.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,37 @@ +use crate::spec::base::apple::{ios_sim_llvm_target, opts, Arch}; +use crate::spec::{FramePointer, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::Arm64_sim; + let mut base = opts("ios", arch); + base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::THREAD; + + Target { + // Clang automatically chooses a more specific target based on + // IPHONEOS_DEPLOYMENT_TARGET. + // This is required for the simulator target to pick the right + // MACH-O commands, so we do too. + llvm_target: ios_sim_llvm_target(arch).into(), + pointer_width: 64, + data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), + arch: arch.target_arch(), + options: TargetOptions { + features: "+neon,+fp-armv8,+apple-a7".into(), + max_atomic_width: Some(128), + forces_embed_bitcode: true, + frame_pointer: FramePointer::NonLeaf, + // Taken from a clang build on Xcode 11.4.1. + // These arguments are not actually invoked - they just have + // to look right to pass App Store validation. + bitcode_llvm_cmdline: "-triple\0\ + arm64-apple-ios14.0-simulator\0\ + -emit-obj\0\ + -disable-llvm-passes\0\ + -target-abi\0\ + darwinpcs\0\ + -Os\0" + .into(), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_tvos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_tvos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_tvos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_tvos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::base::apple::{opts, tvos_llvm_target, Arch}; +use crate::spec::{FramePointer, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::Arm64; + Target { + llvm_target: tvos_llvm_target(arch).into(), + pointer_width: 64, + data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), + arch: arch.target_arch(), + options: TargetOptions { + features: "+neon,+fp-armv8,+apple-a7".into(), + max_atomic_width: Some(128), + forces_embed_bitcode: true, + frame_pointer: FramePointer::NonLeaf, + ..opts("tvos", arch) + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_tvos_sim.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_tvos_sim.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_tvos_sim.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_tvos_sim.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,31 @@ +use crate::spec::base::apple::{opts, tvos_sim_llvm_target, Arch}; +use crate::spec::{FramePointer, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::Arm64_sim; + 
Target { + llvm_target: tvos_sim_llvm_target(arch).into(), + pointer_width: 64, + data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), + arch: arch.target_arch(), + options: TargetOptions { + features: "+neon,+fp-armv8,+apple-a7".into(), + max_atomic_width: Some(128), + forces_embed_bitcode: true, + frame_pointer: FramePointer::NonLeaf, + // Taken from (and slightly modified) the aarch64-apple-ios-sim spec which says: + // Taken from a clang build on Xcode 11.4.1. + // These arguments are not actually invoked - they just have + // to look right to pass App Store validation. + bitcode_llvm_cmdline: "-triple\0\ + arm64-apple-tvos15.0-simulator\0\ + -emit-obj\0\ + -disable-llvm-passes\0\ + -target-abi\0\ + darwinpcs\0\ + -Os\0" + .into(), + ..opts("tvos", arch) + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_watchos_sim.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_watchos_sim.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_watchos_sim.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_apple_watchos_sim.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,34 @@ +use crate::spec::base::apple::{opts, watchos_sim_llvm_target, Arch}; +use crate::spec::{FramePointer, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::Arm64_sim; + Target { + // Clang automatically chooses a more specific target based on + // WATCHOS_DEPLOYMENT_TARGET. + // This is required for the simulator target to pick the right + // MACH-O commands, so we do too. + llvm_target: watchos_sim_llvm_target(arch).into(), + pointer_width: 64, + data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(), + arch: arch.target_arch(), + options: TargetOptions { + features: "+neon,+fp-armv8,+apple-a7".into(), + max_atomic_width: Some(128), + forces_embed_bitcode: true, + frame_pointer: FramePointer::NonLeaf, + // Taken from a clang build on Xcode 11.4.1. + // These arguments are not actually invoked - they just have + // to look right to pass App Store validation. 
+ bitcode_llvm_cmdline: "-triple\0\ + arm64-apple-watchos5.0-simulator\0\ + -emit-obj\0\ + -disable-llvm-passes\0\ + -target-abi\0\ + darwinpcs\0\ + -Os\0" + .into(), + ..opts("watchos", arch) + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "aarch64_be-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+v8a,+outline-atomics".into(), + max_atomic_width: Some(128), + mcount: "\u{1}_mcount".into(), + endian: Endian::Big, + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_gnu_ilp32.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_gnu_ilp32.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_gnu_ilp32.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_gnu_ilp32.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.max_atomic_width = Some(128); + + Target { + llvm_target: "aarch64_be-unknown-linux-gnu_ilp32".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + abi: "ilp32".into(), + features: "+v8a,+outline-atomics".into(), + mcount: "\u{1}_mcount".into(), + endian: Endian::Big, + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_netbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "aarch64_be-unknown-netbsd".into(), + pointer_width: 64, + data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + mcount: "__mcount".into(), + max_atomic_width: Some(128), + endian: Endian::Big, + ..base::netbsd::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_fuchsia.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_fuchsia.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_fuchsia.rs 
1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_fuchsia.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1 @@ +pub use crate::spec::targets::aarch64_unknown_fuchsia::target; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_kmc_solid_asp3.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_kmc_solid_asp3.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_kmc_solid_asp3.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_kmc_solid_asp3.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + let base = base::solid::opts("asp3"); + Target { + llvm_target: "aarch64-unknown-none".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + linker: Some("aarch64-kmc-elf-gcc".into()), + features: "+v8a,+neon,+fp-armv8".into(), + relocation_model: RelocModel::Static, + disable_redzone: true, + max_atomic_width: Some(128), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_linux_android.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_linux_android.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_linux_android.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_linux_android.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::spec::{base, SanitizerSet, Target, TargetOptions}; + +// See https://developer.android.com/ndk/guides/abis.html#arm64-v8a +// for target ABI requirements. + +pub fn target() -> Target { + Target { + llvm_target: "aarch64-linux-android".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + max_atomic_width: Some(128), + // As documented in https://developer.android.com/ndk/guides/cpu-features.html + // the neon (ASIMD) and FP must exist on all android aarch64 targets. + features: "+v8a,+neon,+fp-armv8".into(), + supported_sanitizers: SanitizerSet::CFI + | SanitizerSet::HWADDRESS + | SanitizerSet::MEMTAG + | SanitizerSet::SHADOWCALLSTACK + | SanitizerSet::ADDRESS, + supports_xray: true, + ..base::android::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelroLevel, Target, TargetOptions}; + +const LINKER_SCRIPT: &str = include_str!("./aarch64_nintendo_switch_freestanding_linker_script.ld"); + +/// A base target for Nintendo Switch devices using a pure LLVM toolchain. 
+pub fn target() -> Target { + Target { + llvm_target: "aarch64-unknown-none".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+v8a".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + link_script: Some(LINKER_SCRIPT.into()), + os: "horizon".into(), + max_atomic_width: Some(128), + panic_strategy: PanicStrategy::Abort, + position_independent_executables: true, + dynamic_linking: true, + relro_level: RelroLevel::Off, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding_linker_script.ld rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding_linker_script.ld --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding_linker_script.ld 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_nintendo_switch_freestanding_linker_script.ld 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,78 @@ +OUTPUT_FORMAT(elf64-littleaarch64) +OUTPUT_ARCH(aarch64) +ENTRY(_start) + +PHDRS +{ + text PT_LOAD FLAGS(5); + rodata PT_LOAD FLAGS(4); + data PT_LOAD FLAGS(6); + bss PT_LOAD FLAGS(6); + dynamic PT_DYNAMIC; +} + +SECTIONS +{ + . = 0; + + .text : ALIGN(0x1000) { + HIDDEN(__text_start = .); + KEEP(*(.text.jmp)) + + . = 0x80; + + *(.text .text.*) + *(.plt .plt.*) + } + + /* Read-only sections */ + + . = ALIGN(0x1000); + + .module_name : { *(.module_name) } :rodata + + .rodata : { *(.rodata .rodata.*) } :rodata + .hash : { *(.hash) } + .dynsym : { *(.dynsym .dynsym.*) } + .dynstr : { *(.dynstr .dynstr.*) } + .rela.dyn : { *(.rela.dyn) } + + .eh_frame : { + HIDDEN(__eh_frame_start = .); + *(.eh_frame .eh_frame.*) + HIDDEN(__eh_frame_end = .); + } + + .eh_frame_hdr : { + HIDDEN(__eh_frame_hdr_start = .); + *(.eh_frame_hdr .eh_frame_hdr.*) + HIDDEN(__eh_frame_hdr_end = .); + } + + /* Read-write sections */ + + . = ALIGN(0x1000); + + .data : { + *(.data .data.*) + *(.got .got.*) + *(.got.plt .got.plt.*) + } :data + + .dynamic : { + HIDDEN(__dynamic_start = .); + *(.dynamic) + } + + /* BSS section */ + + . = ALIGN(0x1000); + + .bss : { + HIDDEN(__bss_start = .); + *(.bss .bss.*) + *(COMMON) + . 
= ALIGN(8); + HIDDEN(__bss_end = .); + } :bss +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_gnullvm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_gnullvm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_gnullvm.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_gnullvm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +use crate::spec::{base, Target}; + +pub fn target() -> Target { + let mut base = base::windows_gnullvm::opts(); + base.max_atomic_width = Some(128); + base.features = "+v8a,+neon,+fp-armv8".into(); + base.linker = Some("aarch64-w64-mingw32-clang".into()); + + Target { + llvm_target: "aarch64-pc-windows-gnu".into(), + pointer_width: 64, + data_layout: "e-m:w-p:64:64-i32:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +use crate::spec::{base, Target}; + +pub fn target() -> Target { + let mut base = base::windows_msvc::opts(); + base.max_atomic_width = Some(128); + base.features = "+v8a,+neon,+fp-armv8".into(); + + Target { + llvm_target: "aarch64-pc-windows-msvc".into(), + pointer_width: 64, + data_layout: "e-m:w-p:64:64-i32:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_freebsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "aarch64-unknown-freebsd".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+v8a".into(), + max_atomic_width: Some(128), + supported_sanitizers: SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::MEMORY + | SanitizerSet::THREAD, + ..base::freebsd::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_fuchsia.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_fuchsia.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_fuchsia.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_fuchsia.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + 
llvm_target: "aarch64-unknown-fuchsia".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+v8a".into(), + max_atomic_width: Some(128), + supported_sanitizers: SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::SHADOWCALLSTACK, + ..base::fuchsia::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_hermit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_hermit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_hermit.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_hermit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "aarch64-unknown-hermit".into(), + pointer_width: 64, + arch: "aarch64".into(), + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + options: TargetOptions { + features: "+v8a,+strict-align,+neon,+fp-armv8".into(), + max_atomic_width: Some(128), + ..base::hermit::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +use crate::spec::{base, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "aarch64-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+v8a,+outline-atomics".into(), + mcount: "\u{1}_mcount".into(), + max_atomic_width: Some(128), + supported_sanitizers: SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::LEAK + | SanitizerSet::MEMORY + | SanitizerSet::MEMTAG + | SanitizerSet::THREAD + | SanitizerSet::HWADDRESS, + supports_xray: true, + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_gnu_ilp32.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_gnu_ilp32.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_gnu_ilp32.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_gnu_ilp32.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "aarch64-unknown-linux-gnu_ilp32".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + abi: "ilp32".into(), + features: "+v8a,+outline-atomics".into(), + max_atomic_width: Some(128), + mcount: "\u{1}_mcount".into(), + ..base::linux_gnu::opts() + }, + } +} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::spec::{base, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.max_atomic_width = Some(128); + base.supports_xray = true; + base.features = "+v8a".into(); + base.supported_sanitizers = SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::LEAK + | SanitizerSet::MEMORY + | SanitizerSet::THREAD; + + Target { + llvm_target: "aarch64-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { mcount: "\u{1}_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_ohos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_ohos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_ohos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_linux_ohos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +use crate::spec::SanitizerSet; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_ohos::opts(); + base.max_atomic_width = Some(128); + + Target { + // LLVM 15 doesn't support OpenHarmony yet, use a linux target instead. 
+ llvm_target: "aarch64-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+reserve-x18".into(), + mcount: "\u{1}_mcount".into(), + supported_sanitizers: SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::LEAK + | SanitizerSet::MEMORY + | SanitizerSet::MEMTAG + | SanitizerSet::THREAD + | SanitizerSet::HWADDRESS, + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_netbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "aarch64-unknown-netbsd".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+v8a".into(), + mcount: "__mcount".into(), + max_atomic_width: Some(128), + ..base::netbsd::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_none.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,32 @@ +// Generic AArch64 target for bare-metal code - Floating point enabled +// +// Can be used in conjunction with the `target-feature` and +// `target-cpu` compiler flags to opt-in more hardware-specific +// features. +// +// For example, `-C target-cpu=cortex-a53`. 
+ +use crate::spec::{ + Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, SanitizerSet, Target, TargetOptions, +}; + +pub fn target() -> Target { + let opts = TargetOptions { + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + features: "+v8a,+strict-align,+neon,+fp-armv8".into(), + supported_sanitizers: SanitizerSet::KCFI | SanitizerSet::KERNELADDRESS, + relocation_model: RelocModel::Static, + disable_redzone: true, + max_atomic_width: Some(128), + panic_strategy: PanicStrategy::Abort, + ..Default::default() + }; + Target { + llvm_target: "aarch64-unknown-none".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: opts, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_none_softfloat.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_none_softfloat.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_none_softfloat.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_none_softfloat.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,30 @@ +// Generic AArch64 target for bare-metal code - Floating point disabled +// +// Can be used in conjunction with the `target-feature` and +// `target-cpu` compiler flags to opt-in more hardware-specific +// features. +// +// For example, `-C target-cpu=cortex-a53`. + +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + let opts = TargetOptions { + abi: "softfloat".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + features: "+v8a,+strict-align,-neon,-fp-armv8".into(), + relocation_model: RelocModel::Static, + disable_redzone: true, + max_atomic_width: Some(128), + panic_strategy: PanicStrategy::Abort, + ..Default::default() + }; + Target { + llvm_target: "aarch64-unknown-none".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: opts, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_nto_qnx_710.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_nto_qnx_710.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_nto_qnx_710.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_nto_qnx_710.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,29 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "aarch64-unknown-unknown".into(), + pointer_width: 64, + // from: https://llvm.org/docs/LangRef.html#data-layout + // e = little endian + // m:e = ELF mangling: Private symbols get a .L prefix + // i8:8:32 = 8-bit-integer, minimum_alignment=8, preferred_alignment=32 + // i16:16:32 = 16-bit-integer, minimum_alignment=16, preferred_alignment=32 + // i64:64 = 64-bit-integer, minimum_alignment=64, preferred_alignment=64 + // i128:128 = 128-bit-integer, minimum_alignment=128, preferred_alignment=128 + // n32:64 = 32 and 64 are native integer widths; Elements of this set are considered to support most general arithmetic operations efficiently. 
+ // S128 = 128 bits are the natural alignment of the stack in bits. + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+v8a".into(), + max_atomic_width: Some(128), + pre_link_args: TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + &["-Vgcc_ntoaarch64le_cxx"], + ), + env: "nto71".into(), + ..base::nto_qnx::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_openbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "aarch64-unknown-openbsd".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+v8a".into(), + max_atomic_width: Some(128), + ..base::openbsd::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_redox.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_redox.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_redox.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_redox.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +use crate::spec::{base, Target}; + +pub fn target() -> Target { + let mut base = base::redox::opts(); + base.max_atomic_width = Some(128); + base.features = "+v8a".into(); + + Target { + llvm_target: "aarch64-unknown-redox".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_teeos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_teeos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_teeos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_teeos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +use crate::spec::{base, Target}; + +pub fn target() -> Target { + let mut base = base::teeos::opts(); + base.features = "+strict-align,+neon,+fp-armv8".into(); + base.max_atomic_width = Some(128); + base.linker = Some("aarch64-linux-gnu-ld".into()); + + Target { + llvm_target: "aarch64-unknown-none".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_uefi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_uefi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_uefi.rs 1970-01-01 00:00:00.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_unknown_uefi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +// This defines the aarch64 target for UEFI systems as described in the UEFI specification. See the +// uefi-base module for generic UEFI options. + +use crate::spec::{base, LinkerFlavor, Lld, Target}; + +pub fn target() -> Target { + let mut base = base::uefi_msvc::opts(); + + base.max_atomic_width = Some(128); + base.add_pre_link_args(LinkerFlavor::Msvc(Lld::No), &["/machine:arm64"]); + base.features = "+v8a".into(); + + Target { + llvm_target: "aarch64-unknown-windows".into(), + pointer_width: 64, + data_layout: "e-m:w-p:64:64-i32:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_uwp_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_uwp_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_uwp_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_uwp_windows_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +use crate::spec::{base, Target}; + +pub fn target() -> Target { + let mut base = base::windows_uwp_msvc::opts(); + base.max_atomic_width = Some(128); + base.features = "+v8a".into(); + + Target { + llvm_target: "aarch64-pc-windows-msvc".into(), + pointer_width: 64, + data_layout: "e-m:w-p:64:64-i32:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_wrs_vxworks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_wrs_vxworks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_wrs_vxworks.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/aarch64_wrs_vxworks.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "aarch64-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+v8a".into(), + max_atomic_width: Some(128), + ..base::vxworks::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm64_32_apple_watchos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm64_32_apple_watchos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm64_32_apple_watchos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm64_32_apple_watchos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,30 @@ +use crate::spec::base::apple::{opts, Arch}; +use crate::spec::{Target, TargetOptions}; + +pub fn target() -> Target { + let base = opts("watchos", Arch::Arm64_32); + Target { + llvm_target: "arm64_32-apple-watchos".into(), + pointer_width: 32, + data_layout: "e-m:o-p:32:32-i64:64-i128:128-n32:64-S128".into(), + arch: "aarch64".into(), + options: TargetOptions { + features: "+v8a,+neon,+fp-armv8,+apple-a7".into(), + max_atomic_width: Some(128), + forces_embed_bitcode: true, + dynamic_linking: false, + 
position_independent_executables: true, + // These arguments are not actually invoked - they just have + // to look right to pass App Store validation. + bitcode_llvm_cmdline: "-triple\0\ + arm64_32-apple-watchos5.0.0\0\ + -emit-obj\0\ + -disable-llvm-passes\0\ + -target-abi\0\ + darwinpcs\0\ + -Os\0" + .into(), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_linux_androideabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_linux_androideabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_linux_androideabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_linux_androideabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "arm-linux-androideabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + // https://developer.android.com/ndk/guides/abis.html#armeabi + features: "+strict-align,+v5te".into(), + supported_sanitizers: SanitizerSet::ADDRESS, + max_atomic_width: Some(32), + ..base::android::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_gnueabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_gnueabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_gnueabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_gnueabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "arm-unknown-linux-gnueabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + features: "+strict-align,+v6".into(), + max_atomic_width: Some(64), + mcount: "\u{1}__gnu_mcount_nc".into(), + llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_gnueabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_gnueabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_gnueabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_gnueabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "arm-unknown-linux-gnueabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabihf".into(), + features: "+strict-align,+v6,+vfp2,-d32".into(), + max_atomic_width: Some(64), + mcount: "\u{1}__gnu_mcount_nc".into(), + llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), + ..base::linux_gnu::opts() + }, + } +} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_musleabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_musleabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_musleabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_musleabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,22 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + // It's important we use "gnueabi" and not "musleabi" here. LLVM uses it + // to determine the calling convention and float ABI, and it doesn't + // support the "musleabi" value. + llvm_target: "arm-unknown-linux-gnueabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + // Most of these settings are copied from the arm_unknown_linux_gnueabi + // target. + features: "+strict-align,+v6".into(), + max_atomic_width: Some(64), + mcount: "\u{1}mcount".into(), + ..base::linux_musl::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_musleabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_musleabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_musleabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/arm_unknown_linux_musleabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,22 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + // It's important we use "gnueabihf" and not "musleabihf" here. LLVM + // uses it to determine the calling convention and float ABI, and it + // doesn't support the "musleabihf" value. + llvm_target: "arm-unknown-linux-gnueabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabihf".into(), + // Most of these settings are copied from the arm_unknown_linux_gnueabihf + // target. 
+ features: "+strict-align,+v6,+vfp2,-d32".into(), + max_atomic_width: Some(64), + mcount: "\u{1}mcount".into(), + ..base::linux_musl::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armeb_unknown_linux_gnueabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armeb_unknown_linux_gnueabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armeb_unknown_linux_gnueabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armeb_unknown_linux_gnueabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armeb-unknown-linux-gnueabi".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + features: "+strict-align,+v8,+crc".into(), + endian: Endian::Big, + max_atomic_width: Some(64), + mcount: "\u{1}__gnu_mcount_nc".into(), + llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armebv7r_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armebv7r_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armebv7r_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armebv7r_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +// Targets the Big endian Cortex-R4/R5 processor (ARMv7-R) + +use crate::abi::Endian; +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armebv7r-none-eabi".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + endian: Endian::Big, + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + relocation_model: RelocModel::Static, + panic_strategy: PanicStrategy::Abort, + max_atomic_width: Some(64), + emit_debug_gdb_scripts: false, + // GCC defaults to 8 for arm-none here. 
+ c_enum_min_bits: Some(8), + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armebv7r_none_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armebv7r_none_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armebv7r_none_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armebv7r_none_eabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +// Targets the Cortex-R4F/R5F processor (ARMv7-R) + +use crate::abi::Endian; +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armebv7r-none-eabihf".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabihf".into(), + endian: Endian::Big, + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + relocation_model: RelocModel::Static, + panic_strategy: PanicStrategy::Abort, + features: "+vfp3,-d32,-fp16".into(), + max_atomic_width: Some(64), + emit_debug_gdb_scripts: false, + // GCC defaults to 8 for arm-none here. + c_enum_min_bits: Some(8), + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv4t_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv4t_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv4t_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv4t_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,49 @@ +//! Targets the ARMv4T, with code as `a32` code by default. +//! +//! Primarily of use for the GBA, but usable with other devices too. +//! +//! Please ping @Lokathor if changes are needed. +//! +//! **Important:** This target profile **does not** specify a linker script. You +//! just get the default link script when you build a binary for this target. +//! The default link script is very likely wrong, so you should use +//! `-Clink-arg=-Tmy_script.ld` to override that with a correct linker script. + +use crate::spec::{cvs, Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv4t-none-eabi".into(), + pointer_width: 32, + arch: "arm".into(), + /* Data layout args are '-' separated: + * little endian + * stack is 64-bit aligned (EABI) + * pointers are 32-bit + * i64 must be 64-bit aligned (EABI) + * mangle names with ELF style + * native integers are 32-bit + * All other elements are default + */ + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + options: TargetOptions { + abi: "eabi".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + asm_args: cvs!["-mthumb-interwork", "-march=armv4t", "-mlittle-endian",], + // Force-enable 32-bit atomics, which allows the use of atomic load/store only. + // The resulting atomics are ABI incompatible with atomics backed by libatomic. 
+ features: "+soft-float,+strict-align,+atomics-32".into(), + main_needs_argc_argv: false, + atomic_cas: false, + has_thumb_interworking: true, + relocation_model: RelocModel::Static, + panic_strategy: PanicStrategy::Abort, + // From thumb_base, rust-lang/rust#44993. + emit_debug_gdb_scripts: false, + // From thumb_base, GCC gives enums a minimum of 8 bits on no-os targets. + c_enum_min_bits: Some(8), + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv4t_unknown_linux_gnueabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv4t_unknown_linux_gnueabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv4t_unknown_linux_gnueabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv4t_unknown_linux_gnueabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv4t-unknown-linux-gnueabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + features: "+soft-float,+strict-align".into(), + // Atomic operations provided by compiler-builtins + max_atomic_width: Some(32), + mcount: "\u{1}__gnu_mcount_nc".into(), + llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), + has_thumb_interworking: true, + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,41 @@ +//! Targets the ARMv5TE, with code as `a32` code by default. + +use crate::spec::{base, cvs, FramePointer, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv5te-none-eabi".into(), + pointer_width: 32, + arch: "arm".into(), + /* Data layout args are '-' separated: + * little endian + * stack is 64-bit aligned (EABI) + * pointers are 32-bit + * i64 must be 64-bit aligned (EABI) + * mangle names with ELF style + * native integers are 32-bit + * All other elements are default + */ + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + + options: TargetOptions { + abi: "eabi".into(), + // extra args passed to the external assembler (assuming `arm-none-eabi-as`): + // * activate t32/a32 interworking + // * use arch ARMv5TE + // * use little-endian + asm_args: cvs!["-mthumb-interwork", "-march=armv5te", "-mlittle-endian",], + // minimum extra features, these cannot be disabled via -C + // Also force-enable 32-bit atomics, which allows the use of atomic load/store only. + // The resulting atomics are ABI incompatible with atomics backed by libatomic. 
+ features: "+soft-float,+strict-align,+atomics-32".into(), + frame_pointer: FramePointer::MayOmit, + main_needs_argc_argv: false, + // don't have atomic compare-and-swap + atomic_cas: false, + has_thumb_interworking: true, + + ..base::thumb::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_gnueabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_gnueabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_gnueabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_gnueabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv5te-unknown-linux-gnueabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + features: "+soft-float,+strict-align".into(), + // Atomic operations provided by compiler-builtins + max_atomic_width: Some(32), + mcount: "\u{1}__gnu_mcount_nc".into(), + has_thumb_interworking: true, + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_musleabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_musleabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_musleabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_musleabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,23 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + // FIXME: this comment below does not seem applicable? + // It's important we use "gnueabihf" and not "musleabihf" here. LLVM + // uses it to determine the calling convention and float ABI, and LLVM + // doesn't support the "musleabihf" value. 
+ llvm_target: "armv5te-unknown-linux-gnueabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + features: "+soft-float,+strict-align".into(), + // Atomic operations provided by compiler-builtins + max_atomic_width: Some(32), + mcount: "\u{1}mcount".into(), + has_thumb_interworking: true, + ..base::linux_musl::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_uclibceabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_uclibceabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_uclibceabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv5te_unknown_linux_uclibceabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv5te-unknown-linux-uclibcgnueabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + features: "+soft-float,+strict-align".into(), + // Atomic operations provided by compiler-builtins + max_atomic_width: Some(32), + mcount: "\u{1}__gnu_mcount_nc".into(), + has_thumb_interworking: true, + ..base::linux_uclibc::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv6-unknown-freebsd-gnueabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabihf".into(), + // FIXME: change env to "gnu" when cfg_target_abi becomes stable + env: "gnueabihf".into(), + features: "+v6,+vfp2,-d32".into(), + max_atomic_width: Some(64), + mcount: "\u{1}__gnu_mcount_nc".into(), + llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), + ..base::freebsd::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv6-unknown-netbsdelf-eabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabihf".into(), + // FIXME: remove env when cfg_target_abi 
becomes stable + env: "eabihf".into(), + features: "+v6,+vfp2,-d32".into(), + max_atomic_width: Some(64), + mcount: "__mcount".into(), + ..base::netbsd::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6k_nintendo_3ds.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6k_nintendo_3ds.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6k_nintendo_3ds.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv6k_nintendo_3ds.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,36 @@ +use crate::spec::{cvs, Cc, LinkerFlavor, Lld, RelocModel, Target, TargetOptions}; + +/// A base target for Nintendo 3DS devices using the devkitARM toolchain. +/// +/// Requires the devkitARM toolchain for 3DS targets on the host system. + +pub fn target() -> Target { + let pre_link_args = TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + &["-specs=3dsx.specs", "-mtune=mpcore", "-mfloat-abi=hard", "-mtp=soft"], + ); + + Target { + llvm_target: "armv6k-none-eabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + os: "horizon".into(), + env: "newlib".into(), + vendor: "nintendo".into(), + abi: "eabihf".into(), + cpu: "mpcore".into(), + families: cvs!["unix"], + linker: Some("arm-none-eabi-gcc".into()), + relocation_model: RelocModel::Static, + features: "+vfp2".into(), + pre_link_args, + exe_suffix: ".elf".into(), + no_default_libraries: false, + has_thread_local: true, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_linux_androideabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_linux_androideabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_linux_androideabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_linux_androideabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, SanitizerSet, Target, TargetOptions}; + +// This target is for the baseline of the Android v7a ABI +// in thumb mode. It's named armv7-* instead of thumbv7-* +// for historical reasons. See the thumbv7neon variant for +// enabling NEON. + +// See https://developer.android.com/ndk/guides/abis.html#v7a +// for target ABI requirements.
+ +pub fn target() -> Target { + let mut base = base::android::opts(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-march=armv7-a"]); + Target { + llvm_target: "armv7-none-linux-android".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + features: "+v7,+thumb-mode,+thumb2,+vfp3,-d32,-neon".into(), + supported_sanitizers: SanitizerSet::ADDRESS, + max_atomic_width: Some(64), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_sony_vita_newlibeabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_sony_vita_newlibeabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_sony_vita_newlibeabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_sony_vita_newlibeabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,41 @@ +use crate::abi::Endian; +use crate::spec::{cvs, Cc, LinkerFlavor, Lld, RelocModel, Target, TargetOptions}; + +/// A base target for PlayStation Vita devices using the VITASDK toolchain (using newlib). +/// +/// Requires the VITASDK toolchain on the host system. + +pub fn target() -> Target { + let pre_link_args = TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + &["-Wl,-q", "-Wl,--pic-veneer"], + ); + + Target { + llvm_target: "thumbv7a-vita-eabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + os: "vita".into(), + endian: Endian::Little, + c_int_width: "32".into(), + env: "newlib".into(), + vendor: "sony".into(), + abi: "eabihf".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::Yes, Lld::No), + no_default_libraries: false, + cpu: "cortex-a9".into(), + families: cvs!["unix"], + linker: Some("arm-vita-eabi-gcc".into()), + relocation_model: RelocModel::Static, + features: "+v7,+neon,+vfp3,+thumb2,+thumb-mode".into(), + pre_link_args, + exe_suffix: ".elf".into(), + has_thumb_interworking: true, + max_atomic_width: Some(64), + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv7-unknown-freebsd-gnueabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabihf".into(), + // FIXME: change env to "gnu" when cfg_target_abi becomes stable + env: "gnueabihf".into(), + features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), + max_atomic_width: Some(64), + mcount: "\u{1}__gnu_mcount_nc".into(), + ..base::freebsd::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabi.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::spec::{base, Target, TargetOptions}; + +// This target is for glibc Linux on ARMv7 without thumb-mode, NEON or +// hardfloat. + +pub fn target() -> Target { + Target { + llvm_target: "armv7-unknown-linux-gnueabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + features: "+v7,+thumb2,+soft-float,-neon".into(), + max_atomic_width: Some(64), + mcount: "\u{1}__gnu_mcount_nc".into(), + llvm_mcount_intrinsic: Some("llvm.arm.gnu.eabi.mcount".into()), + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_gnueabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::spec::{base, Target, TargetOptions}; + +// This target is for glibc Linux on ARMv7 without NEON or +// thumb-mode. See the thumbv7neon variant for enabling both. + +pub fn target() -> Target { + Target { + llvm_target: "armv7-unknown-linux-gnueabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabihf".into(), + // Info about features at https://wiki.debian.org/ArmHardFloatPort + features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), + max_atomic_width: Some(64), + mcount: "\u{1}__gnu_mcount_nc".into(), + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_musleabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_musleabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_musleabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_musleabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::spec::{base, Target, TargetOptions}; + +// This target is for musl Linux on ARMv7 without thumb-mode, NEON or +// hardfloat. + +pub fn target() -> Target { + // Most of these settings are copied from the armv7_unknown_linux_gnueabi + // target. + Target { + // It's important we use "gnueabi" and not "musleabi" here. LLVM uses it + // to determine the calling convention and float ABI, and it doesn't + // support the "musleabi" value. 
+ llvm_target: "armv7-unknown-linux-gnueabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + abi: "eabi".into(), + features: "+v7,+thumb2,+soft-float,-neon".into(), + max_atomic_width: Some(64), + mcount: "\u{1}mcount".into(), + ..base::linux_musl::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_musleabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_musleabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_musleabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_musleabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,25 @@ +use crate::spec::{base, Target, TargetOptions}; + +// This target is for musl Linux on ARMv7 without thumb-mode or NEON. + +pub fn target() -> Target { + Target { + // It's important we use "gnueabihf" and not "musleabihf" here. LLVM + // uses it to determine the calling convention and float ABI, and LLVM + // doesn't support the "musleabihf" value. + llvm_target: "armv7-unknown-linux-gnueabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + // Most of these settings are copied from the armv7_unknown_linux_gnueabihf + // target. + options: TargetOptions { + abi: "eabihf".into(), + features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), + max_atomic_width: Some(64), + mcount: "\u{1}mcount".into(), + ..base::linux_musl::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_ohos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_ohos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_ohos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_ohos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +use crate::spec::{base, Target, TargetOptions}; + +// This target is for OpenHarmony on ARMv7 Linux with thumb-mode, but no NEON or +// hardfloat. + +pub fn target() -> Target { + // Most of these settings are copied from the armv7_unknown_linux_musleabi + // target. + Target { + // LLVM 15 doesn't support OpenHarmony yet, use a linux target instead. 
+ llvm_target: "armv7-unknown-linux-gnueabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + abi: "eabi".into(), + features: "+v7,+thumb2,+soft-float,-neon".into(), + max_atomic_width: Some(64), + mcount: "\u{1}mcount".into(), + ..base::linux_ohos::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_uclibceabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_uclibceabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_uclibceabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_uclibceabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,23 @@ +use crate::spec::{base, Target, TargetOptions}; + +// This target is for uclibc Linux on ARMv7 without NEON, +// thumb-mode or hardfloat. + +pub fn target() -> Target { + let base = base::linux_uclibc::opts(); + Target { + llvm_target: "armv7-unknown-linux-gnueabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + features: "+v7,+thumb2,+soft-float,-neon".into(), + cpu: "generic".into(), + max_atomic_width: Some(64), + mcount: "_mcount".into(), + abi: "eabi".into(), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_uclibceabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_uclibceabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_uclibceabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_linux_uclibceabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +use crate::spec::{base, Target, TargetOptions}; + +// This target is for uclibc Linux on ARMv7 without NEON or +// thumb-mode. See the thumbv7neon variant for enabling both. 
+ +pub fn target() -> Target { + let base = base::linux_uclibc::opts(); + Target { + llvm_target: "armv7-unknown-linux-gnueabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + // Info about features at https://wiki.debian.org/ArmHardFloatPort + features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), + cpu: "generic".into(), + max_atomic_width: Some(64), + mcount: "_mcount".into(), + abi: "eabihf".into(), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv7-unknown-netbsdelf-eabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabihf".into(), + // FIXME: remove env when cfg_target_abi becomes stable + env: "eabihf".into(), + features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), + max_atomic_width: Some(64), + mcount: "__mcount".into(), + ..base::netbsd::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_wrs_vxworks_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_wrs_vxworks_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_wrs_vxworks_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7_wrs_vxworks_eabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv7-unknown-linux-gnueabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabihf".into(), + // Info about features at https://wiki.debian.org/ArmHardFloatPort + features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), + max_atomic_width: Some(64), + ..base::vxworks::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_kmc_solid_asp3_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_kmc_solid_asp3_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_kmc_solid_asp3_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_kmc_solid_asp3_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + let base = base::solid::opts("asp3"); + Target { + llvm_target: "armv7a-none-eabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + linker: Some("arm-kmc-eabi-gcc".into()), + features: "+v7,+soft-float,+thumb2,-neon".into(), + 
relocation_model: RelocModel::Static, + disable_redzone: true, + max_atomic_width: Some(64), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_kmc_solid_asp3_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_kmc_solid_asp3_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_kmc_solid_asp3_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_kmc_solid_asp3_eabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + let base = base::solid::opts("asp3"); + Target { + llvm_target: "armv7a-none-eabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + linker: Some("arm-kmc-eabi-gcc".into()), + features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), + relocation_model: RelocModel::Static, + disable_redzone: true, + max_atomic_width: Some(64), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,40 @@ +// Generic ARMv7-A target for bare-metal code - floating point disabled +// +// This is basically the `armv7-unknown-linux-gnueabi` target with some changes +// (listed below) to bring it closer to the bare-metal `thumb` & `aarch64` +// targets: +// +// - `TargetOptions.features`: added `+strict-align`. rationale: unaligned +// memory access is disabled on boot on these cores +// - linker changed to LLD. rationale: C is not strictly needed to build +// bare-metal binaries (the `gcc` linker has the advantage that it knows where C +// libraries and crt*.o are but it's not much of an advantage here); LLD is also +// faster +// - `panic_strategy` set to `abort`. rationale: matches `thumb` targets +// - `relocation-model` set to `static`; also no PIE, no relro and no dynamic +// linking. 
rationale: matches `thumb` targets + +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + let opts = TargetOptions { + abi: "eabi".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + features: "+v7,+thumb2,+soft-float,-neon,+strict-align".into(), + relocation_model: RelocModel::Static, + disable_redzone: true, + max_atomic_width: Some(64), + panic_strategy: PanicStrategy::Abort, + emit_debug_gdb_scripts: false, + c_enum_min_bits: Some(8), + ..Default::default() + }; + Target { + llvm_target: "armv7a-none-eabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: opts, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_none_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_none_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_none_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7a_none_eabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,32 @@ +// Generic ARMv7-A target for bare-metal code - floating point enabled (assumes +// FPU is present and emits FPU instructions) +// +// This is basically the `armv7-unknown-linux-gnueabihf` target with some +// changes (list in `armv7a_none_eabi.rs`) to bring it closer to the bare-metal +// `thumb` & `aarch64` targets. + +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + let opts = TargetOptions { + abi: "eabihf".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + features: "+v7,+vfp3,-d32,+thumb2,-neon,+strict-align".into(), + relocation_model: RelocModel::Static, + disable_redzone: true, + max_atomic_width: Some(64), + panic_strategy: PanicStrategy::Abort, + emit_debug_gdb_scripts: false, + // GCC defaults to 8 for arm-none here. + c_enum_min_bits: Some(8), + ..Default::default() + }; + Target { + llvm_target: "armv7a-none-eabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: opts, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7k_apple_watchos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7k_apple_watchos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7k_apple_watchos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7k_apple_watchos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,30 @@ +use crate::spec::base::apple::{opts, Arch}; +use crate::spec::{Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::Armv7k; + Target { + llvm_target: "armv7k-apple-watchos".into(), + pointer_width: 32, + data_layout: "e-m:o-p:32:32-Fi8-i64:64-a:0:32-n32-S128".into(), + arch: arch.target_arch(), + options: TargetOptions { + features: "+v7,+vfp4,+neon".into(), + max_atomic_width: Some(64), + forces_embed_bitcode: true, + dynamic_linking: false, + position_independent_executables: true, + // These arguments are not actually invoked - they just have + // to look right to pass App Store validation. 
+ bitcode_llvm_cmdline: "-triple\0\ + armv7k-apple-watchos3.0.0\0\ + -emit-obj\0\ + -disable-llvm-passes\0\ + -target-abi\0\ + darwinpcs\0\ + -Os\0" + .into(), + ..opts("watchos", arch) + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7r_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7r_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7r_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7r_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,25 @@ +// Targets the Little-endian Cortex-R4/R5 processor (ARMv7-R) + +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv7r-none-eabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + abi: "eabi".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + relocation_model: RelocModel::Static, + panic_strategy: PanicStrategy::Abort, + max_atomic_width: Some(64), + emit_debug_gdb_scripts: false, + // GCC defaults to 8 for arm-none here. + c_enum_min_bits: Some(8), + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7r_none_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7r_none_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7r_none_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7r_none_eabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +// Targets the Little-endian Cortex-R4F/R5F processor (ARMv7-R) + +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "armv7r-none-eabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + abi: "eabihf".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + relocation_model: RelocModel::Static, + panic_strategy: PanicStrategy::Abort, + features: "+vfp3,-d32,-fp16".into(), + max_atomic_width: Some(64), + emit_debug_gdb_scripts: false, + // GCC defaults to 8 for arm-none here. 
+ c_enum_min_bits: Some(8), + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7s_apple_ios.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7s_apple_ios.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7s_apple_ios.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/armv7s_apple_ios.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::spec::base::apple::{ios_llvm_target, opts, Arch}; +use crate::spec::{Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::Armv7s; + Target { + llvm_target: ios_llvm_target(arch).into(), + pointer_width: 32, + data_layout: "e-m:o-p:32:32-Fi8-f64:32:64-v64:32:64-v128:32:128-a:0:32-n32-S32".into(), + arch: arch.target_arch(), + options: TargetOptions { + features: "+v7,+vfp4,+neon".into(), + max_atomic_width: Some(64), + ..opts("ios", arch) + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/asmjs_unknown_emscripten.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/asmjs_unknown_emscripten.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/asmjs_unknown_emscripten.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/asmjs_unknown_emscripten.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,7 @@ +use crate::spec::{targets::wasm32_unknown_emscripten, LinkerFlavor, Target}; + +pub fn target() -> Target { + let mut target = wasm32_unknown_emscripten::target(); + target.add_post_link_args(LinkerFlavor::EmCc, &["-sWASM=0", "--memory-init-file", "0"]); + target +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/avr_unknown_gnu_atmega328.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/avr_unknown_gnu_atmega328.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/avr_unknown_gnu_atmega328.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/avr_unknown_gnu_atmega328.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,5 @@ +use crate::spec::{base, Target}; + +pub fn target() -> Target { + base::avr_gnu::target("atmega328", "-mmcu=atmega328") +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/bpfeb_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/bpfeb_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/bpfeb_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/bpfeb_unknown_none.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,12 @@ +use crate::spec::Target; +use crate::{abi::Endian, spec::base}; + +pub fn target() -> Target { + Target { + llvm_target: "bpfeb".into(), + data_layout: "E-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + pointer_width: 64, + arch: "bpf".into(), + options: base::bpf::opts(Endian::Big), + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/bpfel_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/bpfel_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/bpfel_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/bpfel_unknown_none.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,12 @@ +use crate::spec::Target; +use crate::{abi::Endian, spec::base}; + +pub fn target() -> Target { + Target { + llvm_target: "bpfel".into(), + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + pointer_width: 64, + arch: "bpf".into(), + options: base::bpf::opts(Endian::Little), + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/csky_unknown_linux_gnuabiv2.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/csky_unknown_linux_gnuabiv2.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/csky_unknown_linux_gnuabiv2.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/csky_unknown_linux_gnuabiv2.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, Target, TargetOptions}; + +// This target is for glibc Linux on Csky + +pub fn target() -> Target { + Target { + //https://github.com/llvm/llvm-project/blob/8b76aea8d8b1b71f6220bc2845abc749f18a19b7/clang/lib/Basic/Targets/CSKY.h + llvm_target: "csky-unknown-linux-gnuabiv2".into(), + pointer_width: 32, + data_layout: "e-m:e-S32-p:32:32-i32:32:32-i64:32:32-f32:32:32-f64:32:32-v64:32:32-v128:32:32-a:0:32-Fi32-n32".into(), + arch: "csky".into(), + options: TargetOptions { + abi: "abiv2".into(), + features: "+2e3,+3e7,+7e10,+cache,+dsp1e2,+dspe60,+e1,+e2,+edsp,+elrw,+hard-tp,+high-registers,+hwdiv,+mp,+mp1e2,+nvic,+trust".into(), + late_link_args: TargetOptions::link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-l:libatomic.a"]), + max_atomic_width: Some(32), + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/csky_unknown_linux_gnuabiv2hf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/csky_unknown_linux_gnuabiv2hf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/csky_unknown_linux_gnuabiv2hf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/csky_unknown_linux_gnuabiv2hf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, Target, TargetOptions}; + +// This target is for glibc Linux on Csky + +pub fn target() -> Target { + Target { + //https://github.com/llvm/llvm-project/blob/8b76aea8d8b1b71f6220bc2845abc749f18a19b7/clang/lib/Basic/Targets/CSKY.h + llvm_target: "csky-unknown-linux-gnuabiv2".into(), + pointer_width: 32, + data_layout: "e-m:e-S32-p:32:32-i32:32:32-i64:32:32-f32:32:32-f64:32:32-v64:32:32-v128:32:32-a:0:32-Fi32-n32".into(), + arch: "csky".into(), + options: TargetOptions { + abi: "abiv2hf".into(), + cpu: "ck860fv".into(), + features: "+hard-float,+hard-float-abi,+2e3,+3e7,+7e10,+cache,+dsp1e2,+dspe60,+e1,+e2,+edsp,+elrw,+hard-tp,+high-registers,+hwdiv,+mp,+mp1e2,+nvic,+trust".into(), + late_link_args: TargetOptions::link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-l:libatomic.a", "-mhard-float"]), + max_atomic_width: Some(32), + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/hexagon_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/hexagon_unknown_linux_musl.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/hexagon_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/hexagon_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,29 @@ +use crate::spec::{base, Cc, LinkerFlavor, Target}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.cpu = "hexagonv60".into(); + base.max_atomic_width = Some(32); + // FIXME: HVX length defaults are per-CPU + base.features = "-small-data,+hvx-length128b".into(); + + base.crt_static_default = false; + base.has_rpath = true; + base.linker_flavor = LinkerFlavor::Unix(Cc::Yes); + + base.c_enum_min_bits = Some(8); + + Target { + llvm_target: "hexagon-unknown-linux-musl".into(), + pointer_width: 32, + data_layout: concat!( + "e-m:e-p:32:32:32-a:0-n16:32-i64:64:64-i32:32", + ":32-i16:16:16-i1:8:8-f32:32:32-f64:64:64-v32", + ":32:32-v64:64:64-v512:512:512-v1024:1024:1024-v2048", + ":2048:2048" + ) + .into(), + arch: "hexagon".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i386_apple_ios.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i386_apple_ios.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i386_apple_ios.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i386_apple_ios.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,23 @@ +use crate::spec::base::apple::{ios_sim_llvm_target, opts, Arch}; +use crate::spec::{StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::I386; + Target { + // Clang automatically chooses a more specific target based on + // IPHONEOS_DEPLOYMENT_TARGET. + // This is required for the target to pick the right + // MACH-O commands, so we do too. 
+ llvm_target: ios_sim_llvm_target(arch).into(), + pointer_width: 32, + data_layout: "e-m:o-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:128-n8:16:32-S128" + .into(), + arch: arch.target_arch(), + options: TargetOptions { + max_atomic_width: Some(64), + stack_probes: StackProbeType::X86, + ..opts("ios", arch) + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i386_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i386_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i386_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i386_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,8 @@ +use crate::spec::Target; + +pub fn target() -> Target { + let mut base = super::i686_unknown_linux_gnu::target(); + base.cpu = "i386".into(); + base.llvm_target = "i386-unknown-linux-gnu".into(); + base +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i486_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i486_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i486_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i486_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,8 @@ +use crate::spec::Target; + +pub fn target() -> Target { + let mut base = super::i686_unknown_linux_gnu::target(); + base.cpu = "i486".into(); + base.llvm_target = "i486-unknown-linux-gnu".into(); + base +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_pc_nto_qnx700.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_pc_nto_qnx700.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_pc_nto_qnx700.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_pc_nto_qnx700.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,23 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "i586-pc-unknown".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:32-n8:16:32-S128" + .into(), + arch: "x86".into(), + options: TargetOptions { + cpu: "pentium4".into(), + max_atomic_width: Some(64), + pre_link_args: TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + &["-Vgcc_ntox86_cxx"], + ), + env: "nto70".into(), + stack_probes: StackProbeType::X86, + ..base::nto_qnx::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_pc_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_pc_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_pc_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_pc_windows_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,8 @@ +use crate::spec::Target; + +pub fn target() -> Target { + let mut base = super::i686_pc_windows_msvc::target(); + base.cpu = "pentium".into(); + base.llvm_target = "i586-pc-windows-msvc".into(); + base +} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,8 @@ +use crate::spec::Target; + +pub fn target() -> Target { + let mut base = super::i686_unknown_linux_gnu::target(); + base.cpu = "pentium".into(); + base.llvm_target = "i586-unknown-linux-gnu".into(); + base +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,8 @@ +use crate::spec::Target; + +pub fn target() -> Target { + let mut base = super::i686_unknown_linux_musl::target(); + base.cpu = "pentium".into(); + base.llvm_target = "i586-unknown-linux-musl".into(); + base +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i586_unknown_netbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::netbsd::opts(); + base.cpu = "pentium".into(); + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::Call; + + Target { + llvm_target: "i586-unknown-netbsdelf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:32-n8:16:32-S128" + .into(), + arch: "x86".into(), + options: TargetOptions { mcount: "__mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_apple_darwin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_apple_darwin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_apple_darwin.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_apple_darwin.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +use crate::spec::base::apple::{macos_llvm_target, opts, Arch}; +use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + // ld64 only understands i386 and not i686 + let arch = Arch::I386; + let mut base = opts("macos", arch); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-m32"]); + base.stack_probes = StackProbeType::X86; + base.frame_pointer = FramePointer::Always; + + Target { + // Clang automatically chooses a more specific target based on + // MACOSX_DEPLOYMENT_TARGET. 
To enable cross-language LTO to work + // correctly, we do too. + // + // While ld64 doesn't understand i686, LLVM does. + llvm_target: macos_llvm_target(Arch::I686).into(), + pointer_width: 32, + data_layout: "e-m:o-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:128-n8:16:32-S128" + .into(), + arch: arch.target_arch(), + options: TargetOptions { mcount: "\u{1}mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_linux_android.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_linux_android.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_linux_android.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_linux_android.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,25 @@ +use crate::spec::{base, SanitizerSet, StackProbeType, Target, TargetOptions}; + +// See https://developer.android.com/ndk/guides/abis.html#x86 +// for target ABI requirements. + +pub fn target() -> Target { + let mut base = base::android::opts(); + + base.max_atomic_width = Some(64); + + // https://developer.android.com/ndk/guides/abis.html#x86 + base.cpu = "pentiumpro".into(); + base.features = "+mmx,+sse,+sse2,+sse3,+ssse3".into(); + base.stack_probes = StackProbeType::X86; + + Target { + llvm_target: "i686-linux-android".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:32-n8:16:32-S128" + .into(), + arch: "x86".into(), + options: TargetOptions { supported_sanitizers: SanitizerSet::ADDRESS, ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +use crate::spec::{base, Cc, FramePointer, LinkerFlavor, Lld, Target}; + +pub fn target() -> Target { + let mut base = base::windows_gnu::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + base.frame_pointer = FramePointer::Always; // Required for backtraces + base.linker = Some("i686-w64-mingw32-gcc".into()); + + // Mark all dynamic libraries and executables as compatible with the larger 4GiB address + // space available to x86 Windows binaries on x86_64. 
+ base.add_pre_link_args( + LinkerFlavor::Gnu(Cc::No, Lld::No), + &["-m", "i386pe", "--large-address-aware"], + ); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-Wl,--large-address-aware"]); + + Target { + llvm_target: "i686-pc-windows-gnu".into(), + pointer_width: 32, + data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + i64:64-f80:32-n8:16:32-a:0:32-S32" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnullvm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnullvm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnullvm.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_gnullvm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::spec::{base, Cc, FramePointer, LinkerFlavor, Lld, Target}; + +pub fn target() -> Target { + let mut base = base::windows_gnullvm::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + base.frame_pointer = FramePointer::Always; // Required for backtraces + base.linker = Some("i686-w64-mingw32-clang".into()); + + // Mark all dynamic libraries and executables as compatible with the larger 4GiB address + // space available to x86 Windows binaries on x86_64. + base.add_pre_link_args( + LinkerFlavor::Gnu(Cc::No, Lld::No), + &["-m", "i386pe", "--large-address-aware"], + ); + + Target { + llvm_target: "i686-pc-windows-gnu".into(), + pointer_width: 32, + data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + i64:64-f80:32-n8:16:32-a:0:32-S32" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_pc_windows_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,32 @@ +use crate::spec::{base, LinkerFlavor, Lld, Target}; + +pub fn target() -> Target { + let mut base = base::windows_msvc::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + + base.add_pre_link_args( + LinkerFlavor::Msvc(Lld::No), + &[ + // Mark all dynamic libraries and executables as compatible with the larger 4GiB address + // space available to x86 Windows binaries on x86_64. + "/LARGEADDRESSAWARE", + // Ensure the linker will only produce an image if it can also produce a table of + // the image's safe exception handlers. 
+ // https://docs.microsoft.com/en-us/cpp/build/reference/safeseh-image-has-safe-exception-handlers + "/SAFESEH", + ], + ); + // Workaround for #95429 + base.has_thread_local = false; + + Target { + llvm_target: "i686-pc-windows-msvc".into(), + pointer_width: 32, + data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + i64:64-f80:128-n8:16:32-a:0:32-S32" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_freebsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::freebsd::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32", "-Wl,-znotext"]); + base.stack_probes = StackProbeType::X86; + + Target { + llvm_target: "i686-unknown-freebsd".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:32-n8:16:32-S128" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_haiku.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_haiku.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_haiku.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_haiku.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::haiku::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); + base.stack_probes = StackProbeType::X86; + + Target { + llvm_target: "i686-unknown-haiku".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:32-n8:16:32-S128" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_hurd_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_hurd_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_hurd_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_hurd_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::hurd_gnu::opts(); + base.cpu = "pentiumpro".into(); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); + base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) }; + + Target { + llvm_target: "i686-unknown-hurd-gnu".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ 
+ f64:32:64-f80:32-n8:16:32-S128" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + base.supported_sanitizers = SanitizerSet::ADDRESS; + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); + base.stack_probes = StackProbeType::X86; + + Target { + llvm_target: "i686-unknown-linux-gnu".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:32-n8:16:32-S128" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,33 @@ +use crate::spec::{base, Cc, FramePointer, LinkerFlavor, Lld, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32", "-Wl,-melf_i386"]); + base.stack_probes = StackProbeType::X86; + + // The unwinder used by i686-unknown-linux-musl, the LLVM libunwind + // implementation, apparently relies on frame pointers existing... somehow. + // It's not clear to me why nor where this dependency is introduced, but the + // test suite does not pass with frame pointers eliminated and it passes + // with frame pointers present. + // + // If you think that this is no longer necessary, then please feel free to + // ignore! If it still passes the test suite and the bots then sounds good + // to me. 
+ // + // This may or may not be related to this bug: + // https://llvm.org/bugs/show_bug.cgi?id=30879 + base.frame_pointer = FramePointer::Always; + + Target { + llvm_target: "i686-unknown-linux-musl".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:32-n8:16:32-S128" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_netbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::netbsd::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); + base.stack_probes = StackProbeType::X86; + + Target { + llvm_target: "i686-unknown-netbsdelf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:32-n8:16:32-S128" + .into(), + arch: "x86".into(), + options: TargetOptions { mcount: "__mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_openbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::openbsd::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32", "-fuse-ld=lld"]); + base.stack_probes = StackProbeType::X86; + + Target { + llvm_target: "i686-unknown-openbsd".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:32-n8:16:32-S128" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_uefi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_uefi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_uefi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_unknown_uefi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,89 @@ +// This defines the ia32 target for UEFI systems as described in the UEFI specification. See the +// uefi-base module for generic UEFI options. On ia32 systems +// UEFI systems always run in protected-mode, have the interrupt-controller pre-configured and +// force a single-CPU execution. +// The cdecl ABI is used. It differs from the stdcall or fastcall ABI. +// "i686-unknown-windows" is used to get the minimal subset of windows-specific features. 
+ +use crate::spec::{base, Target}; + +pub fn target() -> Target { + let mut base = base::uefi_msvc::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + + // We disable MMX and SSE for now, even though UEFI allows using them. Problem is, you have to + // enable these CPU features explicitly before their first use, otherwise their instructions + // will trigger an exception. Rust does not inject any code that enables AVX/MMX/SSE + // instruction sets, so this must be done by the firmware. However, existing firmware is known + // to leave these uninitialized, thus triggering exceptions if we make use of them. Which is + // why we avoid them and instead use soft-floats. This is also what GRUB and friends did so + // far. + // If you initialize FP units yourself, you can override these flags with custom linker + // arguments, thus giving you access to full MMX/SSE acceleration. + base.features = "-mmx,-sse,+soft-float".into(); + + // Use -GNU here, because of the reason below: + // Background and Problem: + // If we use i686-unknown-windows, the LLVM IA32 MSVC generates compiler intrinsic + // _alldiv, _aulldiv, _allrem, _aullrem, _allmul, which will cause undefined symbol. + // A real issue is __aulldiv() is referred by __udivdi3() - udivmod_inner!(), from + // https://github.com/rust-lang-nursery/compiler-builtins. + // As result, rust-lld generates link error finally. + // Root-cause: + // In rust\src\llvm-project\llvm\lib\Target\X86\X86ISelLowering.cpp, + // we have below code to use MSVC intrinsics. It assumes MSVC target + // will link MSVC library. But that is NOT true in UEFI environment. + // UEFI does not link any MSVC or GCC standard library. + // if (Subtarget.isTargetKnownWindowsMSVC() || + // Subtarget.isTargetWindowsItanium()) { + // // Setup Windows compiler runtime calls. + // setLibcallName(RTLIB::SDIV_I64, "_alldiv"); + // setLibcallName(RTLIB::UDIV_I64, "_aulldiv"); + // setLibcallName(RTLIB::SREM_I64, "_allrem"); + // setLibcallName(RTLIB::UREM_I64, "_aullrem"); + // setLibcallName(RTLIB::MUL_I64, "_allmul"); + // setLibcallCallingConv(RTLIB::SDIV_I64, CallingConv::X86_StdCall); + // setLibcallCallingConv(RTLIB::UDIV_I64, CallingConv::X86_StdCall); + // setLibcallCallingConv(RTLIB::SREM_I64, CallingConv::X86_StdCall); + // setLibcallCallingConv(RTLIB::UREM_I64, CallingConv::X86_StdCall); + // setLibcallCallingConv(RTLIB::MUL_I64, CallingConv::X86_StdCall); + // } + // The compiler intrinsics should be implemented by compiler-builtins. + // Unfortunately, compiler-builtins has not provided those intrinsics yet. Such as: + // i386/divdi3.S + // i386/lshrdi3.S + // i386/moddi3.S + // i386/muldi3.S + // i386/udivdi3.S + // i386/umoddi3.S + // Possible solution: + // 1. Eliminate Intrinsics generation. + // 1.1 Choose different target to bypass isTargetKnownWindowsMSVC(). + // 1.2 Remove the "Setup Windows compiler runtime calls" in LLVM + // 2. Implement Intrinsics. + // We evaluated all options. + // #2 is hard because we need implement the intrinsics (_aulldiv) generated + // from the other intrinsics (__udivdi3) implementation with the same + // functionality (udivmod_inner). If we let _aulldiv() call udivmod_inner!(), + // then we are in loop. We may have to find another way to implement udivmod_inner!(). + // #1.2 may break the existing usage. + // #1.1 seems the simplest solution today. + // The IA32 -gnu calling convention is same as the one defined in UEFI specification. 
+ // It uses cdecl, EAX/ECX/EDX as volatile register, and EAX/EDX as return value. + // We also checked the LLVM X86TargetLowering, the differences between -gnu and -msvc + // is fmodf(f32), longjmp() and TLS. None of them impacts the UEFI code. + // As a result, we choose -gnu for i686 version before those intrinsics are implemented in + // compiler-builtins. After compiler-builtins implements all required intrinsics, we may + // remove -gnu and use the default one. + Target { + llvm_target: "i686-unknown-windows-gnu".into(), + pointer_width: 32, + data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + i64:64-f80:32-n8:16:32-a:0:32-S32" + .into(), + arch: "x86".into(), + + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_uwp_windows_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_uwp_windows_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_uwp_windows_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_uwp_windows_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::spec::{base, Cc, FramePointer, LinkerFlavor, Lld, Target}; + +pub fn target() -> Target { + let mut base = base::windows_uwp_gnu::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + base.frame_pointer = FramePointer::Always; // Required for backtraces + + // Mark all dynamic libraries and executables as compatible with the larger 4GiB address + // space available to x86 Windows binaries on x86_64. + base.add_pre_link_args( + LinkerFlavor::Gnu(Cc::No, Lld::No), + &["-m", "i386pe", "--large-address-aware"], + ); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-Wl,--large-address-aware"]); + + Target { + llvm_target: "i686-pc-windows-gnu".into(), + pointer_width: 32, + data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + i64:64-f80:32-n8:16:32-a:0:32-S32" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_uwp_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_uwp_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_uwp_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_uwp_windows_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::spec::{base, Target}; + +pub fn target() -> Target { + let mut base = base::windows_uwp_msvc::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + + Target { + llvm_target: "i686-pc-windows-msvc".into(), + pointer_width: 32, + data_layout: "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + i64:64-f80:128-n8:16:32-a:0:32-S32" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_wrs_vxworks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_wrs_vxworks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_wrs_vxworks.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/i686_wrs_vxworks.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target}; + +pub 
fn target() -> Target { + let mut base = base::vxworks::opts(); + base.cpu = "pentium4".into(); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); + base.stack_probes = StackProbeType::X86; + + Target { + llvm_target: "i686-unknown-linux-gnu".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + f64:32:64-f80:32-n8:16:32-S128" + .into(), + arch: "x86".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "loongarch64-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(), + arch: "loongarch64".into(), + options: TargetOptions { + cpu: "generic".into(), + features: "+f,+d".into(), + llvm_abiname: "lp64d".into(), + max_atomic_width: Some(64), + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,23 @@ +use crate::spec::{Cc, CodeModel, LinkerFlavor, Lld, PanicStrategy, RelocModel}; +use crate::spec::{Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "loongarch64-unknown-none".into(), + pointer_width: 64, + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(), + arch: "loongarch64".into(), + options: TargetOptions { + cpu: "generic".into(), + features: "+f,+d".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + llvm_abiname: "lp64d".into(), + max_atomic_width: Some(64), + relocation_model: RelocModel::Static, + panic_strategy: PanicStrategy::Abort, + code_model: Some(CodeModel::Small), + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none_softfloat.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none_softfloat.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none_softfloat.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none_softfloat.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +use crate::spec::{Cc, CodeModel, LinkerFlavor, Lld, PanicStrategy, RelocModel}; +use crate::spec::{Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "loongarch64-unknown-none".into(), + pointer_width: 64, + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(), + arch: "loongarch64".into(), + 
options: TargetOptions { + cpu: "generic".into(), + features: "-f,-d".into(), + abi: "softfloat".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + llvm_abiname: "lp64s".into(), + max_atomic_width: Some(64), + relocation_model: RelocModel::Static, + panic_strategy: PanicStrategy::Abort, + code_model: Some(CodeModel::Small), + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/m68k_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/m68k_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/m68k_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/m68k_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.cpu = "M68020".into(); + base.max_atomic_width = Some(32); + + Target { + llvm_target: "m68k-unknown-linux-gnu".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:16:32-i8:8:8-i16:16:16-i32:16:32-n8:16:32-a:0:16-S16".into(), + arch: "m68k".into(), + options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_openwrt_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_openwrt_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_openwrt_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_openwrt_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +/// A target tuple for OpenWrt MIPS64 targets +/// +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.cpu = "mips64r2".into(); + base.features = "+mips64r2,+soft-float".into(); + base.max_atomic_width = Some(64); + base.crt_static_default = false; + + Target { + // LLVM doesn't recognize "muslabi64" yet. 
+ llvm_target: "mips64-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), + arch: "mips64".into(), + options: TargetOptions { + abi: "abi64".into(), + endian: Endian::Big, + mcount: "_mcount".into(), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_gnuabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_gnuabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_gnuabi64.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_gnuabi64.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,22 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mips64-unknown-linux-gnuabi64".into(), + pointer_width: 64, + data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), + arch: "mips64".into(), + options: TargetOptions { + abi: "abi64".into(), + endian: Endian::Big, + // NOTE(mips64r2) matches C toolchain + cpu: "mips64r2".into(), + features: "+mips64r2,+xgot".into(), + max_atomic_width: Some(64), + mcount: "_mcount".into(), + + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_muslabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_muslabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_muslabi64.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64_unknown_linux_muslabi64.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,22 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.cpu = "mips64r2".into(); + base.features = "+mips64r2".into(); + base.max_atomic_width = Some(64); + Target { + // LLVM doesn't recognize "muslabi64" yet. 
+ llvm_target: "mips64-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), + arch: "mips64".into(), + options: TargetOptions { + abi: "abi64".into(), + endian: Endian::Big, + mcount: "_mcount".into(), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_gnuabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_gnuabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_gnuabi64.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_gnuabi64.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mips64el-unknown-linux-gnuabi64".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), + arch: "mips64".into(), + options: TargetOptions { + abi: "abi64".into(), + // NOTE(mips64r2) matches C toolchain + cpu: "mips64r2".into(), + features: "+mips64r2,+xgot".into(), + max_atomic_width: Some(64), + mcount: "_mcount".into(), + + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_muslabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_muslabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_muslabi64.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips64el_unknown_linux_muslabi64.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.cpu = "mips64r2".into(); + base.features = "+mips64r2".into(); + base.max_atomic_width = Some(64); + Target { + // LLVM doesn't recognize "muslabi64" yet. 
+ llvm_target: "mips64el-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), + arch: "mips64".into(), + options: TargetOptions { abi: "abi64".into(), mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mips-unknown-linux-gnu".into(), + pointer_width: 32, + data_layout: "E-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips".into(), + options: TargetOptions { + endian: Endian::Big, + cpu: "mips32r2".into(), + features: "+mips32r2,+fpxx,+nooddspreg".into(), + max_atomic_width: Some(32), + mcount: "_mcount".into(), + + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.cpu = "mips32r2".into(); + base.features = "+mips32r2,+soft-float".into(); + base.max_atomic_width = Some(32); + base.crt_static_default = false; + Target { + llvm_target: "mips-unknown-linux-musl".into(), + pointer_width: 32, + data_layout: "E-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips".into(), + options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_uclibc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_uclibc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_uclibc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mips_unknown_linux_uclibc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mips-unknown-linux-uclibc".into(), + pointer_width: 32, + data_layout: "E-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips".into(), + options: TargetOptions { + endian: Endian::Big, + cpu: "mips32r2".into(), + features: "+mips32r2,+soft-float".into(), + max_atomic_width: Some(32), + mcount: "_mcount".into(), + + ..base::linux_uclibc::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psp.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psp.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psp.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psp.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,36 @@ +use crate::spec::{cvs, Cc, LinkerFlavor, Lld, RelocModel, Target, TargetOptions}; + +// The PSP has custom linker requirements. +const LINKER_SCRIPT: &str = include_str!("./mipsel_sony_psp_linker_script.ld"); + +pub fn target() -> Target { + let pre_link_args = TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::No, Lld::No), + &["--emit-relocs", "--nmagic"], + ); + + Target { + llvm_target: "mipsel-sony-psp".into(), + pointer_width: 32, + data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips".into(), + + options: TargetOptions { + os: "psp".into(), + vendor: "sony".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + cpu: "mips2".into(), + linker: Some("rust-lld".into()), + relocation_model: RelocModel::Static, + + // PSP FPU only supports single precision floats. + features: "+single-float".into(), + + // PSP does not support trap-on-condition instructions. + llvm_args: cvs!["-mno-check-zero-division"], + pre_link_args, + link_script: Some(LINKER_SCRIPT.into()), + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psp_linker_script.ld rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psp_linker_script.ld --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psp_linker_script.ld 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psp_linker_script.ld 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,45 @@ +ENTRY(module_start) +SECTIONS +{ + /* PRX format requires text to begin at 0 */ + .text 0 : { *(.text .text.*) } + + /* Sort stubs for convenient ordering */ + .sceStub.text : { *(.sceStub.text) *(SORT(.sceStub.text.*)) } + + /* PSP import library stub sections. Bundles together `.lib.stub.entry.*` + * sections for better `--gc-sections` support. */ + .lib.stub.top : { *(.lib.stub.top) } + .lib.stub : { *(.lib.stub) *(.lib.stub.entry.*) } + .lib.stub.btm : { *(.lib.stub.btm) } + + /* Keep these sections around, even though they may appear unused to the linker */ + .lib.ent.top : { KEEP(*(.lib.ent.top)) } + .lib.ent : { KEEP(*(.lib.ent)) } + .lib.ent.btm : { KEEP(*(.lib.ent.btm)) } + + .eh_frame_hdr : { *(.eh_frame_hdr) } + + /* Add symbols for LLVM's libunwind */ + __eh_frame_hdr_start = SIZEOF(.eh_frame_hdr) > 0 ? ADDR(.eh_frame_hdr) : 0; + __eh_frame_hdr_end = SIZEOF(.eh_frame_hdr) > 0 ? . 
: 0; + .eh_frame : + { + __eh_frame_start = .; + KEEP(*(.eh_frame)) + __eh_frame_end = .; + } + + /* These are explicitly listed to avoid being merged into .rodata */ + .rodata.sceResident : { *(.rodata.sceResident) *(.rodata.sceResident.*) } + .rodata.sceModuleInfo : { *(.rodata.sceModuleInfo) } + /* Sort NIDs for convenient ordering */ + .rodata.sceNid : { *(.rodata.sceNid) *(SORT(.rodata.sceNid.*)) } + + .rodata : { *(.rodata .rodata.*) } + .data : { *(.data .data.*) } + .gcc_except_table : { *(.gcc_except_table .gcc_except_table.*) } + .bss : { *(.bss .bss.*) } + + /DISCARD/ : { *(.rel.sceStub.text .MIPS.abiflags .reginfo) } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psx.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psx.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psx.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_sony_psx.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,37 @@ +use crate::spec::{cvs, Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mipsel-sony-psx".into(), + pointer_width: 32, + data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips".into(), + + options: TargetOptions { + os: "none".into(), + env: "psx".into(), + vendor: "sony".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + cpu: "mips1".into(), + executables: true, + linker: Some("rust-lld".into()), + relocation_model: RelocModel::Static, + exe_suffix: ".exe".into(), + + // PSX doesn't natively support floats. + features: "+soft-float".into(), + + // This should be 16 bits, but LLVM incorrectly tries emitting MIPS-II SYNC instructions + // for atomic loads and stores. This crashes rustc so we have to disable the Atomic* API + // until this is fixed upstream. See https://reviews.llvm.org/D122427#3420144 for more + // info. + max_atomic_width: Some(0), + + // PSX does not support trap-on-condition instructions. 
+ llvm_args: cvs!["-mno-check-zero-division"], + llvm_abiname: "o32".into(), + panic_strategy: PanicStrategy::Abort, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mipsel-unknown-linux-gnu".into(), + pointer_width: 32, + data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips".into(), + + options: TargetOptions { + cpu: "mips32r2".into(), + features: "+mips32r2,+fpxx,+nooddspreg".into(), + max_atomic_width: Some(32), + mcount: "_mcount".into(), + + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.cpu = "mips32r2".into(); + base.features = "+mips32r2,+soft-float".into(); + base.max_atomic_width = Some(32); + base.crt_static_default = false; + Target { + llvm_target: "mipsel-unknown-linux-musl".into(), + pointer_width: 32, + data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips".into(), + options: TargetOptions { mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_uclibc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_uclibc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_uclibc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_linux_uclibc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mipsel-unknown-linux-uclibc".into(), + pointer_width: 32, + data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips".into(), + + options: TargetOptions { + cpu: "mips32r2".into(), + features: "+mips32r2,+soft-float".into(), + max_atomic_width: Some(32), + mcount: "_mcount".into(), + + ..base::linux_uclibc::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_netbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::netbsd::opts(); + base.max_atomic_width = Some(32); + base.cpu = "mips32".into(); + + Target { + llvm_target: "mipsel-unknown-netbsd".into(), + pointer_width: 32, + data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips".into(), + options: TargetOptions { + features: "+soft-float".into(), + mcount: "__mcount".into(), + endian: Endian::Little, + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsel_unknown_none.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +//! Bare MIPS32r2, little endian, softfloat, O32 calling convention +//! +//! Can be used for MIPS M4K core (e.g. on PIC32MX devices) + +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mipsel-unknown-none".into(), + pointer_width: 32, + data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips".into(), + + options: TargetOptions { + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + cpu: "mips32r2".into(), + features: "+mips32r2,+soft-float,+noabicalls".into(), + max_atomic_width: Some(32), + linker: Some("rust-lld".into()), + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + emit_debug_gdb_scripts: false, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa32r6_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa32r6_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa32r6_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa32r6_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mipsisa32r6-unknown-linux-gnu".into(), + pointer_width: 32, + data_layout: "E-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips32r6".into(), + options: TargetOptions { + endian: Endian::Big, + cpu: "mips32r6".into(), + features: "+mips32r6".into(), + max_atomic_width: Some(32), + mcount: "_mcount".into(), + + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa32r6el_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa32r6el_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa32r6el_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa32r6el_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Target, 
TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mipsisa32r6el-unknown-linux-gnu".into(), + pointer_width: 32, + data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(), + arch: "mips32r6".into(), + + options: TargetOptions { + cpu: "mips32r6".into(), + features: "+mips32r6".into(), + max_atomic_width: Some(32), + mcount: "_mcount".into(), + + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa64r6_unknown_linux_gnuabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa64r6_unknown_linux_gnuabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa64r6_unknown_linux_gnuabi64.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa64r6_unknown_linux_gnuabi64.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,22 @@ +use crate::abi::Endian; +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mipsisa64r6-unknown-linux-gnuabi64".into(), + pointer_width: 64, + data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), + arch: "mips64r6".into(), + options: TargetOptions { + abi: "abi64".into(), + endian: Endian::Big, + // NOTE(mips64r6) matches C toolchain + cpu: "mips64r6".into(), + features: "+mips64r6".into(), + max_atomic_width: Some(64), + mcount: "_mcount".into(), + + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa64r6el_unknown_linux_gnuabi64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa64r6el_unknown_linux_gnuabi64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa64r6el_unknown_linux_gnuabi64.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/mipsisa64r6el_unknown_linux_gnuabi64.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "mipsisa64r6el-unknown-linux-gnuabi64".into(), + pointer_width: 64, + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".into(), + arch: "mips64r6".into(), + options: TargetOptions { + abi: "abi64".into(), + // NOTE(mips64r6) matches C toolchain + cpu: "mips64r6".into(), + features: "+mips64r6".into(), + max_atomic_width: Some(64), + mcount: "_mcount".into(), + + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/msp430_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/msp430_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/msp430_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/msp430_none_elf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,59 @@ +use crate::spec::{cvs, Cc, LinkerFlavor, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "msp430-none-elf".into(), + pointer_width: 16, + data_layout: "e-m:e-p:16:16-i32:16-i64:16-f32:16-f64:16-a:8-n8:16-S16".into(), + arch: "msp430".into(), + + options: TargetOptions { + c_int_width: "16".into(), + + // The LLVM backend currently can't generate object files. 
To + // workaround this LLVM generates assembly files which then we feed + // to gcc to get object files. For this reason we have a hard + // dependency on this specific gcc. + asm_args: cvs!["-mcpu=msp430"], + linker: Some("msp430-elf-gcc".into()), + linker_flavor: LinkerFlavor::Unix(Cc::Yes), + + // There are no atomic CAS instructions available in the MSP430 + // instruction set, and the LLVM backend doesn't currently support + // compiler fences so the Atomic* API is missing on this target. + // When the LLVM backend gains support for compile fences uncomment + // the `singlethread: true` line and set `max_atomic_width` to + // `Some(16)`. + max_atomic_width: Some(0), + atomic_cas: false, + // singlethread: true, + + // Because these devices have very little resources having an + // unwinder is too onerous so we default to "abort" because the + // "unwind" strategy is very rare. + panic_strategy: PanicStrategy::Abort, + + // Similarly, one almost always never wants to use relocatable + // code because of the extra costs it involves. + relocation_model: RelocModel::Static, + + // Right now we invoke an external assembler and this isn't + // compatible with multiple codegen units, and plus we probably + // don't want to invoke that many gcc instances. + default_codegen_units: Some(1), + + // Since MSP430 doesn't meaningfully support faulting on illegal + // instructions, LLVM generates a call to abort() function instead + // of a trap instruction. Such calls are 4 bytes long, and that is + // too much overhead for such small target. + trap_unreachable: false, + + // See the thumb_base.rs file for an explanation of this value + emit_debug_gdb_scripts: false, + + eh_frame_header: false, + + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/nvptx64_nvidia_cuda.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/nvptx64_nvidia_cuda.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/nvptx64_nvidia_cuda.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/nvptx64_nvidia_cuda.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,52 @@ +use crate::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + arch: "nvptx64".into(), + data_layout: "e-i64:64-i128:128-v16:16-v32:32-n16:32:64".into(), + llvm_target: "nvptx64-nvidia-cuda".into(), + pointer_width: 64, + + options: TargetOptions { + os: "cuda".into(), + vendor: "nvidia".into(), + linker_flavor: LinkerFlavor::Ptx, + // The linker can be installed from `crates.io`. + linker: Some("rust-ptx-linker".into()), + + // With `ptx-linker` approach, it can be later overridden via link flags. + cpu: "sm_30".into(), + + // FIXME: create tests for the atomics. + max_atomic_width: Some(64), + + // Unwinding on CUDA is neither feasible nor useful. + panic_strategy: PanicStrategy::Abort, + + // Needed to use `dylib` and `bin` crate types and the linker. + dynamic_linking: true, + + // Avoid using dylib because it contain metadata not supported + // by LLVM NVPTX backend. + only_cdylib: true, + + // Let the `ptx-linker` to handle LLVM lowering into MC / assembly. + obj_is_bitcode: true, + + // Convenient and predicable naming scheme. 
+ dll_prefix: "".into(), + dll_suffix: ".ptx".into(), + exe_suffix: ".ptx".into(), + + // Disable MergeFunctions LLVM optimisation pass because it can + // produce kernel functions that call other kernel functions. + // This behavior is not supported by PTX ISA. + merge_functions: MergeFunctions::Disabled, + + // The LLVM backend does not support stack canaries for this target + supports_stack_protector: false, + + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_ibm_aix.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_ibm_aix.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_ibm_aix.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_ibm_aix.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, Cc, LinkerFlavor, Target}; + +pub fn target() -> Target { + let mut base = base::aix::opts(); + base.max_atomic_width = Some(64); + base.add_pre_link_args( + LinkerFlavor::Unix(Cc::No), + &["-b64", "-bpT:0x100000000", "-bpD:0x110000000", "-bcdtors:all:0:s"], + ); + + Target { + llvm_target: "powerpc64-ibm-aix".into(), + pointer_width: 64, + data_layout: "E-m:a-Fi64-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), + arch: "powerpc64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_freebsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::freebsd::opts(); + base.cpu = "ppc64".into(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc64-unknown-freebsd".into(), + pointer_width: 64, + data_layout: "E-m:e-Fn32-i64:64-n32:64".into(), + arch: "powerpc64".into(), + options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.cpu = "ppc64".into(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc64-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: 
"E-m:e-Fi64-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), + arch: "powerpc64".into(), + options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.cpu = "ppc64".into(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc64-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "E-m:e-Fi64-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), + arch: "powerpc64".into(), + options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_unknown_openbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::openbsd::opts(); + base.cpu = "ppc64".into(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc64-unknown-openbsd".into(), + pointer_width: 64, + data_layout: "E-m:e-Fn32-i64:64-n32:64".into(), + arch: "powerpc64".into(), + options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_wrs_vxworks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_wrs_vxworks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_wrs_vxworks.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64_wrs_vxworks.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::vxworks::opts(); + base.cpu = "ppc64".into(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc64-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: "E-m:e-Fi64-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), + arch: "powerpc64".into(), + 
options: TargetOptions { endian: Endian::Big, ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_freebsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::freebsd::opts(); + base.cpu = "ppc64le".into(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc64le-unknown-freebsd".into(), + pointer_width: 64, + data_layout: "e-m:e-Fn32-i64:64-n32:64".into(), + arch: "powerpc64".into(), + options: TargetOptions { mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.cpu = "ppc64le".into(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc64le-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: "e-m:e-Fn32-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), + arch: "powerpc64".into(), + options: TargetOptions { mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc64le_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.cpu = "ppc64le".into(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc64le-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "e-m:e-Fn32-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(), + arch: "powerpc64".into(), + options: TargetOptions { mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_freebsd.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_freebsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::freebsd::opts(); + // Extra hint to linker that we are generating secure-PLT code. + base.add_pre_link_args( + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + &["-m32", "--target=powerpc-unknown-freebsd13.0"], + ); + base.max_atomic_width = Some(32); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc-unknown-freebsd13.0".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), + arch: "powerpc".into(), + options: TargetOptions { + endian: Endian::Big, + features: "+secure-plt".into(), + mcount: "_mcount".into(), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); + base.max_atomic_width = Some(32); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc-unknown-linux-gnu".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), + arch: "powerpc".into(), + options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_gnuspe.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_gnuspe.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_gnuspe.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_gnuspe.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,22 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-mspe"]); + base.max_atomic_width = Some(32); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc-unknown-linux-gnuspe".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), + arch: "powerpc".into(), + options: TargetOptions { + abi: "spe".into(), + endian: Endian::Big, + mcount: "_mcount".into(), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_musl.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); + base.max_atomic_width = Some(32); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc-unknown-linux-musl".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), + arch: "powerpc".into(), + options: TargetOptions { endian: Endian::Big, mcount: "_mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_netbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::netbsd::opts(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); + base.max_atomic_width = Some(32); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc-unknown-netbsd".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), + arch: "powerpc".into(), + options: TargetOptions { endian: Endian::Big, mcount: "__mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_unknown_openbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::abi::Endian; +use crate::spec::{base, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::openbsd::opts(); + base.endian = Endian::Big; + base.max_atomic_width = Some(32); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc-unknown-openbsd".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), + arch: "powerpc".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_wrs_vxworks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_wrs_vxworks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_wrs_vxworks.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_wrs_vxworks.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::abi::Endian; +use 
crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::vxworks::opts(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32", "--secure-plt"]); + base.max_atomic_width = Some(32); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc-unknown-linux-gnu".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), + arch: "powerpc".into(), + options: TargetOptions { endian: Endian::Big, features: "+secure-plt".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_wrs_vxworks_spe.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_wrs_vxworks_spe.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_wrs_vxworks_spe.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/powerpc_wrs_vxworks_spe.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,23 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::vxworks::opts(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-mspe", "--secure-plt"]); + base.max_atomic_width = Some(32); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc-unknown-linux-gnuspe".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(), + arch: "powerpc".into(), + options: TargetOptions { + abi: "spe".into(), + endian: Endian::Big, + // feature msync would disable instruction 'fsync' which is not supported by fsl_p1p2 + features: "+secure-plt,+msync".into(), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, CodeModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "riscv32-unknown-linux-gnu".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), + arch: "riscv32".into(), + options: TargetOptions { + code_model: Some(CodeModel::Medium), + cpu: "generic-rv32".into(), + features: "+m,+a,+f,+d,+c".into(), + llvm_abiname: "ilp32d".into(), + max_atomic_width: Some(32), + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32gc_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, CodeModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "riscv32-unknown-linux-musl".into(), + pointer_width: 32, + data_layout: 
"e-m:e-p:32:32-i64:64-n32-S128".into(), + arch: "riscv32".into(), + options: TargetOptions { + code_model: Some(CodeModel::Medium), + cpu: "generic-rv32".into(), + features: "+m,+a,+f,+d,+c".into(), + llvm_abiname: "ilp32d".into(), + max_atomic_width: Some(32), + ..base::linux_musl::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32i_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32i_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32i_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32i_unknown_none_elf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,23 @@ +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), + llvm_target: "riscv32".into(), + pointer_width: 32, + arch: "riscv32".into(), + + options: TargetOptions { + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + cpu: "generic-rv32".into(), + max_atomic_width: Some(0), + atomic_cas: false, + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + emit_debug_gdb_scripts: false, + eh_frame_header: false, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32im_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32im_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32im_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32im_unknown_none_elf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), + llvm_target: "riscv32".into(), + pointer_width: 32, + arch: "riscv32".into(), + + options: TargetOptions { + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + cpu: "generic-rv32".into(), + max_atomic_width: Some(0), + atomic_cas: false, + features: "+m".into(), + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + emit_debug_gdb_scripts: false, + eh_frame_header: false, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_esp_espidf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_esp_espidf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_esp_espidf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_esp_espidf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,31 @@ +use crate::spec::{cvs, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), + llvm_target: "riscv32".into(), + pointer_width: 32, + arch: "riscv32".into(), + + options: TargetOptions { + families: cvs!["unix"], + os: "espidf".into(), + env: "newlib".into(), + vendor: "espressif".into(), + linker: Some("riscv32-esp-elf-gcc".into()), 
+ cpu: "generic-rv32".into(), + + // As RiscV32IMAC architecture does natively support atomics, + // automatically enable the support for the Rust STD library. + max_atomic_width: Some(32), + atomic_cas: true, + + features: "+m,+a,+c".into(), + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + emit_debug_gdb_scripts: false, + eh_frame_header: false, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_unknown_none_elf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,23 @@ +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), + llvm_target: "riscv32".into(), + pointer_width: 32, + arch: "riscv32".into(), + + options: TargetOptions { + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + cpu: "generic-rv32".into(), + max_atomic_width: Some(32), + features: "+m,+a,+c".into(), + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + emit_debug_gdb_scripts: false, + eh_frame_header: false, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_unknown_xous_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_unknown_xous_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_unknown_xous_elf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imac_unknown_xous_elf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,22 @@ +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), + llvm_target: "riscv32".into(), + pointer_width: 32, + arch: "riscv32".into(), + + options: TargetOptions { + os: "xous".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + cpu: "generic-rv32".into(), + max_atomic_width: Some(32), + features: "+m,+a,+c".into(), + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imc_esp_espidf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imc_esp_espidf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imc_esp_espidf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imc_esp_espidf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,34 @@ +use crate::spec::{cvs, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), + llvm_target: "riscv32".into(), + pointer_width: 32, + arch: "riscv32".into(), + + options: TargetOptions { + families: cvs!["unix"], + os: "espidf".into(), + env: 
"newlib".into(), + vendor: "espressif".into(), + linker: Some("riscv32-esp-elf-gcc".into()), + cpu: "generic-rv32".into(), + + // While the RiscV32IMC architecture does not natively support atomics, ESP-IDF does support + // the __atomic* and __sync* GCC builtins, so setting `max_atomic_width` to `Some(32)` + // and `atomic_cas` to `true` will cause the compiler to emit libcalls to these builtins. + // + // Support for atomics is necessary for the Rust STD library, which is supported by the ESP-IDF framework. + max_atomic_width: Some(32), + atomic_cas: true, + + features: "+m,+c".into(), + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + emit_debug_gdb_scripts: false, + eh_frame_header: false, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imc_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imc_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imc_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv32imc_unknown_none_elf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + data_layout: "e-m:e-p:32:32-i64:64-n32-S128".into(), + llvm_target: "riscv32".into(), + pointer_width: 32, + arch: "riscv32".into(), + + options: TargetOptions { + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + cpu: "generic-rv32".into(), + max_atomic_width: Some(0), + atomic_cas: false, + features: "+m,+c".into(), + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + emit_debug_gdb_scripts: false, + eh_frame_header: false, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64_linux_android.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64_linux_android.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64_linux_android.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64_linux_android.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, CodeModel, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "riscv64-linux-android".into(), + pointer_width: 64, + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + arch: "riscv64".into(), + options: TargetOptions { + code_model: Some(CodeModel::Medium), + cpu: "generic-rv64".into(), + features: "+m,+a,+f,+d,+c,+zba,+zbb,+zbs,+v".into(), + llvm_abiname: "lp64d".into(), + supported_sanitizers: SanitizerSet::ADDRESS, + max_atomic_width: Some(64), + ..base::android::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_freebsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, 
CodeModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "riscv64-unknown-freebsd".into(), + pointer_width: 64, + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + arch: "riscv64".into(), + options: TargetOptions { + code_model: Some(CodeModel::Medium), + cpu: "generic-rv64".into(), + features: "+m,+a,+f,+d,+c".into(), + llvm_abiname: "lp64d".into(), + max_atomic_width: Some(64), + ..base::freebsd::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_fuchsia.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_fuchsia.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_fuchsia.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_fuchsia.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, CodeModel, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "riscv64-unknown-fuchsia".into(), + pointer_width: 64, + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + arch: "riscv64".into(), + options: TargetOptions { + code_model: Some(CodeModel::Medium), + cpu: "generic-rv64".into(), + features: "+m,+a,+f,+d,+c".into(), + llvm_abiname: "lp64d".into(), + max_atomic_width: Some(64), + supported_sanitizers: SanitizerSet::SHADOWCALLSTACK, + ..base::fuchsia::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_hermit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_hermit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_hermit.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_hermit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::spec::{base, CodeModel, RelocModel, Target, TargetOptions, TlsModel}; + +pub fn target() -> Target { + Target { + llvm_target: "riscv64-unknown-hermit".into(), + pointer_width: 64, + arch: "riscv64".into(), + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + options: TargetOptions { + cpu: "generic-rv64".into(), + features: "+m,+a,+f,+d,+c".into(), + relocation_model: RelocModel::Pic, + code_model: Some(CodeModel::Medium), + tls_model: TlsModel::LocalExec, + max_atomic_width: Some(64), + llvm_abiname: "lp64d".into(), + ..base::hermit::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, CodeModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "riscv64-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + arch: "riscv64".into(), + options: TargetOptions { + code_model: Some(CodeModel::Medium), + cpu: "generic-rv64".into(), + features: "+m,+a,+f,+d,+c".into(), 
+ llvm_abiname: "lp64d".into(), + max_atomic_width: Some(64), + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, CodeModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "riscv64-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + arch: "riscv64".into(), + options: TargetOptions { + code_model: Some(CodeModel::Medium), + cpu: "generic-rv64".into(), + features: "+m,+a,+f,+d,+c".into(), + llvm_abiname: "lp64d".into(), + max_atomic_width: Some(64), + ..base::linux_musl::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_netbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, CodeModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "riscv64-unknown-netbsd".into(), + pointer_width: 64, + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + arch: "riscv64".into(), + options: TargetOptions { + code_model: Some(CodeModel::Medium), + cpu: "generic-rv64".into(), + features: "+m,+a,+f,+d,+c".into(), + llvm_abiname: "lp64d".into(), + max_atomic_width: Some(64), + mcount: "__mcount".into(), + ..base::netbsd::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_none_elf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,28 @@ +use crate::spec::SanitizerSet; +use crate::spec::{Cc, CodeModel, LinkerFlavor, Lld, PanicStrategy}; +use crate::spec::{RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + llvm_target: "riscv64".into(), + pointer_width: 64, + arch: "riscv64".into(), + + options: TargetOptions { + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + llvm_abiname: "lp64d".into(), + cpu: "generic-rv64".into(), + max_atomic_width: Some(64), + features: "+m,+a,+f,+d,+c".into(), + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + code_model: Some(CodeModel::Medium), + emit_debug_gdb_scripts: false, + eh_frame_header: false, + supported_sanitizers: SanitizerSet::KERNELADDRESS, + ..Default::default() + }, + } +} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_openbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, CodeModel, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "riscv64-unknown-openbsd".into(), + pointer_width: 64, + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + arch: "riscv64".into(), + options: TargetOptions { + code_model: Some(CodeModel::Medium), + cpu: "generic-rv64".into(), + features: "+m,+a,+f,+d,+c".into(), + llvm_abiname: "lp64d".into(), + max_atomic_width: Some(64), + ..base::openbsd::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64imac_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64imac_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64imac_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/riscv64imac_unknown_none_elf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::spec::{Cc, CodeModel, LinkerFlavor, Lld, PanicStrategy}; +use crate::spec::{RelocModel, SanitizerSet, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(), + llvm_target: "riscv64".into(), + pointer_width: 64, + arch: "riscv64".into(), + + options: TargetOptions { + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + cpu: "generic-rv64".into(), + max_atomic_width: Some(64), + features: "+m,+a,+c".into(), + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + code_model: Some(CodeModel::Medium), + emit_debug_gdb_scripts: false, + eh_frame_header: false, + supported_sanitizers: SanitizerSet::KERNELADDRESS, + ..Default::default() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/s390x_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/s390x_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/s390x_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/s390x_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::abi::Endian; +use crate::spec::{base, SanitizerSet, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.endian = Endian::Big; + // z10 is the oldest CPU supported by LLVM + base.cpu = "z10".into(); + // FIXME: The ABI implementation in cabi_s390x.rs is for now hard-coded to assume the no-vector + // ABI. Pass the -vector feature string to LLVM to respect this assumption. On LLVM < 16, we + // also strip v128 from the data_layout below to match the older LLVM's expectation. 
+ base.features = "-vector".into(); + base.max_atomic_width = Some(64); + base.min_global_align = Some(16); + base.stack_probes = StackProbeType::Inline; + base.supported_sanitizers = + SanitizerSet::ADDRESS | SanitizerSet::LEAK | SanitizerSet::MEMORY | SanitizerSet::THREAD; + + Target { + llvm_target: "s390x-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: "E-m:e-i1:8:16-i8:8:16-i64:64-f128:64-v128:64-a:8:16-n32:64".into(), + arch: "s390x".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/s390x_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/s390x_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/s390x_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/s390x_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +use crate::abi::Endian; +use crate::spec::{base, SanitizerSet, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.endian = Endian::Big; + // z10 is the oldest CPU supported by LLVM + base.cpu = "z10".into(); + // FIXME: The ABI implementation in cabi_s390x.rs is for now hard-coded to assume the no-vector + // ABI. Pass the -vector feature string to LLVM to respect this assumption. On LLVM < 16, we + // also strip v128 from the data_layout below to match the older LLVM's expectation. + base.features = "-vector".into(); + base.max_atomic_width = Some(64); + base.min_global_align = Some(16); + base.static_position_independent_executables = true; + base.stack_probes = StackProbeType::Inline; + base.supported_sanitizers = + SanitizerSet::ADDRESS | SanitizerSet::LEAK | SanitizerSet::MEMORY | SanitizerSet::THREAD; + + Target { + llvm_target: "s390x-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "E-m:e-i1:8:16-i8:8:16-i64:64-f128:64-v128:64-a:8:16-n32:64".into(), + arch: "s390x".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::abi::Endian; +use crate::spec::{base, Target}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.endian = Endian::Big; + base.cpu = "v9".into(); + base.max_atomic_width = Some(64); + + Target { + llvm_target: "sparc64-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: "E-m:e-i64:64-n32:64-S128".into(), + arch: "sparc64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_netbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, 
LinkerFlavor, Lld, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::netbsd::opts(); + base.cpu = "v9".into(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.max_atomic_width = Some(64); + + Target { + llvm_target: "sparc64-unknown-netbsd".into(), + pointer_width: 64, + data_layout: "E-m:e-i64:64-n32:64-S128".into(), + arch: "sparc64".into(), + options: TargetOptions { endian: Endian::Big, mcount: "__mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc64_unknown_openbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, Target}; + +pub fn target() -> Target { + let mut base = base::openbsd::opts(); + base.endian = Endian::Big; + base.cpu = "v9".into(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.max_atomic_width = Some(64); + + Target { + llvm_target: "sparc64-unknown-openbsd".into(), + pointer_width: 64, + data_layout: "E-m:e-i64:64-n32:64-S128".into(), + arch: "sparc64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, Target}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.endian = Endian::Big; + base.cpu = "v9".into(); + base.max_atomic_width = Some(32); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-mv8plus"]); + + Target { + llvm_target: "sparc-unknown-linux-gnu".into(), + pointer_width: 32, + data_layout: "E-m:e-p:32:32-i64:64-f128:64-n32-S64".into(), + arch: "sparc".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc_unknown_none_elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc_unknown_none_elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc_unknown_none_elf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparc_unknown_none_elf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +use crate::abi::Endian; +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + let options = TargetOptions { + linker_flavor: LinkerFlavor::Gnu(Cc::Yes, Lld::No), + linker: Some("sparc-elf-gcc".into()), + endian: Endian::Big, + cpu: "v7".into(), + abi: "elf".into(), + max_atomic_width: Some(32), + atomic_cas: true, + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + no_default_libraries: false, + emit_debug_gdb_scripts: false, + 
eh_frame_header: false, + ..Default::default() + }; + Target { + data_layout: "E-m:e-p:32:32-i64:64-f128:64-n32-S64".into(), + llvm_target: "sparc-unknown-none-elf".into(), + pointer_width: 32, + arch: "sparc".into(), + options, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparcv9_sun_solaris.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparcv9_sun_solaris.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparcv9_sun_solaris.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/sparcv9_sun_solaris.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Target}; + +pub fn target() -> Target { + let mut base = base::solaris::opts(); + base.endian = Endian::Big; + base.add_pre_link_args(LinkerFlavor::Unix(Cc::Yes), &["-m64"]); + // llvm calls this "v9" + base.cpu = "v9".into(); + base.vendor = "sun".into(); + base.max_atomic_width = Some(64); + + Target { + llvm_target: "sparcv9-sun-solaris".into(), + pointer_width: 64, + data_layout: "E-m:e-i64:64-n32:64-S128".into(), + // Use "sparc64" instead of "sparcv9" here, since the former is already + // used widely in the source base. If we ever needed ABI + // differentiation from the sparc64, we could, but that would probably + // just be confusing. + arch: "sparc64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv4t_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv4t_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv4t_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv4t_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,59 @@ +//! Targets the ARMv4T, with code as `t32` code by default. +//! +//! Primarily of use for the GBA, but usable with other devices too. +//! +//! Please ping @Lokathor if changes are needed. +//! +//! **Important:** This target profile **does not** specify a linker script. You +//! just get the default link script when you build a binary for this target. +//! The default link script is very likely wrong, so you should use +//! `-Clink-arg=-Tmy_script.ld` to override that with a correct linker script. + +use crate::spec::{base, PanicStrategy, RelocModel, Target, TargetOptions}; +use crate::spec::{cvs, FramePointer}; + +pub fn target() -> Target { + Target { + llvm_target: "thumbv4t-none-eabi".into(), + pointer_width: 32, + arch: "arm".into(), + /* Data layout args are '-' separated: + * little endian + * stack is 64-bit aligned (EABI) + * pointers are 32-bit + * i64 must be 64-bit aligned (EABI) + * mangle names with ELF style + * native integers are 32-bit + * All other elements are default + */ + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + options: TargetOptions { + abi: "eabi".into(), + + // extra args passed to the external assembler (assuming `arm-none-eabi-as`): + // * activate t32/a32 interworking + // * use arch ARMv4T + // * use little-endian + asm_args: cvs!["-mthumb-interwork", "-march=armv4t", "-mlittle-endian",], + + // minimum extra features, these cannot be disabled via -C + // Also force-enable 32-bit atomics, which allows the use of atomic load/store only. 
+ // The resulting atomics are ABI incompatible with atomics backed by libatomic. + features: "+soft-float,+strict-align,+atomics-32".into(), + + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + // suggested from thumb_base, rust-lang/rust#44993. + emit_debug_gdb_scripts: false, + frame_pointer: FramePointer::MayOmit, + + main_needs_argc_argv: false, + + // don't have atomic compare-and-swap + atomic_cas: false, + has_thumb_interworking: true, + + ..base::thumb::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv5te_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv5te_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv5te_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv5te_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,41 @@ +//! Targets the ARMv5TE, with code as `t32` code by default. + +use crate::spec::{base, cvs, FramePointer, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "thumbv5te-none-eabi".into(), + pointer_width: 32, + arch: "arm".into(), + /* Data layout args are '-' separated: + * little endian + * stack is 64-bit aligned (EABI) + * pointers are 32-bit + * i64 must be 64-bit aligned (EABI) + * mangle names with ELF style + * native integers are 32-bit + * All other elements are default + */ + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + + options: TargetOptions { + abi: "eabi".into(), + // extra args passed to the external assembler (assuming `arm-none-eabi-as`): + // * activate t32/a32 interworking + // * use arch ARMv5TE + // * use little-endian + asm_args: cvs!["-mthumb-interwork", "-march=armv5te", "-mlittle-endian",], + // minimum extra features, these cannot be disabled via -C + // Also force-enable 32-bit atomics, which allows the use of atomic load/store only. + // The resulting atomics are ABI incompatible with atomics backed by libatomic. + features: "+soft-float,+strict-align,+atomics-32".into(), + frame_pointer: FramePointer::MayOmit, + main_needs_argc_argv: false, + // don't have atomic compare-and-swap + atomic_cas: false, + has_thumb_interworking: true, + + ..base::thumb::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv6m_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv6m_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv6m_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv6m_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,25 @@ +// Targets the Cortex-M0, Cortex-M0+ and Cortex-M1 processors (ARMv6-M architecture) + +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "thumbv6m-none-eabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + abi: "eabi".into(), + // The ARMv6-M architecture doesn't support unaligned loads/stores so we disable them + // with +strict-align. + // Also force-enable 32-bit atomics, which allows the use of atomic load/store only. + // The resulting atomics are ABI incompatible with atomics backed by libatomic. 
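// A minimal downstream sketch (hypothetical helpers, not part of this patch):
// with `atomic_cas: false` plus the force-enabled `atomics-32` feature, core
// only provides atomic load/store on this target; read-modify-write operations
// such as `fetch_add` or `compare_exchange` are unavailable, so shared state
// has to be expressed in a store/load style.
use core::sync::atomic::{AtomicU32, Ordering};

static LAST_TICK: AtomicU32 = AtomicU32::new(0);

fn record_tick(now: u32) {
    LAST_TICK.store(now, Ordering::Relaxed); // plain atomic store, no CAS needed
}

fn last_tick() -> u32 {
    LAST_TICK.load(Ordering::Relaxed)
}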
+ features: "+strict-align,+atomics-32".into(), + // There are no atomic CAS instructions available in the instruction set of the ARMv6-M + // architecture + atomic_cas: false, + ..base::thumb::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7a_pc_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7a_pc_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7a_pc_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7a_pc_windows_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,28 @@ +use crate::spec::{base, LinkerFlavor, Lld, PanicStrategy, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::windows_msvc::opts(); + // Prevent error LNK2013: BRANCH24(T) fixup overflow + // The LBR optimization tries to eliminate branch islands, + // but if the displacement is larger than can fit + // in the instruction, this error will occur. The linker + // should be smart enough to insert branch islands only + // where necessary, but this is not the observed behavior. + // Disabling the LBR optimization works around the issue. + base.add_pre_link_args(LinkerFlavor::Msvc(Lld::No), &["/OPT:NOLBR"]); + + Target { + llvm_target: "thumbv7a-pc-windows-msvc".into(), + pointer_width: 32, + data_layout: "e-m:w-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + features: "+vfp3,+neon".into(), + max_atomic_width: Some(64), + // FIXME(jordanrh): use PanicStrategy::Unwind when SEH is + // implemented for windows/arm in LLVM + panic_strategy: PanicStrategy::Abort, + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7a_uwp_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7a_uwp_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7a_uwp_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7a_uwp_windows_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, PanicStrategy, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "thumbv7a-pc-windows-msvc".into(), + pointer_width: 32, + data_layout: "e-m:w-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + features: "+vfp3,+neon".into(), + max_atomic_width: Some(64), + // FIXME(jordanrh): use PanicStrategy::Unwind when SEH is + // implemented for windows/arm in LLVM + panic_strategy: PanicStrategy::Abort, + ..base::windows_uwp_msvc::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +// Targets the Cortex-M4 and Cortex-M7 processors (ARMv7E-M) +// +// This target assumes that the device doesn't have a FPU (Floating Point Unit) and lowers all the +// floating point operations to software routines 
(intrinsics). +// +// As such, this target uses the "soft" calling convention (ABI) where floating point values are +// passed to/from subroutines via general purpose registers (R0, R1, etc.). +// +// To opt-in to hardware accelerated floating point operations, you can use, for example, +// `-C target-feature=+vfp4` or `-C target-cpu=cortex-m4`. + +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "thumbv7em-none-eabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + abi: "eabi".into(), + max_atomic_width: Some(32), + ..base::thumb::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7em_none_eabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,36 @@ +// Targets the Cortex-M4F and Cortex-M7F processors (ARMv7E-M) +// +// This target assumes that the device does have a FPU (Floating Point Unit) and lowers all (single +// precision) floating point operations to hardware instructions. +// +// Additionally, this target uses the "hard" floating convention (ABI) where floating point values +// are passed to/from subroutines via FPU registers (S0, S1, D0, D1, etc.). +// +// To opt into double precision hardware support, use the `-C target-feature=+fp64` flag. + +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "thumbv7em-none-eabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + abi: "eabihf".into(), + // `+vfp4` is the lowest common denominator between the Cortex-M4 (vfp4-16) and the + // Cortex-M7 (vfp5) + // `-d32` both the Cortex-M4 and the Cortex-M7 only have 16 double-precision registers + // available + // `-fp64` The Cortex-M4 only supports single precision floating point operations + // whereas in the Cortex-M7 double precision is optional + // + // Reference: + // ARMv7-M Architecture Reference Manual - A2.5 The optional floating-point extension + features: "+vfp4,-d32,-fp64".into(), + max_atomic_width: Some(32), + ..base::thumb::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7m_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7m_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7m_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7m_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +// Targets the Cortex-M3 processor (ARMv7-M) + +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "thumbv7m-none-eabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + abi: "eabi".into(), + max_atomic_width: Some(32), + ..base::thumb::opts() + }, + } +} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_linux_androideabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_linux_androideabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_linux_androideabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_linux_androideabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, Target, TargetOptions}; + +// This target if is for the Android v7a ABI in thumb mode with +// NEON unconditionally enabled and, therefore, with 32 FPU registers +// enabled as well. See section A2.6.2 on page A2-56 in +// https://web.archive.org/web/20210307234416/https://static.docs.arm.com/ddi0406/cd/DDI0406C_d_armv7ar_arm.pdf + +// See https://developer.android.com/ndk/guides/abis.html#v7a +// for target ABI requirements. + +pub fn target() -> Target { + let mut base = base::android::opts(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-march=armv7-a"]); + Target { + llvm_target: "armv7-none-linux-android".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabi".into(), + features: "+v7,+thumb-mode,+thumb2,+vfp3,+neon".into(), + max_atomic_width: Some(64), + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_unknown_linux_gnueabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_unknown_linux_gnueabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_unknown_linux_gnueabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_unknown_linux_gnueabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,23 @@ +use crate::spec::{base, Target, TargetOptions}; + +// This target is for glibc Linux on ARMv7 with thumb mode enabled +// (for consistency with Android and Debian-based distributions) +// and with NEON unconditionally enabled and, therefore, with 32 FPU +// registers enabled as well. 
See section A2.6.2 on page A2-56 in +// https://web.archive.org/web/20210307234416/https://static.docs.arm.com/ddi0406/cd/DDI0406C_d_armv7ar_arm.pdf + +pub fn target() -> Target { + Target { + llvm_target: "armv7-unknown-linux-gnueabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + options: TargetOptions { + abi: "eabihf".into(), + // Info about features at https://wiki.debian.org/ArmHardFloatPort + features: "+v7,+thumb-mode,+thumb2,+vfp3,+neon".into(), + max_atomic_width: Some(64), + ..base::linux_gnu::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_unknown_linux_musleabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_unknown_linux_musleabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_unknown_linux_musleabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv7neon_unknown_linux_musleabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,29 @@ +use crate::spec::{base, Target, TargetOptions}; + +// This target is for musl Linux on ARMv7 with thumb mode enabled +// (for consistency with Android and Debian-based distributions) +// and with NEON unconditionally enabled and, therefore, with 32 FPU +// registers enabled as well. See section A2.6.2 on page A2-56 in +// https://web.archive.org/web/20210307234416/https://static.docs.arm.com/ddi0406/cd/DDI0406C_d_armv7ar_arm.pdf + +pub fn target() -> Target { + Target { + // It's important we use "gnueabihf" and not "musleabihf" here. LLVM + // uses it to determine the calling convention and float ABI, and LLVM + // doesn't support the "musleabihf" value. + llvm_target: "armv7-unknown-linux-gnueabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + // Most of these settings are copied from the thumbv7neon_unknown_linux_gnueabihf + // target. + options: TargetOptions { + abi: "eabihf".into(), + features: "+v7,+thumb-mode,+thumb2,+vfp3,+neon".into(), + max_atomic_width: Some(64), + mcount: "\u{1}mcount".into(), + ..base::linux_musl::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_base_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_base_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_base_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_base_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +// Targets the Cortex-M23 processor (Baseline ARMv8-M) + +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "thumbv8m.base-none-eabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + abi: "eabi".into(), + // ARMv8-M baseline doesn't support unaligned loads/stores so we disable them + // with +strict-align. 
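// A minimal downstream sketch (hypothetical struct, not part of this patch):
// because `+strict-align` is enabled, misaligned loads fault on this target,
// so fields of packed, wire-format-style layouts should be copied out with
// `read_unaligned` rather than referenced directly.
#[repr(C, packed)]
struct PacketHeader {
    kind: u8,
    length: u32, // only 1-byte aligned inside the packed struct
}

fn packet_length(h: &PacketHeader) -> u32 {
    // References to packed fields are rejected by the compiler; take a raw
    // pointer and perform an explicitly unaligned read instead.
    unsafe { core::ptr::addr_of!(h.length).read_unaligned() }
}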
+ features: "+strict-align".into(), + max_atomic_width: Some(32), + ..base::thumb::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +// Targets the Cortex-M33 processor (Armv8-M Mainline architecture profile), +// without the Floating Point extension. + +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "thumbv8m.main-none-eabi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + abi: "eabi".into(), + max_atomic_width: Some(32), + ..base::thumb::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/thumbv8m_main_none_eabihf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,25 @@ +// Targets the Cortex-M33 processor (Armv8-M Mainline architecture profile), +// with the Floating Point extension. + +use crate::spec::{base, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "thumbv8m.main-none-eabihf".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), + arch: "arm".into(), + + options: TargetOptions { + abi: "eabihf".into(), + // If the Floating Point extension is implemented in the Cortex-M33 + // processor, the Cortex-M33 Technical Reference Manual states that + // the FPU uses the FPv5 architecture, single-precision instructions + // and 16 D registers. + // These parameters map to the following LLVM features. + features: "+fp-armv8,-fp64,-d32".into(), + max_atomic_width: Some(32), + ..base::thumb::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_unknown_emscripten.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_unknown_emscripten.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_unknown_emscripten.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_unknown_emscripten.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,35 @@ +use crate::spec::{ + base, cvs, LinkArgs, LinkerFlavor, PanicStrategy, RelocModel, Target, TargetOptions, +}; + +pub fn target() -> Target { + // Reset flags for non-Em flavors back to empty to satisfy sanity checking tests. 
+ let pre_link_args = LinkArgs::new(); + let post_link_args = TargetOptions::link_args( + LinkerFlavor::EmCc, + &["-sABORTING_MALLOC=0", "-Wl,--fatal-warnings"], + ); + + let opts = TargetOptions { + os: "emscripten".into(), + linker_flavor: LinkerFlavor::EmCc, + // emcc emits two files - a .js file to instantiate the wasm and supply platform + // functionality, and a .wasm file. + exe_suffix: ".js".into(), + linker: None, + pre_link_args, + post_link_args, + relocation_model: RelocModel::Pic, + panic_strategy: PanicStrategy::Unwind, + no_default_libraries: false, + families: cvs!["unix", "wasm"], + ..base::wasm::options() + }; + Target { + llvm_target: "wasm32-unknown-emscripten".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p10:8:8-p20:8:8-i64:64-f128:64-n32:64-S128-ni:1:10:20".into(), + arch: "wasm32".into(), + options: opts, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_unknown_unknown.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_unknown_unknown.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_unknown_unknown.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_unknown_unknown.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,55 @@ +//! A "bare wasm" target representing a WebAssembly output that makes zero +//! assumptions about its environment. +//! +//! The `wasm32-unknown-unknown` target is intended to encapsulate use cases +//! that do not rely on any imported functionality. The binaries generated are +//! entirely self-contained by default when using the standard library. Although +//! the standard library is available, most of it returns an error immediately +//! (e.g. trying to create a TCP stream or something like that). +//! +//! This target is more or less managed by the Rust and WebAssembly Working +//! Group nowadays at . + +use crate::spec::abi::Abi; +use crate::spec::{base, Cc, LinkerFlavor, Target}; + +pub fn target() -> Target { + let mut options = base::wasm::options(); + options.os = "unknown".into(); + + // This is a default for backwards-compatibility with the original + // definition of this target oh-so-long-ago. Once the "wasm" ABI is + // stable and the wasm-bindgen project has switched to using it then there's + // no need for this and it can be removed. + // + // Currently this is the reason that this target's ABI is mismatched with + // clang's ABI. This means that, in the limit, you can't merge C and Rust + // code on this target due to this ABI mismatch. + options.default_adjusted_cabi = Some(Abi::Wasm); + + options.add_pre_link_args( + LinkerFlavor::WasmLld(Cc::No), + &[ + // For now this target just never has an entry symbol no matter the output + // type, so unconditionally pass this. 
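// (Editor's note, not part of the upstream patch: `--no-entry` asks wasm-ld not
// to require or emit a `_start`-style entry symbol, which is why this bare
// target can link cdylib-style modules that only export functions.)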
+ "--no-entry", + ], + ); + options.add_pre_link_args( + LinkerFlavor::WasmLld(Cc::Yes), + &[ + // Make sure clang uses LLD as its linker and is configured appropriately + // otherwise + "--target=wasm32-unknown-unknown", + "-Wl,--no-entry", + ], + ); + + Target { + llvm_target: "wasm32-unknown-unknown".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p10:8:8-p20:8:8-i64:64-n32:64-S128-ni:1:10:20".into(), + arch: "wasm32".into(), + options, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_wasi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_wasi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_wasi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_wasi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,119 @@ +//! The `wasm32-wasi` target is a new and still (as of April 2019) an +//! experimental target. The definition in this file is likely to be tweaked +//! over time and shouldn't be relied on too much. +//! +//! The `wasi` target is a proposal to define a standardized set of syscalls +//! that WebAssembly files can interoperate with. This set of syscalls is +//! intended to empower WebAssembly binaries with native capabilities such as +//! filesystem access, network access, etc. +//! +//! You can see more about the proposal at . +//! +//! The Rust target definition here is interesting in a few ways. We want to +//! serve two use cases here with this target: +//! +//! * First, we want Rust usage of the target to be as hassle-free as possible, +//! ideally avoiding the need to configure and install a local wasm32-wasi +//! toolchain. +//! +//! * Second, one of the primary use cases of LLVM's new wasm backend and the +//! wasm support in LLD is that any compiled language can interoperate with +//! any other. To that the `wasm32-wasi` target is the first with a viable C +//! standard library and sysroot common definition, so we want Rust and C/C++ +//! code to interoperate when compiled to `wasm32-unknown-unknown`. +//! +//! You'll note, however, that the two goals above are somewhat at odds with one +//! another. To attempt to solve both use cases in one go we define a target +//! that (ab)uses the `crt-static` target feature to indicate which one you're +//! in. +//! +//! ## No interop with C required +//! +//! By default the `crt-static` target feature is enabled, and when enabled +//! this means that the bundled version of `libc.a` found in `liblibc.rlib` +//! is used. This isn't intended really for interoperation with a C because it +//! may be the case that Rust's bundled C library is incompatible with a +//! foreign-compiled C library. In this use case, though, we use `rust-lld` and +//! some copied crt startup object files to ensure that you can download the +//! wasi target for Rust and you're off to the races, no further configuration +//! necessary. +//! +//! All in all, by default, no external dependencies are required. You can +//! compile `wasm32-wasi` binaries straight out of the box. You can't, however, +//! reliably interoperate with C code in this mode (yet). +//! +//! ## Interop with C required +//! +//! For the second goal we repurpose the `target-feature` flag, meaning that +//! you'll need to do a few things to have C/Rust code interoperate. +//! +//! 1. All Rust code needs to be compiled with `-C target-feature=-crt-static`, +//! 
indicating that the bundled C standard library in the Rust sysroot will +//! not be used. +//! +//! 2. If you're using rustc to build a linked artifact then you'll need to +//! specify `-C linker` to a `clang` binary that supports +//! `wasm32-wasi` and is configured with the `wasm32-wasi` sysroot. This +//! will cause Rust code to be linked against the libc.a that the specified +//! `clang` provides. +//! +//! 3. If you're building a staticlib and integrating Rust code elsewhere, then +//! compiling with `-C target-feature=-crt-static` is all you need to do. +//! +//! You can configure the linker via Cargo using the +//! `CARGO_TARGET_WASM32_WASI_LINKER` env var. Be sure to also set +//! `CC_wasm32-wasi` if any crates in the dependency graph are using the `cc` +//! crate. +//! +//! ## Remember, this is all in flux +//! +//! The wasi target is **very** new in its specification. It's likely going to +//! be a long effort to get it standardized and stable. We'll be following it as +//! best we can with this target. Don't start relying on too much here unless +//! you know what you're getting in to! + +use crate::spec::crt_objects; +use crate::spec::LinkSelfContainedDefault; +use crate::spec::{base, Cc, LinkerFlavor, Target}; + +pub fn target() -> Target { + let mut options = base::wasm::options(); + + options.os = "wasi".into(); + options.add_pre_link_args(LinkerFlavor::WasmLld(Cc::Yes), &["--target=wasm32-wasi"]); + + options.pre_link_objects_self_contained = crt_objects::pre_wasi_self_contained(); + options.post_link_objects_self_contained = crt_objects::post_wasi_self_contained(); + + // FIXME: Figure out cases in which WASM needs to link with a native toolchain. + options.link_self_contained = LinkSelfContainedDefault::True; + + // Right now this is a bit of a workaround but we're currently saying that + // the target by default has a static crt which we're taking as a signal + // for "use the bundled crt". If that's turned off then the system's crt + // will be used, but this means that default usage of this target doesn't + // need an external compiler but it's still interoperable with an external + // compiler if configured correctly. + options.crt_static_default = true; + options.crt_static_respected = true; + + // Allow `+crt-static` to create a "cdylib" output which is just a wasm file + // without a main function. + options.crt_static_allows_dylibs = true; + + // WASI's `sys::args::init` function ignores its arguments; instead, + // `args::args()` makes the WASI API calls itself. + options.main_needs_argc_argv = false; + + // And, WASI mangles the name of "main" to distinguish between different + // signatures. + options.entry_name = "__main_void".into(); + + Target { + llvm_target: "wasm32-wasi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p10:8:8-p20:8:8-i64:64-n32:64-S128-ni:1:10:20".into(), + arch: "wasm32".into(), + options, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_wasi_preview1_threads.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_wasi_preview1_threads.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_wasi_preview1_threads.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm32_wasi_preview1_threads.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,133 @@ +//! The `wasm32-wasi-preview1-threads` target is a new and still (as of July 2023) an +//! experimental target. 
The definition in this file is likely to be tweaked +//! over time and shouldn't be relied on too much. +//! +//! The `wasi-threads` target is a proposal to define a standardized set of syscalls +//! that WebAssembly files can interoperate with. This set of syscalls is +//! intended to empower WebAssembly binaries with native capabilities such as +//! threads, filesystem access, network access, etc. +//! +//! You can see more about the proposal at . +//! +//! The Rust target definition here is interesting in a few ways. We want to +//! serve two use cases here with this target: +//! +//! * First, we want Rust usage of the target to be as hassle-free as possible, +//! ideally avoiding the need to configure and install a local wasm32-wasi-preview1-threads +//! toolchain. +//! +//! * Second, one of the primary use cases of LLVM's new wasm backend and the +//! wasm support in LLD is that any compiled language can interoperate with +//! any other. To that the `wasm32-wasi-preview1-threads` target is the first with a viable C +//! standard library and sysroot common definition, so we want Rust and C/C++ +//! code to interoperate when compiled to `wasm32-unknown-unknown`. +//! +//! You'll note, however, that the two goals above are somewhat at odds with one +//! another. To attempt to solve both use cases in one go we define a target +//! that (ab)uses the `crt-static` target feature to indicate which one you're +//! in. +//! +//! ## No interop with C required +//! +//! By default the `crt-static` target feature is enabled, and when enabled +//! this means that the bundled version of `libc.a` found in `liblibc.rlib` +//! is used. This isn't intended really for interoperation with a C because it +//! may be the case that Rust's bundled C library is incompatible with a +//! foreign-compiled C library. In this use case, though, we use `rust-lld` and +//! some copied crt startup object files to ensure that you can download the +//! wasi target for Rust and you're off to the races, no further configuration +//! necessary. +//! +//! All in all, by default, no external dependencies are required. You can +//! compile `wasm32-wasi-preview1-threads` binaries straight out of the box. You can't, however, +//! reliably interoperate with C code in this mode (yet). +//! +//! ## Interop with C required +//! +//! For the second goal we repurpose the `target-feature` flag, meaning that +//! you'll need to do a few things to have C/Rust code interoperate. +//! +//! 1. All Rust code needs to be compiled with `-C target-feature=-crt-static`, +//! indicating that the bundled C standard library in the Rust sysroot will +//! not be used. +//! +//! 2. If you're using rustc to build a linked artifact then you'll need to +//! specify `-C linker` to a `clang` binary that supports +//! `wasm32-wasi-preview1-threads` and is configured with the `wasm32-wasi-preview1-threads` sysroot. This +//! will cause Rust code to be linked against the libc.a that the specified +//! `clang` provides. +//! +//! 3. If you're building a staticlib and integrating Rust code elsewhere, then +//! compiling with `-C target-feature=-crt-static` is all you need to do. +//! +//! You can configure the linker via Cargo using the +//! `CARGO_TARGET_WASM32_WASI_LINKER` env var. Be sure to also set +//! `CC_wasm32-wasi-preview1-threads` if any crates in the dependency graph are using the `cc` +//! crate. +//! +//! ## Remember, this is all in flux +//! +//! The wasi target is **very** new in its specification. It's likely going to +//! 
be a long effort to get it standardized and stable. We'll be following it as +//! best we can with this target. Don't start relying on too much here unless +//! you know what you're getting in to! + +use crate::spec::{base, crt_objects, Cc, LinkSelfContainedDefault, LinkerFlavor, Target}; + +pub fn target() -> Target { + let mut options = base::wasm::options(); + + options.os = "wasi".into(); + + options.add_pre_link_args( + LinkerFlavor::WasmLld(Cc::No), + &["--import-memory", "--export-memory", "--shared-memory"], + ); + options.add_pre_link_args( + LinkerFlavor::WasmLld(Cc::Yes), + &[ + "--target=wasm32-wasi-threads", + "-Wl,--import-memory", + "-Wl,--export-memory,", + "-Wl,--shared-memory", + ], + ); + + options.pre_link_objects_self_contained = crt_objects::pre_wasi_self_contained(); + options.post_link_objects_self_contained = crt_objects::post_wasi_self_contained(); + + // FIXME: Figure out cases in which WASM needs to link with a native toolchain. + options.link_self_contained = LinkSelfContainedDefault::True; + + // Right now this is a bit of a workaround but we're currently saying that + // the target by default has a static crt which we're taking as a signal + // for "use the bundled crt". If that's turned off then the system's crt + // will be used, but this means that default usage of this target doesn't + // need an external compiler but it's still interoperable with an external + // compiler if configured correctly. + options.crt_static_default = true; + options.crt_static_respected = true; + + // Allow `+crt-static` to create a "cdylib" output which is just a wasm file + // without a main function. + options.crt_static_allows_dylibs = true; + + // WASI's `sys::args::init` function ignores its arguments; instead, + // `args::args()` makes the WASI API calls itself. + options.main_needs_argc_argv = false; + + // And, WASI mangles the name of "main" to distinguish between different + // signatures. + options.entry_name = "__main_void".into(); + + options.singlethread = false; + options.features = "+atomics,+bulk-memory,+mutable-globals".into(); + + Target { + llvm_target: "wasm32-wasi".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p10:8:8-p20:8:8-i64:64-n32:64-S128-ni:1:10:20".into(), + arch: "wasm32".into(), + options, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm64_unknown_unknown.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm64_unknown_unknown.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm64_unknown_unknown.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/wasm64_unknown_unknown.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,47 @@ +//! A "bare wasm" target representing a WebAssembly output that makes zero +//! assumptions about its environment. +//! +//! The `wasm64-unknown-unknown` target is intended to encapsulate use cases +//! that do not rely on any imported functionality. The binaries generated are +//! entirely self-contained by default when using the standard library. Although +//! the standard library is available, most of it returns an error immediately +//! (e.g. trying to create a TCP stream or something like that). 
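Editorial aside (an illustrative sketch, not part of this patch): every target file added in this hunk follows the same recipe — fetch a shared `base::*` option set, override the handful of fields that differ, and wrap the result in a `Target`. Assuming the same crate-internal API used by the surrounding files, a hypothetical stripped-down wasm target would look like this:

use crate::spec::{base, Target};

pub fn target() -> Target {
    // Start from the shared wasm defaults and change only what this target needs.
    let mut options = base::wasm::options();
    options.os = "unknown".into();

    Target {
        llvm_target: "wasm32-unknown-unknown".into(),
        pointer_width: 32,
        data_layout: "e-m:e-p:32:32-p10:8:8-p20:8:8-i64:64-n32:64-S128-ni:1:10:20".into(),
        arch: "wasm32".into(),
        options,
    }
}

The other style seen above (e.g. in the thumbv8m files) expresses the same composition with struct-update syntax: `TargetOptions { abi: "eabi".into(), ..base::thumb::opts() }`.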
+ +use crate::spec::{base, Cc, LinkerFlavor, Target}; + +pub fn target() -> Target { + let mut options = base::wasm::options(); + options.os = "unknown".into(); + + options.add_pre_link_args( + LinkerFlavor::WasmLld(Cc::No), + &[ + // For now this target just never has an entry symbol no matter the output + // type, so unconditionally pass this. + "--no-entry", + "-mwasm64", + ], + ); + options.add_pre_link_args( + LinkerFlavor::WasmLld(Cc::Yes), + &[ + // Make sure clang uses LLD as its linker and is configured appropriately + // otherwise + "--target=wasm64-unknown-unknown", + "-Wl,--no-entry", + ], + ); + + // Any engine that implements wasm64 will surely implement the rest of these + // features since they were all merged into the official spec by the time + // wasm64 was designed. + options.features = "+bulk-memory,+mutable-globals,+sign-ext,+nontrapping-fptoint".into(); + + Target { + llvm_target: "wasm64-unknown-unknown".into(), + pointer_width: 64, + data_layout: "e-m:e-p:64:64-p10:8:8-p20:8:8-i64:64-n32:64-S128-ni:1:10:20".into(), + arch: "wasm64".into(), + options, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_darwin.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::spec::base::apple::{macos_llvm_target, opts, Arch}; +use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, SanitizerSet}; +use crate::spec::{StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::X86_64; + let mut base = opts("macos", arch); + base.max_atomic_width = Some(128); // penryn+ supports cmpxchg16b + base.frame_pointer = FramePointer::Always; + base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + base.supported_sanitizers = + SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::LEAK | SanitizerSet::THREAD; + + Target { + // Clang automatically chooses a more specific target based on + // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work + // correctly, we do too. 
+ llvm_target: macos_llvm_target(arch).into(), + pointer_width: 64, + data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: arch.target_arch(), + options: TargetOptions { mcount: "\u{1}mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_ios.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_ios.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_ios.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_ios.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::spec::base::apple::{ios_sim_llvm_target, opts, Arch}; +use crate::spec::{SanitizerSet, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::X86_64_sim; + let mut base = opts("ios", arch); + base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::THREAD; + + Target { + llvm_target: ios_sim_llvm_target(arch).into(), + pointer_width: 64, + data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: arch.target_arch(), + options: TargetOptions { + max_atomic_width: Some(128), + stack_probes: StackProbeType::X86, + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_ios_macabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_ios_macabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_ios_macabi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_ios_macabi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +use crate::spec::base::apple::{opts, Arch}; +use crate::spec::{Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let llvm_target = "x86_64-apple-ios14.0-macabi"; + + let arch = Arch::X86_64_macabi; + let mut base = opts("ios", arch); + base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-target", llvm_target]); + base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::LEAK | SanitizerSet::THREAD; + + Target { + llvm_target: llvm_target.into(), + pointer_width: 64, + data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: arch.target_arch(), + options: TargetOptions { + max_atomic_width: Some(128), + stack_probes: StackProbeType::X86, + ..base + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_tvos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_tvos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_tvos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_tvos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::base::apple::{opts, tvos_sim_llvm_target, Arch}; +use crate::spec::{StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::X86_64_sim; + Target { + llvm_target: tvos_sim_llvm_target(arch).into(), + pointer_width: 64, + data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: arch.target_arch(), + 
options: TargetOptions { + max_atomic_width: Some(128), + stack_probes: StackProbeType::X86, + ..opts("tvos", arch) + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_watchos_sim.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_watchos_sim.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_watchos_sim.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_apple_watchos_sim.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,30 @@ +use crate::spec::base::apple::{opts, watchos_sim_llvm_target, Arch}; +use crate::spec::{StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::X86_64_sim; + Target { + llvm_target: watchos_sim_llvm_target(arch).into(), + pointer_width: 64, + data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: arch.target_arch(), + options: TargetOptions { + max_atomic_width: Some(128), + stack_probes: StackProbeType::X86, + forces_embed_bitcode: true, + // Taken from a clang build on Xcode 11.4.1. + // These arguments are not actually invoked - they just have + // to look right to pass App Store validation. + bitcode_llvm_cmdline: "-triple\0\ + x86_64-apple-watchos5.0-simulator\0\ + -emit-obj\0\ + -disable-llvm-passes\0\ + -target-abi\0\ + darwinpcs\0\ + -Os\0" + .into(), + ..opts("watchos", arch) + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_fortanix_unknown_sgx.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_fortanix_unknown_sgx.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_fortanix_unknown_sgx.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_fortanix_unknown_sgx.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,83 @@ +use std::borrow::Cow; + +use crate::spec::{cvs, Cc, LinkerFlavor, Lld, Target, TargetOptions}; + +pub fn target() -> Target { + let pre_link_args = TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::No, Lld::No), + &[ + "-e", + "elf_entry", + "-Bstatic", + "--gc-sections", + "-z", + "text", + "-z", + "norelro", + "--no-undefined", + "--error-unresolved-symbols", + "--no-undefined-version", + "-Bsymbolic", + "--export-dynamic", + // The following symbols are needed by libunwind, which is linked after + // libstd. Make sure they're included in the link. 
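// (Editor's note, not part of the upstream patch: each `-u <symbol>` pair below
// marks the symbol as undefined up front, forcing the linker to pull in the
// archive members that define these `__rust_*` hooks even though nothing
// references them until libunwind is linked afterwards.)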
+ "-u", + "__rust_abort", + "-u", + "__rust_c_alloc", + "-u", + "__rust_c_dealloc", + "-u", + "__rust_print_err", + "-u", + "__rust_rwlock_rdlock", + "-u", + "__rust_rwlock_unlock", + "-u", + "__rust_rwlock_wrlock", + ], + ); + + const EXPORT_SYMBOLS: &[&str] = &[ + "sgx_entry", + "HEAP_BASE", + "HEAP_SIZE", + "RELA", + "RELACOUNT", + "ENCLAVE_SIZE", + "CFGDATA_BASE", + "DEBUG", + "EH_FRM_HDR_OFFSET", + "EH_FRM_HDR_LEN", + "EH_FRM_OFFSET", + "EH_FRM_LEN", + "TEXT_BASE", + "TEXT_SIZE", + ]; + let opts = TargetOptions { + os: "unknown".into(), + env: "sgx".into(), + vendor: "fortanix".into(), + abi: "fortanix".into(), + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + max_atomic_width: Some(64), + cpu: "x86-64".into(), + plt_by_default: false, + features: "+rdrnd,+rdseed,+lvi-cfi,+lvi-load-hardening".into(), + llvm_args: cvs!["--x86-experimental-lvi-inline-asm-hardening"], + position_independent_executables: true, + pre_link_args, + override_export_symbols: Some(EXPORT_SYMBOLS.iter().cloned().map(Cow::from).collect()), + relax_elf_relocations: true, + ..Default::default() + }; + Target { + llvm_target: "x86_64-elf".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: opts, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_fuchsia.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_fuchsia.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_fuchsia.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_fuchsia.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1 @@ +pub use crate::spec::targets::x86_64_unknown_fuchsia::target; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_linux_android.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_linux_android.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_linux_android.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_linux_android.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +use crate::spec::{ + base, Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target, TargetOptions, +}; + +pub fn target() -> Target { + let mut base = base::android::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + // https://developer.android.com/ndk/guides/abis.html#86-64 + base.features = "+mmx,+sse,+sse2,+sse3,+ssse3,+sse4.1,+sse4.2,+popcnt".into(); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + base.supports_xray = true; + + Target { + llvm_target: "x86_64-linux-android".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: TargetOptions { supported_sanitizers: SanitizerSet::ADDRESS, ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_nto_qnx710.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_nto_qnx710.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_nto_qnx710.rs 1970-01-01 00:00:00.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_nto_qnx710.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,22 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "x86_64-pc-unknown".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: TargetOptions { + cpu: "x86-64".into(), + plt_by_default: false, + max_atomic_width: Some(64), + pre_link_args: TargetOptions::link_args( + LinkerFlavor::Gnu(Cc::Yes, Lld::No), + &["-Vgcc_ntox86_64_cxx"], + ), + env: "nto71".into(), + ..base::nto_qnx::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_solaris.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_solaris.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_solaris.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_solaris.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::spec::{base, Cc, LinkerFlavor, SanitizerSet, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::solaris::opts(); + base.add_pre_link_args(LinkerFlavor::Unix(Cc::Yes), &["-m64"]); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.vendor = "pc".into(); + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::X86; + base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::THREAD; + + Target { + llvm_target: "x86_64-pc-solaris".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, Target}; + +pub fn target() -> Target { + let mut base = base::windows_gnu::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + // Use high-entropy 64 bit address space for ASLR + base.add_pre_link_args( + LinkerFlavor::Gnu(Cc::No, Lld::No), + &["-m", "i386pep", "--high-entropy-va"], + ); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64", "-Wl,--high-entropy-va"]); + base.max_atomic_width = Some(64); + base.linker = Some("x86_64-w64-mingw32-gcc".into()); + + Target { + llvm_target: "x86_64-pc-windows-gnu".into(), + pointer_width: 64, + data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_gnullvm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_gnullvm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_gnullvm.rs 1970-01-01 00:00:00.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_gnullvm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, Target}; + +pub fn target() -> Target { + let mut base = base::windows_gnullvm::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.max_atomic_width = Some(64); + base.linker = Some("x86_64-w64-mingw32-clang".into()); + + Target { + llvm_target: "x86_64-pc-windows-gnu".into(), + pointer_width: 64, + data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::spec::{base, Target}; + +pub fn target() -> Target { + let mut base = base::windows_msvc::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + + Target { + llvm_target: "x86_64-pc-windows-msvc".into(), + pointer_width: 64, + data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_sun_solaris.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_sun_solaris.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_sun_solaris.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_sun_solaris.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::spec::{base, Cc, LinkerFlavor, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::solaris::opts(); + base.add_pre_link_args(LinkerFlavor::Unix(Cc::Yes), &["-m64"]); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.vendor = "sun".into(); + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::X86; + + Target { + llvm_target: "x86_64-pc-solaris".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unikraft_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unikraft_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unikraft_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unikraft_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "x86_64-unknown-linux-musl".into(), + pointer_width: 64, + arch: "x86_64".into(), + data_layout: 
"e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + options: TargetOptions { + cpu: "x86-64".into(), + plt_by_default: false, + pre_link_args: TargetOptions::link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]), + max_atomic_width: Some(64), + stack_probes: StackProbeType::X86, + ..base::unikraft_linux_musl::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_dragonfly.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_dragonfly.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_dragonfly.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_dragonfly.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::dragonfly::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + + Target { + llvm_target: "x86_64-unknown-dragonfly".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_freebsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,22 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::freebsd::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + base.supported_sanitizers = + SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::MEMORY | SanitizerSet::THREAD; + base.supports_xray = true; + + Target { + llvm_target: "x86_64-unknown-freebsd".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_fuchsia.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_fuchsia.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_fuchsia.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_fuchsia.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, SanitizerSet, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::fuchsia::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.stack_probes = StackProbeType::X86; + base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI; + 
+ Target { + llvm_target: "x86_64-unknown-fuchsia".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_haiku.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_haiku.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_haiku.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_haiku.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::haiku::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + // This option is required to build executables on Haiku x86_64 + base.position_independent_executables = true; + + Target { + llvm_target: "x86_64-unknown-haiku".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_hermit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_hermit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_hermit.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_hermit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + Target { + llvm_target: "x86_64-unknown-hermit".into(), + pointer_width: 64, + arch: "x86_64".into(), + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + options: TargetOptions { + cpu: "x86-64".into(), + features: "+rdrnd,+rdseed".into(), + plt_by_default: false, + max_atomic_width: Some(64), + stack_probes: StackProbeType::X86, + ..base::hermit::opts() + }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_illumos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_illumos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_illumos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_illumos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,21 @@ +use crate::spec::{base, Cc, LinkerFlavor, SanitizerSet, Target}; + +pub fn target() -> Target { + let mut base = base::illumos::opts(); + base.add_pre_link_args(LinkerFlavor::Unix(Cc::Yes), &["-m64", "-std=c99"]); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::THREAD; + + Target { + // LLVM does not currently have a separate illumos target, + // so we still pass Solaris to it + llvm_target: "x86_64-pc-solaris".into(), + pointer_width: 64, + data_layout: 
"e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_l4re_uclibc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_l4re_uclibc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_l4re_uclibc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_l4re_uclibc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,18 @@ +use crate::spec::{base, PanicStrategy, Target}; + +pub fn target() -> Target { + let mut base = base::l4re::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.panic_strategy = PanicStrategy::Abort; + + Target { + llvm_target: "x86_64-unknown-l4re-uclibc".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + base.static_position_independent_executables = true; + base.supported_sanitizers = SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::LEAK + | SanitizerSet::MEMORY + | SanitizerSet::SAFESTACK + | SanitizerSet::THREAD; + base.supports_xray = true; + + Target { + llvm_target: "x86_64-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_gnux32.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_gnux32.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_gnux32.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_gnux32.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::linux_gnu::opts(); + base.cpu = "x86-64".into(); + base.abi = "x32".into(); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-mx32"]); + base.stack_probes = StackProbeType::X86; + base.has_thread_local = false; + // BUG(GabrielMajeri): disabling the PLT on x86_64 Linux with x32 ABI + // breaks code gen. 
See LLVM bug 36743 + base.plt_by_default = true; + + Target { + llvm_target: "x86_64-unknown-linux-gnux32".into(), + pointer_width: 32, + data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ + i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_musl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + base.static_position_independent_executables = true; + base.supported_sanitizers = SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::LEAK + | SanitizerSet::MEMORY + | SanitizerSet::THREAD; + base.supports_xray = true; + + Target { + llvm_target: "x86_64-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_ohos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_ohos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_ohos.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_linux_ohos.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::linux_ohos::opts(); + base.cpu = "x86-64".into(); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + base.static_position_independent_executables = true; + base.supported_sanitizers = SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::LEAK + | SanitizerSet::MEMORY + | SanitizerSet::THREAD; + base.supports_xray = true; + + Target { + // LLVM 15 doesn't support OpenHarmony yet, use a linux target instead. 
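// (Editor's note: the OpenHarmony-specific settings, such as the `ohos`
// environment, come from `base::linux_ohos::opts()` above; only the triple
// handed to LLVM borrows the musl spelling.)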
+ llvm_target: "x86_64-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_netbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +use crate::spec::{ + base, Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target, TargetOptions, +}; + +pub fn target() -> Target { + let mut base = base::netbsd::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + base.supported_sanitizers = SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::LEAK + | SanitizerSet::MEMORY + | SanitizerSet::THREAD; + base.supports_xray = true; + + Target { + llvm_target: "x86_64-unknown-netbsd".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: TargetOptions { mcount: "__mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_none.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,38 @@ +// Generic x86-64 target for bare-metal code - Floating point disabled +// +// Can be used in conjunction with the `target-feature` and +// `target-cpu` compiler flags to opt-in more hardware-specific +// features. 
+ +use crate::spec::{Cc, CodeModel, LinkerFlavor, Lld, PanicStrategy}; +use crate::spec::{RelroLevel, SanitizerSet, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let opts = TargetOptions { + cpu: "x86-64".into(), + plt_by_default: false, + max_atomic_width: Some(64), + stack_probes: StackProbeType::X86, + position_independent_executables: true, + static_position_independent_executables: true, + relro_level: RelroLevel::Full, + linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), + linker: Some("rust-lld".into()), + features: + "-mmx,-sse,-sse2,-sse3,-ssse3,-sse4.1,-sse4.2,-3dnow,-3dnowa,-avx,-avx2,+soft-float" + .into(), + supported_sanitizers: SanitizerSet::KCFI | SanitizerSet::KERNELADDRESS, + disable_redzone: true, + panic_strategy: PanicStrategy::Abort, + code_model: Some(CodeModel::Kernel), + ..Default::default() + }; + Target { + llvm_target: "x86_64-unknown-none-elf".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: opts, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_openbsd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::openbsd::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + base.supports_xray = true; + + Target { + llvm_target: "x86_64-unknown-openbsd".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_redox.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_redox.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_redox.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_redox.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::redox::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + + Target { + llvm_target: "x86_64-unknown-redox".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_uefi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_uefi.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_uefi.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_unknown_uefi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,41 @@ +// This defines the amd64 target for UEFI systems as described in the UEFI specification. See the +// uefi-base module for generic UEFI options. On x86_64 systems (mostly called "x64" in the spec) +// UEFI systems always run in long-mode, have the interrupt-controller pre-configured and force a +// single-CPU execution. +// The win64 ABI is used. It differs from the sysv64 ABI, so we must use a windows target with +// LLVM. "x86_64-unknown-windows" is used to get the minimal subset of windows-specific features. + +use crate::{ + abi::call::Conv, + spec::{base, Target}, +}; + +pub fn target() -> Target { + let mut base = base::uefi_msvc::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.entry_abi = Conv::X86_64Win64; + + // We disable MMX and SSE for now, even though UEFI allows using them. Problem is, you have to + // enable these CPU features explicitly before their first use, otherwise their instructions + // will trigger an exception. Rust does not inject any code that enables AVX/MMX/SSE + // instruction sets, so this must be done by the firmware. However, existing firmware is known + // to leave these uninitialized, thus triggering exceptions if we make use of them. Which is + // why we avoid them and instead use soft-floats. This is also what GRUB and friends did so + // far. + // + // If you initialize FP units yourself, you can override these flags with custom linker + // arguments, thus giving you access to full MMX/SSE acceleration. 
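// (Editor's note, illustrative only: with the FP units initialized by the
// firmware, one could experiment with re-enabling these features per build,
// e.g. via `-C target-feature=+mmx,+sse,-soft-float`; this is an assumption
// about the usual rustc flag, not something this patch configures.)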
+ base.features = "-mmx,-sse,+soft-float".into(); + + Target { + llvm_target: "x86_64-unknown-windows".into(), + pointer_width: 64, + data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_uwp_windows_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_uwp_windows_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_uwp_windows_gnu.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_uwp_windows_gnu.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,23 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, Target}; + +pub fn target() -> Target { + let mut base = base::windows_uwp_gnu::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + // Use high-entropy 64 bit address space for ASLR + base.add_pre_link_args( + LinkerFlavor::Gnu(Cc::No, Lld::No), + &["-m", "i386pep", "--high-entropy-va"], + ); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64", "-Wl,--high-entropy-va"]); + base.max_atomic_width = Some(64); + + Target { + llvm_target: "x86_64-pc-windows-gnu".into(), + pointer_width: 64, + data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_uwp_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_uwp_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_uwp_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_uwp_windows_msvc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,17 @@ +use crate::spec::{base, Target}; + +pub fn target() -> Target { + let mut base = base::windows_uwp_msvc::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + + Target { + llvm_target: "x86_64-pc-windows-msvc".into(), + pointer_width: 64, + data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_wrs_vxworks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_wrs_vxworks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_wrs_vxworks.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64_wrs_vxworks.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,20 @@ +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = base::vxworks::opts(); + base.cpu = "x86-64".into(); + base.plt_by_default = false; + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + base.disable_redzone = true; + + Target { + llvm_target: "x86_64-unknown-linux-gnu".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64h_apple_darwin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64h_apple_darwin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64h_apple_darwin.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/targets/x86_64h_apple_darwin.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,44 @@ +use crate::spec::base::apple::{macos_llvm_target, opts, Arch}; +use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, SanitizerSet}; +use crate::spec::{StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let arch = Arch::X86_64h; + let mut base = opts("macos", arch); + base.max_atomic_width = Some(128); + base.frame_pointer = FramePointer::Always; + base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + base.supported_sanitizers = + SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::LEAK | SanitizerSet::THREAD; + + // x86_64h is core2-avx without a few of the features which would otherwise + // be guaranteed, so we need to disable those. This imitates clang's logic: + // - https://github.com/llvm/llvm-project/blob/bd1f7c417/clang/lib/Driver/ToolChains/Arch/X86.cpp#L77-L78 + // - https://github.com/llvm/llvm-project/blob/bd1f7c417/clang/lib/Driver/ToolChains/Arch/X86.cpp#L133-L141 + // + // FIXME: Sadly, turning these off here disables them in such a way that they + // aren't re-enabled by `-Ctarget-cpu=native` (on a machine that has them). + // It would be nice if this were not the case, but fixing it seems tricky + // (and given that the main use-case for this target is for use in universal + // binaries, probably not that important). + base.features = "-rdrnd,-aes,-pclmul,-rtm,-fsgsbase".into(); + // Double-check that the `cpu` is what we expect (if it's not the list above + // may need updating). + assert_eq!( + base.cpu, "core-avx2", + "you need to adjust the feature list in x86_64h-apple-darwin if you change this", + ); + + Target { + // Clang automatically chooses a more specific target based on + // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work + // correctly, we do too. 
+ llvm_target: macos_llvm_target(arch).into(), + pointer_width: 64, + data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: arch.target_arch(), + options: TargetOptions { mcount: "\u{1}mcount".into(), ..base }, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/teeos_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/teeos_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/teeos_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/teeos_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -use super::{Cc, LinkerFlavor, Lld, PanicStrategy}; -use crate::spec::{RelroLevel, TargetOptions}; - -pub fn opts() -> TargetOptions { - let lld_args = &["-zmax-page-size=4096", "-znow", "-ztext", "--execute-only"]; - let cc_args = &["-Wl,-zmax-page-size=4096", "-Wl,-znow", "-Wl,-ztext", "-mexecute-only"]; - - let mut pre_link_args = TargetOptions::link_args(LinkerFlavor::Gnu(Cc::No, Lld::No), lld_args); - super::add_link_args(&mut pre_link_args, LinkerFlavor::Gnu(Cc::Yes, Lld::No), cc_args); - - TargetOptions { - os: "teeos".into(), - vendor: "unknown".into(), - dynamic_linking: true, - linker_flavor: LinkerFlavor::Gnu(Cc::Yes, Lld::No), - // rpath hardcodes -Wl, so it can't be used together with ld.lld. - // C TAs also don't support rpath, so this is fine. - has_rpath: false, - // Note: Setting has_thread_local to true causes an error when - // loading / dyn-linking the TA - has_thread_local: false, - position_independent_executables: true, - relro_level: RelroLevel::Full, - crt_static_respected: true, - pre_link_args, - panic_strategy: PanicStrategy::Abort, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/tests/tests_impl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/tests/tests_impl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/tests/tests_impl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/tests/tests_impl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -97,7 +97,7 @@ ); } - if self.link_self_contained == LinkSelfContainedDefault::False { + if self.link_self_contained.is_disabled() { assert!( self.pre_link_objects_self_contained.is_empty() && self.post_link_objects_self_contained.is_empty() diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumb_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumb_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumb_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumb_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -// These `thumbv*` targets cover the ARM Cortex-M family of processors which are widely used in -// microcontrollers. Namely, all these processors: -// -// - Cortex-M0 -// - Cortex-M0+ -// - Cortex-M1 -// - Cortex-M3 -// - Cortex-M4(F) -// - Cortex-M7(F) -// - Cortex-M23 -// - Cortex-M33 -// -// We have opted for these instead of one target per processor (e.g., `cortex-m0`, `cortex-m3`, -// etc) because the differences between some processors like the cortex-m0 and cortex-m1 are almost -// nonexistent from the POV of codegen so it doesn't make sense to have separate targets for them. 
-// And if differences exist between two processors under the same target, rustc flags can be used to -// optimize for one processor or the other. -// -// Also, we have not chosen a single target (`arm-none-eabi`) like GCC does because this makes -// difficult to integrate Rust code and C code. Targeting the Cortex-M4 requires different gcc flags -// than the ones you would use for the Cortex-M0 and with a single target it'd be impossible to -// differentiate one processor from the other. -// -// About arm vs thumb in the name. The Cortex-M devices only support the Thumb instruction set, -// which is more compact (higher code density), and not the ARM instruction set. That's why LLVM -// triples use thumb instead of arm. We follow suit because having thumb in the name let us -// differentiate these targets from our other `arm(v7)-*-*-gnueabi(hf)` targets in the context of -// build scripts / gcc flags. - -use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, PanicStrategy, RelocModel, TargetOptions}; - -pub fn opts() -> TargetOptions { - // See rust-lang/rfcs#1645 for a discussion about these defaults - TargetOptions { - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - // In most cases, LLD is good enough - linker: Some("rust-lld".into()), - // Because these devices have very little resources having an unwinder is too onerous so we - // default to "abort" because the "unwind" strategy is very rare. - panic_strategy: PanicStrategy::Abort, - // Similarly, one almost always never wants to use relocatable code because of the extra - // costs it involves. - relocation_model: RelocModel::Static, - // When this section is added a volatile load to its start address is also generated. This - // volatile load is a footgun as it can end up loading an invalid memory address, depending - // on how the user set up their linker scripts. This section adds pretty printer for stuff - // like std::Vec, which is not that used in no-std context, so it's best to left it out - // until we figure a way to add the pretty printers without requiring a volatile load cf. - // rust-lang/rust#44993. - emit_debug_gdb_scripts: false, - // LLVM is eager to trash the link register when calling `noreturn` functions, which - // breaks debugging. Preserve LR by default to prevent that from happening. - frame_pointer: FramePointer::Always, - // ARM supports multiple ABIs for enums, the linux one matches the default of 32 here - // but any arm-none or thumb-none target will be defaulted to 8 on GCC. - c_enum_min_bits: Some(8), - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv4t_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv4t_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv4t_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv4t_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -//! Targets the ARMv4T, with code as `t32` code by default. -//! -//! Primarily of use for the GBA, but usable with other devices too. -//! -//! Please ping @Lokathor if changes are needed. -//! -//! **Important:** This target profile **does not** specify a linker script. You -//! just get the default link script when you build a binary for this target. -//! The default link script is very likely wrong, so you should use -//! `-Clink-arg=-Tmy_script.ld` to override that with a correct linker script. 
- -use crate::spec::{cvs, FramePointer}; -use crate::spec::{PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "thumbv4t-none-eabi".into(), - pointer_width: 32, - arch: "arm".into(), - /* Data layout args are '-' separated: - * little endian - * stack is 64-bit aligned (EABI) - * pointers are 32-bit - * i64 must be 64-bit aligned (EABI) - * mangle names with ELF style - * native integers are 32-bit - * All other elements are default - */ - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - options: TargetOptions { - abi: "eabi".into(), - - // extra args passed to the external assembler (assuming `arm-none-eabi-as`): - // * activate t32/a32 interworking - // * use arch ARMv4T - // * use little-endian - asm_args: cvs!["-mthumb-interwork", "-march=armv4t", "-mlittle-endian",], - - // minimum extra features, these cannot be disabled via -C - // Also force-enable 32-bit atomics, which allows the use of atomic load/store only. - // The resulting atomics are ABI incompatible with atomics backed by libatomic. - features: "+soft-float,+strict-align,+atomics-32".into(), - - panic_strategy: PanicStrategy::Abort, - relocation_model: RelocModel::Static, - // suggested from thumb_base, rust-lang/rust#44993. - emit_debug_gdb_scripts: false, - frame_pointer: FramePointer::MayOmit, - - main_needs_argc_argv: false, - - // don't have atomic compare-and-swap - atomic_cas: false, - has_thumb_interworking: true, - - ..super::thumb_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv5te_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv5te_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv5te_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv5te_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,41 +0,0 @@ -//! Targets the ARMv5TE, with code as `t32` code by default. - -use crate::spec::{cvs, FramePointer, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "thumbv5te-none-eabi".into(), - pointer_width: 32, - arch: "arm".into(), - /* Data layout args are '-' separated: - * little endian - * stack is 64-bit aligned (EABI) - * pointers are 32-bit - * i64 must be 64-bit aligned (EABI) - * mangle names with ELF style - * native integers are 32-bit - * All other elements are default - */ - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - - options: TargetOptions { - abi: "eabi".into(), - // extra args passed to the external assembler (assuming `arm-none-eabi-as`): - // * activate t32/a32 interworking - // * use arch ARMv5TE - // * use little-endian - asm_args: cvs!["-mthumb-interwork", "-march=armv5te", "-mlittle-endian",], - // minimum extra features, these cannot be disabled via -C - // Also force-enable 32-bit atomics, which allows the use of atomic load/store only. - // The resulting atomics are ABI incompatible with atomics backed by libatomic. 
- features: "+soft-float,+strict-align,+atomics-32".into(), - frame_pointer: FramePointer::MayOmit, - main_needs_argc_argv: false, - // don't have atomic compare-and-swap - atomic_cas: false, - has_thumb_interworking: true, - - ..super::thumb_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv6m_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv6m_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv6m_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv6m_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -// Targets the Cortex-M0, Cortex-M0+ and Cortex-M1 processors (ARMv6-M architecture) - -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "thumbv6m-none-eabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - abi: "eabi".into(), - // The ARMv6-M architecture doesn't support unaligned loads/stores so we disable them - // with +strict-align. - // Also force-enable 32-bit atomics, which allows the use of atomic load/store only. - // The resulting atomics are ABI incompatible with atomics backed by libatomic. - features: "+strict-align,+atomics-32".into(), - // There are no atomic CAS instructions available in the instruction set of the ARMv6-M - // architecture - atomic_cas: false, - ..super::thumb_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7a_pc_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7a_pc_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7a_pc_windows_msvc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7a_pc_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,28 +0,0 @@ -use crate::spec::{LinkerFlavor, Lld, PanicStrategy, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::windows_msvc_base::opts(); - // Prevent error LNK2013: BRANCH24(T) fixup overflow - // The LBR optimization tries to eliminate branch islands, - // but if the displacement is larger than can fit - // in the instruction, this error will occur. The linker - // should be smart enough to insert branch islands only - // where necessary, but this is not the observed behavior. - // Disabling the LBR optimization works around the issue. 
- base.add_pre_link_args(LinkerFlavor::Msvc(Lld::No), &["/OPT:NOLBR"]); - - Target { - llvm_target: "thumbv7a-pc-windows-msvc".into(), - pointer_width: 32, - data_layout: "e-m:w-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - features: "+vfp3,+neon".into(), - max_atomic_width: Some(64), - // FIXME(jordanrh): use PanicStrategy::Unwind when SEH is - // implemented for windows/arm in LLVM - panic_strategy: PanicStrategy::Abort, - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7a_uwp_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7a_uwp_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7a_uwp_windows_msvc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7a_uwp_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{PanicStrategy, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "thumbv7a-pc-windows-msvc".into(), - pointer_width: 32, - data_layout: "e-m:w-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - features: "+vfp3,+neon".into(), - max_atomic_width: Some(64), - // FIXME(jordanrh): use PanicStrategy::Unwind when SEH is - // implemented for windows/arm in LLVM - panic_strategy: PanicStrategy::Abort, - ..super::windows_uwp_msvc_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7em_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7em_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7em_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7em_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -// Targets the Cortex-M4 and Cortex-M7 processors (ARMv7E-M) -// -// This target assumes that the device doesn't have a FPU (Floating Point Unit) and lowers all the -// floating point operations to software routines (intrinsics). -// -// As such, this target uses the "soft" calling convention (ABI) where floating point values are -// passed to/from subroutines via general purpose registers (R0, R1, etc.). -// -// To opt-in to hardware accelerated floating point operations, you can use, for example, -// `-C target-feature=+vfp4` or `-C target-cpu=cortex-m4`. 
- -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "thumbv7em-none-eabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - abi: "eabi".into(), - max_atomic_width: Some(32), - ..super::thumb_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7em_none_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7em_none_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7em_none_eabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7em_none_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,36 +0,0 @@ -// Targets the Cortex-M4F and Cortex-M7F processors (ARMv7E-M) -// -// This target assumes that the device does have a FPU (Floating Point Unit) and lowers all (single -// precision) floating point operations to hardware instructions. -// -// Additionally, this target uses the "hard" floating convention (ABI) where floating point values -// are passed to/from subroutines via FPU registers (S0, S1, D0, D1, etc.). -// -// To opt into double precision hardware support, use the `-C target-feature=+fp64` flag. - -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "thumbv7em-none-eabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - abi: "eabihf".into(), - // `+vfp4` is the lowest common denominator between the Cortex-M4 (vfp4-16) and the - // Cortex-M7 (vfp5) - // `-d32` both the Cortex-M4 and the Cortex-M7 only have 16 double-precision registers - // available - // `-fp64` The Cortex-M4 only supports single precision floating point operations - // whereas in the Cortex-M7 double precision is optional - // - // Reference: - // ARMv7-M Architecture Reference Manual - A2.5 The optional floating-point extension - features: "+vfp4,-d32,-fp64".into(), - max_atomic_width: Some(32), - ..super::thumb_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7m_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7m_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7m_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7m_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -// Targets the Cortex-M3 processor (ARMv7-M) - -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "thumbv7m-none-eabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - abi: "eabi".into(), - max_atomic_width: Some(32), - ..super::thumb_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_linux_androideabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_linux_androideabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_linux_androideabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_linux_androideabi.rs 1970-01-01 
00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, Target, TargetOptions}; - -// This target if is for the Android v7a ABI in thumb mode with -// NEON unconditionally enabled and, therefore, with 32 FPU registers -// enabled as well. See section A2.6.2 on page A2-56 in -// https://static.docs.arm.com/ddi0406/cd/DDI0406C_d_armv7ar_arm.pdf - -// See https://developer.android.com/ndk/guides/abis.html#v7a -// for target ABI requirements. - -pub fn target() -> Target { - let mut base = super::android_base::opts(); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-march=armv7-a"]); - Target { - llvm_target: "armv7-none-linux-android".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabi".into(), - features: "+v7,+thumb-mode,+thumb2,+vfp3,+neon".into(), - max_atomic_width: Some(64), - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_unknown_linux_gnueabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_unknown_linux_gnueabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_unknown_linux_gnueabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_unknown_linux_gnueabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -// This target is for glibc Linux on ARMv7 with thumb mode enabled -// (for consistency with Android and Debian-based distributions) -// and with NEON unconditionally enabled and, therefore, with 32 FPU -// registers enabled as well. See section A2.6.2 on page A2-56 in -// https://static.docs.arm.com/ddi0406/cd/DDI0406C_d_armv7ar_arm.pdf - -pub fn target() -> Target { - Target { - llvm_target: "armv7-unknown-linux-gnueabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - options: TargetOptions { - abi: "eabihf".into(), - // Info about features at https://wiki.debian.org/ArmHardFloatPort - features: "+v7,+thumb-mode,+thumb2,+vfp3,+neon".into(), - max_atomic_width: Some(64), - ..super::linux_gnu_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_unknown_linux_musleabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_unknown_linux_musleabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_unknown_linux_musleabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv7neon_unknown_linux_musleabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -use crate::spec::{Target, TargetOptions}; - -// This target is for musl Linux on ARMv7 with thumb mode enabled -// (for consistency with Android and Debian-based distributions) -// and with NEON unconditionally enabled and, therefore, with 32 FPU -// registers enabled as well. See section A2.6.2 on page A2-56 in -// https://static.docs.arm.com/ddi0406/cd/DDI0406C_d_armv7ar_arm.pdf - -pub fn target() -> Target { - Target { - // It's important we use "gnueabihf" and not "musleabihf" here. LLVM - // uses it to determine the calling convention and float ABI, and LLVM - // doesn't support the "musleabihf" value. 
- llvm_target: "armv7-unknown-linux-gnueabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - // Most of these settings are copied from the thumbv7neon_unknown_linux_gnueabihf - // target. - options: TargetOptions { - abi: "eabihf".into(), - features: "+v7,+thumb-mode,+thumb2,+vfp3,+neon".into(), - max_atomic_width: Some(64), - mcount: "\u{1}mcount".into(), - ..super::linux_musl_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_base_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_base_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_base_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_base_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -// Targets the Cortex-M23 processor (Baseline ARMv8-M) - -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "thumbv8m.base-none-eabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - abi: "eabi".into(), - // ARMv8-M baseline doesn't support unaligned loads/stores so we disable them - // with +strict-align. - features: "+strict-align".into(), - max_atomic_width: Some(32), - ..super::thumb_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_main_none_eabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_main_none_eabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_main_none_eabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_main_none_eabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -// Targets the Cortex-M33 processor (Armv8-M Mainline architecture profile), -// without the Floating Point extension. - -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "thumbv8m.main-none-eabi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - abi: "eabi".into(), - max_atomic_width: Some(32), - ..super::thumb_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_main_none_eabihf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_main_none_eabihf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_main_none_eabihf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/thumbv8m_main_none_eabihf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -// Targets the Cortex-M33 processor (Armv8-M Mainline architecture profile), -// with the Floating Point extension. 
- -use crate::spec::{Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "thumbv8m.main-none-eabihf".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(), - arch: "arm".into(), - - options: TargetOptions { - abi: "eabihf".into(), - // If the Floating Point extension is implemented in the Cortex-M33 - // processor, the Cortex-M33 Technical Reference Manual states that - // the FPU uses the FPv5 architecture, single-precision instructions - // and 16 D registers. - // These parameters map to the following LLVM features. - features: "+fp-armv8,-fp64,-d32".into(), - max_atomic_width: Some(32), - ..super::thumb_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/uefi_msvc_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/uefi_msvc_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/uefi_msvc_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/uefi_msvc_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,52 +0,0 @@ -// This defines a base target-configuration for native UEFI systems. The UEFI specification has -// quite detailed sections on the ABI of all the supported target architectures. In almost all -// cases it simply follows what Microsoft Windows does. Hence, whenever in doubt, see the MSDN -// documentation. -// UEFI uses COFF/PE32+ format for binaries. All binaries must be statically linked. No dynamic -// linker is supported. As native to COFF, binaries are position-dependent, but will be relocated -// by the loader if the pre-chosen memory location is already in use. -// UEFI forbids running code on anything but the boot-CPU. No interrupts are allowed other than -// the timer-interrupt. Device-drivers are required to use polling-based models. Furthermore, all -// code runs in the same environment, no process separation is supported. - -use crate::spec::{LinkerFlavor, Lld, PanicStrategy, StackProbeType, TargetOptions}; - -pub fn opts() -> TargetOptions { - let mut base = super::msvc_base::opts(); - - base.add_pre_link_args( - LinkerFlavor::Msvc(Lld::No), - &[ - // Non-standard subsystems have no default entry-point in PE+ files. We have to define - // one. "efi_main" seems to be a common choice amongst other implementations and the - // spec. - "/entry:efi_main", - // COFF images have a "Subsystem" field in their header, which defines what kind of - // program it is. UEFI has 3 fields reserved, which are EFI_APPLICATION, - // EFI_BOOT_SERVICE_DRIVER, and EFI_RUNTIME_DRIVER. We default to EFI_APPLICATION, - // which is very likely the most common option. Individual projects can override this - // with custom linker flags. - // The subsystem-type only has minor effects on the application. It defines the memory - // regions the application is loaded into (runtime-drivers need to be put into - // reserved areas), as well as whether a return from the entry-point is treated as - // exit (default for applications). - "/subsystem:efi_application", - ], - ); - - TargetOptions { - os: "uefi".into(), - linker_flavor: LinkerFlavor::Msvc(Lld::Yes), - disable_redzone: true, - exe_suffix: ".efi".into(), - allows_weak_linkage: false, - panic_strategy: PanicStrategy::Abort, - // LLVM does not emit inline assembly because the LLVM target does not get considered as… - // "Windows". 
- stack_probes: StackProbeType::Call, - singlethread: true, - linker: Some("rust-lld".into()), - entry_name: "efi_main".into(), - ..base - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/unikraft_linux_musl_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/unikraft_linux_musl_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/unikraft_linux_musl_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/unikraft_linux_musl_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -use crate::spec::{cvs, PanicStrategy, RelocModel, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "linux".into(), - env: "musl".into(), - vendor: "unikraft".into(), - linker: Some("kraftld".into()), - relocation_model: RelocModel::Static, - families: cvs!["unix"], - has_thread_local: true, - panic_strategy: PanicStrategy::Abort, - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/vxworks_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/vxworks_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/vxworks_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/vxworks_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -use crate::spec::{cvs, TargetOptions}; - -pub fn opts() -> TargetOptions { - TargetOptions { - os: "vxworks".into(), - env: "gnu".into(), - vendor: "wrs".into(), - linker: Some("wr-c++".into()), - exe_suffix: ".vxe".into(), - dynamic_linking: true, - families: cvs!["unix"], - has_rpath: true, - has_thread_local: true, - crt_static_default: true, - crt_static_respected: true, - crt_static_allows_dylibs: true, - // VxWorks needs to implement this to support profiling - mcount: "_mcount".into(), - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_unknown_emscripten.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_unknown_emscripten.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_unknown_emscripten.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_unknown_emscripten.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -use super::{cvs, wasm_base}; -use super::{LinkArgs, LinkerFlavor, PanicStrategy, RelocModel, Target, TargetOptions}; - -pub fn target() -> Target { - // Reset flags for non-Em flavors back to empty to satisfy sanity checking tests. - let pre_link_args = LinkArgs::new(); - let post_link_args = TargetOptions::link_args( - LinkerFlavor::EmCc, - &["-sABORTING_MALLOC=0", "-Wl,--fatal-warnings"], - ); - - let opts = TargetOptions { - os: "emscripten".into(), - linker_flavor: LinkerFlavor::EmCc, - // emcc emits two files - a .js file to instantiate the wasm and supply platform - // functionality, and a .wasm file. 
- exe_suffix: ".js".into(), - linker: None, - pre_link_args, - post_link_args, - relocation_model: RelocModel::Pic, - panic_strategy: PanicStrategy::Unwind, - no_default_libraries: false, - families: cvs!["unix", "wasm"], - ..wasm_base::options() - }; - Target { - llvm_target: "wasm32-unknown-emscripten".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p10:8:8-p20:8:8-i64:64-f128:64-n32:64-S128-ni:1:10:20".into(), - arch: "wasm32".into(), - options: opts, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_unknown_unknown.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_unknown_unknown.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_unknown_unknown.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_unknown_unknown.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,55 +0,0 @@ -//! A "bare wasm" target representing a WebAssembly output that makes zero -//! assumptions about its environment. -//! -//! The `wasm32-unknown-unknown` target is intended to encapsulate use cases -//! that do not rely on any imported functionality. The binaries generated are -//! entirely self-contained by default when using the standard library. Although -//! the standard library is available, most of it returns an error immediately -//! (e.g. trying to create a TCP stream or something like that). -//! -//! This target is more or less managed by the Rust and WebAssembly Working -//! Group nowadays at . - -use super::{wasm_base, Cc, LinkerFlavor, Target}; -use crate::spec::abi::Abi; - -pub fn target() -> Target { - let mut options = wasm_base::options(); - options.os = "unknown".into(); - - // This is a default for backwards-compatibility with the original - // definition of this target oh-so-long-ago. Once the "wasm" ABI is - // stable and the wasm-bindgen project has switched to using it then there's - // no need for this and it can be removed. - // - // Currently this is the reason that this target's ABI is mismatched with - // clang's ABI. This means that, in the limit, you can't merge C and Rust - // code on this target due to this ABI mismatch. - options.default_adjusted_cabi = Some(Abi::Wasm); - - options.add_pre_link_args( - LinkerFlavor::WasmLld(Cc::No), - &[ - // For now this target just never has an entry symbol no matter the output - // type, so unconditionally pass this. - "--no-entry", - ], - ); - options.add_pre_link_args( - LinkerFlavor::WasmLld(Cc::Yes), - &[ - // Make sure clang uses LLD as its linker and is configured appropriately - // otherwise - "--target=wasm32-unknown-unknown", - "-Wl,--no-entry", - ], - ); - - Target { - llvm_target: "wasm32-unknown-unknown".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p10:8:8-p20:8:8-i64:64-n32:64-S128-ni:1:10:20".into(), - arch: "wasm32".into(), - options, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_wasi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_wasi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_wasi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_wasi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,118 +0,0 @@ -//! The `wasm32-wasi` target is a new and still (as of April 2019) an -//! experimental target. The definition in this file is likely to be tweaked -//! over time and shouldn't be relied on too much. -//! -//! 
The `wasi` target is a proposal to define a standardized set of syscalls -//! that WebAssembly files can interoperate with. This set of syscalls is -//! intended to empower WebAssembly binaries with native capabilities such as -//! filesystem access, network access, etc. -//! -//! You can see more about the proposal at . -//! -//! The Rust target definition here is interesting in a few ways. We want to -//! serve two use cases here with this target: -//! -//! * First, we want Rust usage of the target to be as hassle-free as possible, -//! ideally avoiding the need to configure and install a local wasm32-wasi -//! toolchain. -//! -//! * Second, one of the primary use cases of LLVM's new wasm backend and the -//! wasm support in LLD is that any compiled language can interoperate with -//! any other. To that the `wasm32-wasi` target is the first with a viable C -//! standard library and sysroot common definition, so we want Rust and C/C++ -//! code to interoperate when compiled to `wasm32-unknown-unknown`. -//! -//! You'll note, however, that the two goals above are somewhat at odds with one -//! another. To attempt to solve both use cases in one go we define a target -//! that (ab)uses the `crt-static` target feature to indicate which one you're -//! in. -//! -//! ## No interop with C required -//! -//! By default the `crt-static` target feature is enabled, and when enabled -//! this means that the bundled version of `libc.a` found in `liblibc.rlib` -//! is used. This isn't intended really for interoperation with a C because it -//! may be the case that Rust's bundled C library is incompatible with a -//! foreign-compiled C library. In this use case, though, we use `rust-lld` and -//! some copied crt startup object files to ensure that you can download the -//! wasi target for Rust and you're off to the races, no further configuration -//! necessary. -//! -//! All in all, by default, no external dependencies are required. You can -//! compile `wasm32-wasi` binaries straight out of the box. You can't, however, -//! reliably interoperate with C code in this mode (yet). -//! -//! ## Interop with C required -//! -//! For the second goal we repurpose the `target-feature` flag, meaning that -//! you'll need to do a few things to have C/Rust code interoperate. -//! -//! 1. All Rust code needs to be compiled with `-C target-feature=-crt-static`, -//! indicating that the bundled C standard library in the Rust sysroot will -//! not be used. -//! -//! 2. If you're using rustc to build a linked artifact then you'll need to -//! specify `-C linker` to a `clang` binary that supports -//! `wasm32-wasi` and is configured with the `wasm32-wasi` sysroot. This -//! will cause Rust code to be linked against the libc.a that the specified -//! `clang` provides. -//! -//! 3. If you're building a staticlib and integrating Rust code elsewhere, then -//! compiling with `-C target-feature=-crt-static` is all you need to do. -//! -//! You can configure the linker via Cargo using the -//! `CARGO_TARGET_WASM32_WASI_LINKER` env var. Be sure to also set -//! `CC_wasm32-wasi` if any crates in the dependency graph are using the `cc` -//! crate. -//! -//! ## Remember, this is all in flux -//! -//! The wasi target is **very** new in its specification. It's likely going to -//! be a long effort to get it standardized and stable. We'll be following it as -//! best we can with this target. Don't start relying on too much here unless -//! you know what you're getting in to! 
- -use super::crt_objects::{self, LinkSelfContainedDefault}; -use super::{wasm_base, Cc, LinkerFlavor, Target}; - -pub fn target() -> Target { - let mut options = wasm_base::options(); - - options.os = "wasi".into(); - options.add_pre_link_args(LinkerFlavor::WasmLld(Cc::Yes), &["--target=wasm32-wasi"]); - - options.pre_link_objects_self_contained = crt_objects::pre_wasi_self_contained(); - options.post_link_objects_self_contained = crt_objects::post_wasi_self_contained(); - - // FIXME: Figure out cases in which WASM needs to link with a native toolchain. - options.link_self_contained = LinkSelfContainedDefault::True; - - // Right now this is a bit of a workaround but we're currently saying that - // the target by default has a static crt which we're taking as a signal - // for "use the bundled crt". If that's turned off then the system's crt - // will be used, but this means that default usage of this target doesn't - // need an external compiler but it's still interoperable with an external - // compiler if configured correctly. - options.crt_static_default = true; - options.crt_static_respected = true; - - // Allow `+crt-static` to create a "cdylib" output which is just a wasm file - // without a main function. - options.crt_static_allows_dylibs = true; - - // WASI's `sys::args::init` function ignores its arguments; instead, - // `args::args()` makes the WASI API calls itself. - options.main_needs_argc_argv = false; - - // And, WASI mangles the name of "main" to distinguish between different - // signatures. - options.entry_name = "__main_void".into(); - - Target { - llvm_target: "wasm32-wasi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p10:8:8-p20:8:8-i64:64-n32:64-S128-ni:1:10:20".into(), - arch: "wasm32".into(), - options, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_wasi_preview1_threads.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_wasi_preview1_threads.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_wasi_preview1_threads.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm32_wasi_preview1_threads.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,134 +0,0 @@ -//! The `wasm32-wasi-preview1-threads` target is a new and still (as of July 2023) an -//! experimental target. The definition in this file is likely to be tweaked -//! over time and shouldn't be relied on too much. -//! -//! The `wasi-threads` target is a proposal to define a standardized set of syscalls -//! that WebAssembly files can interoperate with. This set of syscalls is -//! intended to empower WebAssembly binaries with native capabilities such as -//! threads, filesystem access, network access, etc. -//! -//! You can see more about the proposal at . -//! -//! The Rust target definition here is interesting in a few ways. We want to -//! serve two use cases here with this target: -//! -//! * First, we want Rust usage of the target to be as hassle-free as possible, -//! ideally avoiding the need to configure and install a local wasm32-wasi-preview1-threads -//! toolchain. -//! -//! * Second, one of the primary use cases of LLVM's new wasm backend and the -//! wasm support in LLD is that any compiled language can interoperate with -//! any other. To that the `wasm32-wasi-preview1-threads` target is the first with a viable C -//! standard library and sysroot common definition, so we want Rust and C/C++ -//! 
code to interoperate when compiled to `wasm32-unknown-unknown`. -//! -//! You'll note, however, that the two goals above are somewhat at odds with one -//! another. To attempt to solve both use cases in one go we define a target -//! that (ab)uses the `crt-static` target feature to indicate which one you're -//! in. -//! -//! ## No interop with C required -//! -//! By default the `crt-static` target feature is enabled, and when enabled -//! this means that the bundled version of `libc.a` found in `liblibc.rlib` -//! is used. This isn't intended really for interoperation with a C because it -//! may be the case that Rust's bundled C library is incompatible with a -//! foreign-compiled C library. In this use case, though, we use `rust-lld` and -//! some copied crt startup object files to ensure that you can download the -//! wasi target for Rust and you're off to the races, no further configuration -//! necessary. -//! -//! All in all, by default, no external dependencies are required. You can -//! compile `wasm32-wasi-preview1-threads` binaries straight out of the box. You can't, however, -//! reliably interoperate with C code in this mode (yet). -//! -//! ## Interop with C required -//! -//! For the second goal we repurpose the `target-feature` flag, meaning that -//! you'll need to do a few things to have C/Rust code interoperate. -//! -//! 1. All Rust code needs to be compiled with `-C target-feature=-crt-static`, -//! indicating that the bundled C standard library in the Rust sysroot will -//! not be used. -//! -//! 2. If you're using rustc to build a linked artifact then you'll need to -//! specify `-C linker` to a `clang` binary that supports -//! `wasm32-wasi-preview1-threads` and is configured with the `wasm32-wasi-preview1-threads` sysroot. This -//! will cause Rust code to be linked against the libc.a that the specified -//! `clang` provides. -//! -//! 3. If you're building a staticlib and integrating Rust code elsewhere, then -//! compiling with `-C target-feature=-crt-static` is all you need to do. -//! -//! You can configure the linker via Cargo using the -//! `CARGO_TARGET_WASM32_WASI_LINKER` env var. Be sure to also set -//! `CC_wasm32-wasi-preview1-threads` if any crates in the dependency graph are using the `cc` -//! crate. -//! -//! ## Remember, this is all in flux -//! -//! The wasi target is **very** new in its specification. It's likely going to -//! be a long effort to get it standardized and stable. We'll be following it as -//! best we can with this target. Don't start relying on too much here unless -//! you know what you're getting in to! - -use super::crt_objects::{self, LinkSelfContainedDefault}; -use super::{wasm_base, Cc, LinkerFlavor, Target}; - -pub fn target() -> Target { - let mut options = wasm_base::options(); - - options.os = "wasi".into(); - - options.add_pre_link_args( - LinkerFlavor::WasmLld(Cc::No), - &["--import-memory", "--export-memory", "--shared-memory"], - ); - options.add_pre_link_args( - LinkerFlavor::WasmLld(Cc::Yes), - &[ - "--target=wasm32-wasi-threads", - "-Wl,--import-memory", - "-Wl,--export-memory,", - "-Wl,--shared-memory", - ], - ); - - options.pre_link_objects_self_contained = crt_objects::pre_wasi_self_contained(); - options.post_link_objects_self_contained = crt_objects::post_wasi_self_contained(); - - // FIXME: Figure out cases in which WASM needs to link with a native toolchain. 
- options.link_self_contained = LinkSelfContainedDefault::True; - - // Right now this is a bit of a workaround but we're currently saying that - // the target by default has a static crt which we're taking as a signal - // for "use the bundled crt". If that's turned off then the system's crt - // will be used, but this means that default usage of this target doesn't - // need an external compiler but it's still interoperable with an external - // compiler if configured correctly. - options.crt_static_default = true; - options.crt_static_respected = true; - - // Allow `+crt-static` to create a "cdylib" output which is just a wasm file - // without a main function. - options.crt_static_allows_dylibs = true; - - // WASI's `sys::args::init` function ignores its arguments; instead, - // `args::args()` makes the WASI API calls itself. - options.main_needs_argc_argv = false; - - // And, WASI mangles the name of "main" to distinguish between different - // signatures. - options.entry_name = "__main_void".into(); - - options.singlethread = false; - options.features = "+atomics,+bulk-memory,+mutable-globals".into(); - - Target { - llvm_target: "wasm32-wasi".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p10:8:8-p20:8:8-i64:64-n32:64-S128-ni:1:10:20".into(), - arch: "wasm32".into(), - options, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm64_unknown_unknown.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm64_unknown_unknown.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm64_unknown_unknown.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm64_unknown_unknown.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,47 +0,0 @@ -//! A "bare wasm" target representing a WebAssembly output that makes zero -//! assumptions about its environment. -//! -//! The `wasm64-unknown-unknown` target is intended to encapsulate use cases -//! that do not rely on any imported functionality. The binaries generated are -//! entirely self-contained by default when using the standard library. Although -//! the standard library is available, most of it returns an error immediately -//! (e.g. trying to create a TCP stream or something like that). - -use super::{wasm_base, Cc, LinkerFlavor, Target}; - -pub fn target() -> Target { - let mut options = wasm_base::options(); - options.os = "unknown".into(); - - options.add_pre_link_args( - LinkerFlavor::WasmLld(Cc::No), - &[ - // For now this target just never has an entry symbol no matter the output - // type, so unconditionally pass this. - "--no-entry", - "-mwasm64", - ], - ); - options.add_pre_link_args( - LinkerFlavor::WasmLld(Cc::Yes), - &[ - // Make sure clang uses LLD as its linker and is configured appropriately - // otherwise - "--target=wasm64-unknown-unknown", - "-Wl,--no-entry", - ], - ); - - // Any engine that implements wasm64 will surely implement the rest of these - // features since they were all merged into the official spec by the time - // wasm64 was designed. 
- options.features = "+bulk-memory,+mutable-globals,+sign-ext,+nontrapping-fptoint".into(); - - Target { - llvm_target: "wasm64-unknown-unknown".into(), - pointer_width: 64, - data_layout: "e-m:e-p:64:64-p10:8:8-p20:8:8-i64:64-n32:64-S128-ni:1:10:20".into(), - arch: "wasm64".into(), - options, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/wasm_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,133 +0,0 @@ -use super::crt_objects::LinkSelfContainedDefault; -use super::{cvs, Cc, LinkerFlavor, PanicStrategy, RelocModel, TargetOptions, TlsModel}; - -pub fn options() -> TargetOptions { - macro_rules! args { - ($prefix:literal) => { - &[ - // By default LLD only gives us one page of stack (64k) which is a - // little small. Default to a larger stack closer to other PC platforms - // (1MB) and users can always inject their own link-args to override this. - concat!($prefix, "-z"), - concat!($prefix, "stack-size=1048576"), - // By default LLD's memory layout is: - // - // 1. First, a blank page - // 2. Next, all static data - // 3. Finally, the main stack (which grows down) - // - // This has the unfortunate consequence that on stack overflows you - // corrupt static data and can cause some exceedingly weird bugs. To - // help detect this a little sooner we instead request that the stack is - // placed before static data. - // - // This means that we'll generate slightly larger binaries as references - // to static data will take more bytes in the ULEB128 encoding, but - // stack overflow will be guaranteed to trap as it underflows instead of - // corrupting static data. - concat!($prefix, "--stack-first"), - // FIXME we probably shouldn't pass this but instead pass an explicit list - // of symbols we'll allow to be undefined. We don't currently have a - // mechanism of knowing, however, which symbols are intended to be imported - // from the environment and which are intended to be imported from other - // objects linked elsewhere. This is a coarse approximation but is sure to - // hide some bugs and frustrate someone at some point, so we should ideally - // work towards a world where we can explicitly list symbols that are - // supposed to be imported and have all other symbols generate errors if - // they remain undefined. - concat!($prefix, "--allow-undefined"), - // Rust code should never have warnings, and warnings are often - // indicative of bugs, let's prevent them. - concat!($prefix, "--fatal-warnings"), - // LLD only implements C++-like demangling, which doesn't match our own - // mangling scheme. Tell LLD to not demangle anything and leave it up to - // us to demangle these symbols later. Currently rustc does not perform - // further demangling, but tools like twiggy and wasm-bindgen are intended - // to do so. - concat!($prefix, "--no-demangle"), - ] - }; - } - - let mut pre_link_args = TargetOptions::link_args(LinkerFlavor::WasmLld(Cc::No), args!("")); - super::add_link_args(&mut pre_link_args, LinkerFlavor::WasmLld(Cc::Yes), args!("-Wl,")); - - TargetOptions { - is_like_wasm: true, - families: cvs!["wasm"], - - // we allow dynamic linking, but only cdylibs. 
Basically we allow a - // final library artifact that exports some symbols (a wasm module) but - // we don't allow intermediate `dylib` crate types - dynamic_linking: true, - only_cdylib: true, - - // relatively self-explanatory! - exe_suffix: ".wasm".into(), - dll_prefix: "".into(), - dll_suffix: ".wasm".into(), - eh_frame_header: false, - - max_atomic_width: Some(64), - - // Unwinding doesn't work right now, so the whole target unconditionally - // defaults to panic=abort. Note that this is guaranteed to change in - // the future once unwinding is implemented. Don't rely on this as we're - // basically guaranteed to change it once WebAssembly supports - // exceptions. - panic_strategy: PanicStrategy::Abort, - - // Wasm doesn't have atomics yet, so tell LLVM that we're in a single - // threaded model which will legalize atomics to normal operations. - singlethread: true, - - // no dynamic linking, no need for default visibility! - default_hidden_visibility: true, - - // Symbol visibility takes care of this for the WebAssembly. - // Additionally the only known linker, LLD, doesn't support the script - // arguments just yet - limit_rdylib_exports: false, - - // we use the LLD shipped with the Rust toolchain by default - linker: Some("rust-lld".into()), - linker_flavor: LinkerFlavor::WasmLld(Cc::No), - - pre_link_args, - - // FIXME: Figure out cases in which WASM needs to link with a native toolchain. - // - // rust-lang/rust#104137: cannot blindly remove this without putting in - // some other way to compensate for lack of `-nostartfiles` in linker - // invocation. - link_self_contained: LinkSelfContainedDefault::True, - - // This has no effect in LLVM 8 or prior, but in LLVM 9 and later when - // PIC code is implemented this has quite a drastic effect if it stays - // at the default, `pic`. In an effort to keep wasm binaries as minimal - // as possible we're defaulting to `static` for now, but the hope is - // that eventually we can ship a `pic`-compatible standard library which - // works with `static` as well (or works with some method of generating - // non-relative calls and such later on). - relocation_model: RelocModel::Static, - - // When the atomics feature is activated then these two keys matter, - // otherwise they're basically ignored by the standard library. In this - // mode, however, the `#[thread_local]` attribute works (i.e. - // `has_thread_local`) and we need to get it to work by specifying - // `local-exec` as that's all that's implemented in LLVM today for wasm. - has_thread_local: true, - tls_model: TlsModel::LocalExec, - - // gdb scripts don't work on wasm blobs - emit_debug_gdb_scripts: false, - - // There's more discussion of this at - // https://bugs.llvm.org/show_bug.cgi?id=52442 but the general result is - // that this isn't useful for wasm and has tricky issues with - // representation, so this is disabled. 
- generate_arange_section: false, - - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_gnu_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_gnu_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_gnu_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_gnu_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,107 +0,0 @@ -use crate::spec::crt_objects::{self, LinkSelfContainedDefault}; -use crate::spec::{cvs, Cc, DebuginfoKind, LinkerFlavor, Lld, SplitDebuginfo, TargetOptions}; -use std::borrow::Cow; - -pub fn opts() -> TargetOptions { - let mut pre_link_args = TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::No, Lld::No), - &[ - // Enable ASLR - "--dynamicbase", - // ASLR will rebase it anyway so leaving that option enabled only leads to confusion - "--disable-auto-image-base", - ], - ); - super::add_link_args( - &mut pre_link_args, - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - &[ - // Tell GCC to avoid linker plugins, because we are not bundling - // them with Windows installer, and Rust does its own LTO anyways. - "-fno-use-linker-plugin", - "-Wl,--dynamicbase", - "-Wl,--disable-auto-image-base", - ], - ); - - // Order of `late_link_args*` was found through trial and error to work with various - // mingw-w64 versions (not tested on the CI). It's expected to change from time to time. - let mingw_libs = &[ - "-lmsvcrt", - "-lmingwex", - "-lmingw32", - "-lgcc", // alas, mingw* libraries above depend on libgcc - // mingw's msvcrt is a weird hybrid import library and static library. - // And it seems that the linker fails to use import symbols from msvcrt - // that are required from functions in msvcrt in certain cases. For example - // `_fmode` that is used by an implementation of `__p__fmode` in x86_64. - // The library is purposely listed twice to fix that. - // - // See https://github.com/rust-lang/rust/pull/47483 for some more details. - "-lmsvcrt", - "-luser32", - "-lkernel32", - ]; - let mut late_link_args = - TargetOptions::link_args(LinkerFlavor::Gnu(Cc::No, Lld::No), mingw_libs); - super::add_link_args(&mut late_link_args, LinkerFlavor::Gnu(Cc::Yes, Lld::No), mingw_libs); - // If any of our crates are dynamically linked then we need to use - // the shared libgcc_s-dw2-1.dll. This is required to support - // unwinding across DLL boundaries. - let dynamic_unwind_libs = &["-lgcc_s"]; - let mut late_link_args_dynamic = - TargetOptions::link_args(LinkerFlavor::Gnu(Cc::No, Lld::No), dynamic_unwind_libs); - super::add_link_args( - &mut late_link_args_dynamic, - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - dynamic_unwind_libs, - ); - // If all of our crates are statically linked then we can get away - // with statically linking the libgcc unwinding code. This allows - // binaries to be redistributed without the libgcc_s-dw2-1.dll - // dependency, but unfortunately break unwinding across DLL - // boundaries when unwinding across FFI boundaries. 
- let static_unwind_libs = &["-lgcc_eh", "-l:libpthread.a"]; - let mut late_link_args_static = - TargetOptions::link_args(LinkerFlavor::Gnu(Cc::No, Lld::No), static_unwind_libs); - super::add_link_args( - &mut late_link_args_static, - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - static_unwind_libs, - ); - - TargetOptions { - os: "windows".into(), - env: "gnu".into(), - vendor: "pc".into(), - // FIXME(#13846) this should be enabled for windows - function_sections: false, - linker: Some("gcc".into()), - dynamic_linking: true, - dll_tls_export: false, - dll_prefix: "".into(), - dll_suffix: ".dll".into(), - exe_suffix: ".exe".into(), - families: cvs!["windows"], - is_like_windows: true, - allows_weak_linkage: false, - pre_link_args, - pre_link_objects: crt_objects::pre_mingw(), - post_link_objects: crt_objects::post_mingw(), - pre_link_objects_self_contained: crt_objects::pre_mingw_self_contained(), - post_link_objects_self_contained: crt_objects::post_mingw_self_contained(), - link_self_contained: LinkSelfContainedDefault::Mingw, - late_link_args, - late_link_args_dynamic, - late_link_args_static, - abi_return_struct_as_int: true, - emit_debug_gdb_scripts: false, - requires_uwtable: true, - eh_frame_header: false, - // FIXME(davidtwco): Support Split DWARF on Windows GNU - may require LLVM changes to - // output DWO, despite using DWARF, doesn't use ELF.. - debuginfo_kind: DebuginfoKind::Pdb, - supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Off]), - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_gnullvm_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_gnullvm_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_gnullvm_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_gnullvm_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,47 +0,0 @@ -use crate::spec::{cvs, Cc, DebuginfoKind, LinkerFlavor, Lld, SplitDebuginfo, TargetOptions}; -use std::borrow::Cow; - -pub fn opts() -> TargetOptions { - // We cannot use `-nodefaultlibs` because compiler-rt has to be passed - // as a path since it's not added to linker search path by the default. - // There were attempts to make it behave like libgcc (so one can just use -l) - // but LLVM maintainers rejected it: https://reviews.llvm.org/D51440 - let pre_link_args = TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - &["-nolibc", "--unwindlib=none"], - ); - // Order of `late_link_args*` does not matter with LLD. - let late_link_args = TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - &["-lmingw32", "-lmingwex", "-lmsvcrt", "-lkernel32", "-luser32"], - ); - - TargetOptions { - os: "windows".into(), - env: "gnu".into(), - vendor: "pc".into(), - abi: "llvm".into(), - linker: Some("clang".into()), - dynamic_linking: true, - dll_tls_export: false, - dll_prefix: "".into(), - dll_suffix: ".dll".into(), - exe_suffix: ".exe".into(), - families: cvs!["windows"], - is_like_windows: true, - allows_weak_linkage: false, - pre_link_args, - late_link_args, - abi_return_struct_as_int: true, - emit_debug_gdb_scripts: false, - requires_uwtable: true, - eh_frame_header: false, - no_default_libraries: false, - has_thread_local: true, - // FIXME(davidtwco): Support Split DWARF on Windows GNU - may require LLVM changes to - // output DWO, despite using DWARF, doesn't use ELF.. 
- debuginfo_kind: DebuginfoKind::Pdb, - supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Off]), - ..Default::default() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_msvc_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_msvc_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_msvc_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_msvc_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -use crate::spec::{cvs, TargetOptions}; - -pub fn opts() -> TargetOptions { - let base = super::msvc_base::opts(); - - TargetOptions { - os: "windows".into(), - env: "msvc".into(), - vendor: "pc".into(), - dynamic_linking: true, - dll_prefix: "".into(), - dll_suffix: ".dll".into(), - exe_suffix: ".exe".into(), - staticlib_prefix: "".into(), - staticlib_suffix: ".lib".into(), - families: cvs!["windows"], - crt_static_allows_dylibs: true, - crt_static_respected: true, - requires_uwtable: true, - // Currently we don't pass the /NODEFAULTLIB flag to the linker on MSVC - // as there's been trouble in the past of linking the C++ standard - // library required by LLVM. This likely needs to happen one day, but - // in general Windows is also a more controlled environment than - // Unix, so it's not necessarily as critical that this be implemented. - // - // Note that there are also some licensing worries about statically - // linking some libraries which require a specific agreement, so it may - // not ever be possible for us to pass this flag. - no_default_libraries: false, - has_thread_local: true, - - ..base - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_uwp_gnu_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_uwp_gnu_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_uwp_gnu_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_uwp_gnu_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,35 +0,0 @@ -use crate::spec::{Cc, LinkArgs, LinkerFlavor, Lld, TargetOptions}; - -pub fn opts() -> TargetOptions { - let base = super::windows_gnu_base::opts(); - - // FIXME: This should be updated for the exception machinery changes from #67502 - // and inherit from `windows_gnu_base`, at least partially. - let mingw_libs = &[ - "-lwinstorecompat", - "-lruntimeobject", - "-lsynchronization", - "-lvcruntime140_app", - "-lucrt", - "-lwindowsapp", - "-lmingwex", - "-lmingw32", - ]; - let mut late_link_args = - TargetOptions::link_args(LinkerFlavor::Gnu(Cc::No, Lld::No), mingw_libs); - super::add_link_args(&mut late_link_args, LinkerFlavor::Gnu(Cc::Yes, Lld::No), mingw_libs); - // Reset the flags back to empty until the FIXME above is addressed. 
- let late_link_args_dynamic = LinkArgs::new(); - let late_link_args_static = LinkArgs::new(); - - TargetOptions { - abi: "uwp".into(), - vendor: "uwp".into(), - limit_rdylib_exports: false, - late_link_args, - late_link_args_dynamic, - late_link_args_static, - - ..base - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_uwp_msvc_base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_uwp_msvc_base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_uwp_msvc_base.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/windows_uwp_msvc_base.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -use crate::spec::{LinkerFlavor, Lld, TargetOptions}; - -pub fn opts() -> TargetOptions { - let mut opts = super::windows_msvc_base::opts(); - - opts.abi = "uwp".into(); - opts.vendor = "uwp".into(); - opts.add_pre_link_args(LinkerFlavor::Msvc(Lld::No), &["/APPCONTAINER", "mincore.lib"]); - - opts -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_darwin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_darwin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_darwin.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_darwin.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use super::apple_base::{macos_llvm_target, opts, Arch}; -use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, SanitizerSet}; -use crate::spec::{StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::X86_64; - let mut base = opts("macos", arch); - base.max_atomic_width = Some(128); // penryn+ supports cmpxchg16b - base.frame_pointer = FramePointer::Always; - base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - base.supported_sanitizers = - SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::LEAK | SanitizerSet::THREAD; - - Target { - // Clang automatically chooses a more specific target based on - // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work - // correctly, we do too. 
- llvm_target: macos_llvm_target(arch).into(), - pointer_width: 64, - data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: arch.target_arch(), - options: TargetOptions { mcount: "\u{1}mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_ios.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_ios.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_ios.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_ios.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -use super::apple_base::{ios_sim_llvm_target, opts, Arch}; -use crate::spec::{SanitizerSet, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::X86_64_sim; - let mut base = opts("ios", arch); - base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::THREAD; - - Target { - llvm_target: ios_sim_llvm_target(arch).into(), - pointer_width: 64, - data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: arch.target_arch(), - options: TargetOptions { - max_atomic_width: Some(128), - stack_probes: StackProbeType::X86, - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_ios_macabi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_ios_macabi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_ios_macabi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_ios_macabi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -use super::apple_base::{opts, Arch}; -use crate::spec::{Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let llvm_target = "x86_64-apple-ios14.0-macabi"; - - let arch = Arch::X86_64_macabi; - let mut base = opts("ios", arch); - base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-target", llvm_target]); - base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::LEAK | SanitizerSet::THREAD; - - Target { - llvm_target: llvm_target.into(), - pointer_width: 64, - data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: arch.target_arch(), - options: TargetOptions { - max_atomic_width: Some(128), - stack_probes: StackProbeType::X86, - ..base - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_tvos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_tvos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_tvos.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_tvos.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use super::apple_base::{opts, tvos_sim_llvm_target, Arch}; -use crate::spec::{StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::X86_64_sim; - Target { - llvm_target: tvos_sim_llvm_target(arch).into(), - pointer_width: 64, - data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: arch.target_arch(), - options: TargetOptions { - max_atomic_width: Some(128), - stack_probes: StackProbeType::X86, - ..opts("tvos", arch) - }, - 
} -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_watchos_sim.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_watchos_sim.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_watchos_sim.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_apple_watchos_sim.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -use super::apple_base::{opts, watchos_sim_llvm_target, Arch}; -use crate::spec::{StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::X86_64_sim; - Target { - llvm_target: watchos_sim_llvm_target(arch).into(), - pointer_width: 64, - data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: arch.target_arch(), - options: TargetOptions { - max_atomic_width: Some(128), - stack_probes: StackProbeType::X86, - forces_embed_bitcode: true, - // Taken from a clang build on Xcode 11.4.1. - // These arguments are not actually invoked - they just have - // to look right to pass App Store validation. - bitcode_llvm_cmdline: "-triple\0\ - x86_64-apple-watchos5.0-simulator\0\ - -emit-obj\0\ - -disable-llvm-passes\0\ - -target-abi\0\ - darwinpcs\0\ - -Os\0" - .into(), - ..opts("watchos", arch) - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_fortanix_unknown_sgx.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_fortanix_unknown_sgx.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_fortanix_unknown_sgx.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_fortanix_unknown_sgx.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,83 +0,0 @@ -use std::borrow::Cow; - -use super::{cvs, Cc, LinkerFlavor, Lld, Target, TargetOptions}; - -pub fn target() -> Target { - let pre_link_args = TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::No, Lld::No), - &[ - "-e", - "elf_entry", - "-Bstatic", - "--gc-sections", - "-z", - "text", - "-z", - "norelro", - "--no-undefined", - "--error-unresolved-symbols", - "--no-undefined-version", - "-Bsymbolic", - "--export-dynamic", - // The following symbols are needed by libunwind, which is linked after - // libstd. Make sure they're included in the link. 
- "-u", - "__rust_abort", - "-u", - "__rust_c_alloc", - "-u", - "__rust_c_dealloc", - "-u", - "__rust_print_err", - "-u", - "__rust_rwlock_rdlock", - "-u", - "__rust_rwlock_unlock", - "-u", - "__rust_rwlock_wrlock", - ], - ); - - const EXPORT_SYMBOLS: &[&str] = &[ - "sgx_entry", - "HEAP_BASE", - "HEAP_SIZE", - "RELA", - "RELACOUNT", - "ENCLAVE_SIZE", - "CFGDATA_BASE", - "DEBUG", - "EH_FRM_HDR_OFFSET", - "EH_FRM_HDR_LEN", - "EH_FRM_OFFSET", - "EH_FRM_LEN", - "TEXT_BASE", - "TEXT_SIZE", - ]; - let opts = TargetOptions { - os: "unknown".into(), - env: "sgx".into(), - vendor: "fortanix".into(), - abi: "fortanix".into(), - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - max_atomic_width: Some(64), - cpu: "x86-64".into(), - plt_by_default: false, - features: "+rdrnd,+rdseed,+lvi-cfi,+lvi-load-hardening".into(), - llvm_args: cvs!["--x86-experimental-lvi-inline-asm-hardening"], - position_independent_executables: true, - pre_link_args, - override_export_symbols: Some(EXPORT_SYMBOLS.iter().cloned().map(Cow::from).collect()), - relax_elf_relocations: true, - ..Default::default() - }; - Target { - llvm_target: "x86_64-elf".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: opts, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_fuchsia.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_fuchsia.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_fuchsia.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_fuchsia.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -pub use crate::spec::x86_64_unknown_fuchsia::target; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_linux_android.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_linux_android.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_linux_android.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_linux_android.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::android_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - // https://developer.android.com/ndk/guides/abis.html#86-64 - base.features = "+mmx,+sse,+sse2,+sse3,+ssse3,+sse4.1,+sse4.2,+popcnt".into(); - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - base.supports_xray = true; - - Target { - llvm_target: "x86_64-linux-android".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: TargetOptions { supported_sanitizers: SanitizerSet::ADDRESS, ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_nto_qnx710.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_nto_qnx710.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_nto_qnx710.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_nto_qnx710.rs 1970-01-01 
00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -use super::nto_qnx_base; -use crate::spec::{Cc, LinkerFlavor, Lld, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "x86_64-pc-unknown".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: TargetOptions { - cpu: "x86-64".into(), - plt_by_default: false, - max_atomic_width: Some(64), - pre_link_args: TargetOptions::link_args( - LinkerFlavor::Gnu(Cc::Yes, Lld::No), - &["-Vgcc_ntox86_64_cxx"], - ), - env: "nto71".into(), - ..nto_qnx_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_solaris.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_solaris.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_solaris.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_solaris.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, SanitizerSet, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::solaris_base::opts(); - base.add_pre_link_args(LinkerFlavor::Unix(Cc::Yes), &["-m64"]); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.vendor = "pc".into(); - base.max_atomic_width = Some(64); - base.stack_probes = StackProbeType::X86; - base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::THREAD; - - Target { - llvm_target: "x86_64-pc-solaris".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, Target}; - -pub fn target() -> Target { - let mut base = super::windows_gnu_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - // Use high-entropy 64 bit address space for ASLR - base.add_pre_link_args( - LinkerFlavor::Gnu(Cc::No, Lld::No), - &["-m", "i386pep", "--high-entropy-va"], - ); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64", "-Wl,--high-entropy-va"]); - base.max_atomic_width = Some(64); - base.linker = Some("x86_64-w64-mingw32-gcc".into()); - - Target { - llvm_target: "x86_64-pc-windows-gnu".into(), - pointer_width: 64, - data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_gnullvm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_gnullvm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_gnullvm.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_gnullvm.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, 
Target}; - -pub fn target() -> Target { - let mut base = super::windows_gnullvm_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.max_atomic_width = Some(64); - base.linker = Some("x86_64-w64-mingw32-clang".into()); - - Target { - llvm_target: "x86_64-pc-windows-gnu".into(), - pointer_width: 64, - data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_msvc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_pc_windows_msvc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::windows_msvc_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - - Target { - llvm_target: "x86_64-pc-windows-msvc".into(), - pointer_width: 64, - data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_sun_solaris.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_sun_solaris.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_sun_solaris.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_sun_solaris.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::solaris_base::opts(); - base.add_pre_link_args(LinkerFlavor::Unix(Cc::Yes), &["-m64"]); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.vendor = "sun".into(); - base.max_atomic_width = Some(64); - base.stack_probes = StackProbeType::X86; - - Target { - llvm_target: "x86_64-pc-solaris".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unikraft_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unikraft_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unikraft_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unikraft_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "x86_64-unknown-linux-musl".into(), - pointer_width: 64, - arch: "x86_64".into(), - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - options: TargetOptions { - cpu: "x86-64".into(), - plt_by_default: false, - pre_link_args: TargetOptions::link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]), - max_atomic_width: Some(64), - stack_probes: StackProbeType::X86, - 
..super::unikraft_linux_musl_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_dragonfly.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_dragonfly.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_dragonfly.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_dragonfly.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::dragonfly_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - - Target { - llvm_target: "x86_64-unknown-dragonfly".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_freebsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_freebsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_freebsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_freebsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::freebsd_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - base.supported_sanitizers = - SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::MEMORY | SanitizerSet::THREAD; - base.supports_xray = true; - - Target { - llvm_target: "x86_64-unknown-freebsd".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_fuchsia.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_fuchsia.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_fuchsia.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_fuchsia.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{SanitizerSet, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::fuchsia_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.stack_probes = StackProbeType::X86; - base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI; - - Target { - llvm_target: "x86_64-unknown-fuchsia".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_haiku.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_haiku.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_haiku.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_haiku.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::haiku_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - // This option is required to build executables on Haiku x86_64 - base.position_independent_executables = true; - - Target { - llvm_target: "x86_64-unknown-haiku".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_hermit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_hermit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_hermit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_hermit.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - Target { - llvm_target: "x86_64-unknown-hermit".into(), - pointer_width: 64, - arch: "x86_64".into(), - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - options: TargetOptions { - cpu: "x86-64".into(), - features: "+rdrnd,+rdseed".into(), - plt_by_default: false, - max_atomic_width: Some(64), - stack_probes: StackProbeType::X86, - ..super::hermit_base::opts() - }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_illumos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_illumos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_illumos.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_illumos.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, SanitizerSet, Target}; - -pub fn target() -> Target { - let mut base = super::illumos_base::opts(); - base.add_pre_link_args(LinkerFlavor::Unix(Cc::Yes), &["-m64", "-std=c99"]); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::THREAD; - - Target { - // LLVM does not currently have a separate illumos target, - // so we still pass Solaris to it - llvm_target: "x86_64-pc-solaris".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_l4re_uclibc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_l4re_uclibc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_l4re_uclibc.rs 2023-12-04 19:48:34.000000000 +0000 
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_l4re_uclibc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -use crate::spec::{PanicStrategy, Target}; - -pub fn target() -> Target { - let mut base = super::l4re_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.panic_strategy = PanicStrategy::Abort; - - Target { - llvm_target: "x86_64-unknown-l4re-uclibc".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - base.static_position_independent_executables = true; - base.supported_sanitizers = SanitizerSet::ADDRESS - | SanitizerSet::CFI - | SanitizerSet::LEAK - | SanitizerSet::MEMORY - | SanitizerSet::SAFESTACK - | SanitizerSet::THREAD; - base.supports_xray = true; - - Target { - llvm_target: "x86_64-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnux32.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnux32.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnux32.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnux32.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); - base.cpu = "x86-64".into(); - base.abi = "x32".into(); - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-mx32"]); - base.stack_probes = StackProbeType::X86; - base.has_thread_local = false; - // BUG(GabrielMajeri): disabling the PLT on x86_64 Linux with x32 ABI - // breaks code gen. 
See LLVM bug 36743 - base.plt_by_default = true; - - Target { - llvm_target: "x86_64-unknown-linux-gnux32".into(), - pointer_width: 32, - data_layout: "e-m:e-p:32:32-p270:32:32-p271:32:32-p272:64:64-\ - i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_musl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_musl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_musl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_musl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - base.static_position_independent_executables = true; - base.supported_sanitizers = SanitizerSet::ADDRESS - | SanitizerSet::CFI - | SanitizerSet::LEAK - | SanitizerSet::MEMORY - | SanitizerSet::THREAD; - base.supports_xray = true; - - Target { - llvm_target: "x86_64-unknown-linux-musl".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_ohos.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_ohos.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_ohos.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_linux_ohos.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::linux_ohos_base::opts(); - base.cpu = "x86-64".into(); - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - base.static_position_independent_executables = true; - base.supported_sanitizers = SanitizerSet::ADDRESS - | SanitizerSet::CFI - | SanitizerSet::LEAK - | SanitizerSet::MEMORY - | SanitizerSet::THREAD; - base.supports_xray = true; - - Target { - // LLVM 15 doesn't support OpenHarmony yet, use a linux target instead. 
- llvm_target: "x86_64-unknown-linux-musl".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_netbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_netbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_netbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_netbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let mut base = super::netbsd_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - base.supported_sanitizers = SanitizerSet::ADDRESS - | SanitizerSet::CFI - | SanitizerSet::LEAK - | SanitizerSet::MEMORY - | SanitizerSet::THREAD; - base.supports_xray = true; - - Target { - llvm_target: "x86_64-unknown-netbsd".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: TargetOptions { mcount: "__mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_none.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_none.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_none.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_none.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,38 +0,0 @@ -// Generic x86-64 target for bare-metal code - Floating point disabled -// -// Can be used in conjunction with the `target-feature` and -// `target-cpu` compiler flags to opt-in more hardware-specific -// features. 
- -use super::{Cc, CodeModel, LinkerFlavor, Lld, PanicStrategy}; -use super::{RelroLevel, SanitizerSet, StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let opts = TargetOptions { - cpu: "x86-64".into(), - plt_by_default: false, - max_atomic_width: Some(64), - stack_probes: StackProbeType::X86, - position_independent_executables: true, - static_position_independent_executables: true, - relro_level: RelroLevel::Full, - linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), - linker: Some("rust-lld".into()), - features: - "-mmx,-sse,-sse2,-sse3,-ssse3,-sse4.1,-sse4.2,-3dnow,-3dnowa,-avx,-avx2,+soft-float" - .into(), - supported_sanitizers: SanitizerSet::KCFI | SanitizerSet::KERNELADDRESS, - disable_redzone: true, - panic_strategy: PanicStrategy::Abort, - code_model: Some(CodeModel::Kernel), - ..Default::default() - }; - Target { - llvm_target: "x86_64-unknown-none-elf".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: opts, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_openbsd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_openbsd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_openbsd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_openbsd.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::openbsd_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - base.supports_xray = true; - - Target { - llvm_target: "x86_64-unknown-openbsd".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_redox.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_redox.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_redox.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_redox.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::redox_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - - Target { - llvm_target: "x86_64-unknown-redox".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_uefi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_uefi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_uefi.rs 2023-12-04 19:48:34.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_unknown_uefi.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,38 +0,0 @@ -// This defines the amd64 target for UEFI systems as described in the UEFI specification. See the -// uefi-base module for generic UEFI options. On x86_64 systems (mostly called "x64" in the spec) -// UEFI systems always run in long-mode, have the interrupt-controller pre-configured and force a -// single-CPU execution. -// The win64 ABI is used. It differs from the sysv64 ABI, so we must use a windows target with -// LLVM. "x86_64-unknown-windows" is used to get the minimal subset of windows-specific features. - -use crate::{abi::call::Conv, spec::Target}; - -pub fn target() -> Target { - let mut base = super::uefi_msvc_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.entry_abi = Conv::X86_64Win64; - - // We disable MMX and SSE for now, even though UEFI allows using them. Problem is, you have to - // enable these CPU features explicitly before their first use, otherwise their instructions - // will trigger an exception. Rust does not inject any code that enables AVX/MMX/SSE - // instruction sets, so this must be done by the firmware. However, existing firmware is known - // to leave these uninitialized, thus triggering exceptions if we make use of them. Which is - // why we avoid them and instead use soft-floats. This is also what GRUB and friends did so - // far. - // - // If you initialize FP units yourself, you can override these flags with custom linker - // arguments, thus giving you access to full MMX/SSE acceleration. - base.features = "-mmx,-sse,+soft-float".into(); - - Target { - llvm_target: "x86_64-unknown-windows".into(), - pointer_width: 64, - data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_uwp_windows_gnu.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_uwp_windows_gnu.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_uwp_windows_gnu.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_uwp_windows_gnu.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, Target}; - -pub fn target() -> Target { - let mut base = super::windows_uwp_gnu_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - // Use high-entropy 64 bit address space for ASLR - base.add_pre_link_args( - LinkerFlavor::Gnu(Cc::No, Lld::No), - &["-m", "i386pep", "--high-entropy-va"], - ); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64", "-Wl,--high-entropy-va"]); - base.max_atomic_width = Some(64); - - Target { - llvm_target: "x86_64-pc-windows-gnu".into(), - pointer_width: 64, - data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_uwp_windows_msvc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_uwp_windows_msvc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_uwp_windows_msvc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_uwp_windows_msvc.rs 1970-01-01 
00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -use crate::spec::Target; - -pub fn target() -> Target { - let mut base = super::windows_uwp_msvc_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - - Target { - llvm_target: "x86_64-pc-windows-msvc".into(), - pointer_width: 64, - data_layout: "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_wrs_vxworks.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_wrs_vxworks.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_wrs_vxworks.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64_wrs_vxworks.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target}; - -pub fn target() -> Target { - let mut base = super::vxworks_base::opts(); - base.cpu = "x86-64".into(); - base.plt_by_default = false; - base.max_atomic_width = Some(64); - base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - base.disable_redzone = true; - - Target { - llvm_target: "x86_64-unknown-linux-gnu".into(), - pointer_width: 64, - data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: "x86_64".into(), - options: base, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64h_apple_darwin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64h_apple_darwin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64h_apple_darwin.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_target/src/spec/x86_64h_apple_darwin.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,44 +0,0 @@ -use super::apple_base::{macos_llvm_target, opts, Arch}; -use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, SanitizerSet}; -use crate::spec::{StackProbeType, Target, TargetOptions}; - -pub fn target() -> Target { - let arch = Arch::X86_64h; - let mut base = opts("macos", arch); - base.max_atomic_width = Some(128); - base.frame_pointer = FramePointer::Always; - base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-m64"]); - base.stack_probes = StackProbeType::X86; - base.supported_sanitizers = - SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::LEAK | SanitizerSet::THREAD; - - // x86_64h is core2-avx without a few of the features which would otherwise - // be guaranteed, so we need to disable those. This imitates clang's logic: - // - https://github.com/llvm/llvm-project/blob/bd1f7c417/clang/lib/Driver/ToolChains/Arch/X86.cpp#L77-L78 - // - https://github.com/llvm/llvm-project/blob/bd1f7c417/clang/lib/Driver/ToolChains/Arch/X86.cpp#L133-L141 - // - // FIXME: Sadly, turning these off here disables them in such a way that they - // aren't re-enabled by `-Ctarget-cpu=native` (on a machine that has them). - // It would be nice if this were not the case, but fixing it seems tricky - // (and given that the main use-case for this target is for use in universal - // binaries, probably not that important). - base.features = "-rdrnd,-aes,-pclmul,-rtm,-fsgsbase".into(); - // Double-check that the `cpu` is what we expect (if it's not the list above - // may need updating). 
- assert_eq!( - base.cpu, "core-avx2", - "you need to adjust the feature list in x86_64h-apple-darwin if you change this", - ); - - Target { - // Clang automatically chooses a more specific target based on - // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work - // correctly, we do too. - llvm_target: macos_llvm_target(arch).into(), - pointer_width: 64, - data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" - .into(), - arch: arch.target_arch(), - options: TargetOptions { mcount: "\u{1}mcount".into(), ..base }, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,21 +3,19 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] -rustc_parse_format = { path = "../rustc_parse_format" } -tracing = "0.1" -rustc_attr = { path = "../rustc_attr" } -rustc_middle = { path = "../rustc_middle" } +# tidy-alphabetical-start rustc_ast = { path = "../rustc_ast" } +rustc_attr = { path = "../rustc_attr" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } -rustc_hir = { path = "../rustc_hir" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } rustc_infer = { path = "../rustc_infer" } rustc_macros = { path = "../rustc_macros" } +rustc_middle = { path = "../rustc_middle" } +rustc_parse_format = { path = "../rustc_parse_format" } rustc_query_system = { path = "../rustc_query_system" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } @@ -25,3 +23,5 @@ rustc_target = { path = "../rustc_target" } rustc_transmute = { path = "../rustc_transmute", features = ["rustc"] } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -28,6 +28,11 @@ .label = invalid on-clause here trait_selection_malformed_on_unimplemented_attr = malformed `on_unimplemented` attribute + .help = only `message`, `note` and `label` are allowed as options + .label = invalid option found here + +trait_selection_missing_options_for_on_unimplemented_attr = missing options for `on_unimplemented` attribute + .help = at least one of the `message`, `note` and `label` options are expected trait_selection_negative_positive_conflict = found both positive and negative implementation of trait `{$trait_desc}`{$self_desc -> [none] {""} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,6 +11,9 @@ //! 
This API is completely unstable and subject to change. #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![feature(associated_type_bounds)] #![feature(box_patterns)] #![feature(control_flow_enum)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/assembly/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/assembly/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/assembly/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/assembly/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -37,6 +37,8 @@ fn trait_ref(self, tcx: TyCtxt<'tcx>) -> ty::TraitRef<'tcx>; + fn polarity(self) -> ty::ImplPolarity; + fn with_self_ty(self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> Self; fn trait_def_id(self, tcx: TyCtxt<'tcx>) -> DefId; @@ -191,18 +193,26 @@ goal: Goal<'tcx, Self>, ) -> QueryResult<'tcx>; - /// A generator (that comes from an `async` desugaring) is known to implement - /// `Future<Output = O>`, where `O` is given by the generator's return type + /// A coroutine (that comes from an `async` desugaring) is known to implement + /// `Future<Output = O>`, where `O` is given by the coroutine's return type /// that was computed during type-checking. fn consider_builtin_future_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, ) -> QueryResult<'tcx>; - /// A generator (that doesn't come from an `async` desugaring) is known to - /// implement `Generator<R, Yield = Y, Return = O>`, given the resume, yield, - /// and return types of the generator computed during type-checking. - fn consider_builtin_generator_candidate( + /// A coroutine (that comes from a `gen` desugaring) is known to implement + /// `Iterator<Item = O>`, where `O` is given by the generator's yield type + /// that was computed during type-checking. + fn consider_builtin_iterator_candidate( + ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx>; + + /// A coroutine (that doesn't come from an `async` or `gen` desugaring) is known to + /// implement `Coroutine<R, Yield = Y, Return = O>`, given the resume, yield, + /// and return types of the coroutine computed during type-checking. + fn consider_builtin_coroutine_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, ) -> QueryResult<'tcx>; @@ -410,7 +420,7 @@ | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Generator(_, _, _) + | ty::Coroutine(_, _, _) | ty::Never | ty::Tuple(_) => { let simp = @@ -467,9 +477,9 @@ ty::Alias(_, _) | ty::Placeholder(..) | ty::Error(_) => (), // FIXME: These should ideally not exist as a self type. It would be nice for - // the builtin auto trait impls of generators to instead directly recurse + // the builtin auto trait impls of coroutines to instead directly recurse // into the witness. - ty::GeneratorWitness(..) => (), + ty::CoroutineWitness(..) => (), // These variants should not exist as a self type.
ty::Infer(ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) @@ -552,8 +562,10 @@ G::consider_builtin_pointee_candidate(self, goal) } else if lang_items.future_trait() == Some(trait_def_id) { G::consider_builtin_future_candidate(self, goal) - } else if lang_items.gen_trait() == Some(trait_def_id) { - G::consider_builtin_generator_candidate(self, goal) + } else if lang_items.iterator_trait() == Some(trait_def_id) { + G::consider_builtin_iterator_candidate(self, goal) + } else if lang_items.coroutine_trait() == Some(trait_def_id) { + G::consider_builtin_coroutine_candidate(self, goal) } else if lang_items.discriminant_kind_trait() == Some(trait_def_id) { G::consider_builtin_discriminant_kind_candidate(self, goal) } else if lang_items.destruct_trait() == Some(trait_def_id) { @@ -620,8 +632,8 @@ | ty::FnPtr(_) | ty::Dynamic(..) | ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) | ty::Param(_) @@ -776,8 +788,8 @@ | ty::FnPtr(_) | ty::Alias(..) | ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) | ty::Param(_) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs 2023-12-21 16:55:28.000000000 +0000 @@ -12,7 +12,7 @@ // Calculates the constituent types of a type for `auto trait` purposes. // -// For types with an "existential" binder, i.e. generator witnesses, we also +// For types with an "existential" binder, i.e. coroutine witnesses, we also // instantiate the binder with placeholders eagerly. #[instrument(level = "debug", skip(ecx), ret)] pub(in crate::solve) fn instantiate_constituent_tys_for_auto_trait<'tcx>( @@ -57,14 +57,14 @@ ty::Closure(_, ref args) => Ok(vec![args.as_closure().tupled_upvars_ty()]), - ty::Generator(_, ref args, _) => { - let generator_args = args.as_generator(); - Ok(vec![generator_args.tupled_upvars_ty(), generator_args.witness()]) + ty::Coroutine(_, ref args, _) => { + let coroutine_args = args.as_coroutine(); + Ok(vec![coroutine_args.tupled_upvars_ty(), coroutine_args.witness()]) } - ty::GeneratorWitness(def_id, args) => Ok(ecx + ty::CoroutineWitness(def_id, args) => Ok(ecx .tcx() - .generator_hidden_types(def_id) + .coroutine_hidden_types(def_id) .map(|bty| { ecx.instantiate_binder_with_placeholders(replace_erased_lifetimes_with_bound_vars( tcx, @@ -124,8 +124,8 @@ | ty::RawPtr(..) | ty::Char | ty::Ref(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Array(..) | ty::Closure(..) | ty::Never @@ -177,7 +177,7 @@ ty::Dynamic(..) | ty::Str | ty::Slice(_) - | ty::Generator(_, _, Movability::Static) + | ty::Coroutine(_, _, Movability::Static) | ty::Foreign(..) 
| ty::Ref(_, _, Mutability::Mut) | ty::Adt(_, _) @@ -194,18 +194,18 @@ ty::Closure(_, args) => Ok(vec![args.as_closure().tupled_upvars_ty()]), - ty::Generator(_, args, Movability::Movable) => { - if ecx.tcx().features().generator_clone { - let generator = args.as_generator(); - Ok(vec![generator.tupled_upvars_ty(), generator.witness()]) + ty::Coroutine(_, args, Movability::Movable) => { + if ecx.tcx().features().coroutine_clone { + let coroutine = args.as_coroutine(); + Ok(vec![coroutine.tupled_upvars_ty(), coroutine.witness()]) } else { Err(NoSolution) } } - ty::GeneratorWitness(def_id, args) => Ok(ecx + ty::CoroutineWitness(def_id, args) => Ok(ecx .tcx() - .generator_hidden_types(def_id) + .coroutine_hidden_types(def_id) .map(|bty| { ecx.instantiate_binder_with_placeholders(replace_erased_lifetimes_with_bound_vars( ecx.tcx(), @@ -278,8 +278,8 @@ | ty::RawPtr(_) | ty::Ref(_, _, _) | ty::Dynamic(_, _, _) - | ty::Generator(_, _, _) - | ty::GeneratorWitness(..) + | ty::Coroutine(_, _, _) + | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) | ty::Alias(_, _) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/canonicalize.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/canonicalize.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/canonicalize.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/canonicalize.rs 2023-12-21 16:55:28.000000000 +0000 @@ -224,12 +224,20 @@ let kind = match *r { ty::ReLateBound(..) => return r, - ty::ReStatic => match self.canonicalize_mode { + // We may encounter `ReStatic` in item signatures or the hidden type + // of an opaque. `ReErased` should only be encountered in the hidden + // type of an opaque for regions that are ignored for the purposes of + // captures. + // + // FIXME: We should investigate the perf implications of not uniquifying + // `ReErased`. We may be able to short-circuit registering region + // obligations if we encounter a `ReErased` on one side, for example. + ty::ReStatic | ty::ReErased => match self.canonicalize_mode { CanonicalizeMode::Input => CanonicalVarKind::Region(ty::UniverseIndex::ROOT), CanonicalizeMode::Response { .. } => return r, }, - ty::ReErased | ty::ReFree(_) | ty::ReEarlyBound(_) => match self.canonicalize_mode { + ty::ReFree(_) | ty::ReEarlyBound(_) => match self.canonicalize_mode { CanonicalizeMode::Input => CanonicalVarKind::Region(ty::UniverseIndex::ROOT), CanonicalizeMode::Response { .. } => bug!("unexpected region in response: {r:?}"), }, @@ -329,8 +337,8 @@ | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Generator(_, _, _) - | ty::GeneratorWitness(..) + | ty::Coroutine(_, _, _) + | ty::CoroutineWitness(..) 
| ty::Never | ty::Tuple(_) | ty::Alias(_, _) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/eval_ctxt/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,987 @@ +use rustc_data_structures::stack::ensure_sufficient_stack; +use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_infer::infer::at::ToTrace; +use rustc_infer::infer::canonical::CanonicalVarValues; +use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; +use rustc_infer::infer::{ + DefineOpaqueTypes, InferCtxt, InferOk, LateBoundRegionConversionTime, TyCtxtInferExt, +}; +use rustc_infer::traits::query::NoSolution; +use rustc_infer::traits::ObligationCause; +use rustc_middle::infer::canonical::CanonicalVarInfos; +use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind}; +use rustc_middle::traits::solve::inspect; +use rustc_middle::traits::solve::{ + CanonicalInput, CanonicalResponse, Certainty, IsNormalizesToHack, PredefinedOpaques, + PredefinedOpaquesData, QueryResult, +}; +use rustc_middle::traits::{specialization_graph, DefiningAnchor}; +use rustc_middle::ty::{ + self, OpaqueTypeKey, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable, TypeVisitable, + TypeVisitableExt, TypeVisitor, +}; +use rustc_session::config::DumpSolverProofTree; +use rustc_span::DUMMY_SP; +use std::io::Write; +use std::ops::ControlFlow; + +use crate::traits::vtable::{count_own_vtable_entries, prepare_vtable_segments, VtblSegment}; + +use super::inspect::ProofTreeBuilder; +use super::SolverMode; +use super::{search_graph, GoalEvaluationKind}; +use super::{search_graph::SearchGraph, Goal}; +pub use select::InferCtxtSelectExt; + +mod canonical; +mod probe; +mod select; + +pub struct EvalCtxt<'a, 'tcx> { + /// The inference context that backs (mostly) inference and placeholder terms + /// instantiated while solving goals. + /// + /// NOTE: The `InferCtxt` that backs the `EvalCtxt` is intentionally private, + /// because the `InferCtxt` is much more general than `EvalCtxt`. Methods such + /// as `take_registered_region_obligations` can mess up query responses, + /// using `At::normalize` is totally wrong, calling `evaluate_root_goal` can + /// cause coinductive unsoundness, etc. + /// + /// Methods that are generally of use for trait solving are *intentionally* + /// re-declared through the `EvalCtxt` below, often with cleaner signatures + /// since we don't care about things like `ObligationCause`s and `Span`s here. + /// If some `InferCtxt` method is missing, please first think defensively about + /// the method's compatibility with this solver, or if an existing one does + /// the job already. + infcx: &'a InferCtxt<'tcx>, + + /// The variable info for the `var_values`, only used to make an ambiguous response + /// with no constraints. + variables: CanonicalVarInfos<'tcx>, + pub(super) var_values: CanonicalVarValues<'tcx>, + + predefined_opaques_in_body: PredefinedOpaques<'tcx>, + + /// The highest universe index nameable by the caller. + /// + /// When we enter a new binder inside of the query we create new universes + /// which the caller cannot name. We have to be careful with variables from + /// these new universes when creating the query response. 
+ /// + /// Both because these new universes can prevent us from reaching a fixpoint + /// if we have a coinductive cycle and because that's the only way we can return + /// new placeholders to the caller. + pub(super) max_input_universe: ty::UniverseIndex, + + pub(super) search_graph: &'a mut SearchGraph<'tcx>, + + pub(super) nested_goals: NestedGoals<'tcx>, + + // Has this `EvalCtxt` errored out with `NoSolution` in `try_evaluate_added_goals`? + // + // If so, then it can no longer be used to make a canonical query response, + // since subsequent calls to `try_evaluate_added_goals` have possibly dropped + // ambiguous goals. Instead, a probe needs to be introduced somewhere in the + // evaluation code. + tainted: Result<(), NoSolution>, + + pub(super) inspect: ProofTreeBuilder<'tcx>, +} + +#[derive(Debug, Clone)] +pub(super) struct NestedGoals<'tcx> { + /// This normalizes-to goal that is treated specially during the evaluation + /// loop. In each iteration we take the RHS of the projection, replace it with + /// a fresh inference variable, and only after evaluating that goal do we + /// equate the fresh inference variable with the actual RHS of the predicate. + /// + /// This is both to improve caching, and to avoid using the RHS of the + /// projection predicate to influence the normalizes-to candidate we select. + /// + /// This is not a 'real' nested goal. We must not forget to replace the RHS + /// with a fresh inference variable when we evaluate this goal. That can result + /// in a trait solver cycle. This would currently result in overflow but can be + /// can be unsound with more powerful coinduction in the future. + pub(super) normalizes_to_hack_goal: Option>>, + /// The rest of the goals which have not yet processed or remain ambiguous. + pub(super) goals: Vec>>, +} + +impl NestedGoals<'_> { + pub(super) fn new() -> Self { + Self { normalizes_to_hack_goal: None, goals: Vec::new() } + } + + pub(super) fn is_empty(&self) -> bool { + self.normalizes_to_hack_goal.is_none() && self.goals.is_empty() + } +} + +#[derive(PartialEq, Eq, Debug, Hash, HashStable, Clone, Copy)] +pub enum GenerateProofTree { + Yes, + IfEnabled, + Never, +} + +pub trait InferCtxtEvalExt<'tcx> { + /// Evaluates a goal from **outside** of the trait solver. + /// + /// Using this while inside of the solver is wrong as it uses a new + /// search graph which would break cycle detection. + fn evaluate_root_goal( + &self, + goal: Goal<'tcx, ty::Predicate<'tcx>>, + generate_proof_tree: GenerateProofTree, + ) -> ( + Result<(bool, Certainty, Vec>>), NoSolution>, + Option>, + ); +} + +impl<'tcx> InferCtxtEvalExt<'tcx> for InferCtxt<'tcx> { + #[instrument(level = "debug", skip(self), ret)] + fn evaluate_root_goal( + &self, + goal: Goal<'tcx, ty::Predicate<'tcx>>, + generate_proof_tree: GenerateProofTree, + ) -> ( + Result<(bool, Certainty, Vec>>), NoSolution>, + Option>, + ) { + EvalCtxt::enter_root(self, generate_proof_tree, |ecx| { + ecx.evaluate_goal(GoalEvaluationKind::Root, goal) + }) + } +} + +impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { + pub(super) fn solver_mode(&self) -> SolverMode { + self.search_graph.solver_mode() + } + + pub(super) fn local_overflow_limit(&self) -> usize { + self.search_graph.local_overflow_limit() + } + + /// Creates a root evaluation context and search graph. This should only be + /// used from outside of any evaluation, and other methods should be preferred + /// over using this manually (such as [`InferCtxtEvalExt::evaluate_root_goal`]). 
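// Reviewer's sketch of the `enter_root` pattern documented above: build a fresh
// search graph, hand a scoped evaluation context to a closure, and assert afterwards
// that no nested goals leaked and the stack is empty again. `SearchGraph`, `EvalCtxt`,
// and `Certainty` are simplified stand-ins; proof-tree handling and the canonical
// machinery are omitted entirely.

#[derive(Debug, Clone, Copy, PartialEq)]
enum Certainty {
    Yes,
    Maybe,
}

#[derive(Default)]
struct SearchGraph {
    stack_depth: usize,
}

struct EvalCtxt<'a> {
    search_graph: &'a mut SearchGraph,
    nested_goals: Vec<String>,
}

fn enter_root<R>(f: impl FnOnce(&mut EvalCtxt<'_>) -> R) -> R {
    let mut search_graph = SearchGraph::default();
    let mut ecx = EvalCtxt { search_graph: &mut search_graph, nested_goals: Vec::new() };
    let result = f(&mut ecx);

    // Mirrors the real post-conditions: the root context must not leak pending
    // goals, and the search graph stack must be empty again.
    assert!(ecx.nested_goals.is_empty(), "root `EvalCtxt` should not have any goals added to it");
    assert_eq!(ecx.search_graph.stack_depth, 0);
    result
}

fn main() {
    let ok = enter_root(|_ecx| Certainty::Yes);
    let ambig = enter_root(|_ecx| Certainty::Maybe);
    println!("{ok:?} {ambig:?}");
}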
+ fn enter_root( + infcx: &InferCtxt<'tcx>, + generate_proof_tree: GenerateProofTree, + f: impl FnOnce(&mut EvalCtxt<'_, 'tcx>) -> R, + ) -> (R, Option>) { + let mode = if infcx.intercrate { SolverMode::Coherence } else { SolverMode::Normal }; + let mut search_graph = search_graph::SearchGraph::new(infcx.tcx, mode); + + let mut ecx = EvalCtxt { + search_graph: &mut search_graph, + infcx, + nested_goals: NestedGoals::new(), + inspect: ProofTreeBuilder::new_maybe_root(infcx.tcx, generate_proof_tree), + + // Only relevant when canonicalizing the response, + // which we don't do within this evaluation context. + predefined_opaques_in_body: infcx + .tcx + .mk_predefined_opaques_in_body(PredefinedOpaquesData::default()), + max_input_universe: ty::UniverseIndex::ROOT, + variables: ty::List::empty(), + var_values: CanonicalVarValues::dummy(), + tainted: Ok(()), + }; + let result = f(&mut ecx); + + let tree = ecx.inspect.finalize(); + if let (Some(tree), DumpSolverProofTree::Always) = + (&tree, infcx.tcx.sess.opts.unstable_opts.dump_solver_proof_tree) + { + let mut lock = std::io::stdout().lock(); + let _ = lock.write_fmt(format_args!("{tree:?}\n")); + let _ = lock.flush(); + } + + assert!( + ecx.nested_goals.is_empty(), + "root `EvalCtxt` should not have any goals added to it" + ); + + assert!(search_graph.is_empty()); + (result, tree) + } + + /// Creates a nested evaluation context that shares the same search graph as the + /// one passed in. This is suitable for evaluation, granted that the search graph + /// has had the nested goal recorded on its stack ([`SearchGraph::with_new_goal`]), + /// but it's preferable to use other methods that call this one rather than this + /// method directly. + /// + /// This function takes care of setting up the inference context, setting the anchor, + /// and registering opaques from the canonicalized input. + fn enter_canonical( + tcx: TyCtxt<'tcx>, + search_graph: &'a mut search_graph::SearchGraph<'tcx>, + canonical_input: CanonicalInput<'tcx>, + canonical_goal_evaluation: &mut ProofTreeBuilder<'tcx>, + f: impl FnOnce(&mut EvalCtxt<'_, 'tcx>, Goal<'tcx, ty::Predicate<'tcx>>) -> R, + ) -> R { + let intercrate = match search_graph.solver_mode() { + SolverMode::Normal => false, + SolverMode::Coherence => true, + }; + let (ref infcx, input, var_values) = tcx + .infer_ctxt() + .intercrate(intercrate) + .with_next_trait_solver(true) + .with_opaque_type_inference(canonical_input.value.anchor) + .build_with_canonical(DUMMY_SP, &canonical_input); + + let mut ecx = EvalCtxt { + infcx, + variables: canonical_input.variables, + var_values, + predefined_opaques_in_body: input.predefined_opaques_in_body, + max_input_universe: canonical_input.max_universe, + search_graph, + nested_goals: NestedGoals::new(), + tainted: Ok(()), + inspect: canonical_goal_evaluation.new_goal_evaluation_step(input), + }; + + for &(key, ty) in &input.predefined_opaques_in_body.opaque_types { + ecx.insert_hidden_type(key, input.goal.param_env, ty) + .expect("failed to prepopulate opaque types"); + } + + if !ecx.nested_goals.is_empty() { + panic!("prepopulating opaque types shouldn't add goals: {:?}", ecx.nested_goals); + } + + let result = f(&mut ecx, input.goal); + + canonical_goal_evaluation.goal_evaluation_step(ecx.inspect); + + // When creating a query response we clone the opaque type constraints + // instead of taking them. This would cause an ICE here, since we have + // assertions against dropping an `InferCtxt` without taking opaques. 
+ // FIXME: Once we remove support for the old impl we can remove this. + if input.anchor != DefiningAnchor::Error { + // This seems ok, but fragile. + let _ = infcx.take_opaque_types(); + } + + result + } + + /// The entry point of the solver. + /// + /// This function deals with (coinductive) cycles, overflow, and caching + /// and then calls [`EvalCtxt::compute_goal`] which contains the actual + /// logic of the solver. + /// + /// Instead of calling this function directly, use either [EvalCtxt::evaluate_goal] + /// if you're inside of the solver or [InferCtxtEvalExt::evaluate_root_goal] if you're + /// outside of it. + #[instrument(level = "debug", skip(tcx, search_graph, goal_evaluation), ret)] + fn evaluate_canonical_goal( + tcx: TyCtxt<'tcx>, + search_graph: &'a mut search_graph::SearchGraph<'tcx>, + canonical_input: CanonicalInput<'tcx>, + goal_evaluation: &mut ProofTreeBuilder<'tcx>, + ) -> QueryResult<'tcx> { + let mut canonical_goal_evaluation = + goal_evaluation.new_canonical_goal_evaluation(canonical_input); + + // Deal with overflow, caching, and coinduction. + // + // The actual solver logic happens in `ecx.compute_goal`. + let result = ensure_sufficient_stack(|| { + search_graph.with_new_goal( + tcx, + canonical_input, + &mut canonical_goal_evaluation, + |search_graph, canonical_goal_evaluation| { + EvalCtxt::enter_canonical( + tcx, + search_graph, + canonical_input, + canonical_goal_evaluation, + |ecx, goal| { + let result = ecx.compute_goal(goal); + ecx.inspect.query_result(result); + result + }, + ) + }, + ) + }); + + canonical_goal_evaluation.query_result(result); + goal_evaluation.canonical_goal_evaluation(canonical_goal_evaluation); + result + } + + /// Recursively evaluates `goal`, returning whether any inference vars have + /// been constrained and the certainty of the result. + fn evaluate_goal( + &mut self, + goal_evaluation_kind: GoalEvaluationKind, + goal: Goal<'tcx, ty::Predicate<'tcx>>, + ) -> Result<(bool, Certainty, Vec>>), NoSolution> { + let (orig_values, canonical_goal) = self.canonicalize_goal(goal); + let mut goal_evaluation = + self.inspect.new_goal_evaluation(goal, &orig_values, goal_evaluation_kind); + let encountered_overflow = self.search_graph.encountered_overflow(); + let canonical_response = EvalCtxt::evaluate_canonical_goal( + self.tcx(), + self.search_graph, + canonical_goal, + &mut goal_evaluation, + ); + let canonical_response = match canonical_response { + Err(e) => { + self.inspect.goal_evaluation(goal_evaluation); + return Err(e); + } + Ok(response) => response, + }; + + let has_changed = !canonical_response.value.var_values.is_identity_modulo_regions() + || !canonical_response.value.external_constraints.opaque_types.is_empty(); + let (certainty, nested_goals) = match self.instantiate_and_apply_query_response( + goal.param_env, + orig_values, + canonical_response, + ) { + Err(e) => { + self.inspect.goal_evaluation(goal_evaluation); + return Err(e); + } + Ok(response) => response, + }; + goal_evaluation.returned_goals(&nested_goals); + self.inspect.goal_evaluation(goal_evaluation); + + if !has_changed && !nested_goals.is_empty() { + bug!("an unchanged goal shouldn't have any side-effects on instantiation"); + } + + // Check that rerunning this query with its inference constraints applied + // doesn't result in new inference constraints and has the same result. + // + // If we have projection goals like `::Assoc == u32` we recursively + // call `exists ::Assoc == U` to enable better caching. 
This goal + // could constrain `U` to `u32` which would cause this check to result in a + // solver cycle. + if cfg!(debug_assertions) + && has_changed + && !matches!( + goal_evaluation_kind, + GoalEvaluationKind::Nested { is_normalizes_to_hack: IsNormalizesToHack::Yes } + ) + && !self.search_graph.in_cycle() + { + // The nested evaluation has to happen with the original state + // of `encountered_overflow`. + let from_original_evaluation = + self.search_graph.reset_encountered_overflow(encountered_overflow); + self.check_evaluate_goal_stable_result(goal, canonical_goal, canonical_response); + // In case the evaluation was unstable, we manually make sure that this + // debug check does not influence the result of the parent goal. + self.search_graph.reset_encountered_overflow(from_original_evaluation); + } + + Ok((has_changed, certainty, nested_goals)) + } + + fn check_evaluate_goal_stable_result( + &mut self, + goal: Goal<'tcx, ty::Predicate<'tcx>>, + original_input: CanonicalInput<'tcx>, + original_result: CanonicalResponse<'tcx>, + ) { + let (_orig_values, canonical_goal) = self.canonicalize_goal(goal); + let result = EvalCtxt::evaluate_canonical_goal( + self.tcx(), + self.search_graph, + canonical_goal, + // FIXME(-Ztrait-solver=next): we do not track what happens in `evaluate_canonical_goal` + &mut ProofTreeBuilder::new_noop(), + ); + + macro_rules! fail { + ($msg:expr) => {{ + let msg = $msg; + warn!( + "unstable result: {msg}\n\ + original goal: {original_input:?},\n\ + original result: {original_result:?}\n\ + re-canonicalized goal: {canonical_goal:?}\n\ + second response: {result:?}" + ); + return; + }}; + } + + let Ok(new_canonical_response) = result else { fail!("second response was error") }; + // We only check for modulo regions as we convert all regions in + // the input to new existentials, even if they're expected to be + // `'static` or a placeholder region. 
+ if !new_canonical_response.value.var_values.is_identity_modulo_regions() { + fail!("additional constraints from second response") + } + if original_result.value.certainty != new_canonical_response.value.certainty { + fail!("unstable certainty") + } + } + + fn compute_goal(&mut self, goal: Goal<'tcx, ty::Predicate<'tcx>>) -> QueryResult<'tcx> { + let Goal { param_env, predicate } = goal; + let kind = predicate.kind(); + if let Some(kind) = kind.no_bound_vars() { + match kind { + ty::PredicateKind::Clause(ty::ClauseKind::Trait(predicate)) => { + self.compute_trait_goal(Goal { param_env, predicate }) + } + ty::PredicateKind::Clause(ty::ClauseKind::Projection(predicate)) => { + self.compute_projection_goal(Goal { param_env, predicate }) + } + ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(predicate)) => { + self.compute_type_outlives_goal(Goal { param_env, predicate }) + } + ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(predicate)) => { + self.compute_region_outlives_goal(Goal { param_env, predicate }) + } + ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => { + self.compute_const_arg_has_type_goal(Goal { param_env, predicate: (ct, ty) }) + } + ty::PredicateKind::Subtype(predicate) => { + self.compute_subtype_goal(Goal { param_env, predicate }) + } + ty::PredicateKind::Coerce(predicate) => { + self.compute_coerce_goal(Goal { param_env, predicate }) + } + ty::PredicateKind::ClosureKind(def_id, args, kind) => self + .compute_closure_kind_goal(Goal { param_env, predicate: (def_id, args, kind) }), + ty::PredicateKind::ObjectSafe(trait_def_id) => { + self.compute_object_safe_goal(trait_def_id) + } + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { + self.compute_well_formed_goal(Goal { param_env, predicate: arg }) + } + ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(ct)) => { + self.compute_const_evaluatable_goal(Goal { param_env, predicate: ct }) + } + ty::PredicateKind::ConstEquate(_, _) => { + bug!("ConstEquate should not be emitted when `-Ztrait-solver=next` is active") + } + ty::PredicateKind::AliasRelate(lhs, rhs, direction) => self + .compute_alias_relate_goal(Goal { + param_env, + predicate: (lhs, rhs, direction), + }), + ty::PredicateKind::Ambiguous => { + self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) + } + } + } else { + let kind = self.infcx.instantiate_binder_with_placeholders(kind); + let goal = goal.with(self.tcx(), ty::Binder::dummy(kind)); + self.add_goal(goal); + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } + } + + // Recursively evaluates all the goals added to this `EvalCtxt` to completion, returning + // the certainty of all the goals. + #[instrument(level = "debug", skip(self))] + pub(super) fn try_evaluate_added_goals(&mut self) -> Result { + let inspect = self.inspect.new_evaluate_added_goals(); + let inspect = core::mem::replace(&mut self.inspect, inspect); + + let mut response = Ok(Certainty::OVERFLOW); + for _ in 0..self.local_overflow_limit() { + // FIXME: This match is a bit ugly, it might be nice to change the inspect + // stuff to use a closure instead. which should hopefully simplify this a bit. 
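// Reviewer's sketch of the fixpoint loop in `try_evaluate_added_goals` /
// `evaluate_added_goals_step` above: re-run the pending goals until a pass either
// proves everything, makes no further progress, or fails, and treat exhausting the
// local overflow limit as overflow. `Goal`, `Certainty`, and the step logic are toy
// stand-ins; the real code also threads the normalizes-to hack goal and the
// inspection state through each pass.

#[derive(Debug, Clone, Copy, PartialEq)]
enum Certainty {
    Yes,
    Maybe,
    Overflow,
}

#[derive(Debug)]
struct NoSolution;

#[derive(Clone, Copy)]
enum Goal {
    /// Provable after this many further passes (e.g. it waits on other goals).
    ReadyIn(u32),
    /// Evaluating it never yields new information.
    Ambiguous,
    /// Never provable in this environment.
    Impossible,
}

/// One pass over the pending goals: drop goals that are provable now, fail on
/// impossible ones, and report `Ok(Some(_))` once a fixpoint is reached.
fn evaluate_added_goals_step(goals: &mut Vec<Goal>) -> Result<Option<Certainty>, NoSolution> {
    let mut changed = false;
    let mut next = Vec::new();
    for goal in goals.drain(..) {
        match goal {
            Goal::ReadyIn(0) => changed = true, // proven, drop it
            Goal::ReadyIn(n) => {
                changed = true; // partial progress
                next.push(Goal::ReadyIn(n - 1));
            }
            Goal::Ambiguous => next.push(Goal::Ambiguous),
            Goal::Impossible => return Err(NoSolution),
        }
    }
    *goals = next;
    if goals.is_empty() {
        Ok(Some(Certainty::Yes)) // fixpoint: everything proven
    } else if !changed {
        Ok(Some(Certainty::Maybe)) // fixpoint: leftover goals stay ambiguous
    } else {
        Ok(None) // progress was made, run another pass
    }
}

/// Mirrors the shape of `try_evaluate_added_goals`: iterate up to a local overflow
/// limit, stopping early once a pass reports a final certainty or an error.
fn try_evaluate_added_goals(
    goals: &mut Vec<Goal>,
    overflow_limit: usize,
) -> Result<Certainty, NoSolution> {
    let mut response = Ok(Certainty::Overflow);
    for _ in 0..overflow_limit {
        match evaluate_added_goals_step(goals) {
            Ok(Some(certainty)) => {
                response = Ok(certainty);
                break;
            }
            Ok(None) => {}
            Err(e) => {
                response = Err(e);
                break;
            }
        }
    }
    response
}

fn main() {
    let mut ambig = vec![Goal::ReadyIn(2), Goal::Ambiguous];
    println!("{:?}", try_evaluate_added_goals(&mut ambig, 8)); // Ok(Maybe)

    let mut failing = vec![Goal::ReadyIn(1), Goal::Impossible];
    println!("{:?}", try_evaluate_added_goals(&mut failing, 8)); // Err(NoSolution)
}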
+ match self.evaluate_added_goals_step() { + Ok(Some(cert)) => { + response = Ok(cert); + break; + } + Ok(None) => {} + Err(NoSolution) => { + response = Err(NoSolution); + break; + } + } + } + + self.inspect.eval_added_goals_result(response); + + if response.is_err() { + self.tainted = Err(NoSolution); + } + + let goal_evaluations = std::mem::replace(&mut self.inspect, inspect); + self.inspect.added_goals_evaluation(goal_evaluations); + + response + } + + /// Iterate over all added goals: returning `Ok(Some(_))` in case we can stop rerunning. + /// + /// Goals for the next step get directly added to the nested goals of the `EvalCtxt`. + fn evaluate_added_goals_step(&mut self) -> Result, NoSolution> { + let tcx = self.tcx(); + let mut goals = core::mem::replace(&mut self.nested_goals, NestedGoals::new()); + + self.inspect.evaluate_added_goals_loop_start(); + // If this loop did not result in any progress, what's our final certainty. + let mut unchanged_certainty = Some(Certainty::Yes); + if let Some(goal) = goals.normalizes_to_hack_goal.take() { + // Replace the goal with an unconstrained infer var, so the + // RHS does not affect projection candidate assembly. + let unconstrained_rhs = self.next_term_infer_of_kind(goal.predicate.term); + let unconstrained_goal = goal.with( + tcx, + ty::ProjectionPredicate { + projection_ty: goal.predicate.projection_ty, + term: unconstrained_rhs, + }, + ); + + let (_, certainty, instantiate_goals) = self.evaluate_goal( + GoalEvaluationKind::Nested { is_normalizes_to_hack: IsNormalizesToHack::Yes }, + unconstrained_goal, + )?; + self.nested_goals.goals.extend(instantiate_goals); + + // Finally, equate the goal's RHS with the unconstrained var. + // We put the nested goals from this into goals instead of + // next_goals to avoid needing to process the loop one extra + // time if this goal returns something -- I don't think this + // matters in practice, though. + let eq_goals = + self.eq_and_get_goals(goal.param_env, goal.predicate.term, unconstrained_rhs)?; + goals.goals.extend(eq_goals); + + // We only look at the `projection_ty` part here rather than + // looking at the "has changed" return from evaluate_goal, + // because we expect the `unconstrained_rhs` part of the predicate + // to have changed -- that means we actually normalized successfully! + if goal.predicate.projection_ty + != self.resolve_vars_if_possible(goal.predicate.projection_ty) + { + unchanged_certainty = None; + } + + match certainty { + Certainty::Yes => {} + Certainty::Maybe(_) => { + // We need to resolve vars here so that we correctly + // deal with `has_changed` in the next iteration. + self.set_normalizes_to_hack_goal(self.resolve_vars_if_possible(goal)); + unchanged_certainty = unchanged_certainty.map(|c| c.unify_with(certainty)); + } + } + } + + for goal in goals.goals.drain(..) 
{ + let (has_changed, certainty, instantiate_goals) = self.evaluate_goal( + GoalEvaluationKind::Nested { is_normalizes_to_hack: IsNormalizesToHack::No }, + goal, + )?; + self.nested_goals.goals.extend(instantiate_goals); + if has_changed { + unchanged_certainty = None; + } + + match certainty { + Certainty::Yes => {} + Certainty::Maybe(_) => { + self.nested_goals.goals.push(goal); + unchanged_certainty = unchanged_certainty.map(|c| c.unify_with(certainty)); + } + } + } + + Ok(unchanged_certainty) + } +} + +impl<'tcx> EvalCtxt<'_, 'tcx> { + pub(super) fn tcx(&self) -> TyCtxt<'tcx> { + self.infcx.tcx + } + + pub(super) fn next_ty_infer(&self) -> Ty<'tcx> { + self.infcx.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::MiscVariable, + span: DUMMY_SP, + }) + } + + pub(super) fn next_const_infer(&self, ty: Ty<'tcx>) -> ty::Const<'tcx> { + self.infcx.next_const_var( + ty, + ConstVariableOrigin { kind: ConstVariableOriginKind::MiscVariable, span: DUMMY_SP }, + ) + } + + /// Returns a ty infer or a const infer depending on whether `kind` is a `Ty` or `Const`. + /// If `kind` is an integer inference variable this will still return a ty infer var. + pub(super) fn next_term_infer_of_kind(&self, kind: ty::Term<'tcx>) -> ty::Term<'tcx> { + match kind.unpack() { + ty::TermKind::Ty(_) => self.next_ty_infer().into(), + ty::TermKind::Const(ct) => self.next_const_infer(ct.ty()).into(), + } + } + + /// Is the projection predicate is of the form `exists ::Assoc = T`. + /// + /// This is the case if the `term` is an inference variable in the innermost universe + /// and does not occur in any other part of the predicate. + pub(super) fn term_is_fully_unconstrained( + &self, + goal: Goal<'tcx, ty::ProjectionPredicate<'tcx>>, + ) -> bool { + let term_is_infer = match goal.predicate.term.unpack() { + ty::TermKind::Ty(ty) => { + if let &ty::Infer(ty::TyVar(vid)) = ty.kind() { + match self.infcx.probe_ty_var(vid) { + Ok(value) => bug!("resolved var in query: {goal:?} {value:?}"), + Err(universe) => universe == self.infcx.universe(), + } + } else { + false + } + } + ty::TermKind::Const(ct) => { + if let ty::ConstKind::Infer(ty::InferConst::Var(vid)) = ct.kind() { + match self.infcx.probe_const_var(vid) { + Ok(value) => bug!("resolved var in query: {goal:?} {value:?}"), + Err(universe) => universe == self.infcx.universe(), + } + } else { + false + } + } + }; + + // Guard against `>::Assoc = ?0>`. 
+ struct ContainsTerm<'a, 'tcx> { + term: ty::Term<'tcx>, + infcx: &'a InferCtxt<'tcx>, + } + impl<'tcx> TypeVisitor> for ContainsTerm<'_, 'tcx> { + type BreakTy = (); + fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow { + if let Some(vid) = t.ty_vid() + && let ty::TermKind::Ty(term) = self.term.unpack() + && let Some(term_vid) = term.ty_vid() + && self.infcx.root_var(vid) == self.infcx.root_var(term_vid) + { + ControlFlow::Break(()) + } else if t.has_non_region_infer() { + t.super_visit_with(self) + } else { + ControlFlow::Continue(()) + } + } + + fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow { + if let ty::ConstKind::Infer(ty::InferConst::Var(vid)) = c.kind() + && let ty::TermKind::Const(term) = self.term.unpack() + && let ty::ConstKind::Infer(ty::InferConst::Var(term_vid)) = term.kind() + && self.infcx.root_const_var(vid) == self.infcx.root_const_var(term_vid) + { + ControlFlow::Break(()) + } else if c.has_non_region_infer() { + c.super_visit_with(self) + } else { + ControlFlow::Continue(()) + } + } + } + + let mut visitor = ContainsTerm { infcx: self.infcx, term: goal.predicate.term }; + + term_is_infer + && goal.predicate.projection_ty.visit_with(&mut visitor).is_continue() + && goal.param_env.visit_with(&mut visitor).is_continue() + } + + #[instrument(level = "debug", skip(self, param_env), ret)] + pub(super) fn eq>( + &mut self, + param_env: ty::ParamEnv<'tcx>, + lhs: T, + rhs: T, + ) -> Result<(), NoSolution> { + self.infcx + .at(&ObligationCause::dummy(), param_env) + .eq(DefineOpaqueTypes::No, lhs, rhs) + .map(|InferOk { value: (), obligations }| { + self.add_goals(obligations.into_iter().map(|o| o.into())); + }) + .map_err(|e| { + debug!(?e, "failed to equate"); + NoSolution + }) + } + + #[instrument(level = "debug", skip(self, param_env), ret)] + pub(super) fn sub>( + &mut self, + param_env: ty::ParamEnv<'tcx>, + sub: T, + sup: T, + ) -> Result<(), NoSolution> { + self.infcx + .at(&ObligationCause::dummy(), param_env) + .sub(DefineOpaqueTypes::No, sub, sup) + .map(|InferOk { value: (), obligations }| { + self.add_goals(obligations.into_iter().map(|o| o.into())); + }) + .map_err(|e| { + debug!(?e, "failed to subtype"); + NoSolution + }) + } + + /// Equates two values returning the nested goals without adding them + /// to the nested goals of the `EvalCtxt`. + /// + /// If possible, try using `eq` instead which automatically handles nested + /// goals correctly. 
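// Reviewer's sketch of the `ContainsTerm` occurs check used by
// `term_is_fully_unconstrained` above: the candidate RHS must be an inference
// variable that does not also occur elsewhere in the predicate. The real check
// additionally requires the variable to live in the innermost universe and also
// scans the `param_env`; both are omitted here, and `Ty` is a toy type tree rather
// than rustc's.

#[derive(Debug)]
enum Ty {
    /// An unresolved inference variable `?n`.
    Infer(u32),
    /// A named type constructor applied to arguments, e.g. `Vec<T>`.
    Named(&'static str, Vec<Ty>),
}

/// Does the inference variable `?vid` occur anywhere inside `ty`?
fn contains_var(ty: &Ty, vid: u32) -> bool {
    match ty {
        Ty::Infer(v) => *v == vid,
        Ty::Named(_, args) => args.iter().any(|arg| contains_var(arg, vid)),
    }
}

/// The RHS of a projection goal is "fully unconstrained" if it is an inference
/// variable that does not also occur in the projection's own arguments (otherwise
/// equating it later could constrain the projection itself).
fn term_is_fully_unconstrained(projection_args: &[Ty], rhs: &Ty) -> bool {
    match rhs {
        Ty::Infer(vid) => !projection_args.iter().any(|arg| contains_var(arg, *vid)),
        _ => false,
    }
}

fn main() {
    let args = vec![Ty::Named("Vec", vec![Ty::Infer(0)])];
    // `?0` occurs in the projection's own arguments, so it is constrained.
    assert!(!term_is_fully_unconstrained(&args, &Ty::Infer(0)));
    // A fresh `?1` does not occur anywhere else, so it is fully unconstrained.
    assert!(term_is_fully_unconstrained(&args, &Ty::Infer(1)));
    println!("occurs checks behave as expected");
}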
+ #[instrument(level = "trace", skip(self, param_env), ret)] + pub(super) fn eq_and_get_goals>( + &self, + param_env: ty::ParamEnv<'tcx>, + lhs: T, + rhs: T, + ) -> Result>>, NoSolution> { + self.infcx + .at(&ObligationCause::dummy(), param_env) + .eq(DefineOpaqueTypes::No, lhs, rhs) + .map(|InferOk { value: (), obligations }| { + obligations.into_iter().map(|o| o.into()).collect() + }) + .map_err(|e| { + debug!(?e, "failed to equate"); + NoSolution + }) + } + + pub(super) fn instantiate_binder_with_infer> + Copy>( + &self, + value: ty::Binder<'tcx, T>, + ) -> T { + self.infcx.instantiate_binder_with_fresh_vars( + DUMMY_SP, + LateBoundRegionConversionTime::HigherRankedType, + value, + ) + } + + pub(super) fn instantiate_binder_with_placeholders> + Copy>( + &self, + value: ty::Binder<'tcx, T>, + ) -> T { + self.infcx.instantiate_binder_with_placeholders(value) + } + + pub(super) fn resolve_vars_if_possible(&self, value: T) -> T + where + T: TypeFoldable>, + { + self.infcx.resolve_vars_if_possible(value) + } + + pub(super) fn fresh_args_for_item(&self, def_id: DefId) -> ty::GenericArgsRef<'tcx> { + self.infcx.fresh_args_for_item(DUMMY_SP, def_id) + } + + pub(super) fn translate_args( + &self, + param_env: ty::ParamEnv<'tcx>, + source_impl: DefId, + source_args: ty::GenericArgsRef<'tcx>, + target_node: specialization_graph::Node, + ) -> ty::GenericArgsRef<'tcx> { + crate::traits::translate_args(self.infcx, param_env, source_impl, source_args, target_node) + } + + pub(super) fn register_ty_outlives(&self, ty: Ty<'tcx>, lt: ty::Region<'tcx>) { + self.infcx.register_region_obligation_with_cause(ty, lt, &ObligationCause::dummy()); + } + + pub(super) fn register_region_outlives(&self, a: ty::Region<'tcx>, b: ty::Region<'tcx>) { + // `b : a` ==> `a <= b` + // (inlined from `InferCtxt::region_outlives_predicate`) + self.infcx.sub_regions( + rustc_infer::infer::SubregionOrigin::RelateRegionParamBound(DUMMY_SP), + b, + a, + ); + } + + /// Computes the list of goals required for `arg` to be well-formed + pub(super) fn well_formed_goals( + &self, + param_env: ty::ParamEnv<'tcx>, + arg: ty::GenericArg<'tcx>, + ) -> Option>>> { + crate::traits::wf::unnormalized_obligations(self.infcx, param_env, arg) + .map(|obligations| obligations.into_iter().map(|obligation| obligation.into())) + } + + pub(super) fn is_transmutable( + &self, + src_and_dst: rustc_transmute::Types<'tcx>, + scope: Ty<'tcx>, + assume: rustc_transmute::Assume, + ) -> Result { + use rustc_transmute::Answer; + // FIXME(transmutability): This really should be returning nested goals for `Answer::If*` + match rustc_transmute::TransmuteTypeEnv::new(self.infcx).is_transmutable( + ObligationCause::dummy(), + src_and_dst, + scope, + assume, + ) { + Answer::Yes => Ok(Certainty::Yes), + Answer::No(_) | Answer::If(_) => Err(NoSolution), + } + } + + pub(super) fn can_define_opaque_ty(&self, def_id: LocalDefId) -> bool { + self.infcx.opaque_type_origin(def_id).is_some() + } + + pub(super) fn insert_hidden_type( + &mut self, + opaque_type_key: OpaqueTypeKey<'tcx>, + param_env: ty::ParamEnv<'tcx>, + hidden_ty: Ty<'tcx>, + ) -> Result<(), NoSolution> { + let mut obligations = Vec::new(); + self.infcx.insert_hidden_type( + opaque_type_key, + &ObligationCause::dummy(), + param_env, + hidden_ty, + true, + &mut obligations, + )?; + self.add_goals(obligations.into_iter().map(|o| o.into())); + Ok(()) + } + + pub(super) fn add_item_bounds_for_hidden_type( + &mut self, + opaque_def_id: DefId, + opaque_args: ty::GenericArgsRef<'tcx>, + param_env: ty::ParamEnv<'tcx>, 
+ hidden_ty: Ty<'tcx>, + ) { + let mut obligations = Vec::new(); + self.infcx.add_item_bounds_for_hidden_type( + opaque_def_id, + opaque_args, + ObligationCause::dummy(), + param_env, + hidden_ty, + &mut obligations, + ); + self.add_goals(obligations.into_iter().map(|o| o.into())); + } + + // Do something for each opaque/hidden pair defined with `def_id` in the + // current inference context. + pub(super) fn unify_existing_opaque_tys( + &mut self, + param_env: ty::ParamEnv<'tcx>, + key: ty::OpaqueTypeKey<'tcx>, + ty: Ty<'tcx>, + ) -> Vec> { + // FIXME: Super inefficient to be cloning this... + let opaques = self.infcx.clone_opaque_types_for_query_response(); + + let mut values = vec![]; + for (candidate_key, candidate_ty) in opaques { + if candidate_key.def_id != key.def_id { + continue; + } + values.extend(self.probe_misc_candidate("opaque type storage").enter(|ecx| { + for (a, b) in std::iter::zip(candidate_key.args, key.args) { + ecx.eq(param_env, a, b)?; + } + ecx.eq(param_env, candidate_ty, ty)?; + ecx.add_item_bounds_for_hidden_type( + candidate_key.def_id.to_def_id(), + candidate_key.args, + param_env, + candidate_ty, + ); + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + })); + } + values + } + + // Try to evaluate a const, or return `None` if the const is too generic. + // This doesn't mean the const isn't evaluatable, though, and should be treated + // as an ambiguity rather than no-solution. + pub(super) fn try_const_eval_resolve( + &self, + param_env: ty::ParamEnv<'tcx>, + unevaluated: ty::UnevaluatedConst<'tcx>, + ty: Ty<'tcx>, + ) -> Option> { + use rustc_middle::mir::interpret::ErrorHandled; + match self.infcx.try_const_eval_resolve(param_env, unevaluated, ty, None) { + Ok(ct) => Some(ct), + Err(ErrorHandled::Reported(e, _)) => { + Some(ty::Const::new_error(self.tcx(), e.into(), ty)) + } + Err(ErrorHandled::TooGeneric(_)) => None, + } + } + + /// Walk through the vtable of a principal trait ref, executing a `supertrait_visitor` + /// for every trait ref encountered (including the principal). Passes both the vtable + /// base and the (optional) vptr slot. 
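// Reviewer's sketch of the offset bookkeeping in `walk_vtable` (documented just
// above; its body follows below): the metadata segment advances the offset by the
// number of common slots, and each trait segment reports its method base plus an
// optional vptr slot placed right after its own methods. Segment construction via
// `prepare_vtable_segments` and the real `TyCtxt::COMMON_VTABLE_ENTRIES` are
// replaced by hard-coded stand-ins here.

enum VtblSegment {
    /// The common metadata slots at the start of every vtable.
    MetadataDSA,
    /// One trait's own methods, optionally followed by a vptr slot.
    TraitOwnEntries { trait_name: &'static str, own_entries: usize, emit_vptr: bool },
}

fn walk_vtable(
    segments: &[VtblSegment],
    mut supertrait_visitor: impl FnMut(&'static str, usize, Option<usize>),
) {
    // Stand-in for `TyCtxt::COMMON_VTABLE_ENTRIES.len()`: drop_in_place, size, align.
    const COMMON_VTABLE_ENTRIES: usize = 3;

    let mut offset = 0;
    for segment in segments {
        match segment {
            &VtblSegment::MetadataDSA => offset += COMMON_VTABLE_ENTRIES,
            &VtblSegment::TraitOwnEntries { trait_name, own_entries, emit_vptr } => {
                // The optional vptr slot sits right after this trait's own methods.
                let vptr_slot = emit_vptr.then(|| offset + own_entries);
                supertrait_visitor(trait_name, offset, vptr_slot);

                offset += own_entries;
                if emit_vptr {
                    offset += 1;
                }
            }
        }
    }
}

fn main() {
    let segments = [
        VtblSegment::MetadataDSA,
        VtblSegment::TraitOwnEntries { trait_name: "Super", own_entries: 2, emit_vptr: true },
        VtblSegment::TraitOwnEntries { trait_name: "Sub", own_entries: 1, emit_vptr: false },
    ];
    walk_vtable(&segments, |name, base, vptr| {
        println!("{name}: methods start at slot {base}, vptr slot: {vptr:?}");
    });
}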
+ pub(super) fn walk_vtable( + &mut self, + principal: ty::PolyTraitRef<'tcx>, + mut supertrait_visitor: impl FnMut(&mut Self, ty::PolyTraitRef<'tcx>, usize, Option), + ) { + let tcx = self.tcx(); + let mut offset = 0; + prepare_vtable_segments::<()>(tcx, principal, |segment| { + match segment { + VtblSegment::MetadataDSA => { + offset += TyCtxt::COMMON_VTABLE_ENTRIES.len(); + } + VtblSegment::TraitOwnEntries { trait_ref, emit_vptr } => { + let own_vtable_entries = count_own_vtable_entries(tcx, trait_ref); + + supertrait_visitor( + self, + trait_ref, + offset, + emit_vptr.then(|| offset + own_vtable_entries), + ); + + offset += own_vtable_entries; + if emit_vptr { + offset += 1; + } + } + } + ControlFlow::Continue(()) + }); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/eval_ctxt.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/eval_ctxt.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/eval_ctxt.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/eval_ctxt.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1001 +0,0 @@ -use rustc_data_structures::stack::ensure_sufficient_stack; -use rustc_hir::def_id::{DefId, LocalDefId}; -use rustc_infer::infer::at::ToTrace; -use rustc_infer::infer::canonical::CanonicalVarValues; -use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; -use rustc_infer::infer::{ - DefineOpaqueTypes, InferCtxt, InferOk, LateBoundRegionConversionTime, TyCtxtInferExt, -}; -use rustc_infer::traits::query::NoSolution; -use rustc_infer::traits::ObligationCause; -use rustc_middle::infer::canonical::CanonicalVarInfos; -use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind}; -use rustc_middle::traits::solve::inspect; -use rustc_middle::traits::solve::{ - CanonicalInput, CanonicalResponse, Certainty, IsNormalizesToHack, PredefinedOpaques, - PredefinedOpaquesData, QueryResult, -}; -use rustc_middle::traits::{specialization_graph, DefiningAnchor}; -use rustc_middle::ty::{ - self, OpaqueTypeKey, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable, TypeVisitable, - TypeVisitableExt, TypeVisitor, -}; -use rustc_session::config::DumpSolverProofTree; -use rustc_span::DUMMY_SP; -use std::io::Write; -use std::ops::ControlFlow; - -use crate::traits::vtable::{count_own_vtable_entries, prepare_vtable_segments, VtblSegment}; - -use super::inspect::ProofTreeBuilder; -use super::SolverMode; -use super::{search_graph, GoalEvaluationKind}; -use super::{search_graph::SearchGraph, Goal}; -pub use select::InferCtxtSelectExt; - -mod canonical; -mod probe; -mod select; - -pub struct EvalCtxt<'a, 'tcx> { - /// The inference context that backs (mostly) inference and placeholder terms - /// instantiated while solving goals. - /// - /// NOTE: The `InferCtxt` that backs the `EvalCtxt` is intentionally private, - /// because the `InferCtxt` is much more general than `EvalCtxt`. Methods such - /// as `take_registered_region_obligations` can mess up query responses, - /// using `At::normalize` is totally wrong, calling `evaluate_root_goal` can - /// cause coinductive unsoundness, etc. - /// - /// Methods that are generally of use for trait solving are *intentionally* - /// re-declared through the `EvalCtxt` below, often with cleaner signatures - /// since we don't care about things like `ObligationCause`s and `Span`s here. 
- /// If some `InferCtxt` method is missing, please first think defensively about - /// the method's compatibility with this solver, or if an existing one does - /// the job already. - infcx: &'a InferCtxt<'tcx>, - - /// The variable info for the `var_values`, only used to make an ambiguous response - /// with no constraints. - variables: CanonicalVarInfos<'tcx>, - pub(super) var_values: CanonicalVarValues<'tcx>, - - predefined_opaques_in_body: PredefinedOpaques<'tcx>, - - /// The highest universe index nameable by the caller. - /// - /// When we enter a new binder inside of the query we create new universes - /// which the caller cannot name. We have to be careful with variables from - /// these new universes when creating the query response. - /// - /// Both because these new universes can prevent us from reaching a fixpoint - /// if we have a coinductive cycle and because that's the only way we can return - /// new placeholders to the caller. - pub(super) max_input_universe: ty::UniverseIndex, - - pub(super) search_graph: &'a mut SearchGraph<'tcx>, - - pub(super) nested_goals: NestedGoals<'tcx>, - - // Has this `EvalCtxt` errored out with `NoSolution` in `try_evaluate_added_goals`? - // - // If so, then it can no longer be used to make a canonical query response, - // since subsequent calls to `try_evaluate_added_goals` have possibly dropped - // ambiguous goals. Instead, a probe needs to be introduced somewhere in the - // evaluation code. - tainted: Result<(), NoSolution>, - - pub(super) inspect: ProofTreeBuilder<'tcx>, -} - -#[derive(Debug, Clone)] -pub(super) struct NestedGoals<'tcx> { - /// This normalizes-to goal that is treated specially during the evaluation - /// loop. In each iteration we take the RHS of the projection, replace it with - /// a fresh inference variable, and only after evaluating that goal do we - /// equate the fresh inference variable with the actual RHS of the predicate. - /// - /// This is both to improve caching, and to avoid using the RHS of the - /// projection predicate to influence the normalizes-to candidate we select. - /// - /// This is not a 'real' nested goal. We must not forget to replace the RHS - /// with a fresh inference variable when we evaluate this goal. That can result - /// in a trait solver cycle. This would currently result in overflow but can be - /// can be unsound with more powerful coinduction in the future. - pub(super) normalizes_to_hack_goal: Option>>, - /// The rest of the goals which have not yet processed or remain ambiguous. - pub(super) goals: Vec>>, -} - -impl NestedGoals<'_> { - pub(super) fn new() -> Self { - Self { normalizes_to_hack_goal: None, goals: Vec::new() } - } - - pub(super) fn is_empty(&self) -> bool { - self.normalizes_to_hack_goal.is_none() && self.goals.is_empty() - } -} - -#[derive(PartialEq, Eq, Debug, Hash, HashStable, Clone, Copy)] -pub enum GenerateProofTree { - Yes(UseGlobalCache), - IfEnabled, - Never, -} - -#[derive(PartialEq, Eq, Debug, Hash, HashStable, Clone, Copy)] -pub enum UseGlobalCache { - Yes, - No, -} -impl UseGlobalCache { - pub fn from_bool(use_cache: bool) -> Self { - match use_cache { - true => UseGlobalCache::Yes, - false => UseGlobalCache::No, - } - } -} - -pub trait InferCtxtEvalExt<'tcx> { - /// Evaluates a goal from **outside** of the trait solver. - /// - /// Using this while inside of the solver is wrong as it uses a new - /// search graph which would break cycle detection. 
- fn evaluate_root_goal( - &self, - goal: Goal<'tcx, ty::Predicate<'tcx>>, - generate_proof_tree: GenerateProofTree, - ) -> ( - Result<(bool, Certainty, Vec>>), NoSolution>, - Option>, - ); -} - -impl<'tcx> InferCtxtEvalExt<'tcx> for InferCtxt<'tcx> { - #[instrument(level = "debug", skip(self), ret)] - fn evaluate_root_goal( - &self, - goal: Goal<'tcx, ty::Predicate<'tcx>>, - generate_proof_tree: GenerateProofTree, - ) -> ( - Result<(bool, Certainty, Vec>>), NoSolution>, - Option>, - ) { - EvalCtxt::enter_root(self, generate_proof_tree, |ecx| { - ecx.evaluate_goal(GoalEvaluationKind::Root, goal) - }) - } -} - -impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { - pub(super) fn solver_mode(&self) -> SolverMode { - self.search_graph.solver_mode() - } - - pub(super) fn local_overflow_limit(&self) -> usize { - self.search_graph.local_overflow_limit() - } - - /// Creates a root evaluation context and search graph. This should only be - /// used from outside of any evaluation, and other methods should be preferred - /// over using this manually (such as [`InferCtxtEvalExt::evaluate_root_goal`]). - fn enter_root( - infcx: &InferCtxt<'tcx>, - generate_proof_tree: GenerateProofTree, - f: impl FnOnce(&mut EvalCtxt<'_, 'tcx>) -> R, - ) -> (R, Option>) { - let mode = if infcx.intercrate { SolverMode::Coherence } else { SolverMode::Normal }; - let mut search_graph = search_graph::SearchGraph::new(infcx.tcx, mode); - - let mut ecx = EvalCtxt { - search_graph: &mut search_graph, - infcx, - nested_goals: NestedGoals::new(), - inspect: ProofTreeBuilder::new_maybe_root(infcx.tcx, generate_proof_tree), - - // Only relevant when canonicalizing the response, - // which we don't do within this evaluation context. - predefined_opaques_in_body: infcx - .tcx - .mk_predefined_opaques_in_body(PredefinedOpaquesData::default()), - max_input_universe: ty::UniverseIndex::ROOT, - variables: ty::List::empty(), - var_values: CanonicalVarValues::dummy(), - tainted: Ok(()), - }; - let result = f(&mut ecx); - - let tree = ecx.inspect.finalize(); - if let (Some(tree), DumpSolverProofTree::Always) = - (&tree, infcx.tcx.sess.opts.unstable_opts.dump_solver_proof_tree) - { - let mut lock = std::io::stdout().lock(); - let _ = lock.write_fmt(format_args!("{tree:?}\n")); - let _ = lock.flush(); - } - - assert!( - ecx.nested_goals.is_empty(), - "root `EvalCtxt` should not have any goals added to it" - ); - - assert!(search_graph.is_empty()); - (result, tree) - } - - /// Creates a nested evaluation context that shares the same search graph as the - /// one passed in. This is suitable for evaluation, granted that the search graph - /// has had the nested goal recorded on its stack ([`SearchGraph::with_new_goal`]), - /// but it's preferable to use other methods that call this one rather than this - /// method directly. - /// - /// This function takes care of setting up the inference context, setting the anchor, - /// and registering opaques from the canonicalized input. 
- fn enter_canonical( - tcx: TyCtxt<'tcx>, - search_graph: &'a mut search_graph::SearchGraph<'tcx>, - canonical_input: CanonicalInput<'tcx>, - canonical_goal_evaluation: &mut ProofTreeBuilder<'tcx>, - f: impl FnOnce(&mut EvalCtxt<'_, 'tcx>, Goal<'tcx, ty::Predicate<'tcx>>) -> R, - ) -> R { - let intercrate = match search_graph.solver_mode() { - SolverMode::Normal => false, - SolverMode::Coherence => true, - }; - let (ref infcx, input, var_values) = tcx - .infer_ctxt() - .intercrate(intercrate) - .with_next_trait_solver(true) - .with_opaque_type_inference(canonical_input.value.anchor) - .build_with_canonical(DUMMY_SP, &canonical_input); - - let mut ecx = EvalCtxt { - infcx, - variables: canonical_input.variables, - var_values, - predefined_opaques_in_body: input.predefined_opaques_in_body, - max_input_universe: canonical_input.max_universe, - search_graph, - nested_goals: NestedGoals::new(), - tainted: Ok(()), - inspect: canonical_goal_evaluation.new_goal_evaluation_step(input), - }; - - for &(key, ty) in &input.predefined_opaques_in_body.opaque_types { - ecx.insert_hidden_type(key, input.goal.param_env, ty) - .expect("failed to prepopulate opaque types"); - } - - if !ecx.nested_goals.is_empty() { - panic!("prepopulating opaque types shouldn't add goals: {:?}", ecx.nested_goals); - } - - let result = f(&mut ecx, input.goal); - - canonical_goal_evaluation.goal_evaluation_step(ecx.inspect); - - // When creating a query response we clone the opaque type constraints - // instead of taking them. This would cause an ICE here, since we have - // assertions against dropping an `InferCtxt` without taking opaques. - // FIXME: Once we remove support for the old impl we can remove this. - if input.anchor != DefiningAnchor::Error { - // This seems ok, but fragile. - let _ = infcx.take_opaque_types(); - } - - result - } - - /// The entry point of the solver. - /// - /// This function deals with (coinductive) cycles, overflow, and caching - /// and then calls [`EvalCtxt::compute_goal`] which contains the actual - /// logic of the solver. - /// - /// Instead of calling this function directly, use either [EvalCtxt::evaluate_goal] - /// if you're inside of the solver or [InferCtxtEvalExt::evaluate_root_goal] if you're - /// outside of it. - #[instrument(level = "debug", skip(tcx, search_graph, goal_evaluation), ret)] - fn evaluate_canonical_goal( - tcx: TyCtxt<'tcx>, - search_graph: &'a mut search_graph::SearchGraph<'tcx>, - canonical_input: CanonicalInput<'tcx>, - goal_evaluation: &mut ProofTreeBuilder<'tcx>, - ) -> QueryResult<'tcx> { - let mut canonical_goal_evaluation = - goal_evaluation.new_canonical_goal_evaluation(canonical_input); - - // Deal with overflow, caching, and coinduction. - // - // The actual solver logic happens in `ecx.compute_goal`. - let result = ensure_sufficient_stack(|| { - search_graph.with_new_goal( - tcx, - canonical_input, - &mut canonical_goal_evaluation, - |search_graph, canonical_goal_evaluation| { - EvalCtxt::enter_canonical( - tcx, - search_graph, - canonical_input, - canonical_goal_evaluation, - |ecx, goal| { - let result = ecx.compute_goal(goal); - ecx.inspect.query_result(result); - result - }, - ) - }, - ) - }); - - canonical_goal_evaluation.query_result(result); - goal_evaluation.canonical_goal_evaluation(canonical_goal_evaluation); - result - } - - /// Recursively evaluates `goal`, returning whether any inference vars have - /// been constrained and the certainty of the result. 
- fn evaluate_goal( - &mut self, - goal_evaluation_kind: GoalEvaluationKind, - goal: Goal<'tcx, ty::Predicate<'tcx>>, - ) -> Result<(bool, Certainty, Vec>>), NoSolution> { - let (orig_values, canonical_goal) = self.canonicalize_goal(goal); - let mut goal_evaluation = - self.inspect.new_goal_evaluation(goal, &orig_values, goal_evaluation_kind); - let encountered_overflow = self.search_graph.encountered_overflow(); - let canonical_response = EvalCtxt::evaluate_canonical_goal( - self.tcx(), - self.search_graph, - canonical_goal, - &mut goal_evaluation, - ); - let canonical_response = match canonical_response { - Err(e) => { - self.inspect.goal_evaluation(goal_evaluation); - return Err(e); - } - Ok(response) => response, - }; - - let has_changed = !canonical_response.value.var_values.is_identity_modulo_regions() - || !canonical_response.value.external_constraints.opaque_types.is_empty(); - let (certainty, nested_goals) = match self.instantiate_and_apply_query_response( - goal.param_env, - orig_values, - canonical_response, - ) { - Err(e) => { - self.inspect.goal_evaluation(goal_evaluation); - return Err(e); - } - Ok(response) => response, - }; - goal_evaluation.returned_goals(&nested_goals); - self.inspect.goal_evaluation(goal_evaluation); - - if !has_changed && !nested_goals.is_empty() { - bug!("an unchanged goal shouldn't have any side-effects on instantiation"); - } - - // Check that rerunning this query with its inference constraints applied - // doesn't result in new inference constraints and has the same result. - // - // If we have projection goals like `::Assoc == u32` we recursively - // call `exists ::Assoc == U` to enable better caching. This goal - // could constrain `U` to `u32` which would cause this check to result in a - // solver cycle. - if cfg!(debug_assertions) - && has_changed - && !matches!( - goal_evaluation_kind, - GoalEvaluationKind::Nested { is_normalizes_to_hack: IsNormalizesToHack::Yes } - ) - && !self.search_graph.in_cycle() - { - // The nested evaluation has to happen with the original state - // of `encountered_overflow`. - let from_original_evaluation = - self.search_graph.reset_encountered_overflow(encountered_overflow); - self.check_evaluate_goal_stable_result(goal, canonical_goal, canonical_response); - // In case the evaluation was unstable, we manually make sure that this - // debug check does not influence the result of the parent goal. - self.search_graph.reset_encountered_overflow(from_original_evaluation); - } - - Ok((has_changed, certainty, nested_goals)) - } - - fn check_evaluate_goal_stable_result( - &mut self, - goal: Goal<'tcx, ty::Predicate<'tcx>>, - original_input: CanonicalInput<'tcx>, - original_result: CanonicalResponse<'tcx>, - ) { - let (_orig_values, canonical_goal) = self.canonicalize_goal(goal); - let result = EvalCtxt::evaluate_canonical_goal( - self.tcx(), - self.search_graph, - canonical_goal, - // FIXME(-Ztrait-solver=next): we do not track what happens in `evaluate_canonical_goal` - &mut ProofTreeBuilder::new_noop(), - ); - - macro_rules! 
fail { - ($msg:expr) => {{ - let msg = $msg; - warn!( - "unstable result: {msg}\n\ - original goal: {original_input:?},\n\ - original result: {original_result:?}\n\ - re-canonicalized goal: {canonical_goal:?}\n\ - second response: {result:?}" - ); - return; - }}; - } - - let Ok(new_canonical_response) = result else { fail!("second response was error") }; - // We only check for modulo regions as we convert all regions in - // the input to new existentials, even if they're expected to be - // `'static` or a placeholder region. - if !new_canonical_response.value.var_values.is_identity_modulo_regions() { - fail!("additional constraints from second response") - } - if original_result.value.certainty != new_canonical_response.value.certainty { - fail!("unstable certainty") - } - } - - fn compute_goal(&mut self, goal: Goal<'tcx, ty::Predicate<'tcx>>) -> QueryResult<'tcx> { - let Goal { param_env, predicate } = goal; - let kind = predicate.kind(); - if let Some(kind) = kind.no_bound_vars() { - match kind { - ty::PredicateKind::Clause(ty::ClauseKind::Trait(predicate)) => { - self.compute_trait_goal(Goal { param_env, predicate }) - } - ty::PredicateKind::Clause(ty::ClauseKind::Projection(predicate)) => { - self.compute_projection_goal(Goal { param_env, predicate }) - } - ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(predicate)) => { - self.compute_type_outlives_goal(Goal { param_env, predicate }) - } - ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(predicate)) => { - self.compute_region_outlives_goal(Goal { param_env, predicate }) - } - ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => { - self.compute_const_arg_has_type_goal(Goal { param_env, predicate: (ct, ty) }) - } - ty::PredicateKind::Subtype(predicate) => { - self.compute_subtype_goal(Goal { param_env, predicate }) - } - ty::PredicateKind::Coerce(predicate) => { - self.compute_coerce_goal(Goal { param_env, predicate }) - } - ty::PredicateKind::ClosureKind(def_id, args, kind) => self - .compute_closure_kind_goal(Goal { param_env, predicate: (def_id, args, kind) }), - ty::PredicateKind::ObjectSafe(trait_def_id) => { - self.compute_object_safe_goal(trait_def_id) - } - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { - self.compute_well_formed_goal(Goal { param_env, predicate: arg }) - } - ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(ct)) => { - self.compute_const_evaluatable_goal(Goal { param_env, predicate: ct }) - } - ty::PredicateKind::ConstEquate(_, _) => { - bug!("ConstEquate should not be emitted when `-Ztrait-solver=next` is active") - } - ty::PredicateKind::AliasRelate(lhs, rhs, direction) => self - .compute_alias_relate_goal(Goal { - param_env, - predicate: (lhs, rhs, direction), - }), - ty::PredicateKind::Ambiguous => { - self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) - } - } - } else { - let kind = self.infcx.instantiate_binder_with_placeholders(kind); - let goal = goal.with(self.tcx(), ty::Binder::dummy(kind)); - self.add_goal(goal); - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } - } - - // Recursively evaluates all the goals added to this `EvalCtxt` to completion, returning - // the certainty of all the goals. 
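// --- Editorial sketch (not part of the upstream patch) ---------------------
// The removed `try_evaluate_added_goals` / `evaluate_added_goals_step` below
// rerun the pending nested goals until either a pass makes no progress or the
// local overflow limit is reached. A toy std-only fixpoint loop with the same
// shape (all names and the `u32` "goals" are illustrative, not rustc API):
#[derive(Clone, Copy, Debug, PartialEq)]
enum Certainty { Yes, Maybe }

#[derive(Debug, PartialEq)]
struct NoSolution;

// One pass over the pending goals; returns Some(certainty) once no goal made
// progress, or None if another iteration is needed.
fn evaluate_step(goals: &mut Vec<u32>) -> Result<Option<Certainty>, NoSolution> {
    let mut unchanged = Some(Certainty::Yes);
    let pending = std::mem::take(goals);
    for goal in pending {
        if goal == 0 {
            return Err(NoSolution); // unsolvable goal: taint the whole evaluation
        } else if goal > 1 {
            goals.push(goal - 1); // made progress, re-queue the smaller residual goal
            unchanged = None;
        }
        // goal == 1: solved outright, nothing to re-queue
    }
    Ok(unchanged)
}

fn evaluate_to_fixpoint(mut goals: Vec<u32>, limit: usize) -> Result<Certainty, NoSolution> {
    for _ in 0..limit {
        match evaluate_step(&mut goals)? {
            Some(certainty) => return Ok(certainty),
            None => continue,
        }
    }
    Ok(Certainty::Maybe) // limit reached, analogous to reporting overflow
}

fn main() {
    assert_eq!(evaluate_to_fixpoint(vec![3, 1], 16), Ok(Certainty::Yes));
    assert!(evaluate_to_fixpoint(vec![0], 16).is_err());
}
// ---------------------------------------------------------------------------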
- #[instrument(level = "debug", skip(self))] - pub(super) fn try_evaluate_added_goals(&mut self) -> Result { - let inspect = self.inspect.new_evaluate_added_goals(); - let inspect = core::mem::replace(&mut self.inspect, inspect); - - let mut response = Ok(Certainty::OVERFLOW); - for _ in 0..self.local_overflow_limit() { - // FIXME: This match is a bit ugly, it might be nice to change the inspect - // stuff to use a closure instead. which should hopefully simplify this a bit. - match self.evaluate_added_goals_step() { - Ok(Some(cert)) => { - response = Ok(cert); - break; - } - Ok(None) => {} - Err(NoSolution) => { - response = Err(NoSolution); - break; - } - } - } - - self.inspect.eval_added_goals_result(response); - - if response.is_err() { - self.tainted = Err(NoSolution); - } - - let goal_evaluations = std::mem::replace(&mut self.inspect, inspect); - self.inspect.added_goals_evaluation(goal_evaluations); - - response - } - - /// Iterate over all added goals: returning `Ok(Some(_))` in case we can stop rerunning. - /// - /// Goals for the next step get directly added to the nested goals of the `EvalCtxt`. - fn evaluate_added_goals_step(&mut self) -> Result, NoSolution> { - let tcx = self.tcx(); - let mut goals = core::mem::replace(&mut self.nested_goals, NestedGoals::new()); - - self.inspect.evaluate_added_goals_loop_start(); - // If this loop did not result in any progress, what's our final certainty. - let mut unchanged_certainty = Some(Certainty::Yes); - if let Some(goal) = goals.normalizes_to_hack_goal.take() { - // Replace the goal with an unconstrained infer var, so the - // RHS does not affect projection candidate assembly. - let unconstrained_rhs = self.next_term_infer_of_kind(goal.predicate.term); - let unconstrained_goal = goal.with( - tcx, - ty::ProjectionPredicate { - projection_ty: goal.predicate.projection_ty, - term: unconstrained_rhs, - }, - ); - - let (_, certainty, instantiate_goals) = self.evaluate_goal( - GoalEvaluationKind::Nested { is_normalizes_to_hack: IsNormalizesToHack::Yes }, - unconstrained_goal, - )?; - self.nested_goals.goals.extend(instantiate_goals); - - // Finally, equate the goal's RHS with the unconstrained var. - // We put the nested goals from this into goals instead of - // next_goals to avoid needing to process the loop one extra - // time if this goal returns something -- I don't think this - // matters in practice, though. - let eq_goals = - self.eq_and_get_goals(goal.param_env, goal.predicate.term, unconstrained_rhs)?; - goals.goals.extend(eq_goals); - - // We only look at the `projection_ty` part here rather than - // looking at the "has changed" return from evaluate_goal, - // because we expect the `unconstrained_rhs` part of the predicate - // to have changed -- that means we actually normalized successfully! - if goal.predicate.projection_ty - != self.resolve_vars_if_possible(goal.predicate.projection_ty) - { - unchanged_certainty = None; - } - - match certainty { - Certainty::Yes => {} - Certainty::Maybe(_) => { - // We need to resolve vars here so that we correctly - // deal with `has_changed` in the next iteration. - self.set_normalizes_to_hack_goal(self.resolve_vars_if_possible(goal)); - unchanged_certainty = unchanged_certainty.map(|c| c.unify_with(certainty)); - } - } - } - - for goal in goals.goals.drain(..) 
{ - let (has_changed, certainty, instantiate_goals) = self.evaluate_goal( - GoalEvaluationKind::Nested { is_normalizes_to_hack: IsNormalizesToHack::No }, - goal, - )?; - self.nested_goals.goals.extend(instantiate_goals); - if has_changed { - unchanged_certainty = None; - } - - match certainty { - Certainty::Yes => {} - Certainty::Maybe(_) => { - self.nested_goals.goals.push(goal); - unchanged_certainty = unchanged_certainty.map(|c| c.unify_with(certainty)); - } - } - } - - Ok(unchanged_certainty) - } -} - -impl<'tcx> EvalCtxt<'_, 'tcx> { - pub(super) fn tcx(&self) -> TyCtxt<'tcx> { - self.infcx.tcx - } - - pub(super) fn next_ty_infer(&self) -> Ty<'tcx> { - self.infcx.next_ty_var(TypeVariableOrigin { - kind: TypeVariableOriginKind::MiscVariable, - span: DUMMY_SP, - }) - } - - pub(super) fn next_const_infer(&self, ty: Ty<'tcx>) -> ty::Const<'tcx> { - self.infcx.next_const_var( - ty, - ConstVariableOrigin { kind: ConstVariableOriginKind::MiscVariable, span: DUMMY_SP }, - ) - } - - /// Returns a ty infer or a const infer depending on whether `kind` is a `Ty` or `Const`. - /// If `kind` is an integer inference variable this will still return a ty infer var. - pub(super) fn next_term_infer_of_kind(&self, kind: ty::Term<'tcx>) -> ty::Term<'tcx> { - match kind.unpack() { - ty::TermKind::Ty(_) => self.next_ty_infer().into(), - ty::TermKind::Const(ct) => self.next_const_infer(ct.ty()).into(), - } - } - - /// Is the projection predicate is of the form `exists ::Assoc = T`. - /// - /// This is the case if the `term` is an inference variable in the innermost universe - /// and does not occur in any other part of the predicate. - pub(super) fn term_is_fully_unconstrained( - &self, - goal: Goal<'tcx, ty::ProjectionPredicate<'tcx>>, - ) -> bool { - let term_is_infer = match goal.predicate.term.unpack() { - ty::TermKind::Ty(ty) => { - if let &ty::Infer(ty::TyVar(vid)) = ty.kind() { - match self.infcx.probe_ty_var(vid) { - Ok(value) => bug!("resolved var in query: {goal:?} {value:?}"), - Err(universe) => universe == self.infcx.universe(), - } - } else { - false - } - } - ty::TermKind::Const(ct) => { - if let ty::ConstKind::Infer(ty::InferConst::Var(vid)) = ct.kind() { - match self.infcx.probe_const_var(vid) { - Ok(value) => bug!("resolved var in query: {goal:?} {value:?}"), - Err(universe) => universe == self.infcx.universe(), - } - } else { - false - } - } - }; - - // Guard against `>::Assoc = ?0>`. 
- struct ContainsTerm<'a, 'tcx> { - term: ty::Term<'tcx>, - infcx: &'a InferCtxt<'tcx>, - } - impl<'tcx> TypeVisitor> for ContainsTerm<'_, 'tcx> { - type BreakTy = (); - fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow { - if let Some(vid) = t.ty_vid() - && let ty::TermKind::Ty(term) = self.term.unpack() - && let Some(term_vid) = term.ty_vid() - && self.infcx.root_var(vid) == self.infcx.root_var(term_vid) - { - ControlFlow::Break(()) - } else if t.has_non_region_infer() { - t.super_visit_with(self) - } else { - ControlFlow::Continue(()) - } - } - - fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow { - if let ty::ConstKind::Infer(ty::InferConst::Var(vid)) = c.kind() - && let ty::TermKind::Const(term) = self.term.unpack() - && let ty::ConstKind::Infer(ty::InferConst::Var(term_vid)) = term.kind() - && self.infcx.root_const_var(vid) == self.infcx.root_const_var(term_vid) - { - ControlFlow::Break(()) - } else if c.has_non_region_infer() { - c.super_visit_with(self) - } else { - ControlFlow::Continue(()) - } - } - } - - let mut visitor = ContainsTerm { infcx: self.infcx, term: goal.predicate.term }; - - term_is_infer - && goal.predicate.projection_ty.visit_with(&mut visitor).is_continue() - && goal.param_env.visit_with(&mut visitor).is_continue() - } - - #[instrument(level = "debug", skip(self, param_env), ret)] - pub(super) fn eq>( - &mut self, - param_env: ty::ParamEnv<'tcx>, - lhs: T, - rhs: T, - ) -> Result<(), NoSolution> { - self.infcx - .at(&ObligationCause::dummy(), param_env) - .eq(DefineOpaqueTypes::No, lhs, rhs) - .map(|InferOk { value: (), obligations }| { - self.add_goals(obligations.into_iter().map(|o| o.into())); - }) - .map_err(|e| { - debug!(?e, "failed to equate"); - NoSolution - }) - } - - #[instrument(level = "debug", skip(self, param_env), ret)] - pub(super) fn sub>( - &mut self, - param_env: ty::ParamEnv<'tcx>, - sub: T, - sup: T, - ) -> Result<(), NoSolution> { - self.infcx - .at(&ObligationCause::dummy(), param_env) - .sub(DefineOpaqueTypes::No, sub, sup) - .map(|InferOk { value: (), obligations }| { - self.add_goals(obligations.into_iter().map(|o| o.into())); - }) - .map_err(|e| { - debug!(?e, "failed to subtype"); - NoSolution - }) - } - - /// Equates two values returning the nested goals without adding them - /// to the nested goals of the `EvalCtxt`. - /// - /// If possible, try using `eq` instead which automatically handles nested - /// goals correctly. 
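// --- Editorial sketch (not part of the upstream patch) ---------------------
// The `ContainsTerm` visitor above walks a type and breaks out as soon as the
// searched-for inference variable occurs, which is essentially an occurs
// check. The same pattern on a toy term type, using only std's `ControlFlow`
// (all names here are illustrative, not rustc API):
use std::ops::ControlFlow;

enum Term {
    Var(u32),                     // an inference variable such as `?0`
    App(&'static str, Vec<Term>), // an applied constructor, e.g. a generic type
}

// Break as soon as `var` occurs anywhere inside `term`.
fn find_var(term: &Term, var: u32) -> ControlFlow<()> {
    match term {
        Term::Var(v) if *v == var => ControlFlow::Break(()),
        Term::Var(_) => ControlFlow::Continue(()),
        Term::App(_, args) => {
            for arg in args {
                // Propagate Break from the recursive walk, like super_visit_with.
                if let ControlFlow::Break(()) = find_var(arg, var) {
                    return ControlFlow::Break(());
                }
            }
            ControlFlow::Continue(())
        }
    }
}

fn occurs(term: &Term, var: u32) -> bool {
    find_var(term, var).is_break()
}

fn main() {
    let term = Term::App("Vec", vec![Term::Var(0)]);
    if let Term::App(head, _) = &term {
        println!("head constructor: {head}");
    }
    assert!(occurs(&term, 0));
    assert!(!occurs(&term, 1));
}
// ---------------------------------------------------------------------------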
- #[instrument(level = "trace", skip(self, param_env), ret)] - pub(super) fn eq_and_get_goals>( - &self, - param_env: ty::ParamEnv<'tcx>, - lhs: T, - rhs: T, - ) -> Result>>, NoSolution> { - self.infcx - .at(&ObligationCause::dummy(), param_env) - .eq(DefineOpaqueTypes::No, lhs, rhs) - .map(|InferOk { value: (), obligations }| { - obligations.into_iter().map(|o| o.into()).collect() - }) - .map_err(|e| { - debug!(?e, "failed to equate"); - NoSolution - }) - } - - pub(super) fn instantiate_binder_with_infer> + Copy>( - &self, - value: ty::Binder<'tcx, T>, - ) -> T { - self.infcx.instantiate_binder_with_fresh_vars( - DUMMY_SP, - LateBoundRegionConversionTime::HigherRankedType, - value, - ) - } - - pub(super) fn instantiate_binder_with_placeholders> + Copy>( - &self, - value: ty::Binder<'tcx, T>, - ) -> T { - self.infcx.instantiate_binder_with_placeholders(value) - } - - pub(super) fn resolve_vars_if_possible(&self, value: T) -> T - where - T: TypeFoldable>, - { - self.infcx.resolve_vars_if_possible(value) - } - - pub(super) fn fresh_args_for_item(&self, def_id: DefId) -> ty::GenericArgsRef<'tcx> { - self.infcx.fresh_args_for_item(DUMMY_SP, def_id) - } - - pub(super) fn translate_args( - &self, - param_env: ty::ParamEnv<'tcx>, - source_impl: DefId, - source_args: ty::GenericArgsRef<'tcx>, - target_node: specialization_graph::Node, - ) -> ty::GenericArgsRef<'tcx> { - crate::traits::translate_args(self.infcx, param_env, source_impl, source_args, target_node) - } - - pub(super) fn register_ty_outlives(&self, ty: Ty<'tcx>, lt: ty::Region<'tcx>) { - self.infcx.register_region_obligation_with_cause(ty, lt, &ObligationCause::dummy()); - } - - pub(super) fn register_region_outlives(&self, a: ty::Region<'tcx>, b: ty::Region<'tcx>) { - // `b : a` ==> `a <= b` - // (inlined from `InferCtxt::region_outlives_predicate`) - self.infcx.sub_regions( - rustc_infer::infer::SubregionOrigin::RelateRegionParamBound(DUMMY_SP), - b, - a, - ); - } - - /// Computes the list of goals required for `arg` to be well-formed - pub(super) fn well_formed_goals( - &self, - param_env: ty::ParamEnv<'tcx>, - arg: ty::GenericArg<'tcx>, - ) -> Option>>> { - crate::traits::wf::unnormalized_obligations(self.infcx, param_env, arg) - .map(|obligations| obligations.into_iter().map(|obligation| obligation.into())) - } - - pub(super) fn is_transmutable( - &self, - src_and_dst: rustc_transmute::Types<'tcx>, - scope: Ty<'tcx>, - assume: rustc_transmute::Assume, - ) -> Result { - use rustc_transmute::Answer; - // FIXME(transmutability): This really should be returning nested goals for `Answer::If*` - match rustc_transmute::TransmuteTypeEnv::new(self.infcx).is_transmutable( - ObligationCause::dummy(), - src_and_dst, - scope, - assume, - ) { - Answer::Yes => Ok(Certainty::Yes), - Answer::No(_) | Answer::If(_) => Err(NoSolution), - } - } - - pub(super) fn can_define_opaque_ty(&self, def_id: LocalDefId) -> bool { - self.infcx.opaque_type_origin(def_id).is_some() - } - - pub(super) fn insert_hidden_type( - &mut self, - opaque_type_key: OpaqueTypeKey<'tcx>, - param_env: ty::ParamEnv<'tcx>, - hidden_ty: Ty<'tcx>, - ) -> Result<(), NoSolution> { - let mut obligations = Vec::new(); - self.infcx.insert_hidden_type( - opaque_type_key, - &ObligationCause::dummy(), - param_env, - hidden_ty, - true, - &mut obligations, - )?; - self.add_goals(obligations.into_iter().map(|o| o.into())); - Ok(()) - } - - pub(super) fn add_item_bounds_for_hidden_type( - &mut self, - opaque_def_id: DefId, - opaque_args: ty::GenericArgsRef<'tcx>, - param_env: ty::ParamEnv<'tcx>, 
- hidden_ty: Ty<'tcx>, - ) { - let mut obligations = Vec::new(); - self.infcx.add_item_bounds_for_hidden_type( - opaque_def_id, - opaque_args, - ObligationCause::dummy(), - param_env, - hidden_ty, - &mut obligations, - ); - self.add_goals(obligations.into_iter().map(|o| o.into())); - } - - // Do something for each opaque/hidden pair defined with `def_id` in the - // current inference context. - pub(super) fn unify_existing_opaque_tys( - &mut self, - param_env: ty::ParamEnv<'tcx>, - key: ty::OpaqueTypeKey<'tcx>, - ty: Ty<'tcx>, - ) -> Vec> { - // FIXME: Super inefficient to be cloning this... - let opaques = self.infcx.clone_opaque_types_for_query_response(); - - let mut values = vec![]; - for (candidate_key, candidate_ty) in opaques { - if candidate_key.def_id != key.def_id { - continue; - } - values.extend(self.probe_misc_candidate("opaque type storage").enter(|ecx| { - for (a, b) in std::iter::zip(candidate_key.args, key.args) { - ecx.eq(param_env, a, b)?; - } - ecx.eq(param_env, candidate_ty, ty)?; - ecx.add_item_bounds_for_hidden_type( - candidate_key.def_id.to_def_id(), - candidate_key.args, - param_env, - candidate_ty, - ); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - })); - } - values - } - - // Try to evaluate a const, or return `None` if the const is too generic. - // This doesn't mean the const isn't evaluatable, though, and should be treated - // as an ambiguity rather than no-solution. - pub(super) fn try_const_eval_resolve( - &self, - param_env: ty::ParamEnv<'tcx>, - unevaluated: ty::UnevaluatedConst<'tcx>, - ty: Ty<'tcx>, - ) -> Option> { - use rustc_middle::mir::interpret::ErrorHandled; - match self.infcx.try_const_eval_resolve(param_env, unevaluated, ty, None) { - Ok(ct) => Some(ct), - Err(ErrorHandled::Reported(e, _)) => { - Some(ty::Const::new_error(self.tcx(), e.into(), ty)) - } - Err(ErrorHandled::TooGeneric(_)) => None, - } - } - - /// Walk through the vtable of a principal trait ref, executing a `supertrait_visitor` - /// for every trait ref encountered (including the principal). Passes both the vtable - /// base and the (optional) vptr slot. - pub(super) fn walk_vtable( - &mut self, - principal: ty::PolyTraitRef<'tcx>, - mut supertrait_visitor: impl FnMut(&mut Self, ty::PolyTraitRef<'tcx>, usize, Option), - ) { - let tcx = self.tcx(); - let mut offset = 0; - prepare_vtable_segments::<()>(tcx, principal, |segment| { - match segment { - VtblSegment::MetadataDSA => { - offset += TyCtxt::COMMON_VTABLE_ENTRIES.len(); - } - VtblSegment::TraitOwnEntries { trait_ref, emit_vptr } => { - let own_vtable_entries = count_own_vtable_entries(tcx, trait_ref); - - supertrait_visitor( - self, - trait_ref, - offset, - emit_vptr.then(|| offset + own_vtable_entries), - ); - - offset += own_vtable_entries; - if emit_vptr { - offset += 1; - } - } - } - ControlFlow::Continue(()) - }); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inherent_projection.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inherent_projection.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inherent_projection.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inherent_projection.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -//! Computes a normalizes-to (projection) goal for inherent associated types, -//! `#![feature(inherent_associated_type)]`. Since astconv already determines -//! 
which impl the IAT is being projected from, we just: -//! 1. instantiate substs, -//! 2. equate the self type, and -//! 3. instantiate and register where clauses. -use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; -use rustc_middle::ty; - -use super::EvalCtxt; - -impl<'tcx> EvalCtxt<'_, 'tcx> { - pub(super) fn normalize_inherent_associated_type( - &mut self, - goal: Goal<'tcx, ty::ProjectionPredicate<'tcx>>, - ) -> QueryResult<'tcx> { - let tcx = self.tcx(); - let inherent = goal.predicate.projection_ty; - let expected = goal.predicate.term.ty().expect("inherent consts are treated separately"); - - let impl_def_id = tcx.parent(inherent.def_id); - let impl_substs = self.fresh_args_for_item(impl_def_id); - - // Equate impl header and add impl where clauses - self.eq( - goal.param_env, - inherent.self_ty(), - tcx.type_of(impl_def_id).instantiate(tcx, impl_substs), - )?; - - // Equate IAT with the RHS of the project goal - let inherent_substs = inherent.rebase_inherent_args_onto_impl(impl_substs, tcx); - self.eq( - goal.param_env, - expected, - tcx.type_of(inherent.def_id).instantiate(tcx, inherent_substs), - ) - .expect("expected goal term to be fully unconstrained"); - - // Check both where clauses on the impl and IAT - self.add_goals( - tcx.predicates_of(inherent.def_id) - .instantiate(tcx, inherent_substs) - .into_iter() - .map(|(pred, _)| goal.with(tcx, pred)), - ); - - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs 2023-12-21 16:55:28.000000000 +0000 @@ -17,7 +17,7 @@ use rustc_middle::ty; use crate::solve::inspect::ProofTreeBuilder; -use crate::solve::{GenerateProofTree, InferCtxtEvalExt, UseGlobalCache}; +use crate::solve::{GenerateProofTree, InferCtxtEvalExt}; pub struct InspectGoal<'a, 'tcx> { infcx: &'a InferCtxt<'tcx>, @@ -82,8 +82,7 @@ } for &goal in &instantiated_goals { - let (_, proof_tree) = - infcx.evaluate_root_goal(goal, GenerateProofTree::Yes(UseGlobalCache::No)); + let (_, proof_tree) = infcx.evaluate_root_goal(goal, GenerateProofTree::Yes); let proof_tree = proof_tree.unwrap(); visitor.visit_goal(&InspectGoal::new( infcx, @@ -169,11 +168,11 @@ let mut candidates = vec![]; let last_eval_step = match self.evaluation.evaluation.kind { inspect::CanonicalGoalEvaluationKind::Overflow - | inspect::CanonicalGoalEvaluationKind::CacheHit(_) => { + | inspect::CanonicalGoalEvaluationKind::CycleInStack => { warn!("unexpected root evaluation: {:?}", self.evaluation); return vec![]; } - inspect::CanonicalGoalEvaluationKind::Uncached { ref revisions } => { + inspect::CanonicalGoalEvaluationKind::Evaluation { ref revisions } => { if let Some(last) = revisions.last() { last } else { @@ -227,8 +226,7 @@ goal: Goal<'tcx, ty::Predicate<'tcx>>, visitor: &mut V, ) -> ControlFlow { - let (_, proof_tree) = - self.evaluate_root_goal(goal, GenerateProofTree::Yes(UseGlobalCache::No)); + let (_, proof_tree) = self.evaluate_root_goal(goal, GenerateProofTree::Yes); let proof_tree = proof_tree.unwrap(); visitor.visit_goal(&InspectGoal::new(self, 0, &proof_tree)) } diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inspect/build.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inspect/build.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inspect/build.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/inspect/build.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,6 +3,8 @@ //! This code is *a bit* of a mess and can hopefully be //! mostly ignored. For a general overview of how it works, //! see the comment on [ProofTreeBuilder]. +use std::mem; + use rustc_middle::traits::query::NoSolution; use rustc_middle::traits::solve::{ CanonicalInput, Certainty, Goal, IsNormalizesToHack, QueryInput, QueryResult, @@ -10,7 +12,6 @@ use rustc_middle::ty::{self, TyCtxt}; use rustc_session::config::DumpSolverProofTree; -use crate::solve::eval_ctxt::UseGlobalCache; use crate::solve::{self, inspect, EvalCtxt, GenerateProofTree}; /// The core data structure when building proof trees. @@ -34,12 +35,7 @@ /// is called to recursively convert the whole structure to a /// finished proof tree. pub(in crate::solve) struct ProofTreeBuilder<'tcx> { - state: Option>>, -} - -struct BuilderData<'tcx> { - tree: DebugSolver<'tcx>, - use_global_cache: UseGlobalCache, + state: Option>>, } /// The current state of the proof tree builder, at most places @@ -118,36 +114,46 @@ Nested { is_normalizes_to_hack: IsNormalizesToHack }, } -#[derive(Eq, PartialEq, Debug)] -pub(in crate::solve) enum WipCanonicalGoalEvaluationKind { +#[derive(Eq, PartialEq)] +pub(in crate::solve) enum WipCanonicalGoalEvaluationKind<'tcx> { Overflow, - CacheHit(inspect::CacheHit), + CycleInStack, + Interned { revisions: &'tcx [inspect::GoalEvaluationStep<'tcx>] }, +} + +impl std::fmt::Debug for WipCanonicalGoalEvaluationKind<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Overflow => write!(f, "Overflow"), + Self::CycleInStack => write!(f, "CycleInStack"), + Self::Interned { revisions: _ } => f.debug_struct("Interned").finish_non_exhaustive(), + } + } } #[derive(Eq, PartialEq, Debug)] struct WipCanonicalGoalEvaluation<'tcx> { goal: CanonicalInput<'tcx>, - kind: Option, + kind: Option>, + /// Only used for uncached goals. After we finished evaluating + /// the goal, this is interned and moved into `kind`. 
revisions: Vec>, result: Option>, } impl<'tcx> WipCanonicalGoalEvaluation<'tcx> { fn finalize(self) -> inspect::CanonicalGoalEvaluation<'tcx> { - let kind = match self.kind { - Some(WipCanonicalGoalEvaluationKind::Overflow) => { + assert!(self.revisions.is_empty()); + let kind = match self.kind.unwrap() { + WipCanonicalGoalEvaluationKind::Overflow => { inspect::CanonicalGoalEvaluationKind::Overflow } - Some(WipCanonicalGoalEvaluationKind::CacheHit(hit)) => { - inspect::CanonicalGoalEvaluationKind::CacheHit(hit) + WipCanonicalGoalEvaluationKind::CycleInStack => { + inspect::CanonicalGoalEvaluationKind::CycleInStack + } + WipCanonicalGoalEvaluationKind::Interned { revisions } => { + inspect::CanonicalGoalEvaluationKind::Evaluation { revisions } } - None => inspect::CanonicalGoalEvaluationKind::Uncached { - revisions: self - .revisions - .into_iter() - .map(WipGoalEvaluationStep::finalize) - .collect(), - }, }; inspect::CanonicalGoalEvaluation { goal: self.goal, kind, result: self.result.unwrap() } @@ -226,33 +232,20 @@ } impl<'tcx> ProofTreeBuilder<'tcx> { - fn new( - state: impl Into>, - use_global_cache: UseGlobalCache, - ) -> ProofTreeBuilder<'tcx> { - ProofTreeBuilder { - state: Some(Box::new(BuilderData { tree: state.into(), use_global_cache })), - } + fn new(state: impl Into>) -> ProofTreeBuilder<'tcx> { + ProofTreeBuilder { state: Some(Box::new(state.into())) } } fn nested>>(&self, state: impl FnOnce() -> T) -> Self { - match &self.state { - Some(prev_state) => Self { - state: Some(Box::new(BuilderData { - tree: state().into(), - use_global_cache: prev_state.use_global_cache, - })), - }, - None => Self { state: None }, - } + ProofTreeBuilder { state: self.state.as_ref().map(|_| Box::new(state().into())) } } fn as_mut(&mut self) -> Option<&mut DebugSolver<'tcx>> { - self.state.as_mut().map(|boxed| &mut boxed.tree) + self.state.as_deref_mut() } pub fn finalize(self) -> Option> { - match self.state?.tree { + match *self.state? { DebugSolver::GoalEvaluation(wip_goal_evaluation) => { Some(wip_goal_evaluation.finalize()) } @@ -260,13 +253,6 @@ } } - pub fn use_global_cache(&self) -> bool { - self.state - .as_ref() - .map(|state| matches!(state.use_global_cache, UseGlobalCache::Yes)) - .unwrap_or(true) - } - pub fn new_maybe_root( tcx: TyCtxt<'tcx>, generate_proof_tree: GenerateProofTree, @@ -276,10 +262,7 @@ GenerateProofTree::IfEnabled => { let opts = &tcx.sess.opts.unstable_opts; match opts.dump_solver_proof_tree { - DumpSolverProofTree::Always => { - let use_cache = opts.dump_solver_proof_tree_use_cache.unwrap_or(true); - ProofTreeBuilder::new_root(UseGlobalCache::from_bool(use_cache)) - } + DumpSolverProofTree::Always => ProofTreeBuilder::new_root(), // `OnError` is handled by reevaluating goals in error // reporting with `GenerateProofTree::Yes`. 
DumpSolverProofTree::OnError | DumpSolverProofTree::Never => { @@ -287,12 +270,12 @@ } } } - GenerateProofTree::Yes(use_cache) => ProofTreeBuilder::new_root(use_cache), + GenerateProofTree::Yes => ProofTreeBuilder::new_root(), } } - pub fn new_root(use_global_cache: UseGlobalCache) -> ProofTreeBuilder<'tcx> { - ProofTreeBuilder::new(DebugSolver::Root, use_global_cache) + pub fn new_root() -> ProofTreeBuilder<'tcx> { + ProofTreeBuilder::new(DebugSolver::Root) } pub fn new_noop() -> ProofTreeBuilder<'tcx> { @@ -336,9 +319,27 @@ }) } + pub fn finalize_evaluation( + &mut self, + tcx: TyCtxt<'tcx>, + ) -> Option<&'tcx [inspect::GoalEvaluationStep<'tcx>]> { + self.as_mut().map(|this| match this { + DebugSolver::CanonicalGoalEvaluation(evaluation) => { + let revisions = mem::take(&mut evaluation.revisions) + .into_iter() + .map(WipGoalEvaluationStep::finalize); + let revisions = &*tcx.arena.alloc_from_iter(revisions); + let kind = WipCanonicalGoalEvaluationKind::Interned { revisions }; + assert_eq!(evaluation.kind.replace(kind), None); + revisions + } + _ => unreachable!(), + }) + } + pub fn canonical_goal_evaluation(&mut self, canonical_goal_evaluation: ProofTreeBuilder<'tcx>) { if let Some(this) = self.as_mut() { - match (this, canonical_goal_evaluation.state.unwrap().tree) { + match (this, *canonical_goal_evaluation.state.unwrap()) { ( DebugSolver::GoalEvaluation(goal_evaluation), DebugSolver::CanonicalGoalEvaluation(canonical_goal_evaluation), @@ -348,7 +349,7 @@ } } - pub fn goal_evaluation_kind(&mut self, kind: WipCanonicalGoalEvaluationKind) { + pub fn goal_evaluation_kind(&mut self, kind: WipCanonicalGoalEvaluationKind<'tcx>) { if let Some(this) = self.as_mut() { match this { DebugSolver::CanonicalGoalEvaluation(canonical_goal_evaluation) => { @@ -372,7 +373,7 @@ } pub fn goal_evaluation(&mut self, goal_evaluation: ProofTreeBuilder<'tcx>) { if let Some(this) = self.as_mut() { - match (this, goal_evaluation.state.unwrap().tree) { + match (this, *goal_evaluation.state.unwrap()) { ( DebugSolver::AddedGoalsEvaluation(WipAddedGoalsEvaluation { evaluations, .. @@ -396,7 +397,7 @@ } pub fn goal_evaluation_step(&mut self, goal_evaluation_step: ProofTreeBuilder<'tcx>) { if let Some(this) = self.as_mut() { - match (this, goal_evaluation_step.state.unwrap().tree) { + match (this, *goal_evaluation_step.state.unwrap()) { ( DebugSolver::CanonicalGoalEvaluation(canonical_goal_evaluations), DebugSolver::GoalEvaluationStep(goal_evaluation_step), @@ -444,7 +445,7 @@ pub fn finish_probe(&mut self, probe: ProofTreeBuilder<'tcx>) { if let Some(this) = self.as_mut() { - match (this, probe.state.unwrap().tree) { + match (this, *probe.state.unwrap()) { ( DebugSolver::Probe(WipProbe { steps, .. }) | DebugSolver::GoalEvaluationStep(WipGoalEvaluationStep { @@ -486,7 +487,7 @@ pub fn added_goals_evaluation(&mut self, added_goals_evaluation: ProofTreeBuilder<'tcx>) { if let Some(this) = self.as_mut() { - match (this, added_goals_evaluation.state.unwrap().tree) { + match (this, *added_goals_evaluation.state.unwrap()) { ( DebugSolver::GoalEvaluationStep(WipGoalEvaluationStep { evaluation: WipProbe { steps, .. 
}, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -32,18 +32,13 @@ mod canonicalize; mod eval_ctxt; mod fulfill; -mod inherent_projection; pub mod inspect; mod normalize; -mod opaques; mod project_goals; mod search_graph; mod trait_goals; -mod weak_types; -pub use eval_ctxt::{ - EvalCtxt, GenerateProofTree, InferCtxtEvalExt, InferCtxtSelectExt, UseGlobalCache, -}; +pub use eval_ctxt::{EvalCtxt, GenerateProofTree, InferCtxtEvalExt, InferCtxtSelectExt}; pub use fulfill::FulfillmentCtxt; pub(crate) use normalize::{deeply_normalize, deeply_normalize_with_skipped_universes}; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/normalize.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/normalize.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/normalize.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/normalize.rs 2023-12-21 16:55:28.000000000 +0000 @@ -129,7 +129,7 @@ self.at.cause.clone(), self.at.param_env, ty::ProjectionPredicate { - projection_ty: tcx.mk_alias_ty(uv.def, uv.args), + projection_ty: AliasTy::new(tcx, uv.def, uv.args), term: new_infer_ct.into(), }, ); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/opaques.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/opaques.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/opaques.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/opaques.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,85 +0,0 @@ -//! Computes a normalizes-to (projection) goal for opaque types. This goal -//! behaves differently depending on the param-env's reveal mode and whether -//! the opaque is in a defining scope. -use rustc_middle::traits::query::NoSolution; -use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; -use rustc_middle::traits::Reveal; -use rustc_middle::ty; -use rustc_middle::ty::util::NotUniqueParam; - -use super::{EvalCtxt, SolverMode}; - -impl<'tcx> EvalCtxt<'_, 'tcx> { - pub(super) fn normalize_opaque_type( - &mut self, - goal: Goal<'tcx, ty::ProjectionPredicate<'tcx>>, - ) -> QueryResult<'tcx> { - let tcx = self.tcx(); - let opaque_ty = goal.predicate.projection_ty; - let expected = goal.predicate.term.ty().expect("no such thing as an opaque const"); - - match (goal.param_env.reveal(), self.solver_mode()) { - (Reveal::UserFacing, SolverMode::Normal) => { - let Some(opaque_ty_def_id) = opaque_ty.def_id.as_local() else { - return Err(NoSolution); - }; - // FIXME: at some point we should call queries without defining - // new opaque types but having the existing opaque type definitions. - // This will require moving this below "Prefer opaques registered already". - if !self.can_define_opaque_ty(opaque_ty_def_id) { - return Err(NoSolution); - } - // FIXME: This may have issues when the args contain aliases... 
- match self.tcx().uses_unique_placeholders_ignoring_regions(opaque_ty.args) { - Err(NotUniqueParam::NotParam(param)) if param.is_non_region_infer() => { - return self.evaluate_added_goals_and_make_canonical_response( - Certainty::AMBIGUOUS, - ); - } - Err(_) => { - return Err(NoSolution); - } - Ok(()) => {} - } - // Prefer opaques registered already. - let opaque_type_key = - ty::OpaqueTypeKey { def_id: opaque_ty_def_id, args: opaque_ty.args }; - let matches = - self.unify_existing_opaque_tys(goal.param_env, opaque_type_key, expected); - if !matches.is_empty() { - if let Some(response) = self.try_merge_responses(&matches) { - return Ok(response); - } else { - return self.flounder(&matches); - } - } - // Otherwise, define a new opaque type - self.insert_hidden_type(opaque_type_key, goal.param_env, expected)?; - self.add_item_bounds_for_hidden_type( - opaque_ty.def_id, - opaque_ty.args, - goal.param_env, - expected, - ); - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } - (Reveal::UserFacing, SolverMode::Coherence) => { - // An impossible opaque type bound is the only way this goal will fail - // e.g. assigning `impl Copy := NotCopy` - self.add_item_bounds_for_hidden_type( - opaque_ty.def_id, - opaque_ty.args, - goal.param_env, - expected, - ); - self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) - } - (Reveal::All, _) => { - // FIXME: Add an assertion that opaque type storage is empty. - let actual = tcx.type_of(opaque_ty.def_id).instantiate(tcx, opaque_ty.args); - self.eq(goal.param_env, expected, actual)?; - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } - } - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/inherent_projection.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/inherent_projection.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/inherent_projection.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/inherent_projection.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,50 @@ +//! Computes a normalizes-to (projection) goal for inherent associated types, +//! `#![feature(inherent_associated_type)]`. Since astconv already determines +//! which impl the IAT is being projected from, we just: +//! 1. instantiate substs, +//! 2. equate the self type, and +//! 3. instantiate and register where clauses. 
+use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; +use rustc_middle::ty; + +use super::EvalCtxt; + +impl<'tcx> EvalCtxt<'_, 'tcx> { + pub(super) fn normalize_inherent_associated_type( + &mut self, + goal: Goal<'tcx, ty::ProjectionPredicate<'tcx>>, + ) -> QueryResult<'tcx> { + let tcx = self.tcx(); + let inherent = goal.predicate.projection_ty; + let expected = goal.predicate.term.ty().expect("inherent consts are treated separately"); + + let impl_def_id = tcx.parent(inherent.def_id); + let impl_substs = self.fresh_args_for_item(impl_def_id); + + // Equate impl header and add impl where clauses + self.eq( + goal.param_env, + inherent.self_ty(), + tcx.type_of(impl_def_id).instantiate(tcx, impl_substs), + )?; + + // Equate IAT with the RHS of the project goal + let inherent_substs = inherent.rebase_inherent_args_onto_impl(impl_substs, tcx); + self.eq( + goal.param_env, + expected, + tcx.type_of(inherent.def_id).instantiate(tcx, inherent_substs), + ) + .expect("expected goal term to be fully unconstrained"); + + // Check both where clauses on the impl and IAT + self.add_goals( + tcx.predicates_of(inherent.def_id) + .instantiate(tcx, inherent_substs) + .into_iter() + .map(|(pred, _)| goal.with(tcx, pred)), + ); + + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,680 @@ +use crate::traits::{check_args_compatible, specialization_graph}; + +use super::assembly::{self, structural_traits}; +use super::EvalCtxt; +use rustc_hir::def::DefKind; +use rustc_hir::def_id::DefId; +use rustc_hir::LangItem; +use rustc_infer::traits::query::NoSolution; +use rustc_infer::traits::specialization_graph::LeafDef; +use rustc_infer::traits::Reveal; +use rustc_middle::traits::solve::{ + CandidateSource, CanonicalResponse, Certainty, Goal, QueryResult, +}; +use rustc_middle::traits::BuiltinImplSource; +use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams}; +use rustc_middle::ty::ProjectionPredicate; +use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_middle::ty::{ToPredicate, TypeVisitableExt}; +use rustc_span::{sym, ErrorGuaranteed, DUMMY_SP}; + +mod inherent_projection; +mod opaques; +mod weak_types; + +impl<'tcx> EvalCtxt<'_, 'tcx> { + #[instrument(level = "debug", skip(self), ret)] + pub(super) fn compute_projection_goal( + &mut self, + goal: Goal<'tcx, ProjectionPredicate<'tcx>>, + ) -> QueryResult<'tcx> { + let def_id = goal.predicate.def_id(); + match self.tcx().def_kind(def_id) { + DefKind::AssocTy | DefKind::AssocConst => { + // To only compute normalization once for each projection we only + // assemble normalization candidates if the expected term is an + // unconstrained inference variable. + // + // Why: For better cache hits, since if we have an unconstrained RHS then + // there are only as many cache keys as there are (canonicalized) alias + // types in each normalizes-to goal. This also weakens inference in a + // forwards-compatible way so we don't use the value of the RHS term to + // affect candidate assembly for projections. + // + // E.g. 
for `::Assoc == u32` we recursively compute the goal + // `exists ::Assoc == U` and then take the resulting type for + // `U` and equate it with `u32`. This means that we don't need a separate + // projection cache in the solver, since we're piggybacking off of regular + // goal caching. + if self.term_is_fully_unconstrained(goal) { + match self.tcx().associated_item(def_id).container { + ty::AssocItemContainer::TraitContainer => { + let candidates = self.assemble_and_evaluate_candidates(goal); + self.merge_candidates(candidates) + } + ty::AssocItemContainer::ImplContainer => { + self.normalize_inherent_associated_type(goal) + } + } + } else { + self.set_normalizes_to_hack_goal(goal); + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } + } + DefKind::AnonConst => self.normalize_anon_const(goal), + DefKind::OpaqueTy => self.normalize_opaque_type(goal), + DefKind::TyAlias => self.normalize_weak_type(goal), + kind => bug!("unknown DefKind {} in projection goal: {goal:#?}", kind.descr(def_id)), + } + } + + #[instrument(level = "debug", skip(self), ret)] + fn normalize_anon_const( + &mut self, + goal: Goal<'tcx, ty::ProjectionPredicate<'tcx>>, + ) -> QueryResult<'tcx> { + if let Some(normalized_const) = self.try_const_eval_resolve( + goal.param_env, + ty::UnevaluatedConst::new( + goal.predicate.projection_ty.def_id, + goal.predicate.projection_ty.args, + ), + self.tcx() + .type_of(goal.predicate.projection_ty.def_id) + .no_bound_vars() + .expect("const ty should not rely on other generics"), + ) { + self.eq(goal.param_env, normalized_const, goal.predicate.term.ct().unwrap())?; + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } else { + self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) + } + } +} + +impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> { + fn self_ty(self) -> Ty<'tcx> { + self.self_ty() + } + + fn trait_ref(self, tcx: TyCtxt<'tcx>) -> ty::TraitRef<'tcx> { + self.projection_ty.trait_ref(tcx) + } + + fn polarity(self) -> ty::ImplPolarity { + ty::ImplPolarity::Positive + } + + fn with_self_ty(self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> Self { + self.with_self_ty(tcx, self_ty) + } + + fn trait_def_id(self, tcx: TyCtxt<'tcx>) -> DefId { + self.trait_def_id(tcx) + } + + fn probe_and_match_goal_against_assumption( + ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + assumption: ty::Clause<'tcx>, + then: impl FnOnce(&mut EvalCtxt<'_, 'tcx>) -> QueryResult<'tcx>, + ) -> QueryResult<'tcx> { + if let Some(projection_pred) = assumption.as_projection_clause() { + if projection_pred.projection_def_id() == goal.predicate.def_id() { + let tcx = ecx.tcx(); + ecx.probe_misc_candidate("assumption").enter(|ecx| { + let assumption_projection_pred = + ecx.instantiate_binder_with_infer(projection_pred); + ecx.eq( + goal.param_env, + goal.predicate.projection_ty, + assumption_projection_pred.projection_ty, + )?; + ecx.eq(goal.param_env, goal.predicate.term, assumption_projection_pred.term) + .expect("expected goal term to be fully unconstrained"); + + // Add GAT where clauses from the trait's definition + ecx.add_goals( + tcx.predicates_of(goal.predicate.def_id()) + .instantiate_own(tcx, goal.predicate.projection_ty.args) + .map(|(pred, _)| goal.with(tcx, pred)), + ); + + then(ecx) + }) + } else { + Err(NoSolution) + } + } else { + Err(NoSolution) + } + } + + fn consider_impl_candidate( + ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, ProjectionPredicate<'tcx>>, + impl_def_id: DefId, + ) -> 
QueryResult<'tcx> { + let tcx = ecx.tcx(); + + let goal_trait_ref = goal.predicate.projection_ty.trait_ref(tcx); + let impl_trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); + let drcx = DeepRejectCtxt { treat_obligation_params: TreatParams::ForLookup }; + if !drcx.args_refs_may_unify(goal_trait_ref.args, impl_trait_ref.skip_binder().args) { + return Err(NoSolution); + } + + ecx.probe_trait_candidate(CandidateSource::Impl(impl_def_id)).enter(|ecx| { + let impl_args = ecx.fresh_args_for_item(impl_def_id); + let impl_trait_ref = impl_trait_ref.instantiate(tcx, impl_args); + + ecx.eq(goal.param_env, goal_trait_ref, impl_trait_ref)?; + + let where_clause_bounds = tcx + .predicates_of(impl_def_id) + .instantiate(tcx, impl_args) + .predicates + .into_iter() + .map(|pred| goal.with(tcx, pred)); + ecx.add_goals(where_clause_bounds); + + // Add GAT where clauses from the trait's definition + ecx.add_goals( + tcx.predicates_of(goal.predicate.def_id()) + .instantiate_own(tcx, goal.predicate.projection_ty.args) + .map(|(pred, _)| goal.with(tcx, pred)), + ); + + // In case the associated item is hidden due to specialization, we have to + // return ambiguity this would otherwise be incomplete, resulting in + // unsoundness during coherence (#105782). + let Some(assoc_def) = fetch_eligible_assoc_item_def( + ecx, + goal.param_env, + goal_trait_ref, + goal.predicate.def_id(), + impl_def_id, + )? + else { + return ecx.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS); + }; + + let error_response = |ecx: &mut EvalCtxt<'_, 'tcx>, reason| { + let guar = tcx.sess.delay_span_bug(tcx.def_span(assoc_def.item.def_id), reason); + let error_term = match assoc_def.item.kind { + ty::AssocKind::Const => ty::Const::new_error( + tcx, + guar, + tcx.type_of(goal.predicate.def_id()) + .instantiate(tcx, goal.predicate.projection_ty.args), + ) + .into(), + ty::AssocKind::Type => Ty::new_error(tcx, guar).into(), + ty::AssocKind::Fn => unreachable!(), + }; + ecx.eq(goal.param_env, goal.predicate.term, error_term) + .expect("expected goal term to be fully unconstrained"); + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }; + + if !assoc_def.item.defaultness(tcx).has_value() { + return error_response(ecx, "missing value for assoc item in impl"); + } + + // Getting the right args here is complex, e.g. given: + // - a goal ` as Trait>::Assoc` + // - the applicable impl `impl Trait for Vec` + // - and the impl which defines `Assoc` being `impl Trait for Vec` + // + // We first rebase the goal args onto the impl, going from `[Vec, i32, u64]` + // to `[u32, u64]`. + // + // And then map these args to the args of the defining impl of `Assoc`, going + // from `[u32, u64]` to `[u32, i32, u64]`. + let impl_args_with_gat = goal.predicate.projection_ty.args.rebase_onto( + tcx, + goal_trait_ref.def_id, + impl_args, + ); + let args = ecx.translate_args( + goal.param_env, + impl_def_id, + impl_args_with_gat, + assoc_def.defining_node, + ); + + if !check_args_compatible(tcx, assoc_def.item, args) { + return error_response( + ecx, + "associated item has mismatched generic item arguments", + ); + } + + // Finally we construct the actual value of the associated type. 
+ let term = match assoc_def.item.kind { + ty::AssocKind::Type => tcx.type_of(assoc_def.item.def_id).map_bound(|ty| ty.into()), + ty::AssocKind::Const => { + if tcx.features().associated_const_equality { + bug!("associated const projection is not supported yet") + } else { + ty::EarlyBinder::bind( + ty::Const::new_error_with_message( + tcx, + tcx.type_of(assoc_def.item.def_id).instantiate_identity(), + DUMMY_SP, + "associated const projection is not supported yet", + ) + .into(), + ) + } + } + ty::AssocKind::Fn => unreachable!("we should never project to a fn"), + }; + + ecx.eq(goal.param_env, goal.predicate.term, term.instantiate(tcx, args)) + .expect("expected goal term to be fully unconstrained"); + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }) + } + + /// Fail to normalize if the predicate contains an error, alternatively, we could normalize to `ty::Error` + /// and succeed. Can experiment with this to figure out what results in better error messages. + fn consider_error_guaranteed_candidate( + _ecx: &mut EvalCtxt<'_, 'tcx>, + _guar: ErrorGuaranteed, + ) -> QueryResult<'tcx> { + Err(NoSolution) + } + + fn consider_auto_trait_candidate( + ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + ecx.tcx().sess.delay_span_bug( + ecx.tcx().def_span(goal.predicate.def_id()), + "associated types not allowed on auto traits", + ); + Err(NoSolution) + } + + fn consider_trait_alias_candidate( + _ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + bug!("trait aliases do not have associated types: {:?}", goal); + } + + fn consider_builtin_sized_candidate( + _ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + bug!("`Sized` does not have an associated type: {:?}", goal); + } + + fn consider_builtin_copy_clone_candidate( + _ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + bug!("`Copy`/`Clone` does not have an associated type: {:?}", goal); + } + + fn consider_builtin_pointer_like_candidate( + _ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + bug!("`PointerLike` does not have an associated type: {:?}", goal); + } + + fn consider_builtin_fn_ptr_trait_candidate( + _ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + bug!("`FnPtr` does not have an associated type: {:?}", goal); + } + + fn consider_builtin_fn_trait_candidates( + ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + goal_kind: ty::ClosureKind, + ) -> QueryResult<'tcx> { + let tcx = ecx.tcx(); + let tupled_inputs_and_output = + match structural_traits::extract_tupled_inputs_and_output_from_callable( + tcx, + goal.predicate.self_ty(), + goal_kind, + )? { + Some(tupled_inputs_and_output) => tupled_inputs_and_output, + None => { + return ecx + .evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS); + } + }; + let output_is_sized_pred = tupled_inputs_and_output.map_bound(|(_, output)| { + ty::TraitRef::from_lang_item(tcx, LangItem::Sized, DUMMY_SP, [output]) + }); + + let pred = tupled_inputs_and_output + .map_bound(|(inputs, output)| ty::ProjectionPredicate { + projection_ty: ty::AliasTy::new( + tcx, + goal.predicate.def_id(), + [goal.predicate.self_ty(), inputs], + ), + term: output.into(), + }) + .to_predicate(tcx); + + // A built-in `Fn` impl only holds if the output is sized. + // (FIXME: technically we only need to check this if the type is a fn ptr...) 
+ Self::consider_implied_clause(ecx, goal, pred, [goal.with(tcx, output_is_sized_pred)]) + } + + fn consider_builtin_tuple_candidate( + _ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + bug!("`Tuple` does not have an associated type: {:?}", goal); + } + + fn consider_builtin_pointee_candidate( + ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + let tcx = ecx.tcx(); + ecx.probe_misc_candidate("builtin pointee").enter(|ecx| { + let metadata_ty = match goal.predicate.self_ty().kind() { + ty::Bool + | ty::Char + | ty::Int(..) + | ty::Uint(..) + | ty::Float(..) + | ty::Array(..) + | ty::RawPtr(..) + | ty::Ref(..) + | ty::FnDef(..) + | ty::FnPtr(..) + | ty::Closure(..) + | ty::Infer(ty::IntVar(..) | ty::FloatVar(..)) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) + | ty::Never + | ty::Foreign(..) => tcx.types.unit, + + ty::Error(e) => Ty::new_error(tcx, *e), + + ty::Str | ty::Slice(_) => tcx.types.usize, + + ty::Dynamic(_, _, _) => { + let dyn_metadata = tcx.require_lang_item(LangItem::DynMetadata, None); + tcx.type_of(dyn_metadata) + .instantiate(tcx, &[ty::GenericArg::from(goal.predicate.self_ty())]) + } + + ty::Alias(_, _) | ty::Param(_) | ty::Placeholder(..) => { + // FIXME(ptr_metadata): It would also be possible to return a `Ok(Ambig)` with no constraints. + let sized_predicate = ty::TraitRef::from_lang_item( + tcx, + LangItem::Sized, + DUMMY_SP, + [ty::GenericArg::from(goal.predicate.self_ty())], + ); + ecx.add_goal(goal.with(tcx, sized_predicate)); + tcx.types.unit + } + + ty::Adt(def, args) if def.is_struct() => match def.non_enum_variant().tail_opt() { + None => tcx.types.unit, + Some(field_def) => { + let self_ty = field_def.ty(tcx, args); + ecx.add_goal(goal.with(tcx, goal.predicate.with_self_ty(tcx, self_ty))); + return ecx + .evaluate_added_goals_and_make_canonical_response(Certainty::Yes); + } + }, + ty::Adt(_, _) => tcx.types.unit, + + ty::Tuple(elements) => match elements.last() { + None => tcx.types.unit, + Some(&self_ty) => { + ecx.add_goal(goal.with(tcx, goal.predicate.with_self_ty(tcx, self_ty))); + return ecx + .evaluate_added_goals_and_make_canonical_response(Certainty::Yes); + } + }, + + ty::Infer( + ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_), + ) + | ty::Bound(..) => bug!( + "unexpected self ty `{:?}` when normalizing `::Metadata`", + goal.predicate.self_ty() + ), + }; + + ecx.eq(goal.param_env, goal.predicate.term, metadata_ty.into()) + .expect("expected goal term to be fully unconstrained"); + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }) + } + + fn consider_builtin_future_candidate( + ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + let self_ty = goal.predicate.self_ty(); + let ty::Coroutine(def_id, args, _) = *self_ty.kind() else { + return Err(NoSolution); + }; + + // Coroutines are not futures unless they come from `async` desugaring + let tcx = ecx.tcx(); + if !tcx.coroutine_is_async(def_id) { + return Err(NoSolution); + } + + let term = args.as_coroutine().return_ty().into(); + + Self::consider_implied_clause( + ecx, + goal, + ty::ProjectionPredicate { + projection_ty: ty::AliasTy::new(ecx.tcx(), goal.predicate.def_id(), [self_ty]), + term, + } + .to_predicate(tcx), + // Technically, we need to check that the future type is Sized, + // but that's already proven by the coroutine being WF. 
+ [], + ) + } + + fn consider_builtin_iterator_candidate( + ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + let self_ty = goal.predicate.self_ty(); + let ty::Coroutine(def_id, args, _) = *self_ty.kind() else { + return Err(NoSolution); + }; + + // Coroutines are not Iterators unless they come from `gen` desugaring + let tcx = ecx.tcx(); + if !tcx.coroutine_is_gen(def_id) { + return Err(NoSolution); + } + + let term = args.as_coroutine().yield_ty().into(); + + Self::consider_implied_clause( + ecx, + goal, + ty::ProjectionPredicate { + projection_ty: ty::AliasTy::new(ecx.tcx(), goal.predicate.def_id(), [self_ty]), + term, + } + .to_predicate(tcx), + // Technically, we need to check that the iterator type is Sized, + // but that's already proven by the generator being WF. + [], + ) + } + + fn consider_builtin_coroutine_candidate( + ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + let self_ty = goal.predicate.self_ty(); + let ty::Coroutine(def_id, args, _) = *self_ty.kind() else { + return Err(NoSolution); + }; + + // `async`-desugared coroutines do not implement the coroutine trait + let tcx = ecx.tcx(); + if !tcx.is_general_coroutine(def_id) { + return Err(NoSolution); + } + + let coroutine = args.as_coroutine(); + + let name = tcx.associated_item(goal.predicate.def_id()).name; + let term = if name == sym::Return { + coroutine.return_ty().into() + } else if name == sym::Yield { + coroutine.yield_ty().into() + } else { + bug!("unexpected associated item `<{self_ty} as Coroutine>::{name}`") + }; + + Self::consider_implied_clause( + ecx, + goal, + ty::ProjectionPredicate { + projection_ty: ty::AliasTy::new( + ecx.tcx(), + goal.predicate.def_id(), + [self_ty, coroutine.resume_ty()], + ), + term, + } + .to_predicate(tcx), + // Technically, we need to check that the coroutine type is Sized, + // but that's already proven by the coroutine being WF. + [], + ) + } + + fn consider_unsize_to_dyn_candidate( + _ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + bug!("`Unsize` does not have an associated type: {:?}", goal) + } + + fn consider_structural_builtin_unsize_candidates( + _ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> Vec<(CanonicalResponse<'tcx>, BuiltinImplSource)> { + bug!("`Unsize` does not have an associated type: {:?}", goal); + } + + fn consider_builtin_discriminant_kind_candidate( + ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + let self_ty = goal.predicate.self_ty(); + let discriminant_ty = match *self_ty.kind() { + ty::Bool + | ty::Char + | ty::Int(..) + | ty::Uint(..) + | ty::Float(..) + | ty::Array(..) + | ty::RawPtr(..) + | ty::Ref(..) + | ty::FnDef(..) + | ty::FnPtr(..) + | ty::Closure(..) + | ty::Infer(ty::IntVar(..) | ty::FloatVar(..)) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) + | ty::Never + | ty::Foreign(..) + | ty::Adt(_, _) + | ty::Str + | ty::Slice(_) + | ty::Dynamic(_, _, _) + | ty::Tuple(_) + | ty::Error(_) => self_ty.discriminant_ty(ecx.tcx()), + + // We do not call `Ty::discriminant_ty` on alias, param, or placeholder + // types, which return `::Discriminant` + // (or ICE in the case of placeholders). Projecting a type to itself + // is never really productive. + ty::Alias(_, _) | ty::Param(_) | ty::Placeholder(..) => { + return Err(NoSolution); + } + + ty::Infer(ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) + | ty::Bound(..) 
=> bug!( + "unexpected self ty `{:?}` when normalizing `::Discriminant`", + goal.predicate.self_ty() + ), + }; + + ecx.probe_misc_candidate("builtin discriminant kind").enter(|ecx| { + ecx.eq(goal.param_env, goal.predicate.term, discriminant_ty.into()) + .expect("expected goal term to be fully unconstrained"); + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }) + } + + fn consider_builtin_destruct_candidate( + _ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + bug!("`Destruct` does not have an associated type: {:?}", goal); + } + + fn consider_builtin_transmute_candidate( + _ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + bug!("`BikeshedIntrinsicFrom` does not have an associated type: {:?}", goal) + } +} + +/// This behavior is also implemented in `rustc_ty_utils` and in the old `project` code. +/// +/// FIXME: We should merge these 3 implementations as it's likely that they otherwise +/// diverge. +#[instrument(level = "debug", skip(ecx, param_env), ret)] +fn fetch_eligible_assoc_item_def<'tcx>( + ecx: &EvalCtxt<'_, 'tcx>, + param_env: ty::ParamEnv<'tcx>, + goal_trait_ref: ty::TraitRef<'tcx>, + trait_assoc_def_id: DefId, + impl_def_id: DefId, +) -> Result, NoSolution> { + let node_item = specialization_graph::assoc_def(ecx.tcx(), impl_def_id, trait_assoc_def_id) + .map_err(|ErrorGuaranteed { .. }| NoSolution)?; + + let eligible = if node_item.is_final() { + // Non-specializable items are always projectable. + true + } else { + // Only reveal a specializable default if we're past type-checking + // and the obligation is monomorphic, otherwise passes such as + // transmute checking and polymorphic MIR optimizations could + // get a result which isn't correct for all monomorphizations. + if param_env.reveal() == Reveal::All { + let poly_trait_ref = ecx.resolve_vars_if_possible(goal_trait_ref); + !poly_trait_ref.still_further_specializable() + } else { + debug!(?node_item.item.def_id, "not eligible due to default"); + false + } + }; + + if eligible { Ok(Some(node_item)) } else { Ok(None) } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/opaques.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/opaques.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/opaques.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/opaques.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,85 @@ +//! Computes a normalizes-to (projection) goal for opaque types. This goal +//! behaves differently depending on the param-env's reveal mode and whether +//! the opaque is in a defining scope. 
+use rustc_middle::traits::query::NoSolution; +use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; +use rustc_middle::traits::Reveal; +use rustc_middle::ty; +use rustc_middle::ty::util::NotUniqueParam; + +use crate::solve::{EvalCtxt, SolverMode}; + +impl<'tcx> EvalCtxt<'_, 'tcx> { + pub(super) fn normalize_opaque_type( + &mut self, + goal: Goal<'tcx, ty::ProjectionPredicate<'tcx>>, + ) -> QueryResult<'tcx> { + let tcx = self.tcx(); + let opaque_ty = goal.predicate.projection_ty; + let expected = goal.predicate.term.ty().expect("no such thing as an opaque const"); + + match (goal.param_env.reveal(), self.solver_mode()) { + (Reveal::UserFacing, SolverMode::Normal) => { + let Some(opaque_ty_def_id) = opaque_ty.def_id.as_local() else { + return Err(NoSolution); + }; + // FIXME: at some point we should call queries without defining + // new opaque types but having the existing opaque type definitions. + // This will require moving this below "Prefer opaques registered already". + if !self.can_define_opaque_ty(opaque_ty_def_id) { + return Err(NoSolution); + } + // FIXME: This may have issues when the args contain aliases... + match self.tcx().uses_unique_placeholders_ignoring_regions(opaque_ty.args) { + Err(NotUniqueParam::NotParam(param)) if param.is_non_region_infer() => { + return self.evaluate_added_goals_and_make_canonical_response( + Certainty::AMBIGUOUS, + ); + } + Err(_) => { + return Err(NoSolution); + } + Ok(()) => {} + } + // Prefer opaques registered already. + let opaque_type_key = + ty::OpaqueTypeKey { def_id: opaque_ty_def_id, args: opaque_ty.args }; + let matches = + self.unify_existing_opaque_tys(goal.param_env, opaque_type_key, expected); + if !matches.is_empty() { + if let Some(response) = self.try_merge_responses(&matches) { + return Ok(response); + } else { + return self.flounder(&matches); + } + } + // Otherwise, define a new opaque type + self.insert_hidden_type(opaque_type_key, goal.param_env, expected)?; + self.add_item_bounds_for_hidden_type( + opaque_ty.def_id, + opaque_ty.args, + goal.param_env, + expected, + ); + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } + (Reveal::UserFacing, SolverMode::Coherence) => { + // An impossible opaque type bound is the only way this goal will fail + // e.g. assigning `impl Copy := NotCopy` + self.add_item_bounds_for_hidden_type( + opaque_ty.def_id, + opaque_ty.args, + goal.param_env, + expected, + ); + self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) + } + (Reveal::All, _) => { + // FIXME: Add an assertion that opaque type storage is empty. + let actual = tcx.type_of(opaque_ty.def_id).instantiate(tcx, opaque_ty.args); + self.eq(goal.param_env, expected, actual)?; + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/weak_types.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/weak_types.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/weak_types.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals/weak_types.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,34 @@ +//! Computes a normalizes-to (projection) goal for inherent associated types, +//! `#![feature(lazy_type_alias)]` and `#![feature(type_alias_impl_trait)]`. +//! +//! 
Since a weak alias is not ambiguous, this just computes the `type_of` of +//! the alias and registers the where-clauses of the type alias. +use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; +use rustc_middle::ty; + +use super::EvalCtxt; + +impl<'tcx> EvalCtxt<'_, 'tcx> { + pub(super) fn normalize_weak_type( + &mut self, + goal: Goal<'tcx, ty::ProjectionPredicate<'tcx>>, + ) -> QueryResult<'tcx> { + let tcx = self.tcx(); + let weak_ty = goal.predicate.projection_ty; + let expected = goal.predicate.term.ty().expect("no such thing as a const alias"); + + let actual = tcx.type_of(weak_ty.def_id).instantiate(tcx, weak_ty.args); + self.eq(goal.param_env, expected, actual)?; + + // Check where clauses + self.add_goals( + tcx.predicates_of(weak_ty.def_id) + .instantiate(tcx, weak_ty.args) + .predicates + .into_iter() + .map(|pred| goal.with(tcx, pred)), + ); + + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/project_goals.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,637 +0,0 @@ -use crate::traits::{check_args_compatible, specialization_graph}; - -use super::assembly::{self, structural_traits}; -use super::EvalCtxt; -use rustc_hir::def::DefKind; -use rustc_hir::def_id::DefId; -use rustc_hir::LangItem; -use rustc_infer::traits::query::NoSolution; -use rustc_infer::traits::specialization_graph::LeafDef; -use rustc_infer::traits::Reveal; -use rustc_middle::traits::solve::{ - CandidateSource, CanonicalResponse, Certainty, Goal, QueryResult, -}; -use rustc_middle::traits::BuiltinImplSource; -use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams}; -use rustc_middle::ty::ProjectionPredicate; -use rustc_middle::ty::{self, Ty, TyCtxt}; -use rustc_middle::ty::{ToPredicate, TypeVisitableExt}; -use rustc_span::{sym, ErrorGuaranteed, DUMMY_SP}; - -impl<'tcx> EvalCtxt<'_, 'tcx> { - #[instrument(level = "debug", skip(self), ret)] - pub(super) fn compute_projection_goal( - &mut self, - goal: Goal<'tcx, ProjectionPredicate<'tcx>>, - ) -> QueryResult<'tcx> { - let def_id = goal.predicate.def_id(); - match self.tcx().def_kind(def_id) { - DefKind::AssocTy | DefKind::AssocConst => { - // To only compute normalization once for each projection we only - // assemble normalization candidates if the expected term is an - // unconstrained inference variable. - // - // Why: For better cache hits, since if we have an unconstrained RHS then - // there are only as many cache keys as there are (canonicalized) alias - // types in each normalizes-to goal. This also weakens inference in a - // forwards-compatible way so we don't use the value of the RHS term to - // affect candidate assembly for projections. - // - // E.g. for `::Assoc == u32` we recursively compute the goal - // `exists ::Assoc == U` and then take the resulting type for - // `U` and equate it with `u32`. This means that we don't need a separate - // projection cache in the solver, since we're piggybacking off of regular - // goal caching. 
- if self.term_is_fully_unconstrained(goal) { - match self.tcx().associated_item(def_id).container { - ty::AssocItemContainer::TraitContainer => { - let candidates = self.assemble_and_evaluate_candidates(goal); - self.merge_candidates(candidates) - } - ty::AssocItemContainer::ImplContainer => { - self.normalize_inherent_associated_type(goal) - } - } - } else { - self.set_normalizes_to_hack_goal(goal); - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } - } - DefKind::AnonConst => self.normalize_anon_const(goal), - DefKind::OpaqueTy => self.normalize_opaque_type(goal), - DefKind::TyAlias => self.normalize_weak_type(goal), - kind => bug!("unknown DefKind {} in projection goal: {goal:#?}", kind.descr(def_id)), - } - } - - #[instrument(level = "debug", skip(self), ret)] - fn normalize_anon_const( - &mut self, - goal: Goal<'tcx, ty::ProjectionPredicate<'tcx>>, - ) -> QueryResult<'tcx> { - if let Some(normalized_const) = self.try_const_eval_resolve( - goal.param_env, - ty::UnevaluatedConst::new( - goal.predicate.projection_ty.def_id, - goal.predicate.projection_ty.args, - ), - self.tcx() - .type_of(goal.predicate.projection_ty.def_id) - .no_bound_vars() - .expect("const ty should not rely on other generics"), - ) { - self.eq(goal.param_env, normalized_const, goal.predicate.term.ct().unwrap())?; - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } else { - self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) - } - } -} - -impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> { - fn self_ty(self) -> Ty<'tcx> { - self.self_ty() - } - - fn trait_ref(self, tcx: TyCtxt<'tcx>) -> ty::TraitRef<'tcx> { - self.projection_ty.trait_ref(tcx) - } - - fn with_self_ty(self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> Self { - self.with_self_ty(tcx, self_ty) - } - - fn trait_def_id(self, tcx: TyCtxt<'tcx>) -> DefId { - self.trait_def_id(tcx) - } - - fn probe_and_match_goal_against_assumption( - ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - assumption: ty::Clause<'tcx>, - then: impl FnOnce(&mut EvalCtxt<'_, 'tcx>) -> QueryResult<'tcx>, - ) -> QueryResult<'tcx> { - if let Some(projection_pred) = assumption.as_projection_clause() { - if projection_pred.projection_def_id() == goal.predicate.def_id() { - let tcx = ecx.tcx(); - ecx.probe_misc_candidate("assumption").enter(|ecx| { - let assumption_projection_pred = - ecx.instantiate_binder_with_infer(projection_pred); - ecx.eq( - goal.param_env, - goal.predicate.projection_ty, - assumption_projection_pred.projection_ty, - )?; - ecx.eq(goal.param_env, goal.predicate.term, assumption_projection_pred.term) - .expect("expected goal term to be fully unconstrained"); - - // Add GAT where clauses from the trait's definition - ecx.add_goals( - tcx.predicates_of(goal.predicate.def_id()) - .instantiate_own(tcx, goal.predicate.projection_ty.args) - .map(|(pred, _)| goal.with(tcx, pred)), - ); - - then(ecx) - }) - } else { - Err(NoSolution) - } - } else { - Err(NoSolution) - } - } - - fn consider_impl_candidate( - ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, ProjectionPredicate<'tcx>>, - impl_def_id: DefId, - ) -> QueryResult<'tcx> { - let tcx = ecx.tcx(); - - let goal_trait_ref = goal.predicate.projection_ty.trait_ref(tcx); - let impl_trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); - let drcx = DeepRejectCtxt { treat_obligation_params: TreatParams::ForLookup }; - if !drcx.args_refs_may_unify(goal_trait_ref.args, impl_trait_ref.skip_binder().args) { - return Err(NoSolution); - } - 
- ecx.probe_trait_candidate(CandidateSource::Impl(impl_def_id)).enter(|ecx| { - let impl_args = ecx.fresh_args_for_item(impl_def_id); - let impl_trait_ref = impl_trait_ref.instantiate(tcx, impl_args); - - ecx.eq(goal.param_env, goal_trait_ref, impl_trait_ref)?; - - let where_clause_bounds = tcx - .predicates_of(impl_def_id) - .instantiate(tcx, impl_args) - .predicates - .into_iter() - .map(|pred| goal.with(tcx, pred)); - ecx.add_goals(where_clause_bounds); - - // Add GAT where clauses from the trait's definition - ecx.add_goals( - tcx.predicates_of(goal.predicate.def_id()) - .instantiate_own(tcx, goal.predicate.projection_ty.args) - .map(|(pred, _)| goal.with(tcx, pred)), - ); - - // In case the associated item is hidden due to specialization, we have to - // return ambiguity this would otherwise be incomplete, resulting in - // unsoundness during coherence (#105782). - let Some(assoc_def) = fetch_eligible_assoc_item_def( - ecx, - goal.param_env, - goal_trait_ref, - goal.predicate.def_id(), - impl_def_id, - )? - else { - return ecx.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS); - }; - - let error_response = |ecx: &mut EvalCtxt<'_, 'tcx>, reason| { - let guar = tcx.sess.delay_span_bug(tcx.def_span(assoc_def.item.def_id), reason); - let error_term = match assoc_def.item.kind { - ty::AssocKind::Const => ty::Const::new_error( - tcx, - guar, - tcx.type_of(goal.predicate.def_id()) - .instantiate(tcx, goal.predicate.projection_ty.args), - ) - .into(), - ty::AssocKind::Type => Ty::new_error(tcx, guar).into(), - ty::AssocKind::Fn => unreachable!(), - }; - ecx.eq(goal.param_env, goal.predicate.term, error_term) - .expect("expected goal term to be fully unconstrained"); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - }; - - if !assoc_def.item.defaultness(tcx).has_value() { - return error_response(ecx, "missing value for assoc item in impl"); - } - - // Getting the right args here is complex, e.g. given: - // - a goal ` as Trait>::Assoc` - // - the applicable impl `impl Trait for Vec` - // - and the impl which defines `Assoc` being `impl Trait for Vec` - // - // We first rebase the goal args onto the impl, going from `[Vec, i32, u64]` - // to `[u32, u64]`. - // - // And then map these args to the args of the defining impl of `Assoc`, going - // from `[u32, u64]` to `[u32, i32, u64]`. - let impl_args_with_gat = goal.predicate.projection_ty.args.rebase_onto( - tcx, - goal_trait_ref.def_id, - impl_args, - ); - let args = ecx.translate_args( - goal.param_env, - impl_def_id, - impl_args_with_gat, - assoc_def.defining_node, - ); - - if !check_args_compatible(tcx, assoc_def.item, args) { - return error_response( - ecx, - "associated item has mismatched generic item arguments", - ); - } - - // Finally we construct the actual value of the associated type. 
- let term = match assoc_def.item.kind { - ty::AssocKind::Type => tcx.type_of(assoc_def.item.def_id).map_bound(|ty| ty.into()), - ty::AssocKind::Const => { - if tcx.features().associated_const_equality { - bug!("associated const projection is not supported yet") - } else { - ty::EarlyBinder::bind( - ty::Const::new_error_with_message( - tcx, - tcx.type_of(assoc_def.item.def_id).instantiate_identity(), - DUMMY_SP, - "associated const projection is not supported yet", - ) - .into(), - ) - } - } - ty::AssocKind::Fn => unreachable!("we should never project to a fn"), - }; - - ecx.eq(goal.param_env, goal.predicate.term, term.instantiate(tcx, args)) - .expect("expected goal term to be fully unconstrained"); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - }) - } - - /// Fail to normalize if the predicate contains an error, alternatively, we could normalize to `ty::Error` - /// and succeed. Can experiment with this to figure out what results in better error messages. - fn consider_error_guaranteed_candidate( - _ecx: &mut EvalCtxt<'_, 'tcx>, - _guar: ErrorGuaranteed, - ) -> QueryResult<'tcx> { - Err(NoSolution) - } - - fn consider_auto_trait_candidate( - ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - ecx.tcx().sess.delay_span_bug( - ecx.tcx().def_span(goal.predicate.def_id()), - "associated types not allowed on auto traits", - ); - Err(NoSolution) - } - - fn consider_trait_alias_candidate( - _ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - bug!("trait aliases do not have associated types: {:?}", goal); - } - - fn consider_builtin_sized_candidate( - _ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - bug!("`Sized` does not have an associated type: {:?}", goal); - } - - fn consider_builtin_copy_clone_candidate( - _ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - bug!("`Copy`/`Clone` does not have an associated type: {:?}", goal); - } - - fn consider_builtin_pointer_like_candidate( - _ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - bug!("`PointerLike` does not have an associated type: {:?}", goal); - } - - fn consider_builtin_fn_ptr_trait_candidate( - _ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - bug!("`FnPtr` does not have an associated type: {:?}", goal); - } - - fn consider_builtin_fn_trait_candidates( - ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - goal_kind: ty::ClosureKind, - ) -> QueryResult<'tcx> { - let tcx = ecx.tcx(); - let tupled_inputs_and_output = - match structural_traits::extract_tupled_inputs_and_output_from_callable( - tcx, - goal.predicate.self_ty(), - goal_kind, - )? { - Some(tupled_inputs_and_output) => tupled_inputs_and_output, - None => { - return ecx - .evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS); - } - }; - let output_is_sized_pred = tupled_inputs_and_output.map_bound(|(_, output)| { - ty::TraitRef::from_lang_item(tcx, LangItem::Sized, DUMMY_SP, [output]) - }); - - let pred = ty::Clause::from_projection_clause( - tcx, - tupled_inputs_and_output.map_bound(|(inputs, output)| ty::ProjectionPredicate { - projection_ty: tcx - .mk_alias_ty(goal.predicate.def_id(), [goal.predicate.self_ty(), inputs]), - term: output.into(), - }), - ); - - // A built-in `Fn` impl only holds if the output is sized. - // (FIXME: technically we only need to check this if the type is a fn ptr...) 
- Self::consider_implied_clause(ecx, goal, pred, [goal.with(tcx, output_is_sized_pred)]) - } - - fn consider_builtin_tuple_candidate( - _ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - bug!("`Tuple` does not have an associated type: {:?}", goal); - } - - fn consider_builtin_pointee_candidate( - ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - let tcx = ecx.tcx(); - ecx.probe_misc_candidate("builtin pointee").enter(|ecx| { - let metadata_ty = match goal.predicate.self_ty().kind() { - ty::Bool - | ty::Char - | ty::Int(..) - | ty::Uint(..) - | ty::Float(..) - | ty::Array(..) - | ty::RawPtr(..) - | ty::Ref(..) - | ty::FnDef(..) - | ty::FnPtr(..) - | ty::Closure(..) - | ty::Infer(ty::IntVar(..) | ty::FloatVar(..)) - | ty::Generator(..) - | ty::GeneratorWitness(..) - | ty::Never - | ty::Foreign(..) => tcx.types.unit, - - ty::Error(e) => Ty::new_error(tcx, *e), - - ty::Str | ty::Slice(_) => tcx.types.usize, - - ty::Dynamic(_, _, _) => { - let dyn_metadata = tcx.require_lang_item(LangItem::DynMetadata, None); - tcx.type_of(dyn_metadata) - .instantiate(tcx, &[ty::GenericArg::from(goal.predicate.self_ty())]) - } - - ty::Alias(_, _) | ty::Param(_) | ty::Placeholder(..) => { - // FIXME(ptr_metadata): It would also be possible to return a `Ok(Ambig)` with no constraints. - let sized_predicate = ty::TraitRef::from_lang_item( - tcx, - LangItem::Sized, - DUMMY_SP, - [ty::GenericArg::from(goal.predicate.self_ty())], - ); - ecx.add_goal(goal.with(tcx, sized_predicate)); - tcx.types.unit - } - - ty::Adt(def, args) if def.is_struct() => match def.non_enum_variant().tail_opt() { - None => tcx.types.unit, - Some(field_def) => { - let self_ty = field_def.ty(tcx, args); - ecx.add_goal(goal.with(tcx, goal.predicate.with_self_ty(tcx, self_ty))); - return ecx - .evaluate_added_goals_and_make_canonical_response(Certainty::Yes); - } - }, - ty::Adt(_, _) => tcx.types.unit, - - ty::Tuple(elements) => match elements.last() { - None => tcx.types.unit, - Some(&self_ty) => { - ecx.add_goal(goal.with(tcx, goal.predicate.with_self_ty(tcx, self_ty))); - return ecx - .evaluate_added_goals_and_make_canonical_response(Certainty::Yes); - } - }, - - ty::Infer( - ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_), - ) - | ty::Bound(..) => bug!( - "unexpected self ty `{:?}` when normalizing `::Metadata`", - goal.predicate.self_ty() - ), - }; - - ecx.eq(goal.param_env, goal.predicate.term, metadata_ty.into()) - .expect("expected goal term to be fully unconstrained"); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - }) - } - - fn consider_builtin_future_candidate( - ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - let self_ty = goal.predicate.self_ty(); - let ty::Generator(def_id, args, _) = *self_ty.kind() else { - return Err(NoSolution); - }; - - // Generators are not futures unless they come from `async` desugaring - let tcx = ecx.tcx(); - if !tcx.generator_is_async(def_id) { - return Err(NoSolution); - } - - let term = args.as_generator().return_ty().into(); - - Self::consider_implied_clause( - ecx, - goal, - ty::ProjectionPredicate { - projection_ty: ecx.tcx().mk_alias_ty(goal.predicate.def_id(), [self_ty]), - term, - } - .to_predicate(tcx), - // Technically, we need to check that the future type is Sized, - // but that's already proven by the generator being WF. 
- [], - ) - } - - fn consider_builtin_generator_candidate( - ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - let self_ty = goal.predicate.self_ty(); - let ty::Generator(def_id, args, _) = *self_ty.kind() else { - return Err(NoSolution); - }; - - // `async`-desugared generators do not implement the generator trait - let tcx = ecx.tcx(); - if tcx.generator_is_async(def_id) { - return Err(NoSolution); - } - - let generator = args.as_generator(); - - let name = tcx.associated_item(goal.predicate.def_id()).name; - let term = if name == sym::Return { - generator.return_ty().into() - } else if name == sym::Yield { - generator.yield_ty().into() - } else { - bug!("unexpected associated item `<{self_ty} as Generator>::{name}`") - }; - - Self::consider_implied_clause( - ecx, - goal, - ty::ProjectionPredicate { - projection_ty: ecx - .tcx() - .mk_alias_ty(goal.predicate.def_id(), [self_ty, generator.resume_ty()]), - term, - } - .to_predicate(tcx), - // Technically, we need to check that the future type is Sized, - // but that's already proven by the generator being WF. - [], - ) - } - - fn consider_unsize_to_dyn_candidate( - _ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - bug!("`Unsize` does not have an associated type: {:?}", goal) - } - - fn consider_structural_builtin_unsize_candidates( - _ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> Vec<(CanonicalResponse<'tcx>, BuiltinImplSource)> { - bug!("`Unsize` does not have an associated type: {:?}", goal); - } - - fn consider_builtin_discriminant_kind_candidate( - ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - let self_ty = goal.predicate.self_ty(); - let discriminant_ty = match *self_ty.kind() { - ty::Bool - | ty::Char - | ty::Int(..) - | ty::Uint(..) - | ty::Float(..) - | ty::Array(..) - | ty::RawPtr(..) - | ty::Ref(..) - | ty::FnDef(..) - | ty::FnPtr(..) - | ty::Closure(..) - | ty::Infer(ty::IntVar(..) | ty::FloatVar(..)) - | ty::Generator(..) - | ty::GeneratorWitness(..) - | ty::Never - | ty::Foreign(..) - | ty::Adt(_, _) - | ty::Str - | ty::Slice(_) - | ty::Dynamic(_, _, _) - | ty::Tuple(_) - | ty::Error(_) => self_ty.discriminant_ty(ecx.tcx()), - - // We do not call `Ty::discriminant_ty` on alias, param, or placeholder - // types, which return `::Discriminant` - // (or ICE in the case of placeholders). Projecting a type to itself - // is never really productive. - ty::Alias(_, _) | ty::Param(_) | ty::Placeholder(..) => { - return Err(NoSolution); - } - - ty::Infer(ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) - | ty::Bound(..) 
=> bug!( - "unexpected self ty `{:?}` when normalizing `::Discriminant`", - goal.predicate.self_ty() - ), - }; - - ecx.probe_misc_candidate("builtin discriminant kind").enter(|ecx| { - ecx.eq(goal.param_env, goal.predicate.term, discriminant_ty.into()) - .expect("expected goal term to be fully unconstrained"); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - }) - } - - fn consider_builtin_destruct_candidate( - _ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - bug!("`Destruct` does not have an associated type: {:?}", goal); - } - - fn consider_builtin_transmute_candidate( - _ecx: &mut EvalCtxt<'_, 'tcx>, - goal: Goal<'tcx, Self>, - ) -> QueryResult<'tcx> { - bug!("`BikeshedIntrinsicFrom` does not have an associated type: {:?}", goal) - } -} - -/// This behavior is also implemented in `rustc_ty_utils` and in the old `project` code. -/// -/// FIXME: We should merge these 3 implementations as it's likely that they otherwise -/// diverge. -#[instrument(level = "debug", skip(ecx, param_env), ret)] -fn fetch_eligible_assoc_item_def<'tcx>( - ecx: &EvalCtxt<'_, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - goal_trait_ref: ty::TraitRef<'tcx>, - trait_assoc_def_id: DefId, - impl_def_id: DefId, -) -> Result, NoSolution> { - let node_item = specialization_graph::assoc_def(ecx.tcx(), impl_def_id, trait_assoc_def_id) - .map_err(|ErrorGuaranteed { .. }| NoSolution)?; - - let eligible = if node_item.is_final() { - // Non-specializable items are always projectable. - true - } else { - // Only reveal a specializable default if we're past type-checking - // and the obligation is monomorphic, otherwise passes such as - // transmute checking and polymorphic MIR optimizations could - // get a result which isn't correct for all monomorphizations. - if param_env.reveal() == Reveal::All { - let poly_trait_ref = ecx.resolve_vars_if_possible(goal_trait_ref); - !poly_trait_ref.still_further_specializable() - } else { - debug!(?node_item.item.def_id, "not eligible due to default"); - false - } - }; - - if eligible { Ok(Some(node_item)) } else { Ok(None) } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/search_graph/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/search_graph/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/search_graph/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/search_graph/mod.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,368 +0,0 @@ -use super::inspect; -use super::inspect::ProofTreeBuilder; -use super::SolverMode; -use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::fx::FxHashSet; -use rustc_index::Idx; -use rustc_index::IndexVec; -use rustc_middle::dep_graph::dep_kinds; -use rustc_middle::traits::solve::inspect::CacheHit; -use rustc_middle::traits::solve::CacheData; -use rustc_middle::traits::solve::{CanonicalInput, Certainty, EvaluationCache, QueryResult}; -use rustc_middle::ty::TyCtxt; -use rustc_session::Limit; -use std::collections::hash_map::Entry; - -rustc_index::newtype_index! { - pub struct StackDepth {} -} - -#[derive(Debug)] -struct StackEntry<'tcx> { - input: CanonicalInput<'tcx>, - available_depth: Limit, - // The maximum depth reached by this stack entry, only up-to date - // for the top of the stack and lazily updated for the rest. - reached_depth: StackDepth, - // In case of a cycle, the depth of the root. 
- cycle_root_depth: StackDepth, - - encountered_overflow: bool, - has_been_used: bool, - /// Starts out as `None` and gets set when rerunning this - /// goal in case we encounter a cycle. - provisional_result: Option>, - - /// We put only the root goal of a coinductive cycle into the global cache. - /// - /// If we were to use that result when later trying to prove another cycle - /// participant, we can end up with unstable query results. - /// - /// See tests/ui/new-solver/coinduction/incompleteness-unstable-result.rs for - /// an example of where this is needed. - cycle_participants: FxHashSet>, -} - -pub(super) struct SearchGraph<'tcx> { - mode: SolverMode, - local_overflow_limit: usize, - /// The stack of goals currently being computed. - /// - /// An element is *deeper* in the stack if its index is *lower*. - stack: IndexVec>, - stack_entries: FxHashMap, StackDepth>, -} - -impl<'tcx> SearchGraph<'tcx> { - pub(super) fn new(tcx: TyCtxt<'tcx>, mode: SolverMode) -> SearchGraph<'tcx> { - Self { - mode, - local_overflow_limit: tcx.recursion_limit().0.checked_ilog2().unwrap_or(0) as usize, - stack: Default::default(), - stack_entries: Default::default(), - } - } - - pub(super) fn solver_mode(&self) -> SolverMode { - self.mode - } - - pub(super) fn local_overflow_limit(&self) -> usize { - self.local_overflow_limit - } - - /// Update the stack and reached depths on cache hits. - #[instrument(level = "debug", skip(self))] - fn on_cache_hit(&mut self, additional_depth: usize, encountered_overflow: bool) { - let reached_depth = self.stack.next_index().plus(additional_depth); - if let Some(last) = self.stack.raw.last_mut() { - last.reached_depth = last.reached_depth.max(reached_depth); - last.encountered_overflow |= encountered_overflow; - } - } - - /// Pops the highest goal from the stack, lazily updating the - /// the next goal in the stack. - /// - /// Directly popping from the stack instead of using this method - /// would cause us to not track overflow and recursion depth correctly. - fn pop_stack(&mut self) -> StackEntry<'tcx> { - let elem = self.stack.pop().unwrap(); - assert!(self.stack_entries.remove(&elem.input).is_some()); - if let Some(last) = self.stack.raw.last_mut() { - last.reached_depth = last.reached_depth.max(elem.reached_depth); - last.encountered_overflow |= elem.encountered_overflow; - } - elem - } - - /// The trait solver behavior is different for coherence - /// so we use a separate cache. Alternatively we could use - /// a single cache and share it between coherence and ordinary - /// trait solving. - pub(super) fn global_cache(&self, tcx: TyCtxt<'tcx>) -> &'tcx EvaluationCache<'tcx> { - match self.mode { - SolverMode::Normal => &tcx.new_solver_evaluation_cache, - SolverMode::Coherence => &tcx.new_solver_coherence_evaluation_cache, - } - } - - pub(super) fn is_empty(&self) -> bool { - self.stack.is_empty() - } - - /// Whether we're currently in a cycle. This should only be used - /// for debug assertions. - pub(super) fn in_cycle(&self) -> bool { - if let Some(stack_depth) = self.stack.last_index() { - // Either the current goal on the stack is the root of a cycle - // or it depends on a goal with a lower depth. - self.stack[stack_depth].has_been_used - || self.stack[stack_depth].cycle_root_depth != stack_depth - } else { - false - } - } - - /// Fetches whether the current goal encountered overflow. - /// - /// This should only be used for the check in `evaluate_goal`. 
- pub(super) fn encountered_overflow(&self) -> bool { - if let Some(last) = self.stack.raw.last() { last.encountered_overflow } else { false } - } - - /// Resets `encountered_overflow` of the current goal. - /// - /// This should only be used for the check in `evaluate_goal`. - pub(super) fn reset_encountered_overflow(&mut self, encountered_overflow: bool) -> bool { - if let Some(last) = self.stack.raw.last_mut() { - let prev = last.encountered_overflow; - last.encountered_overflow = encountered_overflow; - prev - } else { - false - } - } - - /// Returns the remaining depth allowed for nested goals. - /// - /// This is generally simply one less than the current depth. - /// However, if we encountered overflow, we significantly reduce - /// the remaining depth of all nested goals to prevent hangs - /// in case there is exponential blowup. - fn allowed_depth_for_nested( - tcx: TyCtxt<'tcx>, - stack: &IndexVec>, - ) -> Option { - if let Some(last) = stack.raw.last() { - if last.available_depth.0 == 0 { - return None; - } - - Some(if last.encountered_overflow { - Limit(last.available_depth.0 / 4) - } else { - Limit(last.available_depth.0 - 1) - }) - } else { - Some(tcx.recursion_limit()) - } - } - - /// Probably the most involved method of the whole solver. - /// - /// Given some goal which is proven via the `prove_goal` closure, this - /// handles caching, overflow, and coinductive cycles. - pub(super) fn with_new_goal( - &mut self, - tcx: TyCtxt<'tcx>, - input: CanonicalInput<'tcx>, - inspect: &mut ProofTreeBuilder<'tcx>, - mut prove_goal: impl FnMut(&mut Self, &mut ProofTreeBuilder<'tcx>) -> QueryResult<'tcx>, - ) -> QueryResult<'tcx> { - // Check for overflow. - let Some(available_depth) = Self::allowed_depth_for_nested(tcx, &self.stack) else { - if let Some(last) = self.stack.raw.last_mut() { - last.encountered_overflow = true; - } - - inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::Overflow); - return Self::response_no_constraints(tcx, input, Certainty::OVERFLOW); - }; - - // Try to fetch the goal from the global cache. - if inspect.use_global_cache() { - if let Some(CacheData { result, reached_depth, encountered_overflow }) = - self.global_cache(tcx).get( - tcx, - input, - |cycle_participants| { - self.stack.iter().any(|entry| cycle_participants.contains(&entry.input)) - }, - available_depth, - ) - { - inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::CacheHit( - CacheHit::Global, - )); - self.on_cache_hit(reached_depth, encountered_overflow); - return result; - } - } - - // Check whether we're in a cycle. - match self.stack_entries.entry(input) { - // No entry, we push this goal on the stack and try to prove it. - Entry::Vacant(v) => { - let depth = self.stack.next_index(); - let entry = StackEntry { - input, - available_depth, - reached_depth: depth, - cycle_root_depth: depth, - encountered_overflow: false, - has_been_used: false, - provisional_result: None, - cycle_participants: Default::default(), - }; - assert_eq!(self.stack.push(entry), depth); - v.insert(depth); - } - // We have a nested goal which relies on a goal `root` deeper in the stack. - // - // We first store that we may have to reprove `root` in case the provisional - // response is not equal to the final response. We also update the depth of all - // goals which recursively depend on our current goal to depend on `root` - // instead. 
- // - // Finally we can return either the provisional response for that goal if we have a - // coinductive cycle or an ambiguous result if the cycle is inductive. - Entry::Occupied(entry) => { - inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::CacheHit( - CacheHit::Provisional, - )); - - let stack_depth = *entry.get(); - debug!("encountered cycle with depth {stack_depth:?}"); - // We start by updating the root depth of all cycle participants, and - // add all cycle participants to the root. - let root_depth = self.stack[stack_depth].cycle_root_depth; - let (prev, participants) = self.stack.raw.split_at_mut(stack_depth.as_usize() + 1); - let root = &mut prev[root_depth.as_usize()]; - for entry in participants { - debug_assert!(entry.cycle_root_depth >= root_depth); - entry.cycle_root_depth = root_depth; - root.cycle_participants.insert(entry.input); - // FIXME(@lcnr): I believe that this line is needed as we could - // otherwise access a cache entry for the root of a cycle while - // computing the result for a cycle participant. This can result - // in unstable results due to incompleteness. - // - // However, a test for this would be an even more complex version of - // tests/ui/traits/new-solver/coinduction/incompleteness-unstable-result.rs. - // I did not bother to write such a test and we have no regression test - // for this. It would be good to have such a test :) - #[allow(rustc::potential_query_instability)] - root.cycle_participants.extend(entry.cycle_participants.drain()); - } - - // If we're in a cycle, we have to retry proving the cycle head - // until we reach a fixpoint. It is not enough to simply retry the - // `root` goal of this cycle. - // - // See tests/ui/traits/new-solver/cycles/fixpoint-rerun-all-cycle-heads.rs - // for an example. - self.stack[stack_depth].has_been_used = true; - return if let Some(result) = self.stack[stack_depth].provisional_result { - result - } else { - // If we don't have a provisional result yet we're in the first iteration, - // so we start with no constraints. - let is_coinductive = self.stack.raw[stack_depth.index()..] - .iter() - .all(|entry| entry.input.value.goal.predicate.is_coinductive(tcx)); - if is_coinductive { - Self::response_no_constraints(tcx, input, Certainty::Yes) - } else { - Self::response_no_constraints(tcx, input, Certainty::OVERFLOW) - } - }; - } - } - - // This is for global caching, so we properly track query dependencies. - // Everything that affects the `result` should be performed within this - // `with_anon_task` closure. - let ((final_entry, result), dep_node) = - tcx.dep_graph.with_anon_task(tcx, dep_kinds::TraitSelect, || { - // When we encounter a coinductive cycle, we have to fetch the - // result of that cycle while we are still computing it. Because - // of this we continuously recompute the cycle until the result - // of the previous iteration is equal to the final result, at which - // point we are done. - for _ in 0..self.local_overflow_limit() { - let result = prove_goal(self, inspect); - - // Check whether the current goal is the root of a cycle and whether - // we have to rerun because its provisional result differed from the - // final result. - let stack_entry = self.pop_stack(); - debug_assert_eq!(stack_entry.input, input); - if stack_entry.has_been_used - && stack_entry.provisional_result.map_or(true, |r| r != result) - { - // If so, update its provisional result and reevaluate it. 
- let depth = self.stack.push(StackEntry { - has_been_used: false, - provisional_result: Some(result), - ..stack_entry - }); - assert_eq!(self.stack_entries.insert(input, depth), None); - } else { - return (stack_entry, result); - } - } - - debug!("canonical cycle overflow"); - let current_entry = self.pop_stack(); - let result = Self::response_no_constraints(tcx, input, Certainty::OVERFLOW); - (current_entry, result) - }); - - // We're now done with this goal. In case this goal is involved in a larger cycle - // do not remove it from the provisional cache and update its provisional result. - // We only add the root of cycles to the global cache. - // - // It is not possible for any nested goal to depend on something deeper on the - // stack, as this would have also updated the depth of the current goal. - if final_entry.cycle_root_depth == self.stack.next_index() { - // When encountering a cycle, both inductive and coinductive, we only - // move the root into the global cache. We also store all other cycle - // participants involved. - // - // We disable the global cache entry of the root goal if a cycle - // participant is on the stack. This is necessary to prevent unstable - // results. See the comment of `StackEntry::cycle_participants` for - // more details. - let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len(); - self.global_cache(tcx).insert( - input, - reached_depth, - final_entry.encountered_overflow, - final_entry.cycle_participants, - dep_node, - result, - ) - } - - result - } - - fn response_no_constraints( - tcx: TyCtxt<'tcx>, - goal: CanonicalInput<'tcx>, - certainty: Certainty, - ) -> QueryResult<'tcx> { - Ok(super::response_no_constraints_raw(tcx, goal.max_universe, goal.variables, certainty)) - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/search_graph.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/search_graph.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/search_graph.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/search_graph.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,382 @@ +use super::inspect; +use super::inspect::ProofTreeBuilder; +use super::SolverMode; +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::FxHashSet; +use rustc_index::Idx; +use rustc_index::IndexVec; +use rustc_middle::dep_graph::dep_kinds; +use rustc_middle::traits::solve::CacheData; +use rustc_middle::traits::solve::{CanonicalInput, Certainty, EvaluationCache, QueryResult}; +use rustc_middle::ty::TyCtxt; +use rustc_session::Limit; +use std::collections::hash_map::Entry; + +rustc_index::newtype_index! { + pub struct StackDepth {} +} + +#[derive(Debug)] +struct StackEntry<'tcx> { + input: CanonicalInput<'tcx>, + available_depth: Limit, + // The maximum depth reached by this stack entry, only up-to date + // for the top of the stack and lazily updated for the rest. + reached_depth: StackDepth, + // In case of a cycle, the depth of the root. + cycle_root_depth: StackDepth, + + encountered_overflow: bool, + has_been_used: bool, + /// Starts out as `None` and gets set when rerunning this + /// goal in case we encounter a cycle. + provisional_result: Option>, + + /// We put only the root goal of a coinductive cycle into the global cache. + /// + /// If we were to use that result when later trying to prove another cycle + /// participant, we can end up with unstable query results. 
+ /// + /// See tests/ui/new-solver/coinduction/incompleteness-unstable-result.rs for + /// an example of where this is needed. + cycle_participants: FxHashSet<CanonicalInput<'tcx>>, +} + +pub(super) struct SearchGraph<'tcx> { + mode: SolverMode, + local_overflow_limit: usize, + /// The stack of goals currently being computed. + /// + /// An element is *deeper* in the stack if its index is *lower*. + stack: IndexVec<StackDepth, StackEntry<'tcx>>, + stack_entries: FxHashMap<CanonicalInput<'tcx>, StackDepth>, +} + +impl<'tcx> SearchGraph<'tcx> { + pub(super) fn new(tcx: TyCtxt<'tcx>, mode: SolverMode) -> SearchGraph<'tcx> { + Self { + mode, + local_overflow_limit: tcx.recursion_limit().0.checked_ilog2().unwrap_or(0) as usize, + stack: Default::default(), + stack_entries: Default::default(), + } + } + + pub(super) fn solver_mode(&self) -> SolverMode { + self.mode + } + + pub(super) fn local_overflow_limit(&self) -> usize { + self.local_overflow_limit + } + + /// Update the stack and reached depths on cache hits. + #[instrument(level = "debug", skip(self))] + fn on_cache_hit(&mut self, additional_depth: usize, encountered_overflow: bool) { + let reached_depth = self.stack.next_index().plus(additional_depth); + if let Some(last) = self.stack.raw.last_mut() { + last.reached_depth = last.reached_depth.max(reached_depth); + last.encountered_overflow |= encountered_overflow; + } + } + + /// Pops the highest goal from the stack, lazily updating the + /// the next goal in the stack. + /// + /// Directly popping from the stack instead of using this method + /// would cause us to not track overflow and recursion depth correctly. + fn pop_stack(&mut self) -> StackEntry<'tcx> { + let elem = self.stack.pop().unwrap(); + assert!(self.stack_entries.remove(&elem.input).is_some()); + if let Some(last) = self.stack.raw.last_mut() { + last.reached_depth = last.reached_depth.max(elem.reached_depth); + last.encountered_overflow |= elem.encountered_overflow; + } + elem + } + + /// The trait solver behavior is different for coherence + /// so we use a separate cache. Alternatively we could use + /// a single cache and share it between coherence and ordinary + /// trait solving. + pub(super) fn global_cache(&self, tcx: TyCtxt<'tcx>) -> &'tcx EvaluationCache<'tcx> { + match self.mode { + SolverMode::Normal => &tcx.new_solver_evaluation_cache, + SolverMode::Coherence => &tcx.new_solver_coherence_evaluation_cache, + } + } + + pub(super) fn is_empty(&self) -> bool { + self.stack.is_empty() + } + + /// Whether we're currently in a cycle. This should only be used + /// for debug assertions. + pub(super) fn in_cycle(&self) -> bool { + if let Some(stack_depth) = self.stack.last_index() { + // Either the current goal on the stack is the root of a cycle + // or it depends on a goal with a lower depth. + self.stack[stack_depth].has_been_used + || self.stack[stack_depth].cycle_root_depth != stack_depth + } else { + false + } + } + + /// Fetches whether the current goal encountered overflow. + /// + /// This should only be used for the check in `evaluate_goal`. + pub(super) fn encountered_overflow(&self) -> bool { + if let Some(last) = self.stack.raw.last() { last.encountered_overflow } else { false } + } + + /// Resets `encountered_overflow` of the current goal. + /// + /// This should only be used for the check in `evaluate_goal`.
+ pub(super) fn reset_encountered_overflow(&mut self, encountered_overflow: bool) -> bool { + if let Some(last) = self.stack.raw.last_mut() { + let prev = last.encountered_overflow; + last.encountered_overflow = encountered_overflow; + prev + } else { + false + } + } + + /// Returns the remaining depth allowed for nested goals. + /// + /// This is generally simply one less than the current depth. + /// However, if we encountered overflow, we significantly reduce + /// the remaining depth of all nested goals to prevent hangs + /// in case there is exponential blowup. + fn allowed_depth_for_nested( + tcx: TyCtxt<'tcx>, + stack: &IndexVec>, + ) -> Option { + if let Some(last) = stack.raw.last() { + if last.available_depth.0 == 0 { + return None; + } + + Some(if last.encountered_overflow { + Limit(last.available_depth.0 / 4) + } else { + Limit(last.available_depth.0 - 1) + }) + } else { + Some(tcx.recursion_limit()) + } + } + + /// Probably the most involved method of the whole solver. + /// + /// Given some goal which is proven via the `prove_goal` closure, this + /// handles caching, overflow, and coinductive cycles. + pub(super) fn with_new_goal( + &mut self, + tcx: TyCtxt<'tcx>, + input: CanonicalInput<'tcx>, + inspect: &mut ProofTreeBuilder<'tcx>, + mut prove_goal: impl FnMut(&mut Self, &mut ProofTreeBuilder<'tcx>) -> QueryResult<'tcx>, + ) -> QueryResult<'tcx> { + // Check for overflow. + let Some(available_depth) = Self::allowed_depth_for_nested(tcx, &self.stack) else { + if let Some(last) = self.stack.raw.last_mut() { + last.encountered_overflow = true; + } + + inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::Overflow); + return Self::response_no_constraints(tcx, input, Certainty::OVERFLOW); + }; + + // Try to fetch the goal from the global cache. + 'global: { + let Some(CacheData { result, proof_tree, reached_depth, encountered_overflow }) = + self.global_cache(tcx).get( + tcx, + input, + |cycle_participants| { + self.stack.iter().any(|entry| cycle_participants.contains(&entry.input)) + }, + available_depth, + ) + else { + break 'global; + }; + + // If we're building a proof tree and the current cache entry does not + // contain a proof tree, we do not use the entry but instead recompute + // the goal. We simply overwrite the existing entry once we're done, + // caching the proof tree. + if !inspect.is_noop() { + if let Some(revisions) = proof_tree { + inspect.goal_evaluation_kind( + inspect::WipCanonicalGoalEvaluationKind::Interned { revisions }, + ); + } else { + break 'global; + } + } + + self.on_cache_hit(reached_depth, encountered_overflow); + return result; + } + + // Check whether we're in a cycle. + match self.stack_entries.entry(input) { + // No entry, we push this goal on the stack and try to prove it. + Entry::Vacant(v) => { + let depth = self.stack.next_index(); + let entry = StackEntry { + input, + available_depth, + reached_depth: depth, + cycle_root_depth: depth, + encountered_overflow: false, + has_been_used: false, + provisional_result: None, + cycle_participants: Default::default(), + }; + assert_eq!(self.stack.push(entry), depth); + v.insert(depth); + } + // We have a nested goal which relies on a goal `root` deeper in the stack. + // + // We first store that we may have to reprove `root` in case the provisional + // response is not equal to the final response. We also update the depth of all + // goals which recursively depend on our current goal to depend on `root` + // instead. 
+ // + // Finally we can return either the provisional response for that goal if we have a + // coinductive cycle or an ambiguous result if the cycle is inductive. + Entry::Occupied(entry) => { + inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::CycleInStack); + + let stack_depth = *entry.get(); + debug!("encountered cycle with depth {stack_depth:?}"); + // We start by updating the root depth of all cycle participants, and + // add all cycle participants to the root. + let root_depth = self.stack[stack_depth].cycle_root_depth; + let (prev, participants) = self.stack.raw.split_at_mut(stack_depth.as_usize() + 1); + let root = &mut prev[root_depth.as_usize()]; + for entry in participants { + debug_assert!(entry.cycle_root_depth >= root_depth); + entry.cycle_root_depth = root_depth; + root.cycle_participants.insert(entry.input); + // FIXME(@lcnr): I believe that this line is needed as we could + // otherwise access a cache entry for the root of a cycle while + // computing the result for a cycle participant. This can result + // in unstable results due to incompleteness. + // + // However, a test for this would be an even more complex version of + // tests/ui/traits/new-solver/coinduction/incompleteness-unstable-result.rs. + // I did not bother to write such a test and we have no regression test + // for this. It would be good to have such a test :) + #[allow(rustc::potential_query_instability)] + root.cycle_participants.extend(entry.cycle_participants.drain()); + } + + // If we're in a cycle, we have to retry proving the cycle head + // until we reach a fixpoint. It is not enough to simply retry the + // `root` goal of this cycle. + // + // See tests/ui/traits/new-solver/cycles/fixpoint-rerun-all-cycle-heads.rs + // for an example. + self.stack[stack_depth].has_been_used = true; + return if let Some(result) = self.stack[stack_depth].provisional_result { + result + } else { + // If we don't have a provisional result yet we're in the first iteration, + // so we start with no constraints. + let is_coinductive = self.stack.raw[stack_depth.index()..] + .iter() + .all(|entry| entry.input.value.goal.predicate.is_coinductive(tcx)); + if is_coinductive { + Self::response_no_constraints(tcx, input, Certainty::Yes) + } else { + Self::response_no_constraints(tcx, input, Certainty::OVERFLOW) + } + }; + } + } + + // This is for global caching, so we properly track query dependencies. + // Everything that affects the `result` should be performed within this + // `with_anon_task` closure. + let ((final_entry, result), dep_node) = + tcx.dep_graph.with_anon_task(tcx, dep_kinds::TraitSelect, || { + // When we encounter a coinductive cycle, we have to fetch the + // result of that cycle while we are still computing it. Because + // of this we continuously recompute the cycle until the result + // of the previous iteration is equal to the final result, at which + // point we are done. + for _ in 0..self.local_overflow_limit() { + let result = prove_goal(self, inspect); + + // Check whether the current goal is the root of a cycle and whether + // we have to rerun because its provisional result differed from the + // final result. + let stack_entry = self.pop_stack(); + debug_assert_eq!(stack_entry.input, input); + if stack_entry.has_been_used + && stack_entry.provisional_result.map_or(true, |r| r != result) + { + // If so, update its provisional result and reevaluate it. 
+ let depth = self.stack.push(StackEntry { + has_been_used: false, + provisional_result: Some(result), + ..stack_entry + }); + assert_eq!(self.stack_entries.insert(input, depth), None); + } else { + return (stack_entry, result); + } + } + + debug!("canonical cycle overflow"); + let current_entry = self.pop_stack(); + let result = Self::response_no_constraints(tcx, input, Certainty::OVERFLOW); + (current_entry, result) + }); + + let proof_tree = inspect.finalize_evaluation(tcx); + + // We're now done with this goal. In case this goal is involved in a larger cycle + // do not remove it from the provisional cache and update its provisional result. + // We only add the root of cycles to the global cache. + // + // It is not possible for any nested goal to depend on something deeper on the + // stack, as this would have also updated the depth of the current goal. + if final_entry.cycle_root_depth == self.stack.next_index() { + // When encountering a cycle, both inductive and coinductive, we only + // move the root into the global cache. We also store all other cycle + // participants involved. + // + // We disable the global cache entry of the root goal if a cycle + // participant is on the stack. This is necessary to prevent unstable + // results. See the comment of `StackEntry::cycle_participants` for + // more details. + let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len(); + self.global_cache(tcx).insert( + tcx, + input, + proof_tree, + reached_depth, + final_entry.encountered_overflow, + final_entry.cycle_participants, + dep_node, + result, + ) + } + + result + } + + fn response_no_constraints( + tcx: TyCtxt<'tcx>, + goal: CanonicalInput<'tcx>, + certainty: Certainty, + ) -> QueryResult<'tcx> { + Ok(super::response_no_constraints_raw(tcx, goal.max_universe, goal.variables, certainty)) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/trait_goals.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/trait_goals.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/trait_goals.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/trait_goals.rs 2023-12-21 16:55:28.000000000 +0000 @@ -22,6 +22,10 @@ self.trait_ref } + fn polarity(self) -> ty::ImplPolarity { + self.polarity + } + fn with_self_ty(self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> Self { self.with_self_ty(tcx, self_ty) } @@ -136,12 +140,13 @@ // `assemble_candidates_after_normalizing_self_ty`, and we'd // just be registering an identical candidate here. // - // Returning `Err(NoSolution)` here is ok in `SolverMode::Coherence` - // since we'll always be registering an ambiguous candidate in + // We always return `Err(NoSolution)` here in `SolverMode::Coherence` + // since we'll always register an ambiguous candidate in // `assemble_candidates_after_normalizing_self_ty` due to normalizing // the TAIT. if let ty::Alias(ty::Opaque, opaque_ty) = goal.predicate.self_ty().kind() { if matches!(goal.param_env.reveal(), Reveal::All) + || matches!(ecx.solver_mode(), SolverMode::Coherence) || opaque_ty .def_id .as_local() @@ -237,14 +242,25 @@ ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, ) -> QueryResult<'tcx> { - if goal.predicate.polarity != ty::ImplPolarity::Positive { - return Err(NoSolution); - } - - if let ty::FnPtr(..) 
= goal.predicate.self_ty().kind() { - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } else { - Err(NoSolution) + let self_ty = goal.predicate.self_ty(); + match goal.predicate.polarity { + ty::ImplPolarity::Positive => { + if self_ty.is_fn_ptr() { + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } else { + Err(NoSolution) + } + } + ty::ImplPolarity::Negative => { + // If a type is rigid and not a fn ptr, then we know for certain + // that it does *not* implement `FnPtr`. + if !self_ty.is_fn_ptr() && self_ty.is_known_rigid() { + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } else { + Err(NoSolution) + } + } + ty::ImplPolarity::Reservation => bug!(), } } @@ -318,23 +334,47 @@ return Err(NoSolution); } - let ty::Generator(def_id, _, _) = *goal.predicate.self_ty().kind() else { + let ty::Coroutine(def_id, _, _) = *goal.predicate.self_ty().kind() else { return Err(NoSolution); }; - // Generators are not futures unless they come from `async` desugaring + // Coroutines are not futures unless they come from `async` desugaring let tcx = ecx.tcx(); - if !tcx.generator_is_async(def_id) { + if !tcx.coroutine_is_async(def_id) { return Err(NoSolution); } - // Async generator unconditionally implement `Future` + // Async coroutine unconditionally implement `Future` // Technically, we need to check that the future output type is Sized, - // but that's already proven by the generator being WF. + // but that's already proven by the coroutine being WF. + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } + + fn consider_builtin_iterator_candidate( + ecx: &mut EvalCtxt<'_, 'tcx>, + goal: Goal<'tcx, Self>, + ) -> QueryResult<'tcx> { + if goal.predicate.polarity != ty::ImplPolarity::Positive { + return Err(NoSolution); + } + + let ty::Coroutine(def_id, _, _) = *goal.predicate.self_ty().kind() else { + return Err(NoSolution); + }; + + // Coroutines are not iterators unless they come from `gen` desugaring + let tcx = ecx.tcx(); + if !tcx.coroutine_is_gen(def_id) { + return Err(NoSolution); + } + + // Gen coroutines unconditionally implement `Iterator` + // Technically, we need to check that the iterator output type is Sized, + // but that's already proven by the coroutines being WF. ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } - fn consider_builtin_generator_candidate( + fn consider_builtin_coroutine_candidate( ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, ) -> QueryResult<'tcx> { @@ -343,24 +383,24 @@ } let self_ty = goal.predicate.self_ty(); - let ty::Generator(def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(def_id, args, _) = *self_ty.kind() else { return Err(NoSolution); }; - // `async`-desugared generators do not implement the generator trait + // `async`-desugared coroutines do not implement the coroutine trait let tcx = ecx.tcx(); - if tcx.generator_is_async(def_id) { + if !tcx.is_general_coroutine(def_id) { return Err(NoSolution); } - let generator = args.as_generator(); + let coroutine = args.as_coroutine(); Self::consider_implied_clause( ecx, goal, - ty::TraitRef::new(tcx, goal.predicate.def_id(), [self_ty, generator.resume_ty()]) + ty::TraitRef::new(tcx, goal.predicate.def_id(), [self_ty, coroutine.resume_ty()]) .to_predicate(tcx), - // Technically, we need to check that the generator types are Sized, - // but that's already proven by the generator being WF. 
+ // Technically, we need to check that the coroutine types are Sized, + // but that's already proven by the coroutine being WF. [], ) } @@ -843,10 +883,10 @@ ty::Infer(_) | ty::Bound(_, _) => bug!("unexpected type `{self_ty}`"), - // Generators have one special built-in candidate, `Unpin`, which + // Coroutines have one special built-in candidate, `Unpin`, which // takes precedence over the structural auto trait candidate being // assembled. - ty::Generator(_, _, movability) + ty::Coroutine(_, _, movability) if Some(goal.predicate.def_id()) == self.tcx().lang_items().unpin_trait() => { match movability { @@ -878,8 +918,8 @@ | ty::FnDef(_, _) | ty::FnPtr(_) | ty::Closure(_, _) - | ty::Generator(_, _, _) - | ty::GeneratorWitness(..) + | ty::Coroutine(_, _, _) + | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) | ty::Adt(_, _) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/weak_types.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/weak_types.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/weak_types.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/solve/weak_types.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -//! Computes a normalizes-to (projection) goal for inherent associated types, -//! `#![feature(lazy_type_alias)]` and `#![feature(type_alias_impl_trait)]`. -//! -//! Since a weak alias is not ambiguous, this just computes the `type_of` of -//! the alias and registers the where-clauses of the type alias. -use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; -use rustc_middle::ty; - -use super::EvalCtxt; - -impl<'tcx> EvalCtxt<'_, 'tcx> { - pub(super) fn normalize_weak_type( - &mut self, - goal: Goal<'tcx, ty::ProjectionPredicate<'tcx>>, - ) -> QueryResult<'tcx> { - let tcx = self.tcx(); - let weak_ty = goal.predicate.projection_ty; - let expected = goal.predicate.term.ty().expect("no such thing as a const alias"); - - let actual = tcx.type_of(weak_ty.def_id).instantiate(tcx, weak_ty.args); - self.eq(goal.param_env, expected, actual)?; - - // Check where clauses - self.add_goals( - tcx.predicates_of(weak_ty.def_id) - .instantiate(tcx, weak_ty.args) - .predicates - .into_iter() - .map(|pred| goal.with(tcx, pred)), - ); - - self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/coherence.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/coherence.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/coherence.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/coherence.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,20 +9,18 @@ use crate::solve::inspect; use crate::solve::inspect::{InspectGoal, ProofTreeInferCtxtExt, ProofTreeVisitor}; use crate::traits::engine::TraitEngineExt; -use crate::traits::outlives_bounds::InferCtxtExt as _; use crate::traits::query::evaluate_obligation::InferCtxtExt; use crate::traits::select::{IntercrateAmbiguityCause, TreatInductiveCycleAs}; use crate::traits::structural_normalize::StructurallyNormalizeExt; -use crate::traits::util::impl_subject_and_oblig; use crate::traits::NormalizeExt; use crate::traits::SkipLeakCheck; use crate::traits::{ - self, Obligation, ObligationCause, ObligationCtxt, PredicateObligation, PredicateObligations, + Obligation, 
ObligationCause, ObligationCtxt, PredicateObligation, PredicateObligations, SelectionContext, }; use rustc_data_structures::fx::FxIndexSet; use rustc_errors::Diagnostic; -use rustc_hir::def_id::{DefId, CRATE_DEF_ID, LOCAL_CRATE}; +use rustc_hir::def_id::{DefId, LOCAL_CRATE}; use rustc_infer::infer::{DefineOpaqueTypes, InferCtxt, TyCtxtInferExt}; use rustc_infer::traits::{util, TraitEngine}; use rustc_middle::traits::query::NoSolution; @@ -32,12 +30,11 @@ use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams}; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::visit::{TypeVisitable, TypeVisitableExt}; -use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitor}; +use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitor}; use rustc_session::lint::builtin::COINDUCTIVE_OVERLAP_IN_COHERENCE; use rustc_span::symbol::sym; use rustc_span::DUMMY_SP; use std::fmt::Debug; -use std::iter; use std::ops::ControlFlow; /// Whether we do the orphan check relative to this crate or @@ -142,16 +139,13 @@ Some(overlap) } -fn with_fresh_ty_vars<'cx, 'tcx>( - selcx: &mut SelectionContext<'cx, 'tcx>, - param_env: ty::ParamEnv<'tcx>, - impl_def_id: DefId, -) -> ty::ImplHeader<'tcx> { - let tcx = selcx.tcx(); - let impl_args = selcx.infcx.fresh_args_for_item(DUMMY_SP, impl_def_id); +fn fresh_impl_header<'tcx>(infcx: &InferCtxt<'tcx>, impl_def_id: DefId) -> ty::ImplHeader<'tcx> { + let tcx = infcx.tcx; + let impl_args = infcx.fresh_args_for_item(DUMMY_SP, impl_def_id); - let header = ty::ImplHeader { + ty::ImplHeader { impl_def_id, + impl_args, self_ty: tcx.type_of(impl_def_id).instantiate(tcx, impl_args), trait_ref: tcx.impl_trait_ref(impl_def_id).map(|i| i.instantiate(tcx, impl_args)), predicates: tcx @@ -160,10 +154,18 @@ .iter() .map(|(c, _)| c.as_predicate()) .collect(), - }; + } +} + +fn fresh_impl_header_normalized<'tcx>( + infcx: &InferCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + impl_def_id: DefId, +) -> ty::ImplHeader<'tcx> { + let header = fresh_impl_header(infcx, impl_def_id); let InferOk { value: mut header, obligations } = - selcx.infcx.at(&ObligationCause::dummy(), param_env).normalize(header); + infcx.at(&ObligationCause::dummy(), param_env).normalize(header); header.predicates.extend(obligations.into_iter().map(|o| o.predicate)); header @@ -206,12 +208,13 @@ // empty environment. let param_env = ty::ParamEnv::empty(); - let impl1_header = with_fresh_ty_vars(selcx, param_env, impl1_def_id); - let impl2_header = with_fresh_ty_vars(selcx, param_env, impl2_def_id); + let impl1_header = fresh_impl_header_normalized(selcx.infcx, param_env, impl1_def_id); + let impl2_header = fresh_impl_header_normalized(selcx.infcx, param_env, impl2_def_id); // Equate the headers to find their intersection (the general type, with infer vars, // that may apply both impls). 
- let mut obligations = equate_impl_headers(selcx.infcx, &impl1_header, &impl2_header)?; + let mut obligations = + equate_impl_headers(selcx.infcx, param_env, &impl1_header, &impl2_header)?; debug!("overlap: unification check succeeded"); obligations.extend( @@ -312,20 +315,22 @@ #[instrument(level = "debug", skip(infcx), ret)] fn equate_impl_headers<'tcx>( infcx: &InferCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, impl1: &ty::ImplHeader<'tcx>, impl2: &ty::ImplHeader<'tcx>, ) -> Option> { - let result = match (impl1.trait_ref, impl2.trait_ref) { - (Some(impl1_ref), Some(impl2_ref)) => infcx - .at(&ObligationCause::dummy(), ty::ParamEnv::empty()) - .eq(DefineOpaqueTypes::Yes, impl1_ref, impl2_ref), - (None, None) => infcx.at(&ObligationCause::dummy(), ty::ParamEnv::empty()).eq( - DefineOpaqueTypes::Yes, - impl1.self_ty, - impl2.self_ty, - ), - _ => bug!("mk_eq_impl_headers given mismatched impl kinds"), - }; + let result = + match (impl1.trait_ref, impl2.trait_ref) { + (Some(impl1_ref), Some(impl2_ref)) => infcx + .at(&ObligationCause::dummy(), param_env) + .eq(DefineOpaqueTypes::Yes, impl1_ref, impl2_ref), + (None, None) => infcx.at(&ObligationCause::dummy(), param_env).eq( + DefineOpaqueTypes::Yes, + impl1.self_ty, + impl2.self_ty, + ), + _ => bug!("mk_eq_impl_headers given mismatched impl kinds"), + }; result.map(|infer_ok| infer_ok.obligations).ok() } @@ -391,107 +396,182 @@ ) -> bool { debug!("negative_impl(impl1_def_id={:?}, impl2_def_id={:?})", impl1_def_id, impl2_def_id); - // Create an infcx, taking the predicates of impl1 as assumptions: - let infcx = tcx.infer_ctxt().build(); - // create a parameter environment corresponding to a (placeholder) instantiation of impl1 - let impl_env = tcx.param_env(impl1_def_id); - let subject1 = match traits::fully_normalize( - &infcx, - ObligationCause::dummy(), - impl_env, - tcx.impl_subject(impl1_def_id).instantiate_identity(), - ) { - Ok(s) => s, - Err(err) => { - tcx.sess.delay_span_bug( - tcx.def_span(impl1_def_id), - format!("failed to fully normalize {impl1_def_id:?}: {err:?}"), - ); - return false; - } - }; + let ref infcx = tcx.infer_ctxt().intercrate(true).with_next_trait_solver(true).build(); + let universe = infcx.universe(); - // Attempt to prove that impl2 applies, given all of the above. - let selcx = &mut SelectionContext::new(&infcx); - let impl2_args = infcx.fresh_args_for_item(DUMMY_SP, impl2_def_id); - let (subject2, normalization_obligations) = - impl_subject_and_oblig(selcx, impl_env, impl2_def_id, impl2_args, |_, _| { - ObligationCause::dummy() - }); - - // do the impls unify? If not, then it's not currently possible to prove any - // obligations about their intersection. - let Ok(InferOk { obligations: equate_obligations, .. }) = - infcx.at(&ObligationCause::dummy(), impl_env).eq(DefineOpaqueTypes::No, subject1, subject2) + let impl1_header = fresh_impl_header(infcx, impl1_def_id); + let param_env = + ty::EarlyBinder::bind(tcx.param_env(impl1_def_id)).instantiate(tcx, impl1_header.impl_args); + + let impl2_header = fresh_impl_header(infcx, impl2_def_id); + + // Equate the headers to find their intersection (the general type, with infer vars, + // that may apply both impls). 
+ let Some(_equate_obligations) = + equate_impl_headers(infcx, param_env, &impl1_header, &impl2_header) else { - debug!("explicit_disjoint: {:?} does not unify with {:?}", subject1, subject2); return false; }; - for obligation in normalization_obligations.into_iter().chain(equate_obligations) { - if negative_impl_exists(&infcx, &obligation, impl1_def_id) { - debug!("overlap: obligation unsatisfiable {:?}", obligation); - return true; - } - } + plug_infer_with_placeholders(infcx, universe, (impl1_header.impl_args, impl2_header.impl_args)); - false + util::elaborate(tcx, tcx.predicates_of(impl2_def_id).instantiate(tcx, impl2_header.impl_args)) + .any(|(clause, _)| try_prove_negated_where_clause(infcx, clause, param_env)) } -/// Try to prove that a negative impl exist for the obligation or its supertraits. -/// -/// If such a negative impl exists, then the obligation definitely must not hold -/// due to coherence, even if it's not necessarily "knowable" in this crate. Any -/// valid impl downstream would not be able to exist due to the overlapping -/// negative impl. -#[instrument(level = "debug", skip(infcx))] -fn negative_impl_exists<'tcx>( +fn plug_infer_with_placeholders<'tcx>( infcx: &InferCtxt<'tcx>, - o: &PredicateObligation<'tcx>, - body_def_id: DefId, -) -> bool { - // Try to prove a negative obligation exists for super predicates - for pred in util::elaborate(infcx.tcx, iter::once(o.predicate)) { - if prove_negated_obligation(infcx.fork(), &o.with(infcx.tcx, pred), body_def_id) { - return true; + universe: ty::UniverseIndex, + value: impl TypeVisitable>, +) { + struct PlugInferWithPlaceholder<'a, 'tcx> { + infcx: &'a InferCtxt<'tcx>, + universe: ty::UniverseIndex, + var: ty::BoundVar, + } + + impl<'tcx> PlugInferWithPlaceholder<'_, 'tcx> { + fn next_var(&mut self) -> ty::BoundVar { + let var = self.var; + self.var = self.var + 1; + var + } + } + + impl<'tcx> TypeVisitor> for PlugInferWithPlaceholder<'_, 'tcx> { + fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow { + let ty = self.infcx.shallow_resolve(ty); + if ty.is_ty_var() { + let Ok(InferOk { value: (), obligations }) = + self.infcx.at(&ObligationCause::dummy(), ty::ParamEnv::empty()).eq( + DefineOpaqueTypes::No, + ty, + Ty::new_placeholder( + self.infcx.tcx, + ty::Placeholder { + universe: self.universe, + bound: ty::BoundTy { + var: self.next_var(), + kind: ty::BoundTyKind::Anon, + }, + }, + ), + ) + else { + bug!() + }; + assert_eq!(obligations, &[]); + ControlFlow::Continue(()) + } else { + ty.super_visit_with(self) + } + } + + fn visit_const(&mut self, ct: ty::Const<'tcx>) -> ControlFlow { + let ct = self.infcx.shallow_resolve(ct); + if ct.is_ct_infer() { + let Ok(InferOk { value: (), obligations }) = + self.infcx.at(&ObligationCause::dummy(), ty::ParamEnv::empty()).eq( + DefineOpaqueTypes::No, + ct, + ty::Const::new_placeholder( + self.infcx.tcx, + ty::Placeholder { universe: self.universe, bound: self.next_var() }, + ct.ty(), + ), + ) + else { + bug!() + }; + assert_eq!(obligations, &[]); + ControlFlow::Continue(()) + } else { + ct.super_visit_with(self) + } + } + + fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow { + if let ty::ReVar(vid) = *r { + let r = self + .infcx + .inner + .borrow_mut() + .unwrap_region_constraints() + .opportunistic_resolve_var(self.infcx.tcx, vid); + if r.is_var() { + let Ok(InferOk { value: (), obligations }) = + self.infcx.at(&ObligationCause::dummy(), ty::ParamEnv::empty()).eq( + DefineOpaqueTypes::No, + r, + ty::Region::new_placeholder( + self.infcx.tcx, + ty::Placeholder { + 
universe: self.universe, + bound: ty::BoundRegion { + var: self.next_var(), + kind: ty::BoundRegionKind::BrAnon, + }, + }, + ), + ) + else { + bug!() + }; + assert_eq!(obligations, &[]); + } + } + ControlFlow::Continue(()) } } - false + value.visit_with(&mut PlugInferWithPlaceholder { + infcx, + universe, + var: ty::BoundVar::from_u32(0), + }); } -#[instrument(level = "debug", skip(infcx))] -fn prove_negated_obligation<'tcx>( - infcx: InferCtxt<'tcx>, - o: &PredicateObligation<'tcx>, - body_def_id: DefId, +fn try_prove_negated_where_clause<'tcx>( + root_infcx: &InferCtxt<'tcx>, + clause: ty::Clause<'tcx>, + param_env: ty::ParamEnv<'tcx>, ) -> bool { - let tcx = infcx.tcx; - - let Some(o) = o.flip_polarity(tcx) else { + let Some(negative_predicate) = clause.as_predicate().flip_polarity(root_infcx.tcx) else { return false; }; - let param_env = o.param_env; - let ocx = ObligationCtxt::new(&infcx); - ocx.register_obligation(o); - let errors = ocx.select_all_or_error(); - if !errors.is_empty() { + // FIXME(with_negative_coherence): the infcx has region contraints from equating + // the impl headers as requirements. Given that the only region constraints we + // get are involving inference regions in the root, it shouldn't matter, but + // still sus. + // + // We probably should just throw away the region obligations registered up until + // now, or ideally use them as assumptions when proving the region obligations + // that we get from proving the negative predicate below. + let ref infcx = root_infcx.fork(); + let ocx = ObligationCtxt::new(infcx); + + ocx.register_obligation(Obligation::new( + infcx.tcx, + ObligationCause::dummy(), + param_env, + negative_predicate, + )); + if !ocx.select_all_or_error().is_empty() { return false; } - let body_def_id = body_def_id.as_local().unwrap_or(CRATE_DEF_ID); + // FIXME: We could use the assumed_wf_types from both impls, I think, + // if that wasn't implemented just for LocalDefId, and we'd need to do + // the normalization ourselves since this is totally fallible... + let outlives_env = OutlivesEnvironment::new(param_env); - let ocx = ObligationCtxt::new(&infcx); - let Ok(wf_tys) = ocx.assumed_wf_types(param_env, body_def_id) else { + let errors = infcx.resolve_regions(&outlives_env); + if !errors.is_empty() { return false; - }; + } - let outlives_env = OutlivesEnvironment::with_bounds( - param_env, - infcx.implied_bounds_tys(param_env, body_def_id, wf_tys), - ); - infcx.resolve_regions(&outlives_env).is_empty() + true } /// Returns whether all impls which would apply to the `trait_ref` @@ -506,13 +586,6 @@ trait_ref: ty::TraitRef<'tcx>, mut lazily_normalize_ty: impl FnMut(Ty<'tcx>) -> Result, E>, ) -> Result, E> { - if Some(trait_ref.def_id) == tcx.lang_items().fn_ptr_trait() { - // The only types implementing `FnPtr` are function pointers, - // so if there's no impl of `FnPtr` in the current crate, - // then such an impl will never be added in the future. - return Ok(Ok(())); - } - if orphan_check_trait_ref(trait_ref, InCrate::Remote, &mut lazily_normalize_ty)?.is_ok() { // A downstream or cousin crate is allowed to implement some // substitution of this trait-ref. @@ -817,7 +890,7 @@ } } ty::Error(_) => ControlFlow::Break(OrphanCheckEarlyExit::LocalTy(ty)), - ty::Closure(did, ..) | ty::Generator(did, ..) => { + ty::Closure(did, ..) | ty::Coroutine(did, ..) 
=> { if self.def_id_is_local(did) { ControlFlow::Break(OrphanCheckEarlyExit::LocalTy(ty)) } else { @@ -827,7 +900,7 @@ // This should only be created when checking whether we have to check whether some // auto trait impl applies. There will never be multiple impls, so we can just // act as if it were a local type here. - ty::GeneratorWitness(..) => ControlFlow::Break(OrphanCheckEarlyExit::LocalTy(ty)), + ty::CoroutineWitness(..) => ControlFlow::Break(OrphanCheckEarlyExit::LocalTy(ty)), ty::Alias(ty::Opaque, ..) => { // This merits some explanation. // Normally, opaque types are not involved when performing diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/engine.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/engine.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/engine.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/engine.rs 2023-12-21 16:55:28.000000000 +0000 @@ -37,10 +37,10 @@ (TraitSolver::Classic, false) | (TraitSolver::NextCoherence, false) => { Box::new(FulfillmentContext::new(infcx)) } - (TraitSolver::Next | TraitSolver::NextCoherence, true) => { + (TraitSolver::Classic | TraitSolver::Next | TraitSolver::NextCoherence, true) => { Box::new(NextFulfillmentCtxt::new(infcx)) } - _ => bug!( + (TraitSolver::Next, false) => bug!( "incompatible combination of -Ztrait-solver flag ({:?}) and InferCtxt::next_trait_solver ({:?})", infcx.tcx.sess.opts.unstable_opts.trait_solver, infcx.next_trait_solver() @@ -218,7 +218,7 @@ def_id: LocalDefId, ) -> Result>, ErrorGuaranteed> { self.assumed_wf_types(param_env, def_id) - .map_err(|errors| self.infcx.err_ctxt().report_fulfillment_errors(&errors)) + .map_err(|errors| self.infcx.err_ctxt().report_fulfillment_errors(errors)) } pub fn assumed_wf_types( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/ambiguity.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/ambiguity.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/ambiguity.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/ambiguity.rs 2023-12-21 16:55:28.000000000 +0000 @@ -87,7 +87,9 @@ if let ty::ClauseKind::Trait(trait_pred) = kind.skip_binder() && param_env_candidate_may_apply(kind.rebind(trait_pred)) { - if kind.rebind(trait_pred.trait_ref) == ty::Binder::dummy(ty::TraitRef::identity(tcx, trait_pred.def_id())) { + if kind.rebind(trait_pred.trait_ref) + == ty::Binder::dummy(ty::TraitRef::identity(tcx, trait_pred.def_id())) + { ambiguities.push(Ambiguity::ParamEnv(tcx.def_span(trait_pred.def_id()))) } else { ambiguities.push(Ambiguity::ParamEnv(span)) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/infer_ctxt_ext.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/infer_ctxt_ext.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/infer_ctxt_ext.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/infer_ctxt_ext.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,275 @@ +use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; +use crate::infer::InferCtxt; +use 
crate::traits::{Obligation, ObligationCause, ObligationCtxt}; +use rustc_errors::{pluralize, struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed}; +use rustc_hir as hir; +use rustc_hir::Node; +use rustc_middle::ty::{self, Ty}; +use rustc_span::{Span, DUMMY_SP}; + +use super::ArgKind; + +pub use rustc_infer::traits::error_reporting::*; + +pub trait InferCtxtExt<'tcx> { + /// Given some node representing a fn-like thing in the HIR map, + /// returns a span and `ArgKind` information that describes the + /// arguments it expects. This can be supplied to + /// `report_arg_count_mismatch`. + fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Option, Vec)>; + + /// Reports an error when the number of arguments needed by a + /// trait match doesn't match the number that the expression + /// provides. + fn report_arg_count_mismatch( + &self, + span: Span, + found_span: Option, + expected_args: Vec, + found_args: Vec, + is_closure: bool, + closure_pipe_span: Option, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>; + + /// Checks if the type implements one of `Fn`, `FnMut`, or `FnOnce` + /// in that order, and returns the generic type corresponding to the + /// argument of that trait (corresponding to the closure arguments). + fn type_implements_fn_trait( + &self, + param_env: ty::ParamEnv<'tcx>, + ty: ty::Binder<'tcx, Ty<'tcx>>, + polarity: ty::ImplPolarity, + ) -> Result<(ty::ClosureKind, ty::Binder<'tcx, Ty<'tcx>>), ()>; +} + +impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> { + /// Given some node representing a fn-like thing in the HIR map, + /// returns a span and `ArgKind` information that describes the + /// arguments it expects. This can be supplied to + /// `report_arg_count_mismatch`. + fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Option, Vec)> { + let sm = self.tcx.sess.source_map(); + let hir = self.tcx.hir(); + Some(match node { + Node::Expr(&hir::Expr { + kind: hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, fn_arg_span, .. }), + .. + }) => ( + fn_decl_span, + fn_arg_span, + hir.body(body) + .params + .iter() + .map(|arg| { + if let hir::Pat { kind: hir::PatKind::Tuple(ref args, _), span, .. } = + *arg.pat + { + Some(ArgKind::Tuple( + Some(span), + args.iter() + .map(|pat| { + sm.span_to_snippet(pat.span) + .ok() + .map(|snippet| (snippet, "_".to_owned())) + }) + .collect::>>()?, + )) + } else { + let name = sm.span_to_snippet(arg.pat.span).ok()?; + Some(ArgKind::Arg(name, "_".to_owned())) + } + }) + .collect::>>()?, + ), + Node::Item(&hir::Item { kind: hir::ItemKind::Fn(ref sig, ..), .. }) + | Node::ImplItem(&hir::ImplItem { kind: hir::ImplItemKind::Fn(ref sig, _), .. }) + | Node::TraitItem(&hir::TraitItem { + kind: hir::TraitItemKind::Fn(ref sig, _), .. + }) => ( + sig.span, + None, + sig.decl + .inputs + .iter() + .map(|arg| match arg.kind { + hir::TyKind::Tup(ref tys) => ArgKind::Tuple( + Some(arg.span), + vec![("_".to_owned(), "_".to_owned()); tys.len()], + ), + _ => ArgKind::empty(), + }) + .collect::>(), + ), + Node::Ctor(ref variant_data) => { + let span = variant_data.ctor_hir_id().map_or(DUMMY_SP, |id| hir.span(id)); + (span, None, vec![ArgKind::empty(); variant_data.fields().len()]) + } + _ => panic!("non-FnLike node found: {node:?}"), + }) + } + + /// Reports an error when the number of arguments needed by a + /// trait match doesn't match the number that the expression + /// provides. 
+ fn report_arg_count_mismatch( + &self, + span: Span, + found_span: Option, + expected_args: Vec, + found_args: Vec, + is_closure: bool, + closure_arg_span: Option, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { + let kind = if is_closure { "closure" } else { "function" }; + + let args_str = |arguments: &[ArgKind], other: &[ArgKind]| { + let arg_length = arguments.len(); + let distinct = matches!(other, &[ArgKind::Tuple(..)]); + match (arg_length, arguments.get(0)) { + (1, Some(ArgKind::Tuple(_, fields))) => { + format!("a single {}-tuple as argument", fields.len()) + } + _ => format!( + "{} {}argument{}", + arg_length, + if distinct && arg_length > 1 { "distinct " } else { "" }, + pluralize!(arg_length) + ), + } + }; + + let expected_str = args_str(&expected_args, &found_args); + let found_str = args_str(&found_args, &expected_args); + + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0593, + "{} is expected to take {}, but it takes {}", + kind, + expected_str, + found_str, + ); + + err.span_label(span, format!("expected {kind} that takes {expected_str}")); + + if let Some(found_span) = found_span { + err.span_label(found_span, format!("takes {found_str}")); + + // Suggest to take and ignore the arguments with expected_args_length `_`s if + // found arguments is empty (assume the user just wants to ignore args in this case). + // For example, if `expected_args_length` is 2, suggest `|_, _|`. + if found_args.is_empty() && is_closure { + let underscores = vec!["_"; expected_args.len()].join(", "); + err.span_suggestion_verbose( + closure_arg_span.unwrap_or(found_span), + format!( + "consider changing the closure to take and ignore the expected argument{}", + pluralize!(expected_args.len()) + ), + format!("|{underscores}|"), + Applicability::MachineApplicable, + ); + } + + if let &[ArgKind::Tuple(_, ref fields)] = &found_args[..] { + if fields.len() == expected_args.len() { + let sugg = fields + .iter() + .map(|(name, _)| name.to_owned()) + .collect::>() + .join(", "); + err.span_suggestion_verbose( + found_span, + "change the closure to take multiple arguments instead of a single tuple", + format!("|{sugg}|"), + Applicability::MachineApplicable, + ); + } + } + if let &[ArgKind::Tuple(_, ref fields)] = &expected_args[..] + && fields.len() == found_args.len() + && is_closure + { + let sugg = format!( + "|({}){}|", + found_args + .iter() + .map(|arg| match arg { + ArgKind::Arg(name, _) => name.to_owned(), + _ => "_".to_owned(), + }) + .collect::>() + .join(", "), + // add type annotations if available + if found_args.iter().any(|arg| match arg { + ArgKind::Arg(_, ty) => ty != "_", + _ => false, + }) { + format!( + ": ({})", + fields + .iter() + .map(|(_, ty)| ty.to_owned()) + .collect::>() + .join(", ") + ) + } else { + String::new() + }, + ); + err.span_suggestion_verbose( + found_span, + "change the closure to accept a tuple instead of individual arguments", + sugg, + Applicability::MachineApplicable, + ); + } + } + + err + } + + fn type_implements_fn_trait( + &self, + param_env: ty::ParamEnv<'tcx>, + ty: ty::Binder<'tcx, Ty<'tcx>>, + polarity: ty::ImplPolarity, + ) -> Result<(ty::ClosureKind, ty::Binder<'tcx, Ty<'tcx>>), ()> { + self.commit_if_ok(|_| { + for trait_def_id in [ + self.tcx.lang_items().fn_trait(), + self.tcx.lang_items().fn_mut_trait(), + self.tcx.lang_items().fn_once_trait(), + ] { + let Some(trait_def_id) = trait_def_id else { continue }; + // Make a fresh inference variable so we can determine what the substitutions + // of the trait are. 
+ let var = self.next_ty_var(TypeVariableOrigin { + span: DUMMY_SP, + kind: TypeVariableOriginKind::MiscVariable, + }); + // FIXME(effects) + let trait_ref = ty::TraitRef::new(self.tcx, trait_def_id, [ty.skip_binder(), var]); + let obligation = Obligation::new( + self.tcx, + ObligationCause::dummy(), + param_env, + ty.rebind(ty::TraitPredicate { trait_ref, polarity }), + ); + let ocx = ObligationCtxt::new(self); + ocx.register_obligation(obligation); + if ocx.select_all_or_error().is_empty() { + return Ok(( + self.tcx + .fn_trait_kind_from_def_id(trait_def_id) + .expect("expected to map DefId to ClosureKind"), + ty.rebind(self.resolve_vars_if_possible(var)), + )); + } + } + + Err(()) + }) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,57 +1,25 @@ +// ignore-tidy-filelength :( + mod ambiguity; +mod infer_ctxt_ext; pub mod on_unimplemented; pub mod suggestions; +mod type_err_ctxt_ext; -use super::{ - FulfillmentError, FulfillmentErrorCode, MismatchedProjectionTypes, Obligation, ObligationCause, - ObligationCauseCode, ObligationCtxt, OutputTypeParameterMismatch, Overflow, - PredicateObligation, SelectionError, TraitNotObjectSafe, -}; -use crate::errors::{ClosureFnMutLabel, ClosureFnOnceLabel, ClosureKindMismatch}; -use crate::infer::error_reporting::{TyCategory, TypeAnnotationNeeded as ErrorCode}; -use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; -use crate::infer::{self, InferCtxt}; -use crate::solve::{GenerateProofTree, InferCtxtEvalExt, UseGlobalCache}; -use crate::traits::query::evaluate_obligation::InferCtxtExt as _; -use crate::traits::specialize::to_pretty_impl_header; -use crate::traits::NormalizeExt; -use on_unimplemented::{AppendConstMessage, OnUnimplementedNote, TypeErrCtxtExt as _}; -use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; -use rustc_errors::{ - pluralize, struct_span_err, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, - MultiSpan, Style, -}; +use super::{Obligation, ObligationCause, ObligationCauseCode, PredicateObligation}; +use crate::infer::InferCtxt; +use crate::solve::{GenerateProofTree, InferCtxtEvalExt}; use rustc_hir as hir; -use rustc_hir::def::Namespace; -use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_hir::def_id::DefId; use rustc_hir::intravisit::Visitor; -use rustc_hir::{GenericParam, Item, Node}; -use rustc_infer::infer::error_reporting::TypeErrCtxt; -use rustc_infer::infer::{InferOk, TypeTrace}; -use rustc_middle::traits::select::OverflowError; use rustc_middle::traits::solve::Goal; -use rustc_middle::traits::{DefiningAnchor, SelectionOutputTypeParameterMismatch}; -use rustc_middle::ty::abstract_const::NotConstEvaluatable; -use rustc_middle::ty::error::{ExpectedFound, TypeError}; -use rustc_middle::ty::fold::{BottomUpFolder, TypeFolder, TypeSuperFoldable}; -use rustc_middle::ty::print::{with_forced_trimmed_paths, FmtPrinter, Print}; -use rustc_middle::ty::{ - self, SubtypePredicate, ToPolyTraitRef, ToPredicate, TraitRef, Ty, TyCtxt, TypeFoldable, - TypeVisitable, TypeVisitableExt, -}; -use rustc_session::config::{DumpSolverProofTree, TraitSolver}; -use 
rustc_session::Limit; -use rustc_span::def_id::LOCAL_CRATE; -use rustc_span::symbol::sym; -use rustc_span::{ExpnKind, Span, DUMMY_SP}; -use std::borrow::Cow; -use std::fmt; +use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_span::Span; use std::io::Write; -use std::iter; use std::ops::ControlFlow; -use suggestions::TypeErrCtxtExt as _; -pub use rustc_infer::traits::error_reporting::*; +pub use self::infer_ctxt_ext::*; +pub use self::type_err_ctxt_ext::*; // When outputting impl candidates, prefer showing those that are more similar. // @@ -67,6 +35,7 @@ pub struct ImplCandidate<'tcx> { pub trait_ref: ty::TraitRef<'tcx>, pub similarity: CandidateSimilarity, + impl_def_id: DefId, } enum GetSafeTransmuteErrorAndReason { @@ -74,3356 +43,8 @@ Error { err_msg: String, safe_transmute_explanation: String }, } -pub trait InferCtxtExt<'tcx> { - /// Given some node representing a fn-like thing in the HIR map, - /// returns a span and `ArgKind` information that describes the - /// arguments it expects. This can be supplied to - /// `report_arg_count_mismatch`. - fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Option, Vec)>; - - /// Reports an error when the number of arguments needed by a - /// trait match doesn't match the number that the expression - /// provides. - fn report_arg_count_mismatch( - &self, - span: Span, - found_span: Option, - expected_args: Vec, - found_args: Vec, - is_closure: bool, - closure_pipe_span: Option, - ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>; - - /// Checks if the type implements one of `Fn`, `FnMut`, or `FnOnce` - /// in that order, and returns the generic type corresponding to the - /// argument of that trait (corresponding to the closure arguments). - fn type_implements_fn_trait( - &self, - param_env: ty::ParamEnv<'tcx>, - ty: ty::Binder<'tcx, Ty<'tcx>>, - polarity: ty::ImplPolarity, - ) -> Result<(ty::ClosureKind, ty::Binder<'tcx, Ty<'tcx>>), ()>; -} - -pub trait TypeErrCtxtExt<'tcx> { - fn build_overflow_error( - &self, - predicate: &T, - span: Span, - suggest_increasing_limit: bool, - ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> - where - T: fmt::Display - + TypeFoldable> - + Print<'tcx, FmtPrinter<'tcx, 'tcx>, Output = FmtPrinter<'tcx, 'tcx>>, - >>::Error: std::fmt::Debug; - - fn report_overflow_error( - &self, - predicate: &T, - span: Span, - suggest_increasing_limit: bool, - mutate: impl FnOnce(&mut Diagnostic), - ) -> ! - where - T: fmt::Display - + TypeFoldable> - + Print<'tcx, FmtPrinter<'tcx, 'tcx>, Output = FmtPrinter<'tcx, 'tcx>>, - >>::Error: std::fmt::Debug; - - fn report_overflow_no_abort(&self, obligation: PredicateObligation<'tcx>) -> ErrorGuaranteed; - - fn report_fulfillment_errors(&self, errors: &[FulfillmentError<'tcx>]) -> ErrorGuaranteed; - - fn report_overflow_obligation( - &self, - obligation: &Obligation<'tcx, T>, - suggest_increasing_limit: bool, - ) -> ! - where - T: ToPredicate<'tcx> + Clone; - - fn suggest_new_overflow_limit(&self, err: &mut Diagnostic); - - fn report_overflow_obligation_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> !; - - /// The `root_obligation` parameter should be the `root_obligation` field - /// from a `FulfillmentError`. If no `FulfillmentError` is available, - /// then it should be the same as `obligation`. 
- fn report_selection_error( - &self, - obligation: PredicateObligation<'tcx>, - root_obligation: &PredicateObligation<'tcx>, - error: &SelectionError<'tcx>, - ); - - fn report_const_param_not_wf( - &self, - ty: Ty<'tcx>, - obligation: &PredicateObligation<'tcx>, - ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>; -} - -impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> { - /// Given some node representing a fn-like thing in the HIR map, - /// returns a span and `ArgKind` information that describes the - /// arguments it expects. This can be supplied to - /// `report_arg_count_mismatch`. - fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Option, Vec)> { - let sm = self.tcx.sess.source_map(); - let hir = self.tcx.hir(); - Some(match node { - Node::Expr(&hir::Expr { - kind: hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, fn_arg_span, .. }), - .. - }) => ( - fn_decl_span, - fn_arg_span, - hir.body(body) - .params - .iter() - .map(|arg| { - if let hir::Pat { kind: hir::PatKind::Tuple(ref args, _), span, .. } = - *arg.pat - { - Some(ArgKind::Tuple( - Some(span), - args.iter() - .map(|pat| { - sm.span_to_snippet(pat.span) - .ok() - .map(|snippet| (snippet, "_".to_owned())) - }) - .collect::>>()?, - )) - } else { - let name = sm.span_to_snippet(arg.pat.span).ok()?; - Some(ArgKind::Arg(name, "_".to_owned())) - } - }) - .collect::>>()?, - ), - Node::Item(&hir::Item { kind: hir::ItemKind::Fn(ref sig, ..), .. }) - | Node::ImplItem(&hir::ImplItem { kind: hir::ImplItemKind::Fn(ref sig, _), .. }) - | Node::TraitItem(&hir::TraitItem { - kind: hir::TraitItemKind::Fn(ref sig, _), .. - }) => ( - sig.span, - None, - sig.decl - .inputs - .iter() - .map(|arg| match arg.kind { - hir::TyKind::Tup(ref tys) => ArgKind::Tuple( - Some(arg.span), - vec![("_".to_owned(), "_".to_owned()); tys.len()], - ), - _ => ArgKind::empty(), - }) - .collect::>(), - ), - Node::Ctor(ref variant_data) => { - let span = variant_data.ctor_hir_id().map_or(DUMMY_SP, |id| hir.span(id)); - (span, None, vec![ArgKind::empty(); variant_data.fields().len()]) - } - _ => panic!("non-FnLike node found: {node:?}"), - }) - } - - /// Reports an error when the number of arguments needed by a - /// trait match doesn't match the number that the expression - /// provides. 
- fn report_arg_count_mismatch( - &self, - span: Span, - found_span: Option, - expected_args: Vec, - found_args: Vec, - is_closure: bool, - closure_arg_span: Option, - ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { - let kind = if is_closure { "closure" } else { "function" }; - - let args_str = |arguments: &[ArgKind], other: &[ArgKind]| { - let arg_length = arguments.len(); - let distinct = matches!(other, &[ArgKind::Tuple(..)]); - match (arg_length, arguments.get(0)) { - (1, Some(ArgKind::Tuple(_, fields))) => { - format!("a single {}-tuple as argument", fields.len()) - } - _ => format!( - "{} {}argument{}", - arg_length, - if distinct && arg_length > 1 { "distinct " } else { "" }, - pluralize!(arg_length) - ), - } - }; - - let expected_str = args_str(&expected_args, &found_args); - let found_str = args_str(&found_args, &expected_args); - - let mut err = struct_span_err!( - self.tcx.sess, - span, - E0593, - "{} is expected to take {}, but it takes {}", - kind, - expected_str, - found_str, - ); - - err.span_label(span, format!("expected {kind} that takes {expected_str}")); - - if let Some(found_span) = found_span { - err.span_label(found_span, format!("takes {found_str}")); - - // Suggest to take and ignore the arguments with expected_args_length `_`s if - // found arguments is empty (assume the user just wants to ignore args in this case). - // For example, if `expected_args_length` is 2, suggest `|_, _|`. - if found_args.is_empty() && is_closure { - let underscores = vec!["_"; expected_args.len()].join(", "); - err.span_suggestion_verbose( - closure_arg_span.unwrap_or(found_span), - format!( - "consider changing the closure to take and ignore the expected argument{}", - pluralize!(expected_args.len()) - ), - format!("|{underscores}|"), - Applicability::MachineApplicable, - ); - } - - if let &[ArgKind::Tuple(_, ref fields)] = &found_args[..] { - if fields.len() == expected_args.len() { - let sugg = fields - .iter() - .map(|(name, _)| name.to_owned()) - .collect::>() - .join(", "); - err.span_suggestion_verbose( - found_span, - "change the closure to take multiple arguments instead of a single tuple", - format!("|{sugg}|"), - Applicability::MachineApplicable, - ); - } - } - if let &[ArgKind::Tuple(_, ref fields)] = &expected_args[..] - && fields.len() == found_args.len() - && is_closure - { - let sugg = format!( - "|({}){}|", - found_args - .iter() - .map(|arg| match arg { - ArgKind::Arg(name, _) => name.to_owned(), - _ => "_".to_owned(), - }) - .collect::>() - .join(", "), - // add type annotations if available - if found_args.iter().any(|arg| match arg { - ArgKind::Arg(_, ty) => ty != "_", - _ => false, - }) { - format!( - ": ({})", - fields - .iter() - .map(|(_, ty)| ty.to_owned()) - .collect::>() - .join(", ") - ) - } else { - String::new() - }, - ); - err.span_suggestion_verbose( - found_span, - "change the closure to accept a tuple instead of individual arguments", - sugg, - Applicability::MachineApplicable, - ); - } - } - - err - } - - fn type_implements_fn_trait( - &self, - param_env: ty::ParamEnv<'tcx>, - ty: ty::Binder<'tcx, Ty<'tcx>>, - polarity: ty::ImplPolarity, - ) -> Result<(ty::ClosureKind, ty::Binder<'tcx, Ty<'tcx>>), ()> { - self.commit_if_ok(|_| { - for trait_def_id in [ - self.tcx.lang_items().fn_trait(), - self.tcx.lang_items().fn_mut_trait(), - self.tcx.lang_items().fn_once_trait(), - ] { - let Some(trait_def_id) = trait_def_id else { continue }; - // Make a fresh inference variable so we can determine what the substitutions - // of the trait are. 
- let var = self.next_ty_var(TypeVariableOrigin { - span: DUMMY_SP, - kind: TypeVariableOriginKind::MiscVariable, - }); - // FIXME(effects) - let trait_ref = ty::TraitRef::new(self.tcx, trait_def_id, [ty.skip_binder(), var]); - let obligation = Obligation::new( - self.tcx, - ObligationCause::dummy(), - param_env, - ty.rebind(ty::TraitPredicate { trait_ref, polarity }), - ); - let ocx = ObligationCtxt::new(self); - ocx.register_obligation(obligation); - if ocx.select_all_or_error().is_empty() { - return Ok(( - self.tcx - .fn_trait_kind_from_def_id(trait_def_id) - .expect("expected to map DefId to ClosureKind"), - ty.rebind(self.resolve_vars_if_possible(var)), - )); - } - } - - Err(()) - }) - } -} - -impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { - fn report_fulfillment_errors(&self, errors: &[FulfillmentError<'tcx>]) -> ErrorGuaranteed { - #[derive(Debug)] - struct ErrorDescriptor<'tcx> { - predicate: ty::Predicate<'tcx>, - index: Option, // None if this is an old error - } - - let mut error_map: FxIndexMap<_, Vec<_>> = self - .reported_trait_errors - .borrow() - .iter() - .map(|(&span, predicates)| { - ( - span, - predicates - .iter() - .map(|&predicate| ErrorDescriptor { predicate, index: None }) - .collect(), - ) - }) - .collect(); - - for (index, error) in errors.iter().enumerate() { - // We want to ignore desugarings here: spans are equivalent even - // if one is the result of a desugaring and the other is not. - let mut span = error.obligation.cause.span; - let expn_data = span.ctxt().outer_expn_data(); - if let ExpnKind::Desugaring(_) = expn_data.kind { - span = expn_data.call_site; - } - - error_map.entry(span).or_default().push(ErrorDescriptor { - predicate: error.obligation.predicate, - index: Some(index), - }); - - self.reported_trait_errors - .borrow_mut() - .entry(span) - .or_default() - .push(error.obligation.predicate); - } - - // We do this in 2 passes because we want to display errors in order, though - // maybe it *is* better to sort errors by span or something. - let mut is_suppressed = vec![false; errors.len()]; - for (_, error_set) in error_map.iter() { - // We want to suppress "duplicate" errors with the same span. - for error in error_set { - if let Some(index) = error.index { - // Suppress errors that are either: - // 1) strictly implied by another error. - // 2) implied by an error with a smaller index. - for error2 in error_set { - if error2.index.is_some_and(|index2| is_suppressed[index2]) { - // Avoid errors being suppressed by already-suppressed - // errors, to prevent all errors from being suppressed - // at once. - continue; - } - - if self.error_implies(error2.predicate, error.predicate) - && !(error2.index >= error.index - && self.error_implies(error.predicate, error2.predicate)) - { - info!("skipping {:?} (implied by {:?})", error, error2); - is_suppressed[index] = true; - break; - } - } - } - } - } - - for from_expansion in [false, true] { - for (error, suppressed) in iter::zip(errors, &is_suppressed) { - if !suppressed && error.obligation.cause.span.from_expansion() == from_expansion { - self.report_fulfillment_error(error); - } - } - } - - self.tcx.sess.delay_span_bug(DUMMY_SP, "expected fulfillment errors") - } - - /// Reports that an overflow has occurred and halts compilation. 
We - /// halt compilation unconditionally because it is important that - /// overflows never be masked -- they basically represent computations - /// whose result could not be truly determined and thus we can't say - /// if the program type checks or not -- and they are unusual - /// occurrences in any case. - fn report_overflow_error( - &self, - predicate: &T, - span: Span, - suggest_increasing_limit: bool, - mutate: impl FnOnce(&mut Diagnostic), - ) -> ! - where - T: fmt::Display - + TypeFoldable> - + Print<'tcx, FmtPrinter<'tcx, 'tcx>, Output = FmtPrinter<'tcx, 'tcx>>, - >>::Error: std::fmt::Debug, - { - let mut err = self.build_overflow_error(predicate, span, suggest_increasing_limit); - mutate(&mut err); - err.emit(); - - self.tcx.sess.abort_if_errors(); - bug!(); - } - - fn build_overflow_error( - &self, - predicate: &T, - span: Span, - suggest_increasing_limit: bool, - ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> - where - T: fmt::Display - + TypeFoldable> - + Print<'tcx, FmtPrinter<'tcx, 'tcx>, Output = FmtPrinter<'tcx, 'tcx>>, - >>::Error: std::fmt::Debug, - { - let predicate = self.resolve_vars_if_possible(predicate.clone()); - let mut pred_str = predicate.to_string(); - - if pred_str.len() > 50 { - // We don't need to save the type to a file, we will be talking about this type already - // in a separate note when we explain the obligation, so it will be available that way. - pred_str = predicate - .print(FmtPrinter::new_with_limit( - self.tcx, - Namespace::TypeNS, - rustc_session::Limit(6), - )) - .unwrap() - .into_buffer(); - } - let mut err = struct_span_err!( - self.tcx.sess, - span, - E0275, - "overflow evaluating the requirement `{}`", - pred_str, - ); - - if suggest_increasing_limit { - self.suggest_new_overflow_limit(&mut err); - } - - err - } - - /// Reports that an overflow has occurred and halts compilation. We - /// halt compilation unconditionally because it is important that - /// overflows never be masked -- they basically represent computations - /// whose result could not be truly determined and thus we can't say - /// if the program type checks or not -- and they are unusual - /// occurrences in any case. - fn report_overflow_obligation( - &self, - obligation: &Obligation<'tcx, T>, - suggest_increasing_limit: bool, - ) -> ! - where - T: ToPredicate<'tcx> + Clone, - { - let predicate = obligation.predicate.clone().to_predicate(self.tcx); - let predicate = self.resolve_vars_if_possible(predicate); - self.report_overflow_error( - &predicate, - obligation.cause.span, - suggest_increasing_limit, - |err| { - self.note_obligation_cause_code( - obligation.cause.body_id, - err, - predicate, - obligation.param_env, - obligation.cause.code(), - &mut vec![], - &mut Default::default(), - ); - }, - ); - } - - fn suggest_new_overflow_limit(&self, err: &mut Diagnostic) { - let suggested_limit = match self.tcx.recursion_limit() { - Limit(0) => Limit(2), - limit => limit * 2, - }; - err.help(format!( - "consider increasing the recursion limit by adding a \ - `#![recursion_limit = \"{}\"]` attribute to your crate (`{}`)", - suggested_limit, - self.tcx.crate_name(LOCAL_CRATE), - )); - } - - /// Reports that a cycle was detected which led to overflow and halts - /// compilation. This is equivalent to `report_overflow_obligation` except - /// that we can give a more helpful error message (and, in particular, - /// we do not suggest increasing the overflow limit, which is not - /// going to help). - fn report_overflow_obligation_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> ! 
{ - let cycle = self.resolve_vars_if_possible(cycle.to_owned()); - assert!(!cycle.is_empty()); - - debug!(?cycle, "report_overflow_error_cycle"); - - // The 'deepest' obligation is most likely to have a useful - // cause 'backtrace' - self.report_overflow_obligation( - cycle.iter().max_by_key(|p| p.recursion_depth).unwrap(), - false, - ); - } - - fn report_overflow_no_abort(&self, obligation: PredicateObligation<'tcx>) -> ErrorGuaranteed { - let obligation = self.resolve_vars_if_possible(obligation); - let mut err = self.build_overflow_error(&obligation.predicate, obligation.cause.span, true); - self.note_obligation_cause(&mut err, &obligation); - self.point_at_returns_when_relevant(&mut err, &obligation); - err.emit() - } - - fn report_selection_error( - &self, - mut obligation: PredicateObligation<'tcx>, - root_obligation: &PredicateObligation<'tcx>, - error: &SelectionError<'tcx>, - ) { - let tcx = self.tcx; - - if tcx.sess.opts.unstable_opts.dump_solver_proof_tree == DumpSolverProofTree::OnError { - dump_proof_tree(root_obligation, self.infcx); - } - - let mut span = obligation.cause.span; - // FIXME: statically guarantee this by tainting after the diagnostic is emitted - self.set_tainted_by_errors( - tcx.sess.delay_span_bug(span, "`report_selection_error` did not emit an error"), - ); - - let mut err = match *error { - SelectionError::Unimplemented => { - // If this obligation was generated as a result of well-formedness checking, see if we - // can get a better error message by performing HIR-based well-formedness checking. - if let ObligationCauseCode::WellFormed(Some(wf_loc)) = - root_obligation.cause.code().peel_derives() - && !obligation.predicate.has_non_region_infer() - { - if let Some(cause) = self - .tcx - .diagnostic_hir_wf_check((tcx.erase_regions(obligation.predicate), *wf_loc)) - { - obligation.cause = cause.clone(); - span = obligation.cause.span; - } - } - - if let ObligationCauseCode::CompareImplItemObligation { - impl_item_def_id, - trait_item_def_id, - kind: _, - } = *obligation.cause.code() - { - self.report_extra_impl_obligation( - span, - impl_item_def_id, - trait_item_def_id, - &format!("`{}`", obligation.predicate), - ) - .emit(); - return; - } - - // Report a const-param specific error - if let ObligationCauseCode::ConstParam(ty) = *obligation.cause.code().peel_derives() - { - self.report_const_param_not_wf(ty, &obligation).emit(); - return; - } - - let bound_predicate = obligation.predicate.kind(); - match bound_predicate.skip_binder() { - ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_predicate)) => { - let trait_predicate = bound_predicate.rebind(trait_predicate); - let trait_predicate = self.resolve_vars_if_possible(trait_predicate); - - // FIXME(effects) - let predicate_is_const = false; - - if self.tcx.sess.has_errors().is_some() - && trait_predicate.references_error() - { - return; - } - let trait_ref = trait_predicate.to_poly_trait_ref(); - - let (post_message, pre_message, type_def) = self - .get_parent_trait_ref(obligation.cause.code()) - .map(|(t, s)| { - ( - format!(" in `{t}`"), - format!("within `{t}`, "), - s.map(|s| (format!("within this `{t}`"), s)), - ) - }) - .unwrap_or_default(); - - let OnUnimplementedNote { - message, - label, - note, - parent_label, - append_const_msg, - } = self.on_unimplemented_note(trait_ref, &obligation); - let have_alt_message = message.is_some() || label.is_some(); - let is_try_conversion = self.is_try_conversion(span, trait_ref.def_id()); - let is_unsize = - Some(trait_ref.def_id()) == 
self.tcx.lang_items().unsize_trait(); - let (message, note, append_const_msg) = if is_try_conversion { - ( - Some(format!( - "`?` couldn't convert the error to `{}`", - trait_ref.skip_binder().self_ty(), - )), - Some( - "the question mark operation (`?`) implicitly performs a \ - conversion on the error value using the `From` trait" - .to_owned(), - ), - Some(AppendConstMessage::Default), - ) - } else { - (message, note, append_const_msg) - }; - - let err_msg = self.get_standard_error_message( - &trait_predicate, - message, - predicate_is_const, - append_const_msg, - post_message, - ); - - let (err_msg, safe_transmute_explanation) = if Some(trait_ref.def_id()) - == self.tcx.lang_items().transmute_trait() - { - // Recompute the safe transmute reason and use that for the error reporting - match self.get_safe_transmute_error_and_reason( - obligation.clone(), - trait_ref, - span, - ) { - GetSafeTransmuteErrorAndReason::Silent => return, - GetSafeTransmuteErrorAndReason::Error { - err_msg, - safe_transmute_explanation, - } => (err_msg, Some(safe_transmute_explanation)), - } - } else { - (err_msg, None) - }; - - let mut err = struct_span_err!(self.tcx.sess, span, E0277, "{}", err_msg); - - if is_try_conversion && let Some(ret_span) = self.return_type_span(&obligation) { - err.span_label( - ret_span, - format!( - "expected `{}` because of this", - trait_ref.skip_binder().self_ty() - ), - ); - } - - if Some(trait_ref.def_id()) == tcx.lang_items().tuple_trait() { - self.add_tuple_trait_message( - &obligation.cause.code().peel_derives(), - &mut err, - ); - } - - if Some(trait_ref.def_id()) == tcx.lang_items().drop_trait() - && predicate_is_const - { - err.note("`~const Drop` was renamed to `~const Destruct`"); - err.note("See for more details"); - } - - let explanation = get_explanation_based_on_obligation( - &obligation, - trait_ref, - &trait_predicate, - pre_message, - ); - - self.check_for_binding_assigned_block_without_tail_expression( - &obligation, - &mut err, - trait_predicate, - ); - if self.suggest_add_reference_to_arg( - &obligation, - &mut err, - trait_predicate, - have_alt_message, - ) { - self.note_obligation_cause(&mut err, &obligation); - err.emit(); - return; - } - if let Some(s) = label { - // If it has a custom `#[rustc_on_unimplemented]` - // error message, let's display it as the label! - err.span_label(span, s); - if !matches!(trait_ref.skip_binder().self_ty().kind(), ty::Param(_)) { - // When the self type is a type param We don't need to "the trait - // `std::marker::Sized` is not implemented for `T`" as we will point - // at the type param with a label to suggest constraining it. 
- err.help(explanation); - } - } else if let Some(custom_explanation) = safe_transmute_explanation { - err.span_label(span, custom_explanation); - } else { - err.span_label(span, explanation); - } - - if let ObligationCauseCode::Coercion { source, target } = - *obligation.cause.code().peel_derives() - { - if Some(trait_ref.def_id()) == self.tcx.lang_items().sized_trait() { - self.suggest_borrowing_for_object_cast( - &mut err, - &root_obligation, - source, - target, - ); - } - } - - let UnsatisfiedConst(unsatisfied_const) = self - .maybe_add_note_for_unsatisfied_const( - &obligation, - trait_ref, - &trait_predicate, - &mut err, - span, - ); - - if let Some((msg, span)) = type_def { - err.span_label(span, msg); - } - if let Some(s) = note { - // If it has a custom `#[rustc_on_unimplemented]` note, let's display it - err.note(s); - } - if let Some(s) = parent_label { - let body = obligation.cause.body_id; - err.span_label(tcx.def_span(body), s); - } - - self.suggest_floating_point_literal(&obligation, &mut err, &trait_ref); - self.suggest_dereferencing_index(&obligation, &mut err, trait_predicate); - let mut suggested = - self.suggest_dereferences(&obligation, &mut err, trait_predicate); - suggested |= self.suggest_fn_call(&obligation, &mut err, trait_predicate); - let impl_candidates = self.find_similar_impl_candidates(trait_predicate); - suggested = if let &[cand] = &impl_candidates[..] { - let cand = cand.trait_ref; - if let (ty::FnPtr(_), ty::FnDef(..)) = - (cand.self_ty().kind(), trait_ref.self_ty().skip_binder().kind()) - { - err.span_suggestion( - span.shrink_to_hi(), - format!( - "the trait `{}` is implemented for fn pointer `{}`, try casting using `as`", - cand.print_only_trait_path(), - cand.self_ty(), - ), - format!(" as {}", cand.self_ty()), - Applicability::MaybeIncorrect, - ); - true - } else { - false - } - } else { - false - } || suggested; - suggested |= - self.suggest_remove_reference(&obligation, &mut err, trait_predicate); - suggested |= self.suggest_semicolon_removal( - &obligation, - &mut err, - span, - trait_predicate, - ); - self.note_version_mismatch(&mut err, &trait_ref); - self.suggest_remove_await(&obligation, &mut err); - self.suggest_derive(&obligation, &mut err, trait_predicate); - - if Some(trait_ref.def_id()) == tcx.lang_items().try_trait() { - self.suggest_await_before_try( - &mut err, - &obligation, - trait_predicate, - span, - ); - } - - if self.suggest_add_clone_to_arg(&obligation, &mut err, trait_predicate) { - err.emit(); - return; - } - - if self.suggest_impl_trait(&mut err, &obligation, trait_predicate) { - err.emit(); - return; - } - - if is_unsize { - // If the obligation failed due to a missing implementation of the - // `Unsize` trait, give a pointer to why that might be the case - err.note( - "all implementations of `Unsize` are provided \ - automatically by the compiler, see \ - \ - for more information", - ); - } - - let is_fn_trait = tcx.is_fn_trait(trait_ref.def_id()); - let is_target_feature_fn = if let ty::FnDef(def_id, _) = - *trait_ref.skip_binder().self_ty().kind() - { - !self.tcx.codegen_fn_attrs(def_id).target_features.is_empty() - } else { - false - }; - if is_fn_trait && is_target_feature_fn { - err.note( - "`#[target_feature]` functions do not implement the `Fn` traits", - ); - } - - self.try_to_add_help_message( - &obligation, - trait_ref, - &trait_predicate, - &mut err, - span, - is_fn_trait, - suggested, - unsatisfied_const, - ); - - // Changing mutability doesn't make a difference to whether we have - // an `Unsize` impl (Fixes 
ICE in #71036) - if !is_unsize { - self.suggest_change_mut(&obligation, &mut err, trait_predicate); - } - - // If this error is due to `!: Trait` not implemented but `(): Trait` is - // implemented, and fallback has occurred, then it could be due to a - // variable that used to fallback to `()` now falling back to `!`. Issue a - // note informing about the change in behaviour. - if trait_predicate.skip_binder().self_ty().is_never() - && self.fallback_has_occurred - { - let predicate = trait_predicate.map_bound(|trait_pred| { - trait_pred.with_self_ty(self.tcx, Ty::new_unit(self.tcx)) - }); - let unit_obligation = obligation.with(tcx, predicate); - if self.predicate_may_hold(&unit_obligation) { - err.note( - "this error might have been caused by changes to \ - Rust's type-inference algorithm (see issue #48950 \ - \ - for more information)", - ); - err.help("did you intend to use the type `()` here instead?"); - } - } - - self.explain_hrtb_projection(&mut err, trait_predicate, obligation.param_env, &obligation.cause); - self.suggest_desugaring_async_fn_in_trait(&mut err, trait_ref); - - // Return early if the trait is Debug or Display and the invocation - // originates within a standard library macro, because the output - // is otherwise overwhelming and unhelpful (see #85844 for an - // example). - - let in_std_macro = - match obligation.cause.span.ctxt().outer_expn_data().macro_def_id { - Some(macro_def_id) => { - let crate_name = tcx.crate_name(macro_def_id.krate); - crate_name == sym::std || crate_name == sym::core - } - None => false, - }; - - if in_std_macro - && matches!( - self.tcx.get_diagnostic_name(trait_ref.def_id()), - Some(sym::Debug | sym::Display) - ) - { - err.emit(); - return; - } - - err - } - - ty::PredicateKind::Subtype(predicate) => { - // Errors for Subtype predicates show up as - // `FulfillmentErrorCode::CodeSubtypeError`, - // not selection error. - span_bug!(span, "subtype requirement gave wrong error: `{:?}`", predicate) - } - - ty::PredicateKind::Coerce(predicate) => { - // Errors for Coerce predicates show up as - // `FulfillmentErrorCode::CodeSubtypeError`, - // not selection error. - span_bug!(span, "coerce requirement gave wrong error: `{:?}`", predicate) - } - - ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(..)) - | ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(..)) => { - span_bug!( - span, - "outlives clauses should not error outside borrowck. obligation: `{:?}`", - obligation - ) - } - - ty::PredicateKind::Clause(ty::ClauseKind::Projection(..)) => { - span_bug!( - span, - "projection clauses should be implied from elsewhere. obligation: `{:?}`", - obligation - ) - } - - ty::PredicateKind::ObjectSafe(trait_def_id) => { - let violations = self.tcx.object_safety_violations(trait_def_id); - report_object_safety_error(self.tcx, span, trait_def_id, violations) - } - - ty::PredicateKind::ClosureKind(closure_def_id, closure_args, kind) => { - let found_kind = self.closure_kind(closure_args).unwrap(); - self.report_closure_error(&obligation, closure_def_id, found_kind, kind) - } - - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(ty)) => { - let ty = self.resolve_vars_if_possible(ty); - match self.tcx.sess.opts.unstable_opts.trait_solver { - TraitSolver::Classic => { - // WF predicates cannot themselves make - // errors. They can only block due to - // ambiguity; otherwise, they always - // degenerate into other obligations - // (which may fail). 
- span_bug!(span, "WF predicate not satisfied for {:?}", ty); - } - TraitSolver::Next | TraitSolver::NextCoherence => { - // FIXME: we'll need a better message which takes into account - // which bounds actually failed to hold. - self.tcx.sess.struct_span_err( - span, - format!("the type `{ty}` is not well-formed"), - ) - } - } - } - - ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(..)) => { - // Errors for `ConstEvaluatable` predicates show up as - // `SelectionError::ConstEvalFailure`, - // not `Unimplemented`. - span_bug!( - span, - "const-evaluatable requirement gave wrong error: `{:?}`", - obligation - ) - } - - ty::PredicateKind::ConstEquate(..) => { - // Errors for `ConstEquate` predicates show up as - // `SelectionError::ConstEvalFailure`, - // not `Unimplemented`. - span_bug!( - span, - "const-equate requirement gave wrong error: `{:?}`", - obligation - ) - } - - ty::PredicateKind::Ambiguous => span_bug!(span, "ambiguous"), - - ty::PredicateKind::AliasRelate(..) => span_bug!( - span, - "AliasRelate predicate should never be the predicate cause of a SelectionError" - ), - - ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => { - let mut diag = self.tcx.sess.struct_span_err( - span, - format!("the constant `{ct}` is not of type `{ty}`"), - ); - self.note_type_err( - &mut diag, - &obligation.cause, - None, - None, - TypeError::Sorts(ty::error::ExpectedFound::new(true, ty, ct.ty())), - false, - false, - ); - diag - } - } - } - - OutputTypeParameterMismatch(box SelectionOutputTypeParameterMismatch { - found_trait_ref, - expected_trait_ref, - terr: terr @ TypeError::CyclicTy(_), - }) => self.report_type_parameter_mismatch_cyclic_type_error( - &obligation, - found_trait_ref, - expected_trait_ref, - terr, - ), - OutputTypeParameterMismatch(box SelectionOutputTypeParameterMismatch { - found_trait_ref, - expected_trait_ref, - terr: _, - }) => { - match self.report_type_parameter_mismatch_error( - &obligation, - span, - found_trait_ref, - expected_trait_ref, - ) { - Some(err) => err, - None => return, - } - } - - SelectionError::OpaqueTypeAutoTraitLeakageUnknown(def_id) => self.report_opaque_type_auto_trait_leakage( - &obligation, - def_id, - ), - - TraitNotObjectSafe(did) => { - let violations = self.tcx.object_safety_violations(did); - report_object_safety_error(self.tcx, span, did, violations) - } - - SelectionError::NotConstEvaluatable(NotConstEvaluatable::MentionsInfer) => { - bug!( - "MentionsInfer should have been handled in `traits/fulfill.rs` or `traits/select/mod.rs`" - ) - } - SelectionError::NotConstEvaluatable(NotConstEvaluatable::MentionsParam) => { - match self.report_not_const_evaluatable_error(&obligation, span) { - Some(err) => err, - None => return, - } - } - - // Already reported in the query. - SelectionError::NotConstEvaluatable(NotConstEvaluatable::Error(_)) | - // Already reported. 
- Overflow(OverflowError::Error(_)) => return, - - Overflow(_) => { - bug!("overflow should be handled before the `report_selection_error` path"); - } - SelectionError::ErrorReporting => { - bug!("ErrorReporting Overflow should not reach `report_selection_err` call") - } - }; - - self.note_obligation_cause(&mut err, &obligation); - self.point_at_returns_when_relevant(&mut err, &obligation); - err.emit(); - } - - fn report_const_param_not_wf( - &self, - ty: Ty<'tcx>, - obligation: &PredicateObligation<'tcx>, - ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { - let span = obligation.cause.span; - - let mut diag = match ty.kind() { - _ if ty.has_param() => { - span_bug!(span, "const param tys cannot mention other generic parameters"); - } - ty::Float(_) => { - struct_span_err!( - self.tcx.sess, - span, - E0741, - "`{ty}` is forbidden as the type of a const generic parameter", - ) - } - ty::FnPtr(_) => { - struct_span_err!( - self.tcx.sess, - span, - E0741, - "using function pointers as const generic parameters is forbidden", - ) - } - ty::RawPtr(_) => { - struct_span_err!( - self.tcx.sess, - span, - E0741, - "using raw pointers as const generic parameters is forbidden", - ) - } - ty::Adt(def, _) => { - // We should probably see if we're *allowed* to derive `ConstParamTy` on the type... - let mut diag = struct_span_err!( - self.tcx.sess, - span, - E0741, - "`{ty}` must implement `ConstParamTy` to be used as the type of a const generic parameter", - ); - // Only suggest derive if this isn't a derived obligation, - // and the struct is local. - if let Some(span) = self.tcx.hir().span_if_local(def.did()) - && obligation.cause.code().parent().is_none() - { - if ty.is_structural_eq_shallow(self.tcx) { - diag.span_suggestion( - span, - "add `#[derive(ConstParamTy)]` to the struct", - "#[derive(ConstParamTy)]\n", - Applicability::MachineApplicable, - ); - } else { - // FIXME(adt_const_params): We should check there's not already an - // overlapping `Eq`/`PartialEq` impl. - diag.span_suggestion( - span, - "add `#[derive(ConstParamTy, PartialEq, Eq)]` to the struct", - "#[derive(ConstParamTy, PartialEq, Eq)]\n", - Applicability::MachineApplicable, - ); - } - } - diag - } - _ => { - struct_span_err!( - self.tcx.sess, - span, - E0741, - "`{ty}` can't be used as a const parameter type", - ) - } - }; - - let mut code = obligation.cause.code(); - let mut pred = obligation.predicate.to_opt_poly_trait_pred(); - while let Some((next_code, next_pred)) = code.parent() { - if let Some(pred) = pred { - let pred = self.instantiate_binder_with_placeholders(pred); - diag.note(format!( - "`{}` must implement `{}`, but it does not", - pred.self_ty(), - pred.print_modifiers_and_trait_path() - )); - } - code = next_code; - pred = next_pred; - } - - diag - } -} - -trait InferCtxtPrivExt<'tcx> { - // returns if `cond` not occurring implies that `error` does not occur - i.e., that - // `error` occurring implies that `cond` occurs. 
-    fn error_implies(&self, cond: ty::Predicate<'tcx>, error: ty::Predicate<'tcx>) -> bool;
-
-    fn report_fulfillment_error(&self, error: &FulfillmentError<'tcx>);
-
-    fn report_projection_error(
-        &self,
-        obligation: &PredicateObligation<'tcx>,
-        error: &MismatchedProjectionTypes<'tcx>,
-    );
-
-    fn maybe_detailed_projection_msg(
-        &self,
-        pred: ty::ProjectionPredicate<'tcx>,
-        normalized_ty: ty::Term<'tcx>,
-        expected_ty: ty::Term<'tcx>,
-    ) -> Option<String>;
-
-    fn fuzzy_match_tys(
-        &self,
-        a: Ty<'tcx>,
-        b: Ty<'tcx>,
-        ignoring_lifetimes: bool,
-    ) -> Option<CandidateSimilarity>;
-
-    fn describe_generator(&self, body_id: hir::BodyId) -> Option<&'static str>;
-
-    fn find_similar_impl_candidates(
-        &self,
-        trait_pred: ty::PolyTraitPredicate<'tcx>,
-    ) -> Vec<ImplCandidate<'tcx>>;
-
-    fn report_similar_impl_candidates(
-        &self,
-        impl_candidates: &[ImplCandidate<'tcx>],
-        trait_ref: ty::PolyTraitRef<'tcx>,
-        body_def_id: LocalDefId,
-        err: &mut Diagnostic,
-        other: bool,
-    ) -> bool;
-
-    fn report_similar_impl_candidates_for_root_obligation(
-        &self,
-        obligation: &PredicateObligation<'tcx>,
-        trait_predicate: ty::Binder<'tcx, ty::TraitPredicate<'tcx>>,
-        body_def_id: LocalDefId,
-        err: &mut Diagnostic,
-    );
-
-    /// Gets the parent trait chain start
-    fn get_parent_trait_ref(
-        &self,
-        code: &ObligationCauseCode<'tcx>,
-    ) -> Option<(String, Option<Span>)>;
-
-    /// If the `Self` type of the unsatisfied trait `trait_ref` implements a trait
-    /// with the same path as `trait_ref`, a help message about
-    /// a probable version mismatch is added to `err`
-    fn note_version_mismatch(
-        &self,
-        err: &mut Diagnostic,
-        trait_ref: &ty::PolyTraitRef<'tcx>,
-    ) -> bool;
-
-    /// Creates a `PredicateObligation` with `new_self_ty` replacing the existing type in the
-    /// `trait_ref`.
-    ///
-    /// For this to work, `new_self_ty` must have no escaping bound variables.
-    fn mk_trait_obligation_with_new_self_ty(
-        &self,
-        param_env: ty::ParamEnv<'tcx>,
-        trait_ref_and_ty: ty::Binder<'tcx, (ty::TraitPredicate<'tcx>, Ty<'tcx>)>,
-    ) -> PredicateObligation<'tcx>;
-
-    fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>);
-
-    fn predicate_can_apply(
-        &self,
-        param_env: ty::ParamEnv<'tcx>,
-        pred: ty::PolyTraitPredicate<'tcx>,
-    ) -> bool;
-
-    fn note_obligation_cause(&self, err: &mut Diagnostic, obligation: &PredicateObligation<'tcx>);
-
-    fn suggest_unsized_bound_if_applicable(
-        &self,
-        err: &mut Diagnostic,
-        obligation: &PredicateObligation<'tcx>,
-    );
-
-    fn annotate_source_of_ambiguity(
-        &self,
-        err: &mut Diagnostic,
-        impls: &[ambiguity::Ambiguity],
-        predicate: ty::Predicate<'tcx>,
-    );
-
-    fn maybe_suggest_unsized_generics(&self, err: &mut Diagnostic, span: Span, node: Node<'tcx>);
-
-    fn maybe_indirection_for_unsized(
-        &self,
-        err: &mut Diagnostic,
-        item: &'tcx Item<'tcx>,
-        param: &'tcx GenericParam<'tcx>,
-    ) -> bool;
-
-    fn is_recursive_obligation(
-        &self,
-        obligated_types: &mut Vec<Ty<'tcx>>,
-        cause_code: &ObligationCauseCode<'tcx>,
-    ) -> bool;
-
-    fn get_standard_error_message(
-        &self,
-        trait_predicate: &ty::PolyTraitPredicate<'tcx>,
-        message: Option<String>,
-        predicate_is_const: bool,
-        append_const_msg: Option<AppendConstMessage>,
-        post_message: String,
-    ) -> String;
-
-    fn get_safe_transmute_error_and_reason(
-        &self,
-        obligation: PredicateObligation<'tcx>,
-        trait_ref: ty::PolyTraitRef<'tcx>,
-        span: Span,
-    ) -> GetSafeTransmuteErrorAndReason;
-
-    fn add_tuple_trait_message(
-        &self,
-        obligation_cause_code: &ObligationCauseCode<'tcx>,
-        err: &mut Diagnostic,
-    );
-
-    fn try_to_add_help_message(
-        &self,
-        obligation: &PredicateObligation<'tcx>,
-        trait_ref: ty::PolyTraitRef<'tcx>,
-        trait_predicate: &ty::PolyTraitPredicate<'tcx>,
-        err: &mut Diagnostic,
-        span: Span,
-        is_fn_trait: bool,
-        suggested: bool,
-        unsatisfied_const: bool,
-    );
-
-    fn add_help_message_for_fn_trait(
-        &self,
-        trait_ref: ty::PolyTraitRef<'tcx>,
-        err: &mut Diagnostic,
-        implemented_kind: ty::ClosureKind,
-        params: ty::Binder<'tcx, Ty<'tcx>>,
-    );
-
-    fn maybe_add_note_for_unsatisfied_const(
-        &self,
-        obligation: &PredicateObligation<'tcx>,
-        trait_ref: ty::PolyTraitRef<'tcx>,
-        trait_predicate: &ty::PolyTraitPredicate<'tcx>,
-        err: &mut Diagnostic,
-        span: Span,
-    ) -> UnsatisfiedConst;
-
-    fn report_closure_error(
-        &self,
-        obligation: &PredicateObligation<'tcx>,
-        closure_def_id: DefId,
-        found_kind: ty::ClosureKind,
-        kind: ty::ClosureKind,
-    ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>;
-
-    fn report_type_parameter_mismatch_cyclic_type_error(
-        &self,
-        obligation: &PredicateObligation<'tcx>,
-        found_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>,
-        expected_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>,
-        terr: TypeError<'tcx>,
-    ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>;
-
-    fn report_opaque_type_auto_trait_leakage(
-        &self,
-        obligation: &PredicateObligation<'tcx>,
-        def_id: DefId,
-    ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>;
-
-    fn report_type_parameter_mismatch_error(
-        &self,
-        obligation: &PredicateObligation<'tcx>,
-        span: Span,
-        found_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>,
-        expected_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>,
-    ) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>>;
-
-    fn report_not_const_evaluatable_error(
-        &self,
-        obligation: &PredicateObligation<'tcx>,
-        span: Span,
-    ) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>>;
-}
-
-impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
-    // returns if `cond` not occurring implies that `error` does not occur - i.e., that
// `error` occurring implies that `cond` occurs. - fn error_implies(&self, cond: ty::Predicate<'tcx>, error: ty::Predicate<'tcx>) -> bool { - if cond == error { - return true; - } - - // FIXME: It should be possible to deal with `ForAll` in a cleaner way. - let bound_error = error.kind(); - let (cond, error) = match (cond.kind().skip_binder(), bound_error.skip_binder()) { - ( - ty::PredicateKind::Clause(ty::ClauseKind::Trait(..)), - ty::PredicateKind::Clause(ty::ClauseKind::Trait(error)), - ) => (cond, bound_error.rebind(error)), - _ => { - // FIXME: make this work in other cases too. - return false; - } - }; - - for pred in super::elaborate(self.tcx, std::iter::once(cond)) { - let bound_predicate = pred.kind(); - if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(implication)) = - bound_predicate.skip_binder() - { - let error = error.to_poly_trait_ref(); - let implication = bound_predicate.rebind(implication.trait_ref); - // FIXME: I'm just not taking associated types at all here. - // Eventually I'll need to implement param-env-aware - // `Γ₁ ⊦ φ₁ => Γ₂ ⊦ φ₂` logic. - let param_env = ty::ParamEnv::empty(); - if self.can_sub(param_env, error, implication) { - debug!("error_implies: {:?} -> {:?} -> {:?}", cond, error, implication); - return true; - } - } - } - - false - } - - #[instrument(skip(self), level = "debug")] - fn report_fulfillment_error(&self, error: &FulfillmentError<'tcx>) { - if self.tcx.sess.opts.unstable_opts.dump_solver_proof_tree == DumpSolverProofTree::OnError { - dump_proof_tree(&error.root_obligation, self.infcx); - } - - match error.code { - FulfillmentErrorCode::CodeSelectionError(ref selection_error) => { - self.report_selection_error( - error.obligation.clone(), - &error.root_obligation, - selection_error, - ); - } - FulfillmentErrorCode::CodeProjectionError(ref e) => { - self.report_projection_error(&error.obligation, e); - } - FulfillmentErrorCode::CodeAmbiguity { overflow: false } => { - self.maybe_report_ambiguity(&error.obligation); - } - FulfillmentErrorCode::CodeAmbiguity { overflow: true } => { - self.report_overflow_no_abort(error.obligation.clone()); - } - FulfillmentErrorCode::CodeSubtypeError(ref expected_found, ref err) => { - self.report_mismatched_types( - &error.obligation.cause, - expected_found.expected, - expected_found.found, - *err, - ) - .emit(); - } - FulfillmentErrorCode::CodeConstEquateError(ref expected_found, ref err) => { - let mut diag = self.report_mismatched_consts( - &error.obligation.cause, - expected_found.expected, - expected_found.found, - *err, - ); - let code = error.obligation.cause.code().peel_derives().peel_match_impls(); - if let ObligationCauseCode::BindingObligation(..) - | ObligationCauseCode::ItemObligation(..) - | ObligationCauseCode::ExprBindingObligation(..) - | ObligationCauseCode::ExprItemObligation(..) 
= code - { - self.note_obligation_cause_code( - error.obligation.cause.body_id, - &mut diag, - error.obligation.predicate, - error.obligation.param_env, - code, - &mut vec![], - &mut Default::default(), - ); - } - diag.emit(); - } - FulfillmentErrorCode::CodeCycle(ref cycle) => { - self.report_overflow_obligation_cycle(cycle); - } - } - } - - #[instrument(level = "debug", skip_all)] - fn report_projection_error( - &self, - obligation: &PredicateObligation<'tcx>, - error: &MismatchedProjectionTypes<'tcx>, - ) { - let predicate = self.resolve_vars_if_possible(obligation.predicate); - - if predicate.references_error() { - return; - } - - self.probe(|_| { - let ocx = ObligationCtxt::new(self); - - // try to find the mismatched types to report the error with. - // - // this can fail if the problem was higher-ranked, in which - // cause I have no idea for a good error message. - let bound_predicate = predicate.kind(); - let (values, err) = if let ty::PredicateKind::Clause(ty::ClauseKind::Projection(data)) = - bound_predicate.skip_binder() - { - let data = self.instantiate_binder_with_fresh_vars( - obligation.cause.span, - infer::LateBoundRegionConversionTime::HigherRankedType, - bound_predicate.rebind(data), - ); - let unnormalized_term = match data.term.unpack() { - ty::TermKind::Ty(_) => Ty::new_projection( - self.tcx, - data.projection_ty.def_id, - data.projection_ty.args, - ) - .into(), - ty::TermKind::Const(ct) => ty::Const::new_unevaluated( - self.tcx, - ty::UnevaluatedConst { - def: data.projection_ty.def_id, - args: data.projection_ty.args, - }, - ct.ty(), - ) - .into(), - }; - // FIXME(-Ztrait-solver=next): For diagnostic purposes, it would be nice - // to deeply normalize this type. - let normalized_term = - ocx.normalize(&obligation.cause, obligation.param_env, unnormalized_term); - - debug!(?obligation.cause, ?obligation.param_env); - - debug!(?normalized_term, data.ty = ?data.term); - - let is_normalized_term_expected = !matches!( - obligation.cause.code().peel_derives(), - ObligationCauseCode::ItemObligation(_) - | ObligationCauseCode::BindingObligation(_, _) - | ObligationCauseCode::ExprItemObligation(..) - | ObligationCauseCode::ExprBindingObligation(..) - | ObligationCauseCode::Coercion { .. } - | ObligationCauseCode::OpaqueType - ); - - // constrain inference variables a bit more to nested obligations from normalize so - // we can have more helpful errors. - // - // we intentionally drop errors from normalization here, - // since the normalization is just done to improve the error message. 
- let _ = ocx.select_where_possible(); - - if let Err(new_err) = ocx.eq_exp( - &obligation.cause, - obligation.param_env, - is_normalized_term_expected, - normalized_term, - data.term, - ) { - (Some((data, is_normalized_term_expected, normalized_term, data.term)), new_err) - } else { - (None, error.err) - } - } else { - (None, error.err) - }; - - let msg = values - .and_then(|(predicate, _, normalized_term, expected_term)| { - self.maybe_detailed_projection_msg(predicate, normalized_term, expected_term) - }) - .unwrap_or_else(|| { - with_forced_trimmed_paths!(format!( - "type mismatch resolving `{}`", - self.resolve_vars_if_possible(predicate) - .print(FmtPrinter::new_with_limit( - self.tcx, - Namespace::TypeNS, - rustc_session::Limit(10), - )) - .unwrap() - .into_buffer() - )) - }); - let mut diag = struct_span_err!(self.tcx.sess, obligation.cause.span, E0271, "{msg}"); - - let secondary_span = (|| { - let ty::PredicateKind::Clause(ty::ClauseKind::Projection(proj)) = - predicate.kind().skip_binder() - else { - return None; - }; - - let trait_assoc_item = self.tcx.opt_associated_item(proj.projection_ty.def_id)?; - let trait_assoc_ident = trait_assoc_item.ident(self.tcx); - - let mut associated_items = vec![]; - self.tcx.for_each_relevant_impl( - self.tcx.trait_of_item(proj.projection_ty.def_id)?, - proj.projection_ty.self_ty(), - |impl_def_id| { - associated_items.extend( - self.tcx - .associated_items(impl_def_id) - .in_definition_order() - .find(|assoc| assoc.ident(self.tcx) == trait_assoc_ident), - ); - }, - ); - - let [associated_item]: &[ty::AssocItem] = &associated_items[..] else { - return None; - }; - match self.tcx.hir().get_if_local(associated_item.def_id) { - Some( - hir::Node::TraitItem(hir::TraitItem { - kind: hir::TraitItemKind::Type(_, Some(ty)), - .. - }) - | hir::Node::ImplItem(hir::ImplItem { - kind: hir::ImplItemKind::Type(ty), - .. - }), - ) => Some(( - ty.span, - with_forced_trimmed_paths!(Cow::from(format!( - "type mismatch resolving `{}`", - self.resolve_vars_if_possible(predicate) - .print(FmtPrinter::new_with_limit( - self.tcx, - Namespace::TypeNS, - rustc_session::Limit(5), - )) - .unwrap() - .into_buffer() - ))), - )), - _ => None, - } - })(); - - self.note_type_err( - &mut diag, - &obligation.cause, - secondary_span, - values.map(|(_, is_normalized_ty_expected, normalized_ty, expected_ty)| { - infer::ValuePairs::Terms(ExpectedFound::new( - is_normalized_ty_expected, - normalized_ty, - expected_ty, - )) - }), - err, - true, - false, - ); - self.note_obligation_cause(&mut diag, obligation); - diag.emit(); - }); - } - - fn maybe_detailed_projection_msg( - &self, - pred: ty::ProjectionPredicate<'tcx>, - normalized_ty: ty::Term<'tcx>, - expected_ty: ty::Term<'tcx>, - ) -> Option { - let trait_def_id = pred.projection_ty.trait_def_id(self.tcx); - let self_ty = pred.projection_ty.self_ty(); - - with_forced_trimmed_paths! 
{ - if Some(pred.projection_ty.def_id) == self.tcx.lang_items().fn_once_output() { - let fn_kind = self_ty.prefix_string(self.tcx); - let item = match self_ty.kind() { - ty::FnDef(def, _) => self.tcx.item_name(*def).to_string(), - _ => self_ty.to_string(), - }; - Some(format!( - "expected `{item}` to be a {fn_kind} that returns `{expected_ty}`, but it \ - returns `{normalized_ty}`", - )) - } else if Some(trait_def_id) == self.tcx.lang_items().future_trait() { - Some(format!( - "expected `{self_ty}` to be a future that resolves to `{expected_ty}`, but it \ - resolves to `{normalized_ty}`" - )) - } else if Some(trait_def_id) == self.tcx.get_diagnostic_item(sym::Iterator) { - Some(format!( - "expected `{self_ty}` to be an iterator that yields `{expected_ty}`, but it \ - yields `{normalized_ty}`" - )) - } else { - None - } - } - } - - fn fuzzy_match_tys( - &self, - mut a: Ty<'tcx>, - mut b: Ty<'tcx>, - ignoring_lifetimes: bool, - ) -> Option { - /// returns the fuzzy category of a given type, or None - /// if the type can be equated to any type. - fn type_category(tcx: TyCtxt<'_>, t: Ty<'_>) -> Option { - match t.kind() { - ty::Bool => Some(0), - ty::Char => Some(1), - ty::Str => Some(2), - ty::Adt(def, _) if Some(def.did()) == tcx.lang_items().string() => Some(2), - ty::Int(..) - | ty::Uint(..) - | ty::Float(..) - | ty::Infer(ty::IntVar(..) | ty::FloatVar(..)) => Some(4), - ty::Ref(..) | ty::RawPtr(..) => Some(5), - ty::Array(..) | ty::Slice(..) => Some(6), - ty::FnDef(..) | ty::FnPtr(..) => Some(7), - ty::Dynamic(..) => Some(8), - ty::Closure(..) => Some(9), - ty::Tuple(..) => Some(10), - ty::Param(..) => Some(11), - ty::Alias(ty::Projection, ..) => Some(12), - ty::Alias(ty::Inherent, ..) => Some(13), - ty::Alias(ty::Opaque, ..) => Some(14), - ty::Alias(ty::Weak, ..) => Some(15), - ty::Never => Some(16), - ty::Adt(..) => Some(17), - ty::Generator(..) => Some(18), - ty::Foreign(..) => Some(19), - ty::GeneratorWitness(..) => Some(20), - ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) | ty::Error(_) => None, - } - } - - let strip_references = |mut t: Ty<'tcx>| -> Ty<'tcx> { - loop { - match t.kind() { - ty::Ref(_, inner, _) | ty::RawPtr(ty::TypeAndMut { ty: inner, .. }) => { - t = *inner - } - _ => break t, - } - } - }; - - if !ignoring_lifetimes { - a = strip_references(a); - b = strip_references(b); - } - - let cat_a = type_category(self.tcx, a)?; - let cat_b = type_category(self.tcx, b)?; - if a == b { - Some(CandidateSimilarity::Exact { ignoring_lifetimes }) - } else if cat_a == cat_b { - match (a.kind(), b.kind()) { - (ty::Adt(def_a, _), ty::Adt(def_b, _)) => def_a == def_b, - (ty::Foreign(def_a), ty::Foreign(def_b)) => def_a == def_b, - // Matching on references results in a lot of unhelpful - // suggestions, so let's just not do that for now. - // - // We still upgrade successful matches to `ignoring_lifetimes: true` - // to prioritize that impl. - (ty::Ref(..) | ty::RawPtr(..), ty::Ref(..) 
| ty::RawPtr(..)) => { - self.fuzzy_match_tys(a, b, true).is_some() - } - _ => true, - } - .then_some(CandidateSimilarity::Fuzzy { ignoring_lifetimes }) - } else if ignoring_lifetimes { - None - } else { - self.fuzzy_match_tys(a, b, true) - } - } - - fn describe_generator(&self, body_id: hir::BodyId) -> Option<&'static str> { - self.tcx.hir().body(body_id).generator_kind.map(|gen_kind| match gen_kind { - hir::GeneratorKind::Gen => "a generator", - hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block) => "an async block", - hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Fn) => "an async function", - hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Closure) => "an async closure", - }) - } - - fn find_similar_impl_candidates( - &self, - trait_pred: ty::PolyTraitPredicate<'tcx>, - ) -> Vec> { - let mut candidates: Vec<_> = self - .tcx - .all_impls(trait_pred.def_id()) - .filter_map(|def_id| { - if self.tcx.impl_polarity(def_id) == ty::ImplPolarity::Negative - || !self.tcx.is_user_visible_dep(def_id.krate) - { - return None; - } - - let imp = self.tcx.impl_trait_ref(def_id).unwrap().skip_binder(); - - self.fuzzy_match_tys(trait_pred.skip_binder().self_ty(), imp.self_ty(), false) - .map(|similarity| ImplCandidate { trait_ref: imp, similarity }) - }) - .collect(); - if candidates.iter().any(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. })) { - // If any of the candidates is a perfect match, we don't want to show all of them. - // This is particularly relevant for the case of numeric types (as they all have the - // same category). - candidates.retain(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. })); - } - candidates - } - - fn report_similar_impl_candidates( - &self, - impl_candidates: &[ImplCandidate<'tcx>], - trait_ref: ty::PolyTraitRef<'tcx>, - body_def_id: LocalDefId, - err: &mut Diagnostic, - other: bool, - ) -> bool { - let other = if other { "other " } else { "" }; - let report = |candidates: Vec>, err: &mut Diagnostic| { - if candidates.is_empty() { - return false; - } - if let &[cand] = &candidates[..] { - let (desc, mention_castable) = - match (cand.self_ty().kind(), trait_ref.self_ty().skip_binder().kind()) { - (ty::FnPtr(_), ty::FnDef(..)) => { - (" implemented for fn pointer `", ", cast using `as`") - } - (ty::FnPtr(_), _) => (" implemented for fn pointer `", ""), - _ => (" implemented for `", ""), - }; - err.highlighted_help(vec![ - (format!("the trait `{}` ", cand.print_only_trait_path()), Style::NoStyle), - ("is".to_string(), Style::Highlight), - (desc.to_string(), Style::NoStyle), - (cand.self_ty().to_string(), Style::Highlight), - ("`".to_string(), Style::NoStyle), - (mention_castable.to_string(), Style::NoStyle), - ]); - return true; - } - let trait_ref = TraitRef::identity(self.tcx, candidates[0].def_id); - // Check if the trait is the same in all cases. If so, we'll only show the type. - let mut traits: Vec<_> = - candidates.iter().map(|c| c.print_only_trait_path().to_string()).collect(); - traits.sort(); - traits.dedup(); - // FIXME: this could use a better heuristic, like just checking - // that args[1..] is the same. 
- let all_traits_equal = traits.len() == 1; - - let candidates: Vec = candidates - .into_iter() - .map(|c| { - if all_traits_equal { - format!("\n {}", c.self_ty()) - } else { - format!("\n {c}") - } - }) - .collect(); - - let end = if candidates.len() <= 9 { candidates.len() } else { 8 }; - err.help(format!( - "the following {other}types implement trait `{}`:{}{}", - trait_ref.print_only_trait_path(), - candidates[..end].join(""), - if candidates.len() > 9 { - format!("\nand {} others", candidates.len() - 8) - } else { - String::new() - } - )); - true - }; - - let def_id = trait_ref.def_id(); - if impl_candidates.is_empty() { - if self.tcx.trait_is_auto(def_id) - || self.tcx.lang_items().iter().any(|(_, id)| id == def_id) - || self.tcx.get_diagnostic_name(def_id).is_some() - { - // Mentioning implementers of `Copy`, `Debug` and friends is not useful. - return false; - } - let mut impl_candidates: Vec<_> = self - .tcx - .all_impls(def_id) - // Ignore automatically derived impls and `!Trait` impls. - .filter(|&def_id| { - self.tcx.impl_polarity(def_id) != ty::ImplPolarity::Negative - || self.tcx.is_automatically_derived(def_id) - }) - .filter_map(|def_id| self.tcx.impl_trait_ref(def_id)) - .map(ty::EarlyBinder::instantiate_identity) - .filter(|trait_ref| { - let self_ty = trait_ref.self_ty(); - // Avoid mentioning type parameters. - if let ty::Param(_) = self_ty.kind() { - false - } - // Avoid mentioning types that are private to another crate - else if let ty::Adt(def, _) = self_ty.peel_refs().kind() { - // FIXME(compiler-errors): This could be generalized, both to - // be more granular, and probably look past other `#[fundamental]` - // types, too. - self.tcx.visibility(def.did()).is_accessible_from(body_def_id, self.tcx) - } else { - true - } - }) - .collect(); - - impl_candidates.sort(); - impl_candidates.dedup(); - return report(impl_candidates, err); - } - - // Sort impl candidates so that ordering is consistent for UI tests. - // because the ordering of `impl_candidates` may not be deterministic: - // https://github.com/rust-lang/rust/pull/57475#issuecomment-455519507 - // - // Prefer more similar candidates first, then sort lexicographically - // by their normalized string representation. - let mut impl_candidates: Vec<_> = impl_candidates - .iter() - .cloned() - .map(|mut cand| { - // Fold the consts so that they shows up as, e.g., `10` - // instead of `core::::array::{impl#30}::{constant#0}`. - cand.trait_ref = cand.trait_ref.fold_with(&mut BottomUpFolder { - tcx: self.tcx, - ty_op: |ty| ty, - lt_op: |lt| lt, - ct_op: |ct| ct.normalize(self.tcx, ty::ParamEnv::empty()), - }); - cand - }) - .collect(); - impl_candidates.sort_by_key(|cand| (cand.similarity, cand.trait_ref)); - impl_candidates.dedup(); - - report(impl_candidates.into_iter().map(|cand| cand.trait_ref).collect(), err) - } - - fn report_similar_impl_candidates_for_root_obligation( - &self, - obligation: &PredicateObligation<'tcx>, - trait_predicate: ty::Binder<'tcx, ty::TraitPredicate<'tcx>>, - body_def_id: LocalDefId, - err: &mut Diagnostic, - ) { - // This is *almost* equivalent to - // `obligation.cause.code().peel_derives()`, but it gives us the - // trait predicate for that corresponding root obligation. This - // lets us get a derived obligation from a type parameter, like - // when calling `string.strip_suffix(p)` where `p` is *not* an - // implementer of `Pattern<'_>`. 
- let mut code = obligation.cause.code(); - let mut trait_pred = trait_predicate; - let mut peeled = false; - while let Some((parent_code, parent_trait_pred)) = code.parent() { - code = parent_code; - if let Some(parent_trait_pred) = parent_trait_pred { - trait_pred = parent_trait_pred; - peeled = true; - } - } - let def_id = trait_pred.def_id(); - // Mention *all* the `impl`s for the *top most* obligation, the - // user might have meant to use one of them, if any found. We skip - // auto-traits or fundamental traits that might not be exactly what - // the user might expect to be presented with. Instead this is - // useful for less general traits. - if peeled - && !self.tcx.trait_is_auto(def_id) - && !self.tcx.lang_items().iter().any(|(_, id)| id == def_id) - { - let trait_ref = trait_pred.to_poly_trait_ref(); - let impl_candidates = self.find_similar_impl_candidates(trait_pred); - self.report_similar_impl_candidates( - &impl_candidates, - trait_ref, - body_def_id, - err, - true, - ); - } - } - - /// Gets the parent trait chain start - fn get_parent_trait_ref( - &self, - code: &ObligationCauseCode<'tcx>, - ) -> Option<(String, Option)> { - match code { - ObligationCauseCode::BuiltinDerivedObligation(data) => { - let parent_trait_ref = self.resolve_vars_if_possible(data.parent_trait_pred); - match self.get_parent_trait_ref(&data.parent_code) { - Some(t) => Some(t), - None => { - let ty = parent_trait_ref.skip_binder().self_ty(); - let span = TyCategory::from_ty(self.tcx, ty) - .map(|(_, def_id)| self.tcx.def_span(def_id)); - Some((ty.to_string(), span)) - } - } - } - ObligationCauseCode::FunctionArgumentObligation { parent_code, .. } => { - self.get_parent_trait_ref(&parent_code) - } - _ => None, - } - } - - /// If the `Self` type of the unsatisfied trait `trait_ref` implements a trait - /// with the same path as `trait_ref`, a help message about - /// a probable version mismatch is added to `err` - fn note_version_mismatch( - &self, - err: &mut Diagnostic, - trait_ref: &ty::PolyTraitRef<'tcx>, - ) -> bool { - let get_trait_impls = |trait_def_id| { - let mut trait_impls = vec![]; - self.tcx.for_each_relevant_impl( - trait_def_id, - trait_ref.skip_binder().self_ty(), - |impl_def_id| { - trait_impls.push(impl_def_id); - }, - ); - trait_impls - }; - - let required_trait_path = self.tcx.def_path_str(trait_ref.def_id()); - let traits_with_same_path: std::collections::BTreeSet<_> = self - .tcx - .all_traits() - .filter(|trait_def_id| *trait_def_id != trait_ref.def_id()) - .filter(|trait_def_id| self.tcx.def_path_str(*trait_def_id) == required_trait_path) - .collect(); - let mut suggested = false; - for trait_with_same_path in traits_with_same_path { - let trait_impls = get_trait_impls(trait_with_same_path); - if trait_impls.is_empty() { - continue; - } - let impl_spans: Vec<_> = - trait_impls.iter().map(|impl_def_id| self.tcx.def_span(*impl_def_id)).collect(); - err.span_help( - impl_spans, - format!("trait impl{} with same name found", pluralize!(trait_impls.len())), - ); - let trait_crate = self.tcx.crate_name(trait_with_same_path.krate); - let crate_msg = - format!("perhaps two different versions of crate `{trait_crate}` are being used?"); - err.note(crate_msg); - suggested = true; - } - suggested - } - - fn mk_trait_obligation_with_new_self_ty( - &self, - param_env: ty::ParamEnv<'tcx>, - trait_ref_and_ty: ty::Binder<'tcx, (ty::TraitPredicate<'tcx>, Ty<'tcx>)>, - ) -> PredicateObligation<'tcx> { - let trait_pred = - trait_ref_and_ty.map_bound(|(tr, new_self_ty)| tr.with_self_ty(self.tcx, 
new_self_ty)); - - Obligation::new(self.tcx, ObligationCause::dummy(), param_env, trait_pred) - } - - #[instrument(skip(self), level = "debug")] - fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>) { - // Unable to successfully determine, probably means - // insufficient type information, but could mean - // ambiguous impls. The latter *ought* to be a - // coherence violation, so we don't report it here. - - let predicate = self.resolve_vars_if_possible(obligation.predicate); - let span = obligation.cause.span; - - debug!(?predicate, obligation.cause.code = ?obligation.cause.code()); - - // Ambiguity errors are often caused as fallout from earlier errors. - // We ignore them if this `infcx` is tainted in some cases below. - - let bound_predicate = predicate.kind(); - let mut err = match bound_predicate.skip_binder() { - ty::PredicateKind::Clause(ty::ClauseKind::Trait(data)) => { - let trait_ref = bound_predicate.rebind(data.trait_ref); - debug!(?trait_ref); - - if predicate.references_error() { - return; - } - - // This is kind of a hack: it frequently happens that some earlier - // error prevents types from being fully inferred, and then we get - // a bunch of uninteresting errors saying something like " doesn't implement Sized". It may even be true that we - // could just skip over all checks where the self-ty is an - // inference variable, but I was afraid that there might be an - // inference variable created, registered as an obligation, and - // then never forced by writeback, and hence by skipping here we'd - // be ignoring the fact that we don't KNOW the type works - // out. Though even that would probably be harmless, given that - // we're only talking about builtin traits, which are known to be - // inhabited. We used to check for `self.tcx.sess.has_errors()` to - // avoid inundating the user with unnecessary errors, but we now - // check upstream for type errors and don't add the obligations to - // begin with in those cases. - if self.tcx.lang_items().sized_trait() == Some(trait_ref.def_id()) { - if let None = self.tainted_by_errors() { - self.emit_inference_failure_err( - obligation.cause.body_id, - span, - trait_ref.self_ty().skip_binder().into(), - ErrorCode::E0282, - false, - ) - .emit(); - } - return; - } - - // Typically, this ambiguity should only happen if - // there are unresolved type inference variables - // (otherwise it would suggest a coherence - // failure). But given #21974 that is not necessarily - // the case -- we can have multiple where clauses that - // are only distinguished by a region, which results - // in an ambiguity even when all types are fully - // known, since we don't dispatch based on region - // relationships. - - // Pick the first substitution that still contains inference variables as the one - // we're going to emit an error for. If there are none (see above), fall back to - // a more general error. 
- let subst = data.trait_ref.args.iter().find(|s| s.has_non_region_infer()); - - let mut err = if let Some(subst) = subst { - self.emit_inference_failure_err( - obligation.cause.body_id, - span, - subst, - ErrorCode::E0283, - true, - ) - } else { - struct_span_err!( - self.tcx.sess, - span, - E0283, - "type annotations needed: cannot satisfy `{}`", - predicate, - ) - }; - - let ambiguities = ambiguity::recompute_applicable_impls( - self.infcx, - &obligation.with(self.tcx, trait_ref), - ); - let has_non_region_infer = - trait_ref.skip_binder().args.types().any(|t| !t.is_ty_or_numeric_infer()); - // It doesn't make sense to talk about applicable impls if there are more - // than a handful of them. - if ambiguities.len() > 1 && ambiguities.len() < 10 && has_non_region_infer { - if self.tainted_by_errors().is_some() && subst.is_none() { - // If `subst.is_none()`, then this is probably two param-env - // candidates or impl candidates that are equal modulo lifetimes. - // Therefore, if we've already emitted an error, just skip this - // one, since it's not particularly actionable. - err.cancel(); - return; - } - self.annotate_source_of_ambiguity(&mut err, &ambiguities, predicate); - } else { - if self.tainted_by_errors().is_some() { - err.cancel(); - return; - } - err.note(format!("cannot satisfy `{predicate}`")); - let impl_candidates = self - .find_similar_impl_candidates(predicate.to_opt_poly_trait_pred().unwrap()); - if impl_candidates.len() < 10 { - self.report_similar_impl_candidates( - impl_candidates.as_slice(), - trait_ref, - obligation.cause.body_id, - &mut err, - false, - ); - } - } - - if let ObligationCauseCode::ItemObligation(def_id) - | ObligationCauseCode::ExprItemObligation(def_id, ..) = *obligation.cause.code() - { - self.suggest_fully_qualified_path(&mut err, def_id, span, trait_ref.def_id()); - } - - if let Some(ty::GenericArgKind::Type(_)) = subst.map(|subst| subst.unpack()) - && let Some(body_id) = self.tcx.hir().maybe_body_owned_by(obligation.cause.body_id) - { - let mut expr_finder = FindExprBySpan::new(span); - expr_finder.visit_expr(&self.tcx.hir().body(body_id).value); - - if let Some(hir::Expr { - kind: hir::ExprKind::Path(hir::QPath::Resolved(None, path)), .. } - ) = expr_finder.result - && let [ - .., - trait_path_segment @ hir::PathSegment { - res: rustc_hir::def::Res::Def(rustc_hir::def::DefKind::Trait, trait_id), - .. - }, - hir::PathSegment { - ident: assoc_item_name, - res: rustc_hir::def::Res::Def(_, item_id), - .. - } - ] = path.segments - && data.trait_ref.def_id == *trait_id - && self.tcx.trait_of_item(*item_id) == Some(*trait_id) - && let None = self.tainted_by_errors() - { - let (verb, noun) = match self.tcx.associated_item(item_id).kind { - ty::AssocKind::Const => ("refer to the", "constant"), - ty::AssocKind::Fn => ("call", "function"), - ty::AssocKind::Type => ("refer to the", "type"), // this is already covered by E0223, but this single match arm doesn't hurt here - }; - - // Replace the more general E0283 with a more specific error - err.cancel(); - err = self.tcx.sess.struct_span_err_with_code( - span, - format!( - "cannot {verb} associated {noun} on trait without specifying the corresponding `impl` type", - ), - rustc_errors::error_code!(E0790), - ); - - if let Some(local_def_id) = data.trait_ref.def_id.as_local() - && let Some(hir::Node::Item(hir::Item { ident: trait_name, kind: hir::ItemKind::Trait(_, _, _, _, trait_item_refs), .. 
})) = self.tcx.hir().find_by_def_id(local_def_id) - && let Some(method_ref) = trait_item_refs.iter().find(|item_ref| item_ref.ident == *assoc_item_name) { - err.span_label(method_ref.span, format!("`{trait_name}::{assoc_item_name}` defined here")); - } - - err.span_label(span, format!("cannot {verb} associated {noun} of trait")); - - let trait_impls = self.tcx.trait_impls_of(data.trait_ref.def_id); - - if trait_impls.blanket_impls().is_empty() - && let Some(impl_def_id) = trait_impls.non_blanket_impls().values().flatten().next() - { - let non_blanket_impl_count = trait_impls.non_blanket_impls().values().flatten().count(); - // If there is only one implementation of the trait, suggest using it. - // Otherwise, use a placeholder comment for the implementation. - let (message, impl_suggestion) = if non_blanket_impl_count == 1 {( - "use the fully-qualified path to the only available implementation", - format!("<{} as ", self.tcx.type_of(impl_def_id).instantiate_identity()) - )} else { - ("use a fully-qualified path to a specific available implementation", - " - // must be '::' between them, otherwise the parser won't accept the code - suggestions.push((between_span, "".to_string(),)); - suggestions.push((generic_arg.span_ext.shrink_to_hi(), ">".to_string())); - } else { - suggestions.push((trait_path_segment.ident.span.shrink_to_hi(), ">".to_string())); - } - err.multipart_suggestion( - message, - suggestions, - Applicability::MaybeIncorrect - ); - } - } - }; - - err - } - - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { - // Same hacky approach as above to avoid deluging user - // with error messages. - if arg.references_error() - || self.tcx.sess.has_errors().is_some() - || self.tainted_by_errors().is_some() - { - return; - } - - self.emit_inference_failure_err( - obligation.cause.body_id, - span, - arg, - ErrorCode::E0282, - false, - ) - } - - ty::PredicateKind::Subtype(data) => { - if data.references_error() - || self.tcx.sess.has_errors().is_some() - || self.tainted_by_errors().is_some() - { - // no need to overload user in such cases - return; - } - let SubtypePredicate { a_is_expected: _, a, b } = data; - // both must be type variables, or the other would've been instantiated - assert!(a.is_ty_var() && b.is_ty_var()); - self.emit_inference_failure_err( - obligation.cause.body_id, - span, - a.into(), - ErrorCode::E0282, - true, - ) - } - ty::PredicateKind::Clause(ty::ClauseKind::Projection(data)) => { - if predicate.references_error() || self.tainted_by_errors().is_some() { - return; - } - let subst = data - .projection_ty - .args - .iter() - .chain(Some(data.term.into_arg())) - .find(|g| g.has_non_region_infer()); - if let Some(subst) = subst { - let mut err = self.emit_inference_failure_err( - obligation.cause.body_id, - span, - subst, - ErrorCode::E0284, - true, - ); - err.note(format!("cannot satisfy `{predicate}`")); - err - } else { - // If we can't find a substitution, just print a generic error - let mut err = struct_span_err!( - self.tcx.sess, - span, - E0284, - "type annotations needed: cannot satisfy `{}`", - predicate, - ); - err.span_label(span, format!("cannot satisfy `{predicate}`")); - err - } - } - - ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(data)) => { - if predicate.references_error() || self.tainted_by_errors().is_some() { - return; - } - let subst = data.walk().find(|g| g.is_non_region_infer()); - if let Some(subst) = subst { - let err = self.emit_inference_failure_err( - obligation.cause.body_id, - span, - subst, - ErrorCode::E0284, 
- true, - ); - err - } else { - // If we can't find a substitution, just print a generic error - let mut err = struct_span_err!( - self.tcx.sess, - span, - E0284, - "type annotations needed: cannot satisfy `{}`", - predicate, - ); - err.span_label(span, format!("cannot satisfy `{predicate}`")); - err - } - } - _ => { - if self.tcx.sess.has_errors().is_some() || self.tainted_by_errors().is_some() { - return; - } - let mut err = struct_span_err!( - self.tcx.sess, - span, - E0284, - "type annotations needed: cannot satisfy `{}`", - predicate, - ); - err.span_label(span, format!("cannot satisfy `{predicate}`")); - err - } - }; - self.note_obligation_cause(&mut err, obligation); - err.emit(); - } - - fn annotate_source_of_ambiguity( - &self, - err: &mut Diagnostic, - ambiguities: &[ambiguity::Ambiguity], - predicate: ty::Predicate<'tcx>, - ) { - let mut spans = vec![]; - let mut crates = vec![]; - let mut post = vec![]; - let mut has_param_env = false; - for ambiguity in ambiguities { - match ambiguity { - ambiguity::Ambiguity::DefId(impl_def_id) => { - match self.tcx.span_of_impl(*impl_def_id) { - Ok(span) => spans.push(span), - Err(name) => { - crates.push(name); - if let Some(header) = to_pretty_impl_header(self.tcx, *impl_def_id) { - post.push(header); - } - } - } - } - ambiguity::Ambiguity::ParamEnv(span) => { - has_param_env = true; - spans.push(*span); - } - } - } - let mut crate_names: Vec<_> = crates.iter().map(|n| format!("`{n}`")).collect(); - crate_names.sort(); - crate_names.dedup(); - post.sort(); - post.dedup(); - - if self.tainted_by_errors().is_some() - && (crate_names.len() == 1 - && spans.len() == 0 - && ["`core`", "`alloc`", "`std`"].contains(&crate_names[0].as_str()) - || predicate.visit_with(&mut HasNumericInferVisitor).is_break()) - { - // Avoid complaining about other inference issues for expressions like - // `42 >> 1`, where the types are still `{integer}`, but we want to - // Do we need `trait_ref.skip_binder().self_ty().is_numeric() &&` too? - // NOTE(eddyb) this was `.cancel()`, but `err` - // is borrowed, so we can't fully defuse it. - err.downgrade_to_delayed_bug(); - return; - } - - let msg = format!( - "multiple `impl`s{} satisfying `{}` found", - if has_param_env { " or `where` clauses" } else { "" }, - predicate - ); - let post = if post.len() > 1 || (post.len() == 1 && post[0].contains('\n')) { - format!(":\n{}", post.iter().map(|p| format!("- {p}")).collect::>().join("\n"),) - } else if post.len() == 1 { - format!(": `{}`", post[0]) - } else { - String::new() - }; - - match (spans.len(), crates.len(), crate_names.len()) { - (0, 0, 0) => { - err.note(format!("cannot satisfy `{predicate}`")); - } - (0, _, 1) => { - err.note(format!("{} in the `{}` crate{}", msg, crates[0], post,)); - } - (0, _, _) => { - err.note(format!( - "{} in the following crates: {}{}", - msg, - crate_names.join(", "), - post, - )); - } - (_, 0, 0) => { - let span: MultiSpan = spans.into(); - err.span_note(span, msg); - } - (_, 1, 1) => { - let span: MultiSpan = spans.into(); - err.span_note(span, msg); - err.note(format!("and another `impl` found in the `{}` crate{}", crates[0], post,)); - } - _ => { - let span: MultiSpan = spans.into(); - err.span_note(span, msg); - err.note(format!( - "and more `impl`s found in the following crates: {}{}", - crate_names.join(", "), - post, - )); - } - } - } - - /// Returns `true` if the trait predicate may apply for *some* assignment - /// to the type parameters. 
- fn predicate_can_apply( - &self, - param_env: ty::ParamEnv<'tcx>, - pred: ty::PolyTraitPredicate<'tcx>, - ) -> bool { - struct ParamToVarFolder<'a, 'tcx> { - infcx: &'a InferCtxt<'tcx>, - var_map: FxHashMap, Ty<'tcx>>, - } - - impl<'a, 'tcx> TypeFolder> for ParamToVarFolder<'a, 'tcx> { - fn interner(&self) -> TyCtxt<'tcx> { - self.infcx.tcx - } - - fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - if let ty::Param(_) = *ty.kind() { - let infcx = self.infcx; - *self.var_map.entry(ty).or_insert_with(|| { - infcx.next_ty_var(TypeVariableOrigin { - kind: TypeVariableOriginKind::MiscVariable, - span: DUMMY_SP, - }) - }) - } else { - ty.super_fold_with(self) - } - } - } - - self.probe(|_| { - let cleaned_pred = - pred.fold_with(&mut ParamToVarFolder { infcx: self, var_map: Default::default() }); - - let InferOk { value: cleaned_pred, .. } = - self.infcx.at(&ObligationCause::dummy(), param_env).normalize(cleaned_pred); - - let obligation = - Obligation::new(self.tcx, ObligationCause::dummy(), param_env, cleaned_pred); - - self.predicate_may_hold(&obligation) - }) - } - - fn note_obligation_cause(&self, err: &mut Diagnostic, obligation: &PredicateObligation<'tcx>) { - // First, attempt to add note to this error with an async-await-specific - // message, and fall back to regular note otherwise. - if !self.maybe_note_obligation_cause_for_async_await(err, obligation) { - self.note_obligation_cause_code( - obligation.cause.body_id, - err, - obligation.predicate, - obligation.param_env, - obligation.cause.code(), - &mut vec![], - &mut Default::default(), - ); - self.suggest_unsized_bound_if_applicable(err, obligation); - } - } - - #[instrument(level = "debug", skip_all)] - fn suggest_unsized_bound_if_applicable( - &self, - err: &mut Diagnostic, - obligation: &PredicateObligation<'tcx>, - ) { - let ty::PredicateKind::Clause(ty::ClauseKind::Trait(pred)) = - obligation.predicate.kind().skip_binder() - else { - return; - }; - let (ObligationCauseCode::BindingObligation(item_def_id, span) - | ObligationCauseCode::ExprBindingObligation(item_def_id, span, ..)) = - *obligation.cause.code().peel_derives() - else { - return; - }; - debug!(?pred, ?item_def_id, ?span); - - let (Some(node), true) = ( - self.tcx.hir().get_if_local(item_def_id), - Some(pred.def_id()) == self.tcx.lang_items().sized_trait(), - ) else { - return; - }; - self.maybe_suggest_unsized_generics(err, span, node); - } - - #[instrument(level = "debug", skip_all)] - fn maybe_suggest_unsized_generics(&self, err: &mut Diagnostic, span: Span, node: Node<'tcx>) { - let Some(generics) = node.generics() else { - return; - }; - let sized_trait = self.tcx.lang_items().sized_trait(); - debug!(?generics.params); - debug!(?generics.predicates); - let Some(param) = generics.params.iter().find(|param| param.span == span) else { - return; - }; - // Check that none of the explicit trait bounds is `Sized`. Assume that an explicit - // `Sized` bound is there intentionally and we don't need to suggest relaxing it. - let explicitly_sized = generics - .bounds_for_param(param.def_id) - .flat_map(|bp| bp.bounds) - .any(|bound| bound.trait_ref().and_then(|tr| tr.trait_def_id()) == sized_trait); - if explicitly_sized { - return; - } - debug!(?param); - match node { - hir::Node::Item( - item @ hir::Item { - // Only suggest indirection for uses of type parameters in ADTs. - kind: - hir::ItemKind::Enum(..) | hir::ItemKind::Struct(..) | hir::ItemKind::Union(..), - .. 
- }, - ) => { - if self.maybe_indirection_for_unsized(err, item, param) { - return; - } - } - _ => {} - }; - // Didn't add an indirection suggestion, so add a general suggestion to relax `Sized`. - let (span, separator) = if let Some(s) = generics.bounds_span_for_suggestions(param.def_id) - { - (s, " +") - } else { - (span.shrink_to_hi(), ":") - }; - err.span_suggestion_verbose( - span, - "consider relaxing the implicit `Sized` restriction", - format!("{separator} ?Sized"), - Applicability::MachineApplicable, - ); - } - - fn maybe_indirection_for_unsized( - &self, - err: &mut Diagnostic, - item: &Item<'tcx>, - param: &GenericParam<'tcx>, - ) -> bool { - // Suggesting `T: ?Sized` is only valid in an ADT if `T` is only used in a - // borrow. `struct S<'a, T: ?Sized>(&'a T);` is valid, `struct S(T);` - // is not. Look for invalid "bare" parameter uses, and suggest using indirection. - let mut visitor = - FindTypeParam { param: param.name.ident().name, invalid_spans: vec![], nested: false }; - visitor.visit_item(item); - if visitor.invalid_spans.is_empty() { - return false; - } - let mut multispan: MultiSpan = param.span.into(); - multispan.push_span_label( - param.span, - format!("this could be changed to `{}: ?Sized`...", param.name.ident()), - ); - for sp in visitor.invalid_spans { - multispan.push_span_label( - sp, - format!("...if indirection were used here: `Box<{}>`", param.name.ident()), - ); - } - err.span_help( - multispan, - format!( - "you could relax the implicit `Sized` bound on `{T}` if it were \ - used through indirection like `&{T}` or `Box<{T}>`", - T = param.name.ident(), - ), - ); - true - } - - fn is_recursive_obligation( - &self, - obligated_types: &mut Vec>, - cause_code: &ObligationCauseCode<'tcx>, - ) -> bool { - if let ObligationCauseCode::BuiltinDerivedObligation(ref data) = cause_code { - let parent_trait_ref = self.resolve_vars_if_possible(data.parent_trait_pred); - let self_ty = parent_trait_ref.skip_binder().self_ty(); - if obligated_types.iter().any(|ot| ot == &self_ty) { - return true; - } - if let ty::Adt(def, args) = self_ty.kind() - && let [arg] = &args[..] - && let ty::GenericArgKind::Type(ty) = arg.unpack() - && let ty::Adt(inner_def, _) = ty.kind() - && inner_def == def - { - return true; - } - } - false - } - - fn get_standard_error_message( - &self, - trait_predicate: &ty::PolyTraitPredicate<'tcx>, - message: Option, - predicate_is_const: bool, - append_const_msg: Option, - post_message: String, - ) -> String { - message - .and_then(|cannot_do_this| { - match (predicate_is_const, append_const_msg) { - // do nothing if predicate is not const - (false, _) => Some(cannot_do_this), - // suggested using default post message - (true, Some(AppendConstMessage::Default)) => { - Some(format!("{cannot_do_this} in const contexts")) - } - // overridden post message - (true, Some(AppendConstMessage::Custom(custom_msg))) => { - Some(format!("{cannot_do_this}{custom_msg}")) - } - // fallback to generic message - (true, None) => None, - } - }) - .unwrap_or_else(|| { - format!("the trait bound `{trait_predicate}` is not satisfied{post_message}") - }) - } - - fn get_safe_transmute_error_and_reason( - &self, - obligation: PredicateObligation<'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>, - span: Span, - ) -> GetSafeTransmuteErrorAndReason { - use rustc_transmute::Answer; - - // Erase regions because layout code doesn't particularly care about regions. 
- let trait_ref = self.tcx.erase_regions(self.tcx.erase_late_bound_regions(trait_ref)); - - let src_and_dst = rustc_transmute::Types { - dst: trait_ref.args.type_at(0), - src: trait_ref.args.type_at(1), - }; - let scope = trait_ref.args.type_at(2); - let Some(assume) = rustc_transmute::Assume::from_const( - self.infcx.tcx, - obligation.param_env, - trait_ref.args.const_at(3), - ) else { - span_bug!( - span, - "Unable to construct rustc_transmute::Assume where it was previously possible" - ); - }; - - match rustc_transmute::TransmuteTypeEnv::new(self.infcx).is_transmutable( - obligation.cause, - src_and_dst, - scope, - assume, - ) { - Answer::No(reason) => { - let dst = trait_ref.args.type_at(0); - let src = trait_ref.args.type_at(1); - let err_msg = format!( - "`{src}` cannot be safely transmuted into `{dst}` in the defining scope of `{scope}`" - ); - let safe_transmute_explanation = match reason { - rustc_transmute::Reason::SrcIsUnspecified => { - format!("`{src}` does not have a well-specified layout") - } - - rustc_transmute::Reason::DstIsUnspecified => { - format!("`{dst}` does not have a well-specified layout") - } - - rustc_transmute::Reason::DstIsBitIncompatible => { - format!("At least one value of `{src}` isn't a bit-valid value of `{dst}`") - } - - rustc_transmute::Reason::DstIsPrivate => format!( - "`{dst}` is or contains a type or field that is not visible in that scope" - ), - rustc_transmute::Reason::DstIsTooBig => { - format!("The size of `{src}` is smaller than the size of `{dst}`") - } - rustc_transmute::Reason::SrcSizeOverflow => { - format!( - "values of the type `{src}` are too big for the current architecture" - ) - } - rustc_transmute::Reason::DstSizeOverflow => { - format!( - "values of the type `{dst}` are too big for the current architecture" - ) - } - rustc_transmute::Reason::DstHasStricterAlignment { - src_min_align, - dst_min_align, - } => { - format!( - "The minimum alignment of `{src}` ({src_min_align}) should be greater than that of `{dst}` ({dst_min_align})" - ) - } - rustc_transmute::Reason::DstIsMoreUnique => { - format!("`{src}` is a shared reference, but `{dst}` is a unique reference") - } - // Already reported by rustc - rustc_transmute::Reason::TypeError => { - return GetSafeTransmuteErrorAndReason::Silent; - } - rustc_transmute::Reason::SrcLayoutUnknown => { - format!("`{src}` has an unknown layout") - } - rustc_transmute::Reason::DstLayoutUnknown => { - format!("`{dst}` has an unknown layout") - } - }; - GetSafeTransmuteErrorAndReason::Error { err_msg, safe_transmute_explanation } - } - // Should never get a Yes at this point! We already ran it before, and did not get a Yes. - Answer::Yes => span_bug!( - span, - "Inconsistent rustc_transmute::is_transmutable(...) 
result, got Yes", - ), - other => span_bug!(span, "Unsupported rustc_transmute::Answer variant: `{other:?}`"), - } - } - - fn add_tuple_trait_message( - &self, - obligation_cause_code: &ObligationCauseCode<'tcx>, - err: &mut Diagnostic, - ) { - match obligation_cause_code { - ObligationCauseCode::RustCall => { - err.set_primary_message("functions with the \"rust-call\" ABI must take a single non-self tuple argument"); - } - ObligationCauseCode::BindingObligation(def_id, _) - | ObligationCauseCode::ItemObligation(def_id) - if self.tcx.is_fn_trait(*def_id) => - { - err.code(rustc_errors::error_code!(E0059)); - err.set_primary_message(format!( - "type parameter to bare `{}` trait must be a tuple", - self.tcx.def_path_str(*def_id) - )); - } - _ => {} - } - } - - fn try_to_add_help_message( - &self, - obligation: &PredicateObligation<'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>, - trait_predicate: &ty::PolyTraitPredicate<'tcx>, - err: &mut Diagnostic, - span: Span, - is_fn_trait: bool, - suggested: bool, - unsatisfied_const: bool, - ) { - let body_def_id = obligation.cause.body_id; - let span = if let ObligationCauseCode::BinOp { rhs_span: Some(rhs_span), .. } = - obligation.cause.code() - { - *rhs_span - } else { - span - }; - - // Try to report a help message - if is_fn_trait - && let Ok((implemented_kind, params)) = self.type_implements_fn_trait( - obligation.param_env, - trait_ref.self_ty(), - trait_predicate.skip_binder().polarity, - ) - { - self.add_help_message_for_fn_trait(trait_ref, err, implemented_kind, params); - } else if !trait_ref.has_non_region_infer() - && self.predicate_can_apply(obligation.param_env, *trait_predicate) - { - // If a where-clause may be useful, remind the - // user that they can add it. - // - // don't display an on-unimplemented note, as - // these notes will often be of the form - // "the type `T` can't be frobnicated" - // which is somewhat confusing. - self.suggest_restricting_param_bound( - err, - *trait_predicate, - None, - obligation.cause.body_id, - ); - } else if trait_ref.def_id().is_local() - && self.tcx.trait_impls_of(trait_ref.def_id()).is_empty() - && !self.tcx.trait_is_auto(trait_ref.def_id()) - && !self.tcx.trait_is_alias(trait_ref.def_id()) - { - err.span_help( - self.tcx.def_span(trait_ref.def_id()), - crate::fluent_generated::trait_selection_trait_has_no_impls, - ); - } else if !suggested && !unsatisfied_const { - // Can't show anything else useful, try to find similar impls. - let impl_candidates = self.find_similar_impl_candidates(*trait_predicate); - if !self.report_similar_impl_candidates( - &impl_candidates, - trait_ref, - body_def_id, - err, - true, - ) { - self.report_similar_impl_candidates_for_root_obligation( - &obligation, - *trait_predicate, - body_def_id, - err, - ); - } - - self.suggest_convert_to_slice( - err, - obligation, - trait_ref, - impl_candidates.as_slice(), - span, - ); - } - } - - fn add_help_message_for_fn_trait( - &self, - trait_ref: ty::PolyTraitRef<'tcx>, - err: &mut Diagnostic, - implemented_kind: ty::ClosureKind, - params: ty::Binder<'tcx, Ty<'tcx>>, - ) { - // If the type implements `Fn`, `FnMut`, or `FnOnce`, suppress the following - // suggestion to add trait bounds for the type, since we only typically implement - // these traits once. - - // Note if the `FnMut` or `FnOnce` is less general than the trait we're trying - // to implement. 
- let selected_kind = self - .tcx - .fn_trait_kind_from_def_id(trait_ref.def_id()) - .expect("expected to map DefId to ClosureKind"); - if !implemented_kind.extends(selected_kind) { - err.note(format!( - "`{}` implements `{}`, but it must implement `{}`, which is more general", - trait_ref.skip_binder().self_ty(), - implemented_kind, - selected_kind - )); - } - - // Note any argument mismatches - let given_ty = params.skip_binder(); - let expected_ty = trait_ref.skip_binder().args.type_at(1); - if let ty::Tuple(given) = given_ty.kind() - && let ty::Tuple(expected) = expected_ty.kind() - { - if expected.len() != given.len() { - // Note number of types that were expected and given - err.note( - format!( - "expected a closure taking {} argument{}, but one taking {} argument{} was given", - given.len(), - pluralize!(given.len()), - expected.len(), - pluralize!(expected.len()), - ) - ); - } else if !self.same_type_modulo_infer(given_ty, expected_ty) { - // Print type mismatch - let (expected_args, given_args) = - self.cmp(given_ty, expected_ty); - err.note_expected_found( - &"a closure with arguments", - expected_args, - &"a closure with arguments", - given_args, - ); - } - } - } - - fn maybe_add_note_for_unsatisfied_const( - &self, - _obligation: &PredicateObligation<'tcx>, - _trait_ref: ty::PolyTraitRef<'tcx>, - _trait_predicate: &ty::PolyTraitPredicate<'tcx>, - _err: &mut Diagnostic, - _span: Span, - ) -> UnsatisfiedConst { - let unsatisfied_const = UnsatisfiedConst(false); - // FIXME(effects) - unsatisfied_const - } - - fn report_closure_error( - &self, - obligation: &PredicateObligation<'tcx>, - closure_def_id: DefId, - found_kind: ty::ClosureKind, - kind: ty::ClosureKind, - ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { - let closure_span = self.tcx.def_span(closure_def_id); - - let mut err = ClosureKindMismatch { - closure_span, - expected: kind, - found: found_kind, - cause_span: obligation.cause.span, - fn_once_label: None, - fn_mut_label: None, - }; - - // Additional context information explaining why the closure only implements - // a particular trait. 
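// A minimal stand-in sketch (not the compiler's own type) of the closure-kind ordering
// that `implemented_kind.extends(selected_kind)` consults above: every `Fn` closure is
// also usable as `FnMut`, and every `FnMut` closure is also usable as `FnOnce`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum ClosureKind {
    Fn,
    FnMut,
    FnOnce,
}

impl ClosureKind {
    // `self` extends `other` if code expecting an `other`-style closure can accept it.
    fn extends(self, other: ClosureKind) -> bool {
        self <= other
    }
}

fn main() {
    assert!(ClosureKind::Fn.extends(ClosureKind::FnOnce));
    assert!(!ClosureKind::FnOnce.extends(ClosureKind::FnMut));
}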
- if let Some(typeck_results) = &self.typeck_results { - let hir_id = self.tcx.hir().local_def_id_to_hir_id(closure_def_id.expect_local()); - match (found_kind, typeck_results.closure_kind_origins().get(hir_id)) { - (ty::ClosureKind::FnOnce, Some((span, place))) => { - err.fn_once_label = Some(ClosureFnOnceLabel { - span: *span, - place: ty::place_to_string_for_capture(self.tcx, &place), - }) - } - (ty::ClosureKind::FnMut, Some((span, place))) => { - err.fn_mut_label = Some(ClosureFnMutLabel { - span: *span, - place: ty::place_to_string_for_capture(self.tcx, &place), - }) - } - _ => {} - } - } - - self.tcx.sess.create_err(err) - } - - fn report_type_parameter_mismatch_cyclic_type_error( - &self, - obligation: &PredicateObligation<'tcx>, - found_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, - expected_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, - terr: TypeError<'tcx>, - ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { - let self_ty = found_trait_ref.self_ty().skip_binder(); - let (cause, terr) = if let ty::Closure(def_id, _) = self_ty.kind() { - ( - ObligationCause::dummy_with_span(self.tcx.def_span(def_id)), - TypeError::CyclicTy(self_ty), - ) - } else { - (obligation.cause.clone(), terr) - }; - self.report_and_explain_type_error( - TypeTrace::poly_trait_refs(&cause, true, expected_trait_ref, found_trait_ref), - terr, - ) - } - - fn report_opaque_type_auto_trait_leakage( - &self, - obligation: &PredicateObligation<'tcx>, - def_id: DefId, - ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { - let name = match self.tcx.opaque_type_origin(def_id.expect_local()) { - hir::OpaqueTyOrigin::FnReturn(_) | hir::OpaqueTyOrigin::AsyncFn(_) => { - "opaque type".to_string() - } - hir::OpaqueTyOrigin::TyAlias { .. } => { - format!("`{}`", self.tcx.def_path_debug_str(def_id)) - } - }; - let mut err = self.tcx.sess.struct_span_err( - obligation.cause.span, - format!("cannot check whether the hidden type of {name} satisfies auto traits"), - ); - err.span_note(self.tcx.def_span(def_id), "opaque type is declared here"); - match self.defining_use_anchor { - DefiningAnchor::Bubble | DefiningAnchor::Error => {} - DefiningAnchor::Bind(bind) => { - err.span_note( - self.tcx.def_ident_span(bind).unwrap_or_else(|| self.tcx.def_span(bind)), - "this item depends on auto traits of the hidden type, \ - but may also be registering the hidden type. \ - This is not supported right now. \ - You can try moving the opaque type and the item that actually registers a hidden type into a new submodule".to_string(), - ); - } - }; - err - } - - fn report_type_parameter_mismatch_error( - &self, - obligation: &PredicateObligation<'tcx>, - span: Span, - found_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, - expected_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, - ) -> Option> { - let found_trait_ref = self.resolve_vars_if_possible(found_trait_ref); - let expected_trait_ref = self.resolve_vars_if_possible(expected_trait_ref); - - if expected_trait_ref.self_ty().references_error() { - return None; - } - - let Some(found_trait_ty) = found_trait_ref.self_ty().no_bound_vars() else { - return None; - }; - - let found_did = match *found_trait_ty.kind() { - ty::Closure(did, _) | ty::Foreign(did) | ty::FnDef(did, _) | ty::Generator(did, ..) 
=> { - Some(did) - } - ty::Adt(def, _) => Some(def.did()), - _ => None, - }; - - let found_node = found_did.and_then(|did| self.tcx.hir().get_if_local(did)); - let found_span = found_did.and_then(|did| self.tcx.hir().span_if_local(did)); - - if self.reported_closure_mismatch.borrow().contains(&(span, found_span)) { - // We check closures twice, with obligations flowing in different directions, - // but we want to complain about them only once. - return None; - } - - self.reported_closure_mismatch.borrow_mut().insert((span, found_span)); - - let mut not_tupled = false; - - let found = match found_trait_ref.skip_binder().args.type_at(1).kind() { - ty::Tuple(ref tys) => vec![ArgKind::empty(); tys.len()], - _ => { - not_tupled = true; - vec![ArgKind::empty()] - } - }; - - let expected_ty = expected_trait_ref.skip_binder().args.type_at(1); - let expected = match expected_ty.kind() { - ty::Tuple(ref tys) => { - tys.iter().map(|t| ArgKind::from_expected_ty(t, Some(span))).collect() - } - _ => { - not_tupled = true; - vec![ArgKind::Arg("_".to_owned(), expected_ty.to_string())] - } - }; - - // If this is a `Fn` family trait and either the expected or found - // is not tupled, then fall back to just a regular mismatch error. - // This shouldn't be common unless manually implementing one of the - // traits manually, but don't make it more confusing when it does - // happen. - Some( - if Some(expected_trait_ref.def_id()) != self.tcx.lang_items().gen_trait() && not_tupled - { - self.report_and_explain_type_error( - TypeTrace::poly_trait_refs( - &obligation.cause, - true, - expected_trait_ref, - found_trait_ref, - ), - ty::error::TypeError::Mismatch, - ) - } else if found.len() == expected.len() { - self.report_closure_arg_mismatch( - span, - found_span, - found_trait_ref, - expected_trait_ref, - obligation.cause.code(), - found_node, - obligation.param_env, - ) - } else { - let (closure_span, closure_arg_span, found) = found_did - .and_then(|did| { - let node = self.tcx.hir().get_if_local(did)?; - let (found_span, closure_arg_span, found) = - self.get_fn_like_arguments(node)?; - Some((Some(found_span), closure_arg_span, found)) - }) - .unwrap_or((found_span, None, found)); - - self.report_arg_count_mismatch( - span, - closure_span, - expected, - found, - found_trait_ty.is_closure(), - closure_arg_span, - ) - }, - ) - } - - fn report_not_const_evaluatable_error( - &self, - obligation: &PredicateObligation<'tcx>, - span: Span, - ) -> Option> { - if !self.tcx.features().generic_const_exprs { - let mut err = self - .tcx - .sess - .struct_span_err(span, "constant expression depends on a generic parameter"); - // FIXME(const_generics): we should suggest to the user how they can resolve this - // issue. However, this is currently not actually possible - // (see https://github.com/rust-lang/rust/issues/66962#issuecomment-575907083). - // - // Note that with `feature(generic_const_exprs)` this case should not - // be reachable. 
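// A nightly-only sketch (assumed example; `generic_const_exprs` is an incomplete
// feature and its syntax may change) of the `where [(); ...]:` bound that the
// "unconstrained generic constant" help above suggests adding:
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]

fn pad<const N: usize>(input: [u8; N]) -> [u8; N + 1]
where
    // the kind of bound the diagnostic proposes, constraining `N + 1` at the call site
    [(); N + 1]:
{
    let mut out = [0u8; N + 1];
    out[..N].copy_from_slice(&input);
    out
}

fn main() {
    assert_eq!(pad([1u8, 2, 3]), [1, 2, 3, 0]);
}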
- err.note("this may fail depending on what value the parameter takes"); - err.emit(); - return None; - } - - match obligation.predicate.kind().skip_binder() { - ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(ct)) => { - let ty::ConstKind::Unevaluated(uv) = ct.kind() else { - bug!("const evaluatable failed for non-unevaluated const `{ct:?}`"); - }; - let mut err = self.tcx.sess.struct_span_err(span, "unconstrained generic constant"); - let const_span = self.tcx.def_span(uv.def); - match self.tcx.sess.source_map().span_to_snippet(const_span) { - Ok(snippet) => err.help(format!( - "try adding a `where` bound using this expression: `where [(); {snippet}]:`" - )), - _ => err.help("consider adding a `where` bound using this expression"), - }; - Some(err) - } - _ => { - span_bug!( - span, - "unexpected non-ConstEvaluatable predicate, this should not be reachable" - ) - } - } - } -} - struct UnsatisfiedConst(pub bool); -fn get_explanation_based_on_obligation<'tcx>( - obligation: &PredicateObligation<'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>, - trait_predicate: &ty::PolyTraitPredicate<'tcx>, - pre_message: String, -) -> String { - if let ObligationCauseCode::MainFunctionType = obligation.cause.code() { - "consider using `()`, or a `Result`".to_owned() - } else { - let ty_desc = match trait_ref.skip_binder().self_ty().kind() { - ty::FnDef(_, _) => Some("fn item"), - ty::Closure(_, _) => Some("closure"), - _ => None, - }; - - match ty_desc { - Some(desc) => format!( - "{}the trait `{}` is not implemented for {} `{}`", - pre_message, - trait_predicate.print_modifiers_and_trait_path(), - desc, - trait_ref.skip_binder().self_ty(), - ), - None => format!( - "{}the trait `{}` is not implemented for `{}`", - pre_message, - trait_predicate.print_modifiers_and_trait_path(), - trait_ref.skip_binder().self_ty(), - ), - } - } -} /// Crude way of getting back an `Expr` from a `Span`. 
pub struct FindExprBySpan<'hir> { pub span: Span, @@ -3552,7 +173,7 @@ infcx.probe(|_| { let goal = Goal { predicate: o.predicate, param_env: o.param_env }; let tree = infcx - .evaluate_root_goal(goal, GenerateProofTree::Yes(UseGlobalCache::No)) + .evaluate_root_goal(goal, GenerateProofTree::Yes) .1 .expect("proof tree should have been generated"); let mut lock = std::io::stdout().lock(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,9 @@ use super::{ObligationCauseCode, PredicateObligation}; use crate::infer::error_reporting::TypeErrCtxt; -use rustc_ast::{MetaItem, NestedMetaItem}; +use rustc_ast::AttrArgs; +use rustc_ast::AttrArgsEq; +use rustc_ast::AttrKind; +use rustc_ast::{Attribute, MetaItem, NestedMetaItem}; use rustc_attr as attr; use rustc_data_structures::fx::FxHashMap; use rustc_errors::{struct_span_err, ErrorGuaranteed}; @@ -18,7 +21,7 @@ EmptyOnClauseInOnUnimplemented, InvalidOnClauseInOnUnimplemented, NoValueInOnUnimplemented, }; -use super::InferCtxtPrivExt; +use crate::traits::error_reporting::type_err_ctxt_ext::InferCtxtPrivExt; pub trait TypeErrCtxtExt<'tcx> { /*private*/ @@ -50,6 +53,7 @@ sym::float, sym::_Self, sym::crate_local, + sym::Trait, ]; impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { @@ -102,7 +106,7 @@ let node = hir.find(hir_id)?; match &node { hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(sig, _, body_id), .. }) => { - self.describe_generator(*body_id).or_else(|| { + self.describe_coroutine(*body_id).or_else(|| { Some(match sig.header { hir::FnHeader { asyncness: hir::IsAsync::Async(_), .. } => { "an async function" @@ -114,11 +118,11 @@ hir::Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(body_id)), .. - }) => self.describe_generator(*body_id).or_else(|| Some("a trait method")), + }) => self.describe_coroutine(*body_id).or_else(|| Some("a trait method")), hir::Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Fn(sig, body_id), .. - }) => self.describe_generator(*body_id).or_else(|| { + }) => self.describe_coroutine(*body_id).or_else(|| { Some(match sig.header { hir::FnHeader { asyncness: hir::IsAsync::Async(_), .. } => "an async method", _ => "a method", @@ -127,7 +131,7 @@ hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(hir::Closure { body, movability, .. }), .. - }) => self.describe_generator(*body).or_else(|| { + }) => self.describe_coroutine(*body).or_else(|| { Some(if movability.is_some() { "an async closure" } else { "a closure" }) }), hir::Node::Expr(hir::Expr { .. }) => { @@ -180,8 +184,22 @@ flags.push((sym::cause, Some("MainFunctionType".to_string()))); } - // Add all types without trimmed paths. 
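// An illustrative, assumed example of the attribute this module parses. On nightly
// toolchains of this era it sits behind the `diagnostic_namespace` feature gate that
// this file checks; later releases accept the attribute without the feature line. The
// `message`/`label`/`note` options map onto the `OnUnimplementedDirective` fields
// changed below.
#![feature(diagnostic_namespace)]

#[diagnostic::on_unimplemented(
    message = "`{Self}` cannot be frobnicated",
    label = "required by this call",
    note = "implement `Frobnicate` for `{Self}`, or wrap the value in a supported adapter"
)]
trait Frobnicate {
    fn frobnicate(&self);
}

fn frob(value: impl Frobnicate) {
    value.frobnicate();
}

fn main() {
    // Calling `frob(42u8)` here would emit the customized message, label, and note.
}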
- ty::print::with_no_trimmed_paths!({ + if let Some(kind) = self.tcx.fn_trait_kind_from_def_id(trait_ref.def_id) + && let ty::Tuple(args) = trait_ref.args.type_at(1).kind() + { + let args = args + .iter() + .map(|ty| ty.to_string()) + .collect::>() + .join(", "); + flags.push((sym::Trait, Some(format!("{}({args})", kind.as_str())))); + } else { + flags.push((sym::Trait, Some(trait_ref.print_only_trait_path().to_string()))); + } + + // Add all types without trimmed paths or visible paths, ensuring they end up with + // their "canonical" def path. + ty::print::with_no_trimmed_paths!(ty::print::with_no_visible_paths!({ let generics = self.tcx.generics_of(def_id); let self_ty = trait_ref.self_ty(); // This is also included through the generics list as `Self`, @@ -296,7 +314,7 @@ { flags.push((sym::_Self, Some("&[{integral}]".to_owned()))); } - }); + })); if let Ok(Some(command)) = OnUnimplementedDirective::of_item(self.tcx, def_id) { command.evaluate(self.tcx, trait_ref, &flags) @@ -315,7 +333,7 @@ pub subcommands: Vec, pub message: Option, pub label: Option, - pub note: Option, + pub notes: Vec, pub parent_label: Option, pub append_const_msg: Option, } @@ -325,7 +343,7 @@ pub struct OnUnimplementedNote { pub message: Option, pub label: Option, - pub note: Option, + pub notes: Vec, pub parent_label: Option, // If none, should fall back to a generic message pub append_const_msg: Option, @@ -341,7 +359,22 @@ #[derive(LintDiagnostic)] #[diag(trait_selection_malformed_on_unimplemented_attr)] -pub struct NoValueInOnUnimplementedLint; +#[help] +pub struct MalformedOnUnimplementedAttrLint { + #[label] + pub span: Span, +} + +impl MalformedOnUnimplementedAttrLint { + fn new(span: Span) -> Self { + Self { span } + } +} + +#[derive(LintDiagnostic)] +#[diag(trait_selection_missing_options_for_on_unimplemented_attr)] +#[help] +pub struct MissingOptionsForOnUnimplementedAttr; impl<'tcx> OnUnimplementedDirective { fn parse( @@ -368,7 +401,9 @@ .meta_item() .ok_or_else(|| tcx.sess.emit_err(InvalidOnClauseInOnUnimplemented { span }))?; attr::eval_condition(cond, &tcx.sess.parse_sess, Some(tcx.features()), &mut |cfg| { - if let Some(value) = cfg.value && let Err(guar) = parse_value(value) { + if let Some(value) = cfg.value + && let Err(guar) = parse_value(value) + { errored = Some(guar); } true @@ -378,7 +413,7 @@ let mut message = None; let mut label = None; - let mut note = None; + let mut notes = Vec::new(); let mut parent_label = None; let mut subcommands = vec![]; let mut append_const_msg = None; @@ -394,10 +429,12 @@ label = parse_value(label_)?; continue; } - } else if item.has_name(sym::note) && note.is_none() { + } else if item.has_name(sym::note) { if let Some(note_) = item.value_str() { - note = parse_value(note_)?; - continue; + if let Some(note) = parse_value(note_)? 
{ + notes.push(note); + continue; + } } } else if item.has_name(sym::parent_label) && parent_label.is_none() @@ -411,7 +448,7 @@ && is_root && message.is_none() && label.is_none() - && note.is_none() + && notes.is_empty() && !is_diagnostic_namespace_variant // FIXME(diagnostic_namespace): disallow filters for now { @@ -450,7 +487,7 @@ UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, tcx.hir().local_def_id_to_hir_id(item_def_id.expect_local()), vec![item.span()], - NoValueInOnUnimplementedLint, + MalformedOnUnimplementedAttrLint::new(item.span()), ); } else { // nothing found @@ -466,7 +503,7 @@ subcommands, message, label, - note, + notes, parent_label, append_const_msg, })) @@ -474,18 +511,42 @@ } pub fn of_item(tcx: TyCtxt<'tcx>, item_def_id: DefId) -> Result, ErrorGuaranteed> { - let mut is_diagnostic_namespace_variant = false; - let Some(attr) = tcx.get_attr(item_def_id, sym::rustc_on_unimplemented).or_else(|| { - if tcx.features().diagnostic_namespace { - is_diagnostic_namespace_variant = true; - tcx.get_attrs_by_path(item_def_id, &[sym::diagnostic, sym::on_unimplemented]).next() - } else { - None - } - }) else { - return Ok(None); - }; + if let Some(attr) = tcx.get_attr(item_def_id, sym::rustc_on_unimplemented) { + return Self::parse_attribute(attr, false, tcx, item_def_id); + } else if tcx.features().diagnostic_namespace { + tcx.get_attrs_by_path(item_def_id, &[sym::diagnostic, sym::on_unimplemented]) + .filter_map(|attr| Self::parse_attribute(attr, true, tcx, item_def_id).transpose()) + .try_fold(None, |aggr: Option, directive| { + let directive = directive?; + if let Some(aggr) = aggr { + let mut subcommands = aggr.subcommands; + subcommands.extend(directive.subcommands); + let mut notes = aggr.notes; + notes.extend(directive.notes); + Ok(Some(Self { + condition: aggr.condition.or(directive.condition), + subcommands, + message: aggr.message.or(directive.message), + label: aggr.label.or(directive.label), + notes, + parent_label: aggr.parent_label.or(directive.parent_label), + append_const_msg: aggr.append_const_msg.or(directive.append_const_msg), + })) + } else { + Ok(Some(directive)) + } + }) + } else { + Ok(None) + } + } + fn parse_attribute( + attr: &Attribute, + is_diagnostic_namespace_variant: bool, + tcx: TyCtxt<'tcx>, + item_def_id: DefId, + ) -> Result, ErrorGuaranteed> { let result = if let Some(items) = attr.meta_item_list() { Self::parse(tcx, item_def_id, &items, attr.span, true, is_diagnostic_namespace_variant) } else if let Some(value) = attr.value_str() { @@ -500,26 +561,45 @@ value, attr.span, )?), - note: None, + notes: Vec::new(), parent_label: None, append_const_msg: None, })) } else { + let item = attr.get_normal_item(); + let report_span = match &item.args { + AttrArgs::Empty => item.path.span, + AttrArgs::Delimited(args) => args.dspan.entire(), + AttrArgs::Eq(eq_span, AttrArgsEq::Ast(expr)) => eq_span.to(expr.span), + AttrArgs::Eq(span, AttrArgsEq::Hir(expr)) => span.to(expr.span), + }; + tcx.emit_spanned_lint( UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, tcx.hir().local_def_id_to_hir_id(item_def_id.expect_local()), - attr.span, - NoValueInOnUnimplementedLint, + report_span, + MalformedOnUnimplementedAttrLint::new(report_span), ); Ok(None) } } else if is_diagnostic_namespace_variant { - tcx.emit_spanned_lint( - UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, - tcx.hir().local_def_id_to_hir_id(item_def_id.expect_local()), - attr.span, - NoValueInOnUnimplementedLint, - ); + match &attr.kind { + AttrKind::Normal(p) if !matches!(p.item.args, AttrArgs::Empty) => { + 
tcx.emit_spanned_lint( + UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, + tcx.hir().local_def_id_to_hir_id(item_def_id.expect_local()), + attr.span, + MalformedOnUnimplementedAttrLint::new(attr.span), + ); + } + _ => tcx.emit_spanned_lint( + UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, + tcx.hir().local_def_id_to_hir_id(item_def_id.expect_local()), + attr.span, + MissingOptionsForOnUnimplementedAttr, + ), + }; + Ok(None) } else { let reported = @@ -538,7 +618,7 @@ ) -> OnUnimplementedNote { let mut message = None; let mut label = None; - let mut note = None; + let mut notes = Vec::new(); let mut parent_label = None; let mut append_const_msg = None; info!("evaluate({:?}, trait_ref={:?}, options={:?})", self, trait_ref, options); @@ -547,18 +627,22 @@ options.iter().filter_map(|(k, v)| v.clone().map(|v| (*k, v))).collect(); for command in self.subcommands.iter().chain(Some(self)).rev() { - if let Some(ref condition) = command.condition && !attr::eval_condition( - condition, - &tcx.sess.parse_sess, - Some(tcx.features()), - &mut |cfg| { - let value = cfg.value.map(|v| { - OnUnimplementedFormatString(v).format(tcx, trait_ref, &options_map) - }); - - options.contains(&(cfg.name, value)) - }, - ) { + if let Some(ref condition) = command.condition + && !attr::eval_condition( + condition, + &tcx.sess.parse_sess, + Some(tcx.features()), + &mut |cfg| { + let value = cfg.value.map(|v| { + // `with_no_visible_paths` is also used when generating the options, + // so we need to match it here. + ty::print::with_no_visible_paths!(OnUnimplementedFormatString(v).format(tcx, trait_ref, &options_map)) + }); + + options.contains(&(cfg.name, value)) + }, + ) + { debug!("evaluate: skipping {:?} due to condition", command); continue; } @@ -571,9 +655,7 @@ label = Some(label_.clone()); } - if let Some(ref note_) = command.note { - note = Some(note_.clone()); - } + notes.extend(command.notes.clone()); if let Some(ref parent_label_) = command.parent_label { parent_label = Some(parent_label_.clone()); @@ -585,7 +667,7 @@ OnUnimplementedNote { label: label.map(|l| l.format(tcx, trait_ref, &options_map)), message: message.map(|m| m.format(tcx, trait_ref, &options_map)), - note: note.map(|n| n.format(tcx, trait_ref, &options_map)), + notes: notes.into_iter().map(|n| n.format(tcx, trait_ref, &options_map)).collect(), parent_label: parent_label.map(|e_s| e_s.format(tcx, trait_ref, &options_map)), append_const_msg, } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs 2023-12-21 16:55:28.000000000 +0000 @@ -22,12 +22,13 @@ use rustc_hir::intravisit::Visitor; use rustc_hir::is_range_literal; use rustc_hir::lang_items::LangItem; -use rustc_hir::{AsyncGeneratorKind, GeneratorKind, Node}; +use rustc_hir::{CoroutineKind, CoroutineSource, Node}; use rustc_hir::{Expr, HirId}; use rustc_infer::infer::error_reporting::TypeErrCtxt; use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use rustc_infer::infer::{DefineOpaqueTypes, InferOk, LateBoundRegionConversionTime}; use rustc_middle::hir::map; +use rustc_middle::traits::IsConstable; use rustc_middle::ty::error::TypeError::{self, Sorts}; use 
rustc_middle::ty::{ self, suggest_arbitrary_trait_bound, suggest_constraining_type_param, AdtKind, GenericArgs, @@ -41,43 +42,43 @@ use std::borrow::Cow; use std::iter; -use super::InferCtxtPrivExt; use crate::infer::InferCtxtExt as _; +use crate::traits::error_reporting::type_err_ctxt_ext::InferCtxtPrivExt; use crate::traits::query::evaluate_obligation::InferCtxtExt as _; use rustc_middle::ty::print::{with_forced_trimmed_paths, with_no_trimmed_paths}; #[derive(Debug)] -pub enum GeneratorInteriorOrUpvar { +pub enum CoroutineInteriorOrUpvar { // span of interior type Interior(Span, Option<(Span, Option)>), // span of upvar Upvar(Span), } -// This type provides a uniform interface to retrieve data on generators, whether it originated from +// This type provides a uniform interface to retrieve data on coroutines, whether it originated from // the local crate being compiled or from a foreign crate. #[derive(Debug)] -struct GeneratorData<'tcx, 'a>(&'a TypeckResults<'tcx>); +struct CoroutineData<'tcx, 'a>(&'a TypeckResults<'tcx>); -impl<'tcx, 'a> GeneratorData<'tcx, 'a> { - /// Try to get information about variables captured by the generator that matches a type we are +impl<'tcx, 'a> CoroutineData<'tcx, 'a> { + /// Try to get information about variables captured by the coroutine that matches a type we are /// looking for with `ty_matches` function. We uses it to find upvar which causes a failure to /// meet an obligation fn try_get_upvar_span( &self, infer_context: &InferCtxt<'tcx>, - generator_did: DefId, + coroutine_did: DefId, ty_matches: F, - ) -> Option + ) -> Option where F: Fn(ty::Binder<'tcx, Ty<'tcx>>) -> bool, { - infer_context.tcx.upvars_mentioned(generator_did).and_then(|upvars| { + infer_context.tcx.upvars_mentioned(coroutine_did).and_then(|upvars| { upvars.iter().find_map(|(upvar_id, upvar)| { let upvar_ty = self.0.node_type(*upvar_id); let upvar_ty = infer_context.resolve_vars_if_possible(upvar_ty); ty_matches(ty::Binder::dummy(upvar_ty)) - .then(|| GeneratorInteriorOrUpvar::Upvar(upvar.span)) + .then(|| CoroutineInteriorOrUpvar::Upvar(upvar.span)) }) }) } @@ -244,9 +245,9 @@ fn note_obligation_cause_for_async_await( &self, err: &mut Diagnostic, - interior_or_upvar_span: GeneratorInteriorOrUpvar, + interior_or_upvar_span: CoroutineInteriorOrUpvar, is_async: bool, - outer_generator: Option, + outer_coroutine: Option, trait_pred: ty::TraitPredicate<'tcx>, target_ty: Ty<'tcx>, obligation: &PredicateObligation<'tcx>, @@ -313,6 +314,18 @@ predicate: ty::Predicate<'tcx>, call_hir_id: HirId, ); + + fn look_for_iterator_item_mistakes( + &self, + assocs_in_this_method: &[Option<(Span, (DefId, Ty<'tcx>))>], + typeck_results: &TypeckResults<'tcx>, + type_diffs: &[TypeError<'tcx>], + param_env: ty::ParamEnv<'tcx>, + path_segment: &hir::PathSegment<'_>, + args: &[hir::Expr<'_>], + err: &mut Diagnostic, + ); + fn point_at_chain( &self, expr: &hir::Expr<'_>, @@ -321,6 +334,7 @@ param_env: ty::ParamEnv<'tcx>, err: &mut Diagnostic, ); + fn probe_assoc_types_at_expr( &self, type_diffs: &[TypeError<'tcx>], @@ -364,7 +378,7 @@ /// Type parameter needs more bounds. The trivial case is `T` `where T: Bound`, but /// it can also be an `impl Trait` param that needs to be decomposed to a type /// param for cleaner code. 
-fn suggest_restriction<'tcx>( +pub fn suggest_restriction<'tcx>( tcx: TyCtxt<'tcx>, item_id: LocalDefId, hir_generics: &hir::Generics<'tcx>, @@ -884,7 +898,8 @@ return false; } - if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_pred)) = obligation.predicate.kind().skip_binder() + if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_pred)) = + obligation.predicate.kind().skip_binder() && Some(trait_pred.def_id()) == self.tcx.lang_items().sized_trait() { // Don't suggest calling to turn an unsized type into a sized type @@ -1156,15 +1171,15 @@ && Some(proj.projection_ty.def_id) == self.tcx.lang_items().fn_once_output() // args tuple will always be args[1] && let ty::Tuple(args) = proj.projection_ty.args.type_at(1).kind() - { - Some(( - DefIdOrName::DefId(def_id), - pred.kind().rebind(proj.term.ty().unwrap()), - pred.kind().rebind(args.as_slice()), - )) - } else { - None - } + { + Some(( + DefIdOrName::DefId(def_id), + pred.kind().rebind(proj.term.ty().unwrap()), + pred.kind().rebind(args.as_slice()), + )) + } else { + None + } }, ) } @@ -1174,43 +1189,43 @@ && Some(proj.def_id) == self.tcx.lang_items().fn_once_output() // for existential projection, args are shifted over by 1 && let ty::Tuple(args) = proj.args.type_at(0).kind() - { - Some(( - DefIdOrName::Name("trait object"), - pred.rebind(proj.term.ty().unwrap()), - pred.rebind(args.as_slice()), - )) - } else { - None - } + { + Some(( + DefIdOrName::Name("trait object"), + pred.rebind(proj.term.ty().unwrap()), + pred.rebind(args.as_slice()), + )) + } else { + None + } }) } ty::Param(param) => { let generics = self.tcx.generics_of(body_id); let name = if generics.count() > param.index as usize - && let def = generics.param_at(param.index as usize, self.tcx) - && matches!(def.kind, ty::GenericParamDefKind::Type { .. }) - && def.name == param.name - { - DefIdOrName::DefId(def.def_id) - } else { - DefIdOrName::Name("type parameter") - }; + && let def = generics.param_at(param.index as usize, self.tcx) + && matches!(def.kind, ty::GenericParamDefKind::Type { .. }) + && def.name == param.name + { + DefIdOrName::DefId(def.def_id) + } else { + DefIdOrName::Name("type parameter") + }; param_env.caller_bounds().iter().find_map(|pred| { if let ty::ClauseKind::Projection(proj) = pred.kind().skip_binder() && Some(proj.projection_ty.def_id) == self.tcx.lang_items().fn_once_output() && proj.projection_ty.self_ty() == found // args tuple will always be args[1] && let ty::Tuple(args) = proj.projection_ty.args.type_at(1).kind() - { - Some(( - name, - pred.kind().rebind(proj.term.ty().unwrap()), - pred.kind().rebind(args.as_slice()), - )) - } else { - None - } + { + Some(( + name, + pred.kind().rebind(proj.term.ty().unwrap()), + pred.kind().rebind(args.as_slice()), + )) + } else { + None + } }) } _ => None, @@ -1316,7 +1331,8 @@ let mut_ref_self_ty_satisfies_pred = mk_result(trait_pred_and_mut_ref); let (ref_inner_ty_satisfies_pred, ref_inner_ty_mut) = - if let ObligationCauseCode::ItemObligation(_) | ObligationCauseCode::ExprItemObligation(..) = obligation.cause.code() + if let ObligationCauseCode::ItemObligation(_) + | ObligationCauseCode::ExprItemObligation(..) 
= obligation.cause.code() && let ty::Ref(_, ty, mutability) = old_pred.self_ty().skip_binder().kind() { ( @@ -1618,7 +1634,8 @@ fn suggest_remove_await(&self, obligation: &PredicateObligation<'tcx>, err: &mut Diagnostic) { let hir = self.tcx.hir(); - if let ObligationCauseCode::AwaitableExpr(Some(hir_id)) = obligation.cause.code().peel_derives() + if let ObligationCauseCode::AwaitableExpr(Some(hir_id)) = + obligation.cause.code().peel_derives() && let hir::Node::Expr(expr) = hir.get(*hir_id) { // FIXME: use `obligation.predicate.kind()...trait_ref.self_ty()` to see if we have `()` @@ -1628,9 +1645,10 @@ // use nth(1) to skip one layer of desugaring from `IntoIter::into_iter` if let Some((_, hir::Node::Expr(await_expr))) = hir.parent_iter(*hir_id).nth(1) - && let Some(expr_span) = expr.span.find_ancestor_inside(await_expr.span) + && let Some(expr_span) = expr.span.find_ancestor_inside_same_ctxt(await_expr.span) { - let removal_span = self.tcx + let removal_span = self + .tcx .sess .source_map() .span_extend_while(expr_span, char::is_whitespace) @@ -1654,30 +1672,28 @@ err.span_label(*span, format!("this call returns `{}`", pred.self_ty())); } if let Some(typeck_results) = &self.typeck_results - && let ty = typeck_results.expr_ty_adjusted(base) - && let ty::FnDef(def_id, _args) = ty.kind() - && let Some(hir::Node::Item(hir::Item { ident, span, vis_span, .. })) = - hir.get_if_local(*def_id) - { - let msg = format!( - "alternatively, consider making `fn {ident}` asynchronous" + && let ty = typeck_results.expr_ty_adjusted(base) + && let ty::FnDef(def_id, _args) = ty.kind() + && let Some(hir::Node::Item(hir::Item { ident, span, vis_span, .. })) = + hir.get_if_local(*def_id) + { + let msg = format!("alternatively, consider making `fn {ident}` asynchronous"); + if vis_span.is_empty() { + err.span_suggestion_verbose( + span.shrink_to_lo(), + msg, + "async ", + Applicability::MaybeIncorrect, + ); + } else { + err.span_suggestion_verbose( + vis_span.shrink_to_hi(), + msg, + " async", + Applicability::MaybeIncorrect, ); - if vis_span.is_empty() { - err.span_suggestion_verbose( - span.shrink_to_lo(), - msg, - "async ", - Applicability::MaybeIncorrect, - ); - } else { - err.span_suggestion_verbose( - vis_span.shrink_to_hi(), - msg, - " async", - Applicability::MaybeIncorrect, - ); - } } + } } } } @@ -1791,13 +1807,13 @@ "this expression has type `{}`, which implements `{}`", ty, trait_pred.print_modifiers_and_trait_path() - ) + ), ); err.span_suggestion( self.tcx.sess.source_map().end_point(stmt.span), "remove this semicolon", "", - Applicability::MachineApplicable + Applicability::MachineApplicable, ); return true; } @@ -1856,14 +1872,18 @@ let mut sugg = vec![(span.shrink_to_lo(), "Box<".to_string()), (span.shrink_to_hi(), ">".to_string())]; sugg.extend(visitor.returns.into_iter().flat_map(|expr| { - let span = expr.span.find_ancestor_in_same_ctxt(obligation.cause.span).unwrap_or(expr.span); + let span = + expr.span.find_ancestor_in_same_ctxt(obligation.cause.span).unwrap_or(expr.span); if !span.can_be_used_for_suggestions() { vec![] } else if let hir::ExprKind::Call(path, ..) 
= expr.kind && let hir::ExprKind::Path(hir::QPath::TypeRelative(ty, method)) = path.kind && method.ident.name == sym::new && let hir::TyKind::Path(hir::QPath::Resolved(.., box_path)) = ty.kind - && box_path.res.opt_def_id().is_some_and(|def_id| Some(def_id) == self.tcx.lang_items().owned_box()) + && box_path + .res + .opt_def_id() + .is_some_and(|def_id| Some(def_id) == self.tcx.lang_items().owned_box()) { // Don't box `Box::new` vec![] @@ -1963,7 +1983,7 @@ let argument_kind = match expected.skip_binder().self_ty().kind() { ty::Closure(..) => "closure", - ty::Generator(..) => "generator", + ty::Coroutine(..) => "coroutine", _ => "function", }; let mut err = struct_span_err!( @@ -2013,15 +2033,13 @@ { let expected_self = self.tcx.anonymize_bound_vars(pred.kind().rebind(trait_pred.self_ty())); - let expected_args = self - .tcx - .anonymize_bound_vars(pred.kind().rebind(trait_pred.trait_ref.args)); + let expected_args = + self.tcx.anonymize_bound_vars(pred.kind().rebind(trait_pred.trait_ref.args)); // Find another predicate whose self-type is equal to the expected self type, // but whose args don't match. - let other_pred = predicates.into_iter() - .enumerate() - .find(|(other_idx, (pred, _))| match pred.kind().skip_binder() { + let other_pred = predicates.into_iter().enumerate().find(|(other_idx, (pred, _))| { + match pred.kind().skip_binder() { ty::ClauseKind::Trait(trait_pred) if self.tcx.is_fn_trait(trait_pred.def_id()) && other_idx != idx @@ -2040,7 +2058,8 @@ true } _ => false, - }); + } + }); // If we found one, then it's very likely the cause of the error. if let Some((_, (_, other_pred_span))) = other_pred { err.span_note( @@ -2126,33 +2145,33 @@ let hir = self.tcx.hir(); // Attempt to detect an async-await error by looking at the obligation causes, looking - // for a generator to be present. + // for a coroutine to be present. // // When a future does not implement a trait because of a captured type in one of the - // generators somewhere in the call stack, then the result is a chain of obligations. + // coroutines somewhere in the call stack, then the result is a chain of obligations. // // Given an `async fn` A that calls an `async fn` B which captures a non-send type and that // future is passed as an argument to a function C which requires a `Send` type, then the // chain looks something like this: // - // - `BuiltinDerivedObligation` with a generator witness (B) - // - `BuiltinDerivedObligation` with a generator (B) + // - `BuiltinDerivedObligation` with a coroutine witness (B) + // - `BuiltinDerivedObligation` with a coroutine (B) // - `BuiltinDerivedObligation` with `impl std::future::Future` (B) - // - `BuiltinDerivedObligation` with a generator witness (A) - // - `BuiltinDerivedObligation` with a generator (A) + // - `BuiltinDerivedObligation` with a coroutine witness (A) + // - `BuiltinDerivedObligation` with a coroutine (A) // - `BuiltinDerivedObligation` with `impl std::future::Future` (A) // - `BindingObligation` with `impl_send` (Send requirement) // - // The first obligation in the chain is the most useful and has the generator that captured - // the type. The last generator (`outer_generator` below) has information about where the - // bound was introduced. At least one generator should be present for this diagnostic to be + // The first obligation in the chain is the most useful and has the coroutine that captured + // the type. The last coroutine (`outer_coroutine` below) has information about where the + // bound was introduced. 
At least one coroutine should be present for this diagnostic to be // modified. let (mut trait_ref, mut target_ty) = match obligation.predicate.kind().skip_binder() { ty::PredicateKind::Clause(ty::ClauseKind::Trait(p)) => (Some(p), Some(p.self_ty())), _ => (None, None), }; - let mut generator = None; - let mut outer_generator = None; + let mut coroutine = None; + let mut outer_coroutine = None; let mut next_code = Some(obligation.cause.code()); let mut seen_upvar_tys_infer_tuple = false; @@ -2172,18 +2191,18 @@ ); match *ty.kind() { - ty::Generator(did, ..) | ty::GeneratorWitness(did, _) => { - generator = generator.or(Some(did)); - outer_generator = Some(did); + ty::Coroutine(did, ..) | ty::CoroutineWitness(did, _) => { + coroutine = coroutine.or(Some(did)); + outer_coroutine = Some(did); } ty::Tuple(_) if !seen_upvar_tys_infer_tuple => { // By introducing a tuple of upvar types into the chain of obligations - // of a generator, the first non-generator item is now the tuple itself, + // of a coroutine, the first non-coroutine item is now the tuple itself, // we shall ignore this. seen_upvar_tys_infer_tuple = true; } - _ if generator.is_none() => { + _ if coroutine.is_none() => { trait_ref = Some(cause.derived.parent_trait_pred.skip_binder()); target_ty = Some(ty); } @@ -2201,18 +2220,18 @@ ); match *ty.kind() { - ty::Generator(did, ..) | ty::GeneratorWitness(did, ..) => { - generator = generator.or(Some(did)); - outer_generator = Some(did); + ty::Coroutine(did, ..) | ty::CoroutineWitness(did, ..) => { + coroutine = coroutine.or(Some(did)); + outer_coroutine = Some(did); } ty::Tuple(_) if !seen_upvar_tys_infer_tuple => { // By introducing a tuple of upvar types into the chain of obligations - // of a generator, the first non-generator item is now the tuple itself, + // of a coroutine, the first non-coroutine item is now the tuple itself, // we shall ignore this. seen_upvar_tys_infer_tuple = true; } - _ if generator.is_none() => { + _ if coroutine.is_none() => { trait_ref = Some(derived_obligation.parent_trait_pred.skip_binder()); target_ty = Some(ty); } @@ -2225,48 +2244,48 @@ } } - // Only continue if a generator was found. - debug!(?generator, ?trait_ref, ?target_ty); - let (Some(generator_did), Some(trait_ref), Some(target_ty)) = - (generator, trait_ref, target_ty) + // Only continue if a coroutine was found. + debug!(?coroutine, ?trait_ref, ?target_ty); + let (Some(coroutine_did), Some(trait_ref), Some(target_ty)) = + (coroutine, trait_ref, target_ty) else { return false; }; - let span = self.tcx.def_span(generator_did); + let span = self.tcx.def_span(coroutine_did); - let generator_did_root = self.tcx.typeck_root_def_id(generator_did); + let coroutine_did_root = self.tcx.typeck_root_def_id(coroutine_did); debug!( - ?generator_did, - ?generator_did_root, + ?coroutine_did, + ?coroutine_did_root, typeck_results.hir_owner = ?self.typeck_results.as_ref().map(|t| t.hir_owner), ?span, ); - let generator_body = generator_did + let coroutine_body = coroutine_did .as_local() .and_then(|def_id| hir.maybe_body_owned_by(def_id)) .map(|body_id| hir.body(body_id)); let mut visitor = AwaitsVisitor::default(); - if let Some(body) = generator_body { + if let Some(body) = coroutine_body { visitor.visit_body(body); } debug!(awaits = ?visitor.awaits); - // Look for a type inside the generator interior that matches the target type to get + // Look for a type inside the coroutine interior that matches the target type to get // a span. 
let target_ty_erased = self.tcx.erase_regions(target_ty); let ty_matches = |ty| -> bool { // Careful: the regions for types that appear in the - // generator interior are not generally known, so we + // coroutine interior are not generally known, so we // want to erase them when comparing (and anyway, // `Send` and other bounds are generally unaffected by // the choice of region). When erasing regions, we // also have to erase late-bound regions. This is - // because the types that appear in the generator + // because the types that appear in the coroutine // interior generally contain "bound regions" to // represent regions that are part of the suspended - // generator frame. Bound regions are preserved by + // coroutine frame. Bound regions are preserved by // `erase_regions` and so we must also call // `erase_late_bound_regions`. let ty_erased = self.tcx.erase_late_bound_regions(ty); @@ -2276,44 +2295,44 @@ eq }; - // Get the typeck results from the infcx if the generator is the function we are currently + // Get the typeck results from the infcx if the coroutine is the function we are currently // type-checking; otherwise, get them by performing a query. This is needed to avoid - // cycles. If we can't use resolved types because the generator comes from another crate, + // cycles. If we can't use resolved types because the coroutine comes from another crate, // we still provide a targeted error but without all the relevant spans. - let generator_data = match &self.typeck_results { - Some(t) if t.hir_owner.to_def_id() == generator_did_root => GeneratorData(&t), - _ if generator_did.is_local() => { - GeneratorData(self.tcx.typeck(generator_did.expect_local())) + let coroutine_data = match &self.typeck_results { + Some(t) if t.hir_owner.to_def_id() == coroutine_did_root => CoroutineData(&t), + _ if coroutine_did.is_local() => { + CoroutineData(self.tcx.typeck(coroutine_did.expect_local())) } _ => return false, }; - let generator_within_in_progress_typeck = match &self.typeck_results { - Some(t) => t.hir_owner.to_def_id() == generator_did_root, + let coroutine_within_in_progress_typeck = match &self.typeck_results { + Some(t) => t.hir_owner.to_def_id() == coroutine_did_root, _ => false, }; let mut interior_or_upvar_span = None; - let from_awaited_ty = generator_data.get_from_await_ty(visitor, hir, ty_matches); + let from_awaited_ty = coroutine_data.get_from_await_ty(visitor, hir, ty_matches); debug!(?from_awaited_ty); // Avoid disclosing internal information to downstream crates. - if generator_did.is_local() + if coroutine_did.is_local() // Try to avoid cycles. 
- && !generator_within_in_progress_typeck - && let Some(generator_info) = self.tcx.mir_generator_witnesses(generator_did) + && !coroutine_within_in_progress_typeck + && let Some(coroutine_info) = self.tcx.mir_coroutine_witnesses(coroutine_did) { - debug!(?generator_info); + debug!(?coroutine_info); 'find_source: for (variant, source_info) in - generator_info.variant_fields.iter().zip(&generator_info.variant_source_info) + coroutine_info.variant_fields.iter().zip(&coroutine_info.variant_source_info) { debug!(?variant); for &local in variant { - let decl = &generator_info.field_tys[local]; + let decl = &coroutine_info.field_tys[local]; debug!(?decl); if ty_matches(ty::Binder::dummy(decl.ty)) && !decl.ignore_for_traits { - interior_or_upvar_span = Some(GeneratorInteriorOrUpvar::Interior( + interior_or_upvar_span = Some(CoroutineInteriorOrUpvar::Interior( decl.source_info.span, Some((source_info.span, from_awaited_ty)), )); @@ -2325,21 +2344,21 @@ if interior_or_upvar_span.is_none() { interior_or_upvar_span = - generator_data.try_get_upvar_span(&self, generator_did, ty_matches); + coroutine_data.try_get_upvar_span(&self, coroutine_did, ty_matches); } - if interior_or_upvar_span.is_none() && !generator_did.is_local() { - interior_or_upvar_span = Some(GeneratorInteriorOrUpvar::Interior(span, None)); + if interior_or_upvar_span.is_none() && !coroutine_did.is_local() { + interior_or_upvar_span = Some(CoroutineInteriorOrUpvar::Interior(span, None)); } debug!(?interior_or_upvar_span); if let Some(interior_or_upvar_span) = interior_or_upvar_span { - let is_async = self.tcx.generator_is_async(generator_did); + let is_async = self.tcx.coroutine_is_async(coroutine_did); self.note_obligation_cause_for_async_await( err, interior_or_upvar_span, is_async, - outer_generator, + outer_coroutine, trait_ref, target_ty, obligation, @@ -2357,9 +2376,9 @@ fn note_obligation_cause_for_async_await( &self, err: &mut Diagnostic, - interior_or_upvar_span: GeneratorInteriorOrUpvar, + interior_or_upvar_span: CoroutineInteriorOrUpvar, is_async: bool, - outer_generator: Option, + outer_coroutine: Option, trait_pred: ty::TraitPredicate<'tcx>, target_ty: Ty<'tcx>, obligation: &PredicateObligation<'tcx>, @@ -2369,7 +2388,7 @@ let (await_or_yield, an_await_or_yield) = if is_async { ("await", "an await") } else { ("yield", "a yield") }; - let future_or_generator = if is_async { "future" } else { "generator" }; + let future_or_coroutine = if is_async { "future" } else { "coroutine" }; // Special case the primary error message when send or sync is the trait that was // not implemented. 
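// An illustrative, assumed example of the scenario the Send/Sync special case above
// targets: an `Rc` held across an `.await` makes the returned future non-`Send`.
use std::rc::Rc;

async fn tick() {}

async fn holds_rc_across_await() {
    let counter = Rc::new(1);
    tick().await; // `counter` is still live across this await point
    assert_eq!(*counter, 1);
}

fn requires_send<T: Send>(_future: T) {}

fn main() {
    requires_send(tick()); // fine: this future captures nothing non-`Send`
    // Uncommenting the next line reports that the future returned by
    // `holds_rc_across_await` is not `Send`, pointing at the await above.
    // requires_send(holds_rc_across_await());
}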
@@ -2382,34 +2401,49 @@ err.clear_code(); err.set_primary_message(format!( - "{future_or_generator} cannot be {trait_verb} between threads safely" + "{future_or_coroutine} cannot be {trait_verb} between threads safely" )); let original_span = err.span.primary_span().unwrap(); let mut span = MultiSpan::from_span(original_span); - let message = outer_generator - .and_then(|generator_did| { - Some(match self.tcx.generator_kind(generator_did).unwrap() { - GeneratorKind::Gen => format!("generator is not {trait_name}"), - GeneratorKind::Async(AsyncGeneratorKind::Fn) => self + let message = outer_coroutine + .and_then(|coroutine_did| { + Some(match self.tcx.coroutine_kind(coroutine_did).unwrap() { + CoroutineKind::Coroutine => format!("coroutine is not {trait_name}"), + CoroutineKind::Async(CoroutineSource::Fn) => self .tcx - .parent(generator_did) + .parent(coroutine_did) .as_local() .map(|parent_did| hir.local_def_id_to_hir_id(parent_did)) .and_then(|parent_hir_id| hir.opt_name(parent_hir_id)) .map(|name| { format!("future returned by `{name}` is not {trait_name}") })?, - GeneratorKind::Async(AsyncGeneratorKind::Block) => { + CoroutineKind::Async(CoroutineSource::Block) => { format!("future created by async block is not {trait_name}") } - GeneratorKind::Async(AsyncGeneratorKind::Closure) => { + CoroutineKind::Async(CoroutineSource::Closure) => { format!("future created by async closure is not {trait_name}") } + CoroutineKind::Gen(CoroutineSource::Fn) => self + .tcx + .parent(coroutine_did) + .as_local() + .map(|parent_did| hir.local_def_id_to_hir_id(parent_did)) + .and_then(|parent_hir_id| hir.opt_name(parent_hir_id)) + .map(|name| { + format!("iterator returned by `{name}` is not {trait_name}") + })?, + CoroutineKind::Gen(CoroutineSource::Block) => { + format!("iterator created by gen block is not {trait_name}") + } + CoroutineKind::Gen(CoroutineSource::Closure) => { + format!("iterator created by gen closure is not {trait_name}") + } }) }) - .unwrap_or_else(|| format!("{future_or_generator} is not {trait_name}")); + .unwrap_or_else(|| format!("{future_or_coroutine} is not {trait_name}")); span.push_span_label(original_span, message); err.set_span(span); @@ -2453,11 +2487,11 @@ ); err.span_note( span, - format!("{future_or_generator} {trait_explanation} as this value is used across {an_await_or_yield}"), + format!("{future_or_coroutine} {trait_explanation} as this value is used across {an_await_or_yield}"), ); }; match interior_or_upvar_span { - GeneratorInteriorOrUpvar::Interior(interior_span, interior_extra_info) => { + CoroutineInteriorOrUpvar::Interior(interior_span, interior_extra_info) => { if let Some((yield_span, from_awaited_ty)) = interior_extra_info { if let Some(await_span) = from_awaited_ty { // The type causing this obligation is one being awaited at await_span. @@ -2480,7 +2514,7 @@ } } } - GeneratorInteriorOrUpvar::Upvar(upvar_span) => { + CoroutineInteriorOrUpvar::Upvar(upvar_span) => { // `Some((ref_ty, is_mut))` if `target_ty` is `&T` or `&mut T` and fails to impl `Send` let non_send = match target_ty.kind() { ty::Ref(_, ref_ty, mutability) => match self.evaluate_obligation(&obligation) { @@ -2647,9 +2681,32 @@ // Check for foreign traits being reachable. self.tcx.visible_parent_map(()).get(&def_id).is_some() }; - if let DefKind::Trait = tcx.def_kind(item_def_id) && !visible_item { - // FIXME(estebank): extend this to search for all the types that do - // implement this trait and list them. 
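// An illustrative, assumed example of the pattern targeted by the `?Sized` relaxation
// suggested in the lines added just below: an associated type carries an implicit
// `Sized` bound unless it is explicitly relaxed.
trait Storage {
    // Without `?Sized`, `Buffer` defaults to `Buffer: Sized`, so an impl could not
    // pick an unsized type such as `str`; the new suggestion proposes this relaxation.
    type Buffer: ?Sized;
}

struct StrStorage;

impl Storage for StrStorage {
    type Buffer = str; // allowed only because of the `?Sized` relaxation above
}

fn main() {}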
+ if Some(def_id) == self.tcx.lang_items().sized_trait() + && let Some(hir::Node::TraitItem(hir::TraitItem { + ident, + kind: hir::TraitItemKind::Type(bounds, None), + .. + })) = tcx.hir().get_if_local(item_def_id) + // Do not suggest relaxing if there is an explicit `Sized` obligation. + && !bounds.iter() + .filter_map(|bound| bound.trait_ref()) + .any(|tr| tr.trait_def_id() == self.tcx.lang_items().sized_trait()) + { + let (span, separator) = if let [.., last] = bounds { + (last.span().shrink_to_hi(), " +") + } else { + (ident.span.shrink_to_hi(), ":") + }; + err.span_suggestion_verbose( + span, + "consider relaxing the implicit `Sized` restriction", + format!("{separator} ?Sized"), + Applicability::MachineApplicable, + ); + } + if let DefKind::Trait = tcx.def_kind(item_def_id) + && !visible_item + { err.note(format!( "`{short_item_name}` is a \"sealed trait\", because to implement \ it you also need to implement `{}`, which is not accessible; \ @@ -2657,6 +2714,34 @@ types that already implement it", with_no_trimmed_paths!(tcx.def_path_str(def_id)), )); + let impls_of = tcx.trait_impls_of(def_id); + let impls = impls_of + .non_blanket_impls() + .values() + .flatten() + .chain(impls_of.blanket_impls().iter()) + .collect::>(); + if !impls.is_empty() { + let len = impls.len(); + let mut types = impls.iter() + .map(|t| with_no_trimmed_paths!(format!( + " {}", + tcx.type_of(*t).instantiate_identity(), + ))) + .collect::>(); + let post = if types.len() > 9 { + types.truncate(8); + format!("\nand {} others", len - 8) + } else { + String::new() + }; + err.help(format!( + "the following type{} implement{} the trait:\n{}{post}", + pluralize!(len), + if len == 1 { "s" } else { "" }, + types.join("\n"), + )); + } } } } else { @@ -2684,20 +2769,30 @@ )); } } - ObligationCauseCode::RepeatElementCopy { is_const_fn } => { + ObligationCauseCode::RepeatElementCopy { is_constable, elt_type, elt_span, elt_stmt_span } => { err.note( "the `Copy` trait is required because this value will be copied for each element of the array", ); - - if is_const_fn { - err.help( - "consider creating a new `const` item and initializing it with the result \ - of the function call to be used in the repeat position, like \ - `const VAL: Type = const_fn();` and `let x = [VAL; 42];`", - ); + let value_kind = match is_constable { + IsConstable::Fn => Some("the result of the function call"), + IsConstable::Ctor => Some("the result of the constructor"), + _ => None + }; + let sm = tcx.sess.source_map(); + if let Some(value_kind) = value_kind && + let Ok(snip) = sm.span_to_snippet(elt_span) + { + let help_msg = format!( + "consider creating a new `const` item and initializing it with {value_kind} \ + to be used in the repeat position"); + let indentation = sm.indentation_before(elt_stmt_span).unwrap_or_default(); + err.multipart_suggestion(help_msg, vec![ + (elt_stmt_span.shrink_to_lo(), format!("const ARRAY_REPEAT_VALUE: {elt_type} = {snip};\n{indentation}")), + (elt_span, "ARRAY_REPEAT_VALUE".to_string()) + ], Applicability::MachineApplicable); } - if self.tcx.sess.is_nightly_build() && is_const_fn { + if self.tcx.sess.is_nightly_build() && matches!(is_constable, IsConstable::Fn|IsConstable::Ctor) { err.help( "create an inline `const` block, see RFC #2920 \ for more information", @@ -2754,7 +2849,8 @@ && let ty::ClauseKind::Trait(trait_pred) = clause && let ty::Dynamic(..) 
= trait_pred.self_ty().kind() { - let span = if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) + let span = if let Ok(snippet) = + self.tcx.sess.source_map().span_to_snippet(span) && snippet.starts_with("dyn ") { let pos = snippet.len() - snippet[3..].trim_start().len(); @@ -2789,7 +2885,7 @@ err.note("the return type of a function must have a statically known size"); } ObligationCauseCode::SizedYieldType => { - err.note("the yield type of a generator must have a statically known size"); + err.note("the yield type of a coroutine must have a statically known size"); } ObligationCauseCode::AssignmentLhsSized => { err.note("the left-hand-side of an assignment must have a statically known size"); @@ -2845,22 +2941,28 @@ err.note("all inline asm arguments must have a statically known size"); } ObligationCauseCode::SizedClosureCapture(closure_def_id) => { - err.note("all values captured by value by a closure must have a statically known size"); - let hir::ExprKind::Closure(closure) = self.tcx.hir().get_by_def_id(closure_def_id).expect_expr().kind else { + err.note( + "all values captured by value by a closure must have a statically known size", + ); + let hir::ExprKind::Closure(closure) = + self.tcx.hir().get_by_def_id(closure_def_id).expect_expr().kind + else { bug!("expected closure in SizedClosureCapture obligation"); }; - if let hir::CaptureBy::Value = closure.capture_clause + if let hir::CaptureBy::Value { .. } = closure.capture_clause && let Some(span) = closure.fn_arg_span { err.span_label(span, "this closure captures all values by move"); } } - ObligationCauseCode::SizedGeneratorInterior(generator_def_id) => { - let what = match self.tcx.generator_kind(generator_def_id) { - None | Some(hir::GeneratorKind::Gen) => "yield", - Some(hir::GeneratorKind::Async(..)) => "await", + ObligationCauseCode::SizedCoroutineInterior(coroutine_def_id) => { + let what = match self.tcx.coroutine_kind(coroutine_def_id) { + None | Some(hir::CoroutineKind::Coroutine) | Some(hir::CoroutineKind::Gen(_)) => "yield", + Some(hir::CoroutineKind::Async(..)) => "await", }; - err.note(format!("all values live across `{what}` must have a statically known size")); + err.note(format!( + "all values live across `{what}` must have a statically known size" + )); } ObligationCauseCode::ConstPatternStructural => { err.note("constants used for pattern-matching must derive `PartialEq` and `Eq`"); @@ -2878,7 +2980,7 @@ return; } - // If the obligation for a tuple is set directly by a Generator or Closure, + // If the obligation for a tuple is set directly by a Coroutine or Closure, // then the tuple must be the one containing capture types. let is_upvar_tys_infer_tuple = if !matches!(ty.kind(), ty::Tuple(..)) { false @@ -2888,7 +2990,7 @@ let parent_trait_ref = self.resolve_vars_if_possible(data.parent_trait_pred); let nested_ty = parent_trait_ref.skip_binder().self_ty(); - matches!(nested_ty.kind(), ty::Generator(..)) + matches!(nested_ty.kind(), ty::Coroutine(..)) || matches!(nested_ty.kind(), ty::Closure(..)) } else { false @@ -2908,7 +3010,7 @@ }, ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }) => { // If the previous type is async fn, this is the future generated by the body of an async function. - // Avoid printing it twice (it was already printed in the `ty::Generator` arm below). + // Avoid printing it twice (it was already printed in the `ty::Coroutine` arm below). 
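// An illustrative, assumed example of the rewrite behind the `ARRAY_REPEAT_VALUE`
// suggestion earlier in this hunk: a non-`Copy` repeat operand is routed through a
// `const` item, which array repeat expressions accept even for non-`Copy` types.
fn main() {
    // `[String::new(); 4]` is rejected because `String` is not `Copy`; the suggested
    // shape uses a named constant instead:
    const ARRAY_REPEAT_VALUE: String = String::new();
    let rows = [ARRAY_REPEAT_VALUE; 4];
    assert_eq!(rows.len(), 4);
}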
let is_future = tcx.ty_is_opaque_future(ty); debug!( ?obligated_types, @@ -2917,8 +3019,8 @@ ); if is_future && obligated_types.last().is_some_and(|ty| match ty.kind() { - ty::Generator(last_def_id, ..) => { - tcx.generator_is_async(*last_def_id) + ty::Coroutine(last_def_id, ..) => { + tcx.coroutine_is_async(*last_def_id) } _ => false, }) @@ -2927,7 +3029,7 @@ } err.span_note(self.tcx.def_span(def_id), msg) } - ty::GeneratorWitness(def_id, args) => { + ty::CoroutineWitness(def_id, args) => { use std::fmt::Write; // FIXME: this is kind of an unusual format for rustc, can we make it more clear? @@ -2935,21 +3037,21 @@ // FIXME: only print types which don't meet the trait requirement let mut msg = "required because it captures the following types: ".to_owned(); - for bty in tcx.generator_hidden_types(*def_id) { + for bty in tcx.coroutine_hidden_types(*def_id) { let ty = bty.instantiate(tcx, args); write!(msg, "`{ty}`, ").unwrap(); } err.note(msg.trim_end_matches(", ").to_string()) } - ty::Generator(def_id, _, _) => { + ty::Coroutine(def_id, _, _) => { let sp = self.tcx.def_span(def_id); - // Special-case this to say "async block" instead of `[static generator]`. - let kind = tcx.generator_kind(def_id).unwrap().descr(); + // Special-case this to say "async block" instead of `[static coroutine]`. + let kind = tcx.coroutine_kind(def_id).unwrap(); err.span_note( sp, with_forced_trimmed_paths!(format!( - "required because it's used within this {kind}", + "required because it's used within this {kind:#}", )), ) } @@ -3244,7 +3346,7 @@ ) { if let Some(body_id) = self.tcx.hir().maybe_body_owned_by(obligation.cause.body_id) { let body = self.tcx.hir().body(body_id); - if let Some(hir::GeneratorKind::Async(_)) = body.generator_kind { + if let Some(hir::CoroutineKind::Async(_)) = body.coroutine_kind { let future_trait = self.tcx.require_lang_item(LangItem::Future, None); let self_ty = self.resolve_vars_if_possible(trait_pred.self_ty()); @@ -3385,15 +3487,17 @@ trait_pred: ty::PolyTraitPredicate<'tcx>, ) { if let ObligationCauseCode::ImplDerivedObligation(_) = obligation.cause.code() - && self.tcx.is_diagnostic_item(sym::SliceIndex, trait_pred.skip_binder().trait_ref.def_id) + && self + .tcx + .is_diagnostic_item(sym::SliceIndex, trait_pred.skip_binder().trait_ref.def_id) && let ty::Slice(_) = trait_pred.skip_binder().trait_ref.args.type_at(1).kind() && let ty::Ref(_, inner_ty, _) = trait_pred.skip_binder().self_ty().kind() && let ty::Uint(ty::UintTy::Usize) = inner_ty.kind() { err.span_suggestion_verbose( obligation.cause.span.shrink_to_lo(), - "dereference this index", - '*', + "dereference this index", + '*', Applicability::MachineApplicable, ); } @@ -3413,10 +3517,11 @@ if let Some(Node::Expr(expr)) = hir.find(arg_hir_id) && let Some(typeck_results) = &self.typeck_results { - if let hir::Expr { kind: hir::ExprKind::Block(..), .. } = expr { - let expr = expr.peel_blocks(); - let ty = typeck_results.expr_ty_adjusted_opt(expr).unwrap_or(Ty::new_misc_error(tcx,)); - let span = expr.span; + if let hir::Expr { kind: hir::ExprKind::Block(block, _), .. 
} = expr { + let inner_expr = expr.peel_blocks(); + let ty = typeck_results.expr_ty_adjusted_opt(inner_expr) + .unwrap_or(Ty::new_misc_error(tcx)); + let span = inner_expr.span; if Some(span) != err.span.primary_span() { err.span_label( span, @@ -3427,6 +3532,49 @@ format!("this tail expression is of type `{ty}`") }, ); + if let ty::PredicateKind::Clause(clause) = failed_pred.kind().skip_binder() + && let ty::ClauseKind::Trait(pred) = clause + && [ + tcx.lang_items().fn_once_trait(), + tcx.lang_items().fn_mut_trait(), + tcx.lang_items().fn_trait(), + ].contains(&Some(pred.def_id())) + { + if let [stmt, ..] = block.stmts + && let hir::StmtKind::Semi(value) = stmt.kind + && let hir::ExprKind::Closure(hir::Closure { + body, + fn_decl_span, + .. + }) = value.kind + && let body = hir.body(*body) + && !matches!(body.value.kind, hir::ExprKind::Block(..)) + { + // Check if the failed predicate was an expectation of a closure type + // and if there might have been a `{ |args|` typo instead of `|args| {`. + err.multipart_suggestion( + "you might have meant to open the closure body instead of placing \ + a closure within a block", + vec![ + (expr.span.with_hi(value.span.lo()), String::new()), + (fn_decl_span.shrink_to_hi(), " {".to_string()), + ], + Applicability::MaybeIncorrect, + ); + } else { + // Maybe the bare block was meant to be a closure. + err.span_suggestion_verbose( + expr.span.shrink_to_lo(), + "you might have meant to create the closure instead of a block", + format!( + "|{}| ", + (0..pred.trait_ref.args.len() - 1).map(|_| "_") + .collect::>() + .join(", ")), + Applicability::MaybeIncorrect, + ); + } + } } } @@ -3437,7 +3585,8 @@ let mut type_diffs = vec![]; if let ObligationCauseCode::ExprBindingObligation(def_id, _, _, idx) = parent_code && let Some(node_args) = typeck_results.node_args_opt(call_hir_id) - && let where_clauses = self.tcx.predicates_of(def_id).instantiate(self.tcx, node_args) + && let where_clauses = + self.tcx.predicates_of(def_id).instantiate(self.tcx, node_args) && let Some(where_pred) = where_clauses.predicates.get(*idx) { if let Some(where_pred) = where_pred.as_trait_clause() @@ -3447,32 +3596,34 @@ let failed_pred = self.instantiate_binder_with_fresh_vars( expr.span, LateBoundRegionConversionTime::FnCall, - failed_pred + failed_pred, ); - let zipped = - iter::zip(where_pred.trait_ref.args, failed_pred.trait_ref.args); + let zipped = iter::zip(where_pred.trait_ref.args, failed_pred.trait_ref.args); for (expected, actual) in zipped { self.probe(|_| { - match self - .at(&ObligationCause::misc(expr.span, body_id), param_env) - .eq(DefineOpaqueTypes::No, expected, actual) - { + match self.at(&ObligationCause::misc(expr.span, body_id), param_env).eq( + DefineOpaqueTypes::No, + expected, + actual, + ) { Ok(_) => (), // We ignore nested obligations here for now. 
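The new branch above targets a likely `{ |args|` / `|args| {` mix-up: when a `Fn*` bound fails because the argument is a block whose first statement is a closure, it suggests opening the closure body instead. A sketch of the kind of code it is aimed at (the helper `call` is invented for illustration, not part of the patch):

    fn call(f: impl Fn(i32) -> i32) -> i32 {
        f(1)
    }

    fn main() {
        // Probable typo: `{ |x| ... }` instead of `|x| { ... }`. The block runs the
        // closure as a discarded statement and has type `()`, so the `Fn(i32) -> i32`
        // bound fails (E0277); the new suggestion proposes moving the `{` to just
        // after the closure arguments instead.
        let _ = call({ |x| x + 1; });
    }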
Err(err) => type_diffs.push(err), } }) - }; + } } else if let Some(where_pred) = where_pred.as_projection_clause() && let Some(failed_pred) = failed_pred.to_opt_poly_projection_pred() && let Some(found) = failed_pred.skip_binder().term.ty() { - type_diffs = vec![ - Sorts(ty::error::ExpectedFound { - expected: Ty::new_alias(self.tcx,ty::Projection, where_pred.skip_binder().projection_ty), - found, - }), - ]; + type_diffs = vec![Sorts(ty::error::ExpectedFound { + expected: Ty::new_alias( + self.tcx, + ty::Projection, + where_pred.skip_binder().projection_ty, + ), + found, + })]; } } if let hir::ExprKind::Path(hir::QPath::Resolved(None, path)) = expr.kind @@ -3586,12 +3737,115 @@ msg, sugg, Applicability::MaybeIncorrect, - SuggestionStyle::ShowAlways + SuggestionStyle::ShowAlways, ); } } } + fn look_for_iterator_item_mistakes( + &self, + assocs_in_this_method: &[Option<(Span, (DefId, Ty<'tcx>))>], + typeck_results: &TypeckResults<'tcx>, + type_diffs: &[TypeError<'tcx>], + param_env: ty::ParamEnv<'tcx>, + path_segment: &hir::PathSegment<'_>, + args: &[hir::Expr<'_>], + err: &mut Diagnostic, + ) { + let tcx = self.tcx; + // Special case for iterator chains, we look at potential failures of `Iterator::Item` + // not being `: Clone` and `Iterator::map` calls with spurious trailing `;`. + for entry in assocs_in_this_method { + let Some((_span, (def_id, ty))) = entry else { + continue; + }; + for diff in type_diffs { + let Sorts(expected_found) = diff else { + continue; + }; + if tcx.is_diagnostic_item(sym::IteratorItem, *def_id) + && path_segment.ident.name == sym::map + && self.can_eq(param_env, expected_found.found, *ty) + && let [arg] = args + && let hir::ExprKind::Closure(closure) = arg.kind + { + let body = tcx.hir().body(closure.body); + if let hir::ExprKind::Block(block, None) = body.value.kind + && let None = block.expr + && let [.., stmt] = block.stmts + && let hir::StmtKind::Semi(expr) = stmt.kind + // FIXME: actually check the expected vs found types, but right now + // the expected is a projection that we need to resolve. + // && let Some(tail_ty) = typeck_results.expr_ty_opt(expr) + && expected_found.found.is_unit() + { + err.span_suggestion_verbose( + expr.span.shrink_to_hi().with_hi(stmt.span.hi()), + "consider removing this semicolon", + String::new(), + Applicability::MachineApplicable, + ); + } + let expr = if let hir::ExprKind::Block(block, None) = body.value.kind + && let Some(expr) = block.expr + { + expr + } else { + body.value + }; + if let hir::ExprKind::MethodCall(path_segment, rcvr, [], span) = expr.kind + && path_segment.ident.name == sym::clone + && let Some(expr_ty) = typeck_results.expr_ty_opt(expr) + && let Some(rcvr_ty) = typeck_results.expr_ty_opt(rcvr) + && self.can_eq(param_env, expr_ty, rcvr_ty) + && let ty::Ref(_, ty, _) = expr_ty.kind() + { + err.span_label( + span, + format!( + "this method call is cloning the reference `{expr_ty}`, not \ + `{ty}` which doesn't implement `Clone`", + ), + ); + let ty::Param(..) 
= ty.kind() else { + continue; + }; + let hir = tcx.hir(); + let node = hir.get_by_def_id(hir.get_parent_item(expr.hir_id).def_id); + + let pred = ty::Binder::dummy(ty::TraitPredicate { + trait_ref: ty::TraitRef::from_lang_item( + tcx, + LangItem::Clone, + span, + [*ty], + ), + polarity: ty::ImplPolarity::Positive, + }); + let Some(generics) = node.generics() else { + continue; + }; + let Some(body_id) = node.body_id() else { + continue; + }; + suggest_restriction( + tcx, + hir.body_owner_def_id(body_id), + &generics, + &format!("type parameter `{ty}`"), + err, + node.fn_sig(), + None, + pred, + None, + ); + } + } + } + } + } + fn point_at_chain( &self, expr: &hir::Expr<'_>, @@ -3611,13 +3865,22 @@ let mut prev_ty = self.resolve_vars_if_possible( typeck_results.expr_ty_adjusted_opt(expr).unwrap_or(Ty::new_misc_error(tcx)), ); - while let hir::ExprKind::MethodCall(_path_segment, rcvr_expr, _args, span) = expr.kind { + while let hir::ExprKind::MethodCall(path_segment, rcvr_expr, args, span) = expr.kind { // Point at every method call in the chain with the resulting type. // vec![1, 2, 3].iter().map(mapper).sum() // ^^^^^^ ^^^^^^^^^^^ expr = rcvr_expr; let assocs_in_this_method = self.probe_assoc_types_at_expr(&type_diffs, span, prev_ty, expr.hir_id, param_env); + self.look_for_iterator_item_mistakes( + &assocs_in_this_method, + typeck_results, + &type_diffs, + param_env, + path_segment, + args, + err, + ); assocs.push(assocs_in_this_method); prev_ty = self.resolve_vars_if_possible( typeck_results.expr_ty_adjusted_opt(expr).unwrap_or(Ty::new_misc_error(tcx)), @@ -3638,9 +3901,17 @@ if let hir::Node::Param(param) = parent { // ...and it is a an fn argument. let prev_ty = self.resolve_vars_if_possible( - typeck_results.node_type_opt(param.hir_id).unwrap_or(Ty::new_misc_error(tcx,)), + typeck_results + .node_type_opt(param.hir_id) + .unwrap_or(Ty::new_misc_error(tcx)), + ); + let assocs_in_this_method = self.probe_assoc_types_at_expr( + &type_diffs, + param.ty_span, + prev_ty, + param.hir_id, + param_env, ); - let assocs_in_this_method = self.probe_assoc_types_at_expr(&type_diffs, param.ty_span, prev_ty, param.hir_id, param_env); if assocs_in_this_method.iter().any(|a| a.is_some()) { assocs.push(assocs_in_this_method); print_root_expr = false; @@ -3651,7 +3922,9 @@ } // We want the type before deref coercions, otherwise we talk about `&[_]` // instead of `Vec<_>`. - if let Some(ty) = typeck_results.expr_ty_opt(expr) && print_root_expr { + if let Some(ty) = typeck_results.expr_ty_opt(expr) + && print_root_expr + { let ty = with_forced_trimmed_paths!(self.ty_to_string(ty)); // Point at the root expression // vec![1, 2, 3].iter().map(mapper).sum() @@ -3782,7 +4055,7 @@ // This corresponds to `::Item = _`. let projection = ty::Binder::dummy(ty::PredicateKind::Clause( ty::ClauseKind::Projection(ty::ProjectionPredicate { - projection_ty: self.tcx.mk_alias_ty(proj.def_id, args), + projection_ty: ty::AliasTy::new(self.tcx, proj.def_id, args), term: ty_var.into(), }), )); @@ -4000,14 +4273,6 @@ // ... whose signature is `async` (i.e. this is an AFIT) let (sig, body) = item.expect_fn(); - let hir::IsAsync::Async(async_span) = sig.header.asyncness else { - return; - }; - let Ok(async_span) = - self.tcx.sess.source_map().span_extend_while(async_span, |c| c.is_whitespace()) - else { - return; - }; let hir::FnRetTy::Return(hir::Ty { kind: hir::TyKind::OpaqueDef(def, ..), .. 
}) = sig.decl.output else { @@ -4021,55 +4286,17 @@ return; } - let future = self.tcx.hir().item(*def).expect_opaque_ty(); - let Some(hir::GenericBound::LangItemTrait(_, _, _, generics)) = future.bounds.get(0) else { - // `async fn` should always lower to a lang item bound... but don't ICE. - return; - }; - let Some(hir::TypeBindingKind::Equality { term: hir::Term::Ty(future_output_ty) }) = - generics.bindings.get(0).map(|binding| binding.kind) - else { - // Also should never happen. + let Some(sugg) = suggest_desugaring_async_fn_to_impl_future_in_trait( + self.tcx, + *sig, + *body, + opaque_def_id.expect_local(), + &format!(" + {auto_trait}"), + ) else { return; }; let function_name = self.tcx.def_path_str(fn_def_id); - - let mut sugg = if future_output_ty.span.is_empty() { - vec![ - (async_span, String::new()), - ( - future_output_ty.span, - format!(" -> impl std::future::Future + {auto_trait}"), - ), - ] - } else { - vec![ - ( - future_output_ty.span.shrink_to_lo(), - "impl std::future::Future + {auto_trait}")), - (async_span, String::new()), - ] - }; - - // If there's a body, we also need to wrap it in `async {}` - if let hir::TraitFn::Provided(body) = body { - let body = self.tcx.hir().body(*body); - let body_span = body.value.span; - let body_span_without_braces = - body_span.with_lo(body_span.lo() + BytePos(1)).with_hi(body_span.hi() - BytePos(1)); - if body_span_without_braces.is_empty() { - sugg.push((body_span_without_braces, " async {} ".to_owned())); - } else { - sugg.extend([ - (body_span_without_braces.shrink_to_lo(), "async {".to_owned()), - (body_span_without_braces.shrink_to_hi(), "} ".to_owned()), - ]); - } - } - err.multipart_suggestion( format!( "`{auto_trait}` can be made part of the associated future's \ @@ -4150,7 +4377,9 @@ let mut span = arg.span.shrink_to_lo(); let mut left = found_refs.len() - expected_refs.len(); let mut ty = arg; - while let hir::TyKind::Ref(_, mut_ty) = &ty.kind && left > 0 { + while let hir::TyKind::Ref(_, mut_ty) = &ty.kind + && left > 0 + { span = span.with_hi(mut_ty.ty.span.lo()); ty = mut_ty.ty; left -= 1; @@ -4221,7 +4450,7 @@ fn visit_body(&mut self, body: &'v hir::Body<'v>) { assert!(!self.in_block_tail); - if body.generator_kind().is_none() { + if body.coroutine_kind().is_none() { if let hir::ExprKind::Block(block, None) = body.value.kind { if block.expr.is_some() { self.in_block_tail = true; @@ -4300,6 +4529,39 @@ } } +pub(super) fn get_explanation_based_on_obligation<'tcx>( + obligation: &PredicateObligation<'tcx>, + trait_ref: ty::PolyTraitRef<'tcx>, + trait_predicate: &ty::PolyTraitPredicate<'tcx>, + pre_message: String, +) -> String { + if let ObligationCauseCode::MainFunctionType = obligation.cause.code() { + "consider using `()`, or a `Result`".to_owned() + } else { + let ty_desc = match trait_ref.skip_binder().self_ty().kind() { + ty::FnDef(_, _) => Some("fn item"), + ty::Closure(_, _) => Some("closure"), + _ => None, + }; + + match ty_desc { + Some(desc) => format!( + "{}the trait `{}` is not implemented for {} `{}`", + pre_message, + trait_predicate.print_modifiers_and_trait_path(), + desc, + trait_ref.skip_binder().self_ty(), + ), + None => format!( + "{}the trait `{}` is not implemented for `{}`", + pre_message, + trait_predicate.print_modifiers_and_trait_path(), + trait_ref.skip_binder().self_ty(), + ), + } + } +} + // Replace `param` with `replace_ty` struct ReplaceImplTraitFolder<'tcx> { tcx: TyCtxt<'tcx>, @@ -4321,3 +4583,65 @@ self.tcx } } + +pub fn suggest_desugaring_async_fn_to_impl_future_in_trait<'tcx>( + tcx: 
TyCtxt<'tcx>, + sig: hir::FnSig<'tcx>, + body: hir::TraitFn<'tcx>, + opaque_def_id: LocalDefId, + add_bounds: &str, +) -> Option> { + let hir::IsAsync::Async(async_span) = sig.header.asyncness else { + return None; + }; + let Ok(async_span) = tcx.sess.source_map().span_extend_while(async_span, |c| c.is_whitespace()) + else { + return None; + }; + + let future = tcx.hir().get_by_def_id(opaque_def_id).expect_item().expect_opaque_ty(); + let Some(hir::GenericBound::LangItemTrait(_, _, _, generics)) = future.bounds.get(0) else { + // `async fn` should always lower to a lang item bound... but don't ICE. + return None; + }; + let Some(hir::TypeBindingKind::Equality { term: hir::Term::Ty(future_output_ty) }) = + generics.bindings.get(0).map(|binding| binding.kind) + else { + // Also should never happen. + return None; + }; + + let mut sugg = if future_output_ty.span.is_empty() { + vec![ + (async_span, String::new()), + ( + future_output_ty.span, + format!(" -> impl std::future::Future{add_bounds}"), + ), + ] + } else { + vec![ + (future_output_ty.span.shrink_to_lo(), "impl std::future::Future{add_bounds}")), + (async_span, String::new()), + ] + }; + + // If there's a body, we also need to wrap it in `async {}` + if let hir::TraitFn::Provided(body) = body { + let body = tcx.hir().body(body); + let body_span = body.value.span; + let body_span_without_braces = + body_span.with_lo(body_span.lo() + BytePos(1)).with_hi(body_span.hi() - BytePos(1)); + if body_span_without_braces.is_empty() { + sugg.push((body_span_without_braces, " async {} ".to_owned())); + } else { + sugg.extend([ + (body_span_without_braces.shrink_to_lo(), "async {".to_owned()), + (body_span_without_braces.shrink_to_hi(), "} ".to_owned()), + ]); + } + } + + Some(sugg) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/type_err_ctxt_ext.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/type_err_ctxt_ext.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/type_err_ctxt_ext.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/error_reporting/type_err_ctxt_ext.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,3278 @@ +use super::on_unimplemented::{AppendConstMessage, OnUnimplementedNote, TypeErrCtxtExt as _}; +use super::suggestions::{get_explanation_based_on_obligation, TypeErrCtxtExt as _}; +use crate::errors::{ClosureFnMutLabel, ClosureFnOnceLabel, ClosureKindMismatch}; +use crate::infer::error_reporting::{TyCategory, TypeAnnotationNeeded as ErrorCode}; +use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; +use crate::infer::{self, InferCtxt}; +use crate::traits::error_reporting::infer_ctxt_ext::InferCtxtExt; +use crate::traits::error_reporting::{ambiguity, ambiguity::Ambiguity::*}; +use crate::traits::query::evaluate_obligation::InferCtxtExt as _; +use crate::traits::specialize::to_pretty_impl_header; +use crate::traits::NormalizeExt; +use crate::traits::{ + elaborate, FulfillmentError, FulfillmentErrorCode, MismatchedProjectionTypes, Obligation, + ObligationCause, ObligationCauseCode, ObligationCtxt, OutputTypeParameterMismatch, Overflow, + PredicateObligation, SelectionError, TraitNotObjectSafe, +}; +use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; +use rustc_errors::{ + pluralize, struct_span_err, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, + MultiSpan, StashKey, Style, +}; +use 
rustc_hir as hir; +use rustc_hir::def::{DefKind, Namespace, Res}; +use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_hir::intravisit::Visitor; +use rustc_hir::{GenericParam, Item, Node}; +use rustc_infer::infer::error_reporting::TypeErrCtxt; +use rustc_infer::infer::{InferOk, TypeTrace}; +use rustc_middle::traits::select::OverflowError; +use rustc_middle::traits::{DefiningAnchor, SelectionOutputTypeParameterMismatch}; +use rustc_middle::ty::abstract_const::NotConstEvaluatable; +use rustc_middle::ty::error::{ExpectedFound, TypeError}; +use rustc_middle::ty::fold::{BottomUpFolder, TypeFolder, TypeSuperFoldable}; +use rustc_middle::ty::print::{with_forced_trimmed_paths, FmtPrinter, Print}; +use rustc_middle::ty::{ + self, SubtypePredicate, ToPolyTraitRef, ToPredicate, TraitRef, Ty, TyCtxt, TypeFoldable, + TypeVisitable, TypeVisitableExt, +}; +use rustc_session::config::{DumpSolverProofTree, TraitSolver}; +use rustc_session::Limit; +use rustc_span::def_id::LOCAL_CRATE; +use rustc_span::symbol::sym; +use rustc_span::{ExpnKind, Span, DUMMY_SP}; +use std::borrow::Cow; +use std::fmt; +use std::iter; + +use super::{ + dump_proof_tree, ArgKind, CandidateSimilarity, FindExprBySpan, FindTypeParam, + GetSafeTransmuteErrorAndReason, HasNumericInferVisitor, ImplCandidate, UnsatisfiedConst, +}; + +pub use rustc_infer::traits::error_reporting::*; + +pub trait TypeErrCtxtExt<'tcx> { + fn build_overflow_error( + &self, + predicate: &T, + span: Span, + suggest_increasing_limit: bool, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> + where + T: fmt::Display + TypeFoldable> + Print<'tcx, FmtPrinter<'tcx, 'tcx>>; + + fn report_overflow_error( + &self, + predicate: &T, + span: Span, + suggest_increasing_limit: bool, + mutate: impl FnOnce(&mut Diagnostic), + ) -> ! + where + T: fmt::Display + TypeFoldable> + Print<'tcx, FmtPrinter<'tcx, 'tcx>>; + + fn report_overflow_no_abort(&self, obligation: PredicateObligation<'tcx>) -> ErrorGuaranteed; + + fn report_fulfillment_errors(&self, errors: Vec>) -> ErrorGuaranteed; + + fn report_overflow_obligation( + &self, + obligation: &Obligation<'tcx, T>, + suggest_increasing_limit: bool, + ) -> ! + where + T: ToPredicate<'tcx> + Clone; + + fn suggest_new_overflow_limit(&self, err: &mut Diagnostic); + + fn report_overflow_obligation_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> !; + + /// The `root_obligation` parameter should be the `root_obligation` field + /// from a `FulfillmentError`. If no `FulfillmentError` is available, + /// then it should be the same as `obligation`. 
+ fn report_selection_error( + &self, + obligation: PredicateObligation<'tcx>, + root_obligation: &PredicateObligation<'tcx>, + error: &SelectionError<'tcx>, + ); + + fn fn_arg_obligation(&self, obligation: &PredicateObligation<'tcx>) -> bool; + + fn report_const_param_not_wf( + &self, + ty: Ty<'tcx>, + obligation: &PredicateObligation<'tcx>, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>; +} + +impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { + fn report_fulfillment_errors( + &self, + mut errors: Vec>, + ) -> ErrorGuaranteed { + #[derive(Debug)] + struct ErrorDescriptor<'tcx> { + predicate: ty::Predicate<'tcx>, + index: Option, // None if this is an old error + } + + let mut error_map: FxIndexMap<_, Vec<_>> = self + .reported_trait_errors + .borrow() + .iter() + .map(|(&span, predicates)| { + ( + span, + predicates + .iter() + .map(|&predicate| ErrorDescriptor { predicate, index: None }) + .collect(), + ) + }) + .collect(); + + // Ensure `T: Sized` and `T: WF` obligations come last. This lets us display diagnostics + // with more relevant type information and hide redundant E0282 errors. + errors.sort_by_key(|e| match e.obligation.predicate.kind().skip_binder() { + ty::PredicateKind::Clause(ty::ClauseKind::Trait(pred)) + if Some(pred.def_id()) == self.tcx.lang_items().sized_trait() => + { + 1 + } + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(_)) => 3, + ty::PredicateKind::Coerce(_) => 2, + _ => 0, + }); + + for (index, error) in errors.iter().enumerate() { + // We want to ignore desugarings here: spans are equivalent even + // if one is the result of a desugaring and the other is not. + let mut span = error.obligation.cause.span; + let expn_data = span.ctxt().outer_expn_data(); + if let ExpnKind::Desugaring(_) = expn_data.kind { + span = expn_data.call_site; + } + + error_map.entry(span).or_default().push(ErrorDescriptor { + predicate: error.obligation.predicate, + index: Some(index), + }); + } + + // We do this in 2 passes because we want to display errors in order, though + // maybe it *is* better to sort errors by span or something. + let mut is_suppressed = vec![false; errors.len()]; + for (_, error_set) in error_map.iter() { + // We want to suppress "duplicate" errors with the same span. + for error in error_set { + if let Some(index) = error.index { + // Suppress errors that are either: + // 1) strictly implied by another error. + // 2) implied by an error with a smaller index. + for error2 in error_set { + if error2.index.is_some_and(|index2| is_suppressed[index2]) { + // Avoid errors being suppressed by already-suppressed + // errors, to prevent all errors from being suppressed + // at once. + continue; + } + + if self.error_implies(error2.predicate, error.predicate) + && !(error2.index >= error.index + && self.error_implies(error.predicate, error2.predicate)) + { + info!("skipping {:?} (implied by {:?})", error, error2); + is_suppressed[index] = true; + break; + } + } + } + } + } + + for from_expansion in [false, true] { + for (error, suppressed) in iter::zip(&errors, &is_suppressed) { + if !suppressed && error.obligation.cause.span.from_expansion() == from_expansion { + self.report_fulfillment_error(error); + // We want to ignore desugarings here: spans are equivalent even + // if one is the result of a desugaring and the other is not. 
+ let mut span = error.obligation.cause.span; + let expn_data = span.ctxt().outer_expn_data(); + if let ExpnKind::Desugaring(_) = expn_data.kind { + span = expn_data.call_site; + } + self.reported_trait_errors + .borrow_mut() + .entry(span) + .or_default() + .push(error.obligation.predicate); + } + } + } + + self.tcx.sess.delay_span_bug(DUMMY_SP, "expected fulfillment errors") + } + + /// Reports that an overflow has occurred and halts compilation. We + /// halt compilation unconditionally because it is important that + /// overflows never be masked -- they basically represent computations + /// whose result could not be truly determined and thus we can't say + /// if the program type checks or not -- and they are unusual + /// occurrences in any case. + fn report_overflow_error( + &self, + predicate: &T, + span: Span, + suggest_increasing_limit: bool, + mutate: impl FnOnce(&mut Diagnostic), + ) -> ! + where + T: fmt::Display + TypeFoldable> + Print<'tcx, FmtPrinter<'tcx, 'tcx>>, + { + let mut err = self.build_overflow_error(predicate, span, suggest_increasing_limit); + mutate(&mut err); + err.emit(); + + self.tcx.sess.abort_if_errors(); + bug!(); + } + + fn build_overflow_error( + &self, + predicate: &T, + span: Span, + suggest_increasing_limit: bool, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> + where + T: fmt::Display + TypeFoldable> + Print<'tcx, FmtPrinter<'tcx, 'tcx>>, + { + let predicate = self.resolve_vars_if_possible(predicate.clone()); + let mut pred_str = predicate.to_string(); + + if pred_str.len() > 50 { + // We don't need to save the type to a file, we will be talking about this type already + // in a separate note when we explain the obligation, so it will be available that way. + let mut cx: FmtPrinter<'_, '_> = + FmtPrinter::new_with_limit(self.tcx, Namespace::TypeNS, rustc_session::Limit(6)); + predicate.print(&mut cx).unwrap(); + pred_str = cx.into_buffer(); + } + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0275, + "overflow evaluating the requirement `{}`", + pred_str, + ); + + if suggest_increasing_limit { + self.suggest_new_overflow_limit(&mut err); + } + + err + } + + /// Reports that an overflow has occurred and halts compilation. We + /// halt compilation unconditionally because it is important that + /// overflows never be masked -- they basically represent computations + /// whose result could not be truly determined and thus we can't say + /// if the program type checks or not -- and they are unusual + /// occurrences in any case. + fn report_overflow_obligation( + &self, + obligation: &Obligation<'tcx, T>, + suggest_increasing_limit: bool, + ) -> ! 
+ where + T: ToPredicate<'tcx> + Clone, + { + let predicate = obligation.predicate.clone().to_predicate(self.tcx); + let predicate = self.resolve_vars_if_possible(predicate); + self.report_overflow_error( + &predicate, + obligation.cause.span, + suggest_increasing_limit, + |err| { + self.note_obligation_cause_code( + obligation.cause.body_id, + err, + predicate, + obligation.param_env, + obligation.cause.code(), + &mut vec![], + &mut Default::default(), + ); + }, + ); + } + + fn suggest_new_overflow_limit(&self, err: &mut Diagnostic) { + let suggested_limit = match self.tcx.recursion_limit() { + Limit(0) => Limit(2), + limit => limit * 2, + }; + err.help(format!( + "consider increasing the recursion limit by adding a \ + `#![recursion_limit = \"{}\"]` attribute to your crate (`{}`)", + suggested_limit, + self.tcx.crate_name(LOCAL_CRATE), + )); + } + + /// Reports that a cycle was detected which led to overflow and halts + /// compilation. This is equivalent to `report_overflow_obligation` except + /// that we can give a more helpful error message (and, in particular, + /// we do not suggest increasing the overflow limit, which is not + /// going to help). + fn report_overflow_obligation_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> ! { + let cycle = self.resolve_vars_if_possible(cycle.to_owned()); + assert!(!cycle.is_empty()); + + debug!(?cycle, "report_overflow_error_cycle"); + + // The 'deepest' obligation is most likely to have a useful + // cause 'backtrace' + self.report_overflow_obligation( + cycle.iter().max_by_key(|p| p.recursion_depth).unwrap(), + false, + ); + } + + fn report_overflow_no_abort(&self, obligation: PredicateObligation<'tcx>) -> ErrorGuaranteed { + let obligation = self.resolve_vars_if_possible(obligation); + let mut err = self.build_overflow_error(&obligation.predicate, obligation.cause.span, true); + self.note_obligation_cause(&mut err, &obligation); + self.point_at_returns_when_relevant(&mut err, &obligation); + err.emit() + } + + fn report_selection_error( + &self, + mut obligation: PredicateObligation<'tcx>, + root_obligation: &PredicateObligation<'tcx>, + error: &SelectionError<'tcx>, + ) { + let tcx = self.tcx; + + if tcx.sess.opts.unstable_opts.dump_solver_proof_tree == DumpSolverProofTree::OnError { + dump_proof_tree(root_obligation, self.infcx); + } + + let mut span = obligation.cause.span; + // FIXME: statically guarantee this by tainting after the diagnostic is emitted + self.set_tainted_by_errors( + tcx.sess.delay_span_bug(span, "`report_selection_error` did not emit an error"), + ); + + let mut err = match *error { + SelectionError::Unimplemented => { + // If this obligation was generated as a result of well-formedness checking, see if we + // can get a better error message by performing HIR-based well-formedness checking. 
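`report_overflow_error` and `suggest_new_overflow_limit` above back the E0275 diagnostic. A self-contained sketch that runs into it, assuming the default `recursion_limit` of 128 (so the doubled suggestion is 256); names are invented for the example:

    struct Wrap<T>(T);

    trait Trait {}

    // Proving `Wrap<u8>: Trait` requires `Wrap<Wrap<u8>>: Trait`, and so on forever.
    impl<T> Trait for Wrap<T> where Wrap<Wrap<T>>: Trait {}

    fn assert_trait<T: Trait>() {}

    fn main() {
        // error[E0275]: overflow evaluating the requirement `Wrap<Wrap<...>>: Trait`
        // help: consider increasing the recursion limit by adding a
        //       `#![recursion_limit = "256"]` attribute to your crate
        assert_trait::<Wrap<u8>>();
    }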
+ if let ObligationCauseCode::WellFormed(Some(wf_loc)) = + root_obligation.cause.code().peel_derives() + && !obligation.predicate.has_non_region_infer() + { + if let Some(cause) = self + .tcx + .diagnostic_hir_wf_check((tcx.erase_regions(obligation.predicate), *wf_loc)) + { + obligation.cause = cause.clone(); + span = obligation.cause.span; + } + } + + if let ObligationCauseCode::CompareImplItemObligation { + impl_item_def_id, + trait_item_def_id, + kind: _, + } = *obligation.cause.code() + { + self.report_extra_impl_obligation( + span, + impl_item_def_id, + trait_item_def_id, + &format!("`{}`", obligation.predicate), + ) + .emit(); + return; + } + + // Report a const-param specific error + if let ObligationCauseCode::ConstParam(ty) = *obligation.cause.code().peel_derives() + { + self.report_const_param_not_wf(ty, &obligation).emit(); + return; + } + + let bound_predicate = obligation.predicate.kind(); + match bound_predicate.skip_binder() { + ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_predicate)) => { + let trait_predicate = bound_predicate.rebind(trait_predicate); + let trait_predicate = self.resolve_vars_if_possible(trait_predicate); + + // FIXME(effects) + let predicate_is_const = false; + + if self.tcx.sess.has_errors().is_some() + && trait_predicate.references_error() + { + return; + } + if self.fn_arg_obligation(&obligation) { + // Silence redundant errors on binding acccess that are already + // reported on the binding definition (#56607). + return; + } + let trait_ref = trait_predicate.to_poly_trait_ref(); + let (post_message, pre_message, type_def, file_note) = self + .get_parent_trait_ref(obligation.cause.code()) + .map(|(t, s)| { + let (t, file) = self.tcx.short_ty_string(t); + ( + format!(" in `{t}`"), + format!("within `{t}`, "), + s.map(|s| (format!("within this `{t}`"), s)), + file.map(|file| format!( + "the full trait has been written to '{}'", + file.display(), + )) + ) + }) + .unwrap_or_default(); + + let OnUnimplementedNote { + message, + label, + notes, + parent_label, + append_const_msg, + } = self.on_unimplemented_note(trait_ref, &obligation); + let have_alt_message = message.is_some() || label.is_some(); + let is_try_conversion = self.is_try_conversion(span, trait_ref.def_id()); + let is_unsize = + Some(trait_ref.def_id()) == self.tcx.lang_items().unsize_trait(); + let (message, notes, append_const_msg) = if is_try_conversion { + ( + Some(format!( + "`?` couldn't convert the error to `{}`", + trait_ref.skip_binder().self_ty(), + )), + vec![ + "the question mark operation (`?`) implicitly performs a \ + conversion on the error value using the `From` trait" + .to_owned(), + ], + Some(AppendConstMessage::Default), + ) + } else { + (message, notes, append_const_msg) + }; + + let err_msg = self.get_standard_error_message( + &trait_predicate, + message, + predicate_is_const, + append_const_msg, + post_message, + ); + + let (err_msg, safe_transmute_explanation) = if Some(trait_ref.def_id()) + == self.tcx.lang_items().transmute_trait() + { + // Recompute the safe transmute reason and use that for the error reporting + match self.get_safe_transmute_error_and_reason( + obligation.clone(), + trait_ref, + span, + ) { + GetSafeTransmuteErrorAndReason::Silent => return, + GetSafeTransmuteErrorAndReason::Error { + err_msg, + safe_transmute_explanation, + } => (err_msg, Some(safe_transmute_explanation)), + } + } else { + (err_msg, None) + }; + + let mut err = struct_span_err!(self.tcx.sess, span, E0277, "{}", err_msg); + + if is_try_conversion && let Some(ret_span) = 
self.return_type_span(&obligation) { + err.span_label( + ret_span, + format!( + "expected `{}` because of this", + trait_ref.skip_binder().self_ty() + ), + ); + } + + if Some(trait_ref.def_id()) == tcx.lang_items().tuple_trait() { + self.add_tuple_trait_message( + &obligation.cause.code().peel_derives(), + &mut err, + ); + } + + if Some(trait_ref.def_id()) == tcx.lang_items().drop_trait() + && predicate_is_const + { + err.note("`~const Drop` was renamed to `~const Destruct`"); + err.note("See for more details"); + } + + let explanation = get_explanation_based_on_obligation( + &obligation, + trait_ref, + &trait_predicate, + pre_message, + ); + + self.check_for_binding_assigned_block_without_tail_expression( + &obligation, + &mut err, + trait_predicate, + ); + if self.suggest_add_reference_to_arg( + &obligation, + &mut err, + trait_predicate, + have_alt_message, + ) { + self.note_obligation_cause(&mut err, &obligation); + err.emit(); + return; + } + + file_note.map(|note| err.note(note)); + if let Some(s) = label { + // If it has a custom `#[rustc_on_unimplemented]` + // error message, let's display it as the label! + err.span_label(span, s); + if !matches!(trait_ref.skip_binder().self_ty().kind(), ty::Param(_)) { + // When the self type is a type param We don't need to "the trait + // `std::marker::Sized` is not implemented for `T`" as we will point + // at the type param with a label to suggest constraining it. + err.help(explanation); + } + } else if let Some(custom_explanation) = safe_transmute_explanation { + err.span_label(span, custom_explanation); + } else { + err.span_label(span, explanation); + } + + if let ObligationCauseCode::Coercion { source, target } = + *obligation.cause.code().peel_derives() + { + if Some(trait_ref.def_id()) == self.tcx.lang_items().sized_trait() { + self.suggest_borrowing_for_object_cast( + &mut err, + &root_obligation, + source, + target, + ); + } + } + + let UnsatisfiedConst(unsatisfied_const) = self + .maybe_add_note_for_unsatisfied_const( + &obligation, + trait_ref, + &trait_predicate, + &mut err, + span, + ); + + if let Some((msg, span)) = type_def { + err.span_label(span, msg); + } + for note in notes { + // If it has a custom `#[rustc_on_unimplemented]` note, let's display it + err.note(note); + } + if let Some(s) = parent_label { + let body = obligation.cause.body_id; + err.span_label(tcx.def_span(body), s); + } + + self.suggest_floating_point_literal(&obligation, &mut err, &trait_ref); + self.suggest_dereferencing_index(&obligation, &mut err, trait_predicate); + let mut suggested = + self.suggest_dereferences(&obligation, &mut err, trait_predicate); + suggested |= self.suggest_fn_call(&obligation, &mut err, trait_predicate); + let impl_candidates = self.find_similar_impl_candidates(trait_predicate); + suggested = if let &[cand] = &impl_candidates[..] 
{ + let cand = cand.trait_ref; + if let (ty::FnPtr(_), ty::FnDef(..)) = + (cand.self_ty().kind(), trait_ref.self_ty().skip_binder().kind()) + { + err.span_suggestion( + span.shrink_to_hi(), + format!( + "the trait `{}` is implemented for fn pointer `{}`, try casting using `as`", + cand.print_only_trait_path(), + cand.self_ty(), + ), + format!(" as {}", cand.self_ty()), + Applicability::MaybeIncorrect, + ); + true + } else { + false + } + } else { + false + } || suggested; + suggested |= + self.suggest_remove_reference(&obligation, &mut err, trait_predicate); + suggested |= self.suggest_semicolon_removal( + &obligation, + &mut err, + span, + trait_predicate, + ); + self.note_version_mismatch(&mut err, &trait_ref); + self.suggest_remove_await(&obligation, &mut err); + self.suggest_derive(&obligation, &mut err, trait_predicate); + + if Some(trait_ref.def_id()) == tcx.lang_items().try_trait() { + self.suggest_await_before_try( + &mut err, + &obligation, + trait_predicate, + span, + ); + } + + if self.suggest_add_clone_to_arg(&obligation, &mut err, trait_predicate) { + err.emit(); + return; + } + + if self.suggest_impl_trait(&mut err, &obligation, trait_predicate) { + err.emit(); + return; + } + + if is_unsize { + // If the obligation failed due to a missing implementation of the + // `Unsize` trait, give a pointer to why that might be the case + err.note( + "all implementations of `Unsize` are provided \ + automatically by the compiler, see \ + \ + for more information", + ); + } + + let is_fn_trait = tcx.is_fn_trait(trait_ref.def_id()); + let is_target_feature_fn = if let ty::FnDef(def_id, _) = + *trait_ref.skip_binder().self_ty().kind() + { + !self.tcx.codegen_fn_attrs(def_id).target_features.is_empty() + } else { + false + }; + if is_fn_trait && is_target_feature_fn { + err.note( + "`#[target_feature]` functions do not implement the `Fn` traits", + ); + } + + self.try_to_add_help_message( + &obligation, + trait_ref, + &trait_predicate, + &mut err, + span, + is_fn_trait, + suggested, + unsatisfied_const, + ); + + // Changing mutability doesn't make a difference to whether we have + // an `Unsize` impl (Fixes ICE in #71036) + if !is_unsize { + self.suggest_change_mut(&obligation, &mut err, trait_predicate); + } + + // If this error is due to `!: Trait` not implemented but `(): Trait` is + // implemented, and fallback has occurred, then it could be due to a + // variable that used to fallback to `()` now falling back to `!`. Issue a + // note informing about the change in behaviour. + if trait_predicate.skip_binder().self_ty().is_never() + && self.fallback_has_occurred + { + let predicate = trait_predicate.map_bound(|trait_pred| { + trait_pred.with_self_ty(self.tcx, Ty::new_unit(self.tcx)) + }); + let unit_obligation = obligation.with(tcx, predicate); + if self.predicate_may_hold(&unit_obligation) { + err.note( + "this error might have been caused by changes to \ + Rust's type-inference algorithm (see issue #48950 \ + \ + for more information)", + ); + err.help("did you intend to use the type `()` here instead?"); + } + } + + self.explain_hrtb_projection(&mut err, trait_predicate, obligation.param_env, &obligation.cause); + self.suggest_desugaring_async_fn_in_trait(&mut err, trait_ref); + + // Return early if the trait is Debug or Display and the invocation + // originates within a standard library macro, because the output + // is otherwise overwhelming and unhelpful (see #85844 for an + // example). 
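The single-candidate branch above adds an `as`-cast suggestion when the trait is implemented for a fn pointer but the failing self type is a fn item. A snippet that should hit it (the trait and function names are invented for the example):

    trait Callback {}

    // Implemented for the fn *pointer* type only, not for the fn item type of `handler`.
    impl Callback for fn(i32) {}

    fn register<C: Callback>(_: C) {}

    fn handler(_: i32) {}

    fn main() {
        // error[E0277]: the trait bound `fn(i32) {handler}: Callback` is not satisfied
        // help: the trait `Callback` is implemented for fn pointer `fn(i32)`,
        //       try casting using `as`: `register(handler as fn(i32))`
        register(handler);
    }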
+ + let in_std_macro = + match obligation.cause.span.ctxt().outer_expn_data().macro_def_id { + Some(macro_def_id) => { + let crate_name = tcx.crate_name(macro_def_id.krate); + crate_name == sym::std || crate_name == sym::core + } + None => false, + }; + + if in_std_macro + && matches!( + self.tcx.get_diagnostic_name(trait_ref.def_id()), + Some(sym::Debug | sym::Display) + ) + { + err.emit(); + return; + } + + err + } + + ty::PredicateKind::Subtype(predicate) => { + // Errors for Subtype predicates show up as + // `FulfillmentErrorCode::CodeSubtypeError`, + // not selection error. + span_bug!(span, "subtype requirement gave wrong error: `{:?}`", predicate) + } + + ty::PredicateKind::Coerce(predicate) => { + // Errors for Coerce predicates show up as + // `FulfillmentErrorCode::CodeSubtypeError`, + // not selection error. + span_bug!(span, "coerce requirement gave wrong error: `{:?}`", predicate) + } + + ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(..)) + | ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(..)) => { + span_bug!( + span, + "outlives clauses should not error outside borrowck. obligation: `{:?}`", + obligation + ) + } + + ty::PredicateKind::Clause(ty::ClauseKind::Projection(..)) => { + span_bug!( + span, + "projection clauses should be implied from elsewhere. obligation: `{:?}`", + obligation + ) + } + + ty::PredicateKind::ObjectSafe(trait_def_id) => { + let violations = self.tcx.object_safety_violations(trait_def_id); + report_object_safety_error(self.tcx, span, trait_def_id, violations) + } + + ty::PredicateKind::ClosureKind(closure_def_id, closure_args, kind) => { + let found_kind = self.closure_kind(closure_args).unwrap(); + self.report_closure_error(&obligation, closure_def_id, found_kind, kind) + } + + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(ty)) => { + let ty = self.resolve_vars_if_possible(ty); + match self.tcx.sess.opts.unstable_opts.trait_solver { + TraitSolver::Classic => { + // WF predicates cannot themselves make + // errors. They can only block due to + // ambiguity; otherwise, they always + // degenerate into other obligations + // (which may fail). + span_bug!(span, "WF predicate not satisfied for {:?}", ty); + } + TraitSolver::Next | TraitSolver::NextCoherence => { + // FIXME: we'll need a better message which takes into account + // which bounds actually failed to hold. + self.tcx.sess.struct_span_err( + span, + format!("the type `{ty}` is not well-formed"), + ) + } + } + } + + ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(..)) => { + // Errors for `ConstEvaluatable` predicates show up as + // `SelectionError::ConstEvalFailure`, + // not `Unimplemented`. + span_bug!( + span, + "const-evaluatable requirement gave wrong error: `{:?}`", + obligation + ) + } + + ty::PredicateKind::ConstEquate(..) => { + // Errors for `ConstEquate` predicates show up as + // `SelectionError::ConstEvalFailure`, + // not `Unimplemented`. + span_bug!( + span, + "const-equate requirement gave wrong error: `{:?}`", + obligation + ) + } + + ty::PredicateKind::Ambiguous => span_bug!(span, "ambiguous"), + + ty::PredicateKind::AliasRelate(..) 
=> span_bug!( + span, + "AliasRelate predicate should never be the predicate cause of a SelectionError" + ), + + ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => { + let mut diag = self.tcx.sess.struct_span_err( + span, + format!("the constant `{ct}` is not of type `{ty}`"), + ); + self.note_type_err( + &mut diag, + &obligation.cause, + None, + None, + TypeError::Sorts(ty::error::ExpectedFound::new(true, ty, ct.ty())), + false, + false, + ); + diag + } + } + } + + OutputTypeParameterMismatch(box SelectionOutputTypeParameterMismatch { + found_trait_ref, + expected_trait_ref, + terr: terr @ TypeError::CyclicTy(_), + }) => self.report_type_parameter_mismatch_cyclic_type_error( + &obligation, + found_trait_ref, + expected_trait_ref, + terr, + ), + OutputTypeParameterMismatch(box SelectionOutputTypeParameterMismatch { + found_trait_ref, + expected_trait_ref, + terr: _, + }) => { + match self.report_type_parameter_mismatch_error( + &obligation, + span, + found_trait_ref, + expected_trait_ref, + ) { + Some(err) => err, + None => return, + } + } + + SelectionError::OpaqueTypeAutoTraitLeakageUnknown(def_id) => self.report_opaque_type_auto_trait_leakage( + &obligation, + def_id, + ), + + TraitNotObjectSafe(did) => { + let violations = self.tcx.object_safety_violations(did); + report_object_safety_error(self.tcx, span, did, violations) + } + + SelectionError::NotConstEvaluatable(NotConstEvaluatable::MentionsInfer) => { + bug!( + "MentionsInfer should have been handled in `traits/fulfill.rs` or `traits/select/mod.rs`" + ) + } + SelectionError::NotConstEvaluatable(NotConstEvaluatable::MentionsParam) => { + match self.report_not_const_evaluatable_error(&obligation, span) { + Some(err) => err, + None => return, + } + } + + // Already reported in the query. + SelectionError::NotConstEvaluatable(NotConstEvaluatable::Error(_)) | + // Already reported. + Overflow(OverflowError::Error(_)) => return, + + Overflow(_) => { + bug!("overflow should be handled before the `report_selection_error` path"); + } + SelectionError::ErrorReporting => { + bug!("ErrorReporting Overflow should not reach `report_selection_err` call") + } + }; + + self.note_obligation_cause(&mut err, &obligation); + self.point_at_returns_when_relevant(&mut err, &obligation); + err.emit(); + } + + fn fn_arg_obligation(&self, obligation: &PredicateObligation<'tcx>) -> bool { + if let ObligationCauseCode::FunctionArgumentObligation { + arg_hir_id, + .. + } = obligation.cause.code() + && let Some(Node::Expr(arg)) = self.tcx.hir().find(*arg_hir_id) + && let arg = arg.peel_borrows() + && let hir::ExprKind::Path(hir::QPath::Resolved( + None, + hir::Path { res: hir::def::Res::Local(hir_id), .. 
}, + )) = arg.kind + && let Some(Node::Pat(pat)) = self.tcx.hir().find(*hir_id) + && let Some(preds) = self.reported_trait_errors.borrow().get(&pat.span) + && preds.contains(&obligation.predicate) + { + return true; + } + false + } + + fn report_const_param_not_wf( + &self, + ty: Ty<'tcx>, + obligation: &PredicateObligation<'tcx>, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { + let span = obligation.cause.span; + + let mut diag = match ty.kind() { + _ if ty.has_param() => { + span_bug!(span, "const param tys cannot mention other generic parameters"); + } + ty::Float(_) => { + struct_span_err!( + self.tcx.sess, + span, + E0741, + "`{ty}` is forbidden as the type of a const generic parameter", + ) + } + ty::FnPtr(_) => { + struct_span_err!( + self.tcx.sess, + span, + E0741, + "using function pointers as const generic parameters is forbidden", + ) + } + ty::RawPtr(_) => { + struct_span_err!( + self.tcx.sess, + span, + E0741, + "using raw pointers as const generic parameters is forbidden", + ) + } + ty::Adt(def, _) => { + // We should probably see if we're *allowed* to derive `ConstParamTy` on the type... + let mut diag = struct_span_err!( + self.tcx.sess, + span, + E0741, + "`{ty}` must implement `ConstParamTy` to be used as the type of a const generic parameter", + ); + // Only suggest derive if this isn't a derived obligation, + // and the struct is local. + if let Some(span) = self.tcx.hir().span_if_local(def.did()) + && obligation.cause.code().parent().is_none() + { + if ty.is_structural_eq_shallow(self.tcx) { + diag.span_suggestion( + span, + "add `#[derive(ConstParamTy)]` to the struct", + "#[derive(ConstParamTy)]\n", + Applicability::MachineApplicable, + ); + } else { + // FIXME(adt_const_params): We should check there's not already an + // overlapping `Eq`/`PartialEq` impl. + diag.span_suggestion( + span, + "add `#[derive(ConstParamTy, PartialEq, Eq)]` to the struct", + "#[derive(ConstParamTy, PartialEq, Eq)]\n", + Applicability::MachineApplicable, + ); + } + } + diag + } + _ => { + struct_span_err!( + self.tcx.sess, + span, + E0741, + "`{ty}` can't be used as a const parameter type", + ) + } + }; + + let mut code = obligation.cause.code(); + let mut pred = obligation.predicate.to_opt_poly_trait_pred(); + while let Some((next_code, next_pred)) = code.parent() { + if let Some(pred) = pred { + let pred = self.instantiate_binder_with_placeholders(pred); + diag.note(format!( + "`{}` must implement `{}`, but it does not", + pred.self_ty(), + pred.print_modifiers_and_trait_path() + )); + } + code = next_code; + pred = next_pred; + } + + diag + } +} + +pub(super) trait InferCtxtPrivExt<'tcx> { + // returns if `cond` not occurring implies that `error` does not occur - i.e., that + // `error` occurring implies that `cond` occurs. 
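`report_const_param_not_wf` above emits E0741 and, for local ADTs, suggests deriving `ConstParamTy`. On a nightly toolchain with `adt_const_params` enabled the scenario looks roughly like this (the `Point` type is invented for the example):

    #![feature(adt_const_params)]
    #![allow(incomplete_features)]

    use std::marker::ConstParamTy;

    // Without the derives below, using `Point` as a const parameter type is rejected:
    // error[E0741]: `Point` must implement `ConstParamTy` to be used as the type of a
    //               const generic parameter
    // help: add `#[derive(ConstParamTy, PartialEq, Eq)]` to the struct
    #[derive(ConstParamTy, PartialEq, Eq)]
    struct Point {
        x: u8,
        y: u8,
    }

    fn at_point<const P: Point>() {}

    fn main() {}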
+ fn error_implies(&self, cond: ty::Predicate<'tcx>, error: ty::Predicate<'tcx>) -> bool; + + fn report_fulfillment_error(&self, error: &FulfillmentError<'tcx>); + + fn report_projection_error( + &self, + obligation: &PredicateObligation<'tcx>, + error: &MismatchedProjectionTypes<'tcx>, + ); + + fn maybe_detailed_projection_msg( + &self, + pred: ty::ProjectionPredicate<'tcx>, + normalized_ty: ty::Term<'tcx>, + expected_ty: ty::Term<'tcx>, + ) -> Option; + + fn fuzzy_match_tys( + &self, + a: Ty<'tcx>, + b: Ty<'tcx>, + ignoring_lifetimes: bool, + ) -> Option; + + fn describe_coroutine(&self, body_id: hir::BodyId) -> Option<&'static str>; + + fn find_similar_impl_candidates( + &self, + trait_pred: ty::PolyTraitPredicate<'tcx>, + ) -> Vec>; + + fn report_similar_impl_candidates( + &self, + impl_candidates: &[ImplCandidate<'tcx>], + trait_ref: ty::PolyTraitRef<'tcx>, + body_def_id: LocalDefId, + err: &mut Diagnostic, + other: bool, + param_env: ty::ParamEnv<'tcx>, + ) -> bool; + + fn report_similar_impl_candidates_for_root_obligation( + &self, + obligation: &PredicateObligation<'tcx>, + trait_predicate: ty::Binder<'tcx, ty::TraitPredicate<'tcx>>, + body_def_id: LocalDefId, + err: &mut Diagnostic, + ); + + /// Gets the parent trait chain start + fn get_parent_trait_ref( + &self, + code: &ObligationCauseCode<'tcx>, + ) -> Option<(Ty<'tcx>, Option)>; + + /// If the `Self` type of the unsatisfied trait `trait_ref` implements a trait + /// with the same path as `trait_ref`, a help message about + /// a probable version mismatch is added to `err` + fn note_version_mismatch( + &self, + err: &mut Diagnostic, + trait_ref: &ty::PolyTraitRef<'tcx>, + ) -> bool; + + /// Creates a `PredicateObligation` with `new_self_ty` replacing the existing type in the + /// `trait_ref`. + /// + /// For this to work, `new_self_ty` must have no escaping bound variables. 
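`report_similar_impl_candidates`, declared above, is what produces the "the following other types implement trait ..." help. A small illustrative program that triggers it:

    fn main() {
        // error[E0277]: the trait bound `u8: From<i32>` is not satisfied
        // help: the following other types implement trait `From<T>`:
        //       (the list of available `From` impls for `u8` is printed here)
        let _: u8 = u8::from(5i32);
    }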
+ fn mk_trait_obligation_with_new_self_ty( + &self, + param_env: ty::ParamEnv<'tcx>, + trait_ref_and_ty: ty::Binder<'tcx, (ty::TraitPredicate<'tcx>, Ty<'tcx>)>, + ) -> PredicateObligation<'tcx>; + + fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>); + + fn predicate_can_apply( + &self, + param_env: ty::ParamEnv<'tcx>, + pred: ty::PolyTraitPredicate<'tcx>, + ) -> bool; + + fn note_obligation_cause(&self, err: &mut Diagnostic, obligation: &PredicateObligation<'tcx>); + + fn suggest_unsized_bound_if_applicable( + &self, + err: &mut Diagnostic, + obligation: &PredicateObligation<'tcx>, + ); + + fn annotate_source_of_ambiguity( + &self, + err: &mut Diagnostic, + impls: &[ambiguity::Ambiguity], + predicate: ty::Predicate<'tcx>, + ); + + fn maybe_suggest_unsized_generics(&self, err: &mut Diagnostic, span: Span, node: Node<'tcx>); + + fn maybe_indirection_for_unsized( + &self, + err: &mut Diagnostic, + item: &'tcx Item<'tcx>, + param: &'tcx GenericParam<'tcx>, + ) -> bool; + + fn is_recursive_obligation( + &self, + obligated_types: &mut Vec>, + cause_code: &ObligationCauseCode<'tcx>, + ) -> bool; + + fn get_standard_error_message( + &self, + trait_predicate: &ty::PolyTraitPredicate<'tcx>, + message: Option, + predicate_is_const: bool, + append_const_msg: Option, + post_message: String, + ) -> String; + + fn get_safe_transmute_error_and_reason( + &self, + obligation: PredicateObligation<'tcx>, + trait_ref: ty::PolyTraitRef<'tcx>, + span: Span, + ) -> GetSafeTransmuteErrorAndReason; + + fn add_tuple_trait_message( + &self, + obligation_cause_code: &ObligationCauseCode<'tcx>, + err: &mut Diagnostic, + ); + + fn try_to_add_help_message( + &self, + obligation: &PredicateObligation<'tcx>, + trait_ref: ty::PolyTraitRef<'tcx>, + trait_predicate: &ty::PolyTraitPredicate<'tcx>, + err: &mut Diagnostic, + span: Span, + is_fn_trait: bool, + suggested: bool, + unsatisfied_const: bool, + ); + + fn add_help_message_for_fn_trait( + &self, + trait_ref: ty::PolyTraitRef<'tcx>, + err: &mut Diagnostic, + implemented_kind: ty::ClosureKind, + params: ty::Binder<'tcx, Ty<'tcx>>, + ); + + fn maybe_add_note_for_unsatisfied_const( + &self, + obligation: &PredicateObligation<'tcx>, + trait_ref: ty::PolyTraitRef<'tcx>, + trait_predicate: &ty::PolyTraitPredicate<'tcx>, + err: &mut Diagnostic, + span: Span, + ) -> UnsatisfiedConst; + + fn report_closure_error( + &self, + obligation: &PredicateObligation<'tcx>, + closure_def_id: DefId, + found_kind: ty::ClosureKind, + kind: ty::ClosureKind, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>; + + fn report_type_parameter_mismatch_cyclic_type_error( + &self, + obligation: &PredicateObligation<'tcx>, + found_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, + expected_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, + terr: TypeError<'tcx>, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>; + + fn report_opaque_type_auto_trait_leakage( + &self, + obligation: &PredicateObligation<'tcx>, + def_id: DefId, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>; + + fn report_type_parameter_mismatch_error( + &self, + obligation: &PredicateObligation<'tcx>, + span: Span, + found_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, + expected_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, + ) -> Option>; + + fn report_not_const_evaluatable_error( + &self, + obligation: &PredicateObligation<'tcx>, + span: Span, + ) -> Option>; +} + +impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> { + // returns if `cond` not occurring implies that `error` does not occur - i.e., that + 
// `error` occurring implies that `cond` occurs. + fn error_implies(&self, cond: ty::Predicate<'tcx>, error: ty::Predicate<'tcx>) -> bool { + if cond == error { + return true; + } + + // FIXME: It should be possible to deal with `ForAll` in a cleaner way. + let bound_error = error.kind(); + let (cond, error) = match (cond.kind().skip_binder(), bound_error.skip_binder()) { + ( + ty::PredicateKind::Clause(ty::ClauseKind::Trait(..)), + ty::PredicateKind::Clause(ty::ClauseKind::Trait(error)), + ) => (cond, bound_error.rebind(error)), + _ => { + // FIXME: make this work in other cases too. + return false; + } + }; + + for pred in elaborate(self.tcx, std::iter::once(cond)) { + let bound_predicate = pred.kind(); + if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(implication)) = + bound_predicate.skip_binder() + { + let error = error.to_poly_trait_ref(); + let implication = bound_predicate.rebind(implication.trait_ref); + // FIXME: I'm just not taking associated types at all here. + // Eventually I'll need to implement param-env-aware + // `Γ₁ ⊦ φ₁ => Γ₂ ⊦ φ₂` logic. + let param_env = ty::ParamEnv::empty(); + if self.can_sub(param_env, error, implication) { + debug!("error_implies: {:?} -> {:?} -> {:?}", cond, error, implication); + return true; + } + } + } + + false + } + + #[instrument(skip(self), level = "debug")] + fn report_fulfillment_error(&self, error: &FulfillmentError<'tcx>) { + if self.tcx.sess.opts.unstable_opts.dump_solver_proof_tree == DumpSolverProofTree::OnError { + dump_proof_tree(&error.root_obligation, self.infcx); + } + + match error.code { + FulfillmentErrorCode::CodeSelectionError(ref selection_error) => { + self.report_selection_error( + error.obligation.clone(), + &error.root_obligation, + selection_error, + ); + } + FulfillmentErrorCode::CodeProjectionError(ref e) => { + self.report_projection_error(&error.obligation, e); + } + FulfillmentErrorCode::CodeAmbiguity { overflow: false } => { + self.maybe_report_ambiguity(&error.obligation); + } + FulfillmentErrorCode::CodeAmbiguity { overflow: true } => { + self.report_overflow_no_abort(error.obligation.clone()); + } + FulfillmentErrorCode::CodeSubtypeError(ref expected_found, ref err) => { + self.report_mismatched_types( + &error.obligation.cause, + expected_found.expected, + expected_found.found, + *err, + ) + .emit(); + } + FulfillmentErrorCode::CodeConstEquateError(ref expected_found, ref err) => { + let mut diag = self.report_mismatched_consts( + &error.obligation.cause, + expected_found.expected, + expected_found.found, + *err, + ); + let code = error.obligation.cause.code().peel_derives().peel_match_impls(); + if let ObligationCauseCode::BindingObligation(..) + | ObligationCauseCode::ItemObligation(..) + | ObligationCauseCode::ExprBindingObligation(..) + | ObligationCauseCode::ExprItemObligation(..) 
= code + { + self.note_obligation_cause_code( + error.obligation.cause.body_id, + &mut diag, + error.obligation.predicate, + error.obligation.param_env, + code, + &mut vec![], + &mut Default::default(), + ); + } + diag.emit(); + } + FulfillmentErrorCode::CodeCycle(ref cycle) => { + self.report_overflow_obligation_cycle(cycle); + } + } + } + + #[instrument(level = "debug", skip_all)] + fn report_projection_error( + &self, + obligation: &PredicateObligation<'tcx>, + error: &MismatchedProjectionTypes<'tcx>, + ) { + let predicate = self.resolve_vars_if_possible(obligation.predicate); + + if predicate.references_error() { + return; + } + + self.probe(|_| { + let ocx = ObligationCtxt::new(self); + + // try to find the mismatched types to report the error with. + // + // this can fail if the problem was higher-ranked, in which + // cause I have no idea for a good error message. + let bound_predicate = predicate.kind(); + let (values, err) = if let ty::PredicateKind::Clause(ty::ClauseKind::Projection(data)) = + bound_predicate.skip_binder() + { + let data = self.instantiate_binder_with_fresh_vars( + obligation.cause.span, + infer::LateBoundRegionConversionTime::HigherRankedType, + bound_predicate.rebind(data), + ); + let unnormalized_term = match data.term.unpack() { + ty::TermKind::Ty(_) => Ty::new_projection( + self.tcx, + data.projection_ty.def_id, + data.projection_ty.args, + ) + .into(), + ty::TermKind::Const(ct) => ty::Const::new_unevaluated( + self.tcx, + ty::UnevaluatedConst { + def: data.projection_ty.def_id, + args: data.projection_ty.args, + }, + ct.ty(), + ) + .into(), + }; + // FIXME(-Ztrait-solver=next): For diagnostic purposes, it would be nice + // to deeply normalize this type. + let normalized_term = + ocx.normalize(&obligation.cause, obligation.param_env, unnormalized_term); + + debug!(?obligation.cause, ?obligation.param_env); + + debug!(?normalized_term, data.ty = ?data.term); + + let is_normalized_term_expected = !matches!( + obligation.cause.code().peel_derives(), + ObligationCauseCode::ItemObligation(_) + | ObligationCauseCode::BindingObligation(_, _) + | ObligationCauseCode::ExprItemObligation(..) + | ObligationCauseCode::ExprBindingObligation(..) + | ObligationCauseCode::Coercion { .. } + | ObligationCauseCode::OpaqueType + ); + + // constrain inference variables a bit more to nested obligations from normalize so + // we can have more helpful errors. + // + // we intentionally drop errors from normalization here, + // since the normalization is just done to improve the error message. 
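`report_projection_error`, assembled here, produces E0271. A representative input, with `spawn_string_future` invented for the example; the friendlier wording comes from `maybe_detailed_projection_msg` further below:

    use std::future::Future;

    async fn answer() -> i32 {
        42
    }

    fn spawn_string_future<F: Future<Output = String>>(_: F) {}

    fn main() {
        // error[E0271], phrased along the lines of: expected the future returned by
        // `answer` to resolve to `String`, but it resolves to `i32`
        spawn_string_future(answer());
    }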
+ let _ = ocx.select_where_possible(); + + if let Err(new_err) = ocx.eq_exp( + &obligation.cause, + obligation.param_env, + is_normalized_term_expected, + normalized_term, + data.term, + ) { + (Some((data, is_normalized_term_expected, normalized_term, data.term)), new_err) + } else { + (None, error.err) + } + } else { + (None, error.err) + }; + + let msg = values + .and_then(|(predicate, _, normalized_term, expected_term)| { + self.maybe_detailed_projection_msg(predicate, normalized_term, expected_term) + }) + .unwrap_or_else(|| { + let mut cx = FmtPrinter::new_with_limit( + self.tcx, + Namespace::TypeNS, + rustc_session::Limit(10), + ); + with_forced_trimmed_paths!(format!("type mismatch resolving `{}`", { + self.resolve_vars_if_possible(predicate).print(&mut cx).unwrap(); + cx.into_buffer() + })) + }); + let mut diag = struct_span_err!(self.tcx.sess, obligation.cause.span, E0271, "{msg}"); + + let secondary_span = (|| { + let ty::PredicateKind::Clause(ty::ClauseKind::Projection(proj)) = + predicate.kind().skip_binder() + else { + return None; + }; + + let trait_assoc_item = self.tcx.opt_associated_item(proj.projection_ty.def_id)?; + let trait_assoc_ident = trait_assoc_item.ident(self.tcx); + + let mut associated_items = vec![]; + self.tcx.for_each_relevant_impl( + self.tcx.trait_of_item(proj.projection_ty.def_id)?, + proj.projection_ty.self_ty(), + |impl_def_id| { + associated_items.extend( + self.tcx + .associated_items(impl_def_id) + .in_definition_order() + .find(|assoc| assoc.ident(self.tcx) == trait_assoc_ident), + ); + }, + ); + + let [associated_item]: &[ty::AssocItem] = &associated_items[..] else { + return None; + }; + match self.tcx.hir().get_if_local(associated_item.def_id) { + Some( + hir::Node::TraitItem(hir::TraitItem { + kind: hir::TraitItemKind::Type(_, Some(ty)), + .. + }) + | hir::Node::ImplItem(hir::ImplItem { + kind: hir::ImplItemKind::Type(ty), + .. + }), + ) => Some(( + ty.span, + with_forced_trimmed_paths!(Cow::from(format!( + "type mismatch resolving `{}`", + { + let mut cx = FmtPrinter::new_with_limit( + self.tcx, + Namespace::TypeNS, + rustc_session::Limit(5), + ); + self.resolve_vars_if_possible(predicate).print(&mut cx).unwrap(); + cx.into_buffer() + } + ))), + )), + _ => None, + } + })(); + + self.note_type_err( + &mut diag, + &obligation.cause, + secondary_span, + values.map(|(_, is_normalized_ty_expected, normalized_ty, expected_ty)| { + infer::ValuePairs::Terms(ExpectedFound::new( + is_normalized_ty_expected, + normalized_ty, + expected_ty, + )) + }), + err, + true, + false, + ); + self.note_obligation_cause(&mut diag, obligation); + diag.emit(); + }); + } + + fn maybe_detailed_projection_msg( + &self, + pred: ty::ProjectionPredicate<'tcx>, + normalized_ty: ty::Term<'tcx>, + expected_ty: ty::Term<'tcx>, + ) -> Option { + let trait_def_id = pred.projection_ty.trait_def_id(self.tcx); + let self_ty = pred.projection_ty.self_ty(); + + with_forced_trimmed_paths! 
{ + if Some(pred.projection_ty.def_id) == self.tcx.lang_items().fn_once_output() { + let fn_kind = self_ty.prefix_string(self.tcx); + let item = match self_ty.kind() { + ty::FnDef(def, _) => self.tcx.item_name(*def).to_string(), + _ => self_ty.to_string(), + }; + Some(format!( + "expected `{item}` to be a {fn_kind} that returns `{expected_ty}`, but it \ + returns `{normalized_ty}`", + )) + } else if Some(trait_def_id) == self.tcx.lang_items().future_trait() { + Some(format!( + "expected `{self_ty}` to be a future that resolves to `{expected_ty}`, but it \ + resolves to `{normalized_ty}`" + )) + } else if Some(trait_def_id) == self.tcx.get_diagnostic_item(sym::Iterator) { + Some(format!( + "expected `{self_ty}` to be an iterator that yields `{expected_ty}`, but it \ + yields `{normalized_ty}`" + )) + } else { + None + } + } + } + + fn fuzzy_match_tys( + &self, + mut a: Ty<'tcx>, + mut b: Ty<'tcx>, + ignoring_lifetimes: bool, + ) -> Option { + /// returns the fuzzy category of a given type, or None + /// if the type can be equated to any type. + fn type_category(tcx: TyCtxt<'_>, t: Ty<'_>) -> Option { + match t.kind() { + ty::Bool => Some(0), + ty::Char => Some(1), + ty::Str => Some(2), + ty::Adt(def, _) if Some(def.did()) == tcx.lang_items().string() => Some(2), + ty::Int(..) + | ty::Uint(..) + | ty::Float(..) + | ty::Infer(ty::IntVar(..) | ty::FloatVar(..)) => Some(4), + ty::Ref(..) | ty::RawPtr(..) => Some(5), + ty::Array(..) | ty::Slice(..) => Some(6), + ty::FnDef(..) | ty::FnPtr(..) => Some(7), + ty::Dynamic(..) => Some(8), + ty::Closure(..) => Some(9), + ty::Tuple(..) => Some(10), + ty::Param(..) => Some(11), + ty::Alias(ty::Projection, ..) => Some(12), + ty::Alias(ty::Inherent, ..) => Some(13), + ty::Alias(ty::Opaque, ..) => Some(14), + ty::Alias(ty::Weak, ..) => Some(15), + ty::Never => Some(16), + ty::Adt(..) => Some(17), + ty::Coroutine(..) => Some(18), + ty::Foreign(..) => Some(19), + ty::CoroutineWitness(..) => Some(20), + ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) | ty::Error(_) => None, + } + } + + let strip_references = |mut t: Ty<'tcx>| -> Ty<'tcx> { + loop { + match t.kind() { + ty::Ref(_, inner, _) | ty::RawPtr(ty::TypeAndMut { ty: inner, .. }) => { + t = *inner + } + _ => break t, + } + } + }; + + if !ignoring_lifetimes { + a = strip_references(a); + b = strip_references(b); + } + + let cat_a = type_category(self.tcx, a)?; + let cat_b = type_category(self.tcx, b)?; + if a == b { + Some(CandidateSimilarity::Exact { ignoring_lifetimes }) + } else if cat_a == cat_b { + match (a.kind(), b.kind()) { + (ty::Adt(def_a, _), ty::Adt(def_b, _)) => def_a == def_b, + (ty::Foreign(def_a), ty::Foreign(def_b)) => def_a == def_b, + // Matching on references results in a lot of unhelpful + // suggestions, so let's just not do that for now. + // + // We still upgrade successful matches to `ignoring_lifetimes: true` + // to prioritize that impl. + (ty::Ref(..) | ty::RawPtr(..), ty::Ref(..) 
| ty::RawPtr(..)) => { + self.fuzzy_match_tys(a, b, true).is_some() + } + _ => true, + } + .then_some(CandidateSimilarity::Fuzzy { ignoring_lifetimes }) + } else if ignoring_lifetimes { + None + } else { + self.fuzzy_match_tys(a, b, true) + } + } + + fn describe_coroutine(&self, body_id: hir::BodyId) -> Option<&'static str> { + self.tcx.hir().body(body_id).coroutine_kind.map(|coroutine_source| match coroutine_source { + hir::CoroutineKind::Coroutine => "a coroutine", + hir::CoroutineKind::Async(hir::CoroutineSource::Block) => "an async block", + hir::CoroutineKind::Async(hir::CoroutineSource::Fn) => "an async function", + hir::CoroutineKind::Async(hir::CoroutineSource::Closure) => "an async closure", + hir::CoroutineKind::Gen(hir::CoroutineSource::Block) => "a gen block", + hir::CoroutineKind::Gen(hir::CoroutineSource::Fn) => "a gen function", + hir::CoroutineKind::Gen(hir::CoroutineSource::Closure) => "a gen closure", + }) + } + + fn find_similar_impl_candidates( + &self, + trait_pred: ty::PolyTraitPredicate<'tcx>, + ) -> Vec> { + let mut candidates: Vec<_> = self + .tcx + .all_impls(trait_pred.def_id()) + .filter_map(|def_id| { + if self.tcx.impl_polarity(def_id) == ty::ImplPolarity::Negative + || !self.tcx.is_user_visible_dep(def_id.krate) + { + return None; + } + + let imp = self.tcx.impl_trait_ref(def_id).unwrap().skip_binder(); + + self.fuzzy_match_tys(trait_pred.skip_binder().self_ty(), imp.self_ty(), false).map( + |similarity| ImplCandidate { trait_ref: imp, similarity, impl_def_id: def_id }, + ) + }) + .collect(); + if candidates.iter().any(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. })) { + // If any of the candidates is a perfect match, we don't want to show all of them. + // This is particularly relevant for the case of numeric types (as they all have the + // same category). + candidates.retain(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. })); + } + candidates + } + + fn report_similar_impl_candidates( + &self, + impl_candidates: &[ImplCandidate<'tcx>], + trait_ref: ty::PolyTraitRef<'tcx>, + body_def_id: LocalDefId, + err: &mut Diagnostic, + other: bool, + param_env: ty::ParamEnv<'tcx>, + ) -> bool { + // If we have a single implementation, try to unify it with the trait ref + // that failed. This should uncover a better hint for what *is* implemented. + if let [single] = &impl_candidates { + if self.probe(|_| { + let ocx = ObligationCtxt::new(self); + let obligation_trait_ref = self.instantiate_binder_with_placeholders(trait_ref); + let impl_args = self.fresh_args_for_item(DUMMY_SP, single.impl_def_id); + let impl_trait_ref = ocx.normalize( + &ObligationCause::dummy(), + param_env, + ty::EarlyBinder::bind(single.trait_ref).instantiate(self.tcx, impl_args), + ); + + ocx.register_obligations( + self.tcx + .predicates_of(single.impl_def_id) + .instantiate(self.tcx, impl_args) + .into_iter() + .map(|(clause, _)| { + Obligation::new(self.tcx, ObligationCause::dummy(), param_env, clause) + }), + ); + if !ocx.select_where_possible().is_empty() { + return false; + } + + let mut terrs = vec![]; + for (obligation_arg, impl_arg) in + std::iter::zip(obligation_trait_ref.args, impl_trait_ref.args) + { + if let Err(terr) = + ocx.eq(&ObligationCause::dummy(), param_env, impl_arg, obligation_arg) + { + terrs.push(terr); + } + if !ocx.select_where_possible().is_empty() { + return false; + } + } + + // Literally nothing unified, just give up. 
+ if terrs.len() == impl_trait_ref.args.len() { + return false; + } + + let cand = + self.resolve_vars_if_possible(impl_trait_ref).fold_with(&mut BottomUpFolder { + tcx: self.tcx, + ty_op: |ty| ty, + lt_op: |lt| lt, + ct_op: |ct| ct.normalize(self.tcx, ty::ParamEnv::empty()), + }); + err.highlighted_help(vec![ + (format!("the trait `{}` ", cand.print_only_trait_path()), Style::NoStyle), + ("is".to_string(), Style::Highlight), + (" implemented for `".to_string(), Style::NoStyle), + (cand.self_ty().to_string(), Style::Highlight), + ("`".to_string(), Style::NoStyle), + ]); + + if let [TypeError::Sorts(exp_found)] = &terrs[..] { + let exp_found = self.resolve_vars_if_possible(*exp_found); + err.help(format!( + "for that trait implementation, expected `{}`, found `{}`", + exp_found.expected, exp_found.found + )); + } + + true + }) { + return true; + } + } + + let other = if other { "other " } else { "" }; + let report = |candidates: Vec>, err: &mut Diagnostic| { + if candidates.is_empty() { + return false; + } + if let &[cand] = &candidates[..] { + let (desc, mention_castable) = + match (cand.self_ty().kind(), trait_ref.self_ty().skip_binder().kind()) { + (ty::FnPtr(_), ty::FnDef(..)) => { + (" implemented for fn pointer `", ", cast using `as`") + } + (ty::FnPtr(_), _) => (" implemented for fn pointer `", ""), + _ => (" implemented for `", ""), + }; + err.highlighted_help(vec![ + (format!("the trait `{}` ", cand.print_only_trait_path()), Style::NoStyle), + ("is".to_string(), Style::Highlight), + (desc.to_string(), Style::NoStyle), + (cand.self_ty().to_string(), Style::Highlight), + ("`".to_string(), Style::NoStyle), + (mention_castable.to_string(), Style::NoStyle), + ]); + return true; + } + let trait_ref = TraitRef::identity(self.tcx, candidates[0].def_id); + // Check if the trait is the same in all cases. If so, we'll only show the type. + let mut traits: Vec<_> = + candidates.iter().map(|c| c.print_only_trait_path().to_string()).collect(); + traits.sort(); + traits.dedup(); + // FIXME: this could use a better heuristic, like just checking + // that args[1..] is the same. + let all_traits_equal = traits.len() == 1; + + let candidates: Vec = candidates + .into_iter() + .map(|c| { + if all_traits_equal { + format!("\n {}", c.self_ty()) + } else { + format!("\n {c}") + } + }) + .collect(); + + let end = if candidates.len() <= 9 { candidates.len() } else { 8 }; + err.help(format!( + "the following {other}types implement trait `{}`:{}{}", + trait_ref.print_only_trait_path(), + candidates[..end].join(""), + if candidates.len() > 9 { + format!("\nand {} others", candidates.len() - 8) + } else { + String::new() + } + )); + true + }; + + let def_id = trait_ref.def_id(); + if impl_candidates.is_empty() { + if self.tcx.trait_is_auto(def_id) + || self.tcx.lang_items().iter().any(|(_, id)| id == def_id) + || self.tcx.get_diagnostic_name(def_id).is_some() + { + // Mentioning implementers of `Copy`, `Debug` and friends is not useful. + return false; + } + let mut impl_candidates: Vec<_> = self + .tcx + .all_impls(def_id) + // Ignore automatically derived impls and `!Trait` impls. + .filter(|&def_id| { + self.tcx.impl_polarity(def_id) != ty::ImplPolarity::Negative + || self.tcx.is_automatically_derived(def_id) + }) + .filter_map(|def_id| self.tcx.impl_trait_ref(def_id)) + .map(ty::EarlyBinder::instantiate_identity) + .filter(|trait_ref| { + let self_ty = trait_ref.self_ty(); + // Avoid mentioning type parameters. 
+ if let ty::Param(_) = self_ty.kind() { + false + } + // Avoid mentioning types that are private to another crate + else if let ty::Adt(def, _) = self_ty.peel_refs().kind() { + // FIXME(compiler-errors): This could be generalized, both to + // be more granular, and probably look past other `#[fundamental]` + // types, too. + self.tcx.visibility(def.did()).is_accessible_from(body_def_id, self.tcx) + } else { + true + } + }) + .collect(); + + impl_candidates.sort(); + impl_candidates.dedup(); + return report(impl_candidates, err); + } + + // Sort impl candidates so that ordering is consistent for UI tests. + // because the ordering of `impl_candidates` may not be deterministic: + // https://github.com/rust-lang/rust/pull/57475#issuecomment-455519507 + // + // Prefer more similar candidates first, then sort lexicographically + // by their normalized string representation. + let mut impl_candidates: Vec<_> = impl_candidates + .iter() + .cloned() + .map(|mut cand| { + // Fold the consts so that they shows up as, e.g., `10` + // instead of `core::::array::{impl#30}::{constant#0}`. + cand.trait_ref = cand.trait_ref.fold_with(&mut BottomUpFolder { + tcx: self.tcx, + ty_op: |ty| ty, + lt_op: |lt| lt, + ct_op: |ct| ct.normalize(self.tcx, ty::ParamEnv::empty()), + }); + cand + }) + .collect(); + impl_candidates.sort_by_key(|cand| (cand.similarity, cand.trait_ref)); + let mut impl_candidates: Vec<_> = + impl_candidates.into_iter().map(|cand| cand.trait_ref).collect(); + impl_candidates.dedup(); + + report(impl_candidates, err) + } + + fn report_similar_impl_candidates_for_root_obligation( + &self, + obligation: &PredicateObligation<'tcx>, + trait_predicate: ty::Binder<'tcx, ty::TraitPredicate<'tcx>>, + body_def_id: LocalDefId, + err: &mut Diagnostic, + ) { + // This is *almost* equivalent to + // `obligation.cause.code().peel_derives()`, but it gives us the + // trait predicate for that corresponding root obligation. This + // lets us get a derived obligation from a type parameter, like + // when calling `string.strip_suffix(p)` where `p` is *not* an + // implementer of `Pattern<'_>`. + let mut code = obligation.cause.code(); + let mut trait_pred = trait_predicate; + let mut peeled = false; + while let Some((parent_code, parent_trait_pred)) = code.parent() { + code = parent_code; + if let Some(parent_trait_pred) = parent_trait_pred { + trait_pred = parent_trait_pred; + peeled = true; + } + } + let def_id = trait_pred.def_id(); + // Mention *all* the `impl`s for the *top most* obligation, the + // user might have meant to use one of them, if any found. We skip + // auto-traits or fundamental traits that might not be exactly what + // the user might expect to be presented with. Instead this is + // useful for less general traits. 
+ if peeled + && !self.tcx.trait_is_auto(def_id) + && !self.tcx.lang_items().iter().any(|(_, id)| id == def_id) + { + let trait_ref = trait_pred.to_poly_trait_ref(); + let impl_candidates = self.find_similar_impl_candidates(trait_pred); + self.report_similar_impl_candidates( + &impl_candidates, + trait_ref, + body_def_id, + err, + true, + obligation.param_env, + ); + } + } + + /// Gets the parent trait chain start + fn get_parent_trait_ref( + &self, + code: &ObligationCauseCode<'tcx>, + ) -> Option<(Ty<'tcx>, Option)> { + match code { + ObligationCauseCode::BuiltinDerivedObligation(data) => { + let parent_trait_ref = self.resolve_vars_if_possible(data.parent_trait_pred); + match self.get_parent_trait_ref(&data.parent_code) { + Some(t) => Some(t), + None => { + let ty = parent_trait_ref.skip_binder().self_ty(); + let span = TyCategory::from_ty(self.tcx, ty) + .map(|(_, def_id)| self.tcx.def_span(def_id)); + Some((ty, span)) + } + } + } + ObligationCauseCode::FunctionArgumentObligation { parent_code, .. } => { + self.get_parent_trait_ref(&parent_code) + } + _ => None, + } + } + + /// If the `Self` type of the unsatisfied trait `trait_ref` implements a trait + /// with the same path as `trait_ref`, a help message about + /// a probable version mismatch is added to `err` + fn note_version_mismatch( + &self, + err: &mut Diagnostic, + trait_ref: &ty::PolyTraitRef<'tcx>, + ) -> bool { + let get_trait_impls = |trait_def_id| { + let mut trait_impls = vec![]; + self.tcx.for_each_relevant_impl( + trait_def_id, + trait_ref.skip_binder().self_ty(), + |impl_def_id| { + trait_impls.push(impl_def_id); + }, + ); + trait_impls + }; + + let required_trait_path = self.tcx.def_path_str(trait_ref.def_id()); + let traits_with_same_path: std::collections::BTreeSet<_> = self + .tcx + .all_traits() + .filter(|trait_def_id| *trait_def_id != trait_ref.def_id()) + .filter(|trait_def_id| self.tcx.def_path_str(*trait_def_id) == required_trait_path) + .collect(); + let mut suggested = false; + for trait_with_same_path in traits_with_same_path { + let trait_impls = get_trait_impls(trait_with_same_path); + if trait_impls.is_empty() { + continue; + } + let impl_spans: Vec<_> = + trait_impls.iter().map(|impl_def_id| self.tcx.def_span(*impl_def_id)).collect(); + err.span_help( + impl_spans, + format!("trait impl{} with same name found", pluralize!(trait_impls.len())), + ); + let trait_crate = self.tcx.crate_name(trait_with_same_path.krate); + let crate_msg = + format!("perhaps two different versions of crate `{trait_crate}` are being used?"); + err.note(crate_msg); + suggested = true; + } + suggested + } + + fn mk_trait_obligation_with_new_self_ty( + &self, + param_env: ty::ParamEnv<'tcx>, + trait_ref_and_ty: ty::Binder<'tcx, (ty::TraitPredicate<'tcx>, Ty<'tcx>)>, + ) -> PredicateObligation<'tcx> { + let trait_pred = + trait_ref_and_ty.map_bound(|(tr, new_self_ty)| tr.with_self_ty(self.tcx, new_self_ty)); + + Obligation::new(self.tcx, ObligationCause::dummy(), param_env, trait_pred) + } + + #[instrument(skip(self), level = "debug")] + fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>) { + // Unable to successfully determine, probably means + // insufficient type information, but could mean + // ambiguous impls. The latter *ought* to be a + // coherence violation, so we don't report it here. 
+ + let predicate = self.resolve_vars_if_possible(obligation.predicate); + let span = obligation.cause.span; + + debug!(?predicate, obligation.cause.code = ?obligation.cause.code()); + + // Ambiguity errors are often caused as fallout from earlier errors. + // We ignore them if this `infcx` is tainted in some cases below. + + let bound_predicate = predicate.kind(); + let mut err = match bound_predicate.skip_binder() { + ty::PredicateKind::Clause(ty::ClauseKind::Trait(data)) => { + let trait_ref = bound_predicate.rebind(data.trait_ref); + debug!(?trait_ref); + + if predicate.references_error() { + return; + } + + // This is kind of a hack: it frequently happens that some earlier + // error prevents types from being fully inferred, and then we get + // a bunch of uninteresting errors saying something like " doesn't implement Sized". It may even be true that we + // could just skip over all checks where the self-ty is an + // inference variable, but I was afraid that there might be an + // inference variable created, registered as an obligation, and + // then never forced by writeback, and hence by skipping here we'd + // be ignoring the fact that we don't KNOW the type works + // out. Though even that would probably be harmless, given that + // we're only talking about builtin traits, which are known to be + // inhabited. We used to check for `self.tcx.sess.has_errors()` to + // avoid inundating the user with unnecessary errors, but we now + // check upstream for type errors and don't add the obligations to + // begin with in those cases. + if self.tcx.lang_items().sized_trait() == Some(trait_ref.def_id()) { + if let None = self.tainted_by_errors() { + let err = self.emit_inference_failure_err( + obligation.cause.body_id, + span, + trait_ref.self_ty().skip_binder().into(), + ErrorCode::E0282, + false, + ); + err.stash(span, StashKey::MaybeForgetReturn); + } + return; + } + + // Typically, this ambiguity should only happen if + // there are unresolved type inference variables + // (otherwise it would suggest a coherence + // failure). But given #21974 that is not necessarily + // the case -- we can have multiple where clauses that + // are only distinguished by a region, which results + // in an ambiguity even when all types are fully + // known, since we don't dispatch based on region + // relationships. + + // Pick the first substitution that still contains inference variables as the one + // we're going to emit an error for. If there are none (see above), fall back to + // a more general error. + let subst = data.trait_ref.args.iter().find(|s| s.has_non_region_infer()); + + let mut err = if let Some(subst) = subst { + self.emit_inference_failure_err( + obligation.cause.body_id, + span, + subst, + ErrorCode::E0283, + true, + ) + } else { + struct_span_err!( + self.tcx.sess, + span, + E0283, + "type annotations needed: cannot satisfy `{}`", + predicate, + ) + }; + + let mut ambiguities = ambiguity::recompute_applicable_impls( + self.infcx, + &obligation.with(self.tcx, trait_ref), + ); + let has_non_region_infer = + trait_ref.skip_binder().args.types().any(|t| !t.is_ty_or_numeric_infer()); + // It doesn't make sense to talk about applicable impls if there are more than a + // handful of them. If there are a lot of them, but only a few of them have no type + // params, we only show those, as they are more likely to be useful/intended. 
+ if ambiguities.len() > 5 { + let infcx = self.infcx; + if !ambiguities.iter().all(|option| match option { + DefId(did) => infcx.fresh_args_for_item(DUMMY_SP, *did).is_empty(), + ParamEnv(_) => true, + }) { + // If not all are blanket impls, we filter blanked impls out. + ambiguities.retain(|option| match option { + DefId(did) => infcx.fresh_args_for_item(DUMMY_SP, *did).is_empty(), + ParamEnv(_) => true, + }); + } + } + if ambiguities.len() > 1 && ambiguities.len() < 10 && has_non_region_infer { + if self.tainted_by_errors().is_some() && subst.is_none() { + // If `subst.is_none()`, then this is probably two param-env + // candidates or impl candidates that are equal modulo lifetimes. + // Therefore, if we've already emitted an error, just skip this + // one, since it's not particularly actionable. + err.cancel(); + return; + } + self.annotate_source_of_ambiguity(&mut err, &ambiguities, predicate); + } else { + if self.tainted_by_errors().is_some() { + err.cancel(); + return; + } + err.note(format!("cannot satisfy `{predicate}`")); + let impl_candidates = self + .find_similar_impl_candidates(predicate.to_opt_poly_trait_pred().unwrap()); + if impl_candidates.len() < 40 { + self.report_similar_impl_candidates( + impl_candidates.as_slice(), + trait_ref, + obligation.cause.body_id, + &mut err, + false, + obligation.param_env, + ); + } + } + + if let ObligationCauseCode::ItemObligation(def_id) + | ObligationCauseCode::ExprItemObligation(def_id, ..) = *obligation.cause.code() + { + self.suggest_fully_qualified_path(&mut err, def_id, span, trait_ref.def_id()); + } + + if let Some(ty::GenericArgKind::Type(_)) = subst.map(|subst| subst.unpack()) + && let Some(body_id) = + self.tcx.hir().maybe_body_owned_by(obligation.cause.body_id) + { + let mut expr_finder = FindExprBySpan::new(span); + expr_finder.visit_expr(&self.tcx.hir().body(body_id).value); + + if let Some(hir::Expr { + kind: hir::ExprKind::Path(hir::QPath::Resolved(None, path)), + .. + }) = expr_finder.result + && let [ + .., + trait_path_segment @ hir::PathSegment { + res: Res::Def(DefKind::Trait, trait_id), + .. + }, + hir::PathSegment { + ident: assoc_item_name, + res: Res::Def(_, item_id), + .. + }, + ] = path.segments + && data.trait_ref.def_id == *trait_id + && self.tcx.trait_of_item(*item_id) == Some(*trait_id) + && let None = self.tainted_by_errors() + { + let (verb, noun) = match self.tcx.associated_item(item_id).kind { + ty::AssocKind::Const => ("refer to the", "constant"), + ty::AssocKind::Fn => ("call", "function"), + // This is already covered by E0223, but this following single match + // arm doesn't hurt here. + ty::AssocKind::Type => ("refer to the", "type"), + }; + + // Replace the more general E0283 with a more specific error + err.cancel(); + err = self.tcx.sess.struct_span_err_with_code( + span, + format!( + "cannot {verb} associated {noun} on trait without specifying the \ + corresponding `impl` type", + ), + rustc_errors::error_code!(E0790), + ); + + if let Some(local_def_id) = data.trait_ref.def_id.as_local() + && let Some(hir::Node::Item(hir::Item { + ident: trait_name, + kind: hir::ItemKind::Trait(_, _, _, _, trait_item_refs), + .. 
+ })) = self.tcx.hir().find_by_def_id(local_def_id) + && let Some(method_ref) = trait_item_refs + .iter() + .find(|item_ref| item_ref.ident == *assoc_item_name) + { + err.span_label( + method_ref.span, + format!("`{trait_name}::{assoc_item_name}` defined here"), + ); + } + + err.span_label(span, format!("cannot {verb} associated {noun} of trait")); + + let trait_impls = self.tcx.trait_impls_of(data.trait_ref.def_id); + + if let Some(impl_def_id) = + trait_impls.non_blanket_impls().values().flatten().next() + { + let non_blanket_impl_count = + trait_impls.non_blanket_impls().values().flatten().count(); + // If there is only one implementation of the trait, suggest using it. + // Otherwise, use a placeholder comment for the implementation. + let (message, self_type) = if non_blanket_impl_count == 1 { + ( + "use the fully-qualified path to the only available \ + implementation", + format!( + "{}", + self.tcx.type_of(impl_def_id).instantiate_identity() + ), + ) + } else { + ( + "use a fully-qualified path to a specific available \ + implementation", + "/* self type */".to_string(), + ) + }; + let mut suggestions = + vec![(path.span.shrink_to_lo(), format!("<{self_type} as "))]; + if let Some(generic_arg) = trait_path_segment.args { + let between_span = + trait_path_segment.ident.span.between(generic_arg.span_ext); + // get rid of :: between Trait and + // must be '::' between them, otherwise the parser won't accept the code + suggestions.push((between_span, "".to_string())); + suggestions + .push((generic_arg.span_ext.shrink_to_hi(), ">".to_string())); + } else { + suggestions.push(( + trait_path_segment.ident.span.shrink_to_hi(), + ">".to_string(), + )); + } + err.multipart_suggestion( + message, + suggestions, + Applicability::MaybeIncorrect, + ); + } + } + }; + + err + } + + ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { + // Same hacky approach as above to avoid deluging user + // with error messages. 
+ if arg.references_error() + || self.tcx.sess.has_errors().is_some() + || self.tainted_by_errors().is_some() + { + return; + } + + self.emit_inference_failure_err( + obligation.cause.body_id, + span, + arg, + ErrorCode::E0282, + false, + ) + } + + ty::PredicateKind::Subtype(data) => { + if data.references_error() + || self.tcx.sess.has_errors().is_some() + || self.tainted_by_errors().is_some() + { + // no need to overload user in such cases + return; + } + let SubtypePredicate { a_is_expected: _, a, b } = data; + // both must be type variables, or the other would've been instantiated + assert!(a.is_ty_var() && b.is_ty_var()); + self.emit_inference_failure_err( + obligation.cause.body_id, + span, + a.into(), + ErrorCode::E0282, + true, + ) + } + ty::PredicateKind::Clause(ty::ClauseKind::Projection(data)) => { + if predicate.references_error() || self.tainted_by_errors().is_some() { + return; + } + let subst = data + .projection_ty + .args + .iter() + .chain(Some(data.term.into_arg())) + .find(|g| g.has_non_region_infer()); + if let Some(subst) = subst { + let mut err = self.emit_inference_failure_err( + obligation.cause.body_id, + span, + subst, + ErrorCode::E0284, + true, + ); + err.note(format!("cannot satisfy `{predicate}`")); + err + } else { + // If we can't find a substitution, just print a generic error + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0284, + "type annotations needed: cannot satisfy `{}`", + predicate, + ); + err.span_label(span, format!("cannot satisfy `{predicate}`")); + err + } + } + + ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(data)) => { + if predicate.references_error() || self.tainted_by_errors().is_some() { + return; + } + let subst = data.walk().find(|g| g.is_non_region_infer()); + if let Some(subst) = subst { + let err = self.emit_inference_failure_err( + obligation.cause.body_id, + span, + subst, + ErrorCode::E0284, + true, + ); + err + } else { + // If we can't find a substitution, just print a generic error + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0284, + "type annotations needed: cannot satisfy `{}`", + predicate, + ); + err.span_label(span, format!("cannot satisfy `{predicate}`")); + err + } + } + _ => { + if self.tcx.sess.has_errors().is_some() || self.tainted_by_errors().is_some() { + return; + } + let mut err = struct_span_err!( + self.tcx.sess, + span, + E0284, + "type annotations needed: cannot satisfy `{}`", + predicate, + ); + err.span_label(span, format!("cannot satisfy `{predicate}`")); + err + } + }; + self.note_obligation_cause(&mut err, obligation); + err.emit(); + } + + fn annotate_source_of_ambiguity( + &self, + err: &mut Diagnostic, + ambiguities: &[ambiguity::Ambiguity], + predicate: ty::Predicate<'tcx>, + ) { + let mut spans = vec![]; + let mut crates = vec![]; + let mut post = vec![]; + let mut has_param_env = false; + for ambiguity in ambiguities { + match ambiguity { + ambiguity::Ambiguity::DefId(impl_def_id) => { + match self.tcx.span_of_impl(*impl_def_id) { + Ok(span) => spans.push(span), + Err(name) => { + crates.push(name); + if let Some(header) = to_pretty_impl_header(self.tcx, *impl_def_id) { + post.push(header); + } + } + } + } + ambiguity::Ambiguity::ParamEnv(span) => { + has_param_env = true; + spans.push(*span); + } + } + } + let mut crate_names: Vec<_> = crates.iter().map(|n| format!("`{n}`")).collect(); + crate_names.sort(); + crate_names.dedup(); + post.sort(); + post.dedup(); + + if self.tainted_by_errors().is_some() + && (crate_names.len() == 1 + && spans.len() == 0 + 
&& ["`core`", "`alloc`", "`std`"].contains(&crate_names[0].as_str()) + || predicate.visit_with(&mut HasNumericInferVisitor).is_break()) + { + // Avoid complaining about other inference issues for expressions like + // `42 >> 1`, where the types are still `{integer}`, but we want to + // Do we need `trait_ref.skip_binder().self_ty().is_numeric() &&` too? + // NOTE(eddyb) this was `.cancel()`, but `err` + // is borrowed, so we can't fully defuse it. + err.downgrade_to_delayed_bug(); + return; + } + + let msg = format!( + "multiple `impl`s{} satisfying `{}` found", + if has_param_env { " or `where` clauses" } else { "" }, + predicate + ); + let post = if post.len() > 1 || (post.len() == 1 && post[0].contains('\n')) { + format!(":\n{}", post.iter().map(|p| format!("- {p}")).collect::>().join("\n"),) + } else if post.len() == 1 { + format!(": `{}`", post[0]) + } else { + String::new() + }; + + match (spans.len(), crates.len(), crate_names.len()) { + (0, 0, 0) => { + err.note(format!("cannot satisfy `{predicate}`")); + } + (0, _, 1) => { + err.note(format!("{} in the `{}` crate{}", msg, crates[0], post,)); + } + (0, _, _) => { + err.note(format!( + "{} in the following crates: {}{}", + msg, + crate_names.join(", "), + post, + )); + } + (_, 0, 0) => { + let span: MultiSpan = spans.into(); + err.span_note(span, msg); + } + (_, 1, 1) => { + let span: MultiSpan = spans.into(); + err.span_note(span, msg); + err.note(format!("and another `impl` found in the `{}` crate{}", crates[0], post,)); + } + _ => { + let span: MultiSpan = spans.into(); + err.span_note(span, msg); + err.note(format!( + "and more `impl`s found in the following crates: {}{}", + crate_names.join(", "), + post, + )); + } + } + } + + /// Returns `true` if the trait predicate may apply for *some* assignment + /// to the type parameters. + fn predicate_can_apply( + &self, + param_env: ty::ParamEnv<'tcx>, + pred: ty::PolyTraitPredicate<'tcx>, + ) -> bool { + struct ParamToVarFolder<'a, 'tcx> { + infcx: &'a InferCtxt<'tcx>, + var_map: FxHashMap, Ty<'tcx>>, + } + + impl<'a, 'tcx> TypeFolder> for ParamToVarFolder<'a, 'tcx> { + fn interner(&self) -> TyCtxt<'tcx> { + self.infcx.tcx + } + + fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { + if let ty::Param(_) = *ty.kind() { + let infcx = self.infcx; + *self.var_map.entry(ty).or_insert_with(|| { + infcx.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::MiscVariable, + span: DUMMY_SP, + }) + }) + } else { + ty.super_fold_with(self) + } + } + } + + self.probe(|_| { + let cleaned_pred = + pred.fold_with(&mut ParamToVarFolder { infcx: self, var_map: Default::default() }); + + let InferOk { value: cleaned_pred, .. } = + self.infcx.at(&ObligationCause::dummy(), param_env).normalize(cleaned_pred); + + let obligation = + Obligation::new(self.tcx, ObligationCause::dummy(), param_env, cleaned_pred); + + self.predicate_may_hold(&obligation) + }) + } + + fn note_obligation_cause(&self, err: &mut Diagnostic, obligation: &PredicateObligation<'tcx>) { + // First, attempt to add note to this error with an async-await-specific + // message, and fall back to regular note otherwise. 
+ if !self.maybe_note_obligation_cause_for_async_await(err, obligation) { + self.note_obligation_cause_code( + obligation.cause.body_id, + err, + obligation.predicate, + obligation.param_env, + obligation.cause.code(), + &mut vec![], + &mut Default::default(), + ); + self.suggest_unsized_bound_if_applicable(err, obligation); + } + } + + #[instrument(level = "debug", skip_all)] + fn suggest_unsized_bound_if_applicable( + &self, + err: &mut Diagnostic, + obligation: &PredicateObligation<'tcx>, + ) { + let ty::PredicateKind::Clause(ty::ClauseKind::Trait(pred)) = + obligation.predicate.kind().skip_binder() + else { + return; + }; + let (ObligationCauseCode::BindingObligation(item_def_id, span) + | ObligationCauseCode::ExprBindingObligation(item_def_id, span, ..)) = + *obligation.cause.code().peel_derives() + else { + return; + }; + debug!(?pred, ?item_def_id, ?span); + + let (Some(node), true) = ( + self.tcx.hir().get_if_local(item_def_id), + Some(pred.def_id()) == self.tcx.lang_items().sized_trait(), + ) else { + return; + }; + self.maybe_suggest_unsized_generics(err, span, node); + } + + #[instrument(level = "debug", skip_all)] + fn maybe_suggest_unsized_generics(&self, err: &mut Diagnostic, span: Span, node: Node<'tcx>) { + let Some(generics) = node.generics() else { + return; + }; + let sized_trait = self.tcx.lang_items().sized_trait(); + debug!(?generics.params); + debug!(?generics.predicates); + let Some(param) = generics.params.iter().find(|param| param.span == span) else { + return; + }; + // Check that none of the explicit trait bounds is `Sized`. Assume that an explicit + // `Sized` bound is there intentionally and we don't need to suggest relaxing it. + let explicitly_sized = generics + .bounds_for_param(param.def_id) + .flat_map(|bp| bp.bounds) + .any(|bound| bound.trait_ref().and_then(|tr| tr.trait_def_id()) == sized_trait); + if explicitly_sized { + return; + } + debug!(?param); + match node { + hir::Node::Item( + item @ hir::Item { + // Only suggest indirection for uses of type parameters in ADTs. + kind: + hir::ItemKind::Enum(..) | hir::ItemKind::Struct(..) | hir::ItemKind::Union(..), + .. + }, + ) => { + if self.maybe_indirection_for_unsized(err, item, param) { + return; + } + } + _ => {} + }; + // Didn't add an indirection suggestion, so add a general suggestion to relax `Sized`. + let (span, separator) = if let Some(s) = generics.bounds_span_for_suggestions(param.def_id) + { + (s, " +") + } else { + (span.shrink_to_hi(), ":") + }; + err.span_suggestion_verbose( + span, + "consider relaxing the implicit `Sized` restriction", + format!("{separator} ?Sized"), + Applicability::MachineApplicable, + ); + } + + fn maybe_indirection_for_unsized( + &self, + err: &mut Diagnostic, + item: &Item<'tcx>, + param: &GenericParam<'tcx>, + ) -> bool { + // Suggesting `T: ?Sized` is only valid in an ADT if `T` is only used in a + // borrow. `struct S<'a, T: ?Sized>(&'a T);` is valid, `struct S(T);` + // is not. Look for invalid "bare" parameter uses, and suggest using indirection. 
+ let mut visitor = + FindTypeParam { param: param.name.ident().name, invalid_spans: vec![], nested: false }; + visitor.visit_item(item); + if visitor.invalid_spans.is_empty() { + return false; + } + let mut multispan: MultiSpan = param.span.into(); + multispan.push_span_label( + param.span, + format!("this could be changed to `{}: ?Sized`...", param.name.ident()), + ); + for sp in visitor.invalid_spans { + multispan.push_span_label( + sp, + format!("...if indirection were used here: `Box<{}>`", param.name.ident()), + ); + } + err.span_help( + multispan, + format!( + "you could relax the implicit `Sized` bound on `{T}` if it were \ + used through indirection like `&{T}` or `Box<{T}>`", + T = param.name.ident(), + ), + ); + true + } + + fn is_recursive_obligation( + &self, + obligated_types: &mut Vec>, + cause_code: &ObligationCauseCode<'tcx>, + ) -> bool { + if let ObligationCauseCode::BuiltinDerivedObligation(ref data) = cause_code { + let parent_trait_ref = self.resolve_vars_if_possible(data.parent_trait_pred); + let self_ty = parent_trait_ref.skip_binder().self_ty(); + if obligated_types.iter().any(|ot| ot == &self_ty) { + return true; + } + if let ty::Adt(def, args) = self_ty.kind() + && let [arg] = &args[..] + && let ty::GenericArgKind::Type(ty) = arg.unpack() + && let ty::Adt(inner_def, _) = ty.kind() + && inner_def == def + { + return true; + } + } + false + } + + fn get_standard_error_message( + &self, + trait_predicate: &ty::PolyTraitPredicate<'tcx>, + message: Option, + predicate_is_const: bool, + append_const_msg: Option, + post_message: String, + ) -> String { + message + .and_then(|cannot_do_this| { + match (predicate_is_const, append_const_msg) { + // do nothing if predicate is not const + (false, _) => Some(cannot_do_this), + // suggested using default post message + (true, Some(AppendConstMessage::Default)) => { + Some(format!("{cannot_do_this} in const contexts")) + } + // overridden post message + (true, Some(AppendConstMessage::Custom(custom_msg))) => { + Some(format!("{cannot_do_this}{custom_msg}")) + } + // fallback to generic message + (true, None) => None, + } + }) + .unwrap_or_else(|| { + format!("the trait bound `{trait_predicate}` is not satisfied{post_message}") + }) + } + + fn get_safe_transmute_error_and_reason( + &self, + obligation: PredicateObligation<'tcx>, + trait_ref: ty::PolyTraitRef<'tcx>, + span: Span, + ) -> GetSafeTransmuteErrorAndReason { + use rustc_transmute::Answer; + + // Erase regions because layout code doesn't particularly care about regions. 
+ let trait_ref = self.tcx.erase_regions(self.tcx.erase_late_bound_regions(trait_ref)); + + let src_and_dst = rustc_transmute::Types { + dst: trait_ref.args.type_at(0), + src: trait_ref.args.type_at(1), + }; + let scope = trait_ref.args.type_at(2); + let Some(assume) = rustc_transmute::Assume::from_const( + self.infcx.tcx, + obligation.param_env, + trait_ref.args.const_at(3), + ) else { + span_bug!( + span, + "Unable to construct rustc_transmute::Assume where it was previously possible" + ); + }; + + match rustc_transmute::TransmuteTypeEnv::new(self.infcx).is_transmutable( + obligation.cause, + src_and_dst, + scope, + assume, + ) { + Answer::No(reason) => { + let dst = trait_ref.args.type_at(0); + let src = trait_ref.args.type_at(1); + let err_msg = format!( + "`{src}` cannot be safely transmuted into `{dst}` in the defining scope of `{scope}`" + ); + let safe_transmute_explanation = match reason { + rustc_transmute::Reason::SrcIsUnspecified => { + format!("`{src}` does not have a well-specified layout") + } + + rustc_transmute::Reason::DstIsUnspecified => { + format!("`{dst}` does not have a well-specified layout") + } + + rustc_transmute::Reason::DstIsBitIncompatible => { + format!("At least one value of `{src}` isn't a bit-valid value of `{dst}`") + } + + rustc_transmute::Reason::DstIsPrivate => format!( + "`{dst}` is or contains a type or field that is not visible in that scope" + ), + rustc_transmute::Reason::DstIsTooBig => { + format!("The size of `{src}` is smaller than the size of `{dst}`") + } + rustc_transmute::Reason::SrcSizeOverflow => { + format!( + "values of the type `{src}` are too big for the current architecture" + ) + } + rustc_transmute::Reason::DstSizeOverflow => { + format!( + "values of the type `{dst}` are too big for the current architecture" + ) + } + rustc_transmute::Reason::DstHasStricterAlignment { + src_min_align, + dst_min_align, + } => { + format!( + "The minimum alignment of `{src}` ({src_min_align}) should be greater than that of `{dst}` ({dst_min_align})" + ) + } + rustc_transmute::Reason::DstIsMoreUnique => { + format!("`{src}` is a shared reference, but `{dst}` is a unique reference") + } + // Already reported by rustc + rustc_transmute::Reason::TypeError => { + return GetSafeTransmuteErrorAndReason::Silent; + } + rustc_transmute::Reason::SrcLayoutUnknown => { + format!("`{src}` has an unknown layout") + } + rustc_transmute::Reason::DstLayoutUnknown => { + format!("`{dst}` has an unknown layout") + } + }; + GetSafeTransmuteErrorAndReason::Error { err_msg, safe_transmute_explanation } + } + // Should never get a Yes at this point! We already ran it before, and did not get a Yes. + Answer::Yes => span_bug!( + span, + "Inconsistent rustc_transmute::is_transmutable(...) 
result, got Yes", + ), + other => span_bug!(span, "Unsupported rustc_transmute::Answer variant: `{other:?}`"), + } + } + + fn add_tuple_trait_message( + &self, + obligation_cause_code: &ObligationCauseCode<'tcx>, + err: &mut Diagnostic, + ) { + match obligation_cause_code { + ObligationCauseCode::RustCall => { + err.set_primary_message("functions with the \"rust-call\" ABI must take a single non-self tuple argument"); + } + ObligationCauseCode::BindingObligation(def_id, _) + | ObligationCauseCode::ItemObligation(def_id) + if self.tcx.is_fn_trait(*def_id) => + { + err.code(rustc_errors::error_code!(E0059)); + err.set_primary_message(format!( + "type parameter to bare `{}` trait must be a tuple", + self.tcx.def_path_str(*def_id) + )); + } + _ => {} + } + } + + fn try_to_add_help_message( + &self, + obligation: &PredicateObligation<'tcx>, + trait_ref: ty::PolyTraitRef<'tcx>, + trait_predicate: &ty::PolyTraitPredicate<'tcx>, + err: &mut Diagnostic, + span: Span, + is_fn_trait: bool, + suggested: bool, + unsatisfied_const: bool, + ) { + let body_def_id = obligation.cause.body_id; + let span = if let ObligationCauseCode::BinOp { rhs_span: Some(rhs_span), .. } = + obligation.cause.code() + { + *rhs_span + } else { + span + }; + + // Try to report a help message + if is_fn_trait + && let Ok((implemented_kind, params)) = self.type_implements_fn_trait( + obligation.param_env, + trait_ref.self_ty(), + trait_predicate.skip_binder().polarity, + ) + { + self.add_help_message_for_fn_trait(trait_ref, err, implemented_kind, params); + } else if !trait_ref.has_non_region_infer() + && self.predicate_can_apply(obligation.param_env, *trait_predicate) + { + // If a where-clause may be useful, remind the + // user that they can add it. + // + // don't display an on-unimplemented note, as + // these notes will often be of the form + // "the type `T` can't be frobnicated" + // which is somewhat confusing. + self.suggest_restricting_param_bound( + err, + *trait_predicate, + None, + obligation.cause.body_id, + ); + } else if trait_ref.def_id().is_local() + && self.tcx.trait_impls_of(trait_ref.def_id()).is_empty() + && !self.tcx.trait_is_auto(trait_ref.def_id()) + && !self.tcx.trait_is_alias(trait_ref.def_id()) + { + err.span_help( + self.tcx.def_span(trait_ref.def_id()), + crate::fluent_generated::trait_selection_trait_has_no_impls, + ); + } else if !suggested && !unsatisfied_const { + // Can't show anything else useful, try to find similar impls. + let impl_candidates = self.find_similar_impl_candidates(*trait_predicate); + if !self.report_similar_impl_candidates( + &impl_candidates, + trait_ref, + body_def_id, + err, + true, + obligation.param_env, + ) { + self.report_similar_impl_candidates_for_root_obligation( + &obligation, + *trait_predicate, + body_def_id, + err, + ); + } + + self.suggest_convert_to_slice( + err, + obligation, + trait_ref, + impl_candidates.as_slice(), + span, + ); + } + } + + fn add_help_message_for_fn_trait( + &self, + trait_ref: ty::PolyTraitRef<'tcx>, + err: &mut Diagnostic, + implemented_kind: ty::ClosureKind, + params: ty::Binder<'tcx, Ty<'tcx>>, + ) { + // If the type implements `Fn`, `FnMut`, or `FnOnce`, suppress the following + // suggestion to add trait bounds for the type, since we only typically implement + // these traits once. + + // Note if the `FnMut` or `FnOnce` is less general than the trait we're trying + // to implement. 
+ let selected_kind = self + .tcx + .fn_trait_kind_from_def_id(trait_ref.def_id()) + .expect("expected to map DefId to ClosureKind"); + if !implemented_kind.extends(selected_kind) { + err.note(format!( + "`{}` implements `{}`, but it must implement `{}`, which is more general", + trait_ref.skip_binder().self_ty(), + implemented_kind, + selected_kind + )); + } + + // Note any argument mismatches + let given_ty = params.skip_binder(); + let expected_ty = trait_ref.skip_binder().args.type_at(1); + if let ty::Tuple(given) = given_ty.kind() + && let ty::Tuple(expected) = expected_ty.kind() + { + if expected.len() != given.len() { + // Note number of types that were expected and given + err.note( + format!( + "expected a closure taking {} argument{}, but one taking {} argument{} was given", + given.len(), + pluralize!(given.len()), + expected.len(), + pluralize!(expected.len()), + ) + ); + } else if !self.same_type_modulo_infer(given_ty, expected_ty) { + // Print type mismatch + let (expected_args, given_args) = self.cmp(given_ty, expected_ty); + err.note_expected_found( + &"a closure with arguments", + expected_args, + &"a closure with arguments", + given_args, + ); + } + } + } + + fn maybe_add_note_for_unsatisfied_const( + &self, + _obligation: &PredicateObligation<'tcx>, + _trait_ref: ty::PolyTraitRef<'tcx>, + _trait_predicate: &ty::PolyTraitPredicate<'tcx>, + _err: &mut Diagnostic, + _span: Span, + ) -> UnsatisfiedConst { + let unsatisfied_const = UnsatisfiedConst(false); + // FIXME(effects) + unsatisfied_const + } + + fn report_closure_error( + &self, + obligation: &PredicateObligation<'tcx>, + closure_def_id: DefId, + found_kind: ty::ClosureKind, + kind: ty::ClosureKind, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { + let closure_span = self.tcx.def_span(closure_def_id); + + let mut err = ClosureKindMismatch { + closure_span, + expected: kind, + found: found_kind, + cause_span: obligation.cause.span, + fn_once_label: None, + fn_mut_label: None, + }; + + // Additional context information explaining why the closure only implements + // a particular trait. 
+ if let Some(typeck_results) = &self.typeck_results { + let hir_id = self.tcx.hir().local_def_id_to_hir_id(closure_def_id.expect_local()); + match (found_kind, typeck_results.closure_kind_origins().get(hir_id)) { + (ty::ClosureKind::FnOnce, Some((span, place))) => { + err.fn_once_label = Some(ClosureFnOnceLabel { + span: *span, + place: ty::place_to_string_for_capture(self.tcx, &place), + }) + } + (ty::ClosureKind::FnMut, Some((span, place))) => { + err.fn_mut_label = Some(ClosureFnMutLabel { + span: *span, + place: ty::place_to_string_for_capture(self.tcx, &place), + }) + } + _ => {} + } + } + + self.tcx.sess.create_err(err) + } + + fn report_type_parameter_mismatch_cyclic_type_error( + &self, + obligation: &PredicateObligation<'tcx>, + found_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, + expected_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, + terr: TypeError<'tcx>, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { + let self_ty = found_trait_ref.self_ty().skip_binder(); + let (cause, terr) = if let ty::Closure(def_id, _) = self_ty.kind() { + ( + ObligationCause::dummy_with_span(self.tcx.def_span(def_id)), + TypeError::CyclicTy(self_ty), + ) + } else { + (obligation.cause.clone(), terr) + }; + self.report_and_explain_type_error( + TypeTrace::poly_trait_refs(&cause, true, expected_trait_ref, found_trait_ref), + terr, + ) + } + + fn report_opaque_type_auto_trait_leakage( + &self, + obligation: &PredicateObligation<'tcx>, + def_id: DefId, + ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { + let name = match self.tcx.opaque_type_origin(def_id.expect_local()) { + hir::OpaqueTyOrigin::FnReturn(_) | hir::OpaqueTyOrigin::AsyncFn(_) => { + "opaque type".to_string() + } + hir::OpaqueTyOrigin::TyAlias { .. } => { + format!("`{}`", self.tcx.def_path_debug_str(def_id)) + } + }; + let mut err = self.tcx.sess.struct_span_err( + obligation.cause.span, + format!("cannot check whether the hidden type of {name} satisfies auto traits"), + ); + err.span_note(self.tcx.def_span(def_id), "opaque type is declared here"); + match self.defining_use_anchor { + DefiningAnchor::Bubble | DefiningAnchor::Error => {} + DefiningAnchor::Bind(bind) => { + err.span_note( + self.tcx.def_ident_span(bind).unwrap_or_else(|| self.tcx.def_span(bind)), + "this item depends on auto traits of the hidden type, \ + but may also be registering the hidden type. \ + This is not supported right now. \ + You can try moving the opaque type and the item that actually registers a hidden type into a new submodule".to_string(), + ); + } + }; + + if let Some(diag) = + self.tcx.sess.diagnostic().steal_diagnostic(self.tcx.def_span(def_id), StashKey::Cycle) + { + diag.cancel(); + } + + err + } + + fn report_type_parameter_mismatch_error( + &self, + obligation: &PredicateObligation<'tcx>, + span: Span, + found_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, + expected_trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>, + ) -> Option> { + let found_trait_ref = self.resolve_vars_if_possible(found_trait_ref); + let expected_trait_ref = self.resolve_vars_if_possible(expected_trait_ref); + + if expected_trait_ref.self_ty().references_error() { + return None; + } + + let Some(found_trait_ty) = found_trait_ref.self_ty().no_bound_vars() else { + return None; + }; + + let found_did = match *found_trait_ty.kind() { + ty::Closure(did, _) | ty::Foreign(did) | ty::FnDef(did, _) | ty::Coroutine(did, ..) 
=> { + Some(did) + } + ty::Adt(def, _) => Some(def.did()), + _ => None, + }; + + let found_node = found_did.and_then(|did| self.tcx.hir().get_if_local(did)); + let found_span = found_did.and_then(|did| self.tcx.hir().span_if_local(did)); + + if self.reported_closure_mismatch.borrow().contains(&(span, found_span)) { + // We check closures twice, with obligations flowing in different directions, + // but we want to complain about them only once. + return None; + } + + self.reported_closure_mismatch.borrow_mut().insert((span, found_span)); + + let mut not_tupled = false; + + let found = match found_trait_ref.skip_binder().args.type_at(1).kind() { + ty::Tuple(ref tys) => vec![ArgKind::empty(); tys.len()], + _ => { + not_tupled = true; + vec![ArgKind::empty()] + } + }; + + let expected_ty = expected_trait_ref.skip_binder().args.type_at(1); + let expected = match expected_ty.kind() { + ty::Tuple(ref tys) => { + tys.iter().map(|t| ArgKind::from_expected_ty(t, Some(span))).collect() + } + _ => { + not_tupled = true; + vec![ArgKind::Arg("_".to_owned(), expected_ty.to_string())] + } + }; + + // If this is a `Fn` family trait and either the expected or found + // is not tupled, then fall back to just a regular mismatch error. + // This shouldn't be common unless manually implementing one of the + // traits manually, but don't make it more confusing when it does + // happen. + Some( + if Some(expected_trait_ref.def_id()) != self.tcx.lang_items().coroutine_trait() + && not_tupled + { + self.report_and_explain_type_error( + TypeTrace::poly_trait_refs( + &obligation.cause, + true, + expected_trait_ref, + found_trait_ref, + ), + ty::error::TypeError::Mismatch, + ) + } else if found.len() == expected.len() { + self.report_closure_arg_mismatch( + span, + found_span, + found_trait_ref, + expected_trait_ref, + obligation.cause.code(), + found_node, + obligation.param_env, + ) + } else { + let (closure_span, closure_arg_span, found) = found_did + .and_then(|did| { + let node = self.tcx.hir().get_if_local(did)?; + let (found_span, closure_arg_span, found) = + self.get_fn_like_arguments(node)?; + Some((Some(found_span), closure_arg_span, found)) + }) + .unwrap_or((found_span, None, found)); + + self.report_arg_count_mismatch( + span, + closure_span, + expected, + found, + found_trait_ty.is_closure(), + closure_arg_span, + ) + }, + ) + } + + fn report_not_const_evaluatable_error( + &self, + obligation: &PredicateObligation<'tcx>, + span: Span, + ) -> Option> { + if !self.tcx.features().generic_const_exprs { + let mut err = self + .tcx + .sess + .struct_span_err(span, "constant expression depends on a generic parameter"); + // FIXME(const_generics): we should suggest to the user how they can resolve this + // issue. However, this is currently not actually possible + // (see https://github.com/rust-lang/rust/issues/66962#issuecomment-575907083). + // + // Note that with `feature(generic_const_exprs)` this case should not + // be reachable. 
+ err.note("this may fail depending on what value the parameter takes"); + err.emit(); + return None; + } + + match obligation.predicate.kind().skip_binder() { + ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(ct)) => { + let ty::ConstKind::Unevaluated(uv) = ct.kind() else { + bug!("const evaluatable failed for non-unevaluated const `{ct:?}`"); + }; + let mut err = self.tcx.sess.struct_span_err(span, "unconstrained generic constant"); + let const_span = self.tcx.def_span(uv.def); + match self.tcx.sess.source_map().span_to_snippet(const_span) { + Ok(snippet) => err.help(format!( + "try adding a `where` bound using this expression: `where [(); {snippet}]:`" + )), + _ => err.help("consider adding a `where` bound using this expression"), + }; + Some(err) + } + _ => { + span_bug!( + span, + "unexpected non-ConstEvaluatable predicate, this should not be reachable" + ) + } + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/fulfill.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/fulfill.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/fulfill.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/fulfill.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,5 @@ use crate::infer::{InferCtxt, TyOrConstInferVar}; +use rustc_data_structures::captures::Captures; use rustc_data_structures::obligation_forest::ProcessResult; use rustc_data_structures::obligation_forest::{Error, ForestObligation, Outcome}; use rustc_data_structures::obligation_forest::{ObligationForest, ObligationProcessor}; @@ -68,7 +69,7 @@ // should mostly optimize for reading speed, while modifying is not as relevant. // // For whatever reason using a boxed slice is slower than using a `Vec` here. - pub stalled_on: Vec>, + pub stalled_on: Vec, } // `PendingPredicateObligation` is used a lot. Make sure it doesn't unintentionally get bigger. 
@@ -669,7 +670,7 @@
         &mut self,
         obligation: &PredicateObligation<'tcx>,
         trait_obligation: PolyTraitObligation<'tcx>,
-        stalled_on: &mut Vec<TyOrConstInferVar<'tcx>>,
+        stalled_on: &mut Vec<TyOrConstInferVar>,
     ) -> ProcessResult<PendingPredicateObligation<'tcx>, FulfillmentErrorCode<'tcx>> {
         let infcx = self.selcx.infcx;
         if obligation.predicate.is_global() && !self.selcx.is_intercrate() {
@@ -722,7 +723,7 @@
         &mut self,
         obligation: &PredicateObligation<'tcx>,
         project_obligation: PolyProjectionObligation<'tcx>,
-        stalled_on: &mut Vec<TyOrConstInferVar<'tcx>>,
+        stalled_on: &mut Vec<TyOrConstInferVar>,
     ) -> ProcessResult<PendingPredicateObligation<'tcx>, FulfillmentErrorCode<'tcx>> {
         let tcx = self.selcx.tcx();
 
@@ -775,7 +776,7 @@
 fn args_infer_vars<'a, 'tcx>(
     selcx: &SelectionContext<'a, 'tcx>,
     args: ty::Binder<'tcx, GenericArgsRef<'tcx>>,
-) -> impl Iterator<Item = TyOrConstInferVar<'tcx>> {
+) -> impl Iterator<Item = TyOrConstInferVar> + Captures<'tcx> {
     selcx
         .infcx
         .resolve_vars_if_possible(args)
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/mod.rs
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/mod.rs	2023-12-04 19:48:34.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/mod.rs	2023-12-21 16:55:28.000000000 +0000
@@ -41,18 +41,13 @@
 pub(crate) use self::project::{needs_normalization, BoundVarReplacer, PlaceholderReplacer};
 
-pub use self::FulfillmentErrorCode::*;
-pub use self::ImplSource::*;
-pub use self::ObligationCauseCode::*;
-pub use self::SelectionError::*;
-
 pub use self::coherence::{add_placeholder_note, orphan_check, overlapping_impls};
 pub use self::coherence::{OrphanCheckErr, OverlapResult};
 pub use self::engine::{ObligationCtxt, TraitEngineExt};
 pub use self::fulfill::{FulfillmentContext, PendingPredicateObligation};
 pub use self::object_safety::astconv_object_safety_violations;
 pub use self::object_safety::is_vtable_safe_method;
-pub use self::object_safety::MethodViolationCode;
+pub use self::object_safety::object_safety_violations_for_assoc_item;
 pub use self::object_safety::ObjectSafetyViolation;
 pub use self::project::NormalizeExt;
 pub use self::project::{normalize_inherent_projection, normalize_projection_type};
@@ -204,7 +199,7 @@
     let predicates = match fully_normalize(&infcx, cause, elaborated_env, predicates) {
         Ok(predicates) => predicates,
         Err(errors) => {
-            let reported = infcx.err_ctxt().report_fulfillment_errors(&errors);
+            let reported = infcx.err_ctxt().report_fulfillment_errors(errors);
             return Err(reported);
         }
     };
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/object_safety.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/object_safety.rs
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/object_safety.rs	2023-12-04 19:48:34.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/object_safety.rs	2023-12-21 16:55:28.000000000 +0000
@@ -97,6 +97,10 @@
 /// object. Note that object-safe traits can have some
 /// non-vtable-safe methods, so long as they require `Self: Sized` or
 /// otherwise ensure that they cannot be used when `Self = Trait`.
+///
+/// [`MethodViolationCode::WhereClauseReferencesSelf`] is considered object safe due to backwards
+/// compatibility, see and
+/// [`WHERE_CLAUSES_OBJECT_SAFETY`].
pub fn is_vtable_safe_method(tcx: TyCtxt<'_>, trait_def_id: DefId, method: ty::AssocItem) -> bool { debug_assert!(tcx.generics_of(trait_def_id).has_self); debug!("is_vtable_safe_method({:?}, {:?})", trait_def_id, method); @@ -105,10 +109,9 @@ return false; } - match virtual_call_violation_for_method(tcx, trait_def_id, method) { - None | Some(MethodViolationCode::WhereClauseReferencesSelf) => true, - Some(_) => false, - } + virtual_call_violations_for_method(tcx, trait_def_id, method) + .iter() + .all(|v| matches!(v, MethodViolationCode::WhereClauseReferencesSelf)) } fn object_safety_violations_for_trait( @@ -119,7 +122,7 @@ let mut violations: Vec<_> = tcx .associated_items(trait_def_id) .in_definition_order() - .filter_map(|&item| object_safety_violation_for_assoc_item(tcx, trait_def_id, item)) + .flat_map(|&item| object_safety_violations_for_assoc_item(tcx, trait_def_id, item)) .collect(); // Check the trait itself. @@ -357,49 +360,52 @@ /// Returns `Some(_)` if this item makes the containing trait not object safe. #[instrument(level = "debug", skip(tcx), ret)] -fn object_safety_violation_for_assoc_item( +pub fn object_safety_violations_for_assoc_item( tcx: TyCtxt<'_>, trait_def_id: DefId, item: ty::AssocItem, -) -> Option { +) -> Vec { // Any item that has a `Self : Sized` requisite is otherwise // exempt from the regulations. if tcx.generics_require_sized_self(item.def_id) { - return None; + return Vec::new(); } match item.kind { // Associated consts are never object safe, as they can't have `where` bounds yet at all, // and associated const bounds in trait objects aren't a thing yet either. ty::AssocKind::Const => { - Some(ObjectSafetyViolation::AssocConst(item.name, item.ident(tcx).span)) + vec![ObjectSafetyViolation::AssocConst(item.name, item.ident(tcx).span)] } - ty::AssocKind::Fn => virtual_call_violation_for_method(tcx, trait_def_id, item).map(|v| { - let node = tcx.hir().get_if_local(item.def_id); - // Get an accurate span depending on the violation. - let span = match (&v, node) { - (MethodViolationCode::ReferencesSelfInput(Some(span)), _) => *span, - (MethodViolationCode::UndispatchableReceiver(Some(span)), _) => *span, - (MethodViolationCode::ReferencesImplTraitInTrait(span), _) => *span, - (MethodViolationCode::ReferencesSelfOutput, Some(node)) => { - node.fn_decl().map_or(item.ident(tcx).span, |decl| decl.output.span()) - } - _ => item.ident(tcx).span, - }; + ty::AssocKind::Fn => virtual_call_violations_for_method(tcx, trait_def_id, item) + .into_iter() + .map(|v| { + let node = tcx.hir().get_if_local(item.def_id); + // Get an accurate span depending on the violation. + let span = match (&v, node) { + (MethodViolationCode::ReferencesSelfInput(Some(span)), _) => *span, + (MethodViolationCode::UndispatchableReceiver(Some(span)), _) => *span, + (MethodViolationCode::ReferencesImplTraitInTrait(span), _) => *span, + (MethodViolationCode::ReferencesSelfOutput, Some(node)) => { + node.fn_decl().map_or(item.ident(tcx).span, |decl| decl.output.span()) + } + _ => item.ident(tcx).span, + }; - ObjectSafetyViolation::Method(item.name, v, span) - }), + ObjectSafetyViolation::Method(item.name, v, span) + }) + .collect(), // Associated types can only be object safe if they have `Self: Sized` bounds. 
ty::AssocKind::Type => { if !tcx.features().generic_associated_types_extended && !tcx.generics_of(item.def_id).params.is_empty() && !item.is_impl_trait_in_trait() { - Some(ObjectSafetyViolation::GAT(item.name, item.ident(tcx).span)) + vec![ObjectSafetyViolation::GAT(item.name, item.ident(tcx).span)] } else { // We will permit associated types if they are explicitly mentioned in the trait object. // We can't check this here, as here we only check if it is guaranteed to not be possible. - None + Vec::new() } } } @@ -409,11 +415,11 @@ /// object; this does not necessarily imply that the enclosing trait /// is not object safe, because the method might have a where clause /// `Self:Sized`. -fn virtual_call_violation_for_method<'tcx>( +fn virtual_call_violations_for_method<'tcx>( tcx: TyCtxt<'tcx>, trait_def_id: DefId, method: ty::AssocItem, -) -> Option { +) -> Vec { let sig = tcx.fn_sig(method.def_id).instantiate_identity(); // The method's first parameter must be named `self` @@ -438,9 +444,14 @@ } else { None }; - return Some(MethodViolationCode::StaticMethod(sugg)); + + // Not having `self` parameter messes up the later checks, + // so we need to return instead of pushing + return vec![MethodViolationCode::StaticMethod(sugg)]; } + let mut errors = Vec::new(); + for (i, &input_ty) in sig.skip_binder().inputs().iter().enumerate().skip(1) { if contains_illegal_self_type_reference(tcx, trait_def_id, sig.rebind(input_ty)) { let span = if let Some(hir::Node::TraitItem(hir::TraitItem { @@ -452,20 +463,20 @@ } else { None }; - return Some(MethodViolationCode::ReferencesSelfInput(span)); + errors.push(MethodViolationCode::ReferencesSelfInput(span)); } } if contains_illegal_self_type_reference(tcx, trait_def_id, sig.output()) { - return Some(MethodViolationCode::ReferencesSelfOutput); + errors.push(MethodViolationCode::ReferencesSelfOutput); } if let Some(code) = contains_illegal_impl_trait_in_trait(tcx, method.def_id, sig.output()) { - return Some(code); + errors.push(code); } // We can't monomorphize things like `fn foo(...)`. let own_counts = tcx.generics_of(method.def_id).own_counts(); - if own_counts.types + own_counts.consts != 0 { - return Some(MethodViolationCode::Generic); + if own_counts.types > 0 || own_counts.consts > 0 { + errors.push(MethodViolationCode::Generic); } let receiver_ty = tcx.liberate_late_bound_regions(method.def_id, sig.input(0)); @@ -485,7 +496,7 @@ } else { None }; - return Some(MethodViolationCode::UndispatchableReceiver(span)); + errors.push(MethodViolationCode::UndispatchableReceiver(span)); } else { // Do sanity check to make sure the receiver actually has the layout of a pointer. @@ -584,20 +595,19 @@ // would already have reported an error at the definition of the // auto trait. if pred_trait_ref.args.len() != 1 { - tcx.sess.diagnostic().delay_span_bug( - span, - "auto traits cannot have generic parameters", - ); + tcx.sess + .diagnostic() + .delay_span_bug(span, "auto traits cannot have generic parameters"); } return false; } contains_illegal_self_type_reference(tcx, trait_def_id, pred) }) { - return Some(MethodViolationCode::WhereClauseReferencesSelf); + errors.push(MethodViolationCode::WhereClauseReferencesSelf); } - None + errors } /// Performs a type substitution to produce the version of `receiver_ty` when `Self = self_ty`. @@ -710,7 +720,6 @@ // FIXME(mikeyhew) when unsized receivers are implemented as part of unsized rvalues, add this // fallback query: `Receiver: Unsize U]>` to support receivers like // `self: Wrapper`. 
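An editorial sketch, not part of the patch: with `virtual_call_violation_for_method` becoming `virtual_call_violations_for_method` and returning every violation instead of only the first, a hypothetical trait like the one below now has all of its problems reported together, while a `Self`-referencing `where` clause on its own stays tolerated for backwards compatibility, as the new doc comment above spells out.

    trait NotDynSafe {
        const LIMIT: usize;                        // associated const: always a violation
        fn fresh() -> Self;                        // no `self` receiver: `StaticMethod`
        fn convert<T>(&self, value: T) -> T;       // generic method: `Generic`
        fn ordered(&self) where Self: PartialOrd;  // lint only: `WhereClauseReferencesSelf`
    }

    // fn take(_: &dyn NotDynSafe) {}   // error[E0038]: now lists every violation above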
-#[allow(dead_code)] fn receiver_is_dispatchable<'tcx>( tcx: TyCtxt<'tcx>, method: ty::AssocItem, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/project.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/project.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/project.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/project.rs 2023-12-21 16:55:28.000000000 +0000 @@ -783,7 +783,7 @@ // the `var` (but we *could* bring that into scope if we were to track them as we pass them). mapped_regions: BTreeMap, mapped_types: BTreeMap, - mapped_consts: BTreeMap, ty::BoundVar>, + mapped_consts: BTreeMap, // The current depth relative to *this* folding, *not* the entire normalization. In other words, // the depth of binders we've passed here. current_index: ty::DebruijnIndex, @@ -843,11 +843,11 @@ T, BTreeMap, BTreeMap, - BTreeMap, ty::BoundVar>, + BTreeMap, ) { let mapped_regions: BTreeMap = BTreeMap::new(); let mapped_types: BTreeMap = BTreeMap::new(); - let mapped_consts: BTreeMap, ty::BoundVar> = BTreeMap::new(); + let mapped_consts: BTreeMap = BTreeMap::new(); let mut replacer = BoundVarReplacer { infcx, @@ -898,7 +898,10 @@ if debruijn.as_usize() + 1 > self.current_index.as_usize() + self.universe_indices.len() => { - bug!("Bound vars outside of `self.universe_indices`"); + bug!( + "Bound vars {r:#?} outside of `self.universe_indices`: {:#?}", + self.universe_indices + ); } ty::ReLateBound(debruijn, br) if debruijn >= self.current_index => { let universe = self.universe_for(debruijn); @@ -916,7 +919,10 @@ if debruijn.as_usize() + 1 > self.current_index.as_usize() + self.universe_indices.len() => { - bug!("Bound vars outside of `self.universe_indices`"); + bug!( + "Bound vars {t:#?} outside of `self.universe_indices`: {:#?}", + self.universe_indices + ); } ty::Bound(debruijn, bound_ty) if debruijn >= self.current_index => { let universe = self.universe_for(debruijn); @@ -935,7 +941,10 @@ if debruijn.as_usize() + 1 > self.current_index.as_usize() + self.universe_indices.len() => { - bug!("Bound vars outside of `self.universe_indices`"); + bug!( + "Bound vars {ct:#?} outside of `self.universe_indices`: {:#?}", + self.universe_indices + ); } ty::ConstKind::Bound(debruijn, bound_const) if debruijn >= self.current_index => { let universe = self.universe_for(debruijn); @@ -957,7 +966,7 @@ infcx: &'me InferCtxt<'tcx>, mapped_regions: BTreeMap, mapped_types: BTreeMap, - mapped_consts: BTreeMap, ty::BoundVar>, + mapped_consts: BTreeMap, universe_indices: &'me [Option], current_index: ty::DebruijnIndex, } @@ -967,7 +976,7 @@ infcx: &'me InferCtxt<'tcx>, mapped_regions: BTreeMap, mapped_types: BTreeMap, - mapped_consts: BTreeMap, ty::BoundVar>, + mapped_consts: BTreeMap, universe_indices: &'me [Option], value: T, ) -> T { @@ -1644,7 +1653,7 @@ let env_predicates = data .projection_bounds() .filter(|bound| bound.item_def_id() == obligation.predicate.def_id) - .map(|p| ty::Clause::from_projection_clause(tcx, p.with_self_ty(tcx, object_ty))); + .map(|p| p.with_self_ty(tcx, object_ty).to_predicate(tcx)); assemble_candidates_from_predicates( selcx, @@ -1789,7 +1798,7 @@ let self_ty = selcx.infcx.shallow_resolve(obligation.predicate.self_ty()); let lang_items = selcx.tcx().lang_items(); - if [lang_items.gen_trait(), lang_items.future_trait()].contains(&Some(trait_ref.def_id)) + if [lang_items.coroutine_trait(), lang_items.future_trait(), 
lang_items.iterator_trait()].contains(&Some(trait_ref.def_id)) || selcx.tcx().fn_trait_kind_from_def_id(trait_ref.def_id).is_some() { true @@ -1811,8 +1820,8 @@ | ty::FnPtr(..) | ty::Dynamic(..) | ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(..) // Integers and floats always have `u8` as their discriminant. @@ -1860,8 +1869,8 @@ | ty::FnPtr(..) | ty::Dynamic(..) | ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Never // Extern types have unit metadata, according to RFC 2850 | ty::Foreign(_) @@ -2002,10 +2011,12 @@ ImplSource::Builtin(BuiltinImplSource::Misc, data) => { let trait_def_id = obligation.predicate.trait_def_id(selcx.tcx()); let lang_items = selcx.tcx().lang_items(); - if lang_items.gen_trait() == Some(trait_def_id) { - confirm_generator_candidate(selcx, obligation, data) + if lang_items.coroutine_trait() == Some(trait_def_id) { + confirm_coroutine_candidate(selcx, obligation, data) } else if lang_items.future_trait() == Some(trait_def_id) { confirm_future_candidate(selcx, obligation, data) + } else if lang_items.iterator_trait() == Some(trait_def_id) { + confirm_iterator_candidate(selcx, obligation, data) } else if selcx.tcx().fn_trait_kind_from_def_id(trait_def_id).is_some() { if obligation.predicate.self_ty().is_closure() { confirm_closure_candidate(selcx, obligation, data) @@ -2030,36 +2041,36 @@ } } -fn confirm_generator_candidate<'cx, 'tcx>( +fn confirm_coroutine_candidate<'cx, 'tcx>( selcx: &mut SelectionContext<'cx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, nested: Vec>, ) -> Progress<'tcx> { - let ty::Generator(_, args, _) = + let ty::Coroutine(_, args, _) = selcx.infcx.shallow_resolve(obligation.predicate.self_ty()).kind() else { unreachable!() }; - let gen_sig = args.as_generator().poly_sig(); - let Normalized { value: gen_sig, obligations } = normalize_with_depth( + let coroutine_sig = args.as_coroutine().poly_sig(); + let Normalized { value: coroutine_sig, obligations } = normalize_with_depth( selcx, obligation.param_env, obligation.cause.clone(), obligation.recursion_depth + 1, - gen_sig, + coroutine_sig, ); - debug!(?obligation, ?gen_sig, ?obligations, "confirm_generator_candidate"); + debug!(?obligation, ?coroutine_sig, ?obligations, "confirm_coroutine_candidate"); let tcx = selcx.tcx(); - let gen_def_id = tcx.require_lang_item(LangItem::Generator, None); + let coroutine_def_id = tcx.require_lang_item(LangItem::Coroutine, None); - let predicate = super::util::generator_trait_ref_and_outputs( + let predicate = super::util::coroutine_trait_ref_and_outputs( tcx, - gen_def_id, + coroutine_def_id, obligation.predicate.self_ty(), - gen_sig, + coroutine_sig, ) .map_bound(|(trait_ref, yield_ty, return_ty)| { let name = tcx.associated_item(obligation.predicate.def_id).name; @@ -2072,7 +2083,7 @@ }; ty::ProjectionPredicate { - projection_ty: tcx.mk_alias_ty(obligation.predicate.def_id, trait_ref.args), + projection_ty: ty::AliasTy::new(tcx, obligation.predicate.def_id, trait_ref.args), term: ty.into(), } }); @@ -2087,21 +2098,21 @@ obligation: &ProjectionTyObligation<'tcx>, nested: Vec>, ) -> Progress<'tcx> { - let ty::Generator(_, args, _) = + let ty::Coroutine(_, args, _) = selcx.infcx.shallow_resolve(obligation.predicate.self_ty()).kind() else { unreachable!() }; - let gen_sig = args.as_generator().poly_sig(); - let Normalized { value: gen_sig, obligations } = normalize_with_depth( + let 
coroutine_sig = args.as_coroutine().poly_sig(); + let Normalized { value: coroutine_sig, obligations } = normalize_with_depth( selcx, obligation.param_env, obligation.cause.clone(), obligation.recursion_depth + 1, - gen_sig, + coroutine_sig, ); - debug!(?obligation, ?gen_sig, ?obligations, "confirm_future_candidate"); + debug!(?obligation, ?coroutine_sig, ?obligations, "confirm_future_candidate"); let tcx = selcx.tcx(); let fut_def_id = tcx.require_lang_item(LangItem::Future, None); @@ -2110,13 +2121,13 @@ tcx, fut_def_id, obligation.predicate.self_ty(), - gen_sig, + coroutine_sig, ) .map_bound(|(trait_ref, return_ty)| { debug_assert_eq!(tcx.associated_item(obligation.predicate.def_id).name, sym::Output); ty::ProjectionPredicate { - projection_ty: tcx.mk_alias_ty(obligation.predicate.def_id, trait_ref.args), + projection_ty: ty::AliasTy::new(tcx, obligation.predicate.def_id, trait_ref.args), term: return_ty.into(), } }); @@ -2126,6 +2137,50 @@ .with_addl_obligations(obligations) } +fn confirm_iterator_candidate<'cx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'tcx>, + obligation: &ProjectionTyObligation<'tcx>, + nested: Vec>, +) -> Progress<'tcx> { + let ty::Coroutine(_, args, _) = + selcx.infcx.shallow_resolve(obligation.predicate.self_ty()).kind() + else { + unreachable!() + }; + let gen_sig = args.as_coroutine().poly_sig(); + let Normalized { value: gen_sig, obligations } = normalize_with_depth( + selcx, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth + 1, + gen_sig, + ); + + debug!(?obligation, ?gen_sig, ?obligations, "confirm_iterator_candidate"); + + let tcx = selcx.tcx(); + let iter_def_id = tcx.require_lang_item(LangItem::Iterator, None); + + let predicate = super::util::iterator_trait_ref_and_outputs( + tcx, + iter_def_id, + obligation.predicate.self_ty(), + gen_sig, + ) + .map_bound(|(trait_ref, yield_ty)| { + debug_assert_eq!(tcx.associated_item(obligation.predicate.def_id).name, sym::Item); + + ty::ProjectionPredicate { + projection_ty: ty::AliasTy::new(tcx, obligation.predicate.def_id, trait_ref.args), + term: yield_ty.into(), + } + }); + + confirm_param_env_candidate(selcx, obligation, predicate, false) + .with_addl_obligations(nested) + .with_addl_obligations(obligations) +} + fn confirm_builtin_candidate<'cx, 'tcx>( selcx: &mut SelectionContext<'cx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, @@ -2172,7 +2227,7 @@ }; let predicate = - ty::ProjectionPredicate { projection_ty: tcx.mk_alias_ty(item_def_id, args), term }; + ty::ProjectionPredicate { projection_ty: ty::AliasTy::new(tcx, item_def_id, args), term }; confirm_param_env_candidate(selcx, obligation, ty::Binder::dummy(predicate), false) .with_addl_obligations(obligations) @@ -2245,7 +2300,7 @@ flag, ) .map_bound(|(trait_ref, ret_type)| ty::ProjectionPredicate { - projection_ty: tcx.mk_alias_ty(fn_once_output_def_id, trait_ref.args), + projection_ty: ty::AliasTy::new(tcx, fn_once_output_def_id, trait_ref.args), term: ret_type.into(), }); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,7 +5,7 @@ use rustc_data_structures::fx::FxHashSet; use 
rustc_middle::traits::query::{DropckConstraint, DropckOutlivesResult}; use rustc_middle::ty::{self, EarlyBinder, ParamEnvAnd, Ty, TyCtxt}; -use rustc_span::source_map::{Span, DUMMY_SP}; +use rustc_span::{Span, DUMMY_SP}; /// This returns true if the type `ty` is "trivial" for /// dropck-outlives -- that is, if it doesn't require any types to @@ -35,7 +35,7 @@ | ty::FnDef(..) | ty::FnPtr(_) | ty::Char - | ty::GeneratorWitness(..) + | ty::CoroutineWitness(..) | ty::RawPtr(_) | ty::Ref(..) | ty::Str @@ -72,7 +72,7 @@ | ty::Placeholder(..) | ty::Infer(_) | ty::Bound(..) - | ty::Generator(..) => false, + | ty::Coroutine(..) => false, } } @@ -216,7 +216,7 @@ | ty::Ref(..) | ty::FnDef(..) | ty::FnPtr(_) - | ty::GeneratorWitness(..) => { + | ty::CoroutineWitness(..) => { // these types never have a destructor } @@ -261,22 +261,22 @@ })? } - ty::Generator(_, args, _movability) => { + ty::Coroutine(_, args, _movability) => { // rust-lang/rust#49918: types can be constructed, stored - // in the interior, and sit idle when generator yields + // in the interior, and sit idle when coroutine yields // (and is subsequently dropped). // // It would be nice to descend into interior of a - // generator to determine what effects dropping it might + // coroutine to determine what effects dropping it might // have (by looking at any drop effects associated with // its interior). // // However, the interior's representation uses things like - // GeneratorWitness that explicitly assume they are not + // CoroutineWitness that explicitly assume they are not // traversed in such a manner. So instead, we will - // simplify things for now by treating all generators as + // simplify things for now by treating all coroutines as // if they were like trait objects, where its upvars must - // all be alive for the generator's (potential) + // all be alive for the coroutine's (potential) // destructor. // // In particular, skipping over `_interior` is safe @@ -284,13 +284,13 @@ // only take place through references with lifetimes // derived from lifetimes attached to the upvars and resume // argument, and we *do* incorporate those here. - let args = args.as_generator(); + let args = args.as_coroutine(); if !args.is_valid() { // By the time this code runs, all type variables ought to // be fully resolved. tcx.sess.delay_span_bug( span, - format!("upvar_tys for generator not found. Expected capture information for generator {ty}",), + format!("upvar_tys for coroutine not found. Expected capture information for coroutine {ty}",), ); return Err(NoSolution); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/normalize.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/normalize.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/normalize.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/normalize.rs 2023-12-21 16:55:28.000000000 +0000 @@ -230,17 +230,14 @@ Reveal::All => { let args = data.args.try_fold_with(self)?; let recursion_limit = self.interner().recursion_limit(); + if !recursion_limit.value_within_limit(self.anon_depth) { - // A closure or generator may have itself as in its upvars. - // This should be checked handled by the recursion check for opaque - // types, but we may end up here before that check can happen. - // In that case, we delay a bug to mark the trip, and continue without - // revealing the opaque. 
- self.infcx + let guar = self + .infcx .err_ctxt() .build_overflow_error(&ty, self.cause.span, true) .delay_as_bug(); - return ty.try_super_fold_with(self); + return Ok(Ty::new_error(self.interner(), guar)); } let generic_ty = self.interner().type_of(data.def_id); @@ -293,7 +290,7 @@ _ => unreachable!(), }?; // We don't expect ambiguity. - if result.is_ambiguous() { + if !result.value.is_proven() { // Rustdoc normalizes possibly not well-formed types, so only // treat this as a bug if we're not in rustdoc. if !tcx.sess.opts.actually_rustdoc { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/type_op/custom.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,8 +6,7 @@ use rustc_infer::infer::region_constraints::RegionConstraintData; use rustc_middle::traits::query::NoSolution; use rustc_middle::ty::{TyCtxt, TypeFoldable}; -use rustc_span::source_map::DUMMY_SP; -use rustc_span::Span; +use rustc_span::{Span, DUMMY_SP}; use std::fmt; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/query/type_op/implied_outlives_bounds.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,7 +10,7 @@ use rustc_middle::traits::ObligationCause; use rustc_middle::ty::{self, ParamEnvAnd, Ty, TyCtxt, TypeVisitableExt}; use rustc_span::def_id::CRATE_DEF_ID; -use rustc_span::source_map::DUMMY_SP; +use rustc_span::DUMMY_SP; use smallvec::{smallvec, SmallVec}; #[derive(Copy, Clone, Debug, HashStable, TypeFoldable, TypeVisitable)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs 2023-12-21 16:55:28.000000000 +0000 @@ -52,8 +52,7 @@ let mut candidates = SelectionCandidateSet { vec: Vec::new(), ambiguous: false }; - // The only way to prove a NotImplemented(T: Foo) predicate is via a negative impl. - // There are no compiler built-in rules for this. + // Negative trait predicates have different rules than positive trait predicates. 
if obligation.polarity() == ty::ImplPolarity::Negative { self.assemble_candidates_for_trait_alias(obligation, &mut candidates); self.assemble_candidates_from_impls(obligation, &mut candidates); @@ -110,10 +109,12 @@ self.assemble_builtin_bound_candidates(clone_conditions, &mut candidates); } - if lang_items.gen_trait() == Some(def_id) { - self.assemble_generator_candidates(obligation, &mut candidates); + if lang_items.coroutine_trait() == Some(def_id) { + self.assemble_coroutine_candidates(obligation, &mut candidates); } else if lang_items.future_trait() == Some(def_id) { self.assemble_future_candidates(obligation, &mut candidates); + } else if lang_items.iterator_trait() == Some(def_id) { + self.assemble_iterator_candidates(obligation, &mut candidates); } self.assemble_closure_candidates(obligation, &mut candidates); @@ -201,25 +202,25 @@ Ok(()) } - fn assemble_generator_candidates( + fn assemble_coroutine_candidates( &mut self, obligation: &PolyTraitObligation<'tcx>, candidates: &mut SelectionCandidateSet<'tcx>, ) { - // Okay to skip binder because the args on generator types never + // Okay to skip binder because the args on coroutine types never // touch bound regions, they just capture the in-scope // type/region parameters. let self_ty = obligation.self_ty().skip_binder(); match self_ty.kind() { - // async constructs get lowered to a special kind of generator that - // should *not* `impl Generator`. - ty::Generator(did, ..) if !self.tcx().generator_is_async(*did) => { - debug!(?self_ty, ?obligation, "assemble_generator_candidates",); + // `async`/`gen` constructs get lowered to a special kind of coroutine that + // should *not* `impl Coroutine`. + ty::Coroutine(did, ..) if self.tcx().is_general_coroutine(*did) => { + debug!(?self_ty, ?obligation, "assemble_coroutine_candidates",); - candidates.vec.push(GeneratorCandidate); + candidates.vec.push(CoroutineCandidate); } ty::Infer(ty::TyVar(_)) => { - debug!("assemble_generator_candidates: ambiguous self-type"); + debug!("assemble_coroutine_candidates: ambiguous self-type"); candidates.ambiguous = true; } _ => {} @@ -232,10 +233,10 @@ candidates: &mut SelectionCandidateSet<'tcx>, ) { let self_ty = obligation.self_ty().skip_binder(); - if let ty::Generator(did, ..) = self_ty.kind() { - // async constructs get lowered to a special kind of generator that + if let ty::Coroutine(did, ..) = self_ty.kind() { + // async constructs get lowered to a special kind of coroutine that // should directly `impl Future`. - if self.tcx().generator_is_async(*did) { + if self.tcx().coroutine_is_async(*did) { debug!(?self_ty, ?obligation, "assemble_future_candidates",); candidates.vec.push(FutureCandidate); @@ -243,6 +244,23 @@ } } + fn assemble_iterator_candidates( + &mut self, + obligation: &PolyTraitObligation<'tcx>, + candidates: &mut SelectionCandidateSet<'tcx>, + ) { + let self_ty = obligation.self_ty().skip_binder(); + if let ty::Coroutine(did, ..) = self_ty.kind() { + // gen constructs get lowered to a special kind of coroutine that + // should directly `impl Iterator`. + if self.tcx().coroutine_is_gen(*did) { + debug!(?self_ty, ?obligation, "assemble_iterator_candidates",); + + candidates.vec.push(IteratorCandidate); + } + } + } + /// Checks for the artificial impl that the compiler will create for an obligation like `X : /// FnMut<..>` where `X` is a closure type. /// @@ -435,8 +453,8 @@ | ty::RawPtr(_) | ty::Ref(_, _, _) | ty::Closure(_, _) - | ty::Generator(_, _, _) - | ty::GeneratorWitness(..) 
+ | ty::Coroutine(_, _, _) + | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) | ty::Error(_) => return true, @@ -492,7 +510,7 @@ // this trait and type. } ty::Param(..) - | ty::Alias(ty::Projection | ty::Inherent, ..) + | ty::Alias(ty::Projection | ty::Inherent | ty::Weak, ..) | ty::Placeholder(..) | ty::Bound(..) => { // In these cases, we don't know what the actual @@ -513,16 +531,16 @@ // The auto impl might apply; we don't know. candidates.ambiguous = true; } - ty::Generator(_, _, movability) + ty::Coroutine(_, _, movability) if self.tcx().lang_items().unpin_trait() == Some(def_id) => { match movability { hir::Movability::Static => { - // Immovable generators are never `Unpin`, so + // Immovable coroutines are never `Unpin`, so // suppress the normal auto-impl candidate for it. } hir::Movability::Movable => { - // Movable generators are always `Unpin`, so add an + // Movable coroutines are always `Unpin`, so add an // unconditional builtin candidate. candidates.vec.push(BuiltinCandidate { has_nested: false }); } @@ -536,20 +554,25 @@ ); } - ty::Alias(_, _) - if candidates.vec.iter().any(|c| matches!(c, ProjectionCandidate(..))) => - { - // We do not generate an auto impl candidate for `impl Trait`s which already - // reference our auto trait. - // - // For example during candidate assembly for `impl Send: Send`, we don't have - // to look at the constituent types for this opaque types to figure out that this - // trivially holds. - // - // Note that this is only sound as projection candidates of opaque types - // are always applicable for auto traits. + ty::Alias(ty::Opaque, _) => { + if candidates.vec.iter().any(|c| matches!(c, ProjectionCandidate(..))) { + // We do not generate an auto impl candidate for `impl Trait`s which already + // reference our auto trait. + // + // For example during candidate assembly for `impl Send: Send`, we don't have + // to look at the constituent types for this opaque types to figure out that this + // trivially holds. + // + // Note that this is only sound as projection candidates of opaque types + // are always applicable for auto traits. + } else if self.infcx.intercrate { + // We do not emit auto trait candidates for opaque types in coherence. + // Doing so can result in weird dependency cycles. + candidates.ambiguous = true; + } else { + candidates.vec.push(AutoImplCandidate) + } } - ty::Alias(_, _) => candidates.vec.push(AutoImplCandidate), ty::Bool | ty::Char @@ -565,10 +588,10 @@ | ty::FnDef(..) | ty::FnPtr(_) | ty::Closure(_, _) - | ty::Generator(..) + | ty::Coroutine(..) | ty::Never | ty::Tuple(_) - | ty::GeneratorWitness(..) => { + | ty::CoroutineWitness(..) => { // Only consider auto impls if there are no manual impls for the root of `self_ty`. // // For example, we only consider auto candidates for `&i32: Auto` if no explicit impl @@ -605,15 +628,12 @@ } self.infcx.probe(|_snapshot| { - if obligation.has_non_region_late_bound() { - return; - } + let poly_trait_predicate = self.infcx.resolve_vars_if_possible(obligation.predicate); + let placeholder_trait_predicate = + self.infcx.instantiate_binder_with_placeholders(poly_trait_predicate); - // The code below doesn't care about regions, and the - // self-ty here doesn't escape this probe, so just erase - // any LBR. - let self_ty = self.tcx().erase_late_bound_regions(obligation.self_ty()); - let poly_trait_ref = match self_ty.kind() { + let self_ty = placeholder_trait_predicate.self_ty(); + let principal_trait_ref = match self_ty.kind() { ty::Dynamic(ref data, ..) 
=> { if data.auto_traits().any(|did| did == obligation.predicate.def_id()) { debug!( @@ -645,18 +665,14 @@ _ => return, }; - debug!(?poly_trait_ref, "assemble_candidates_from_object_ty"); - - let poly_trait_predicate = self.infcx.resolve_vars_if_possible(obligation.predicate); - let placeholder_trait_predicate = - self.infcx.instantiate_binder_with_placeholders(poly_trait_predicate); + debug!(?principal_trait_ref, "assemble_candidates_from_object_ty"); // Count only those upcast versions that match the trait-ref // we are looking for. Specifically, do not only check for the // correct trait, but also the correct type parameters. // For example, we may be trying to upcast `Foo` to `Bar`, // but `Foo` is declared as `trait Foo: Bar`. - let candidate_supertraits = util::supertraits(self.tcx(), poly_trait_ref) + let candidate_supertraits = util::supertraits(self.tcx(), principal_trait_ref) .enumerate() .filter(|&(_, upcast_trait_ref)| { self.infcx.probe(|_| { @@ -699,7 +715,7 @@ let ty = traits::normalize_projection_type( self, param_env, - tcx.mk_alias_ty(tcx.lang_items().deref_target()?, trait_ref.args), + ty::AliasTy::new(tcx, tcx.lang_items().deref_target()?, trait_ref.args), cause.clone(), 0, // We're *intentionally* throwing these away, @@ -942,9 +958,9 @@ | ty::Array(..) | ty::Slice(_) | ty::Closure(..) - | ty::Generator(..) + | ty::Coroutine(..) | ty::Tuple(_) - | ty::GeneratorWitness(..) => { + | ty::CoroutineWitness(..) => { // These are built-in, and cannot have a custom `impl const Destruct`. candidates.vec.push(ConstDestructCandidate(None)); } @@ -1016,8 +1032,8 @@ | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Generator(_, _, _) - | ty::GeneratorWitness(..) + | ty::Coroutine(_, _, _) + | ty::CoroutineWitness(..) | ty::Never | ty::Alias(..) | ty::Param(_) @@ -1059,6 +1075,7 @@ candidates: &mut SelectionCandidateSet<'tcx>, ) { let self_ty = self.infcx.shallow_resolve(obligation.self_ty()); + match self_ty.skip_binder().kind() { ty::FnPtr(_) => candidates.vec.push(BuiltinCandidate { has_nested: false }), ty::Bool @@ -1077,8 +1094,8 @@ | ty::Placeholder(..) | ty::Dynamic(..) | ty::Closure(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(..) | ty::Alias(..) 
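An editorial aside before the next file, not part of the patch: the coroutine, future and iterator candidates assembled above (and confirmed in confirmation.rs below) are all derived from a coroutine's signature, whose yield and return types feed `Coroutine::Yield`/`Coroutine::Return`, `Future::Output`, or `Iterator::Item` respectively. A nightly-only sketch, assuming the `coroutines`/`coroutine_trait` feature gates and the `std::ops` item names introduced by the generator-to-coroutine rename this release performs:

    #![feature(coroutines, coroutine_trait)]
    use std::ops::{Coroutine, CoroutineState};
    use std::pin::pin;

    fn main() {
        // A movable coroutine: yield type `u32`, return type `&'static str`.
        let mut co = pin!(|| {
            yield 1u32;
            yield 2;
            "done"
        });
        assert!(matches!(co.as_mut().resume(()), CoroutineState::Yielded(1)));
        assert!(matches!(co.as_mut().resume(()), CoroutineState::Yielded(2)));
        assert!(matches!(co.as_mut().resume(()), CoroutineState::Complete("done")));
    }

For `async` blocks the same signature drives the `Future` candidate (return type becomes `Output`), and for the new `gen`-style coroutines recognized by `coroutine_is_gen` the yield type alone drives the `Iterator` candidate via `iterator_trait_ref_and_outputs`.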
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/confirmation.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/confirmation.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/confirmation.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/confirmation.rs 2023-12-21 16:55:28.000000000 +0000 @@ -83,9 +83,9 @@ ImplSource::Builtin(BuiltinImplSource::Misc, vtable_closure) } - GeneratorCandidate => { - let vtable_generator = self.confirm_generator_candidate(obligation)?; - ImplSource::Builtin(BuiltinImplSource::Misc, vtable_generator) + CoroutineCandidate => { + let vtable_coroutine = self.confirm_coroutine_candidate(obligation)?; + ImplSource::Builtin(BuiltinImplSource::Misc, vtable_coroutine) } FutureCandidate => { @@ -93,6 +93,11 @@ ImplSource::Builtin(BuiltinImplSource::Misc, vtable_future) } + IteratorCandidate => { + let vtable_iterator = self.confirm_iterator_candidate(obligation)?; + ImplSource::Builtin(BuiltinImplSource::Misc, vtable_iterator) + } + FnPointerCandidate { is_const } => { let data = self.confirm_fn_pointer_candidate(obligation, is_const)?; ImplSource::Builtin(BuiltinImplSource::Misc, data) @@ -711,23 +716,23 @@ trait_obligations } - fn confirm_generator_candidate( + fn confirm_coroutine_candidate( &mut self, obligation: &PolyTraitObligation<'tcx>, ) -> Result>, SelectionError<'tcx>> { - // Okay to skip binder because the args on generator types never + // Okay to skip binder because the args on coroutine types never // touch bound regions, they just capture the in-scope // type/region parameters. let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder()); - let ty::Generator(generator_def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(coroutine_def_id, args, _) = *self_ty.kind() else { bug!("closure candidate for non-closure {:?}", obligation); }; - debug!(?obligation, ?generator_def_id, ?args, "confirm_generator_candidate"); + debug!(?obligation, ?coroutine_def_id, ?args, "confirm_coroutine_candidate"); - let gen_sig = args.as_generator().poly_sig(); + let coroutine_sig = args.as_coroutine().poly_sig(); - // NOTE: The self-type is a generator type and hence is + // NOTE: The self-type is a coroutine type and hence is // in fact unparameterized (or at least does not reference any // regions bound in the obligation). let self_ty = obligation @@ -736,16 +741,16 @@ .no_bound_vars() .expect("unboxed closure type should not capture bound vars from the predicate"); - let trait_ref = super::util::generator_trait_ref_and_outputs( + let trait_ref = super::util::coroutine_trait_ref_and_outputs( self.tcx(), obligation.predicate.def_id(), self_ty, - gen_sig, + coroutine_sig, ) .map_bound(|(trait_ref, ..)| trait_ref); let nested = self.confirm_poly_trait_refs(obligation, trait_ref)?; - debug!(?trait_ref, ?nested, "generator candidate obligations"); + debug!(?trait_ref, ?nested, "coroutine candidate obligations"); Ok(nested) } @@ -754,23 +759,23 @@ &mut self, obligation: &PolyTraitObligation<'tcx>, ) -> Result>, SelectionError<'tcx>> { - // Okay to skip binder because the args on generator types never + // Okay to skip binder because the args on coroutine types never // touch bound regions, they just capture the in-scope // type/region parameters. 
let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder()); - let ty::Generator(generator_def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(coroutine_def_id, args, _) = *self_ty.kind() else { bug!("closure candidate for non-closure {:?}", obligation); }; - debug!(?obligation, ?generator_def_id, ?args, "confirm_future_candidate"); + debug!(?obligation, ?coroutine_def_id, ?args, "confirm_future_candidate"); - let gen_sig = args.as_generator().poly_sig(); + let coroutine_sig = args.as_coroutine().poly_sig(); let trait_ref = super::util::future_trait_ref_and_outputs( self.tcx(), obligation.predicate.def_id(), obligation.predicate.no_bound_vars().expect("future has no bound vars").self_ty(), - gen_sig, + coroutine_sig, ) .map_bound(|(trait_ref, ..)| trait_ref); @@ -780,6 +785,36 @@ Ok(nested) } + fn confirm_iterator_candidate( + &mut self, + obligation: &PolyTraitObligation<'tcx>, + ) -> Result>, SelectionError<'tcx>> { + // Okay to skip binder because the args on coroutine types never + // touch bound regions, they just capture the in-scope + // type/region parameters. + let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder()); + let ty::Coroutine(coroutine_def_id, args, _) = *self_ty.kind() else { + bug!("closure candidate for non-closure {:?}", obligation); + }; + + debug!(?obligation, ?coroutine_def_id, ?args, "confirm_iterator_candidate"); + + let gen_sig = args.as_coroutine().poly_sig(); + + let trait_ref = super::util::iterator_trait_ref_and_outputs( + self.tcx(), + obligation.predicate.def_id(), + obligation.predicate.no_bound_vars().expect("iterator has no bound vars").self_ty(), + gen_sig, + ) + .map_bound(|(trait_ref, ..)| trait_ref); + + let nested = self.confirm_poly_trait_refs(obligation, trait_ref)?; + debug!(?trait_ref, ?nested, "iterator candidate obligations"); + + Ok(nested) + } + #[instrument(skip(self), level = "debug")] fn confirm_closure_candidate( &mut self, @@ -1234,13 +1269,13 @@ ty::Closure(_, args) => { stack.push(args.as_closure().tupled_upvars_ty()); } - ty::Generator(_, args, _) => { - let generator = args.as_generator(); - stack.extend([generator.tupled_upvars_ty(), generator.witness()]); + ty::Coroutine(_, args, _) => { + let coroutine = args.as_coroutine(); + stack.extend([coroutine.tupled_upvars_ty(), coroutine.witness()]); } - ty::GeneratorWitness(def_id, args) => { + ty::CoroutineWitness(def_id, args) => { let tcx = self.tcx(); - stack.extend(tcx.generator_hidden_types(def_id).map(|bty| { + stack.extend(tcx.coroutine_hidden_types(def_id).map(|bty| { let ty = bty.instantiate(tcx, args); debug_assert!(!ty.has_late_bound_regions()); ty diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/select/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -38,6 +38,7 @@ use rustc_middle::dep_graph::dep_kinds; use rustc_middle::dep_graph::DepNodeIndex; use rustc_middle::mir::interpret::ErrorHandled; +use rustc_middle::ty::_match::MatchAgainstFreshVars; use rustc_middle::ty::abstract_const::NotConstEvaluatable; use rustc_middle::ty::fold::BottomUpFolder; use rustc_middle::ty::relate::TypeRelation; @@ -858,7 +859,8 @@ subobligations, ); if let Ok(eval_rslt) = res - && (eval_rslt == EvaluatedToOk || 
eval_rslt == EvaluatedToOkModuloRegions) + && (eval_rslt == EvaluatedToOk + || eval_rslt == EvaluatedToOkModuloRegions) && let Some(key) = ProjectionCacheKey::from_poly_projection_predicate( self, data, @@ -1884,8 +1886,9 @@ ImplCandidate(..) | AutoImplCandidate | ClosureCandidate { .. } - | GeneratorCandidate + | CoroutineCandidate | FutureCandidate + | IteratorCandidate | FnPointerCandidate { .. } | BuiltinObjectCandidate | BuiltinUnsizeCandidate @@ -1912,8 +1915,9 @@ ImplCandidate(_) | AutoImplCandidate | ClosureCandidate { .. } - | GeneratorCandidate + | CoroutineCandidate | FutureCandidate + | IteratorCandidate | FnPointerCandidate { .. } | BuiltinObjectCandidate | BuiltinUnsizeCandidate @@ -1946,8 +1950,9 @@ ImplCandidate(..) | AutoImplCandidate | ClosureCandidate { .. } - | GeneratorCandidate + | CoroutineCandidate | FutureCandidate + | IteratorCandidate | FnPointerCandidate { .. } | BuiltinObjectCandidate | BuiltinUnsizeCandidate @@ -1960,8 +1965,9 @@ ImplCandidate(..) | AutoImplCandidate | ClosureCandidate { .. } - | GeneratorCandidate + | CoroutineCandidate | FutureCandidate + | IteratorCandidate | FnPointerCandidate { .. } | BuiltinObjectCandidate | BuiltinUnsizeCandidate @@ -2066,8 +2072,9 @@ ( ImplCandidate(_) | ClosureCandidate { .. } - | GeneratorCandidate + | CoroutineCandidate | FutureCandidate + | IteratorCandidate | FnPointerCandidate { .. } | BuiltinObjectCandidate | BuiltinUnsizeCandidate @@ -2076,8 +2083,9 @@ | TraitAliasCandidate, ImplCandidate(_) | ClosureCandidate { .. } - | GeneratorCandidate + | CoroutineCandidate | FutureCandidate + | IteratorCandidate | FnPointerCandidate { .. } | BuiltinObjectCandidate | BuiltinUnsizeCandidate @@ -2110,8 +2118,8 @@ | ty::RawPtr(..) | ty::Char | ty::Ref(..) - | ty::Generator(..) - | ty::GeneratorWitness(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Array(..) | ty::Closure(..) | ty::Never @@ -2178,7 +2186,7 @@ ty::Dynamic(..) | ty::Str | ty::Slice(..) - | ty::Generator(_, _, hir::Movability::Static) + | ty::Coroutine(_, _, hir::Movability::Static) | ty::Foreign(..) | ty::Ref(_, _, hir::Mutability::Mut) => None, @@ -2187,21 +2195,21 @@ Where(obligation.predicate.rebind(tys.iter().collect())) } - ty::Generator(_, args, hir::Movability::Movable) => { - if self.tcx().features().generator_clone { + ty::Coroutine(_, args, hir::Movability::Movable) => { + if self.tcx().features().coroutine_clone { let resolved_upvars = - self.infcx.shallow_resolve(args.as_generator().tupled_upvars_ty()); + self.infcx.shallow_resolve(args.as_coroutine().tupled_upvars_ty()); let resolved_witness = - self.infcx.shallow_resolve(args.as_generator().witness()); + self.infcx.shallow_resolve(args.as_coroutine().witness()); if resolved_upvars.is_ty_var() || resolved_witness.is_ty_var() { // Not yet resolved. 
Ambiguous } else { let all = args - .as_generator() + .as_coroutine() .upvar_tys() .iter() - .chain([args.as_generator().witness()]) + .chain([args.as_coroutine().witness()]) .collect::>(); Where(obligation.predicate.rebind(all)) } @@ -2210,8 +2218,8 @@ } } - ty::GeneratorWitness(def_id, ref args) => { - let hidden_types = bind_generator_hidden_types_above( + ty::CoroutineWitness(def_id, ref args) => { + let hidden_types = bind_coroutine_hidden_types_above( self.infcx, def_id, args, @@ -2309,14 +2317,14 @@ t.rebind(vec![ty]) } - ty::Generator(_, ref args, _) => { - let ty = self.infcx.shallow_resolve(args.as_generator().tupled_upvars_ty()); - let witness = args.as_generator().witness(); + ty::Coroutine(_, ref args, _) => { + let ty = self.infcx.shallow_resolve(args.as_coroutine().tupled_upvars_ty()); + let witness = args.as_coroutine().witness(); t.rebind([ty].into_iter().chain(iter::once(witness)).collect()) } - ty::GeneratorWitness(def_id, ref args) => { - bind_generator_hidden_types_above(self.infcx, def_id, args, t.bound_vars()) + ty::CoroutineWitness(def_id, ref args) => { + bind_coroutine_hidden_types_above(self.infcx, def_id, args, t.bound_vars()) } // For `PhantomData`, we pass `T`. @@ -2381,12 +2389,21 @@ ) }); - let obligation = Obligation::new( - self.tcx(), - cause.clone(), - param_env, - ty::TraitRef::new(self.tcx(), trait_def_id, [normalized_ty]), - ); + let tcx = self.tcx(); + let trait_ref = if tcx.generics_of(trait_def_id).params.len() == 1 { + ty::TraitRef::new(tcx, trait_def_id, [normalized_ty]) + } else { + // If this is an ill-formed auto/built-in trait, then synthesize + // new error args for the missing generics. + let err_args = ty::GenericArgs::extend_with_error( + tcx, + trait_def_id, + &[normalized_ty.into()], + ); + ty::TraitRef::new(tcx, trait_def_id, err_args) + }; + + let obligation = Obligation::new(self.tcx(), cause.clone(), param_env, trait_ref); obligations.push(obligation); obligations }) @@ -2623,7 +2640,7 @@ current: ty::PolyTraitPredicate<'tcx>, param_env: ty::ParamEnv<'tcx>, ) -> bool { - let mut matcher = ty::_match::Match::new(self.tcx(), param_env); + let mut matcher = MatchAgainstFreshVars::new(self.tcx(), param_env); matcher.relate(previous, current).is_ok() } @@ -3052,12 +3069,12 @@ No, } -/// Replace all regions inside the generator interior with late bound regions. +/// Replace all regions inside the coroutine interior with late bound regions. /// Note that each region slot in the types gets a new fresh late bound region, which means that /// none of the regions inside relate to any other, even if typeck had previously found constraints /// that would cause them to be related. #[instrument(level = "trace", skip(infcx), ret)] -fn bind_generator_hidden_types_above<'tcx>( +fn bind_coroutine_hidden_types_above<'tcx>( infcx: &InferCtxt<'tcx>, def_id: DefId, args: ty::GenericArgsRef<'tcx>, @@ -3072,7 +3089,7 @@ let mut counter = num_bound_variables; let hidden_types: Vec<_> = tcx - .generator_hidden_types(def_id) + .coroutine_hidden_types(def_id) // Deduplicate tys to avoid repeated work. 
.filter(|bty| seen_tys.insert(*bty)) .map(|mut bty| { @@ -3096,9 +3113,6 @@ bty.instantiate(tcx, args) }) .collect(); - if considering_regions { - debug_assert!(!hidden_types.has_erased_regions()); - } let bound_vars = tcx.mk_bound_variable_kinds_from_iter(bound_vars.iter().chain( (num_bound_variables..counter).map(|_| ty::BoundVariableKind::Region(ty::BrAnon)), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/structural_match.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/structural_match.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/structural_match.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/structural_match.rs 2023-12-21 16:55:28.000000000 +0000 @@ -79,7 +79,7 @@ ty::Closure(..) => { return ControlFlow::Break(ty); } - ty::Generator(..) | ty::GeneratorWitness(..) => { + ty::Coroutine(..) | ty::CoroutineWitness(..) => { return ControlFlow::Break(ty); } ty::FnDef(..) => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/util.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/util.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/util.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/util.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,7 +9,7 @@ use rustc_span::Span; use smallvec::SmallVec; -pub use rustc_infer::traits::{self, util::*}; +pub use rustc_infer::traits::util::*; /////////////////////////////////////////////////////////////////////////// // `TraitAliasExpander` iterator @@ -275,7 +275,7 @@ sig.map_bound(|sig| (trait_ref, sig.output())) } -pub fn generator_trait_ref_and_outputs<'tcx>( +pub fn coroutine_trait_ref_and_outputs<'tcx>( tcx: TyCtxt<'tcx>, fn_trait_def_id: DefId, self_ty: Ty<'tcx>, @@ -297,6 +297,17 @@ sig.map_bound(|sig| (trait_ref, sig.return_ty)) } +pub fn iterator_trait_ref_and_outputs<'tcx>( + tcx: TyCtxt<'tcx>, + iterator_def_id: DefId, + self_ty: Ty<'tcx>, + sig: ty::PolyGenSig<'tcx>, +) -> ty::Binder<'tcx, (ty::TraitRef<'tcx>, Ty<'tcx>)> { + assert!(!self_ty.has_escaping_bound_vars()); + let trait_ref = ty::TraitRef::new(tcx, iterator_def_id, [self_ty]); + sig.map_bound(|sig| (trait_ref, sig.yield_ty)) +} + pub fn impl_item_is_final(tcx: TyCtxt<'_>, assoc_item: &ty::AssocItem) -> bool { assoc_item.defaultness(tcx).is_final() && tcx.defaultness(assoc_item.container_id(tcx)).is_final() @@ -326,7 +337,8 @@ if let Some(parent) = generics.parent && let parent_generics = tcx.generics_of(parent) - && !check_args_compatible_inner(tcx, parent_generics, parent_args) { + && !check_args_compatible_inner(tcx, parent_generics, parent_args) + { return false; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/vtable.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/vtable.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/vtable.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/vtable.rs 2023-12-21 16:55:28.000000000 +0000 @@ -316,7 +316,7 @@ dump_vtable_entries(tcx, sp, trait_ref, &entries); } - tcx.arena.alloc_from_iter(entries.into_iter()) + tcx.arena.alloc_from_iter(entries) } /// Find slot base for trait methods within vtable entries of another trait diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/wf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/wf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/wf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_trait_selection/src/traits/wf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -105,13 +105,13 @@ /// Returns the obligations that make this trait reference /// well-formed. For example, if there is a trait `Set` defined like -/// `trait Set`, then the trait reference `Foo: Set` is WF +/// `trait Set`, then the trait bound `Foo: Set` is WF /// if `Bar: Eq`. pub fn trait_obligations<'tcx>( infcx: &InferCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, body_id: LocalDefId, - trait_pred: &ty::TraitPredicate<'tcx>, + trait_pred: ty::TraitPredicate<'tcx>, span: Span, item: &'tcx hir::Item<'tcx>, ) -> Vec> { @@ -129,12 +129,17 @@ wf.normalize(infcx) } +/// Returns the requirements for `clause` to be well-formed. +/// +/// For example, if there is a trait `Set` defined like +/// `trait Set`, then the trait bound `Foo: Set` is WF +/// if `Bar: Eq`. #[instrument(skip(infcx), ret)] -pub fn predicate_obligations<'tcx>( +pub fn clause_obligations<'tcx>( infcx: &InferCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, body_id: LocalDefId, - predicate: ty::Predicate<'tcx>, + clause: ty::Clause<'tcx>, span: Span, ) -> Vec> { let mut wf = WfPredicates { @@ -148,45 +153,32 @@ }; // It's ok to skip the binder here because wf code is prepared for it - match predicate.kind().skip_binder() { - ty::PredicateKind::Clause(ty::ClauseKind::Trait(t)) => { - wf.compute_trait_pred(&t, Elaborate::None); - } - ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(..)) => {} - ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate( - ty, - _reg, - ))) => { + match clause.kind().skip_binder() { + ty::ClauseKind::Trait(t) => { + wf.compute_trait_pred(t, Elaborate::None); + } + ty::ClauseKind::RegionOutlives(..) => {} + ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(ty, _reg)) => { wf.compute(ty.into()); } - ty::PredicateKind::Clause(ty::ClauseKind::Projection(t)) => { + ty::ClauseKind::Projection(t) => { wf.compute_projection(t.projection_ty); wf.compute(match t.term.unpack() { ty::TermKind::Ty(ty) => ty.into(), ty::TermKind::Const(c) => c.into(), }) } - ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => { + ty::ClauseKind::ConstArgHasType(ct, ty) => { wf.compute(ct.into()); wf.compute(ty.into()); } - ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => { + ty::ClauseKind::WellFormed(arg) => { wf.compute(arg); } - ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(ct)) => { + ty::ClauseKind::ConstEvaluatable(ct) => { wf.compute(ct.into()); } - - ty::PredicateKind::ObjectSafe(_) - | ty::PredicateKind::ClosureKind(..) - | ty::PredicateKind::Subtype(..) - | ty::PredicateKind::Coerce(..) - | ty::PredicateKind::ConstEquate(..) - | ty::PredicateKind::Ambiguous - | ty::PredicateKind::AliasRelate(..) => { - bug!("We should only wf check where clauses, unexpected predicate: {predicate:?}") - } } wf.normalize(infcx) @@ -233,7 +225,7 @@ fn extend_cause_with_original_assoc_item_obligation<'tcx>( tcx: TyCtxt<'tcx>, - trait_ref: &ty::TraitRef<'tcx>, + trait_ref: ty::TraitRef<'tcx>, item: Option<&hir::Item<'tcx>>, cause: &mut traits::ObligationCause<'tcx>, pred: ty::Predicate<'tcx>, @@ -262,7 +254,8 @@ // projection coming from another associated type. 
See // `tests/ui/associated-types/point-at-type-on-obligation-failure.rs` and // `traits-assoc-type-in-supertrait-bad.rs`. - if let Some(ty::Alias(ty::Projection, projection_ty)) = proj.term.ty().map(|ty| ty.kind()) + if let Some(ty::Alias(ty::Projection, projection_ty)) = + proj.term.ty().map(|ty| ty.kind()) && let Some(&impl_item_id) = tcx.impl_item_implementor_ids(impl_def_id).get(&projection_ty.def_id) && let Some(impl_item_span) = items @@ -278,8 +271,7 @@ // can be seen in `ui/associated-types/point-at-type-on-obligation-failure-2.rs`. debug!("extended_cause_with_original_assoc_item_obligation trait proj {:?}", pred); if let ty::Alias(ty::Projection, ty::AliasTy { def_id, .. }) = *pred.self_ty().kind() - && let Some(&impl_item_id) = - tcx.impl_item_implementor_ids(impl_def_id).get(&def_id) + && let Some(&impl_item_id) = tcx.impl_item_implementor_ids(impl_def_id).get(&def_id) && let Some(impl_item_span) = items .iter() .find(|item| item.id.owner_id.to_def_id() == impl_item_id) @@ -336,9 +328,9 @@ } /// Pushes the obligations required for `trait_ref` to be WF into `self.out`. - fn compute_trait_pred(&mut self, trait_pred: &ty::TraitPredicate<'tcx>, elaborate: Elaborate) { + fn compute_trait_pred(&mut self, trait_pred: ty::TraitPredicate<'tcx>, elaborate: Elaborate) { let tcx = self.tcx(); - let trait_ref = &trait_pred.trait_ref; + let trait_ref = trait_pred.trait_ref; // Negative trait predicates don't require supertraits to hold, just // that their args are WF. @@ -411,7 +403,7 @@ // Compute the obligations that are required for `trait_ref` to be WF, // given that it is a *negative* trait predicate. - fn compute_negative_trait_pred(&mut self, trait_ref: &ty::TraitRef<'tcx>) { + fn compute_negative_trait_pred(&mut self, trait_ref: ty::TraitRef<'tcx>) { for arg in trait_ref.args { self.compute(arg); } @@ -608,7 +600,7 @@ | ty::Float(..) | ty::Error(_) | ty::Str - | ty::GeneratorWitness(..) + | ty::CoroutineWitness(..) | ty::Never | ty::Param(_) | ty::Bound(..) @@ -680,14 +672,14 @@ } } - ty::Generator(did, args, ..) => { - // Walk ALL the types in the generator: this will + ty::Coroutine(did, args, ..) => { + // Walk ALL the types in the coroutine: this will // include the upvar types as well as the yield // type. Note that this is mildly distinct from // the closure case, where we have to be careful // about the signature of the closure. We don't // have the problem of implied bounds here since - // generators don't take arguments. + // coroutines don't take arguments. 
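An editorial illustration, not part of the patch, of the well-formedness rule described by the doc comments on `trait_obligations` and the new `clause_obligations` in this wf.rs hunk (their generic parameters are stripped in this copy of the diff; upstream the running example is a trait along the lines of `trait Set<K: Eq>`): a bound is only well-formed if the arguments it supplies satisfy the trait's own parameter bounds.

    trait Set<K: Eq> {
        fn contains(&self, key: &K) -> bool;
    }

    struct Bar; // deliberately does not implement `Eq`

    // The bound `T: Set<Bar>` is not well-formed, because it would require `Bar: Eq`:
    // fn rejected<T: Set<Bar>>() {}   // error[E0277]: the trait bound `Bar: Eq` is not satisfied

    fn accepted<T: Set<u32>>() {}      // fine: `u32: Eq` holds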
let obligations = self.nominal_obligations(did, args); self.out.extend(obligations); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_traits/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_traits/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_traits/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_traits/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,10 +4,12 @@ edition = "2021" [dependencies] -tracing = "0.1" -rustc_middle = { path = "../rustc_middle" } +# tidy-alphabetical-start rustc_data_structures = { path = "../rustc_data_structures" } rustc_hir = { path = "../rustc_hir" } -rustc_span = { path = "../rustc_span" } rustc_infer = { path = "../rustc_infer" } +rustc_middle = { path = "../rustc_middle" } +rustc_span = { path = "../rustc_span" } rustc_trait_selection = { path = "../rustc_trait_selection" } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_traits/src/evaluate_obligation.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_traits/src/evaluate_obligation.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_traits/src/evaluate_obligation.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_traits/src/evaluate_obligation.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,7 +1,7 @@ use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::query::Providers; use rustc_middle::ty::{ParamEnvAnd, TyCtxt}; -use rustc_span::source_map::DUMMY_SP; +use rustc_span::DUMMY_SP; use rustc_trait_selection::traits::query::CanonicalPredicateGoal; use rustc_trait_selection::traits::{ EvaluationResult, Obligation, ObligationCause, OverflowError, SelectionContext, TraitQueryMode, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -1,12 +1,10 @@ [package] name = "rustc_transmute" -version = "0.1.0" +version = "0.0.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -tracing = "0.1" +# tidy-alphabetical-start rustc_data_structures = { path = "../rustc_data_structures"} rustc_hir = { path = "../rustc_hir", optional = true} rustc_infer = { path = "../rustc_infer", optional = true} @@ -14,16 +12,20 @@ rustc_middle = { path = "../rustc_middle", optional = true} rustc_span = { path = "../rustc_span", optional = true} rustc_target = { path = "../rustc_target", optional = true} +tracing = "0.1" +# tidy-alphabetical-end [features] rustc = [ - "rustc_middle", "rustc_hir", "rustc_infer", "rustc_macros", + "rustc_middle", "rustc_span", "rustc_target", ] [dev-dependencies] -itertools = "0.10.1" \ No newline at end of file +# tidy-alphabetical-start +itertools = "0.10.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,7 +9,7 @@ pub(crate) use rustc_data_structures::fx::{FxIndexMap as Map, FxIndexSet as Set}; pub mod 
layout; -pub(crate) mod maybe_transmutable; +mod maybe_transmutable; #[derive(Default)] pub struct Assume { @@ -19,7 +19,7 @@ pub validity: bool, } -/// Either we have an error, transmutation is allowed, or we have an optional +/// Either transmutation is allowed, we have an error, or we have an optional /// Condition that must hold. #[derive(Debug, Hash, Eq, PartialEq, Clone)] pub enum Answer { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/src/maybe_transmutable/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/src/maybe_transmutable/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/src/maybe_transmutable/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_transmute/src/maybe_transmutable/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -32,26 +32,6 @@ ) -> Self { Self { src, dst, scope, assume, context } } - - // FIXME(bryangarza): Delete this when all usages are removed - pub(crate) fn map_layouts( - self, - f: F, - ) -> Result, Answer<::Ref>> - where - F: FnOnce( - L, - L, - ::Scope, - &C, - ) -> Result<(M, M), Answer<::Ref>>, - { - let Self { src, dst, scope, assume, context } = self; - - let (src, dst) = f(src, dst, scope, &context)?; - - Ok(MaybeTransmutableQuery { src, dst, scope, assume, context }) - } } // FIXME: Nix this cfg, so we can write unit tests independently of rustc @@ -107,42 +87,42 @@ #[instrument(level = "debug", skip(self), fields(src = ?self.src, dst = ?self.dst))] pub(crate) fn answer(self) -> Answer<::Ref> { let assume_visibility = self.assume.safety; - // FIXME(bryangarza): Refactor this code to get rid of `map_layouts` - let query_or_answer = self.map_layouts(|src, dst, scope, context| { - // Remove all `Def` nodes from `src`, without checking their visibility. - let src = src.prune(&|def| true); - - trace!(?src, "pruned src"); - - // Remove all `Def` nodes from `dst`, additionally... - let dst = if assume_visibility { - // ...if visibility is assumed, don't check their visibility. - dst.prune(&|def| true) - } else { - // ...otherwise, prune away all unreachable paths through the `Dst` layout. - dst.prune(&|def| context.is_accessible_from(def, scope)) - }; - - trace!(?dst, "pruned dst"); - - // Convert `src` from a tree-based representation to an NFA-based representation. - // If the conversion fails because `src` is uninhabited, conclude that the transmutation - // is acceptable, because instances of the `src` type do not exist. - let src = Nfa::from_tree(src).map_err(|Uninhabited| Answer::Yes)?; - - // Convert `dst` from a tree-based representation to an NFA-based representation. - // If the conversion fails because `src` is uninhabited, conclude that the transmutation - // is unacceptable, because instances of the `dst` type do not exist. - let dst = - Nfa::from_tree(dst).map_err(|Uninhabited| Answer::No(Reason::DstIsPrivate))?; - - Ok((src, dst)) - }); - - match query_or_answer { - Ok(query) => query.answer(), - Err(answer) => answer, - } + + let Self { src, dst, scope, assume, context } = self; + + // Remove all `Def` nodes from `src`, without checking their visibility. + let src = src.prune(&|def| true); + + trace!(?src, "pruned src"); + + // Remove all `Def` nodes from `dst`, additionally... + let dst = if assume_visibility { + // ...if visibility is assumed, don't check their visibility. + dst.prune(&|def| true) + } else { + // ...otherwise, prune away all unreachable paths through the `Dst` layout. 
+ dst.prune(&|def| context.is_accessible_from(def, scope)) + }; + + trace!(?dst, "pruned dst"); + + // Convert `src` from a tree-based representation to an NFA-based representation. + // If the conversion fails because `src` is uninhabited, conclude that the transmutation + // is acceptable, because instances of the `src` type do not exist. + let src = match Nfa::from_tree(src) { + Ok(src) => src, + Err(Uninhabited) => return Answer::Yes, + }; + + // Convert `dst` from a tree-based representation to an NFA-based representation. + // If the conversion fails because `src` is uninhabited, conclude that the transmutation + // is unacceptable, because instances of the `dst` type do not exist. + let dst = match Nfa::from_tree(dst) { + Ok(dst) => dst, + Err(Uninhabited) => return Answer::No(Reason::DstIsPrivate), + }; + + MaybeTransmutableQuery { src, dst, scope, assume, context }.answer() } } @@ -156,14 +136,10 @@ #[inline(always)] #[instrument(level = "debug", skip(self), fields(src = ?self.src, dst = ?self.dst))] pub(crate) fn answer(self) -> Answer<::Ref> { - // FIXME(bryangarza): Refactor this code to get rid of `map_layouts` - let query_or_answer = self - .map_layouts(|src, dst, scope, context| Ok((Dfa::from_nfa(src), Dfa::from_nfa(dst)))); - - match query_or_answer { - Ok(query) => query.answer(), - Err(answer) => answer, - } + let Self { src, dst, scope, assume, context } = self; + let src = Dfa::from_nfa(src); + let dst = Dfa::from_nfa(dst); + MaybeTransmutableQuery { src, dst, scope, assume, context }.answer() } } @@ -171,26 +147,8 @@ where C: QueryContext, { - /// Answers whether a `Nfa` is transmutable into another `Nfa`. - /// - /// This method converts `src` and `dst` to DFAs, then computes an answer using those DFAs. + /// Answers whether a `Dfa` is transmutable into another `Dfa`. 
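The rewrite above replaces the `map_layouts` closure combinator with plain destructuring and early returns. A minimal standalone sketch of that pattern, using toy stand-ins (`Tree`, `Nfa`, `Query`, `Answer`) rather than rustc's real transmutability types:

#[derive(Debug, PartialEq)]
enum Answer {
    Yes,
    No,
}

struct Tree {
    uninhabited: bool,
}

struct Nfa;

impl Nfa {
    // Mirrors `Nfa::from_tree`: fails when the layout has no inhabitants.
    fn from_tree(t: Tree) -> Result<Nfa, ()> {
        if t.uninhabited { Err(()) } else { Ok(Nfa) }
    }
}

struct Query {
    src: Tree,
    dst: Tree,
}

impl Query {
    // Before: the conversions lived in a closure handed to a `map_layouts`
    // combinator and the caller matched on `Result<Query, Answer>`.
    // After: destructure `self` and return early, reading top to bottom.
    fn answer(self) -> Answer {
        let Query { src, dst } = self;
        let _src = match Nfa::from_tree(src) {
            Ok(nfa) => nfa,
            Err(()) => return Answer::Yes, // no values of `src` exist
        };
        let _dst = match Nfa::from_tree(dst) {
            Ok(nfa) => nfa,
            Err(()) => return Answer::No, // no values of `dst` exist
        };
        // (the real query would now go on to compare the two automata)
        Answer::Yes
    }
}

fn main() {
    let q = Query { src: Tree { uninhabited: true }, dst: Tree { uninhabited: false } };
    assert_eq!(q.answer(), Answer::Yes);
}

The early returns encode the same reasoning as the comments in the hunk: an uninhabited source makes any transmutation vacuously acceptable, while an uninhabited (or inaccessible) destination makes it impossible.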
pub(crate) fn answer(self) -> Answer<::Ref> { - MaybeTransmutableQuery { - src: &self.src, - dst: &self.dst, - scope: self.scope, - assume: self.assume, - context: self.context, - } - .answer() - } -} - -impl<'l, C> MaybeTransmutableQuery<&'l Dfa<::Ref>, C> -where - C: QueryContext, -{ - pub(crate) fn answer(&mut self) -> Answer<::Ref> { self.answer_memo(&mut Map::default(), self.src.start, self.dst.start) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -4,18 +4,20 @@ edition = "2021" [dependencies] -tracing = "0.1" +# tidy-alphabetical-start itertools = "0.10.1" -rustc_middle = { path = "../rustc_middle" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } -rustc_hir = { path = "../rustc_hir" } rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } +rustc_index = { path = "../rustc_index" } rustc_infer = { path = "../rustc_infer" } rustc_macros = { path = "../rustc_macros" } -rustc_span = { path = "../rustc_span" } +rustc_middle = { path = "../rustc_middle" } rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_type_ir = { path = "../rustc_type_ir" } -rustc_index = { path = "../rustc_index" } +tracing = "0.1" +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/messages.ftl rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/messages.ftl --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/messages.ftl 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/messages.ftl 2023-12-21 16:55:28.000000000 +0000 @@ -60,6 +60,6 @@ ty_utils_unexpected_fnptr_associated_item = `FnPtr` trait with unexpected associated item -ty_utils_yield_not_supported = generator control flow is not allowed in generic constants +ty_utils_yield_not_supported = coroutine control flow is not allowed in generic constants ty_utils_zero_length_simd_type = monomorphising SIMD type `{$ty}` of zero length diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/abi.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/abi.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/abi.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/abi.rs 2023-12-21 16:55:28.000000000 +0000 @@ -97,8 +97,8 @@ bound_vars, ) } - ty::Generator(did, args, _) => { - let sig = args.as_generator().poly_sig(); + ty::Coroutine(did, args, _) => { + let sig = args.as_coroutine().poly_sig(); let bound_vars = tcx.mk_bound_variable_kinds_from_iter( sig.bound_vars().iter().chain(iter::once(ty::BoundVariableKind::Region(ty::BrEnv))), @@ -116,11 +116,11 @@ let env_ty = Ty::new_adt(tcx, pin_adt_ref, pin_args); let sig = sig.skip_binder(); - // The `FnSig` and the `ret_ty` here is for a generators main - // `Generator::resume(...) -> GeneratorState` function in case we - // have an ordinary generator, or the `Future::poll(...) -> Poll` - // function in case this is a special generator backing an async construct. 
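The comment above contrasts the two resume shapes: `Future::poll(..) -> Poll<T>` for async coroutines and `Coroutine::resume(..) -> CoroutineState<Y, R>` otherwise. A toy illustration of why a future is just a coroutine whose yields carry no data (the `State` and `Poll` enums below are stand-ins, not the real library types):

// Toy stand-ins, not the real std/rustc definitions.
enum State<Y, R> {
    Yielded(Y),
    Complete(R),
}

enum Poll<T> {
    Ready(T),
    Pending,
}

// A future is morally a coroutine whose yields carry no payload: "not ready yet".
fn poll_from_state<T>(s: State<(), T>) -> Poll<T> {
    match s {
        State::Yielded(()) => Poll::Pending,
        State::Complete(t) => Poll::Ready(t),
    }
}

fn main() {
    assert!(matches!(poll_from_state(State::<(), i32>::Complete(7)), Poll::Ready(7)));
    assert!(matches!(poll_from_state(State::<(), i32>::Yielded(())), Poll::Pending));
}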
- let (resume_ty, ret_ty) = if tcx.generator_is_async(did) { + // The `FnSig` and the `ret_ty` here is for a coroutines main + // `Coroutine::resume(...) -> CoroutineState` function in case we + // have an ordinary coroutine, or the `Future::poll(...) -> Poll` + // function in case this is a special coroutine backing an async construct. + let (resume_ty, ret_ty) = if tcx.coroutine_is_async(did) { // The signature should be `Future::poll(_, &mut Context<'_>) -> Poll` let poll_did = tcx.require_lang_item(LangItem::Poll, None); let poll_adt_ref = tcx.adt_def(poll_did); @@ -143,8 +143,8 @@ (context_mut_ref, ret_ty) } else { - // The signature should be `Generator::resume(_, Resume) -> GeneratorState` - let state_did = tcx.require_lang_item(LangItem::GeneratorState, None); + // The signature should be `Coroutine::resume(_, Resume) -> CoroutineState` + let state_did = tcx.require_lang_item(LangItem::CoroutineState, None); let state_adt_ref = tcx.adt_def(state_did); let state_args = tcx.mk_args(&[sig.yield_ty.into(), sig.return_ty.into()]); let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args); @@ -528,7 +528,7 @@ arg.make_indirect(); } else { // We want to pass small aggregates as immediates, but using - // a LLVM aggregate type for this leads to bad optimizations, + // an LLVM aggregate type for this leads to bad optimizations, // so we pick an appropriately sized integer type instead. arg.cast_to(Reg { kind: RegKind::Integer, size }); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/assoc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/assoc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/assoc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/assoc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -192,7 +192,8 @@ if let hir::TyKind::OpaqueDef(item_id, _, _) = ty.kind && self.rpits.insert(item_id.owner_id.def_id) { - let opaque_item = self.tcx.hir().expect_item(item_id.owner_id.def_id).expect_opaque_ty(); + let opaque_item = + self.tcx.hir().expect_item(item_id.owner_id.def_id).expect_opaque_ty(); for bound in opaque_item.bounds { intravisit::walk_param_bound(self, bound); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/consts.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/consts.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/consts.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/consts.rs 2023-12-21 16:55:28.000000000 +0000 @@ -71,7 +71,7 @@ _ => bug!("cannot destructure constant {:?}", const_), }; - let fields = tcx.arena.alloc_from_iter(fields.into_iter()); + let fields = tcx.arena.alloc_from_iter(fields); ty::DestructuredConst { variant, fields } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/implied_bounds.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/implied_bounds.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/implied_bounds.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/implied_bounds.rs 2023-12-21 16:55:28.000000000 +0000 @@ -50,73 +50,76 @@ let mut impl_spans = impl_spans(tcx, def_id); tcx.arena.alloc_from_iter(tys.into_iter().map(|ty| (ty, impl_spans.next().unwrap()))) } - DefKind::AssocTy if let Some(data) = tcx.opt_rpitit_info(def_id.to_def_id()) => match data { - ty::ImplTraitInTraitData::Trait { fn_def_id, .. 
} => { - // We need to remap all of the late-bound lifetimes in theassumed wf types - // of the fn (which are represented as ReFree) to the early-bound lifetimes - // of the RPITIT (which are represented by ReEarlyBound owned by the opaque). - // Luckily, this is very easy to do because we already have that mapping - // stored in the HIR of this RPITIT. - // - // Side-note: We don't really need to do this remapping for early-bound - // lifetimes because they're already "linked" by the bidirectional outlives - // predicates we insert in the `explicit_predicates_of` query for RPITITs. - let mut mapping = FxHashMap::default(); - let generics = tcx.generics_of(def_id); + DefKind::AssocTy if let Some(data) = tcx.opt_rpitit_info(def_id.to_def_id()) => { + match data { + ty::ImplTraitInTraitData::Trait { fn_def_id, .. } => { + // We need to remap all of the late-bound lifetimes in theassumed wf types + // of the fn (which are represented as ReFree) to the early-bound lifetimes + // of the RPITIT (which are represented by ReEarlyBound owned by the opaque). + // Luckily, this is very easy to do because we already have that mapping + // stored in the HIR of this RPITIT. + // + // Side-note: We don't really need to do this remapping for early-bound + // lifetimes because they're already "linked" by the bidirectional outlives + // predicates we insert in the `explicit_predicates_of` query for RPITITs. + let mut mapping = FxHashMap::default(); + let generics = tcx.generics_of(def_id); - // For each captured opaque lifetime, if it's late-bound (`ReFree` in this case, - // since it has been liberated), map it back to the early-bound lifetime of - // the GAT. Since RPITITs also have all of the fn's generics, we slice only - // the end of the list corresponding to the opaque's generics. - for param in &generics.params[tcx.generics_of(fn_def_id).params.len()..] { - let orig_lt = tcx.map_rpit_lifetime_to_fn_lifetime(param.def_id.expect_local()); - if matches!(*orig_lt, ty::ReFree(..)) { - mapping.insert( - orig_lt, - ty::Region::new_early_bound( - tcx, - ty::EarlyBoundRegion { - def_id: param.def_id, - index: param.index, - name: param.name, - }, - ), - ); + // For each captured opaque lifetime, if it's late-bound (`ReFree` in this case, + // since it has been liberated), map it back to the early-bound lifetime of + // the GAT. Since RPITITs also have all of the fn's generics, we slice only + // the end of the list corresponding to the opaque's generics. + for param in &generics.params[tcx.generics_of(fn_def_id).params.len()..] { + let orig_lt = + tcx.map_rpit_lifetime_to_fn_lifetime(param.def_id.expect_local()); + if matches!(*orig_lt, ty::ReFree(..)) { + mapping.insert( + orig_lt, + ty::Region::new_early_bound( + tcx, + ty::EarlyBoundRegion { + def_id: param.def_id, + index: param.index, + name: param.name, + }, + ), + ); + } } + // FIXME: This could use a real folder, I guess. + let remapped_wf_tys = tcx.fold_regions( + tcx.assumed_wf_types(fn_def_id.expect_local()).to_vec(), + |region, _| { + // If `region` is a `ReFree` that is captured by the + // opaque, remap it to its corresponding the early- + // bound region. + if let Some(remapped_region) = mapping.get(®ion) { + *remapped_region + } else { + region + } + }, + ); + tcx.arena.alloc_from_iter(remapped_wf_tys) + } + // Assumed wf types for RPITITs in an impl just inherit (and instantiate) + // the assumed wf types of the trait's RPITIT GAT. + ty::ImplTraitInTraitData::Impl { .. 
} => { + let impl_def_id = tcx.local_parent(def_id); + let rpitit_def_id = tcx.associated_item(def_id).trait_item_def_id.unwrap(); + let args = ty::GenericArgs::identity_for_item(tcx, def_id).rebase_onto( + tcx, + impl_def_id.to_def_id(), + tcx.impl_trait_ref(impl_def_id).unwrap().instantiate_identity().args, + ); + tcx.arena.alloc_from_iter( + ty::EarlyBinder::bind(tcx.assumed_wf_types_for_rpitit(rpitit_def_id)) + .iter_instantiated_copied(tcx, args) + .chain(tcx.assumed_wf_types(impl_def_id).into_iter().copied()), + ) } - // FIXME: This could use a real folder, I guess. - let remapped_wf_tys = tcx.fold_regions( - tcx.assumed_wf_types(fn_def_id.expect_local()).to_vec(), - |region, _| { - // If `region` is a `ReFree` that is captured by the - // opaque, remap it to its corresponding the early- - // bound region. - if let Some(remapped_region) = mapping.get(®ion) { - *remapped_region - } else { - region - } - }, - ); - tcx.arena.alloc_from_iter(remapped_wf_tys) - } - // Assumed wf types for RPITITs in an impl just inherit (and instantiate) - // the assumed wf types of the trait's RPITIT GAT. - ty::ImplTraitInTraitData::Impl { .. } => { - let impl_def_id = tcx.local_parent(def_id); - let rpitit_def_id = tcx.associated_item(def_id).trait_item_def_id.unwrap(); - let args = ty::GenericArgs::identity_for_item(tcx, def_id).rebase_onto( - tcx, - impl_def_id.to_def_id(), - tcx.impl_trait_ref(impl_def_id).unwrap().instantiate_identity().args, - ); - tcx.arena.alloc_from_iter( - ty::EarlyBinder::bind(tcx.assumed_wf_types_for_rpitit(rpitit_def_id)) - .iter_instantiated_copied(tcx, args) - .chain(tcx.assumed_wf_types(impl_def_id).into_iter().copied()), - ) } - }, + } DefKind::AssocConst | DefKind::AssocTy => tcx.assumed_wf_types(tcx.local_parent(def_id)), DefKind::OpaqueTy => match tcx.def_kind(tcx.local_parent(def_id)) { DefKind::TyAlias => ty::List::empty(), @@ -154,7 +157,7 @@ | DefKind::LifetimeParam | DefKind::GlobalAsm | DefKind::Closure - | DefKind::Generator => ty::List::empty(), + | DefKind::Coroutine => ty::List::empty(), } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/instance.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/instance.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/instance.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/instance.rs 2023-12-21 16:55:28.000000000 +0000 @@ -38,7 +38,7 @@ debug!(" => nontrivial drop glue"); match *ty.kind() { ty::Closure(..) - | ty::Generator(..) + | ty::Coroutine(..) | ty::Tuple(..) | ty::Adt(..) | ty::Dynamic(..) @@ -188,10 +188,7 @@ && trait_item_id != leaf_def.item.def_id && let Some(leaf_def_item) = leaf_def.item.def_id.as_local() { - tcx.compare_impl_const(( - leaf_def_item, - trait_item_id, - ))?; + tcx.compare_impl_const((leaf_def_item, trait_item_id))?; } Some(ty::Instance::new(leaf_def.item.def_id, args)) @@ -215,8 +212,8 @@ let is_copy = self_ty.is_copy_modulo_regions(tcx, param_env); match self_ty.kind() { _ if is_copy => (), - ty::Generator(..) - | ty::GeneratorWitness(..) + ty::Coroutine(..) + | ty::CoroutineWitness(..) | ty::Closure(..) | ty::Tuple(..) 
=> {} _ => return Ok(None), @@ -249,34 +246,47 @@ }) } } else if Some(trait_ref.def_id) == lang_items.future_trait() { - let ty::Generator(generator_def_id, args, _) = *rcvr_args.type_at(0).kind() else { + let ty::Coroutine(coroutine_def_id, args, _) = *rcvr_args.type_at(0).kind() else { bug!() }; if Some(trait_item_id) == tcx.lang_items().future_poll_fn() { // `Future::poll` is generated by the compiler. - Some(Instance { def: ty::InstanceDef::Item(generator_def_id), args: args }) + Some(Instance { def: ty::InstanceDef::Item(coroutine_def_id), args: args }) } else { // All other methods are default methods of the `Future` trait. // (this assumes that `ImplSource::Builtin` is only used for methods on `Future`) debug_assert!(tcx.defaultness(trait_item_id).has_value()); Some(Instance::new(trait_item_id, rcvr_args)) } - } else if Some(trait_ref.def_id) == lang_items.gen_trait() { - let ty::Generator(generator_def_id, args, _) = *rcvr_args.type_at(0).kind() else { + } else if Some(trait_ref.def_id) == lang_items.iterator_trait() { + let ty::Coroutine(coroutine_def_id, args, _) = *rcvr_args.type_at(0).kind() else { + bug!() + }; + if Some(trait_item_id) == tcx.lang_items().next_fn() { + // `Iterator::next` is generated by the compiler. + Some(Instance { def: ty::InstanceDef::Item(coroutine_def_id), args }) + } else { + // All other methods are default methods of the `Iterator` trait. + // (this assumes that `ImplSource::Builtin` is only used for methods on `Iterator`) + debug_assert!(tcx.defaultness(trait_item_id).has_value()); + Some(Instance::new(trait_item_id, rcvr_args)) + } + } else if Some(trait_ref.def_id) == lang_items.coroutine_trait() { + let ty::Coroutine(coroutine_def_id, args, _) = *rcvr_args.type_at(0).kind() else { bug!() }; if cfg!(debug_assertions) && tcx.item_name(trait_item_id) != sym::resume { - // For compiler developers who'd like to add new items to `Generator`, + // For compiler developers who'd like to add new items to `Coroutine`, // you either need to generate a shim body, or perhaps return // `InstanceDef::Item` pointing to a trait default method body if // it is given a default implementation by the trait. span_bug!( - tcx.def_span(generator_def_id), - "no definition for `{trait_ref}::{}` for built-in generator type", + tcx.def_span(coroutine_def_id), + "no definition for `{trait_ref}::{}` for built-in coroutine type", tcx.item_name(trait_item_id) ) } - Some(Instance { def: ty::InstanceDef::Item(generator_def_id), args }) + Some(Instance { def: ty::InstanceDef::Item(coroutine_def_id), args }) } else if tcx.fn_trait_kind_from_def_id(trait_ref.def_id).is_some() { // FIXME: This doesn't check for malformed libcore that defines, e.g., // `trait Fn { fn call_once(&self) { .. } }`. 
This is mostly for extension diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/layout.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/layout.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/layout.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/layout.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,7 +2,7 @@ use rustc_hir as hir; use rustc_index::bit_set::BitSet; use rustc_index::{IndexSlice, IndexVec}; -use rustc_middle::mir::{GeneratorLayout, GeneratorSavedLocal}; +use rustc_middle::mir::{CoroutineLayout, CoroutineSavedLocal}; use rustc_middle::query::Providers; use rustc_middle::ty::layout::{ IntegerExt, LayoutCx, LayoutError, LayoutOf, TyAndLayout, MAX_SIMD_LANES, @@ -85,7 +85,7 @@ fields: &IndexSlice>, repr: &ReprOptions, kind: StructKind, -) -> Result> { +) -> Result, &'tcx LayoutError<'tcx>> { let dl = cx.data_layout(); let pack = repr.pack; if pack.is_some() && repr.align.is_some() { @@ -170,29 +170,27 @@ // fall back to structurally deducing metadata. && !pointee.references_error() { - let pointee_metadata = Ty::new_projection(tcx,metadata_def_id, [pointee]); - let metadata_ty = match tcx.try_normalize_erasing_regions( - param_env, - pointee_metadata, - ) { - Ok(metadata_ty) => metadata_ty, - Err(mut err) => { - // Usually `::Metadata` can't be normalized because - // its struct tail cannot be normalized either, so try to get a - // more descriptive layout error here, which will lead to less confusing - // diagnostics. - match tcx.try_normalize_erasing_regions( - param_env, - tcx.struct_tail_without_normalization(pointee), - ) { - Ok(_) => {}, - Err(better_err) => { - err = better_err; + let pointee_metadata = Ty::new_projection(tcx, metadata_def_id, [pointee]); + let metadata_ty = + match tcx.try_normalize_erasing_regions(param_env, pointee_metadata) { + Ok(metadata_ty) => metadata_ty, + Err(mut err) => { + // Usually `::Metadata` can't be normalized because + // its struct tail cannot be normalized either, so try to get a + // more descriptive layout error here, which will lead to less confusing + // diagnostics. + match tcx.try_normalize_erasing_regions( + param_env, + tcx.struct_tail_without_normalization(pointee), + ) { + Ok(_) => {} + Err(better_err) => { + err = better_err; + } } + return Err(error(cx, LayoutError::NormalizationFailure(pointee, err))); } - return Err(error(cx, LayoutError::NormalizationFailure(pointee, err))); - }, - }; + }; let metadata_layout = cx.layout_of(metadata_ty)?; // If the metadata is a 1-zst, then the pointer is thin. @@ -316,7 +314,7 @@ tcx.mk_layout(unit) } - ty::Generator(def_id, args, _) => generator_layout(cx, ty, def_id, args)?, + ty::Coroutine(def_id, args, _) => coroutine_layout(cx, ty, def_id, args)?, ty::Closure(_, ref args) => { let tys = args.as_closure().upvar_tys(); @@ -577,7 +575,7 @@ return Err(error(cx, LayoutError::Unknown(ty))); } - ty::Bound(..) | ty::GeneratorWitness(..) | ty::Infer(_) | ty::Error(_) => { + ty::Bound(..) | ty::CoroutineWitness(..) | ty::Infer(_) | ty::Error(_) => { bug!("Layout::compute: unexpected type `{}`", ty) } @@ -587,7 +585,7 @@ }) } -/// Overlap eligibility and variant assignment for each GeneratorSavedLocal. +/// Overlap eligibility and variant assignment for each CoroutineSavedLocal. 
#[derive(Clone, Debug, PartialEq)] enum SavedLocalEligibility { Unassigned, @@ -595,7 +593,7 @@ Ineligible(Option), } -// When laying out generators, we divide our saved local fields into two +// When laying out coroutines, we divide our saved local fields into two // categories: overlap-eligible and overlap-ineligible. // // Those fields which are ineligible for overlap go in a "prefix" at the @@ -615,16 +613,16 @@ // of any variant. /// Compute the eligibility and assignment of each local. -fn generator_saved_local_eligibility( - info: &GeneratorLayout<'_>, -) -> (BitSet, IndexVec) { +fn coroutine_saved_local_eligibility( + info: &CoroutineLayout<'_>, +) -> (BitSet, IndexVec) { use SavedLocalEligibility::*; - let mut assignments: IndexVec = + let mut assignments: IndexVec = IndexVec::from_elem(Unassigned, &info.field_tys); // The saved locals not eligible for overlap. These will get - // "promoted" to the prefix of our generator. + // "promoted" to the prefix of our coroutine. let mut ineligible_locals = BitSet::new_empty(info.field_tys.len()); // Figure out which of our saved locals are fields in only @@ -662,7 +660,7 @@ for local_b in info.storage_conflicts.iter(local_a) { // local_a and local_b are storage live at the same time, therefore they - // cannot overlap in the generator layout. The only way to guarantee + // cannot overlap in the coroutine layout. The only way to guarantee // this is if they are in the same variant, or one is ineligible // (which means it is stored in every variant). if ineligible_locals.contains(local_b) || assignments[local_a] == assignments[local_b] { @@ -707,13 +705,13 @@ assignments[local] = Ineligible(Some(FieldIdx::from_usize(idx))); } } - debug!("generator saved local assignments: {:?}", assignments); + debug!("coroutine saved local assignments: {:?}", assignments); (ineligible_locals, assignments) } -/// Compute the full generator layout. -fn generator_layout<'tcx>( +/// Compute the full coroutine layout. +fn coroutine_layout<'tcx>( cx: &LayoutCx<'tcx, TyCtxt<'tcx>>, ty: Ty<'tcx>, def_id: hir::def_id::DefId, @@ -723,15 +721,15 @@ let tcx = cx.tcx; let subst_field = |ty: Ty<'tcx>| EarlyBinder::bind(ty).instantiate(tcx, args); - let Some(info) = tcx.generator_layout(def_id) else { + let Some(info) = tcx.coroutine_layout(def_id) else { return Err(error(cx, LayoutError::Unknown(ty))); }; - let (ineligible_locals, assignments) = generator_saved_local_eligibility(&info); + let (ineligible_locals, assignments) = coroutine_saved_local_eligibility(&info); // Build a prefix layout, including "promoting" all ineligible // locals as part of the prefix. We compute the layout of all of // these fields at once to get optimal packing. - let tag_index = args.as_generator().prefix_tys().len(); + let tag_index = args.as_coroutine().prefix_tys().len(); // `info.variant_fields` already accounts for the reserved variants, so no need to add them. let max_discr = (info.variant_fields.len() - 1) as u128; @@ -748,7 +746,7 @@ .map(|ty| Ty::new_maybe_uninit(tcx, ty)) .map(|ty| Ok(cx.layout_of(ty)?.layout)); let prefix_layouts = args - .as_generator() + .as_coroutine() .prefix_tys() .iter() .map(|ty| Ok(cx.layout_of(ty)?.layout)) @@ -768,7 +766,7 @@ // Split the prefix layout into the "outer" fields (upvars and // discriminant) and the "promoted" fields. Promoted fields will // get included in each variant that requested them in - // GeneratorLayout. + // CoroutineLayout. 
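As a rough model of the eligibility rule described above, the hypothetical `split_locals` below promotes any saved local that appears in more than one variant to the shared prefix and leaves the rest assigned to their single variant; the real code additionally consults storage-conflict bitsets, which are omitted here:

use std::collections::{HashMap, HashSet};

// Toy version of the eligibility split: a saved local that is live in more
// than one variant is "ineligible" and must go into the shared prefix; a
// local confined to a single variant may overlap with other variants' fields.
fn split_locals(variant_fields: &[Vec<usize>]) -> (HashSet<usize>, HashMap<usize, usize>) {
    let mut first_variant: HashMap<usize, usize> = HashMap::new();
    let mut ineligible: HashSet<usize> = HashSet::new();
    for (variant, fields) in variant_fields.iter().enumerate() {
        for &local in fields {
            match first_variant.get(&local) {
                None => {
                    first_variant.insert(local, variant);
                }
                Some(&v) if v != variant => {
                    ineligible.insert(local);
                }
                Some(_) => {}
            }
        }
    }
    // Eligible locals keep their single-variant assignment.
    let assignments = first_variant
        .into_iter()
        .filter(|(local, _)| !ineligible.contains(local))
        .collect();
    (ineligible, assignments)
}

fn main() {
    // Local 0 is saved across two suspend points (two variants) -> prefix;
    // locals 1 and 2 each belong to a single variant and keep that assignment.
    let (prefix, per_variant) = split_locals(&[vec![0, 1], vec![0, 2]]);
    assert!(prefix.contains(&0));
    assert_eq!(per_variant[&1], 0);
    assert_eq!(per_variant[&2], 1);
}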
debug!("prefix = {:#?}", prefix); let (outer_fields, promoted_offsets, promoted_memory_index) = match prefix.fields { FieldsShape::Arbitrary { mut offsets, memory_index } => { @@ -835,7 +833,7 @@ }; // Now, stitch the promoted and variant-only fields back together in - // the order they are mentioned by our GeneratorLayout. + // the order they are mentioned by our CoroutineLayout. // Because we only use some subset (that can differ between variants) // of the promoted fields, we can't just pick those elements of the // `promoted_memory_index` (as we'd end up with gaps). @@ -909,7 +907,7 @@ max_repr_align: None, unadjusted_abi_align: align.abi, }); - debug!("generator layout ({:?}): {:#?}", ty, layout); + debug!("coroutine layout ({:?}): {:#?}", ty, layout); Ok(layout) } @@ -958,12 +956,12 @@ record(adt_kind.into(), adt_packed, opt_discr_size, variant_infos); } - ty::Generator(def_id, args, _) => { - debug!("print-type-size t: `{:?}` record generator", layout.ty); - // Generators always have a begin/poisoned/end state with additional suspend points + ty::Coroutine(def_id, args, _) => { + debug!("print-type-size t: `{:?}` record coroutine", layout.ty); + // Coroutines always have a begin/poisoned/end state with additional suspend points let (variant_infos, opt_discr_size) = - variant_info_for_generator(cx, layout, def_id, args); - record(DataTypeKind::Generator, false, opt_discr_size, variant_infos); + variant_info_for_coroutine(cx, layout, def_id, args); + record(DataTypeKind::Coroutine, false, opt_discr_size, variant_infos); } ty::Closure(..) => { @@ -1048,7 +1046,7 @@ } } -fn variant_info_for_generator<'tcx>( +fn variant_info_for_coroutine<'tcx>( cx: &LayoutCx<'tcx, TyCtxt<'tcx>>, layout: TyAndLayout<'tcx>, def_id: DefId, @@ -1058,12 +1056,12 @@ return (vec![], None); }; - let generator = cx.tcx.optimized_mir(def_id).generator_layout().unwrap(); + let coroutine = cx.tcx.optimized_mir(def_id).coroutine_layout().unwrap(); let upvar_names = cx.tcx.closure_saved_names_of_captured_variables(def_id); let mut upvars_size = Size::ZERO; let upvar_fields: Vec<_> = args - .as_generator() + .as_coroutine() .upvar_tys() .iter() .zip(upvar_names) @@ -1082,7 +1080,7 @@ }) .collect(); - let mut variant_infos: Vec<_> = generator + let mut variant_infos: Vec<_> = coroutine .variant_fields .iter_enumerated() .map(|(variant_idx, variant_def)| { @@ -1097,9 +1095,9 @@ // The struct is as large as the last field's end variant_size = variant_size.max(offset + field_layout.size); FieldInfo { - kind: FieldKind::GeneratorLocal, - name: generator.field_names[*local].unwrap_or(Symbol::intern(&format!( - ".generator_field{}", + kind: FieldKind::CoroutineLocal, + name: coroutine.field_names[*local].unwrap_or(Symbol::intern(&format!( + ".coroutine_field{}", local.as_usize() ))), offset: offset.bytes(), @@ -1117,8 +1115,8 @@ // This `if` deserves some explanation. // - // The layout code has a choice of where to place the discriminant of this generator. - // If the discriminant of the generator is placed early in the layout (before the + // The layout code has a choice of where to place the discriminant of this coroutine. + // If the discriminant of the coroutine is placed early in the layout (before the // variant's own fields), then it'll implicitly be counted towards the size of the // variant, since we use the maximum offset to calculate size. // (side-note: I know this is a bit problematic given upvars placement, etc). 
@@ -1138,7 +1136,7 @@ } VariantInfo { - name: Some(Symbol::intern(&ty::GeneratorArgs::variant_name(variant_idx))), + name: Some(Symbol::intern(&ty::CoroutineArgs::variant_name(variant_idx))), kind: SizeKind::Exact, size: variant_size.bytes(), align: variant_layout.align.abi.bytes(), @@ -1149,7 +1147,7 @@ // The first three variants are hardcoded to be `UNRESUMED`, `RETURNED` and `POISONED`. // We will move the `RETURNED` and `POISONED` elements to the end so we - // are left with a sorting order according to the generators yield points: + // are left with a sorting order according to the coroutines yield points: // First `Unresumed`, then the `SuspendN` followed by `Returned` and `Panicked` (POISONED). let end_states = variant_infos.drain(1..=2); let end_states: Vec<_> = end_states.collect(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/layout_sanity_check.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/layout_sanity_check.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/layout_sanity_check.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/layout_sanity_check.rs 2023-12-21 16:55:28.000000000 +0000 @@ -19,6 +19,9 @@ if layout.size.bytes() % layout.align.abi.bytes() != 0 { bug!("size is not a multiple of align, in the following layout:\n{layout:#?}"); } + if layout.size.bytes() >= cx.tcx.data_layout.obj_size_bound() { + bug!("size is too large, in the following layout:\n{layout:#?}"); + } if !cfg!(debug_assertions) { // Stop here, the rest is kind of expensive. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,7 +5,11 @@ //! This API is completely unstable and subject to change. #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] +#![cfg_attr(not(bootstrap), allow(internal_features))] #![feature(assert_matches)] +#![feature(associated_type_defaults)] #![feature(iterator_try_collect)] #![feature(let_chains)] #![feature(if_let_guard)] @@ -36,6 +40,7 @@ mod needs_drop; mod opaque_types; pub mod representability; +pub mod sig_types; mod structural_match; mod ty; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/needs_drop.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/needs_drop.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/needs_drop.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/needs_drop.rs 2023-12-21 16:55:28.000000000 +0000 @@ -134,9 +134,9 @@ for component in components { match *component.kind() { - // The information required to determine whether a generator has drop is + // The information required to determine whether a coroutine has drop is // computed on MIR, while this very method is used to build MIR. - // To avoid cycles, we consider that generators always require drop. + // To avoid cycles, we consider that coroutines always require drop. 
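The cycle-avoidance strategy described above (answer conservatively rather than recurse into MIR that is still being built) can be sketched with a toy component graph; `needs_drop` and its arguments here are hypothetical stand-ins, not the rustc query:

use std::collections::{HashMap, HashSet};

// Walk a type's components and, if the walk would re-enter a type it is
// still analysing, answer `true` conservatively instead of recursing forever.
fn needs_drop(
    ty: &str,
    components: &HashMap<&str, Vec<&str>>,
    has_destructor: &HashSet<&str>,
    in_progress: &mut HashSet<String>,
) -> bool {
    if has_destructor.contains(ty) {
        return true;
    }
    if !in_progress.insert(ty.to_string()) {
        // Cycle detected: assume drop is needed, like coroutines whose MIR
        // (and therefore witness types) is not available yet.
        return true;
    }
    let result = components
        .get(ty)
        .map_or(false, |cs| cs.iter().any(|c| needs_drop(c, components, has_destructor, in_progress)));
    in_progress.remove(ty);
    result
}

fn main() {
    let mut components = HashMap::new();
    components.insert("List", vec!["List"]); // self-referential component graph
    components.insert("Pair", vec!["u8", "u8"]);
    let has_destructor = HashSet::new();
    assert!(needs_drop("List", &components, &has_destructor, &mut HashSet::new()));
    assert!(!needs_drop("Pair", &components, &has_destructor, &mut HashSet::new()));
}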
// // HACK: Because we erase regions contained in the coroutine witness, we // have to conservatively assume that every region captured by the @@ -145,15 +145,15 @@ // for the coroutine witness and check whether any of the contained types // need to be dropped, and only require the captured types to be live // if they do. - ty::Generator(_, args, _) => { + ty::Coroutine(_, args, _) => { if self.reveal_coroutine_witnesses { - queue_type(self, args.as_generator().witness()); + queue_type(self, args.as_coroutine().witness()); } else { return Some(Err(AlwaysRequiresDrop)); } } - ty::GeneratorWitness(def_id, args) => { - if let Some(witness) = tcx.mir_generator_witnesses(def_id) { + ty::CoroutineWitness(def_id, args) => { + if let Some(witness) = tcx.mir_coroutine_witnesses(def_id) { self.reveal_coroutine_witnesses = true; for field_ty in &witness.field_tys { queue_type( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/opaque_types.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/opaque_types.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/opaque_types.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/opaque_types.rs 2023-12-21 16:55:28.000000000 +0000 @@ -53,14 +53,10 @@ fn parent(&self) -> Option { match self.tcx.def_kind(self.item) { - DefKind::AnonConst | DefKind::InlineConst | DefKind::Fn | DefKind::TyAlias => None, DefKind::AssocFn | DefKind::AssocTy | DefKind::AssocConst => { Some(self.tcx.local_parent(self.item)) } - other => span_bug!( - self.tcx.def_span(self.item), - "unhandled item with opaque types: {other:?}" - ), + _ => None, } } @@ -98,14 +94,6 @@ hir_id == scope } - fn collect_body_and_predicate_taits(&mut self) { - // Look at all where bounds. - self.tcx.predicates_of(self.item).instantiate_identity(self.tcx).visit_with(self); - // An item is allowed to constrain opaques declared within its own body (but not nested within - // nested functions). - self.collect_taits_declared_in_body(); - } - #[instrument(level = "trace", skip(self))] fn collect_taits_declared_in_body(&mut self) { let body = self.tcx.hir().body(self.tcx.hir().body_owned_by(self.item)).value; @@ -132,6 +120,14 @@ } } +impl<'tcx> super::sig_types::SpannedTypeVisitor<'tcx> for OpaqueTypeCollector<'tcx> { + #[instrument(skip(self), ret, level = "trace")] + fn visit(&mut self, span: Span, value: impl TypeVisitable>) -> ControlFlow { + self.visit_spanned(span, value); + ControlFlow::Continue(()) + } +} + impl<'tcx> TypeVisitor> for OpaqueTypeCollector<'tcx> { #[instrument(skip(self), ret, level = "trace")] fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow { @@ -159,7 +155,14 @@ self.opaques.push(alias_ty.def_id.expect_local()); - match self.tcx.uses_unique_generic_params(alias_ty.args, CheckRegions::Bound) { + let parent_count = self.tcx.generics_of(alias_ty.def_id).parent_count; + // Only check that the parent generics of the TAIT/RPIT are unique. + // the args owned by the opaque are going to always be duplicate + // lifetime params for RPITs, and empty for TAITs. + match self + .tcx + .uses_unique_generic_params(&alias_ty.args[..parent_count], CheckRegions::Bound) + { Ok(()) => { // FIXME: implement higher kinded lifetime bounds on nested opaque types. 
They are not // supported at all, so this is sound to do, but once we want to support them, you'll @@ -269,41 +272,27 @@ } } -fn opaque_types_defined_by<'tcx>(tcx: TyCtxt<'tcx>, item: LocalDefId) -> &'tcx [LocalDefId] { +fn opaque_types_defined_by<'tcx>( + tcx: TyCtxt<'tcx>, + item: LocalDefId, +) -> &'tcx ty::List { let kind = tcx.def_kind(item); trace!(?kind); let mut collector = OpaqueTypeCollector::new(tcx, item); + super::sig_types::walk_types(tcx, item, &mut collector); match kind { - // Walk over the signature of the function-like to find the opaques. - DefKind::AssocFn | DefKind::Fn => { - let ty_sig = tcx.fn_sig(item).instantiate_identity(); - let hir_sig = tcx.hir().get_by_def_id(item).fn_sig().unwrap(); - // Walk over the inputs and outputs manually in order to get good spans for them. - collector.visit_spanned(hir_sig.decl.output.span(), ty_sig.output()); - for (hir, ty) in hir_sig.decl.inputs.iter().zip(ty_sig.inputs().iter()) { - collector.visit_spanned(hir.span, ty.map_bound(|x| *x)); - } - collector.collect_body_and_predicate_taits(); - } - // Walk over the type of the item to find opaques. - DefKind::Static(_) | DefKind::Const | DefKind::AssocConst | DefKind::AnonConst => { - let span = match tcx.hir().get_by_def_id(item).ty() { - Some(ty) => ty.span, - _ => tcx.def_span(item), - }; - collector.visit_spanned(span, tcx.type_of(item).instantiate_identity()); - collector.collect_body_and_predicate_taits(); - } - // We're also doing this for `AssocTy` for the wf checks in `check_opaque_meets_bounds` - DefKind::TyAlias | DefKind::AssocTy => { - tcx.type_of(item).instantiate_identity().visit_with(&mut collector); - } - DefKind::OpaqueTy => { - for (pred, span) in tcx.explicit_item_bounds(item).instantiate_identity_iter_copied() { - collector.visit_spanned(span, pred); - } + DefKind::AssocFn + | DefKind::Fn + | DefKind::Static(_) + | DefKind::Const + | DefKind::AssocConst + | DefKind::AnonConst => { + collector.collect_taits_declared_in_body(); } - DefKind::Mod + DefKind::OpaqueTy + | DefKind::TyAlias + | DefKind::AssocTy + | DefKind::Mod | DefKind::Struct | DefKind::Union | DefKind::Enum @@ -322,12 +311,13 @@ | DefKind::LifetimeParam | DefKind::GlobalAsm | DefKind::Impl { .. } => {} - // Closures and generators are type checked with their parent, so there is no difference here. - DefKind::Closure | DefKind::Generator | DefKind::InlineConst => { - return tcx.opaque_types_defined_by(tcx.local_parent(item)); + // Closures and coroutines are type checked with their parent, so we need to allow all + // opaques from the closure signature *and* from the parent body. + DefKind::Closure | DefKind::Coroutine | DefKind::InlineConst => { + collector.opaques.extend(tcx.opaque_types_defined_by(tcx.local_parent(item))); } } - tcx.arena.alloc_from_iter(collector.opaques) + tcx.mk_local_def_ids(&collector.opaques) } pub(super) fn provide(providers: &mut Providers) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/sig_types.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/sig_types.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/sig_types.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/sig_types.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,112 @@ +//! This module contains helpers for walking all types of +//! 
a signature, while preserving spans as much as possible + +use std::ops::ControlFlow; + +use rustc_hir::{def::DefKind, def_id::LocalDefId}; +use rustc_middle::ty::TyCtxt; +use rustc_span::Span; +use rustc_type_ir::visit::TypeVisitable; + +pub trait SpannedTypeVisitor<'tcx> { + type BreakTy = !; + fn visit( + &mut self, + span: Span, + value: impl TypeVisitable>, + ) -> ControlFlow; +} + +pub fn walk_types<'tcx, V: SpannedTypeVisitor<'tcx>>( + tcx: TyCtxt<'tcx>, + item: LocalDefId, + visitor: &mut V, +) -> ControlFlow { + let kind = tcx.def_kind(item); + trace!(?kind); + match kind { + // Walk over the signature of the function + DefKind::AssocFn | DefKind::Fn => { + let ty_sig = tcx.fn_sig(item).instantiate_identity(); + let hir_sig = tcx.hir().get_by_def_id(item).fn_decl().unwrap(); + // Walk over the inputs and outputs manually in order to get good spans for them. + visitor.visit(hir_sig.output.span(), ty_sig.output()); + for (hir, ty) in hir_sig.inputs.iter().zip(ty_sig.inputs().iter()) { + visitor.visit(hir.span, ty.map_bound(|x| *x))?; + } + for (pred, span) in tcx.predicates_of(item).instantiate_identity(tcx) { + visitor.visit(span, pred)?; + } + } + // Walk over the type behind the alias + DefKind::TyAlias {..} | DefKind::AssocTy | + // Walk over the type of the item + DefKind::Static(_) | DefKind::Const | DefKind::AssocConst | DefKind::AnonConst => { + let span = match tcx.hir().get_by_def_id(item).ty() { + Some(ty) => ty.span, + _ => tcx.def_span(item), + }; + visitor.visit(span, tcx.type_of(item).instantiate_identity()); + for (pred, span) in tcx.predicates_of(item).instantiate_identity(tcx) { + visitor.visit(span, pred)?; + } + } + DefKind::OpaqueTy => { + for (pred, span) in tcx.explicit_item_bounds(item).instantiate_identity_iter_copied() { + visitor.visit(span, pred)?; + } + } + // Look at field types + DefKind::Struct | DefKind::Union | DefKind::Enum => { + let span = tcx.def_ident_span(item).unwrap(); + visitor.visit(span, tcx.type_of(item).instantiate_identity()); + for (pred, span) in tcx.predicates_of(item).instantiate_identity(tcx) { + visitor.visit(span, pred)?; + } + } + // These are not part of a public API, they can only appear as hidden types, and there + // the interesting parts are solely in the signature of the containing item's opaque type + // or dyn type. + DefKind::InlineConst | DefKind::Closure | DefKind::Coroutine => {} + DefKind::Impl { of_trait } => { + if of_trait { + let span = tcx.hir().get_by_def_id(item).expect_item().expect_impl().of_trait.unwrap().path.span; + let args = &tcx.impl_trait_ref(item).unwrap().instantiate_identity().args[1..]; + visitor.visit(span, args)?; + } + let span = match tcx.hir().get_by_def_id(item).ty() { + Some(ty) => ty.span, + _ => tcx.def_span(item), + }; + visitor.visit(span, tcx.type_of(item).instantiate_identity()); + for (pred, span) in tcx.predicates_of(item).instantiate_identity(tcx) { + visitor.visit(span, pred)?; + } + } + DefKind::TraitAlias | DefKind::Trait => { + for (pred, span) in tcx.predicates_of(item).instantiate_identity(tcx) { + visitor.visit(span, pred)?; + } + } + | DefKind::Variant + | DefKind::ForeignTy + | DefKind::TyParam + | DefKind::ConstParam + | DefKind::Ctor(_, _) + | DefKind::Field + | DefKind::LifetimeParam => { + span_bug!( + tcx.def_span(item), + "{kind:?} has not seen any uses of `walk_types` yet, ping oli-obk if you'd like any help" + ) + } + // These don't have any types. 
+ | DefKind::ExternCrate + | DefKind::ForeignMod + | DefKind::Macro(_) + | DefKind::GlobalAsm + | DefKind::Mod + | DefKind::Use => {} + } + ControlFlow::Continue(()) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/ty.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/ty.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/ty.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_ty_utils/src/ty.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,9 +3,8 @@ use rustc_hir::def::DefKind; use rustc_index::bit_set::BitSet; use rustc_middle::query::Providers; -use rustc_middle::ty::{ - self, EarlyBinder, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor, -}; +use rustc_middle::ty::{self, EarlyBinder, Ty, TyCtxt, TypeVisitor}; +use rustc_middle::ty::{ToPredicate, TypeSuperVisitable, TypeVisitable}; use rustc_span::def_id::{DefId, LocalDefId, CRATE_DEF_ID}; use rustc_span::DUMMY_SP; use rustc_trait_selection::traits; @@ -15,13 +14,13 @@ adtdef: ty::AdtDef<'tcx>, ty: Ty<'tcx>, ) -> Vec> { - use rustc_type_ir::sty::TyKind::*; + use rustc_type_ir::TyKind::*; let result = match ty.kind() { Bool | Char | Int(..) | Uint(..) | Float(..) | RawPtr(..) | Ref(..) | FnDef(..) - | FnPtr(_) | Array(..) | Closure(..) | Generator(..) | Never => vec![], + | FnPtr(_) | Array(..) | Closure(..) | Coroutine(..) | Never => vec![], - Str | Dynamic(..) | Slice(_) | Foreign(..) | Error(_) | GeneratorWitness(..) => { + Str | Dynamic(..) | Slice(_) | Foreign(..) | Error(_) | CoroutineWitness(..) => { // these are never sized - return the target type vec![ty] } @@ -185,9 +184,10 @@ fn visit_ty(&mut self, ty: Ty<'tcx>) -> std::ops::ControlFlow { if let ty::Alias(ty::Projection, unshifted_alias_ty) = *ty.kind() - && let Some(ty::ImplTraitInTraitData::Trait { fn_def_id, .. } - | ty::ImplTraitInTraitData::Impl { fn_def_id, .. }) - = self.tcx.opt_rpitit_info(unshifted_alias_ty.def_id) + && let Some( + ty::ImplTraitInTraitData::Trait { fn_def_id, .. } + | ty::ImplTraitInTraitData::Impl { fn_def_id, .. }, + ) = self.tcx.opt_rpitit_info(unshifted_alias_ty.def_id) && fn_def_id == self.fn_def_id && self.seen.insert(unshifted_alias_ty.def_id) { @@ -203,7 +203,11 @@ "we shouldn't walk non-predicate binders with `impl Trait`...", ); } - ty::Region::new_late_bound(self.tcx, index.shifted_out_to_binder(self.depth), bv) + ty::Region::new_late_bound( + self.tcx, + index.shifted_out_to_binder(self.depth), + bv, + ) } else { re } @@ -212,12 +216,21 @@ // If we're lowering to associated item, install the opaque type which is just // the `type_of` of the trait's associated item. 
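The `sig_types::walk_types` helper added above threads `ControlFlow` from a span-preserving visitor through every signature component. A self-contained sketch of that shape, with hypothetical `SpannedVisitor` and `walk_signature` names and without the `type BreakTy = !` default (which needs the `associated_type_defaults` feature):

use std::ops::ControlFlow;

type Span = (usize, usize);

// The walker hands the visitor (span, type) pairs and threads `ControlFlow`
// through with `?`, so a visitor can stop the walk early.
trait SpannedVisitor {
    type BreakTy;
    fn visit(&mut self, span: Span, ty: &str) -> ControlFlow<Self::BreakTy>;
}

fn walk_signature<V: SpannedVisitor>(sig: &[(Span, &str)], v: &mut V) -> ControlFlow<V::BreakTy> {
    for (span, ty) in sig {
        v.visit(*span, ty)?; // stops at the first `Break`
    }
    ControlFlow::Continue(())
}

struct FindImplTrait;

impl SpannedVisitor for FindImplTrait {
    type BreakTy = Span;
    fn visit(&mut self, span: Span, ty: &str) -> ControlFlow<Span> {
        if ty.starts_with("impl ") { ControlFlow::Break(span) } else { ControlFlow::Continue(()) }
    }
}

fn main() {
    let sig = [((0, 3), "u32"), ((5, 26), "impl Iterator<Item = u8>")];
    assert_eq!(walk_signature(&sig, &mut FindImplTrait), ControlFlow::Break((5, 26)));
}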
If we're using the old lowering // strategy, then just reinterpret the associated type like an opaque :^) - let default_ty = self.tcx.type_of(shifted_alias_ty.def_id).instantiate(self.tcx, shifted_alias_ty.args); - - self.predicates.push(ty::Clause::from_projection_clause(self.tcx, ty::Binder::bind_with_vars( - ty::ProjectionPredicate { projection_ty: shifted_alias_ty, term: default_ty.into() }, - self.bound_vars, - ))); + let default_ty = self + .tcx + .type_of(shifted_alias_ty.def_id) + .instantiate(self.tcx, shifted_alias_ty.args); + + self.predicates.push( + ty::Binder::bind_with_vars( + ty::ProjectionPredicate { + projection_ty: shifted_alias_ty, + term: default_ty.into(), + }, + self.bound_vars, + ) + .to_predicate(self.tcx), + ); // We walk the *un-shifted* alias ty, because we're tracking the de bruijn // binder depth, and if we were to walk `shifted_alias_ty` instead, we'd diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -3,12 +3,13 @@ version = "0.0.0" edition = "2021" -[lib] - [dependencies] +# tidy-alphabetical-start bitflags = "1.2.1" -rustc_index = { path = "../rustc_index" } -rustc_serialize = { path = "../rustc_serialize" } +derivative = "2.2.0" rustc_data_structures = { path = "../rustc_data_structures" } +rustc_index = { path = "../rustc_index" } rustc_macros = { path = "../rustc_macros" } +rustc_serialize = { path = "../rustc_serialize" } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +# tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/canonical.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/canonical.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/canonical.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/canonical.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,128 @@ +use std::fmt; +use std::hash::Hash; +use std::ops::ControlFlow; + +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; + +use crate::fold::{FallibleTypeFolder, TypeFoldable}; +use crate::visit::{TypeVisitable, TypeVisitor}; +use crate::{HashStableContext, Interner, UniverseIndex}; + +/// A "canonicalized" type `V` is one where all free inference +/// variables have been rewritten to "canonical vars". These are +/// numbered starting from 0 in order of first appearance. +#[derive(derivative::Derivative)] +#[derivative(Clone(bound = "V: Clone"), Hash(bound = "V: Hash"))] +#[derive(TyEncodable, TyDecodable)] +pub struct Canonical { + pub value: V, + pub max_universe: UniverseIndex, + pub variables: I::CanonicalVars, +} + +impl Canonical { + /// Allows you to map the `value` of a canonical while keeping the + /// same set of bound variables. + /// + /// **WARNING:** This function is very easy to mis-use, hence the + /// name! In particular, the new value `W` must use all **the + /// same type/region variables** in **precisely the same order** + /// as the original! (The ordering is defined by the + /// `TypeFoldable` implementation of the type in question.) 
+ /// + /// An example of a **correct** use of this: + /// + /// ```rust,ignore (not real code) + /// let a: Canonical = ...; + /// let b: Canonical = a.unchecked_map(|v| (v, )); + /// ``` + /// + /// An example of an **incorrect** use of this: + /// + /// ```rust,ignore (not real code) + /// let a: Canonical = ...; + /// let ty: Ty = ...; + /// let b: Canonical)> = a.unchecked_map(|v| (v, ty)); + /// ``` + pub fn unchecked_map(self, map_op: impl FnOnce(V) -> W) -> Canonical { + let Canonical { max_universe, variables, value } = self; + Canonical { max_universe, variables, value: map_op(value) } + } + + /// Allows you to map the `value` of a canonical while keeping the same set of + /// bound variables. + /// + /// **WARNING:** This function is very easy to mis-use, hence the name! See + /// the comment of [Canonical::unchecked_map] for more details. + pub fn unchecked_rebind(self, value: W) -> Canonical { + let Canonical { max_universe, variables, value: _ } = self; + Canonical { max_universe, variables, value } + } +} + +impl> HashStable for Canonical +where + I::CanonicalVars: HashStable, +{ + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { + self.value.hash_stable(hcx, hasher); + self.max_universe.hash_stable(hcx, hasher); + self.variables.hash_stable(hcx, hasher); + } +} + +impl Eq for Canonical {} + +impl PartialEq for Canonical { + fn eq(&self, other: &Self) -> bool { + self.value == other.value + && self.max_universe == other.max_universe + && self.variables == other.variables + } +} + +impl fmt::Display for Canonical { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Canonical {{ value: {}, max_universe: {:?}, variables: {:?} }}", + self.value, self.max_universe, self.variables + ) + } +} + +impl fmt::Debug for Canonical { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Canonical") + .field("value", &self.value) + .field("max_universe", &self.max_universe) + .field("variables", &self.variables) + .finish() + } +} + +impl Copy for Canonical where I::CanonicalVars: Copy {} + +impl> TypeFoldable for Canonical +where + I::CanonicalVars: TypeFoldable, +{ + fn try_fold_with>(self, folder: &mut F) -> Result { + Ok(Canonical { + value: self.value.try_fold_with(folder)?, + max_universe: self.max_universe.try_fold_with(folder)?, + variables: self.variables.try_fold_with(folder)?, + }) + } +} + +impl> TypeVisitable for Canonical +where + I::CanonicalVars: TypeVisitable, +{ + fn visit_with>(&self, folder: &mut F) -> ControlFlow { + self.value.visit_with(folder)?; + self.max_universe.visit_with(folder)?; + self.variables.visit_with(folder) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/codec.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/codec.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/codec.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/codec.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,4 @@ -use crate::Interner; +use crate::{Interner, PredicateKind}; use rustc_data_structures::fx::FxHashMap; use rustc_serialize::{Decoder, Encoder}; @@ -30,9 +30,7 @@ fn type_shorthands(&mut self) -> &mut FxHashMap<::Ty, usize>; - fn predicate_shorthands( - &mut self, - ) -> &mut FxHashMap<::PredicateKind, usize>; + fn predicate_shorthands(&mut self) -> &mut FxHashMap, usize>; fn encode_alloc_id(&mut self, alloc_id: &::AllocId); } diff -Nru 
rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/const_kind.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/const_kind.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/const_kind.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/const_kind.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,136 @@ +use rustc_data_structures::stable_hasher::HashStable; +use rustc_data_structures::stable_hasher::StableHasher; +use std::fmt; + +use crate::{DebruijnIndex, DebugWithInfcx, HashStableContext, InferCtxtLike, Interner, WithInfcx}; + +use self::ConstKind::*; + +/// Represents a constant in Rust. +#[derive(derivative::Derivative)] +#[derivative( + Clone(bound = ""), + PartialOrd(bound = ""), + PartialOrd = "feature_allow_slow_enum", + Ord(bound = ""), + Ord = "feature_allow_slow_enum", + Hash(bound = "") +)] +#[derive(TyEncodable, TyDecodable)] +pub enum ConstKind { + /// A const generic parameter. + Param(I::ParamConst), + + /// Infer the value of the const. + Infer(I::InferConst), + + /// Bound const variable, used only when preparing a trait query. + Bound(DebruijnIndex, I::BoundConst), + + /// A placeholder const - universally quantified higher-ranked const. + Placeholder(I::PlaceholderConst), + + /// An unnormalized const item such as an anon const or assoc const or free const item. + /// Right now anything other than anon consts does not actually work properly but this + /// should + Unevaluated(I::AliasConst), + + /// Used to hold computed value. + Value(I::ValueConst), + + /// A placeholder for a const which could not be computed; this is + /// propagated to avoid useless error messages. + Error(I::ErrorGuaranteed), + + /// Unevaluated non-const-item, used by `feature(generic_const_exprs)` to represent + /// const arguments such as `N + 1` or `foo(N)` + Expr(I::ExprConst), +} + +const fn const_kind_discriminant(value: &ConstKind) -> usize { + match value { + Param(_) => 0, + Infer(_) => 1, + Bound(_, _) => 2, + Placeholder(_) => 3, + Unevaluated(_) => 4, + Value(_) => 5, + Error(_) => 6, + Expr(_) => 7, + } +} + +impl HashStable for ConstKind +where + I::ParamConst: HashStable, + I::InferConst: HashStable, + I::BoundConst: HashStable, + I::PlaceholderConst: HashStable, + I::AliasConst: HashStable, + I::ValueConst: HashStable, + I::ErrorGuaranteed: HashStable, + I::ExprConst: HashStable, +{ + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { + const_kind_discriminant(self).hash_stable(hcx, hasher); + match self { + Param(p) => p.hash_stable(hcx, hasher), + Infer(i) => i.hash_stable(hcx, hasher), + Bound(d, b) => { + d.hash_stable(hcx, hasher); + b.hash_stable(hcx, hasher); + } + Placeholder(p) => p.hash_stable(hcx, hasher), + Unevaluated(u) => u.hash_stable(hcx, hasher), + Value(v) => v.hash_stable(hcx, hasher), + Error(e) => e.hash_stable(hcx, hasher), + Expr(e) => e.hash_stable(hcx, hasher), + } + } +} + +impl PartialEq for ConstKind { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Param(l0), Param(r0)) => l0 == r0, + (Infer(l0), Infer(r0)) => l0 == r0, + (Bound(l0, l1), Bound(r0, r1)) => l0 == r0 && l1 == r1, + (Placeholder(l0), Placeholder(r0)) => l0 == r0, + (Unevaluated(l0), Unevaluated(r0)) => l0 == r0, + (Value(l0), Value(r0)) => l0 == r0, + (Error(l0), Error(r0)) => l0 == r0, + (Expr(l0), Expr(r0)) => l0 == r0, + _ => false, + } + } +} + +impl Eq for ConstKind {} + +impl fmt::Debug for ConstKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> 
std::fmt::Result { + WithInfcx::with_no_infcx(self).fmt(f) + } +} + +impl DebugWithInfcx for ConstKind { + fn fmt>( + this: WithInfcx<'_, Infcx, &Self>, + f: &mut core::fmt::Formatter<'_>, + ) -> core::fmt::Result { + use ConstKind::*; + + match this.data { + Param(param) => write!(f, "{param:?}"), + Infer(var) => write!(f, "{:?}", &this.wrap(var)), + Bound(debruijn, var) => crate::debug_bound_var(f, *debruijn, var.clone()), + Placeholder(placeholder) => write!(f, "{placeholder:?}"), + Unevaluated(uv) => { + write!(f, "{:?}", &this.wrap(uv)) + } + Value(valtree) => write!(f, "{valtree:?}"), + Error(_) => write!(f, "{{const error}}"), + Expr(expr) => write!(f, "{:?}", &this.wrap(expr)), + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/debug.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/debug.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/debug.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/debug.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,126 @@ +use crate::{Interner, UniverseIndex}; + +use core::fmt; +use std::marker::PhantomData; + +pub trait InferCtxtLike { + type Interner: Interner; + + fn universe_of_ty(&self, ty: ::InferTy) -> Option; + + fn universe_of_lt( + &self, + lt: ::InferRegion, + ) -> Option; + + fn universe_of_ct(&self, ct: ::InferConst) + -> Option; +} + +pub struct NoInfcx(PhantomData); + +impl InferCtxtLike for NoInfcx { + type Interner = I; + + fn universe_of_ty(&self, _ty: ::InferTy) -> Option { + None + } + + fn universe_of_ct(&self, _ct: ::InferConst) -> Option { + None + } + + fn universe_of_lt(&self, _lt: ::InferRegion) -> Option { + None + } +} + +pub trait DebugWithInfcx: fmt::Debug { + fn fmt>( + this: WithInfcx<'_, Infcx, &Self>, + f: &mut fmt::Formatter<'_>, + ) -> fmt::Result; +} + +impl + ?Sized> DebugWithInfcx for &'_ T { + fn fmt>( + this: WithInfcx<'_, Infcx, &Self>, + f: &mut fmt::Formatter<'_>, + ) -> fmt::Result { + >::fmt(this.map(|&data| data), f) + } +} + +impl> DebugWithInfcx for [T] { + fn fmt>( + this: WithInfcx<'_, Infcx, &Self>, + f: &mut fmt::Formatter<'_>, + ) -> fmt::Result { + match f.alternate() { + true => { + write!(f, "[\n")?; + for element in this.data.iter() { + write!(f, "{:?},\n", &this.wrap(element))?; + } + write!(f, "]") + } + false => { + write!(f, "[")?; + if this.data.len() > 0 { + for element in &this.data[..(this.data.len() - 1)] { + write!(f, "{:?}, ", &this.wrap(element))?; + } + if let Some(element) = this.data.last() { + write!(f, "{:?}", &this.wrap(element))?; + } + } + write!(f, "]") + } + } + } +} + +pub struct WithInfcx<'a, Infcx: InferCtxtLike, T> { + pub data: T, + pub infcx: &'a Infcx, +} + +impl Copy for WithInfcx<'_, Infcx, T> {} + +impl Clone for WithInfcx<'_, Infcx, T> { + fn clone(&self) -> Self { + Self { data: self.data.clone(), infcx: self.infcx } + } +} + +impl<'a, I: Interner, T> WithInfcx<'a, NoInfcx, T> { + pub fn with_no_infcx(data: T) -> Self { + Self { data, infcx: &NoInfcx(PhantomData) } + } +} + +impl<'a, Infcx: InferCtxtLike, T> WithInfcx<'a, Infcx, T> { + pub fn new(data: T, infcx: &'a Infcx) -> Self { + Self { data, infcx } + } + + pub fn wrap(self, u: U) -> WithInfcx<'a, Infcx, U> { + WithInfcx { data: u, infcx: self.infcx } + } + + pub fn map(self, f: impl FnOnce(T) -> U) -> WithInfcx<'a, Infcx, U> { + WithInfcx { data: f(self.data), infcx: self.infcx } + } + + pub fn as_ref(&self) -> WithInfcx<'a, Infcx, &T> { + WithInfcx { data: &self.data, infcx: self.infcx } + } +} 
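The new `debug.rs` is built around a small wrapper pattern: pair a value with an (optional) inference context so that `Debug` output can consult it, and supply a do-nothing `NoInfcx` when no context exists. Below is a minimal sketch of that pattern with simplified stand-in names and types; none of these are the rustc definitions.

```rust
use std::fmt;

// Stand-in for an inference-context-like trait.
trait Ctx {
    fn describe(&self) -> &'static str;
}

// Stand-in for `NoInfcx`: a context that knows nothing.
struct NoCtx;
static NO_CTX: NoCtx = NoCtx;

impl Ctx for NoCtx {
    fn describe(&self) -> &'static str {
        "<no inference context>"
    }
}

// Stand-in for `WithInfcx`: a value paired with a borrowed context.
struct WithCtx<'a, C: Ctx, T> {
    data: T,
    ctx: &'a C,
}

impl<T> WithCtx<'static, NoCtx, T> {
    // Mirrors the role of `WithInfcx::with_no_infcx`.
    fn with_no_ctx(data: T) -> Self {
        WithCtx { data, ctx: &NO_CTX }
    }
}

impl<C: Ctx, T: fmt::Debug> fmt::Debug for WithCtx<'_, C, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // A real context could resolve universes or variables here;
        // this sketch just labels the output.
        write!(f, "{:?} (formatted with {})", self.data, self.ctx.describe())
    }
}

fn main() {
    println!("{:?}", WithCtx::with_no_ctx(vec![1, 2, 3]));
}
```

The blanket `Debug for WithInfcx` impl in the diff delegates to `DebugWithInfcx::fmt` in much the same way this sketch's `Debug` impl funnels everything through the context-aware formatting.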
+ +impl> fmt::Debug + for WithInfcx<'_, Infcx, T> +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + DebugWithInfcx::fmt(self.as_ref(), f) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/flags.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/flags.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/flags.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/flags.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,122 @@ +bitflags! { + /// Flags that we track on types. These flags are propagated upwards + /// through the type during type construction, so that we can quickly check + /// whether the type has various kinds of types in it without recursing + /// over the type itself. + pub struct TypeFlags: u32 { + // Does this have parameters? Used to determine whether substitution is + // required. + /// Does this have `Param`? + const HAS_TY_PARAM = 1 << 0; + /// Does this have `ReEarlyBound`? + const HAS_RE_PARAM = 1 << 1; + /// Does this have `ConstKind::Param`? + const HAS_CT_PARAM = 1 << 2; + + const HAS_PARAM = TypeFlags::HAS_TY_PARAM.bits + | TypeFlags::HAS_RE_PARAM.bits + | TypeFlags::HAS_CT_PARAM.bits; + + /// Does this have `Infer`? + const HAS_TY_INFER = 1 << 3; + /// Does this have `ReVar`? + const HAS_RE_INFER = 1 << 4; + /// Does this have `ConstKind::Infer`? + const HAS_CT_INFER = 1 << 5; + + /// Does this have inference variables? Used to determine whether + /// inference is required. + const HAS_INFER = TypeFlags::HAS_TY_INFER.bits + | TypeFlags::HAS_RE_INFER.bits + | TypeFlags::HAS_CT_INFER.bits; + + /// Does this have `Placeholder`? + const HAS_TY_PLACEHOLDER = 1 << 6; + /// Does this have `RePlaceholder`? + const HAS_RE_PLACEHOLDER = 1 << 7; + /// Does this have `ConstKind::Placeholder`? + const HAS_CT_PLACEHOLDER = 1 << 8; + + /// Does this have placeholders? + const HAS_PLACEHOLDER = TypeFlags::HAS_TY_PLACEHOLDER.bits + | TypeFlags::HAS_RE_PLACEHOLDER.bits + | TypeFlags::HAS_CT_PLACEHOLDER.bits; + + /// `true` if there are "names" of regions and so forth + /// that are local to a particular fn/inferctxt + const HAS_FREE_LOCAL_REGIONS = 1 << 9; + + /// `true` if there are "names" of types and regions and so forth + /// that are local to a particular fn + const HAS_FREE_LOCAL_NAMES = TypeFlags::HAS_TY_PARAM.bits + | TypeFlags::HAS_CT_PARAM.bits + | TypeFlags::HAS_TY_INFER.bits + | TypeFlags::HAS_CT_INFER.bits + | TypeFlags::HAS_TY_PLACEHOLDER.bits + | TypeFlags::HAS_CT_PLACEHOLDER.bits + // We consider 'freshened' types and constants + // to depend on a particular fn. + // The freshening process throws away information, + // which can make things unsuitable for use in a global + // cache. Note that there is no 'fresh lifetime' flag - + // freshening replaces all lifetimes with `ReErased`, + // which is different from how types/const are freshened. + | TypeFlags::HAS_TY_FRESH.bits + | TypeFlags::HAS_CT_FRESH.bits + | TypeFlags::HAS_FREE_LOCAL_REGIONS.bits + | TypeFlags::HAS_RE_ERASED.bits; + + /// Does this have `Projection`? + const HAS_TY_PROJECTION = 1 << 10; + /// Does this have `Inherent`? + const HAS_TY_INHERENT = 1 << 11; + /// Does this have `Opaque`? + const HAS_TY_OPAQUE = 1 << 12; + /// Does this have `ConstKind::Unevaluated`? + const HAS_CT_PROJECTION = 1 << 13; + + /// Could this type be normalized further? 
+ const HAS_PROJECTION = TypeFlags::HAS_TY_PROJECTION.bits + | TypeFlags::HAS_TY_OPAQUE.bits + | TypeFlags::HAS_TY_INHERENT.bits + | TypeFlags::HAS_CT_PROJECTION.bits; + + /// Is an error type/const reachable? + const HAS_ERROR = 1 << 14; + + /// Does this have any region that "appears free" in the type? + /// Basically anything but `ReLateBound` and `ReErased`. + const HAS_FREE_REGIONS = 1 << 15; + + /// Does this have any `ReLateBound` regions? + const HAS_RE_LATE_BOUND = 1 << 16; + /// Does this have any `Bound` types? + const HAS_TY_LATE_BOUND = 1 << 17; + /// Does this have any `ConstKind::Bound` consts? + const HAS_CT_LATE_BOUND = 1 << 18; + /// Does this have any bound variables? + /// Used to check if a global bound is safe to evaluate. + const HAS_LATE_BOUND = TypeFlags::HAS_RE_LATE_BOUND.bits + | TypeFlags::HAS_TY_LATE_BOUND.bits + | TypeFlags::HAS_CT_LATE_BOUND.bits; + + /// Does this have any `ReErased` regions? + const HAS_RE_ERASED = 1 << 19; + + /// Does this value have parameters/placeholders/inference variables which could be + /// replaced later, in a way that would change the results of `impl` specialization? + const STILL_FURTHER_SPECIALIZABLE = 1 << 20; + + /// Does this value have `InferTy::FreshTy/FreshIntTy/FreshFloatTy`? + const HAS_TY_FRESH = 1 << 21; + + /// Does this value have `InferConst::Fresh`? + const HAS_CT_FRESH = 1 << 22; + + /// Does this have `Coroutine` or `CoroutineWitness`? + const HAS_TY_COROUTINE = 1 << 23; + + /// Does this have any binders with bound vars (e.g. that need to be anonymized)? + const HAS_BINDER_VARS = 1 << 24; + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/fold.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/fold.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/fold.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/fold.rs 2023-12-21 16:55:28.000000000 +0000 @@ -44,6 +44,11 @@ //! - ty.super_fold_with(folder) //! - u.fold_with(folder) //! ``` + +use rustc_data_structures::sync::Lrc; +use rustc_index::{Idx, IndexVec}; +use std::mem; + use crate::{visit::TypeVisitable, Interner}; /// This trait is implemented for every type that can be folded, @@ -242,3 +247,101 @@ Ok(self.fold_predicate(p)) } } + +/////////////////////////////////////////////////////////////////////////// +// Traversal implementations. + +impl, U: TypeFoldable> TypeFoldable for (T, U) { + fn try_fold_with>(self, folder: &mut F) -> Result<(T, U), F::Error> { + Ok((self.0.try_fold_with(folder)?, self.1.try_fold_with(folder)?)) + } +} + +impl, B: TypeFoldable, C: TypeFoldable> TypeFoldable + for (A, B, C) +{ + fn try_fold_with>( + self, + folder: &mut F, + ) -> Result<(A, B, C), F::Error> { + Ok(( + self.0.try_fold_with(folder)?, + self.1.try_fold_with(folder)?, + self.2.try_fold_with(folder)?, + )) + } +} + +impl> TypeFoldable for Option { + fn try_fold_with>(self, folder: &mut F) -> Result { + Ok(match self { + Some(v) => Some(v.try_fold_with(folder)?), + None => None, + }) + } +} + +impl, E: TypeFoldable> TypeFoldable for Result { + fn try_fold_with>(self, folder: &mut F) -> Result { + Ok(match self { + Ok(v) => Ok(v.try_fold_with(folder)?), + Err(e) => Err(e.try_fold_with(folder)?), + }) + } +} + +impl> TypeFoldable for Lrc { + fn try_fold_with>(mut self, folder: &mut F) -> Result { + // We merely want to replace the contained `T`, if at all possible, + // so that we don't needlessly allocate a new `Lrc` or indeed clone + // the contained type. 
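The `TypeFlags` above are computed once when a type is constructed and propagated upward, so that questions like "does this type contain inference variables?" are a single bit test rather than a recursive walk. A minimal illustration of that idea using plain `u32` constants follows; the real code uses the `bitflags!` macro and many more flags, and the names here are only borrowed for the sketch.

```rust
// Hand-rolled stand-ins for a few of the flags.
const HAS_TY_INFER: u32 = 1 << 3;
const HAS_RE_INFER: u32 = 1 << 4;
const HAS_CT_INFER: u32 = 1 << 5;
const HAS_INFER: u32 = HAS_TY_INFER | HAS_RE_INFER | HAS_CT_INFER;

// A toy "type" that caches the union of its children's flags.
struct ToyTy {
    flags: u32,
}

impl ToyTy {
    fn leaf(flags: u32) -> Self {
        ToyTy { flags }
    }

    // When a compound type is constructed, union the children's flags
    // so later queries never need to recurse into the type.
    fn tuple(parts: &[ToyTy]) -> Self {
        ToyTy { flags: parts.iter().fold(0, |acc, t| acc | t.flags) }
    }

    fn needs_infer(&self) -> bool {
        self.flags & HAS_INFER != 0
    }
}

fn main() {
    let concrete = ToyTy::leaf(0);
    let infer_var = ToyTy::leaf(HAS_TY_INFER);
    let pair = ToyTy::tuple(&[concrete, infer_var]);
    assert!(pair.needs_infer());
    println!("pair needs inference: {}", pair.needs_infer());
}
```

Because the union is computed once per constructed type, `needs_infer`-style queries stay O(1) no matter how deeply nested the type is.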
+ unsafe { + // First step is to ensure that we have a unique reference to + // the contained type, which `Lrc::make_mut` will accomplish (by + // allocating a new `Lrc` and cloning the `T` only if required). + // This is done *before* casting to `Lrc>` so that + // panicking during `make_mut` does not leak the `T`. + Lrc::make_mut(&mut self); + + // Casting to `Lrc>` is safe because `ManuallyDrop` + // is `repr(transparent)`. + let ptr = Lrc::into_raw(self).cast::>(); + let mut unique = Lrc::from_raw(ptr); + + // Call to `Lrc::make_mut` above guarantees that `unique` is the + // sole reference to the contained value, so we can avoid doing + // a checked `get_mut` here. + let slot = Lrc::get_mut_unchecked(&mut unique); + + // Semantically move the contained type out from `unique`, fold + // it, then move the folded value back into `unique`. Should + // folding fail, `ManuallyDrop` ensures that the "moved-out" + // value is not re-dropped. + let owned = mem::ManuallyDrop::take(slot); + let folded = owned.try_fold_with(folder)?; + *slot = mem::ManuallyDrop::new(folded); + + // Cast back to `Lrc`. + Ok(Lrc::from_raw(Lrc::into_raw(unique).cast())) + } + } +} + +impl> TypeFoldable for Box { + fn try_fold_with>(mut self, folder: &mut F) -> Result { + *self = (*self).try_fold_with(folder)?; + Ok(self) + } +} + +impl> TypeFoldable for Vec { + fn try_fold_with>(self, folder: &mut F) -> Result { + self.into_iter().map(|t| t.try_fold_with(folder)).collect() + } +} + +impl, Ix: Idx> TypeFoldable for IndexVec { + fn try_fold_with>(self, folder: &mut F) -> Result { + self.raw.try_fold_with(folder).map(IndexVec::from_raw) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/interner.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/interner.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/interner.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/interner.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,162 @@ +use smallvec::SmallVec; +use std::fmt::Debug; +use std::hash::Hash; + +use crate::{DebugWithInfcx, Mutability}; + +pub trait Interner: Sized { + type DefId: Clone + Debug + Hash + Ord; + type AdtDef: Clone + Debug + Hash + Ord; + + type GenericArgs: Clone + + DebugWithInfcx + + Hash + + Ord + + IntoIterator; + type GenericArg: Clone + DebugWithInfcx + Hash + Ord; + type Term: Clone + Debug + Hash + Ord; + + type Binder; + type TypeAndMut: Clone + Debug + Hash + Ord; + type CanonicalVars: Clone + Debug + Hash + Eq; + + // Kinds of tys + type Ty: Clone + DebugWithInfcx + Hash + Ord; + type Tys: Clone + Debug + Hash + Ord + IntoIterator; + type AliasTy: Clone + DebugWithInfcx + Hash + Ord; + type ParamTy: Clone + Debug + Hash + Ord; + type BoundTy: Clone + Debug + Hash + Ord; + type PlaceholderTy: Clone + Debug + Hash + Ord; + type InferTy: Clone + DebugWithInfcx + Hash + Ord; + + // Things stored inside of tys + type ErrorGuaranteed: Clone + Debug + Hash + Ord; + type BoundExistentialPredicates: Clone + DebugWithInfcx + Hash + Ord; + type PolyFnSig: Clone + DebugWithInfcx + Hash + Ord; + type AllocId: Clone + Debug + Hash + Ord; + + // Kinds of consts + type Const: Clone + DebugWithInfcx + Hash + Ord; + type InferConst: Clone + DebugWithInfcx + Hash + Ord; + type AliasConst: Clone + DebugWithInfcx + Hash + Ord; + type PlaceholderConst: Clone + Debug + Hash + Ord; + type ParamConst: Clone + Debug + Hash + Ord; + type BoundConst: Clone + Debug + Hash + Ord; + type ValueConst: Clone + Debug + Hash + 
Ord; + type ExprConst: Clone + DebugWithInfcx + Hash + Ord; + + // Kinds of regions + type Region: Clone + DebugWithInfcx + Hash + Ord; + type EarlyBoundRegion: Clone + Debug + Hash + Ord; + type BoundRegion: Clone + Debug + Hash + Ord; + type FreeRegion: Clone + Debug + Hash + Ord; + type InferRegion: Clone + DebugWithInfcx + Hash + Ord; + type PlaceholderRegion: Clone + Debug + Hash + Ord; + + // Predicates + type Predicate: Clone + Debug + Hash + Eq; + type TraitPredicate: Clone + Debug + Hash + Eq; + type RegionOutlivesPredicate: Clone + Debug + Hash + Eq; + type TypeOutlivesPredicate: Clone + Debug + Hash + Eq; + type ProjectionPredicate: Clone + Debug + Hash + Eq; + type SubtypePredicate: Clone + Debug + Hash + Eq; + type CoercePredicate: Clone + Debug + Hash + Eq; + type ClosureKind: Clone + Debug + Hash + Eq; + + fn ty_and_mut_to_parts(ty_and_mut: Self::TypeAndMut) -> (Self::Ty, Mutability); +} + +/// Imagine you have a function `F: FnOnce(&[T]) -> R`, plus an iterator `iter` +/// that produces `T` items. You could combine them with +/// `f(&iter.collect::>())`, but this requires allocating memory for the +/// `Vec`. +/// +/// This trait allows for faster implementations, intended for cases where the +/// number of items produced by the iterator is small. There is a blanket impl +/// for `T` items, but there is also a fallible impl for `Result` items. +pub trait CollectAndApply: Sized { + type Output; + + /// Produce a result of type `Self::Output` from `iter`. The result will + /// typically be produced by applying `f` on the elements produced by + /// `iter`, though this may not happen in some impls, e.g. if an error + /// occurred during iteration. + fn collect_and_apply(iter: I, f: F) -> Self::Output + where + I: Iterator, + F: FnOnce(&[T]) -> R; +} + +/// The blanket impl that always collects all elements and applies `f`. +impl CollectAndApply for T { + type Output = R; + + /// Equivalent to `f(&iter.collect::>())`. + fn collect_and_apply(mut iter: I, f: F) -> R + where + I: Iterator, + F: FnOnce(&[T]) -> R, + { + // This code is hot enough that it's worth specializing for the most + // common length lists, to avoid the overhead of `SmallVec` creation. + // Lengths 0, 1, and 2 typically account for ~95% of cases. If + // `size_hint` is incorrect a panic will occur via an `unwrap` or an + // `assert`. + match iter.size_hint() { + (0, Some(0)) => { + assert!(iter.next().is_none()); + f(&[]) + } + (1, Some(1)) => { + let t0 = iter.next().unwrap(); + assert!(iter.next().is_none()); + f(&[t0]) + } + (2, Some(2)) => { + let t0 = iter.next().unwrap(); + let t1 = iter.next().unwrap(); + assert!(iter.next().is_none()); + f(&[t0, t1]) + } + _ => f(&iter.collect::>()), + } + } +} + +/// A fallible impl that will fail, without calling `f`, if there are any +/// errors during collection. +impl CollectAndApply for Result { + type Output = Result; + + /// Equivalent to `Ok(f(&iter.collect::>>()?))`. + fn collect_and_apply(mut iter: I, f: F) -> Result + where + I: Iterator>, + F: FnOnce(&[T]) -> R, + { + // This code is hot enough that it's worth specializing for the most + // common length lists, to avoid the overhead of `SmallVec` creation. + // Lengths 0, 1, and 2 typically account for ~95% of cases. If + // `size_hint` is incorrect a panic will occur via an `unwrap` or an + // `assert`, unless a failure happens first, in which case the result + // will be an error anyway. 
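The `CollectAndApply` impls special-case iterators of length 0, 1, and 2 to avoid building a `SmallVec` on the hot path, falling back to collecting for anything longer. A hedged, self-contained sketch of the same length-specialization trick, using a plain `Vec` fallback instead of `SmallVec` and a free function instead of the trait:

```rust
// Illustrative only: apply `f` to the iterator's elements as a slice,
// skipping the collection step for the common lengths 0, 1, and 2.
fn with_slice<T, R>(mut iter: impl Iterator<Item = T>, f: impl FnOnce(&[T]) -> R) -> R {
    match iter.size_hint() {
        (0, Some(0)) => {
            assert!(iter.next().is_none());
            f(&[])
        }
        (1, Some(1)) => {
            let t0 = iter.next().unwrap();
            assert!(iter.next().is_none());
            f(&[t0])
        }
        (2, Some(2)) => {
            let t0 = iter.next().unwrap();
            let t1 = iter.next().unwrap();
            assert!(iter.next().is_none());
            f(&[t0, t1])
        }
        // Fallback: collect into a Vec (the real code uses a SmallVec).
        _ => f(&iter.collect::<Vec<_>>()),
    }
}

fn main() {
    let sum = with_slice([1, 2].into_iter(), |xs| xs.iter().sum::<i32>());
    assert_eq!(sum, 3);
    let joined = with_slice(["a", "b", "c"].into_iter(), |xs| xs.join("-"));
    println!("{sum} {joined}");
}
```

With an exact `size_hint`, the 0/1/2 arms never touch the heap; an inexact hint simply falls through to the collecting arm, which is the same behavior the fallible `Result` impl keeps while additionally short-circuiting on the first error.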
+ Ok(match iter.size_hint() { + (0, Some(0)) => { + assert!(iter.next().is_none()); + f(&[]) + } + (1, Some(1)) => { + let t0 = iter.next().unwrap()?; + assert!(iter.next().is_none()); + f(&[t0]) + } + (2, Some(2)) => { + let t0 = iter.next().unwrap()?; + let t1 = iter.next().unwrap()?; + assert!(iter.next().is_none()); + f(&[t0, t1]) + } + _ => f(&iter.collect::, _>>()?), + }) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,310 +1,55 @@ #![feature(associated_type_defaults)] #![feature(fmt_helpers_for_derive)] +#![feature(get_mut_unchecked)] #![feature(min_specialization)] #![feature(never_type)] +#![feature(new_uninit)] #![feature(rustc_attrs)] #![feature(unwrap_infallible)] #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] #![allow(internal_features)] +extern crate self as rustc_type_ir; + #[macro_use] extern crate bitflags; #[macro_use] extern crate rustc_macros; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; -use rustc_data_structures::unify::{EqUnifyValue, UnifyKey}; -use smallvec::SmallVec; use std::fmt; -use std::fmt::Debug; use std::hash::Hash; -use std::mem::discriminant; pub mod codec; pub mod fold; -pub mod sty; pub mod ty_info; +pub mod ty_kind; pub mod visit; #[macro_use] mod macros; -mod structural_impls; +mod canonical; +mod const_kind; +mod debug; +mod flags; +mod interner; +mod predicate_kind; +mod region_kind; +pub use canonical::*; pub use codec::*; -pub use structural_impls::{DebugWithInfcx, InferCtxtLike, OptWithInfcx}; -pub use sty::*; +pub use const_kind::*; +pub use debug::{DebugWithInfcx, InferCtxtLike, WithInfcx}; +pub use flags::*; +pub use interner::*; +pub use predicate_kind::*; +pub use region_kind::*; pub use ty_info::*; +pub use ty_kind::*; /// Needed so we can use #[derive(HashStable_Generic)] pub trait HashStableContext {} -pub trait Interner: Sized { - type AdtDef: Clone + Debug + Hash + Ord; - type GenericArgsRef: Clone - + DebugWithInfcx - + Hash - + Ord - + IntoIterator; - type GenericArg: Clone + DebugWithInfcx + Hash + Ord; - type DefId: Clone + Debug + Hash + Ord; - type Binder; - type Ty: Clone + DebugWithInfcx + Hash + Ord; - type Const: Clone + DebugWithInfcx + Hash + Ord; - type Region: Clone + DebugWithInfcx + Hash + Ord; - type Predicate; - type TypeAndMut: Clone + Debug + Hash + Ord; - type Mutability: Clone + Debug + Hash + Ord; - type Movability: Clone + Debug + Hash + Ord; - type PolyFnSig: Clone + DebugWithInfcx + Hash + Ord; - type ListBinderExistentialPredicate: Clone + DebugWithInfcx + Hash + Ord; - type BinderListTy: Clone + DebugWithInfcx + Hash + Ord; - type ListTy: Clone + Debug + Hash + Ord + IntoIterator; - type AliasTy: Clone + DebugWithInfcx + Hash + Ord; - type ParamTy: Clone + Debug + Hash + Ord; - type BoundTy: Clone + Debug + Hash + Ord; - type PlaceholderType: Clone + Debug + Hash + Ord; - type InferTy: Clone + DebugWithInfcx + Hash + Ord; - type ErrorGuaranteed: Clone + Debug + Hash + Ord; - type PredicateKind: Clone + Debug + Hash + PartialEq + Eq; - type AllocId: Clone + Debug + Hash + Ord; - - type InferConst: Clone + DebugWithInfcx + Hash + Ord; - type AliasConst: Clone + DebugWithInfcx + Hash + Ord; - type PlaceholderConst: Clone + Debug + Hash 
+ Ord; - type ParamConst: Clone + Debug + Hash + Ord; - type BoundConst: Clone + Debug + Hash + Ord; - type ValueConst: Clone + Debug + Hash + Ord; - type ExprConst: Clone + DebugWithInfcx + Hash + Ord; - - type EarlyBoundRegion: Clone + Debug + Hash + Ord; - type BoundRegion: Clone + Debug + Hash + Ord; - type FreeRegion: Clone + Debug + Hash + Ord; - type RegionVid: Clone + DebugWithInfcx + Hash + Ord; - type PlaceholderRegion: Clone + Debug + Hash + Ord; - - fn ty_and_mut_to_parts(ty_and_mut: Self::TypeAndMut) -> (Self::Ty, Self::Mutability); - fn mutability_is_mut(mutbl: Self::Mutability) -> bool; -} - -/// Imagine you have a function `F: FnOnce(&[T]) -> R`, plus an iterator `iter` -/// that produces `T` items. You could combine them with -/// `f(&iter.collect::>())`, but this requires allocating memory for the -/// `Vec`. -/// -/// This trait allows for faster implementations, intended for cases where the -/// number of items produced by the iterator is small. There is a blanket impl -/// for `T` items, but there is also a fallible impl for `Result` items. -pub trait CollectAndApply: Sized { - type Output; - - /// Produce a result of type `Self::Output` from `iter`. The result will - /// typically be produced by applying `f` on the elements produced by - /// `iter`, though this may not happen in some impls, e.g. if an error - /// occurred during iteration. - fn collect_and_apply(iter: I, f: F) -> Self::Output - where - I: Iterator, - F: FnOnce(&[T]) -> R; -} - -/// The blanket impl that always collects all elements and applies `f`. -impl CollectAndApply for T { - type Output = R; - - /// Equivalent to `f(&iter.collect::>())`. - fn collect_and_apply(mut iter: I, f: F) -> R - where - I: Iterator, - F: FnOnce(&[T]) -> R, - { - // This code is hot enough that it's worth specializing for the most - // common length lists, to avoid the overhead of `SmallVec` creation. - // Lengths 0, 1, and 2 typically account for ~95% of cases. If - // `size_hint` is incorrect a panic will occur via an `unwrap` or an - // `assert`. - match iter.size_hint() { - (0, Some(0)) => { - assert!(iter.next().is_none()); - f(&[]) - } - (1, Some(1)) => { - let t0 = iter.next().unwrap(); - assert!(iter.next().is_none()); - f(&[t0]) - } - (2, Some(2)) => { - let t0 = iter.next().unwrap(); - let t1 = iter.next().unwrap(); - assert!(iter.next().is_none()); - f(&[t0, t1]) - } - _ => f(&iter.collect::>()), - } - } -} - -/// A fallible impl that will fail, without calling `f`, if there are any -/// errors during collection. -impl CollectAndApply for Result { - type Output = Result; - - /// Equivalent to `Ok(f(&iter.collect::>>()?))`. - fn collect_and_apply(mut iter: I, f: F) -> Result - where - I: Iterator>, - F: FnOnce(&[T]) -> R, - { - // This code is hot enough that it's worth specializing for the most - // common length lists, to avoid the overhead of `SmallVec` creation. - // Lengths 0, 1, and 2 typically account for ~95% of cases. If - // `size_hint` is incorrect a panic will occur via an `unwrap` or an - // `assert`, unless a failure happens first, in which case the result - // will be an error anyway. - Ok(match iter.size_hint() { - (0, Some(0)) => { - assert!(iter.next().is_none()); - f(&[]) - } - (1, Some(1)) => { - let t0 = iter.next().unwrap()?; - assert!(iter.next().is_none()); - f(&[t0]) - } - (2, Some(2)) => { - let t0 = iter.next().unwrap()?; - let t1 = iter.next().unwrap()?; - assert!(iter.next().is_none()); - f(&[t0, t1]) - } - _ => f(&iter.collect::, _>>()?), - }) - } -} - -bitflags! 
{ - /// Flags that we track on types. These flags are propagated upwards - /// through the type during type construction, so that we can quickly check - /// whether the type has various kinds of types in it without recursing - /// over the type itself. - pub struct TypeFlags: u32 { - // Does this have parameters? Used to determine whether substitution is - // required. - /// Does this have `Param`? - const HAS_TY_PARAM = 1 << 0; - /// Does this have `ReEarlyBound`? - const HAS_RE_PARAM = 1 << 1; - /// Does this have `ConstKind::Param`? - const HAS_CT_PARAM = 1 << 2; - - const HAS_PARAM = TypeFlags::HAS_TY_PARAM.bits - | TypeFlags::HAS_RE_PARAM.bits - | TypeFlags::HAS_CT_PARAM.bits; - - /// Does this have `Infer`? - const HAS_TY_INFER = 1 << 3; - /// Does this have `ReVar`? - const HAS_RE_INFER = 1 << 4; - /// Does this have `ConstKind::Infer`? - const HAS_CT_INFER = 1 << 5; - - /// Does this have inference variables? Used to determine whether - /// inference is required. - const HAS_INFER = TypeFlags::HAS_TY_INFER.bits - | TypeFlags::HAS_RE_INFER.bits - | TypeFlags::HAS_CT_INFER.bits; - - /// Does this have `Placeholder`? - const HAS_TY_PLACEHOLDER = 1 << 6; - /// Does this have `RePlaceholder`? - const HAS_RE_PLACEHOLDER = 1 << 7; - /// Does this have `ConstKind::Placeholder`? - const HAS_CT_PLACEHOLDER = 1 << 8; - - /// Does this have placeholders? - const HAS_PLACEHOLDER = TypeFlags::HAS_TY_PLACEHOLDER.bits - | TypeFlags::HAS_RE_PLACEHOLDER.bits - | TypeFlags::HAS_CT_PLACEHOLDER.bits; - - /// `true` if there are "names" of regions and so forth - /// that are local to a particular fn/inferctxt - const HAS_FREE_LOCAL_REGIONS = 1 << 9; - - /// `true` if there are "names" of types and regions and so forth - /// that are local to a particular fn - const HAS_FREE_LOCAL_NAMES = TypeFlags::HAS_TY_PARAM.bits - | TypeFlags::HAS_CT_PARAM.bits - | TypeFlags::HAS_TY_INFER.bits - | TypeFlags::HAS_CT_INFER.bits - | TypeFlags::HAS_TY_PLACEHOLDER.bits - | TypeFlags::HAS_CT_PLACEHOLDER.bits - // We consider 'freshened' types and constants - // to depend on a particular fn. - // The freshening process throws away information, - // which can make things unsuitable for use in a global - // cache. Note that there is no 'fresh lifetime' flag - - // freshening replaces all lifetimes with `ReErased`, - // which is different from how types/const are freshened. - | TypeFlags::HAS_TY_FRESH.bits - | TypeFlags::HAS_CT_FRESH.bits - | TypeFlags::HAS_FREE_LOCAL_REGIONS.bits - | TypeFlags::HAS_RE_ERASED.bits; - - /// Does this have `Projection`? - const HAS_TY_PROJECTION = 1 << 10; - /// Does this have `Inherent`? - const HAS_TY_INHERENT = 1 << 11; - /// Does this have `Opaque`? - const HAS_TY_OPAQUE = 1 << 12; - /// Does this have `ConstKind::Unevaluated`? - const HAS_CT_PROJECTION = 1 << 13; - - /// Could this type be normalized further? - const HAS_PROJECTION = TypeFlags::HAS_TY_PROJECTION.bits - | TypeFlags::HAS_TY_OPAQUE.bits - | TypeFlags::HAS_TY_INHERENT.bits - | TypeFlags::HAS_CT_PROJECTION.bits; - - /// Is an error type/const reachable? - const HAS_ERROR = 1 << 14; - - /// Does this have any region that "appears free" in the type? - /// Basically anything but `ReLateBound` and `ReErased`. - const HAS_FREE_REGIONS = 1 << 15; - - /// Does this have any `ReLateBound` regions? - const HAS_RE_LATE_BOUND = 1 << 16; - /// Does this have any `Bound` types? - const HAS_TY_LATE_BOUND = 1 << 17; - /// Does this have any `ConstKind::Bound` consts? 
- const HAS_CT_LATE_BOUND = 1 << 18; - /// Does this have any bound variables? - /// Used to check if a global bound is safe to evaluate. - const HAS_LATE_BOUND = TypeFlags::HAS_RE_LATE_BOUND.bits - | TypeFlags::HAS_TY_LATE_BOUND.bits - | TypeFlags::HAS_CT_LATE_BOUND.bits; - - /// Does this have any `ReErased` regions? - const HAS_RE_ERASED = 1 << 19; - - /// Does this value have parameters/placeholders/inference variables which could be - /// replaced later, in a way that would change the results of `impl` specialization? - const STILL_FURTHER_SPECIALIZABLE = 1 << 20; - - /// Does this value have `InferTy::FreshTy/FreshIntTy/FreshFloatTy`? - const HAS_TY_FRESH = 1 << 21; - - /// Does this value have `InferConst::Fresh`? - const HAS_CT_FRESH = 1 << 22; - - /// Does this have `Generator` or `GeneratorWitness`? - const HAS_TY_GENERATOR = 1 << 23; - - /// Does this have any binders with bound vars (e.g. that need to be anonymized)? - const HAS_BINDER_VARS = 1 << 24; - } -} - rustc_index::newtype_index! { /// A [De Bruijn index][dbi] is a standard means of representing /// regions (and perhaps later types) in a higher-ranked setting. In @@ -428,259 +173,6 @@ } } -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[derive(Encodable, Decodable, HashStable_Generic)] -pub enum IntTy { - Isize, - I8, - I16, - I32, - I64, - I128, -} - -impl IntTy { - pub fn name_str(&self) -> &'static str { - match *self { - IntTy::Isize => "isize", - IntTy::I8 => "i8", - IntTy::I16 => "i16", - IntTy::I32 => "i32", - IntTy::I64 => "i64", - IntTy::I128 => "i128", - } - } - - pub fn bit_width(&self) -> Option { - Some(match *self { - IntTy::Isize => return None, - IntTy::I8 => 8, - IntTy::I16 => 16, - IntTy::I32 => 32, - IntTy::I64 => 64, - IntTy::I128 => 128, - }) - } - - pub fn normalize(&self, target_width: u32) -> Self { - match self { - IntTy::Isize => match target_width { - 16 => IntTy::I16, - 32 => IntTy::I32, - 64 => IntTy::I64, - _ => unreachable!(), - }, - _ => *self, - } - } - - pub fn to_unsigned(self) -> UintTy { - match self { - IntTy::Isize => UintTy::Usize, - IntTy::I8 => UintTy::U8, - IntTy::I16 => UintTy::U16, - IntTy::I32 => UintTy::U32, - IntTy::I64 => UintTy::U64, - IntTy::I128 => UintTy::U128, - } - } -} - -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Copy)] -#[derive(Encodable, Decodable, HashStable_Generic)] -pub enum UintTy { - Usize, - U8, - U16, - U32, - U64, - U128, -} - -impl UintTy { - pub fn name_str(&self) -> &'static str { - match *self { - UintTy::Usize => "usize", - UintTy::U8 => "u8", - UintTy::U16 => "u16", - UintTy::U32 => "u32", - UintTy::U64 => "u64", - UintTy::U128 => "u128", - } - } - - pub fn bit_width(&self) -> Option { - Some(match *self { - UintTy::Usize => return None, - UintTy::U8 => 8, - UintTy::U16 => 16, - UintTy::U32 => 32, - UintTy::U64 => 64, - UintTy::U128 => 128, - }) - } - - pub fn normalize(&self, target_width: u32) -> Self { - match self { - UintTy::Usize => match target_width { - 16 => UintTy::U16, - 32 => UintTy::U32, - 64 => UintTy::U64, - _ => unreachable!(), - }, - _ => *self, - } - } - - pub fn to_signed(self) -> IntTy { - match self { - UintTy::Usize => IntTy::Isize, - UintTy::U8 => IntTy::I8, - UintTy::U16 => IntTy::I16, - UintTy::U32 => IntTy::I32, - UintTy::U64 => IntTy::I64, - UintTy::U128 => IntTy::I128, - } - } -} - -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[derive(Encodable, Decodable, HashStable_Generic)] -pub enum FloatTy { - F32, - F64, -} - -impl FloatTy { - pub fn name_str(self) -> &'static str { - 
match self { - FloatTy::F32 => "f32", - FloatTy::F64 => "f64", - } - } - - pub fn bit_width(self) -> u64 { - match self { - FloatTy::F32 => 32, - FloatTy::F64 => 64, - } - } -} - -#[derive(Clone, Copy, PartialEq, Eq)] -pub enum IntVarValue { - IntType(IntTy), - UintType(UintTy), -} - -#[derive(Clone, Copy, PartialEq, Eq)] -pub struct FloatVarValue(pub FloatTy); - -rustc_index::newtype_index! { - /// A **ty**pe **v**ariable **ID**. - #[debug_format = "?{}t"] - pub struct TyVid {} -} - -rustc_index::newtype_index! { - /// An **int**egral (`u32`, `i32`, `usize`, etc.) type **v**ariable **ID**. - #[debug_format = "?{}i"] - pub struct IntVid {} -} - -rustc_index::newtype_index! { - /// A **float**ing-point (`f32` or `f64`) type **v**ariable **ID**. - #[debug_format = "?{}f"] - pub struct FloatVid {} -} - -/// A placeholder for a type that hasn't been inferred yet. -/// -/// E.g., if we have an empty array (`[]`), then we create a fresh -/// type variable for the element type since we won't know until it's -/// used what the element type is supposed to be. -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)] -pub enum InferTy { - /// A type variable. - TyVar(TyVid), - /// An integral type variable (`{integer}`). - /// - /// These are created when the compiler sees an integer literal like - /// `1` that could be several different types (`u8`, `i32`, `u32`, etc.). - /// We don't know until it's used what type it's supposed to be, so - /// we create a fresh type variable. - IntVar(IntVid), - /// A floating-point type variable (`{float}`). - /// - /// These are created when the compiler sees an float literal like - /// `1.0` that could be either an `f32` or an `f64`. - /// We don't know until it's used what type it's supposed to be, so - /// we create a fresh type variable. - FloatVar(FloatVid), - - /// A [`FreshTy`][Self::FreshTy] is one that is generated as a replacement - /// for an unbound type variable. This is convenient for caching etc. See - /// `rustc_infer::infer::freshen` for more details. - /// - /// Compare with [`TyVar`][Self::TyVar]. - FreshTy(u32), - /// Like [`FreshTy`][Self::FreshTy], but as a replacement for [`IntVar`][Self::IntVar]. - FreshIntTy(u32), - /// Like [`FreshTy`][Self::FreshTy], but as a replacement for [`FloatVar`][Self::FloatVar]. - FreshFloatTy(u32), -} - -/// Raw `TyVid` are used as the unification key for `sub_relations`; -/// they carry no values. -impl UnifyKey for TyVid { - type Value = (); - #[inline] - fn index(&self) -> u32 { - self.as_u32() - } - #[inline] - fn from_index(i: u32) -> TyVid { - TyVid::from_u32(i) - } - fn tag() -> &'static str { - "TyVid" - } -} - -impl EqUnifyValue for IntVarValue {} - -impl UnifyKey for IntVid { - type Value = Option; - #[inline] // make this function eligible for inlining - it is quite hot. 
- fn index(&self) -> u32 { - self.as_u32() - } - #[inline] - fn from_index(i: u32) -> IntVid { - IntVid::from_u32(i) - } - fn tag() -> &'static str { - "IntVid" - } -} - -impl EqUnifyValue for FloatVarValue {} - -impl UnifyKey for FloatVid { - type Value = Option; - #[inline] - fn index(&self) -> u32 { - self.as_u32() - } - #[inline] - fn from_index(i: u32) -> FloatVid { - FloatVid::from_u32(i) - } - fn tag() -> &'static str { - "FloatVid" - } -} - #[derive(Copy, Clone, PartialEq, Eq, Decodable, Encodable, Hash, HashStable_Generic)] #[rustc_pass_by_value] pub enum Variance { @@ -750,34 +242,6 @@ } } -impl HashStable for InferTy { - fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { - use InferTy::*; - discriminant(self).hash_stable(ctx, hasher); - match self { - TyVar(_) | IntVar(_) | FloatVar(_) => { - panic!("type variables should not be hashed: {self:?}") - } - FreshTy(v) | FreshIntTy(v) | FreshFloatTy(v) => v.hash_stable(ctx, hasher), - } - } -} - -impl fmt::Debug for IntVarValue { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - IntVarValue::IntType(ref v) => v.fmt(f), - IntVarValue::UintType(ref v) => v.fmt(f), - } - } -} - -impl fmt::Debug for FloatVarValue { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - impl fmt::Debug for Variance { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(match *self { @@ -789,20 +253,6 @@ } } -impl fmt::Display for InferTy { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - use InferTy::*; - match *self { - TyVar(_) => write!(f, "_"), - IntVar(_) => write!(f, "{}", "{integer}"), - FloatVar(_) => write!(f, "{}", "{float}"), - FreshTy(v) => write!(f, "FreshTy({v})"), - FreshIntTy(v) => write!(f, "FreshIntTy({v})"), - FreshFloatTy(v) => write!(f, "FreshFloatTy({v})"), - } - } -} - rustc_index::newtype_index! { /// "Universes" are used during type- and trait-checking in the /// presence of `for<..>` binders to control what sets of names are diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/macros.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/macros.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/macros.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/macros.rs 2023-12-21 16:55:28.000000000 +0000 @@ -33,3 +33,22 @@ )+ }; } + +/////////////////////////////////////////////////////////////////////////// +// Atomic structs +// +// For things that don't carry any arena-allocated data (and are +// copy...), just add them to this list. + +TrivialTypeTraversalImpls! 
{ + (), + bool, + usize, + u16, + u32, + u64, + String, + crate::DebruijnIndex, + crate::AliasRelationDirection, + crate::UniverseIndex, +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/predicate_kind.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/predicate_kind.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/predicate_kind.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/predicate_kind.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,416 @@ +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use std::fmt; +use std::ops::ControlFlow; + +use crate::fold::{FallibleTypeFolder, TypeFoldable}; +use crate::visit::{TypeVisitable, TypeVisitor}; +use crate::{HashStableContext, Interner}; + +/// A clause is something that can appear in where bounds or be inferred +/// by implied bounds. +#[derive(derivative::Derivative)] +#[derivative(Clone(bound = ""), Hash(bound = ""))] +#[derive(TyEncodable, TyDecodable)] +pub enum ClauseKind { + /// Corresponds to `where Foo: Bar`. `Foo` here would be + /// the `Self` type of the trait reference and `A`, `B`, and `C` + /// would be the type parameters. + Trait(I::TraitPredicate), + + /// `where 'a: 'b` + RegionOutlives(I::RegionOutlivesPredicate), + + /// `where T: 'a` + TypeOutlives(I::TypeOutlivesPredicate), + + /// `where ::Name == X`, approximately. + /// See the `ProjectionPredicate` struct for details. + Projection(I::ProjectionPredicate), + + /// Ensures that a const generic argument to a parameter `const N: u8` + /// is of type `u8`. + ConstArgHasType(I::Const, I::Ty), + + /// No syntax: `T` well-formed. + WellFormed(I::GenericArg), + + /// Constant initializer must evaluate successfully. 
+ ConstEvaluatable(I::Const), +} + +impl Copy for ClauseKind +where + I::Ty: Copy, + I::Const: Copy, + I::GenericArg: Copy, + I::TraitPredicate: Copy, + I::ProjectionPredicate: Copy, + I::TypeOutlivesPredicate: Copy, + I::RegionOutlivesPredicate: Copy, +{ +} + +impl PartialEq for ClauseKind { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Trait(l0), Self::Trait(r0)) => l0 == r0, + (Self::RegionOutlives(l0), Self::RegionOutlives(r0)) => l0 == r0, + (Self::TypeOutlives(l0), Self::TypeOutlives(r0)) => l0 == r0, + (Self::Projection(l0), Self::Projection(r0)) => l0 == r0, + (Self::ConstArgHasType(l0, l1), Self::ConstArgHasType(r0, r1)) => l0 == r0 && l1 == r1, + (Self::WellFormed(l0), Self::WellFormed(r0)) => l0 == r0, + (Self::ConstEvaluatable(l0), Self::ConstEvaluatable(r0)) => l0 == r0, + _ => false, + } + } +} + +impl Eq for ClauseKind {} + +fn clause_kind_discriminant(value: &ClauseKind) -> usize { + match value { + ClauseKind::Trait(_) => 0, + ClauseKind::RegionOutlives(_) => 1, + ClauseKind::TypeOutlives(_) => 2, + ClauseKind::Projection(_) => 3, + ClauseKind::ConstArgHasType(_, _) => 4, + ClauseKind::WellFormed(_) => 5, + ClauseKind::ConstEvaluatable(_) => 6, + } +} + +impl HashStable for ClauseKind +where + I::Ty: HashStable, + I::Const: HashStable, + I::GenericArg: HashStable, + I::TraitPredicate: HashStable, + I::ProjectionPredicate: HashStable, + I::TypeOutlivesPredicate: HashStable, + I::RegionOutlivesPredicate: HashStable, +{ + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { + clause_kind_discriminant(self).hash_stable(hcx, hasher); + match self { + ClauseKind::Trait(p) => p.hash_stable(hcx, hasher), + ClauseKind::RegionOutlives(p) => p.hash_stable(hcx, hasher), + ClauseKind::TypeOutlives(p) => p.hash_stable(hcx, hasher), + ClauseKind::Projection(p) => p.hash_stable(hcx, hasher), + ClauseKind::ConstArgHasType(c, t) => { + c.hash_stable(hcx, hasher); + t.hash_stable(hcx, hasher); + } + ClauseKind::WellFormed(t) => t.hash_stable(hcx, hasher), + ClauseKind::ConstEvaluatable(c) => c.hash_stable(hcx, hasher), + } + } +} + +impl TypeFoldable for ClauseKind +where + I::Ty: TypeFoldable, + I::Const: TypeFoldable, + I::GenericArg: TypeFoldable, + I::TraitPredicate: TypeFoldable, + I::ProjectionPredicate: TypeFoldable, + I::TypeOutlivesPredicate: TypeFoldable, + I::RegionOutlivesPredicate: TypeFoldable, +{ + fn try_fold_with>(self, folder: &mut F) -> Result { + Ok(match self { + ClauseKind::Trait(p) => ClauseKind::Trait(p.try_fold_with(folder)?), + ClauseKind::RegionOutlives(p) => ClauseKind::RegionOutlives(p.try_fold_with(folder)?), + ClauseKind::TypeOutlives(p) => ClauseKind::TypeOutlives(p.try_fold_with(folder)?), + ClauseKind::Projection(p) => ClauseKind::Projection(p.try_fold_with(folder)?), + ClauseKind::ConstArgHasType(c, t) => { + ClauseKind::ConstArgHasType(c.try_fold_with(folder)?, t.try_fold_with(folder)?) + } + ClauseKind::WellFormed(p) => ClauseKind::WellFormed(p.try_fold_with(folder)?), + ClauseKind::ConstEvaluatable(p) => { + ClauseKind::ConstEvaluatable(p.try_fold_with(folder)?) 
+ } + }) + } +} + +impl TypeVisitable for ClauseKind +where + I::Ty: TypeVisitable, + I::Const: TypeVisitable, + I::GenericArg: TypeVisitable, + I::TraitPredicate: TypeVisitable, + I::ProjectionPredicate: TypeVisitable, + I::TypeOutlivesPredicate: TypeVisitable, + I::RegionOutlivesPredicate: TypeVisitable, +{ + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + match self { + ClauseKind::Trait(p) => p.visit_with(visitor), + ClauseKind::RegionOutlives(p) => p.visit_with(visitor), + ClauseKind::TypeOutlives(p) => p.visit_with(visitor), + ClauseKind::Projection(p) => p.visit_with(visitor), + ClauseKind::ConstArgHasType(c, t) => { + c.visit_with(visitor)?; + t.visit_with(visitor) + } + ClauseKind::WellFormed(p) => p.visit_with(visitor), + ClauseKind::ConstEvaluatable(p) => p.visit_with(visitor), + } + } +} + +#[derive(derivative::Derivative)] +#[derivative(Clone(bound = ""), Hash(bound = ""))] +#[derive(TyEncodable, TyDecodable)] +pub enum PredicateKind { + /// Prove a clause + Clause(ClauseKind), + + /// Trait must be object-safe. + ObjectSafe(I::DefId), + + /// No direct syntax. May be thought of as `where T: FnFoo<...>` + /// for some generic args `...` and `T` being a closure type. + /// Satisfied (or refuted) once we know the closure's kind. + ClosureKind(I::DefId, I::GenericArgs, I::ClosureKind), + + /// `T1 <: T2` + /// + /// This obligation is created most often when we have two + /// unresolved type variables and hence don't have enough + /// information to process the subtyping obligation yet. + Subtype(I::SubtypePredicate), + + /// `T1` coerced to `T2` + /// + /// Like a subtyping obligation, this is created most often + /// when we have two unresolved type variables and hence + /// don't have enough information to process the coercion + /// obligation yet. At the moment, we actually process coercions + /// very much like subtyping and don't handle the full coercion + /// logic. + Coerce(I::CoercePredicate), + + /// Constants must be equal. The first component is the const that is expected. + ConstEquate(I::Const, I::Const), + + /// A marker predicate that is always ambiguous. + /// Used for coherence to mark opaque types as possibly equal to each other but ambiguous. + Ambiguous, + + /// Separate from `ClauseKind::Projection` which is used for normalization in new solver. + /// This predicate requires two terms to be equal to eachother. 
+ /// + /// Only used for new solver + AliasRelate(I::Term, I::Term, AliasRelationDirection), +} + +impl Copy for PredicateKind +where + I::DefId: Copy, + I::Const: Copy, + I::GenericArgs: Copy, + I::Term: Copy, + I::CoercePredicate: Copy, + I::SubtypePredicate: Copy, + I::ClosureKind: Copy, + ClauseKind: Copy, +{ +} + +impl PartialEq for PredicateKind { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Clause(l0), Self::Clause(r0)) => l0 == r0, + (Self::ObjectSafe(l0), Self::ObjectSafe(r0)) => l0 == r0, + (Self::ClosureKind(l0, l1, l2), Self::ClosureKind(r0, r1, r2)) => { + l0 == r0 && l1 == r1 && l2 == r2 + } + (Self::Subtype(l0), Self::Subtype(r0)) => l0 == r0, + (Self::Coerce(l0), Self::Coerce(r0)) => l0 == r0, + (Self::ConstEquate(l0, l1), Self::ConstEquate(r0, r1)) => l0 == r0 && l1 == r1, + (Self::AliasRelate(l0, l1, l2), Self::AliasRelate(r0, r1, r2)) => { + l0 == r0 && l1 == r1 && l2 == r2 + } + _ => core::mem::discriminant(self) == core::mem::discriminant(other), + } + } +} + +impl Eq for PredicateKind {} + +fn predicate_kind_discriminant(value: &PredicateKind) -> usize { + match value { + PredicateKind::Clause(_) => 0, + PredicateKind::ObjectSafe(_) => 1, + PredicateKind::ClosureKind(_, _, _) => 2, + PredicateKind::Subtype(_) => 3, + PredicateKind::Coerce(_) => 4, + PredicateKind::ConstEquate(_, _) => 5, + PredicateKind::Ambiguous => 6, + PredicateKind::AliasRelate(_, _, _) => 7, + } +} + +impl HashStable for PredicateKind +where + I::DefId: HashStable, + I::Const: HashStable, + I::GenericArgs: HashStable, + I::Term: HashStable, + I::CoercePredicate: HashStable, + I::SubtypePredicate: HashStable, + I::ClosureKind: HashStable, + ClauseKind: HashStable, +{ + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { + predicate_kind_discriminant(self).hash_stable(hcx, hasher); + match self { + PredicateKind::Clause(p) => p.hash_stable(hcx, hasher), + PredicateKind::ObjectSafe(d) => d.hash_stable(hcx, hasher), + PredicateKind::ClosureKind(d, g, k) => { + d.hash_stable(hcx, hasher); + g.hash_stable(hcx, hasher); + k.hash_stable(hcx, hasher); + } + PredicateKind::Subtype(p) => p.hash_stable(hcx, hasher), + PredicateKind::Coerce(p) => p.hash_stable(hcx, hasher), + PredicateKind::ConstEquate(c1, c2) => { + c1.hash_stable(hcx, hasher); + c2.hash_stable(hcx, hasher); + } + PredicateKind::Ambiguous => {} + PredicateKind::AliasRelate(t1, t2, r) => { + t1.hash_stable(hcx, hasher); + t2.hash_stable(hcx, hasher); + r.hash_stable(hcx, hasher); + } + } + } +} + +impl TypeFoldable for PredicateKind +where + I::DefId: TypeFoldable, + I::Const: TypeFoldable, + I::GenericArgs: TypeFoldable, + I::Term: TypeFoldable, + I::CoercePredicate: TypeFoldable, + I::SubtypePredicate: TypeFoldable, + I::ClosureKind: TypeFoldable, + ClauseKind: TypeFoldable, +{ + fn try_fold_with>(self, folder: &mut F) -> Result { + Ok(match self { + PredicateKind::Clause(c) => PredicateKind::Clause(c.try_fold_with(folder)?), + PredicateKind::ObjectSafe(d) => PredicateKind::ObjectSafe(d.try_fold_with(folder)?), + PredicateKind::ClosureKind(d, g, k) => PredicateKind::ClosureKind( + d.try_fold_with(folder)?, + g.try_fold_with(folder)?, + k.try_fold_with(folder)?, + ), + PredicateKind::Subtype(s) => PredicateKind::Subtype(s.try_fold_with(folder)?), + PredicateKind::Coerce(s) => PredicateKind::Coerce(s.try_fold_with(folder)?), + PredicateKind::ConstEquate(a, b) => { + PredicateKind::ConstEquate(a.try_fold_with(folder)?, b.try_fold_with(folder)?) 
+ } + PredicateKind::Ambiguous => PredicateKind::Ambiguous, + PredicateKind::AliasRelate(a, b, d) => PredicateKind::AliasRelate( + a.try_fold_with(folder)?, + b.try_fold_with(folder)?, + d.try_fold_with(folder)?, + ), + }) + } +} + +impl TypeVisitable for PredicateKind +where + I::DefId: TypeVisitable, + I::Const: TypeVisitable, + I::GenericArgs: TypeVisitable, + I::Term: TypeVisitable, + I::CoercePredicate: TypeVisitable, + I::SubtypePredicate: TypeVisitable, + I::ClosureKind: TypeVisitable, + ClauseKind: TypeVisitable, +{ + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + match self { + PredicateKind::Clause(p) => p.visit_with(visitor), + PredicateKind::ObjectSafe(d) => d.visit_with(visitor), + PredicateKind::ClosureKind(d, g, k) => { + d.visit_with(visitor)?; + g.visit_with(visitor)?; + k.visit_with(visitor) + } + PredicateKind::Subtype(s) => s.visit_with(visitor), + PredicateKind::Coerce(s) => s.visit_with(visitor), + PredicateKind::ConstEquate(a, b) => { + a.visit_with(visitor)?; + b.visit_with(visitor) + } + PredicateKind::Ambiguous => ControlFlow::Continue(()), + PredicateKind::AliasRelate(a, b, d) => { + a.visit_with(visitor)?; + b.visit_with(visitor)?; + d.visit_with(visitor) + } + } + } +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Copy)] +#[derive(HashStable_Generic, Encodable, Decodable)] +pub enum AliasRelationDirection { + Equate, + Subtype, +} + +impl std::fmt::Display for AliasRelationDirection { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + AliasRelationDirection::Equate => write!(f, "=="), + AliasRelationDirection::Subtype => write!(f, "<:"), + } + } +} + +// FIXME: Convert to DebugWithInfcx impl +impl fmt::Debug for ClauseKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ClauseKind::ConstArgHasType(ct, ty) => write!(f, "ConstArgHasType({ct:?}, {ty:?})"), + ClauseKind::Trait(a) => a.fmt(f), + ClauseKind::RegionOutlives(pair) => pair.fmt(f), + ClauseKind::TypeOutlives(pair) => pair.fmt(f), + ClauseKind::Projection(pair) => pair.fmt(f), + ClauseKind::WellFormed(data) => write!(f, "WellFormed({data:?})"), + ClauseKind::ConstEvaluatable(ct) => { + write!(f, "ConstEvaluatable({ct:?})") + } + } + } +} + +// FIXME: Convert to DebugWithInfcx impl +impl fmt::Debug for PredicateKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + PredicateKind::Clause(a) => a.fmt(f), + PredicateKind::Subtype(pair) => pair.fmt(f), + PredicateKind::Coerce(pair) => pair.fmt(f), + PredicateKind::ObjectSafe(trait_def_id) => { + write!(f, "ObjectSafe({trait_def_id:?})") + } + PredicateKind::ClosureKind(closure_def_id, closure_args, kind) => { + write!(f, "ClosureKind({closure_def_id:?}, {closure_args:?}, {kind:?})") + } + PredicateKind::ConstEquate(c1, c2) => write!(f, "ConstEquate({c1:?}, {c2:?})"), + PredicateKind::Ambiguous => write!(f, "Ambiguous"), + PredicateKind::AliasRelate(t1, t2, dir) => { + write!(f, "AliasRelate({t1:?}, {dir:?}, {t2:?})") + } + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/region_kind.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/region_kind.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/region_kind.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/region_kind.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,279 @@ +use rustc_data_structures::stable_hasher::HashStable; +use 
rustc_data_structures::stable_hasher::StableHasher; +use std::fmt; + +use crate::{DebruijnIndex, DebugWithInfcx, HashStableContext, InferCtxtLike, Interner, WithInfcx}; + +use self::RegionKind::*; + +/// Representation of regions. Note that the NLL checker uses a distinct +/// representation of regions. For this reason, it internally replaces all the +/// regions with inference variables -- the index of the variable is then used +/// to index into internal NLL data structures. See `rustc_const_eval::borrow_check` +/// module for more information. +/// +/// Note: operations are on the wrapper `Region` type, which is interned, +/// rather than this type. +/// +/// ## The Region lattice within a given function +/// +/// In general, the region lattice looks like +/// +/// ```text +/// static ----------+-----...------+ (greatest) +/// | | | +/// early-bound and | | +/// free regions | | +/// | | | +/// | | | +/// empty(root) placeholder(U1) | +/// | / | +/// | / placeholder(Un) +/// empty(U1) -- / +/// | / +/// ... / +/// | / +/// empty(Un) -------- (smallest) +/// ``` +/// +/// Early-bound/free regions are the named lifetimes in scope from the +/// function declaration. They have relationships to one another +/// determined based on the declared relationships from the +/// function. +/// +/// Note that inference variables and bound regions are not included +/// in this diagram. In the case of inference variables, they should +/// be inferred to some other region from the diagram. In the case of +/// bound regions, they are excluded because they don't make sense to +/// include -- the diagram indicates the relationship between free +/// regions. +/// +/// ## Inference variables +/// +/// During region inference, we sometimes create inference variables, +/// represented as `ReVar`. These will be inferred by the code in +/// `infer::lexical_region_resolve` to some free region from the +/// lattice above (the minimal region that meets the +/// constraints). +/// +/// During NLL checking, where regions are defined differently, we +/// also use `ReVar` -- in that case, the index is used to index into +/// the NLL region checker's data structures. The variable may in fact +/// represent either a free region or an inference variable, in that +/// case. +/// +/// ## Bound Regions +/// +/// These are regions that are stored behind a binder and must be substituted +/// with some concrete region before being used. There are two kind of +/// bound regions: early-bound, which are bound in an item's `Generics`, +/// and are substituted by an `GenericArgs`, and late-bound, which are part of +/// higher-ranked types (e.g., `for<'a> fn(&'a ())`), and are substituted by +/// the likes of `liberate_late_bound_regions`. The distinction exists +/// because higher-ranked lifetimes aren't supported in all places. See [1][2]. +/// +/// Unlike `Param`s, bound regions are not supposed to exist "in the wild" +/// outside their binder, e.g., in types passed to type inference, and +/// should first be substituted (by placeholder regions, free regions, +/// or region variables). +/// +/// ## Placeholder and Free Regions +/// +/// One often wants to work with bound regions without knowing their precise +/// identity. For example, when checking a function, the lifetime of a borrow +/// can end up being assigned to some region parameter. In these cases, +/// it must be ensured that bounds on the region can't be accidentally +/// assumed without being checked. 
+/// +/// To do this, we replace the bound regions with placeholder markers, +/// which don't satisfy any relation not explicitly provided. +/// +/// There are two kinds of placeholder regions in rustc: `ReFree` and +/// `RePlaceholder`. When checking an item's body, `ReFree` is supposed +/// to be used. These also support explicit bounds: both the internally-stored +/// *scope*, which the region is assumed to outlive, as well as other +/// relations stored in the `FreeRegionMap`. Note that these relations +/// aren't checked when you `make_subregion` (or `eq_types`), only by +/// `resolve_regions_and_report_errors`. +/// +/// When working with higher-ranked types, some region relations aren't +/// yet known, so you can't just call `resolve_regions_and_report_errors`. +/// `RePlaceholder` is designed for this purpose. In these contexts, +/// there's also the risk that some inference variable laying around will +/// get unified with your placeholder region: if you want to check whether +/// `for<'a> Foo<'_>: 'a`, and you substitute your bound region `'a` +/// with a placeholder region `'%a`, the variable `'_` would just be +/// instantiated to the placeholder region `'%a`, which is wrong because +/// the inference variable is supposed to satisfy the relation +/// *for every value of the placeholder region*. To ensure that doesn't +/// happen, you can use `leak_check`. This is more clearly explained +/// by the [rustc dev guide]. +/// +/// [1]: https://smallcultfollowing.com/babysteps/blog/2013/10/29/intermingled-parameter-lists/ +/// [2]: https://smallcultfollowing.com/babysteps/blog/2013/11/04/intermingled-parameter-lists/ +/// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html +#[derive(derivative::Derivative)] +#[derivative( + Clone(bound = ""), + PartialOrd(bound = ""), + PartialOrd = "feature_allow_slow_enum", + Ord(bound = ""), + Ord = "feature_allow_slow_enum", + Hash(bound = "") +)] +#[derive(TyEncodable, TyDecodable)] +pub enum RegionKind { + /// Region bound in a type or fn declaration which will be + /// substituted 'early' -- that is, at the same time when type + /// parameters are substituted. + ReEarlyBound(I::EarlyBoundRegion), + + /// Region bound in a function scope, which will be substituted when the + /// function is called. + ReLateBound(DebruijnIndex, I::BoundRegion), + + /// When checking a function body, the types of all arguments and so forth + /// that refer to bound region parameters are modified to refer to free + /// region parameters. + ReFree(I::FreeRegion), + + /// Static data that has an "infinite" lifetime. Top in the region lattice. + ReStatic, + + /// A region variable. Should not exist outside of type inference. + ReVar(I::InferRegion), + + /// A placeholder region -- basically, the higher-ranked version of `ReFree`. + /// Should not exist outside of type inference. + RePlaceholder(I::PlaceholderRegion), + + /// Erased region, used by trait selection, in MIR and during codegen. + ReErased, + + /// A region that resulted from some other error. Used exclusively for diagnostics. 
+ ReError(I::ErrorGuaranteed), +} + +// This is manually implemented for `RegionKind` because `std::mem::discriminant` +// returns an opaque value that is `PartialEq` but not `PartialOrd` +#[inline] +const fn regionkind_discriminant(value: &RegionKind) -> usize { + match value { + ReEarlyBound(_) => 0, + ReLateBound(_, _) => 1, + ReFree(_) => 2, + ReStatic => 3, + ReVar(_) => 4, + RePlaceholder(_) => 5, + ReErased => 6, + ReError(_) => 7, + } +} + +// This is manually implemented because a derive would require `I: Copy` +impl Copy for RegionKind +where + I::EarlyBoundRegion: Copy, + I::BoundRegion: Copy, + I::FreeRegion: Copy, + I::InferRegion: Copy, + I::PlaceholderRegion: Copy, + I::ErrorGuaranteed: Copy, +{ +} + +// This is manually implemented because a derive would require `I: PartialEq` +impl PartialEq for RegionKind { + #[inline] + fn eq(&self, other: &RegionKind) -> bool { + regionkind_discriminant(self) == regionkind_discriminant(other) + && match (self, other) { + (ReEarlyBound(a_r), ReEarlyBound(b_r)) => a_r == b_r, + (ReLateBound(a_d, a_r), ReLateBound(b_d, b_r)) => a_d == b_d && a_r == b_r, + (ReFree(a_r), ReFree(b_r)) => a_r == b_r, + (ReStatic, ReStatic) => true, + (ReVar(a_r), ReVar(b_r)) => a_r == b_r, + (RePlaceholder(a_r), RePlaceholder(b_r)) => a_r == b_r, + (ReErased, ReErased) => true, + (ReError(_), ReError(_)) => true, + _ => { + debug_assert!( + false, + "This branch must be unreachable, maybe the match is missing an arm? self = {self:?}, other = {other:?}" + ); + true + } + } + } +} + +// This is manually implemented because a derive would require `I: Eq` +impl Eq for RegionKind {} + +impl DebugWithInfcx for RegionKind { + fn fmt>( + this: WithInfcx<'_, Infcx, &Self>, + f: &mut core::fmt::Formatter<'_>, + ) -> core::fmt::Result { + match this.data { + ReEarlyBound(data) => write!(f, "ReEarlyBound({data:?})"), + + ReLateBound(binder_id, bound_region) => { + write!(f, "ReLateBound({binder_id:?}, {bound_region:?})") + } + + ReFree(fr) => write!(f, "{fr:?}"), + + ReStatic => f.write_str("ReStatic"), + + ReVar(vid) => write!(f, "{:?}", &this.wrap(vid)), + + RePlaceholder(placeholder) => write!(f, "RePlaceholder({placeholder:?})"), + + ReErased => f.write_str("ReErased"), + + ReError(_) => f.write_str("ReError"), + } + } +} +impl fmt::Debug for RegionKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + WithInfcx::with_no_infcx(self).fmt(f) + } +} + +// This is not a derived impl because a derive would require `I: HashStable` +impl HashStable for RegionKind +where + I::EarlyBoundRegion: HashStable, + I::BoundRegion: HashStable, + I::FreeRegion: HashStable, + I::InferRegion: HashStable, + I::PlaceholderRegion: HashStable, +{ + #[inline] + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { + std::mem::discriminant(self).hash_stable(hcx, hasher); + match self { + ReErased | ReStatic | ReError(_) => { + // No variant fields to hash for these ... 
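The manual `regionkind_discriminant` function and the hand-written `PartialEq`/`Eq` impls above exist for the two reasons stated in the comments: `std::mem::discriminant` yields an opaque value that is `PartialEq` but not `PartialOrd`, and a plain `#[derive]` would place bounds on the interner `I` itself rather than on its associated types. Below is a self-contained sketch of the same pattern; `Interner`, `Kind`, `kind_discriminant`, and `Toy` are invented stand-ins, not the real compiler items. Note that the new `kind.rs` instead uses the `derivative` crate with empty bounds (`Clone(bound = "")`, etc.) to get the same effect for the derivable traits.

```rust
use std::cmp::Ordering;

// Stand-in for the compiler's `Interner`: the enum below mentions `I` only
// through associated types, so derives would add unwanted `I: PartialEq`
// or `I: Ord` bounds.
trait Interner {
    type Region: Ord;
    type ErrorGuaranteed: Ord;
}

enum Kind<I: Interner> {
    Free(I::Region),
    Static,
    Error(I::ErrorGuaranteed),
}

// Explicit numbering, because `std::mem::discriminant` cannot be ordered.
const fn kind_discriminant<I: Interner>(value: &Kind<I>) -> usize {
    match value {
        Kind::Free(_) => 0,
        Kind::Static => 1,
        Kind::Error(_) => 2,
    }
}

// Manual impls: bounds mention only the associated types, never `I` itself.
impl<I: Interner> PartialEq for Kind<I> {
    fn eq(&self, other: &Self) -> bool {
        kind_discriminant(self) == kind_discriminant(other)
            && match (self, other) {
                (Kind::Free(a), Kind::Free(b)) => a == b,
                (Kind::Static, Kind::Static) => true,
                (Kind::Error(a), Kind::Error(b)) => a == b,
                _ => unreachable!("discriminants already matched"),
            }
    }
}
impl<I: Interner> Eq for Kind<I> {}

impl<I: Interner> PartialOrd for Kind<I> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl<I: Interner> Ord for Kind<I> {
    fn cmp(&self, other: &Self) -> Ordering {
        // Order by the explicit discriminant first, then by the fields.
        kind_discriminant(self)
            .cmp(&kind_discriminant(other))
            .then_with(|| match (self, other) {
                (Kind::Free(a), Kind::Free(b)) => a.cmp(b),
                (Kind::Error(a), Kind::Error(b)) => a.cmp(b),
                _ => Ordering::Equal,
            })
    }
}

// A toy interner that is itself neither `PartialEq` nor `Ord`.
struct Toy;
impl Interner for Toy {
    type Region = u32;
    type ErrorGuaranteed = ();
}

fn main() {
    let a: Kind<Toy> = Kind::Free(1);
    let b: Kind<Toy> = Kind::Static;
    assert!(a < b); // ordered purely by the explicit numbering
    assert!(a == Kind::Free(1));
    println!("manual impls work without bounding the interner type itself");
}
```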
+ } + ReLateBound(d, r) => { + d.hash_stable(hcx, hasher); + r.hash_stable(hcx, hasher); + } + ReEarlyBound(r) => { + r.hash_stable(hcx, hasher); + } + ReFree(r) => { + r.hash_stable(hcx, hasher); + } + RePlaceholder(r) => { + r.hash_stable(hcx, hasher); + } + ReVar(_) => { + panic!("region variables should not be hashed: {self:?}") + } + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/structural_impls.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/structural_impls.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/structural_impls.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/structural_impls.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,352 +0,0 @@ -//! This module contains implementations of the `TypeFoldable` and `TypeVisitable` -//! traits for various types in the Rust compiler. Most are written by hand, though -//! we've recently added some macros and proc-macros to help with the tedium. - -use crate::fold::{FallibleTypeFolder, TypeFoldable}; -use crate::visit::{TypeVisitable, TypeVisitor}; -use crate::{ConstKind, FloatTy, InferTy, IntTy, Interner, UintTy, UniverseIndex}; -use rustc_data_structures::functor::IdFunctor; -use rustc_data_structures::sync::Lrc; -use rustc_index::{Idx, IndexVec}; - -use core::fmt; -use std::marker::PhantomData; -use std::ops::ControlFlow; - -/////////////////////////////////////////////////////////////////////////// -// Atomic structs -// -// For things that don't carry any arena-allocated data (and are -// copy...), just add them to this list. - -TrivialTypeTraversalImpls! { - (), - bool, - usize, - u16, - u32, - u64, - String, - crate::DebruijnIndex, -} - -/////////////////////////////////////////////////////////////////////////// -// Traversal implementations. 
- -impl, U: TypeFoldable> TypeFoldable for (T, U) { - fn try_fold_with>(self, folder: &mut F) -> Result<(T, U), F::Error> { - Ok((self.0.try_fold_with(folder)?, self.1.try_fold_with(folder)?)) - } -} - -impl, U: TypeVisitable> TypeVisitable for (T, U) { - fn visit_with>(&self, visitor: &mut V) -> ControlFlow { - self.0.visit_with(visitor)?; - self.1.visit_with(visitor) - } -} - -impl, B: TypeFoldable, C: TypeFoldable> TypeFoldable - for (A, B, C) -{ - fn try_fold_with>( - self, - folder: &mut F, - ) -> Result<(A, B, C), F::Error> { - Ok(( - self.0.try_fold_with(folder)?, - self.1.try_fold_with(folder)?, - self.2.try_fold_with(folder)?, - )) - } -} - -impl, B: TypeVisitable, C: TypeVisitable> TypeVisitable - for (A, B, C) -{ - fn visit_with>(&self, visitor: &mut V) -> ControlFlow { - self.0.visit_with(visitor)?; - self.1.visit_with(visitor)?; - self.2.visit_with(visitor) - } -} - -impl> TypeFoldable for Option { - fn try_fold_with>(self, folder: &mut F) -> Result { - Ok(match self { - Some(v) => Some(v.try_fold_with(folder)?), - None => None, - }) - } -} - -impl> TypeVisitable for Option { - fn visit_with>(&self, visitor: &mut V) -> ControlFlow { - match self { - Some(v) => v.visit_with(visitor), - None => ControlFlow::Continue(()), - } - } -} - -impl, E: TypeFoldable> TypeFoldable for Result { - fn try_fold_with>(self, folder: &mut F) -> Result { - Ok(match self { - Ok(v) => Ok(v.try_fold_with(folder)?), - Err(e) => Err(e.try_fold_with(folder)?), - }) - } -} - -impl, E: TypeVisitable> TypeVisitable for Result { - fn visit_with>(&self, visitor: &mut V) -> ControlFlow { - match self { - Ok(v) => v.visit_with(visitor), - Err(e) => e.visit_with(visitor), - } - } -} - -impl> TypeFoldable for Lrc { - fn try_fold_with>(self, folder: &mut F) -> Result { - self.try_map_id(|value| value.try_fold_with(folder)) - } -} - -impl> TypeVisitable for Lrc { - fn visit_with>(&self, visitor: &mut V) -> ControlFlow { - (**self).visit_with(visitor) - } -} - -impl> TypeFoldable for Box { - fn try_fold_with>(self, folder: &mut F) -> Result { - self.try_map_id(|value| value.try_fold_with(folder)) - } -} - -impl> TypeVisitable for Box { - fn visit_with>(&self, visitor: &mut V) -> ControlFlow { - (**self).visit_with(visitor) - } -} - -impl> TypeFoldable for Vec { - fn try_fold_with>(self, folder: &mut F) -> Result { - self.try_map_id(|t| t.try_fold_with(folder)) - } -} - -impl> TypeVisitable for Vec { - fn visit_with>(&self, visitor: &mut V) -> ControlFlow { - self.iter().try_for_each(|t| t.visit_with(visitor)) - } -} - -// `TypeFoldable` isn't impl'd for `&[T]`. It doesn't make sense in the general -// case, because we can't return a new slice. But note that there are a couple -// of trivial impls of `TypeFoldable` for specific slice types elsewhere. 
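The removed traversal impls above all follow one shape: leaves hand themselves to the visitor, containers recurse over their contents, and `ControlFlow` lets a visitor break out early. Here is a minimal standalone sketch of that shape (only the visiting half, with invented `Visit`/`Visitor`/`FindZero` names and no interner parameter), under the assumption that this simplification preserves the structure of the real `TypeVisitable` impls.

```rust
use std::ops::ControlFlow;

trait Visit {
    fn visit_with<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<()>;
}

trait Visitor {
    fn visit_leaf(&mut self, value: u32) -> ControlFlow<()>;
}

// Leaves hand themselves to the visitor directly.
impl Visit for u32 {
    fn visit_with<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<()> {
        visitor.visit_leaf(*self)
    }
}

// Containers recurse, like the `(T, U)`, `Option<T>` and `Vec<T>` impls above;
// `?` propagates an early `Break` out of the tuple traversal.
impl<T: Visit, U: Visit> Visit for (T, U) {
    fn visit_with<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<()> {
        self.0.visit_with(visitor)?;
        self.1.visit_with(visitor)
    }
}

impl<T: Visit> Visit for Option<T> {
    fn visit_with<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<()> {
        match self {
            Some(v) => v.visit_with(visitor),
            None => ControlFlow::Continue(()),
        }
    }
}

impl<T: Visit> Visit for Vec<T> {
    fn visit_with<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<()> {
        self.iter().try_for_each(|t| t.visit_with(visitor))
    }
}

// A visitor that stops as soon as it sees a zero.
struct FindZero;
impl Visitor for FindZero {
    fn visit_leaf(&mut self, value: u32) -> ControlFlow<()> {
        if value == 0 { ControlFlow::Break(()) } else { ControlFlow::Continue(()) }
    }
}

fn main() {
    let data = vec![Some(1_u32), None, Some(0), Some(3)];
    let found_zero = data.visit_with(&mut FindZero).is_break();
    println!("contains a zero: {found_zero}");
}
```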
- -impl> TypeVisitable for &[T] { - fn visit_with>(&self, visitor: &mut V) -> ControlFlow { - self.iter().try_for_each(|t| t.visit_with(visitor)) - } -} - -impl, Ix: Idx> TypeFoldable for IndexVec { - fn try_fold_with>(self, folder: &mut F) -> Result { - self.try_map_id(|x| x.try_fold_with(folder)) - } -} - -impl, Ix: Idx> TypeVisitable for IndexVec { - fn visit_with>(&self, visitor: &mut V) -> ControlFlow { - self.iter().try_for_each(|t| t.visit_with(visitor)) - } -} - -/////////////////////////////////////////////////// -// Debug impls - -pub trait InferCtxtLike { - fn universe_of_ty(&self, ty: I::InferTy) -> Option; - fn universe_of_lt(&self, lt: I::RegionVid) -> Option; - fn universe_of_ct(&self, ct: I::InferConst) -> Option; -} - -impl InferCtxtLike for core::convert::Infallible { - fn universe_of_ty(&self, _ty: ::InferTy) -> Option { - match *self {} - } - fn universe_of_ct(&self, _ct: ::InferConst) -> Option { - match *self {} - } - fn universe_of_lt(&self, _lt: ::RegionVid) -> Option { - match *self {} - } -} - -pub trait DebugWithInfcx: fmt::Debug { - fn fmt>( - this: OptWithInfcx<'_, I, InfCtx, &Self>, - f: &mut fmt::Formatter<'_>, - ) -> fmt::Result; -} - -impl + ?Sized> DebugWithInfcx for &'_ T { - fn fmt>( - this: OptWithInfcx<'_, I, InfCtx, &Self>, - f: &mut fmt::Formatter<'_>, - ) -> fmt::Result { - >::fmt(this.map(|&data| data), f) - } -} -impl> DebugWithInfcx for [T] { - fn fmt>( - this: OptWithInfcx<'_, I, InfCtx, &Self>, - f: &mut fmt::Formatter<'_>, - ) -> fmt::Result { - match f.alternate() { - true => { - write!(f, "[\n")?; - for element in this.data.iter() { - write!(f, "{:?},\n", &this.wrap(element))?; - } - write!(f, "]") - } - false => { - write!(f, "[")?; - if this.data.len() > 0 { - for element in &this.data[..(this.data.len() - 1)] { - write!(f, "{:?}, ", &this.wrap(element))?; - } - if let Some(element) = this.data.last() { - write!(f, "{:?}", &this.wrap(element))?; - } - } - write!(f, "]") - } - } - } -} - -pub struct OptWithInfcx<'a, I: Interner, InfCtx: InferCtxtLike, T> { - pub data: T, - pub infcx: Option<&'a InfCtx>, - _interner: PhantomData, -} - -impl, T: Copy> Copy for OptWithInfcx<'_, I, InfCtx, T> {} -impl, T: Clone> Clone for OptWithInfcx<'_, I, InfCtx, T> { - fn clone(&self) -> Self { - Self { data: self.data.clone(), infcx: self.infcx, _interner: self._interner } - } -} - -impl<'a, I: Interner, T> OptWithInfcx<'a, I, core::convert::Infallible, T> { - pub fn new_no_ctx(data: T) -> Self { - Self { data, infcx: None, _interner: PhantomData } - } -} - -impl<'a, I: Interner, InfCtx: InferCtxtLike, T> OptWithInfcx<'a, I, InfCtx, T> { - pub fn new(data: T, infcx: &'a InfCtx) -> Self { - Self { data, infcx: Some(infcx), _interner: PhantomData } - } - - pub fn wrap(self, u: U) -> OptWithInfcx<'a, I, InfCtx, U> { - OptWithInfcx { data: u, infcx: self.infcx, _interner: PhantomData } - } - - pub fn map(self, f: impl FnOnce(T) -> U) -> OptWithInfcx<'a, I, InfCtx, U> { - OptWithInfcx { data: f(self.data), infcx: self.infcx, _interner: PhantomData } - } - - pub fn as_ref(&self) -> OptWithInfcx<'a, I, InfCtx, &T> { - OptWithInfcx { data: &self.data, infcx: self.infcx, _interner: PhantomData } - } -} - -impl, T: DebugWithInfcx> fmt::Debug - for OptWithInfcx<'_, I, InfCtx, T> -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - DebugWithInfcx::fmt(self.as_ref(), f) - } -} - -impl fmt::Debug for IntTy { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.name_str()) - } -} - -impl fmt::Debug for UintTy { - fn fmt(&self, 
f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.name_str()) - } -} - -impl fmt::Debug for FloatTy { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.name_str()) - } -} - -impl fmt::Debug for InferTy { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - use InferTy::*; - match *self { - TyVar(ref v) => v.fmt(f), - IntVar(ref v) => v.fmt(f), - FloatVar(ref v) => v.fmt(f), - FreshTy(v) => write!(f, "FreshTy({v:?})"), - FreshIntTy(v) => write!(f, "FreshIntTy({v:?})"), - FreshFloatTy(v) => write!(f, "FreshFloatTy({v:?})"), - } - } -} -impl> DebugWithInfcx for InferTy { - fn fmt>( - this: OptWithInfcx<'_, I, InfCtx, &Self>, - f: &mut fmt::Formatter<'_>, - ) -> fmt::Result { - use InferTy::*; - match this.infcx.and_then(|infcx| infcx.universe_of_ty(*this.data)) { - None => write!(f, "{:?}", this.data), - Some(universe) => match *this.data { - TyVar(ty_vid) => write!(f, "?{}_{}t", ty_vid.index(), universe.index()), - IntVar(_) | FloatVar(_) | FreshTy(_) | FreshIntTy(_) | FreshFloatTy(_) => { - unreachable!() - } - }, - } - } -} - -impl fmt::Debug for ConstKind { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - OptWithInfcx::new_no_ctx(self).fmt(f) - } -} -impl DebugWithInfcx for ConstKind { - fn fmt>( - this: OptWithInfcx<'_, I, InfCtx, &Self>, - f: &mut core::fmt::Formatter<'_>, - ) -> core::fmt::Result { - use ConstKind::*; - - match this.data { - Param(param) => write!(f, "{param:?}"), - Infer(var) => write!(f, "{:?}", &this.wrap(var)), - Bound(debruijn, var) => crate::debug_bound_var(f, *debruijn, var.clone()), - Placeholder(placeholder) => write!(f, "{placeholder:?}"), - Unevaluated(uv) => { - write!(f, "{:?}", &this.wrap(uv)) - } - Value(valtree) => write!(f, "{valtree:?}"), - Error(_) => write!(f, "{{const error}}"), - Expr(expr) => write!(f, "{:?}", &this.wrap(expr)), - } - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/sty.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/sty.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/sty.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/sty.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1479 +0,0 @@ -#![allow(rustc::usage_of_ty_tykind)] - -use std::cmp::Ordering; -use std::{fmt, hash}; - -use crate::FloatTy; -use crate::HashStableContext; -use crate::IntTy; -use crate::Interner; -use crate::TyDecoder; -use crate::TyEncoder; -use crate::UintTy; -use crate::{DebruijnIndex, DebugWithInfcx, InferCtxtLike, OptWithInfcx}; - -use self::RegionKind::*; -use self::TyKind::*; - -use rustc_data_structures::stable_hasher::HashStable; -use rustc_serialize::{Decodable, Decoder, Encodable}; - -/// Specifies how a trait object is represented. -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] -#[derive(Encodable, Decodable, HashStable_Generic)] -pub enum DynKind { - /// An unsized `dyn Trait` object - Dyn, - /// A sized `dyn* Trait` object - /// - /// These objects are represented as a `(data, vtable)` pair where `data` is a value of some - /// ptr-sized and ptr-aligned dynamically determined type `T` and `vtable` is a pointer to the - /// vtable of `impl T for Trait`. This allows a `dyn*` object to be treated agnostically with - /// respect to whether it points to a `Box`, `Rc`, etc. 
- DynStar, -} - -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] -#[derive(Encodable, Decodable, HashStable_Generic)] -pub enum AliasKind { - /// A projection `::AssocType`. - /// Can get normalized away if monomorphic enough. - Projection, - /// An associated type in an inherent `impl` - Inherent, - /// An opaque type (usually from `impl Trait` in type aliases or function return types) - /// Can only be normalized away in RevealAll mode - Opaque, - /// A type alias that actually checks its trait bounds. - /// Currently only used if the type alias references opaque types. - /// Can always be normalized away. - Weak, -} - -/// Defines the kinds of types used by the type system. -/// -/// Types written by the user start out as `hir::TyKind` and get -/// converted to this representation using `AstConv::ast_ty_to_ty`. -#[rustc_diagnostic_item = "IrTyKind"] -pub enum TyKind { - /// The primitive boolean type. Written as `bool`. - Bool, - - /// The primitive character type; holds a Unicode scalar value - /// (a non-surrogate code point). Written as `char`. - Char, - - /// A primitive signed integer type. For example, `i32`. - Int(IntTy), - - /// A primitive unsigned integer type. For example, `u32`. - Uint(UintTy), - - /// A primitive floating-point type. For example, `f64`. - Float(FloatTy), - - /// Algebraic data types (ADT). For example: structures, enumerations and unions. - /// - /// For example, the type `List` would be represented using the `AdtDef` - /// for `struct List` and the args `[i32]`. - /// - /// Note that generic parameters in fields only get lazily substituted - /// by using something like `adt_def.all_fields().map(|field| field.ty(tcx, args))`. - Adt(I::AdtDef, I::GenericArgsRef), - - /// An unsized FFI type that is opaque to Rust. Written as `extern type T`. - Foreign(I::DefId), - - /// The pointee of a string slice. Written as `str`. - Str, - - /// An array with the given length. Written as `[T; N]`. - Array(I::Ty, I::Const), - - /// The pointee of an array slice. Written as `[T]`. - Slice(I::Ty), - - /// A raw pointer. Written as `*mut T` or `*const T` - RawPtr(I::TypeAndMut), - - /// A reference; a pointer with an associated lifetime. Written as - /// `&'a mut T` or `&'a T`. - Ref(I::Region, I::Ty, I::Mutability), - - /// The anonymous type of a function declaration/definition. Each - /// function has a unique type. - /// - /// For the function `fn foo() -> i32 { 3 }` this type would be - /// shown to the user as `fn() -> i32 {foo}`. - /// - /// For example the type of `bar` here: - /// ```rust - /// fn foo() -> i32 { 1 } - /// let bar = foo; // bar: fn() -> i32 {foo} - /// ``` - FnDef(I::DefId, I::GenericArgsRef), - - /// A pointer to a function. Written as `fn() -> i32`. - /// - /// Note that both functions and closures start out as either - /// [FnDef] or [Closure] which can be then be coerced to this variant. - /// - /// For example the type of `bar` here: - /// - /// ```rust - /// fn foo() -> i32 { 1 } - /// let bar: fn() -> i32 = foo; - /// ``` - FnPtr(I::PolyFnSig), - - /// A trait object. Written as `dyn for<'b> Trait<'b, Assoc = u32> + Send + 'a`. - Dynamic(I::ListBinderExistentialPredicate, I::Region, DynKind), - - /// The anonymous type of a closure. Used to represent the type of `|a| a`. - /// - /// Closure args contain both the - potentially substituted - generic parameters - /// of its parent and some synthetic parameters. See the documentation for - /// `ClosureArgs` for more details. 
- Closure(I::DefId, I::GenericArgsRef), - - /// The anonymous type of a generator. Used to represent the type of - /// `|a| yield a`. - /// - /// For more info about generator args, visit the documentation for - /// `GeneratorArgs`. - Generator(I::DefId, I::GenericArgsRef, I::Movability), - - /// A type representing the types stored inside a generator. - /// This should only appear as part of the `GeneratorArgs`. - /// - /// Unlike upvars, the witness can reference lifetimes from - /// inside of the generator itself. To deal with them in - /// the type of the generator, we convert them to higher ranked - /// lifetimes bound by the witness itself. - /// - /// This contains the `DefId` and the `GenericArgsRef` of the generator. - /// The actual witness types are computed on MIR by the `mir_generator_witnesses` query. - /// - /// Looking at the following example, the witness for this generator - /// may end up as something like `for<'a> [Vec, &'a Vec]`: - /// - /// ```ignore UNSOLVED (ask @compiler-errors, should this error? can we just swap the yields?) - /// #![feature(generators)] - /// |a| { - /// let x = &vec![3]; - /// yield a; - /// yield x[0]; - /// } - /// # ; - /// ``` - GeneratorWitness(I::DefId, I::GenericArgsRef), - - /// The never type `!`. - Never, - - /// A tuple type. For example, `(i32, bool)`. - Tuple(I::ListTy), - - /// A projection, opaque type, weak type alias, or inherent associated type. - /// All of these types are represented as pairs of def-id and args, and can - /// be normalized, so they are grouped conceptually. - Alias(AliasKind, I::AliasTy), - - /// A type parameter; for example, `T` in `fn f(x: T) {}`. - Param(I::ParamTy), - - /// Bound type variable, used to represent the `'a` in `for<'a> fn(&'a ())`. - /// - /// For canonical queries, we replace inference variables with bound variables, - /// so e.g. when checking whether `&'_ (): Trait<_>` holds, we canonicalize that to - /// `for<'a, T> &'a (): Trait` and then convert the introduced bound variables - /// back to inference variables in a new inference context when inside of the query. - /// - /// It is conventional to render anonymous bound types like `^N` or `^D_N`, - /// where `N` is the bound variable's anonymous index into the binder, and - /// `D` is the debruijn index, or totally omitted if the debruijn index is zero. - /// - /// See the `rustc-dev-guide` for more details about - /// [higher-ranked trait bounds][1] and [canonical queries][2]. - /// - /// [1]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html - /// [2]: https://rustc-dev-guide.rust-lang.org/traits/canonical-queries.html - Bound(DebruijnIndex, I::BoundTy), - - /// A placeholder type, used during higher ranked subtyping to instantiate - /// bound variables. - /// - /// It is conventional to render anonymous placeholer types like `!N` or `!U_N`, - /// where `N` is the placeholder variable's anonymous index (which corresponds - /// to the bound variable's index from the binder from which it was instantiated), - /// and `U` is the universe index in which it is instantiated, or totally omitted - /// if the universe index is zero. - Placeholder(I::PlaceholderType), - - /// A type variable used during type checking. - /// - /// Similar to placeholders, inference variables also live in a universe to - /// correctly deal with higher ranked types. Though unlike placeholders, - /// that universe is stored in the `InferCtxt` instead of directly - /// inside of the type. 
- Infer(I::InferTy), - - /// A placeholder for a type which could not be computed; this is - /// propagated to avoid useless error messages. - Error(I::ErrorGuaranteed), -} - -impl TyKind { - #[inline] - pub fn is_primitive(&self) -> bool { - matches!(self, Bool | Char | Int(_) | Uint(_) | Float(_)) - } -} - -// This is manually implemented for `TyKind` because `std::mem::discriminant` -// returns an opaque value that is `PartialEq` but not `PartialOrd` -#[inline] -const fn tykind_discriminant(value: &TyKind) -> usize { - match value { - Bool => 0, - Char => 1, - Int(_) => 2, - Uint(_) => 3, - Float(_) => 4, - Adt(_, _) => 5, - Foreign(_) => 6, - Str => 7, - Array(_, _) => 8, - Slice(_) => 9, - RawPtr(_) => 10, - Ref(_, _, _) => 11, - FnDef(_, _) => 12, - FnPtr(_) => 13, - Dynamic(..) => 14, - Closure(_, _) => 15, - Generator(_, _, _) => 16, - GeneratorWitness(_, _) => 17, - Never => 18, - Tuple(_) => 19, - Alias(_, _) => 20, - Param(_) => 21, - Bound(_, _) => 22, - Placeholder(_) => 23, - Infer(_) => 24, - Error(_) => 25, - } -} - -// This is manually implemented because a derive would require `I: Clone` -impl Clone for TyKind { - fn clone(&self) -> Self { - match self { - Bool => Bool, - Char => Char, - Int(i) => Int(*i), - Uint(u) => Uint(*u), - Float(f) => Float(*f), - Adt(d, s) => Adt(d.clone(), s.clone()), - Foreign(d) => Foreign(d.clone()), - Str => Str, - Array(t, c) => Array(t.clone(), c.clone()), - Slice(t) => Slice(t.clone()), - RawPtr(p) => RawPtr(p.clone()), - Ref(r, t, m) => Ref(r.clone(), t.clone(), m.clone()), - FnDef(d, s) => FnDef(d.clone(), s.clone()), - FnPtr(s) => FnPtr(s.clone()), - Dynamic(p, r, repr) => Dynamic(p.clone(), r.clone(), *repr), - Closure(d, s) => Closure(d.clone(), s.clone()), - Generator(d, s, m) => Generator(d.clone(), s.clone(), m.clone()), - GeneratorWitness(d, s) => GeneratorWitness(d.clone(), s.clone()), - Never => Never, - Tuple(t) => Tuple(t.clone()), - Alias(k, p) => Alias(*k, p.clone()), - Param(p) => Param(p.clone()), - Bound(d, b) => Bound(*d, b.clone()), - Placeholder(p) => Placeholder(p.clone()), - Infer(t) => Infer(t.clone()), - Error(e) => Error(e.clone()), - } - } -} - -// This is manually implemented because a derive would require `I: PartialEq` -impl PartialEq for TyKind { - #[inline] - fn eq(&self, other: &TyKind) -> bool { - // You might expect this `match` to be preceded with this: - // - // tykind_discriminant(self) == tykind_discriminant(other) && - // - // but the data patterns in practice are such that a comparison - // succeeds 99%+ of the time, and it's faster to omit it. 
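The comment above explains that `TyKind::eq` omits the leading discriminant comparison because same-variant comparisons dominate in practice, making the pre-check pure overhead. The two formulations return the same answer; the following standalone check (with a made-up `Small` enum, not a compiler type) illustrates that the choice is only a performance trade-off.

```rust
use std::mem::discriminant;

#[derive(Clone, Copy, Debug)]
enum Small {
    A(u8),
    B(u8),
    C,
}

// Variant of `eq` that filters cross-variant pairs up front.
fn eq_with_precheck(x: &Small, y: &Small) -> bool {
    discriminant(x) == discriminant(y)
        && match (x, y) {
            (Small::A(a), Small::A(b)) => a == b,
            (Small::B(a), Small::B(b)) => a == b,
            (Small::C, Small::C) => true,
            _ => unreachable!("discriminants already matched"),
        }
}

// Variant of `eq` that lets cross-variant pairs fall through to `false`.
fn eq_without_precheck(x: &Small, y: &Small) -> bool {
    match (x, y) {
        (Small::A(a), Small::A(b)) => a == b,
        (Small::B(a), Small::B(b)) => a == b,
        (Small::C, Small::C) => true,
        _ => false,
    }
}

fn main() {
    let values = [Small::A(1), Small::A(2), Small::B(1), Small::C];
    for x in values {
        for y in values {
            assert_eq!(eq_with_precheck(&x, &y), eq_without_precheck(&x, &y));
        }
    }
    println!("both formulations agree on all pairs");
}
```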
- match (self, other) { - (Int(a_i), Int(b_i)) => a_i == b_i, - (Uint(a_u), Uint(b_u)) => a_u == b_u, - (Float(a_f), Float(b_f)) => a_f == b_f, - (Adt(a_d, a_s), Adt(b_d, b_s)) => a_d == b_d && a_s == b_s, - (Foreign(a_d), Foreign(b_d)) => a_d == b_d, - (Array(a_t, a_c), Array(b_t, b_c)) => a_t == b_t && a_c == b_c, - (Slice(a_t), Slice(b_t)) => a_t == b_t, - (RawPtr(a_t), RawPtr(b_t)) => a_t == b_t, - (Ref(a_r, a_t, a_m), Ref(b_r, b_t, b_m)) => a_r == b_r && a_t == b_t && a_m == b_m, - (FnDef(a_d, a_s), FnDef(b_d, b_s)) => a_d == b_d && a_s == b_s, - (FnPtr(a_s), FnPtr(b_s)) => a_s == b_s, - (Dynamic(a_p, a_r, a_repr), Dynamic(b_p, b_r, b_repr)) => { - a_p == b_p && a_r == b_r && a_repr == b_repr - } - (Closure(a_d, a_s), Closure(b_d, b_s)) => a_d == b_d && a_s == b_s, - (Generator(a_d, a_s, a_m), Generator(b_d, b_s, b_m)) => { - a_d == b_d && a_s == b_s && a_m == b_m - } - (GeneratorWitness(a_d, a_s), GeneratorWitness(b_d, b_s)) => a_d == b_d && a_s == b_s, - (Tuple(a_t), Tuple(b_t)) => a_t == b_t, - (Alias(a_i, a_p), Alias(b_i, b_p)) => a_i == b_i && a_p == b_p, - (Param(a_p), Param(b_p)) => a_p == b_p, - (Bound(a_d, a_b), Bound(b_d, b_b)) => a_d == b_d && a_b == b_b, - (Placeholder(a_p), Placeholder(b_p)) => a_p == b_p, - (Infer(a_t), Infer(b_t)) => a_t == b_t, - (Error(a_e), Error(b_e)) => a_e == b_e, - (Bool, Bool) | (Char, Char) | (Str, Str) | (Never, Never) => true, - _ => { - debug_assert!( - tykind_discriminant(self) != tykind_discriminant(other), - "This branch must be unreachable, maybe the match is missing an arm? self = self = {self:?}, other = {other:?}" - ); - false - } - } - } -} - -// This is manually implemented because a derive would require `I: Eq` -impl Eq for TyKind {} - -// This is manually implemented because a derive would require `I: PartialOrd` -impl PartialOrd for TyKind { - #[inline] - fn partial_cmp(&self, other: &TyKind) -> Option { - Some(self.cmp(other)) - } -} - -// This is manually implemented because a derive would require `I: Ord` -impl Ord for TyKind { - #[inline] - fn cmp(&self, other: &TyKind) -> Ordering { - tykind_discriminant(self).cmp(&tykind_discriminant(other)).then_with(|| { - match (self, other) { - (Int(a_i), Int(b_i)) => a_i.cmp(b_i), - (Uint(a_u), Uint(b_u)) => a_u.cmp(b_u), - (Float(a_f), Float(b_f)) => a_f.cmp(b_f), - (Adt(a_d, a_s), Adt(b_d, b_s)) => a_d.cmp(b_d).then_with(|| a_s.cmp(b_s)), - (Foreign(a_d), Foreign(b_d)) => a_d.cmp(b_d), - (Array(a_t, a_c), Array(b_t, b_c)) => a_t.cmp(b_t).then_with(|| a_c.cmp(b_c)), - (Slice(a_t), Slice(b_t)) => a_t.cmp(b_t), - (RawPtr(a_t), RawPtr(b_t)) => a_t.cmp(b_t), - (Ref(a_r, a_t, a_m), Ref(b_r, b_t, b_m)) => { - a_r.cmp(b_r).then_with(|| a_t.cmp(b_t).then_with(|| a_m.cmp(b_m))) - } - (FnDef(a_d, a_s), FnDef(b_d, b_s)) => a_d.cmp(b_d).then_with(|| a_s.cmp(b_s)), - (FnPtr(a_s), FnPtr(b_s)) => a_s.cmp(b_s), - (Dynamic(a_p, a_r, a_repr), Dynamic(b_p, b_r, b_repr)) => { - a_p.cmp(b_p).then_with(|| a_r.cmp(b_r).then_with(|| a_repr.cmp(b_repr))) - } - (Closure(a_p, a_s), Closure(b_p, b_s)) => a_p.cmp(b_p).then_with(|| a_s.cmp(b_s)), - (Generator(a_d, a_s, a_m), Generator(b_d, b_s, b_m)) => { - a_d.cmp(b_d).then_with(|| a_s.cmp(b_s).then_with(|| a_m.cmp(b_m))) - } - ( - GeneratorWitness(a_d, a_s), - GeneratorWitness(b_d, b_s), - ) => match Ord::cmp(a_d, b_d) { - Ordering::Equal => Ord::cmp(a_s, b_s), - cmp => cmp, - }, - (Tuple(a_t), Tuple(b_t)) => a_t.cmp(b_t), - (Alias(a_i, a_p), Alias(b_i, b_p)) => a_i.cmp(b_i).then_with(|| a_p.cmp(b_p)), - (Param(a_p), Param(b_p)) => a_p.cmp(b_p), - (Bound(a_d, 
a_b), Bound(b_d, b_b)) => a_d.cmp(b_d).then_with(|| a_b.cmp(b_b)), - (Placeholder(a_p), Placeholder(b_p)) => a_p.cmp(b_p), - (Infer(a_t), Infer(b_t)) => a_t.cmp(b_t), - (Error(a_e), Error(b_e)) => a_e.cmp(b_e), - (Bool, Bool) | (Char, Char) | (Str, Str) | (Never, Never) => Ordering::Equal, - _ => { - debug_assert!(false, "This branch must be unreachable, maybe the match is missing an arm? self = {self:?}, other = {other:?}"); - Ordering::Equal - } - } - }) - } -} - -// This is manually implemented because a derive would require `I: Hash` -impl hash::Hash for TyKind { - fn hash<__H: hash::Hasher>(&self, state: &mut __H) -> () { - tykind_discriminant(self).hash(state); - match self { - Int(i) => i.hash(state), - Uint(u) => u.hash(state), - Float(f) => f.hash(state), - Adt(d, s) => { - d.hash(state); - s.hash(state) - } - Foreign(d) => d.hash(state), - Array(t, c) => { - t.hash(state); - c.hash(state) - } - Slice(t) => t.hash(state), - RawPtr(t) => t.hash(state), - Ref(r, t, m) => { - r.hash(state); - t.hash(state); - m.hash(state) - } - FnDef(d, s) => { - d.hash(state); - s.hash(state) - } - FnPtr(s) => s.hash(state), - Dynamic(p, r, repr) => { - p.hash(state); - r.hash(state); - repr.hash(state) - } - Closure(d, s) => { - d.hash(state); - s.hash(state) - } - Generator(d, s, m) => { - d.hash(state); - s.hash(state); - m.hash(state) - } - GeneratorWitness(d, s) => { - d.hash(state); - s.hash(state); - } - Tuple(t) => t.hash(state), - Alias(i, p) => { - i.hash(state); - p.hash(state); - } - Param(p) => p.hash(state), - Bound(d, b) => { - d.hash(state); - b.hash(state) - } - Placeholder(p) => p.hash(state), - Infer(t) => t.hash(state), - Error(e) => e.hash(state), - Bool | Char | Str | Never => (), - } - } -} - -impl DebugWithInfcx for TyKind { - fn fmt>( - this: OptWithInfcx<'_, I, InfCtx, &Self>, - f: &mut core::fmt::Formatter<'_>, - ) -> fmt::Result { - match this.data { - Bool => write!(f, "bool"), - Char => write!(f, "char"), - Int(i) => write!(f, "{i:?}"), - Uint(u) => write!(f, "{u:?}"), - Float(float) => write!(f, "{float:?}"), - Adt(d, s) => { - write!(f, "{d:?}")?; - let mut s = s.clone().into_iter(); - let first = s.next(); - match first { - Some(first) => write!(f, "<{:?}", first)?, - None => return Ok(()), - }; - - for arg in s { - write!(f, ", {:?}", arg)?; - } - - write!(f, ">") - } - Foreign(d) => f.debug_tuple_field1_finish("Foreign", d), - Str => write!(f, "str"), - Array(t, c) => write!(f, "[{:?}; {:?}]", &this.wrap(t), &this.wrap(c)), - Slice(t) => write!(f, "[{:?}]", &this.wrap(t)), - RawPtr(p) => { - let (ty, mutbl) = I::ty_and_mut_to_parts(p.clone()); - match I::mutability_is_mut(mutbl) { - true => write!(f, "*mut "), - false => write!(f, "*const "), - }?; - write!(f, "{:?}", &this.wrap(ty)) - } - Ref(r, t, m) => match I::mutability_is_mut(m.clone()) { - true => write!(f, "&{:?} mut {:?}", &this.wrap(r), &this.wrap(t)), - false => write!(f, "&{:?} {:?}", &this.wrap(r), &this.wrap(t)), - }, - FnDef(d, s) => f.debug_tuple_field2_finish("FnDef", d, &this.wrap(s)), - FnPtr(s) => write!(f, "{:?}", &this.wrap(s)), - Dynamic(p, r, repr) => match repr { - DynKind::Dyn => write!(f, "dyn {:?} + {:?}", &this.wrap(p), &this.wrap(r)), - DynKind::DynStar => { - write!(f, "dyn* {:?} + {:?}", &this.wrap(p), &this.wrap(r)) - } - }, - Closure(d, s) => f.debug_tuple_field2_finish("Closure", d, &this.wrap(s)), - Generator(d, s, m) => f.debug_tuple_field3_finish("Generator", d, &this.wrap(s), m), - GeneratorWitness(d, s) => { - f.debug_tuple_field2_finish("GeneratorWitness", d, &this.wrap(s)) 
- } - Never => write!(f, "!"), - Tuple(t) => { - write!(f, "(")?; - let mut count = 0; - for ty in t.clone() { - if count > 0 { - write!(f, ", ")?; - } - write!(f, "{:?}", &this.wrap(ty))?; - count += 1; - } - // unary tuples need a trailing comma - if count == 1 { - write!(f, ",")?; - } - write!(f, ")") - } - Alias(i, a) => f.debug_tuple_field2_finish("Alias", i, &this.wrap(a)), - Param(p) => write!(f, "{p:?}"), - Bound(d, b) => crate::debug_bound_var(f, *d, b), - Placeholder(p) => write!(f, "{p:?}"), - Infer(t) => write!(f, "{:?}", this.wrap(t)), - TyKind::Error(_) => write!(f, "{{type error}}"), - } - } -} -// This is manually implemented because a derive would require `I: Debug` -impl fmt::Debug for TyKind { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - OptWithInfcx::new_no_ctx(self).fmt(f) - } -} - -// This is manually implemented because a derive would require `I: Encodable` -impl Encodable for TyKind -where - I::ErrorGuaranteed: Encodable, - I::AdtDef: Encodable, - I::GenericArgsRef: Encodable, - I::DefId: Encodable, - I::Ty: Encodable, - I::Const: Encodable, - I::Region: Encodable, - I::TypeAndMut: Encodable, - I::Mutability: Encodable, - I::Movability: Encodable, - I::PolyFnSig: Encodable, - I::ListBinderExistentialPredicate: Encodable, - I::BinderListTy: Encodable, - I::ListTy: Encodable, - I::AliasTy: Encodable, - I::ParamTy: Encodable, - I::BoundTy: Encodable, - I::PlaceholderType: Encodable, - I::InferTy: Encodable, - I::PredicateKind: Encodable, - I::AllocId: Encodable, -{ - fn encode(&self, e: &mut E) { - let disc = tykind_discriminant(self); - match self { - Bool => e.emit_enum_variant(disc, |_| {}), - Char => e.emit_enum_variant(disc, |_| {}), - Int(i) => e.emit_enum_variant(disc, |e| { - i.encode(e); - }), - Uint(u) => e.emit_enum_variant(disc, |e| { - u.encode(e); - }), - Float(f) => e.emit_enum_variant(disc, |e| { - f.encode(e); - }), - Adt(adt, args) => e.emit_enum_variant(disc, |e| { - adt.encode(e); - args.encode(e); - }), - Foreign(def_id) => e.emit_enum_variant(disc, |e| { - def_id.encode(e); - }), - Str => e.emit_enum_variant(disc, |_| {}), - Array(t, c) => e.emit_enum_variant(disc, |e| { - t.encode(e); - c.encode(e); - }), - Slice(t) => e.emit_enum_variant(disc, |e| { - t.encode(e); - }), - RawPtr(tam) => e.emit_enum_variant(disc, |e| { - tam.encode(e); - }), - Ref(r, t, m) => e.emit_enum_variant(disc, |e| { - r.encode(e); - t.encode(e); - m.encode(e); - }), - FnDef(def_id, args) => e.emit_enum_variant(disc, |e| { - def_id.encode(e); - args.encode(e); - }), - FnPtr(polyfnsig) => e.emit_enum_variant(disc, |e| { - polyfnsig.encode(e); - }), - Dynamic(l, r, repr) => e.emit_enum_variant(disc, |e| { - l.encode(e); - r.encode(e); - repr.encode(e); - }), - Closure(def_id, args) => e.emit_enum_variant(disc, |e| { - def_id.encode(e); - args.encode(e); - }), - Generator(def_id, args, m) => e.emit_enum_variant(disc, |e| { - def_id.encode(e); - args.encode(e); - m.encode(e); - }), - GeneratorWitness(def_id, args) => e.emit_enum_variant(disc, |e| { - def_id.encode(e); - args.encode(e); - }), - Never => e.emit_enum_variant(disc, |_| {}), - Tuple(args) => e.emit_enum_variant(disc, |e| { - args.encode(e); - }), - Alias(k, p) => e.emit_enum_variant(disc, |e| { - k.encode(e); - p.encode(e); - }), - Param(p) => e.emit_enum_variant(disc, |e| { - p.encode(e); - }), - Bound(d, b) => e.emit_enum_variant(disc, |e| { - d.encode(e); - b.encode(e); - }), - Placeholder(p) => e.emit_enum_variant(disc, |e| { - p.encode(e); - }), - Infer(i) => e.emit_enum_variant(disc, |e| { - 
i.encode(e); - }), - Error(d) => e.emit_enum_variant(disc, |e| { - d.encode(e); - }), - } - } -} - -// This is manually implemented because a derive would require `I: Decodable` -impl> Decodable for TyKind -where - I::ErrorGuaranteed: Decodable, - I::AdtDef: Decodable, - I::GenericArgsRef: Decodable, - I::DefId: Decodable, - I::Ty: Decodable, - I::Const: Decodable, - I::Region: Decodable, - I::TypeAndMut: Decodable, - I::Mutability: Decodable, - I::Movability: Decodable, - I::PolyFnSig: Decodable, - I::ListBinderExistentialPredicate: Decodable, - I::BinderListTy: Decodable, - I::ListTy: Decodable, - I::AliasTy: Decodable, - I::ParamTy: Decodable, - I::AliasTy: Decodable, - I::BoundTy: Decodable, - I::PlaceholderType: Decodable, - I::InferTy: Decodable, - I::PredicateKind: Decodable, - I::AllocId: Decodable, -{ - fn decode(d: &mut D) -> Self { - match Decoder::read_usize(d) { - 0 => Bool, - 1 => Char, - 2 => Int(Decodable::decode(d)), - 3 => Uint(Decodable::decode(d)), - 4 => Float(Decodable::decode(d)), - 5 => Adt(Decodable::decode(d), Decodable::decode(d)), - 6 => Foreign(Decodable::decode(d)), - 7 => Str, - 8 => Array(Decodable::decode(d), Decodable::decode(d)), - 9 => Slice(Decodable::decode(d)), - 10 => RawPtr(Decodable::decode(d)), - 11 => Ref(Decodable::decode(d), Decodable::decode(d), Decodable::decode(d)), - 12 => FnDef(Decodable::decode(d), Decodable::decode(d)), - 13 => FnPtr(Decodable::decode(d)), - 14 => Dynamic(Decodable::decode(d), Decodable::decode(d), Decodable::decode(d)), - 15 => Closure(Decodable::decode(d), Decodable::decode(d)), - 16 => Generator(Decodable::decode(d), Decodable::decode(d), Decodable::decode(d)), - 17 => GeneratorWitness(Decodable::decode(d), Decodable::decode(d)), - 18 => Never, - 19 => Tuple(Decodable::decode(d)), - 20 => Alias(Decodable::decode(d), Decodable::decode(d)), - 21 => Param(Decodable::decode(d)), - 22 => Bound(Decodable::decode(d), Decodable::decode(d)), - 23 => Placeholder(Decodable::decode(d)), - 24 => Infer(Decodable::decode(d)), - 25 => Error(Decodable::decode(d)), - _ => panic!( - "{}", - format!( - "invalid enum variant tag while decoding `{}`, expected 0..{}", - "TyKind", 26, - ) - ), - } - } -} - -// This is not a derived impl because a derive would require `I: HashStable` -#[allow(rustc::usage_of_ty_tykind)] -impl HashStable for TyKind -where - I::AdtDef: HashStable, - I::DefId: HashStable, - I::GenericArgsRef: HashStable, - I::Ty: HashStable, - I::Const: HashStable, - I::TypeAndMut: HashStable, - I::PolyFnSig: HashStable, - I::ListBinderExistentialPredicate: HashStable, - I::Region: HashStable, - I::Movability: HashStable, - I::Mutability: HashStable, - I::BinderListTy: HashStable, - I::ListTy: HashStable, - I::AliasTy: HashStable, - I::BoundTy: HashStable, - I::ParamTy: HashStable, - I::PlaceholderType: HashStable, - I::InferTy: HashStable, - I::ErrorGuaranteed: HashStable, -{ - #[inline] - fn hash_stable( - &self, - __hcx: &mut CTX, - __hasher: &mut rustc_data_structures::stable_hasher::StableHasher, - ) { - std::mem::discriminant(self).hash_stable(__hcx, __hasher); - match self { - Bool => {} - Char => {} - Int(i) => { - i.hash_stable(__hcx, __hasher); - } - Uint(u) => { - u.hash_stable(__hcx, __hasher); - } - Float(f) => { - f.hash_stable(__hcx, __hasher); - } - Adt(adt, args) => { - adt.hash_stable(__hcx, __hasher); - args.hash_stable(__hcx, __hasher); - } - Foreign(def_id) => { - def_id.hash_stable(__hcx, __hasher); - } - Str => {} - Array(t, c) => { - t.hash_stable(__hcx, __hasher); - c.hash_stable(__hcx, __hasher); - } - 
Slice(t) => { - t.hash_stable(__hcx, __hasher); - } - RawPtr(tam) => { - tam.hash_stable(__hcx, __hasher); - } - Ref(r, t, m) => { - r.hash_stable(__hcx, __hasher); - t.hash_stable(__hcx, __hasher); - m.hash_stable(__hcx, __hasher); - } - FnDef(def_id, args) => { - def_id.hash_stable(__hcx, __hasher); - args.hash_stable(__hcx, __hasher); - } - FnPtr(polyfnsig) => { - polyfnsig.hash_stable(__hcx, __hasher); - } - Dynamic(l, r, repr) => { - l.hash_stable(__hcx, __hasher); - r.hash_stable(__hcx, __hasher); - repr.hash_stable(__hcx, __hasher); - } - Closure(def_id, args) => { - def_id.hash_stable(__hcx, __hasher); - args.hash_stable(__hcx, __hasher); - } - Generator(def_id, args, m) => { - def_id.hash_stable(__hcx, __hasher); - args.hash_stable(__hcx, __hasher); - m.hash_stable(__hcx, __hasher); - } - GeneratorWitness(def_id, args) => { - def_id.hash_stable(__hcx, __hasher); - args.hash_stable(__hcx, __hasher); - } - Never => {} - Tuple(args) => { - args.hash_stable(__hcx, __hasher); - } - Alias(k, p) => { - k.hash_stable(__hcx, __hasher); - p.hash_stable(__hcx, __hasher); - } - Param(p) => { - p.hash_stable(__hcx, __hasher); - } - Bound(d, b) => { - d.hash_stable(__hcx, __hasher); - b.hash_stable(__hcx, __hasher); - } - Placeholder(p) => { - p.hash_stable(__hcx, __hasher); - } - Infer(i) => { - i.hash_stable(__hcx, __hasher); - } - Error(d) => { - d.hash_stable(__hcx, __hasher); - } - } - } -} - -/// Represents a constant in Rust. -// #[derive(derive_more::From)] -pub enum ConstKind { - /// A const generic parameter. - Param(I::ParamConst), - - /// Infer the value of the const. - Infer(I::InferConst), - - /// Bound const variable, used only when preparing a trait query. - Bound(DebruijnIndex, I::BoundConst), - - /// A placeholder const - universally quantified higher-ranked const. - Placeholder(I::PlaceholderConst), - - /// An unnormalized const item such as an anon const or assoc const or free const item. - /// Right now anything other than anon consts does not actually work properly but this - /// should - Unevaluated(I::AliasConst), - - /// Used to hold computed value. - Value(I::ValueConst), - - /// A placeholder for a const which could not be computed; this is - /// propagated to avoid useless error messages. 
- Error(I::ErrorGuaranteed), - - /// Unevaluated non-const-item, used by `feature(generic_const_exprs)` to represent - /// const arguments such as `N + 1` or `foo(N)` - Expr(I::ExprConst), -} - -const fn const_kind_discriminant(value: &ConstKind) -> usize { - match value { - ConstKind::Param(_) => 0, - ConstKind::Infer(_) => 1, - ConstKind::Bound(_, _) => 2, - ConstKind::Placeholder(_) => 3, - ConstKind::Unevaluated(_) => 4, - ConstKind::Value(_) => 5, - ConstKind::Error(_) => 6, - ConstKind::Expr(_) => 7, - } -} - -impl hash::Hash for ConstKind { - fn hash(&self, state: &mut H) { - const_kind_discriminant(self).hash(state); - match self { - ConstKind::Param(p) => p.hash(state), - ConstKind::Infer(i) => i.hash(state), - ConstKind::Bound(d, b) => { - d.hash(state); - b.hash(state); - } - ConstKind::Placeholder(p) => p.hash(state), - ConstKind::Unevaluated(u) => u.hash(state), - ConstKind::Value(v) => v.hash(state), - ConstKind::Error(e) => e.hash(state), - ConstKind::Expr(e) => e.hash(state), - } - } -} - -impl HashStable for ConstKind -where - I::ParamConst: HashStable, - I::InferConst: HashStable, - I::BoundConst: HashStable, - I::PlaceholderConst: HashStable, - I::AliasConst: HashStable, - I::ValueConst: HashStable, - I::ErrorGuaranteed: HashStable, - I::ExprConst: HashStable, -{ - fn hash_stable( - &self, - hcx: &mut CTX, - hasher: &mut rustc_data_structures::stable_hasher::StableHasher, - ) { - const_kind_discriminant(self).hash_stable(hcx, hasher); - match self { - ConstKind::Param(p) => p.hash_stable(hcx, hasher), - ConstKind::Infer(i) => i.hash_stable(hcx, hasher), - ConstKind::Bound(d, b) => { - d.hash_stable(hcx, hasher); - b.hash_stable(hcx, hasher); - } - ConstKind::Placeholder(p) => p.hash_stable(hcx, hasher), - ConstKind::Unevaluated(u) => u.hash_stable(hcx, hasher), - ConstKind::Value(v) => v.hash_stable(hcx, hasher), - ConstKind::Error(e) => e.hash_stable(hcx, hasher), - ConstKind::Expr(e) => e.hash_stable(hcx, hasher), - } - } -} - -impl> Decodable for ConstKind -where - I::ParamConst: Decodable, - I::InferConst: Decodable, - I::BoundConst: Decodable, - I::PlaceholderConst: Decodable, - I::AliasConst: Decodable, - I::ValueConst: Decodable, - I::ErrorGuaranteed: Decodable, - I::ExprConst: Decodable, -{ - fn decode(d: &mut D) -> Self { - match Decoder::read_usize(d) { - 0 => ConstKind::Param(Decodable::decode(d)), - 1 => ConstKind::Infer(Decodable::decode(d)), - 2 => ConstKind::Bound(Decodable::decode(d), Decodable::decode(d)), - 3 => ConstKind::Placeholder(Decodable::decode(d)), - 4 => ConstKind::Unevaluated(Decodable::decode(d)), - 5 => ConstKind::Value(Decodable::decode(d)), - 6 => ConstKind::Error(Decodable::decode(d)), - 7 => ConstKind::Expr(Decodable::decode(d)), - _ => panic!( - "{}", - format!( - "invalid enum variant tag while decoding `{}`, expected 0..{}", - "ConstKind", 8, - ) - ), - } - } -} - -impl> Encodable for ConstKind -where - I::ParamConst: Encodable, - I::InferConst: Encodable, - I::BoundConst: Encodable, - I::PlaceholderConst: Encodable, - I::AliasConst: Encodable, - I::ValueConst: Encodable, - I::ErrorGuaranteed: Encodable, - I::ExprConst: Encodable, -{ - fn encode(&self, e: &mut E) { - let disc = const_kind_discriminant(self); - match self { - ConstKind::Param(p) => e.emit_enum_variant(disc, |e| p.encode(e)), - ConstKind::Infer(i) => e.emit_enum_variant(disc, |e| i.encode(e)), - ConstKind::Bound(d, b) => e.emit_enum_variant(disc, |e| { - d.encode(e); - b.encode(e); - }), - ConstKind::Placeholder(p) => e.emit_enum_variant(disc, |e| p.encode(e)), - 
ConstKind::Unevaluated(u) => e.emit_enum_variant(disc, |e| u.encode(e)), - ConstKind::Value(v) => e.emit_enum_variant(disc, |e| v.encode(e)), - ConstKind::Error(er) => e.emit_enum_variant(disc, |e| er.encode(e)), - ConstKind::Expr(ex) => e.emit_enum_variant(disc, |e| ex.encode(e)), - } - } -} - -impl PartialOrd for ConstKind { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for ConstKind { - fn cmp(&self, other: &Self) -> Ordering { - const_kind_discriminant(self) - .cmp(&const_kind_discriminant(other)) - .then_with(|| match (self, other) { - (ConstKind::Param(p1), ConstKind::Param(p2)) => p1.cmp(p2), - (ConstKind::Infer(i1), ConstKind::Infer(i2)) => i1.cmp(i2), - (ConstKind::Bound(d1, b1), ConstKind::Bound(d2, b2)) => d1.cmp(d2).then_with(|| b1.cmp(b2)), - (ConstKind::Placeholder(p1), ConstKind::Placeholder(p2)) => p1.cmp(p2), - (ConstKind::Unevaluated(u1), ConstKind::Unevaluated(u2)) => u1.cmp(u2), - (ConstKind::Value(v1), ConstKind::Value(v2)) => v1.cmp(v2), - (ConstKind::Error(e1), ConstKind::Error(e2)) => e1.cmp(e2), - (ConstKind::Expr(e1), ConstKind::Expr(e2)) => e1.cmp(e2), - _ => { - debug_assert!(false, "This branch must be unreachable, maybe the match is missing an arm? self = {self:?}, other = {other:?}"); - Ordering::Equal - } - }) - } -} - -impl PartialEq for ConstKind { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - (Self::Param(l0), Self::Param(r0)) => l0 == r0, - (Self::Infer(l0), Self::Infer(r0)) => l0 == r0, - (Self::Bound(l0, l1), Self::Bound(r0, r1)) => l0 == r0 && l1 == r1, - (Self::Placeholder(l0), Self::Placeholder(r0)) => l0 == r0, - (Self::Unevaluated(l0), Self::Unevaluated(r0)) => l0 == r0, - (Self::Value(l0), Self::Value(r0)) => l0 == r0, - (Self::Error(l0), Self::Error(r0)) => l0 == r0, - (Self::Expr(l0), Self::Expr(r0)) => l0 == r0, - _ => false, - } - } -} - -impl Eq for ConstKind {} - -impl Clone for ConstKind { - fn clone(&self) -> Self { - match self { - Self::Param(arg0) => Self::Param(arg0.clone()), - Self::Infer(arg0) => Self::Infer(arg0.clone()), - Self::Bound(arg0, arg1) => Self::Bound(arg0.clone(), arg1.clone()), - Self::Placeholder(arg0) => Self::Placeholder(arg0.clone()), - Self::Unevaluated(arg0) => Self::Unevaluated(arg0.clone()), - Self::Value(arg0) => Self::Value(arg0.clone()), - Self::Error(arg0) => Self::Error(arg0.clone()), - Self::Expr(arg0) => Self::Expr(arg0.clone()), - } - } -} - -/// Representation of regions. Note that the NLL checker uses a distinct -/// representation of regions. For this reason, it internally replaces all the -/// regions with inference variables -- the index of the variable is then used -/// to index into internal NLL data structures. See `rustc_const_eval::borrow_check` -/// module for more information. -/// -/// Note: operations are on the wrapper `Region` type, which is interned, -/// rather than this type. -/// -/// ## The Region lattice within a given function -/// -/// In general, the region lattice looks like -/// -/// ```text -/// static ----------+-----...------+ (greatest) -/// | | | -/// early-bound and | | -/// free regions | | -/// | | | -/// | | | -/// empty(root) placeholder(U1) | -/// | / | -/// | / placeholder(Un) -/// empty(U1) -- / -/// | / -/// ... / -/// | / -/// empty(Un) -------- (smallest) -/// ``` -/// -/// Early-bound/free regions are the named lifetimes in scope from the -/// function declaration. They have relationships to one another -/// determined based on the declared relationships from the -/// function. 
-/// -/// Note that inference variables and bound regions are not included -/// in this diagram. In the case of inference variables, they should -/// be inferred to some other region from the diagram. In the case of -/// bound regions, they are excluded because they don't make sense to -/// include -- the diagram indicates the relationship between free -/// regions. -/// -/// ## Inference variables -/// -/// During region inference, we sometimes create inference variables, -/// represented as `ReVar`. These will be inferred by the code in -/// `infer::lexical_region_resolve` to some free region from the -/// lattice above (the minimal region that meets the -/// constraints). -/// -/// During NLL checking, where regions are defined differently, we -/// also use `ReVar` -- in that case, the index is used to index into -/// the NLL region checker's data structures. The variable may in fact -/// represent either a free region or an inference variable, in that -/// case. -/// -/// ## Bound Regions -/// -/// These are regions that are stored behind a binder and must be substituted -/// with some concrete region before being used. There are two kind of -/// bound regions: early-bound, which are bound in an item's `Generics`, -/// and are substituted by an `GenericArgs`, and late-bound, which are part of -/// higher-ranked types (e.g., `for<'a> fn(&'a ())`), and are substituted by -/// the likes of `liberate_late_bound_regions`. The distinction exists -/// because higher-ranked lifetimes aren't supported in all places. See [1][2]. -/// -/// Unlike `Param`s, bound regions are not supposed to exist "in the wild" -/// outside their binder, e.g., in types passed to type inference, and -/// should first be substituted (by placeholder regions, free regions, -/// or region variables). -/// -/// ## Placeholder and Free Regions -/// -/// One often wants to work with bound regions without knowing their precise -/// identity. For example, when checking a function, the lifetime of a borrow -/// can end up being assigned to some region parameter. In these cases, -/// it must be ensured that bounds on the region can't be accidentally -/// assumed without being checked. -/// -/// To do this, we replace the bound regions with placeholder markers, -/// which don't satisfy any relation not explicitly provided. -/// -/// There are two kinds of placeholder regions in rustc: `ReFree` and -/// `RePlaceholder`. When checking an item's body, `ReFree` is supposed -/// to be used. These also support explicit bounds: both the internally-stored -/// *scope*, which the region is assumed to outlive, as well as other -/// relations stored in the `FreeRegionMap`. Note that these relations -/// aren't checked when you `make_subregion` (or `eq_types`), only by -/// `resolve_regions_and_report_errors`. -/// -/// When working with higher-ranked types, some region relations aren't -/// yet known, so you can't just call `resolve_regions_and_report_errors`. -/// `RePlaceholder` is designed for this purpose. In these contexts, -/// there's also the risk that some inference variable laying around will -/// get unified with your placeholder region: if you want to check whether -/// `for<'a> Foo<'_>: 'a`, and you substitute your bound region `'a` -/// with a placeholder region `'%a`, the variable `'_` would just be -/// instantiated to the placeholder region `'%a`, which is wrong because -/// the inference variable is supposed to satisfy the relation -/// *for every value of the placeholder region*. 
To ensure that doesn't -/// happen, you can use `leak_check`. This is more clearly explained -/// by the [rustc dev guide]. -/// -/// [1]: https://smallcultfollowing.com/babysteps/blog/2013/10/29/intermingled-parameter-lists/ -/// [2]: https://smallcultfollowing.com/babysteps/blog/2013/11/04/intermingled-parameter-lists/ -/// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html -pub enum RegionKind { - /// Region bound in a type or fn declaration which will be - /// substituted 'early' -- that is, at the same time when type - /// parameters are substituted. - ReEarlyBound(I::EarlyBoundRegion), - - /// Region bound in a function scope, which will be substituted when the - /// function is called. - ReLateBound(DebruijnIndex, I::BoundRegion), - - /// When checking a function body, the types of all arguments and so forth - /// that refer to bound region parameters are modified to refer to free - /// region parameters. - ReFree(I::FreeRegion), - - /// Static data that has an "infinite" lifetime. Top in the region lattice. - ReStatic, - - /// A region variable. Should not exist outside of type inference. - ReVar(I::RegionVid), - - /// A placeholder region -- basically, the higher-ranked version of `ReFree`. - /// Should not exist outside of type inference. - RePlaceholder(I::PlaceholderRegion), - - /// Erased region, used by trait selection, in MIR and during codegen. - ReErased, - - /// A region that resulted from some other error. Used exclusively for diagnostics. - ReError(I::ErrorGuaranteed), -} - -// This is manually implemented for `RegionKind` because `std::mem::discriminant` -// returns an opaque value that is `PartialEq` but not `PartialOrd` -#[inline] -const fn regionkind_discriminant(value: &RegionKind) -> usize { - match value { - ReEarlyBound(_) => 0, - ReLateBound(_, _) => 1, - ReFree(_) => 2, - ReStatic => 3, - ReVar(_) => 4, - RePlaceholder(_) => 5, - ReErased => 6, - ReError(_) => 7, - } -} - -// This is manually implemented because a derive would require `I: Copy` -impl Copy for RegionKind -where - I::EarlyBoundRegion: Copy, - I::BoundRegion: Copy, - I::FreeRegion: Copy, - I::RegionVid: Copy, - I::PlaceholderRegion: Copy, - I::ErrorGuaranteed: Copy, -{ -} - -// This is manually implemented because a derive would require `I: Clone` -impl Clone for RegionKind { - fn clone(&self) -> Self { - match self { - ReEarlyBound(r) => ReEarlyBound(r.clone()), - ReLateBound(d, r) => ReLateBound(*d, r.clone()), - ReFree(r) => ReFree(r.clone()), - ReStatic => ReStatic, - ReVar(r) => ReVar(r.clone()), - RePlaceholder(r) => RePlaceholder(r.clone()), - ReErased => ReErased, - ReError(r) => ReError(r.clone()), - } - } -} - -// This is manually implemented because a derive would require `I: PartialEq` -impl PartialEq for RegionKind { - #[inline] - fn eq(&self, other: &RegionKind) -> bool { - regionkind_discriminant(self) == regionkind_discriminant(other) - && match (self, other) { - (ReEarlyBound(a_r), ReEarlyBound(b_r)) => a_r == b_r, - (ReLateBound(a_d, a_r), ReLateBound(b_d, b_r)) => a_d == b_d && a_r == b_r, - (ReFree(a_r), ReFree(b_r)) => a_r == b_r, - (ReStatic, ReStatic) => true, - (ReVar(a_r), ReVar(b_r)) => a_r == b_r, - (RePlaceholder(a_r), RePlaceholder(b_r)) => a_r == b_r, - (ReErased, ReErased) => true, - (ReError(_), ReError(_)) => true, - _ => { - debug_assert!( - false, - "This branch must be unreachable, maybe the match is missing an arm? 
self = {self:?}, other = {other:?}" - ); - true - } - } - } -} - -// This is manually implemented because a derive would require `I: Eq` -impl Eq for RegionKind {} - -// This is manually implemented because a derive would require `I: PartialOrd` -impl PartialOrd for RegionKind { - #[inline] - fn partial_cmp(&self, other: &RegionKind) -> Option { - Some(self.cmp(other)) - } -} - -// This is manually implemented because a derive would require `I: Ord` -impl Ord for RegionKind { - #[inline] - fn cmp(&self, other: &RegionKind) -> Ordering { - regionkind_discriminant(self).cmp(®ionkind_discriminant(other)).then_with(|| { - match (self, other) { - (ReEarlyBound(a_r), ReEarlyBound(b_r)) => a_r.cmp(b_r), - (ReLateBound(a_d, a_r), ReLateBound(b_d, b_r)) => { - a_d.cmp(b_d).then_with(|| a_r.cmp(b_r)) - } - (ReFree(a_r), ReFree(b_r)) => a_r.cmp(b_r), - (ReStatic, ReStatic) => Ordering::Equal, - (ReVar(a_r), ReVar(b_r)) => a_r.cmp(b_r), - (RePlaceholder(a_r), RePlaceholder(b_r)) => a_r.cmp(b_r), - (ReErased, ReErased) => Ordering::Equal, - _ => { - debug_assert!(false, "This branch must be unreachable, maybe the match is missing an arm? self = self = {self:?}, other = {other:?}"); - Ordering::Equal - } - } - }) - } -} - -// This is manually implemented because a derive would require `I: Hash` -impl hash::Hash for RegionKind { - fn hash(&self, state: &mut H) -> () { - regionkind_discriminant(self).hash(state); - match self { - ReEarlyBound(r) => r.hash(state), - ReLateBound(d, r) => { - d.hash(state); - r.hash(state) - } - ReFree(r) => r.hash(state), - ReStatic => (), - ReVar(r) => r.hash(state), - RePlaceholder(r) => r.hash(state), - ReErased => (), - ReError(_) => (), - } - } -} - -impl DebugWithInfcx for RegionKind { - fn fmt>( - this: OptWithInfcx<'_, I, InfCtx, &Self>, - f: &mut core::fmt::Formatter<'_>, - ) -> core::fmt::Result { - match this.data { - ReEarlyBound(data) => write!(f, "ReEarlyBound({data:?})"), - - ReLateBound(binder_id, bound_region) => { - write!(f, "ReLateBound({binder_id:?}, {bound_region:?})") - } - - ReFree(fr) => write!(f, "{fr:?}"), - - ReStatic => f.write_str("ReStatic"), - - ReVar(vid) => write!(f, "{:?}", &this.wrap(vid)), - - RePlaceholder(placeholder) => write!(f, "RePlaceholder({placeholder:?})"), - - ReErased => f.write_str("ReErased"), - - ReError(_) => f.write_str("ReError"), - } - } -} -impl fmt::Debug for RegionKind { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - OptWithInfcx::new_no_ctx(self).fmt(f) - } -} - -// This is manually implemented because a derive would require `I: Encodable` -impl Encodable for RegionKind -where - I::EarlyBoundRegion: Encodable, - I::BoundRegion: Encodable, - I::FreeRegion: Encodable, - I::RegionVid: Encodable, - I::PlaceholderRegion: Encodable, -{ - fn encode(&self, e: &mut E) { - let disc = regionkind_discriminant(self); - match self { - ReEarlyBound(a) => e.emit_enum_variant(disc, |e| { - a.encode(e); - }), - ReLateBound(a, b) => e.emit_enum_variant(disc, |e| { - a.encode(e); - b.encode(e); - }), - ReFree(a) => e.emit_enum_variant(disc, |e| { - a.encode(e); - }), - ReStatic => e.emit_enum_variant(disc, |_| {}), - ReVar(a) => e.emit_enum_variant(disc, |e| { - a.encode(e); - }), - RePlaceholder(a) => e.emit_enum_variant(disc, |e| { - a.encode(e); - }), - ReErased => e.emit_enum_variant(disc, |_| {}), - ReError(_) => e.emit_enum_variant(disc, |_| {}), - } - } -} - -// This is manually implemented because a derive would require `I: Decodable` -impl> Decodable for RegionKind -where - I::EarlyBoundRegion: Decodable, - 
I::BoundRegion: Decodable, - I::FreeRegion: Decodable, - I::RegionVid: Decodable, - I::PlaceholderRegion: Decodable, - I::ErrorGuaranteed: Decodable, -{ - fn decode(d: &mut D) -> Self { - match Decoder::read_usize(d) { - 0 => ReEarlyBound(Decodable::decode(d)), - 1 => ReLateBound(Decodable::decode(d), Decodable::decode(d)), - 2 => ReFree(Decodable::decode(d)), - 3 => ReStatic, - 4 => ReVar(Decodable::decode(d)), - 5 => RePlaceholder(Decodable::decode(d)), - 6 => ReErased, - 7 => ReError(Decodable::decode(d)), - _ => panic!( - "{}", - format!( - "invalid enum variant tag while decoding `{}`, expected 0..{}", - "RegionKind", 8, - ) - ), - } - } -} - -// This is not a derived impl because a derive would require `I: HashStable` -impl HashStable for RegionKind -where - I::EarlyBoundRegion: HashStable, - I::BoundRegion: HashStable, - I::FreeRegion: HashStable, - I::RegionVid: HashStable, - I::PlaceholderRegion: HashStable, -{ - #[inline] - fn hash_stable( - &self, - hcx: &mut CTX, - hasher: &mut rustc_data_structures::stable_hasher::StableHasher, - ) { - std::mem::discriminant(self).hash_stable(hcx, hasher); - match self { - ReErased | ReStatic | ReError(_) => { - // No variant fields to hash for these ... - } - ReLateBound(d, r) => { - d.hash_stable(hcx, hasher); - r.hash_stable(hcx, hasher); - } - ReEarlyBound(r) => { - r.hash_stable(hcx, hasher); - } - ReFree(r) => { - r.hash_stable(hcx, hasher); - } - RePlaceholder(r) => { - r.hash_stable(hcx, hasher); - } - ReVar(_) => { - panic!("region variables should not be hashed: {self:?}") - } - } - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/ty_info.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/ty_info.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/ty_info.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/ty_info.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,13 +1,8 @@ -use std::{ - cmp::Ordering, - hash::{Hash, Hasher}, - ops::Deref, -}; - -use rustc_data_structures::{ - fingerprint::Fingerprint, - stable_hasher::{HashStable, StableHasher}, -}; +use rustc_data_structures::fingerprint::Fingerprint; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use std::cmp::Ordering; +use std::hash::{Hash, Hasher}; +use std::ops::Deref; use crate::{DebruijnIndex, TypeFlags}; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/ty_kind.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/ty_kind.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/ty_kind.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/ty_kind.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,928 @@ +#![allow(rustc::usage_of_ty_tykind)] + +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_data_structures::unify::{EqUnifyValue, UnifyKey}; +use std::fmt; +use std::mem::discriminant; + +use crate::HashStableContext; +use crate::Interner; +use crate::{DebruijnIndex, DebugWithInfcx, InferCtxtLike, WithInfcx}; + +use self::TyKind::*; + +/// The movability of a coroutine / closure literal: +/// whether a coroutine contains self-references, causing it to be `!Unpin`. +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable, Debug, Copy)] +#[derive(HashStable_Generic)] +pub enum Movability { + /// May contain self-references, `!Unpin`. + Static, + /// Must not contain self-references, `Unpin`. 
+ Movable, +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Copy)] +#[derive(HashStable_Generic, Encodable, Decodable)] +pub enum Mutability { + // N.B. Order is deliberate, so that Not < Mut + Not, + Mut, +} + +impl Mutability { + pub fn invert(self) -> Self { + match self { + Mutability::Mut => Mutability::Not, + Mutability::Not => Mutability::Mut, + } + } + + /// Returns `""` (empty string) or `"mut "` depending on the mutability. + pub fn prefix_str(self) -> &'static str { + match self { + Mutability::Mut => "mut ", + Mutability::Not => "", + } + } + + /// Returns `"&"` or `"&mut "` depending on the mutability. + pub fn ref_prefix_str(self) -> &'static str { + match self { + Mutability::Not => "&", + Mutability::Mut => "&mut ", + } + } + + /// Returns `""` (empty string) or `"mutably "` depending on the mutability. + pub fn mutably_str(self) -> &'static str { + match self { + Mutability::Not => "", + Mutability::Mut => "mutably ", + } + } + + /// Return `true` if self is mutable + pub fn is_mut(self) -> bool { + matches!(self, Self::Mut) + } + + /// Return `true` if self is **not** mutable + pub fn is_not(self) -> bool { + matches!(self, Self::Not) + } +} + +/// Specifies how a trait object is represented. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +#[derive(Encodable, Decodable, HashStable_Generic)] +pub enum DynKind { + /// An unsized `dyn Trait` object + Dyn, + /// A sized `dyn* Trait` object + /// + /// These objects are represented as a `(data, vtable)` pair where `data` is a value of some + /// ptr-sized and ptr-aligned dynamically determined type `T` and `vtable` is a pointer to the + /// vtable of `impl T for Trait`. This allows a `dyn*` object to be treated agnostically with + /// respect to whether it points to a `Box`, `Rc`, etc. + DynStar, +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +#[derive(Encodable, Decodable, HashStable_Generic)] +pub enum AliasKind { + /// A projection `::AssocType`. + /// Can get normalized away if monomorphic enough. + Projection, + /// An associated type in an inherent `impl` + Inherent, + /// An opaque type (usually from `impl Trait` in type aliases or function return types) + /// Can only be normalized away in RevealAll mode + Opaque, + /// A type alias that actually checks its trait bounds. + /// Currently only used if the type alias references opaque types. + /// Can always be normalized away. + Weak, +} + +/// Defines the kinds of types used by the type system. +/// +/// Types written by the user start out as `hir::TyKind` and get +/// converted to this representation using `AstConv::ast_ty_to_ty`. +#[rustc_diagnostic_item = "IrTyKind"] +#[derive(derivative::Derivative)] +#[derivative( + Clone(bound = ""), + PartialOrd(bound = ""), + PartialOrd = "feature_allow_slow_enum", + Ord(bound = ""), + Ord = "feature_allow_slow_enum", + Hash(bound = "") +)] +#[derive(TyEncodable, TyDecodable)] +pub enum TyKind { + /// The primitive boolean type. Written as `bool`. + Bool, + + /// The primitive character type; holds a Unicode scalar value + /// (a non-surrogate code point). Written as `char`. + Char, + + /// A primitive signed integer type. For example, `i32`. + Int(IntTy), + + /// A primitive unsigned integer type. For example, `u32`. + Uint(UintTy), + + /// A primitive floating-point type. For example, `f64`. + Float(FloatTy), + + /// Algebraic data types (ADT). For example: structures, enumerations and unions. 
+ /// + /// For example, the type `List` would be represented using the `AdtDef` + /// for `struct List` and the args `[i32]`. + /// + /// Note that generic parameters in fields only get lazily substituted + /// by using something like `adt_def.all_fields().map(|field| field.ty(tcx, args))`. + Adt(I::AdtDef, I::GenericArgs), + + /// An unsized FFI type that is opaque to Rust. Written as `extern type T`. + Foreign(I::DefId), + + /// The pointee of a string slice. Written as `str`. + Str, + + /// An array with the given length. Written as `[T; N]`. + Array(I::Ty, I::Const), + + /// The pointee of an array slice. Written as `[T]`. + Slice(I::Ty), + + /// A raw pointer. Written as `*mut T` or `*const T` + RawPtr(I::TypeAndMut), + + /// A reference; a pointer with an associated lifetime. Written as + /// `&'a mut T` or `&'a T`. + Ref(I::Region, I::Ty, Mutability), + + /// The anonymous type of a function declaration/definition. Each + /// function has a unique type. + /// + /// For the function `fn foo() -> i32 { 3 }` this type would be + /// shown to the user as `fn() -> i32 {foo}`. + /// + /// For example the type of `bar` here: + /// ```rust + /// fn foo() -> i32 { 1 } + /// let bar = foo; // bar: fn() -> i32 {foo} + /// ``` + FnDef(I::DefId, I::GenericArgs), + + /// A pointer to a function. Written as `fn() -> i32`. + /// + /// Note that both functions and closures start out as either + /// [FnDef] or [Closure] which can be then be coerced to this variant. + /// + /// For example the type of `bar` here: + /// + /// ```rust + /// fn foo() -> i32 { 1 } + /// let bar: fn() -> i32 = foo; + /// ``` + FnPtr(I::PolyFnSig), + + /// A trait object. Written as `dyn for<'b> Trait<'b, Assoc = u32> + Send + 'a`. + Dynamic(I::BoundExistentialPredicates, I::Region, DynKind), + + /// The anonymous type of a closure. Used to represent the type of `|a| a`. + /// + /// Closure args contain both the - potentially substituted - generic parameters + /// of its parent and some synthetic parameters. See the documentation for + /// `ClosureArgs` for more details. + Closure(I::DefId, I::GenericArgs), + + /// The anonymous type of a coroutine. Used to represent the type of + /// `|a| yield a`. + /// + /// For more info about coroutine args, visit the documentation for + /// `CoroutineArgs`. + Coroutine(I::DefId, I::GenericArgs, Movability), + + /// A type representing the types stored inside a coroutine. + /// This should only appear as part of the `CoroutineArgs`. + /// + /// Unlike upvars, the witness can reference lifetimes from + /// inside of the coroutine itself. To deal with them in + /// the type of the coroutine, we convert them to higher ranked + /// lifetimes bound by the witness itself. + /// + /// This contains the `DefId` and the `GenericArgsRef` of the coroutine. + /// The actual witness types are computed on MIR by the `mir_coroutine_witnesses` query. + /// + /// Looking at the following example, the witness for this coroutine + /// may end up as something like `for<'a> [Vec, &'a Vec]`: + /// + /// ```ignore UNSOLVED (ask @compiler-errors, should this error? can we just swap the yields?) + /// #![feature(coroutines)] + /// |a| { + /// let x = &vec![3]; + /// yield a; + /// yield x[0]; + /// } + /// # ; + /// ``` + CoroutineWitness(I::DefId, I::GenericArgs), + + /// The never type `!`. + Never, + + /// A tuple type. For example, `(i32, bool)`. + Tuple(I::Tys), + + /// A projection, opaque type, weak type alias, or inherent associated type. 
+ /// All of these types are represented as pairs of def-id and args, and can + /// be normalized, so they are grouped conceptually. + Alias(AliasKind, I::AliasTy), + + /// A type parameter; for example, `T` in `fn f(x: T) {}`. + Param(I::ParamTy), + + /// Bound type variable, used to represent the `'a` in `for<'a> fn(&'a ())`. + /// + /// For canonical queries, we replace inference variables with bound variables, + /// so e.g. when checking whether `&'_ (): Trait<_>` holds, we canonicalize that to + /// `for<'a, T> &'a (): Trait` and then convert the introduced bound variables + /// back to inference variables in a new inference context when inside of the query. + /// + /// It is conventional to render anonymous bound types like `^N` or `^D_N`, + /// where `N` is the bound variable's anonymous index into the binder, and + /// `D` is the debruijn index, or totally omitted if the debruijn index is zero. + /// + /// See the `rustc-dev-guide` for more details about + /// [higher-ranked trait bounds][1] and [canonical queries][2]. + /// + /// [1]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html + /// [2]: https://rustc-dev-guide.rust-lang.org/traits/canonical-queries.html + Bound(DebruijnIndex, I::BoundTy), + + /// A placeholder type, used during higher ranked subtyping to instantiate + /// bound variables. + /// + /// It is conventional to render anonymous placeholer types like `!N` or `!U_N`, + /// where `N` is the placeholder variable's anonymous index (which corresponds + /// to the bound variable's index from the binder from which it was instantiated), + /// and `U` is the universe index in which it is instantiated, or totally omitted + /// if the universe index is zero. + Placeholder(I::PlaceholderTy), + + /// A type variable used during type checking. + /// + /// Similar to placeholders, inference variables also live in a universe to + /// correctly deal with higher ranked types. Though unlike placeholders, + /// that universe is stored in the `InferCtxt` instead of directly + /// inside of the type. + Infer(I::InferTy), + + /// A placeholder for a type which could not be computed; this is + /// propagated to avoid useless error messages. + Error(I::ErrorGuaranteed), +} + +impl TyKind { + #[inline] + pub fn is_primitive(&self) -> bool { + matches!(self, Bool | Char | Int(_) | Uint(_) | Float(_)) + } +} + +// This is manually implemented for `TyKind` because `std::mem::discriminant` +// returns an opaque value that is `PartialEq` but not `PartialOrd` +#[inline] +const fn tykind_discriminant(value: &TyKind) -> usize { + match value { + Bool => 0, + Char => 1, + Int(_) => 2, + Uint(_) => 3, + Float(_) => 4, + Adt(_, _) => 5, + Foreign(_) => 6, + Str => 7, + Array(_, _) => 8, + Slice(_) => 9, + RawPtr(_) => 10, + Ref(_, _, _) => 11, + FnDef(_, _) => 12, + FnPtr(_) => 13, + Dynamic(..) => 14, + Closure(_, _) => 15, + Coroutine(_, _, _) => 16, + CoroutineWitness(_, _) => 17, + Never => 18, + Tuple(_) => 19, + Alias(_, _) => 20, + Param(_) => 21, + Bound(_, _) => 22, + Placeholder(_) => 23, + Infer(_) => 24, + Error(_) => 25, + } +} + +// This is manually implemented because a derive would require `I: PartialEq` +impl PartialEq for TyKind { + #[inline] + fn eq(&self, other: &TyKind) -> bool { + // You might expect this `match` to be preceded with this: + // + // tykind_discriminant(self) == tykind_discriminant(other) && + // + // but the data patterns in practice are such that a comparison + // succeeds 99%+ of the time, and it's faster to omit it. 
+ match (self, other) { + (Int(a_i), Int(b_i)) => a_i == b_i, + (Uint(a_u), Uint(b_u)) => a_u == b_u, + (Float(a_f), Float(b_f)) => a_f == b_f, + (Adt(a_d, a_s), Adt(b_d, b_s)) => a_d == b_d && a_s == b_s, + (Foreign(a_d), Foreign(b_d)) => a_d == b_d, + (Array(a_t, a_c), Array(b_t, b_c)) => a_t == b_t && a_c == b_c, + (Slice(a_t), Slice(b_t)) => a_t == b_t, + (RawPtr(a_t), RawPtr(b_t)) => a_t == b_t, + (Ref(a_r, a_t, a_m), Ref(b_r, b_t, b_m)) => a_r == b_r && a_t == b_t && a_m == b_m, + (FnDef(a_d, a_s), FnDef(b_d, b_s)) => a_d == b_d && a_s == b_s, + (FnPtr(a_s), FnPtr(b_s)) => a_s == b_s, + (Dynamic(a_p, a_r, a_repr), Dynamic(b_p, b_r, b_repr)) => { + a_p == b_p && a_r == b_r && a_repr == b_repr + } + (Closure(a_d, a_s), Closure(b_d, b_s)) => a_d == b_d && a_s == b_s, + (Coroutine(a_d, a_s, a_m), Coroutine(b_d, b_s, b_m)) => { + a_d == b_d && a_s == b_s && a_m == b_m + } + (CoroutineWitness(a_d, a_s), CoroutineWitness(b_d, b_s)) => a_d == b_d && a_s == b_s, + (Tuple(a_t), Tuple(b_t)) => a_t == b_t, + (Alias(a_i, a_p), Alias(b_i, b_p)) => a_i == b_i && a_p == b_p, + (Param(a_p), Param(b_p)) => a_p == b_p, + (Bound(a_d, a_b), Bound(b_d, b_b)) => a_d == b_d && a_b == b_b, + (Placeholder(a_p), Placeholder(b_p)) => a_p == b_p, + (Infer(a_t), Infer(b_t)) => a_t == b_t, + (Error(a_e), Error(b_e)) => a_e == b_e, + (Bool, Bool) | (Char, Char) | (Str, Str) | (Never, Never) => true, + _ => { + debug_assert!( + tykind_discriminant(self) != tykind_discriminant(other), + "This branch must be unreachable, maybe the match is missing an arm? self = self = {self:?}, other = {other:?}" + ); + false + } + } + } +} + +// This is manually implemented because a derive would require `I: Eq` +impl Eq for TyKind {} + +impl DebugWithInfcx for TyKind { + fn fmt>( + this: WithInfcx<'_, Infcx, &Self>, + f: &mut core::fmt::Formatter<'_>, + ) -> fmt::Result { + match this.data { + Bool => write!(f, "bool"), + Char => write!(f, "char"), + Int(i) => write!(f, "{i:?}"), + Uint(u) => write!(f, "{u:?}"), + Float(float) => write!(f, "{float:?}"), + Adt(d, s) => { + write!(f, "{d:?}")?; + let mut s = s.clone().into_iter(); + let first = s.next(); + match first { + Some(first) => write!(f, "<{:?}", first)?, + None => return Ok(()), + }; + + for arg in s { + write!(f, ", {:?}", arg)?; + } + + write!(f, ">") + } + Foreign(d) => f.debug_tuple_field1_finish("Foreign", d), + Str => write!(f, "str"), + Array(t, c) => write!(f, "[{:?}; {:?}]", &this.wrap(t), &this.wrap(c)), + Slice(t) => write!(f, "[{:?}]", &this.wrap(t)), + RawPtr(p) => { + let (ty, mutbl) = I::ty_and_mut_to_parts(p.clone()); + match mutbl { + Mutability::Mut => write!(f, "*mut "), + Mutability::Not => write!(f, "*const "), + }?; + write!(f, "{:?}", &this.wrap(ty)) + } + Ref(r, t, m) => match m { + Mutability::Mut => write!(f, "&{:?} mut {:?}", &this.wrap(r), &this.wrap(t)), + Mutability::Not => write!(f, "&{:?} {:?}", &this.wrap(r), &this.wrap(t)), + }, + FnDef(d, s) => f.debug_tuple_field2_finish("FnDef", d, &this.wrap(s)), + FnPtr(s) => write!(f, "{:?}", &this.wrap(s)), + Dynamic(p, r, repr) => match repr { + DynKind::Dyn => write!(f, "dyn {:?} + {:?}", &this.wrap(p), &this.wrap(r)), + DynKind::DynStar => { + write!(f, "dyn* {:?} + {:?}", &this.wrap(p), &this.wrap(r)) + } + }, + Closure(d, s) => f.debug_tuple_field2_finish("Closure", d, &this.wrap(s)), + Coroutine(d, s, m) => f.debug_tuple_field3_finish("Coroutine", d, &this.wrap(s), m), + CoroutineWitness(d, s) => { + f.debug_tuple_field2_finish("CoroutineWitness", d, &this.wrap(s)) + } + Never => write!(f, 
"!"), + Tuple(t) => { + write!(f, "(")?; + let mut count = 0; + for ty in t.clone() { + if count > 0 { + write!(f, ", ")?; + } + write!(f, "{:?}", &this.wrap(ty))?; + count += 1; + } + // unary tuples need a trailing comma + if count == 1 { + write!(f, ",")?; + } + write!(f, ")") + } + Alias(i, a) => f.debug_tuple_field2_finish("Alias", i, &this.wrap(a)), + Param(p) => write!(f, "{p:?}"), + Bound(d, b) => crate::debug_bound_var(f, *d, b), + Placeholder(p) => write!(f, "{p:?}"), + Infer(t) => write!(f, "{:?}", this.wrap(t)), + TyKind::Error(_) => write!(f, "{{type error}}"), + } + } +} + +// This is manually implemented because a derive would require `I: Debug` +impl fmt::Debug for TyKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + WithInfcx::with_no_infcx(self).fmt(f) + } +} + +// This is not a derived impl because a derive would require `I: HashStable` +#[allow(rustc::usage_of_ty_tykind)] +impl HashStable for TyKind +where + I::AdtDef: HashStable, + I::DefId: HashStable, + I::GenericArgs: HashStable, + I::Ty: HashStable, + I::Const: HashStable, + I::TypeAndMut: HashStable, + I::PolyFnSig: HashStable, + I::BoundExistentialPredicates: HashStable, + I::Region: HashStable, + I::Tys: HashStable, + I::AliasTy: HashStable, + I::BoundTy: HashStable, + I::ParamTy: HashStable, + I::PlaceholderTy: HashStable, + I::InferTy: HashStable, + I::ErrorGuaranteed: HashStable, +{ + #[inline] + fn hash_stable(&self, __hcx: &mut CTX, __hasher: &mut StableHasher) { + std::mem::discriminant(self).hash_stable(__hcx, __hasher); + match self { + Bool => {} + Char => {} + Int(i) => { + i.hash_stable(__hcx, __hasher); + } + Uint(u) => { + u.hash_stable(__hcx, __hasher); + } + Float(f) => { + f.hash_stable(__hcx, __hasher); + } + Adt(adt, args) => { + adt.hash_stable(__hcx, __hasher); + args.hash_stable(__hcx, __hasher); + } + Foreign(def_id) => { + def_id.hash_stable(__hcx, __hasher); + } + Str => {} + Array(t, c) => { + t.hash_stable(__hcx, __hasher); + c.hash_stable(__hcx, __hasher); + } + Slice(t) => { + t.hash_stable(__hcx, __hasher); + } + RawPtr(tam) => { + tam.hash_stable(__hcx, __hasher); + } + Ref(r, t, m) => { + r.hash_stable(__hcx, __hasher); + t.hash_stable(__hcx, __hasher); + m.hash_stable(__hcx, __hasher); + } + FnDef(def_id, args) => { + def_id.hash_stable(__hcx, __hasher); + args.hash_stable(__hcx, __hasher); + } + FnPtr(polyfnsig) => { + polyfnsig.hash_stable(__hcx, __hasher); + } + Dynamic(l, r, repr) => { + l.hash_stable(__hcx, __hasher); + r.hash_stable(__hcx, __hasher); + repr.hash_stable(__hcx, __hasher); + } + Closure(def_id, args) => { + def_id.hash_stable(__hcx, __hasher); + args.hash_stable(__hcx, __hasher); + } + Coroutine(def_id, args, m) => { + def_id.hash_stable(__hcx, __hasher); + args.hash_stable(__hcx, __hasher); + m.hash_stable(__hcx, __hasher); + } + CoroutineWitness(def_id, args) => { + def_id.hash_stable(__hcx, __hasher); + args.hash_stable(__hcx, __hasher); + } + Never => {} + Tuple(args) => { + args.hash_stable(__hcx, __hasher); + } + Alias(k, p) => { + k.hash_stable(__hcx, __hasher); + p.hash_stable(__hcx, __hasher); + } + Param(p) => { + p.hash_stable(__hcx, __hasher); + } + Bound(d, b) => { + d.hash_stable(__hcx, __hasher); + b.hash_stable(__hcx, __hasher); + } + Placeholder(p) => { + p.hash_stable(__hcx, __hasher); + } + Infer(i) => { + i.hash_stable(__hcx, __hasher); + } + Error(d) => { + d.hash_stable(__hcx, __hasher); + } + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Encodable, Decodable, HashStable_Generic)] 
+pub enum IntTy { + Isize, + I8, + I16, + I32, + I64, + I128, +} + +impl IntTy { + pub fn name_str(&self) -> &'static str { + match *self { + IntTy::Isize => "isize", + IntTy::I8 => "i8", + IntTy::I16 => "i16", + IntTy::I32 => "i32", + IntTy::I64 => "i64", + IntTy::I128 => "i128", + } + } + + pub fn bit_width(&self) -> Option { + Some(match *self { + IntTy::Isize => return None, + IntTy::I8 => 8, + IntTy::I16 => 16, + IntTy::I32 => 32, + IntTy::I64 => 64, + IntTy::I128 => 128, + }) + } + + pub fn normalize(&self, target_width: u32) -> Self { + match self { + IntTy::Isize => match target_width { + 16 => IntTy::I16, + 32 => IntTy::I32, + 64 => IntTy::I64, + _ => unreachable!(), + }, + _ => *self, + } + } + + pub fn to_unsigned(self) -> UintTy { + match self { + IntTy::Isize => UintTy::Usize, + IntTy::I8 => UintTy::U8, + IntTy::I16 => UintTy::U16, + IntTy::I32 => UintTy::U32, + IntTy::I64 => UintTy::U64, + IntTy::I128 => UintTy::U128, + } + } +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Copy)] +#[derive(Encodable, Decodable, HashStable_Generic)] +pub enum UintTy { + Usize, + U8, + U16, + U32, + U64, + U128, +} + +impl UintTy { + pub fn name_str(&self) -> &'static str { + match *self { + UintTy::Usize => "usize", + UintTy::U8 => "u8", + UintTy::U16 => "u16", + UintTy::U32 => "u32", + UintTy::U64 => "u64", + UintTy::U128 => "u128", + } + } + + pub fn bit_width(&self) -> Option { + Some(match *self { + UintTy::Usize => return None, + UintTy::U8 => 8, + UintTy::U16 => 16, + UintTy::U32 => 32, + UintTy::U64 => 64, + UintTy::U128 => 128, + }) + } + + pub fn normalize(&self, target_width: u32) -> Self { + match self { + UintTy::Usize => match target_width { + 16 => UintTy::U16, + 32 => UintTy::U32, + 64 => UintTy::U64, + _ => unreachable!(), + }, + _ => *self, + } + } + + pub fn to_signed(self) -> IntTy { + match self { + UintTy::Usize => IntTy::Isize, + UintTy::U8 => IntTy::I8, + UintTy::U16 => IntTy::I16, + UintTy::U32 => IntTy::I32, + UintTy::U64 => IntTy::I64, + UintTy::U128 => IntTy::I128, + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Encodable, Decodable, HashStable_Generic)] +pub enum FloatTy { + F32, + F64, +} + +impl FloatTy { + pub fn name_str(self) -> &'static str { + match self { + FloatTy::F32 => "f32", + FloatTy::F64 => "f64", + } + } + + pub fn bit_width(self) -> u64 { + match self { + FloatTy::F32 => 32, + FloatTy::F64 => 64, + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum IntVarValue { + IntType(IntTy), + UintType(UintTy), +} + +#[derive(Clone, Copy, PartialEq, Eq)] +pub struct FloatVarValue(pub FloatTy); + +rustc_index::newtype_index! { + /// A **ty**pe **v**ariable **ID**. + #[debug_format = "?{}t"] + pub struct TyVid {} +} + +rustc_index::newtype_index! { + /// An **int**egral (`u32`, `i32`, `usize`, etc.) type **v**ariable **ID**. + #[debug_format = "?{}i"] + pub struct IntVid {} +} + +rustc_index::newtype_index! { + /// A **float**ing-point (`f32` or `f64`) type **v**ariable **ID**. + #[debug_format = "?{}f"] + pub struct FloatVid {} +} + +/// A placeholder for a type that hasn't been inferred yet. +/// +/// E.g., if we have an empty array (`[]`), then we create a fresh +/// type variable for the element type since we won't know until it's +/// used what the element type is supposed to be. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)] +pub enum InferTy { + /// A type variable. + TyVar(TyVid), + /// An integral type variable (`{integer}`). 
+ /// + /// These are created when the compiler sees an integer literal like + /// `1` that could be several different types (`u8`, `i32`, `u32`, etc.). + /// We don't know until it's used what type it's supposed to be, so + /// we create a fresh type variable. + IntVar(IntVid), + /// A floating-point type variable (`{float}`). + /// + /// These are created when the compiler sees an float literal like + /// `1.0` that could be either an `f32` or an `f64`. + /// We don't know until it's used what type it's supposed to be, so + /// we create a fresh type variable. + FloatVar(FloatVid), + + /// A [`FreshTy`][Self::FreshTy] is one that is generated as a replacement + /// for an unbound type variable. This is convenient for caching etc. See + /// `rustc_infer::infer::freshen` for more details. + /// + /// Compare with [`TyVar`][Self::TyVar]. + FreshTy(u32), + /// Like [`FreshTy`][Self::FreshTy], but as a replacement for [`IntVar`][Self::IntVar]. + FreshIntTy(u32), + /// Like [`FreshTy`][Self::FreshTy], but as a replacement for [`FloatVar`][Self::FloatVar]. + FreshFloatTy(u32), +} + +/// Raw `TyVid` are used as the unification key for `sub_relations`; +/// they carry no values. +impl UnifyKey for TyVid { + type Value = (); + #[inline] + fn index(&self) -> u32 { + self.as_u32() + } + #[inline] + fn from_index(i: u32) -> TyVid { + TyVid::from_u32(i) + } + fn tag() -> &'static str { + "TyVid" + } +} + +impl EqUnifyValue for IntVarValue {} + +impl UnifyKey for IntVid { + type Value = Option; + #[inline] // make this function eligible for inlining - it is quite hot. + fn index(&self) -> u32 { + self.as_u32() + } + #[inline] + fn from_index(i: u32) -> IntVid { + IntVid::from_u32(i) + } + fn tag() -> &'static str { + "IntVid" + } +} + +impl EqUnifyValue for FloatVarValue {} + +impl UnifyKey for FloatVid { + type Value = Option; + #[inline] + fn index(&self) -> u32 { + self.as_u32() + } + #[inline] + fn from_index(i: u32) -> FloatVid { + FloatVid::from_u32(i) + } + fn tag() -> &'static str { + "FloatVid" + } +} + +impl HashStable for InferTy { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { + use InferTy::*; + discriminant(self).hash_stable(ctx, hasher); + match self { + TyVar(_) | IntVar(_) | FloatVar(_) => { + panic!("type variables should not be hashed: {self:?}") + } + FreshTy(v) | FreshIntTy(v) | FreshFloatTy(v) => v.hash_stable(ctx, hasher), + } + } +} + +impl fmt::Debug for IntVarValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + IntVarValue::IntType(ref v) => v.fmt(f), + IntVarValue::UintType(ref v) => v.fmt(f), + } + } +} + +impl fmt::Debug for FloatVarValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl fmt::Display for InferTy { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use InferTy::*; + match *self { + TyVar(_) => write!(f, "_"), + IntVar(_) => write!(f, "{}", "{integer}"), + FloatVar(_) => write!(f, "{}", "{float}"), + FreshTy(v) => write!(f, "FreshTy({v})"), + FreshIntTy(v) => write!(f, "FreshIntTy({v})"), + FreshFloatTy(v) => write!(f, "FreshFloatTy({v})"), + } + } +} + +impl fmt::Debug for IntTy { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name_str()) + } +} + +impl fmt::Debug for UintTy { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name_str()) + } +} + +impl fmt::Debug for FloatTy { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name_str()) + } +} + 
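An illustrative sketch of the integer-width helpers added in ty_kind.rs above: `isize` is resolved against a target pointer width by `normalize`, while fixed-width types pass through unchanged, and `to_unsigned` maps each signed type to its unsigned counterpart. This is an editorial example with its own local enums, not part of the diff or of the compiler source.

// Minimal standalone sketch mirroring `IntTy::normalize` / `IntTy::to_unsigned`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum IntTy { Isize, I8, I16, I32, I64, I128 }

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum UintTy { Usize, U8, U16, U32, U64, U128 }

impl IntTy {
    // Resolve `isize` against the target's pointer width; leave fixed-width types alone.
    fn normalize(self, target_width: u32) -> Self {
        match self {
            IntTy::Isize => match target_width {
                16 => IntTy::I16,
                32 => IntTy::I32,
                64 => IntTy::I64,
                _ => unreachable!("unsupported pointer width"),
            },
            other => other,
        }
    }

    // Map each signed type to the unsigned type of the same width.
    fn to_unsigned(self) -> UintTy {
        match self {
            IntTy::Isize => UintTy::Usize,
            IntTy::I8 => UintTy::U8,
            IntTy::I16 => UintTy::U16,
            IntTy::I32 => UintTy::U32,
            IntTy::I64 => UintTy::U64,
            IntTy::I128 => UintTy::U128,
        }
    }
}

fn main() {
    // On a 64-bit target `isize` normalizes to `i64`; `i32` is unchanged.
    assert_eq!(IntTy::Isize.normalize(64), IntTy::I64);
    assert_eq!(IntTy::I32.normalize(64), IntTy::I32);
    assert_eq!(IntTy::Isize.to_unsigned(), UintTy::Usize);
}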
+impl fmt::Debug for InferTy { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use InferTy::*; + match *self { + TyVar(ref v) => v.fmt(f), + IntVar(ref v) => v.fmt(f), + FloatVar(ref v) => v.fmt(f), + FreshTy(v) => write!(f, "FreshTy({v:?})"), + FreshIntTy(v) => write!(f, "FreshIntTy({v:?})"), + FreshFloatTy(v) => write!(f, "FreshFloatTy({v:?})"), + } + } +} + +impl> DebugWithInfcx for InferTy { + fn fmt>( + this: WithInfcx<'_, Infcx, &Self>, + f: &mut fmt::Formatter<'_>, + ) -> fmt::Result { + use InferTy::*; + match this.infcx.universe_of_ty(*this.data) { + None => write!(f, "{:?}", this.data), + Some(universe) => match *this.data { + TyVar(ty_vid) => write!(f, "?{}_{}t", ty_vid.index(), universe.index()), + IntVar(_) | FloatVar(_) | FreshTy(_) | FreshIntTy(_) | FreshFloatTy(_) => { + unreachable!() + } + }, + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/visit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/visit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/visit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/rustc_type_ir/src/visit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -40,11 +40,14 @@ //! - ty.super_visit_with(visitor) //! - u.visit_with(visitor) //! ``` -use crate::Interner; +use rustc_data_structures::sync::Lrc; +use rustc_index::{Idx, IndexVec}; use std::fmt; use std::ops::ControlFlow; +use crate::Interner; + /// This trait is implemented for every type that can be visited, /// providing the skeleton of the traversal. /// @@ -116,3 +119,80 @@ p.super_visit_with(self) } } + +/////////////////////////////////////////////////////////////////////////// +// Traversal implementations. + +impl, U: TypeVisitable> TypeVisitable for (T, U) { + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + self.0.visit_with(visitor)?; + self.1.visit_with(visitor) + } +} + +impl, B: TypeVisitable, C: TypeVisitable> TypeVisitable + for (A, B, C) +{ + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + self.0.visit_with(visitor)?; + self.1.visit_with(visitor)?; + self.2.visit_with(visitor) + } +} + +impl> TypeVisitable for Option { + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + match self { + Some(v) => v.visit_with(visitor), + None => ControlFlow::Continue(()), + } + } +} + +impl, E: TypeVisitable> TypeVisitable for Result { + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + match self { + Ok(v) => v.visit_with(visitor), + Err(e) => e.visit_with(visitor), + } + } +} + +impl> TypeVisitable for Lrc { + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + (**self).visit_with(visitor) + } +} + +impl> TypeVisitable for Box { + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + (**self).visit_with(visitor) + } +} + +impl> TypeVisitable for Vec { + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + self.iter().try_for_each(|t| t.visit_with(visitor)) + } +} + +// `TypeFoldable` isn't impl'd for `&[T]`. It doesn't make sense in the general +// case, because we can't return a new slice. But note that there are a couple +// of trivial impls of `TypeFoldable` for specific slice types elsewhere. 
+impl> TypeVisitable for &[T] { + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + self.iter().try_for_each(|t| t.visit_with(visitor)) + } +} + +impl> TypeVisitable for Box<[T]> { + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + self.iter().try_for_each(|t| t.visit_with(visitor)) + } +} + +impl, Ix: Idx> TypeVisitable for IndexVec { + fn visit_with>(&self, visitor: &mut V) -> ControlFlow { + self.iter().try_for_each(|t| t.visit_with(visitor)) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/error.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/error.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/error.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/error.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,76 @@ +//! When things go wrong, we need some error handling. +//! There are a few different types of errors in StableMIR: +//! +//! - [CompilerError]: This represents errors that can be raised when invoking the compiler. +//! - [Error]: Generic error that represents the reason why a request that could not be fulfilled. + +use std::convert::From; +use std::fmt::{Debug, Display, Formatter}; +use std::{error, fmt}; + +/// An error type used to represent an error that has already been reported by the compiler. +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum CompilerError { + /// Internal compiler error (I.e.: Compiler crashed). + ICE, + /// Compilation failed. + CompilationFailed, + /// Compilation was interrupted. + Interrupted(T), + /// Compilation skipped. This happens when users invoke rustc to retrieve information such as + /// --version. + Skipped, +} + +/// A generic error to represent an API request that cannot be fulfilled. 
+#[derive(Debug)] +pub struct Error(String); + +impl Error { + pub(crate) fn new(msg: String) -> Self { + Self(msg) + } +} + +impl From<&str> for Error { + fn from(value: &str) -> Self { + Self(value.into()) + } +} + +impl Display for Error { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + Display::fmt(&self.0, f) + } +} + +impl Display for CompilerError +where + T: Display, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self { + CompilerError::ICE => write!(f, "Internal Compiler Error"), + CompilerError::CompilationFailed => write!(f, "Compilation Failed"), + CompilerError::Interrupted(reason) => write!(f, "Compilation Interrupted: {reason}"), + CompilerError::Skipped => write!(f, "Compilation Skipped"), + } + } +} + +impl Debug for CompilerError +where + T: Debug, +{ + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self { + CompilerError::ICE => write!(f, "Internal Compiler Error"), + CompilerError::CompilationFailed => write!(f, "Compilation Failed"), + CompilerError::Interrupted(reason) => write!(f, "Compilation Interrupted: {reason:?}"), + CompilerError::Skipped => write!(f, "Compilation Skipped"), + } + } +} + +impl error::Error for Error {} +impl error::Error for CompilerError where T: Display + Debug {} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/fold.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/fold.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/fold.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/fold.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,245 +0,0 @@ -use std::ops::ControlFlow; - -use crate::Opaque; - -use super::ty::{ - Allocation, Binder, Const, ConstDef, ConstantKind, ExistentialPredicate, FnSig, GenericArgKind, - GenericArgs, Promoted, Region, RigidTy, TermKind, Ty, TyKind, UnevaluatedConst, -}; - -pub trait Folder: Sized { - type Break; - fn fold_ty(&mut self, ty: &Ty) -> ControlFlow { - ty.super_fold(self) - } - fn fold_const(&mut self, c: &Const) -> ControlFlow { - c.super_fold(self) - } - fn fold_reg(&mut self, reg: &Region) -> ControlFlow { - reg.super_fold(self) - } -} - -pub trait Foldable: Sized + Clone { - fn fold(&self, folder: &mut V) -> ControlFlow { - self.super_fold(folder) - } - fn super_fold(&self, folder: &mut V) -> ControlFlow; -} - -impl Foldable for Ty { - fn fold(&self, folder: &mut V) -> ControlFlow { - folder.fold_ty(self) - } - fn super_fold(&self, folder: &mut V) -> ControlFlow { - let mut kind = self.kind(); - match &mut kind { - super::ty::TyKind::RigidTy(ty) => *ty = ty.fold(folder)?, - super::ty::TyKind::Alias(_, alias) => alias.args = alias.args.fold(folder)?, - super::ty::TyKind::Param(_) => {} - super::ty::TyKind::Bound(_, _) => {} - } - ControlFlow::Continue(kind.into()) - } -} - -impl Foldable for Const { - fn fold(&self, folder: &mut V) -> ControlFlow { - folder.fold_const(self) - } - fn super_fold(&self, folder: &mut V) -> ControlFlow { - let mut this = self.clone(); - match &mut this.literal { - super::ty::ConstantKind::Allocated(alloc) => *alloc = alloc.fold(folder)?, - super::ty::ConstantKind::Unevaluated(uv) => *uv = uv.fold(folder)?, - super::ty::ConstantKind::Param(_) => {} - } - this.ty = this.ty.fold(folder)?; - ControlFlow::Continue(this) - } -} - -impl Foldable for Opaque { - fn super_fold(&self, _folder: &mut V) -> ControlFlow { - ControlFlow::Continue(self.clone()) - } -} - -impl Foldable for Allocation { - fn super_fold(&self, _folder: &mut V) -> ControlFlow { - 
ControlFlow::Continue(self.clone()) - } -} - -impl Foldable for UnevaluatedConst { - fn super_fold(&self, folder: &mut V) -> ControlFlow { - let UnevaluatedConst { def, args, promoted } = self; - ControlFlow::Continue(UnevaluatedConst { - def: def.fold(folder)?, - args: args.fold(folder)?, - promoted: promoted.fold(folder)?, - }) - } -} - -impl Foldable for ConstDef { - fn super_fold(&self, _folder: &mut V) -> ControlFlow { - ControlFlow::Continue(*self) - } -} - -impl Foldable for Option { - fn super_fold(&self, folder: &mut V) -> ControlFlow { - ControlFlow::Continue(match self { - Some(val) => Some(val.fold(folder)?), - None => None, - }) - } -} - -impl Foldable for Promoted { - fn super_fold(&self, _folder: &mut V) -> ControlFlow { - ControlFlow::Continue(*self) - } -} - -impl Foldable for GenericArgs { - fn super_fold(&self, folder: &mut V) -> ControlFlow { - ControlFlow::Continue(GenericArgs(self.0.fold(folder)?)) - } -} - -impl Foldable for Region { - fn fold(&self, folder: &mut V) -> ControlFlow { - folder.fold_reg(self) - } - fn super_fold(&self, _: &mut V) -> ControlFlow { - ControlFlow::Continue(self.clone()) - } -} - -impl Foldable for GenericArgKind { - fn super_fold(&self, folder: &mut V) -> ControlFlow { - let mut this = self.clone(); - match &mut this { - GenericArgKind::Lifetime(lt) => *lt = lt.fold(folder)?, - GenericArgKind::Type(t) => *t = t.fold(folder)?, - GenericArgKind::Const(c) => *c = c.fold(folder)?, - } - ControlFlow::Continue(this) - } -} - -impl Foldable for RigidTy { - fn super_fold(&self, folder: &mut V) -> ControlFlow { - let mut this = self.clone(); - match &mut this { - RigidTy::Bool - | RigidTy::Char - | RigidTy::Int(_) - | RigidTy::Uint(_) - | RigidTy::Float(_) - | RigidTy::Never - | RigidTy::Foreign(_) - | RigidTy::Str => {} - RigidTy::Array(t, c) => { - *t = t.fold(folder)?; - *c = c.fold(folder)?; - } - RigidTy::Slice(inner) => *inner = inner.fold(folder)?, - RigidTy::RawPtr(ty, _) => *ty = ty.fold(folder)?, - RigidTy::Ref(reg, ty, _) => { - *reg = reg.fold(folder)?; - *ty = ty.fold(folder)? 
- } - RigidTy::FnDef(_, args) => *args = args.fold(folder)?, - RigidTy::FnPtr(sig) => *sig = sig.fold(folder)?, - RigidTy::Closure(_, args) => *args = args.fold(folder)?, - RigidTy::Generator(_, args, _) => *args = args.fold(folder)?, - RigidTy::Dynamic(pred, r, _) => { - *pred = pred.fold(folder)?; - *r = r.fold(folder)?; - } - RigidTy::Tuple(fields) => *fields = fields.fold(folder)?, - RigidTy::Adt(_, args) => *args = args.fold(folder)?, - } - ControlFlow::Continue(this) - } -} - -impl Foldable for Vec { - fn super_fold(&self, folder: &mut V) -> ControlFlow { - let mut this = self.clone(); - for arg in &mut this { - *arg = arg.fold(folder)?; - } - ControlFlow::Continue(this) - } -} - -impl Foldable for Binder { - fn super_fold(&self, folder: &mut V) -> ControlFlow { - ControlFlow::Continue(Self { - value: self.value.fold(folder)?, - bound_vars: self.bound_vars.clone(), - }) - } -} - -impl Foldable for ExistentialPredicate { - fn super_fold(&self, folder: &mut V) -> ControlFlow { - let mut this = self.clone(); - match &mut this { - ExistentialPredicate::Trait(tr) => tr.generic_args = tr.generic_args.fold(folder)?, - ExistentialPredicate::Projection(p) => { - p.term = p.term.fold(folder)?; - p.generic_args = p.generic_args.fold(folder)?; - } - ExistentialPredicate::AutoTrait(_) => {} - } - ControlFlow::Continue(this) - } -} - -impl Foldable for TermKind { - fn super_fold(&self, folder: &mut V) -> ControlFlow { - ControlFlow::Continue(match self { - TermKind::Type(t) => TermKind::Type(t.fold(folder)?), - TermKind::Const(c) => TermKind::Const(c.fold(folder)?), - }) - } -} - -impl Foldable for FnSig { - fn super_fold(&self, folder: &mut V) -> ControlFlow { - ControlFlow::Continue(Self { - inputs_and_output: self.inputs_and_output.fold(folder)?, - c_variadic: self.c_variadic, - unsafety: self.unsafety, - abi: self.abi.clone(), - }) - } -} - -pub enum Never {} - -/// In order to instantiate a `Foldable`'s generic parameters with specific arguments, -/// `GenericArgs` can be used as a `Folder` that replaces all mentions of generic params -/// with the entries in its list. -impl Folder for GenericArgs { - type Break = Never; - - fn fold_ty(&mut self, ty: &Ty) -> ControlFlow { - ControlFlow::Continue(match ty.kind() { - TyKind::Param(p) => self[p], - _ => *ty, - }) - } - - fn fold_const(&mut self, c: &Const) -> ControlFlow { - ControlFlow::Continue(match &c.literal { - ConstantKind::Param(p) => self[p.clone()].clone(), - _ => c.clone(), - }) - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -17,22 +17,29 @@ //! The goal is to eventually be published on //! [crates.io](https://crates.io). +use crate::mir::mono::InstanceDef; +use crate::mir::Body; use std::cell::Cell; use std::fmt; use std::fmt::Debug; use self::ty::{ - GenericPredicates, Generics, ImplDef, ImplTrait, Span, TraitDecl, TraitDef, Ty, TyKind, + GenericPredicates, Generics, ImplDef, ImplTrait, IndexedVal, LineInfo, Span, TraitDecl, + TraitDef, Ty, TyKind, }; #[macro_use] extern crate scoped_tls; -pub mod fold; +pub mod error; pub mod mir; pub mod ty; pub mod visitor; +pub use error::*; +use mir::mono::Instance; +use ty::{FnDef, GenericArgs}; + /// Use String for now but we should replace it. 
pub type Symbol = String; @@ -41,7 +48,7 @@ /// A unique identification number for each item accessible for the current compilation unit. #[derive(Clone, Copy, PartialEq, Eq)] -pub struct DefId(pub usize); +pub struct DefId(usize); impl Debug for DefId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -52,9 +59,28 @@ } } +impl IndexedVal for DefId { + fn to_val(index: usize) -> Self { + DefId(index) + } + + fn to_index(&self) -> usize { + self.0 + } +} + /// A unique identification number for each provenance #[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub struct AllocId(pub usize); +pub struct AllocId(usize); + +impl IndexedVal for AllocId { + fn to_val(index: usize) -> Self { + AllocId(index) + } + fn to_index(&self) -> usize { + self.0 + } +} /// A list of crate items. pub type CrateItems = Vec; @@ -65,20 +91,6 @@ /// A list of impl trait decls. pub type ImplTraitDecls = Vec; -/// An error type used to represent an error that has already been reported by the compiler. -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub enum CompilerError { - /// Internal compiler error (I.e.: Compiler crashed). - ICE, - /// Compilation failed. - CompilationFailed, - /// Compilation was interrupted. - Interrupted(T), - /// Compilation skipped. This happens when users invoke rustc to retrieve information such as - /// --version. - Skipped, -} - /// Holds information about a crate. #[derive(Clone, PartialEq, Eq, Debug)] pub struct Crate { @@ -88,11 +100,10 @@ } pub type DefKind = Opaque; +pub type Filename = Opaque; /// Holds information about an item in the crate. -/// For now, it only stores the item DefId. Use functions inside `rustc_internal` module to -/// use this item. -#[derive(Clone, PartialEq, Eq, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Debug)] pub struct CrateItem(pub DefId); impl CrateItem { @@ -111,6 +122,10 @@ pub fn kind(&self) -> DefKind { with(|cx| cx.def_kind(self.0)) } + + pub fn requires_monomorphization(&self) -> bool { + with(|cx| cx.requires_monomorphization(self.0)) + } } /// Return the function where execution starts if the current @@ -125,9 +140,9 @@ with(|cx| cx.local_crate()) } -/// Try to find a crate with the given name. -pub fn find_crate(name: &str) -> Option { - with(|cx| cx.find_crate(name)) +/// Try to find a crate or crates if multiple crates exist from given name. +pub fn find_crates(name: &str) -> Vec { + with(|cx| cx.find_crates(name)) } /// Try to find a crate with the given name. @@ -157,72 +172,95 @@ } pub trait Context { - fn entry_fn(&mut self) -> Option; + fn entry_fn(&self) -> Option; /// Retrieve all items of the local crate that have a MIR associated with them. 
- fn all_local_items(&mut self) -> CrateItems; - fn mir_body(&mut self, item: DefId) -> mir::Body; - fn all_trait_decls(&mut self) -> TraitDecls; - fn trait_decl(&mut self, trait_def: &TraitDef) -> TraitDecl; - fn all_trait_impls(&mut self) -> ImplTraitDecls; - fn trait_impl(&mut self, trait_impl: &ImplDef) -> ImplTrait; - fn generics_of(&mut self, def_id: DefId) -> Generics; - fn predicates_of(&mut self, def_id: DefId) -> GenericPredicates; - fn explicit_predicates_of(&mut self, def_id: DefId) -> GenericPredicates; + fn all_local_items(&self) -> CrateItems; + fn mir_body(&self, item: DefId) -> mir::Body; + fn all_trait_decls(&self) -> TraitDecls; + fn trait_decl(&self, trait_def: &TraitDef) -> TraitDecl; + fn all_trait_impls(&self) -> ImplTraitDecls; + fn trait_impl(&self, trait_impl: &ImplDef) -> ImplTrait; + fn generics_of(&self, def_id: DefId) -> Generics; + fn predicates_of(&self, def_id: DefId) -> GenericPredicates; + fn explicit_predicates_of(&self, def_id: DefId) -> GenericPredicates; /// Get information about the local crate. fn local_crate(&self) -> Crate; /// Retrieve a list of all external crates. fn external_crates(&self) -> Vec; /// Find a crate with the given name. - fn find_crate(&self, name: &str) -> Option; + fn find_crates(&self, name: &str) -> Vec; - /// Prints the name of given `DefId` + /// Returns the name of given `DefId` fn name_of_def_id(&self, def_id: DefId) -> String; - /// Prints a human readable form of `Span` - fn print_span(&self, span: Span) -> String; + /// Returns printable, human readable form of `Span` + fn span_to_string(&self, span: Span) -> String; - /// Prints the kind of given `DefId` - fn def_kind(&mut self, def_id: DefId) -> DefKind; + /// Return filename from given `Span`, for diagnostic purposes + fn get_filename(&self, span: &Span) -> Filename; + + /// Return lines corresponding to this `Span` + fn get_lines(&self, span: &Span) -> LineInfo; + + /// Returns the `kind` of given `DefId` + fn def_kind(&self, def_id: DefId) -> DefKind; /// `Span` of an item - fn span_of_an_item(&mut self, def_id: DefId) -> Span; + fn span_of_an_item(&self, def_id: DefId) -> Span; /// Obtain the representation of a type. - fn ty_kind(&mut self, ty: Ty) -> TyKind; + fn ty_kind(&self, ty: Ty) -> TyKind; + + /// Get the body of an Instance. + /// FIXME: Monomorphize the body. + fn instance_body(&self, instance: InstanceDef) -> Body; - /// Create a new `Ty` from scratch without information from rustc. - fn mk_ty(&mut self, kind: TyKind) -> Ty; + /// Get the instance type with generic substitutions applied and lifetimes erased. + fn instance_ty(&self, instance: InstanceDef) -> Ty; + + /// Get the instance. + fn instance_def_id(&self, instance: InstanceDef) -> DefId; + + /// Get the instance mangled name. + fn instance_mangled_name(&self, instance: InstanceDef) -> String; + + /// Convert a non-generic crate item into an instance. + /// This function will panic if the item is generic. + fn mono_instance(&self, item: CrateItem) -> Instance; + + /// Item requires monomorphization. + fn requires_monomorphization(&self, def_id: DefId) -> bool; + + /// Resolve an instance from the given function definition and generic arguments. + fn resolve_instance(&self, def: FnDef, args: &GenericArgs) -> Option; } // A thread local variable that stores a pointer to the tables mapping between TyCtxt // datastructures and stable MIR datastructures -scoped_thread_local! (static TLV: Cell<*mut ()>); +scoped_thread_local! 
(static TLV: Cell<*const ()>); -pub fn run(mut context: impl Context, f: impl FnOnce()) { +pub fn run(context: &dyn Context, f: impl FnOnce()) { assert!(!TLV.is_set()); - fn g<'a>(mut context: &mut (dyn Context + 'a), f: impl FnOnce()) { - let ptr: *mut () = &mut context as *mut &mut _ as _; - TLV.set(&Cell::new(ptr), || { - f(); - }); - } - g(&mut context, f); + let ptr: *const () = &context as *const &_ as _; + TLV.set(&Cell::new(ptr), || { + f(); + }); } /// Loads the current context and calls a function with it. /// Do not nest these, as that will ICE. -pub fn with(f: impl FnOnce(&mut dyn Context) -> R) -> R { +pub fn with(f: impl FnOnce(&dyn Context) -> R) -> R { assert!(TLV.is_set()); TLV.with(|tlv| { let ptr = tlv.get(); assert!(!ptr.is_null()); - f(unsafe { *(ptr as *mut &mut dyn Context) }) + f(unsafe { *(ptr as *const &dyn Context) }) }) } /// A type that provides internal information but that can still be used for debug purpose. -#[derive(Clone)] +#[derive(Clone, Eq, PartialEq)] pub struct Opaque(String); impl std::fmt::Display for Opaque { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/body.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/body.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/body.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/body.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,11 +1,69 @@ -use crate::ty::{AdtDef, ClosureDef, Const, GeneratorDef, GenericArgs, Movability, Region}; +use crate::ty::{AdtDef, ClosureDef, Const, CoroutineDef, GenericArgs, Movability, Region, Ty}; use crate::Opaque; -use crate::{ty::Ty, Span}; +use crate::Span; +/// The SMIR representation of a single function. #[derive(Clone, Debug)] pub struct Body { pub blocks: Vec, - pub locals: Vec, + + // Declarations of locals within the function. + // + // The first local is the return value pointer, followed by `arg_count` + // locals for the function arguments, followed by any user-declared + // variables and temporaries. + pub(super) locals: LocalDecls, + + // The number of arguments this function takes. + pub(super) arg_count: usize, +} + +impl Body { + /// Constructs a `Body`. + /// + /// A constructor is required to build a `Body` from outside the crate + /// because the `arg_count` and `locals` fields are private. + pub fn new(blocks: Vec, locals: LocalDecls, arg_count: usize) -> Self { + // If locals doesn't contain enough entries, it can lead to panics in + // `ret_local`, `arg_locals`, and `inner_locals`. + assert!( + locals.len() > arg_count, + "A Body must contain at least a local for the return value and each of the function's arguments" + ); + Self { blocks, locals, arg_count } + } + + /// Return local that holds this function's return value. + pub fn ret_local(&self) -> &LocalDecl { + &self.locals[RETURN_LOCAL] + } + + /// Locals in `self` that correspond to this function's arguments. + pub fn arg_locals(&self) -> &[LocalDecl] { + &self.locals[1..][..self.arg_count] + } + + /// Inner locals for this function. These are the locals that are + /// neither the return local nor the argument locals. + pub fn inner_locals(&self) -> &[LocalDecl] { + &self.locals[self.arg_count + 1..] + } + + /// Convenience function to get all the locals in this function. + /// + /// Locals are typically accessed via the more specific methods `ret_local`, + /// `arg_locals`, and `inner_locals`. 
+ pub fn locals(&self) -> &[LocalDecl] { + &self.locals + } +} + +type LocalDecls = Vec; + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct LocalDecl { + pub ty: Ty, + pub span: Span, } #[derive(Clone, Debug)] @@ -14,8 +72,14 @@ pub terminator: Terminator, } -#[derive(Clone, Debug)] -pub enum Terminator { +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct Terminator { + pub kind: TerminatorKind, + pub span: Span, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum TerminatorKind { Goto { target: usize, }, @@ -47,7 +111,7 @@ target: usize, unwind: UnwindAction, }, - GeneratorDrop, + CoroutineDrop, InlineAsm { template: String, operands: Vec, @@ -58,7 +122,7 @@ }, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct InlineAsmOperand { pub in_value: Option, pub out_place: Option, @@ -67,7 +131,7 @@ pub raw_rpr: String, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum UnwindAction { Continue, Unreachable, @@ -75,19 +139,19 @@ Cleanup(usize), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum AssertMessage { BoundsCheck { len: Operand, index: Operand }, Overflow(BinOp, Operand, Operand), OverflowNeg(Operand), DivisionByZero(Operand), RemainderByZero(Operand), - ResumedAfterReturn(GeneratorKind), - ResumedAfterPanic(GeneratorKind), + ResumedAfterReturn(CoroutineKind), + ResumedAfterPanic(CoroutineKind), MisalignedPointerDereference { required: Operand, found: Operand }, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum BinOp { Add, AddUnchecked, @@ -113,20 +177,21 @@ Offset, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum UnOp { Not, Neg, } -#[derive(Clone, Debug)] -pub enum GeneratorKind { - Async(AsyncGeneratorKind), - Gen, +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum CoroutineKind { + Async(CoroutineSource), + Coroutine, + Gen(CoroutineSource), } -#[derive(Clone, Debug)] -pub enum AsyncGeneratorKind { +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum CoroutineSource { Block, Closure, Fn, @@ -139,7 +204,7 @@ pub(crate) type Coverage = Opaque; /// The FakeReadCause describes the type of pattern why a FakeRead statement exists. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum FakeReadCause { ForMatchGuard, ForMatchedPlace(LocalDefId), @@ -149,7 +214,7 @@ } /// Describes what kind of retag is to be performed -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq, Hash)] pub enum RetagKind { FnEntry, TwoPhase, @@ -157,7 +222,7 @@ Default, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq, Hash)] pub enum Variance { Covariant, Invariant, @@ -165,21 +230,27 @@ Bivariant, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct CopyNonOverlapping { pub src: Operand, pub dst: Operand, pub count: Operand, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum NonDivergingIntrinsic { Assume(Operand), CopyNonOverlapping(CopyNonOverlapping), } -#[derive(Clone, Debug)] -pub enum Statement { +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct Statement { + pub kind: StatementKind, + pub span: Span, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum StatementKind { Assign(Place, Rvalue), FakeRead(FakeReadCause, Place), SetDiscriminant { place: Place, variant_index: VariantIdx }, @@ -195,7 +266,7 @@ Nop, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum Rvalue { /// Creates a pointer with the indicated mutability to the place. 
/// @@ -209,8 +280,8 @@ /// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo` /// has a destructor. /// - /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Generator`. After - /// generator lowering, `Generator` aggregate kinds are disallowed too. + /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After + /// coroutine lowering, `Coroutine` aggregate kinds are disallowed too. Aggregate(AggregateKind, Vec), /// * `Offset` has the same semantics as `<*const T>::offset`, except that the second @@ -307,29 +378,30 @@ Use(Operand), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum AggregateKind { Array(Ty), Tuple, Adt(AdtDef, VariantIdx, GenericArgs, Option, Option), Closure(ClosureDef, GenericArgs), - Generator(GeneratorDef, GenericArgs, Movability), + Coroutine(CoroutineDef, GenericArgs, Movability), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum Operand { Copy(Place), Move(Place), Constant(Constant), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct Place { pub local: Local, + /// projection out of a place (access a field, deref a pointer, etc) pub projection: String, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct UserTypeProjection { pub base: UserTypeAnnotationIndex, pub projection: String, @@ -337,6 +409,8 @@ pub type Local = usize; +pub const RETURN_LOCAL: Local = 0; + type FieldIdx = usize; /// The source-order index of a variant in a type. @@ -344,20 +418,20 @@ type UserTypeAnnotationIndex = usize; -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct Constant { pub span: Span, pub user_ty: Option, pub literal: Const, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct SwitchTarget { pub value: u128, pub target: usize, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum BorrowKind { /// Data must be immutable and is aliasable. Shared, @@ -375,26 +449,26 @@ }, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum MutBorrowKind { Default, TwoPhaseBorrow, ClosureCapture, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum Mutability { Not, Mut, } -#[derive(Copy, Clone, Debug)] +#[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum Safety { Unsafe, Normal, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum PointerCoercion { /// Go from a fn-item type to a fn-pointer type. ReifyFnPointer, @@ -421,7 +495,7 @@ Unsize, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum CastKind { PointerExposeAddress, PointerFromExposedAddress, @@ -436,12 +510,34 @@ Transmute, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum NullOp { /// Returns the size of a value of that type. SizeOf, /// Returns the minimum alignment of a type. AlignOf, /// Returns the offset of a field. 
- OffsetOf(Vec<FieldIdx>), + OffsetOf(Vec<(VariantIdx, FieldIdx)>), +} + +impl Operand { + pub fn ty(&self, locals: &[LocalDecl]) -> Ty { + match self { + Operand::Copy(place) | Operand::Move(place) => place.ty(locals), + Operand::Constant(c) => c.ty(), + } + } +} + +impl Constant { + pub fn ty(&self) -> Ty { + self.literal.ty() + } +} + +impl Place { + pub fn ty(&self, locals: &[LocalDecl]) -> Ty { + let _start_ty = locals[self.local].ty; + todo!("Implement projection") + } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/mono.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/mono.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/mono.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/mono.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,102 @@ +use crate::mir::Body; +use crate::ty::{FnDef, GenericArgs, IndexedVal, Ty}; +use crate::{with, CrateItem, DefId, Error, Opaque}; +use std::fmt::Debug; + +#[derive(Clone, Debug)] +pub enum MonoItem { + Fn(Instance), + Static(StaticDef), + GlobalAsm(Opaque), +} + +#[derive(Copy, Clone, Debug)] +pub struct Instance { + /// The type of instance. + pub kind: InstanceKind, + /// An ID used to get the instance definition from the compiler. + /// Do not use this field directly. + pub def: InstanceDef, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum InstanceKind { + /// A user defined item. + Item, + /// A compiler intrinsic function. + Intrinsic, + /// A virtual function definition stored in a VTable. + Virtual, + /// A compiler generated shim. + Shim, +} + +impl Instance { + /// Get the body of an Instance. The body will be eagerly monomorphized. + pub fn body(&self) -> Body { + with(|context| context.instance_body(self.def)) + } + + /// Get the instance type with generic substitutions applied and lifetimes erased. + pub fn ty(&self) -> Ty { + with(|context| context.instance_ty(self.def)) + } + + pub fn mangled_name(&self) -> String { + with(|context| context.instance_mangled_name(self.def)) + } + + /// Resolve an instance starting from a function definition and generic arguments. + pub fn resolve(def: FnDef, args: &GenericArgs) -> Result<Instance, crate::Error> { + with(|context| { + context.resolve_instance(def, args).ok_or_else(|| { + crate::Error::new(format!("Failed to resolve `{def:?}` with `{args:?}`")) + }) + }) + } +} + +/// Try to convert a crate item into an instance. +/// The item cannot be generic in order to be converted into an instance. +impl TryFrom<CrateItem> for Instance { + type Error = crate::Error; + + fn try_from(item: CrateItem) -> Result<Self, Self::Error> { + with(|context| { + if !context.requires_monomorphization(item.0) { + Ok(context.mono_instance(item)) + } else { + Err(Error::new("Item requires monomorphization".to_string())) + } + }) + } +} + +/// Try to convert an instance into a crate item. +/// Only user defined instances can be converted.
+impl TryFrom<Instance> for CrateItem { + type Error = crate::Error; + + fn try_from(value: Instance) -> Result<Self, Self::Error> { + if value.kind == InstanceKind::Item { + Ok(CrateItem(with(|context| context.instance_def_id(value.def)))) + } else { + Err(Error::new(format!("Item kind `{:?}` cannot be converted", value.kind))) + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct InstanceDef(usize); + +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +pub struct StaticDef(pub DefId); + +impl IndexedVal for InstanceDef { + fn to_val(index: usize) -> Self { + InstanceDef(index) + } + fn to_index(&self) -> usize { + self.0 + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/visit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/visit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/visit.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir/visit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,414 @@ +//! # The Stable MIR Visitor +//! +//! ## Overview +//! +//! We currently only support an immutable visitor. +//! The structure of this visitor is similar to the ones internal to `rustc`, +//! and it follows the following conventions: +//! +//! For every mir item, the trait has a `visit_<item>` and a `super_<item>` method. +//! - `visit_<item>`, by default, calls `super_<item>` +//! - `super_<item>`, by default, destructures the `<item>` and calls `visit_<sub_item>` for +//! all sub-items that compose the original item. +//! +//! In order to implement a visitor, override the `visit_*` methods for the types you are +//! interested in analyzing, and invoke (within that method call) +//! `self.super_*` to continue to the traverse. +//! Avoid calling `super` methods in other circumstances. +//! +//! For the most part, we do not destructure things external to the +//! MIR, e.g., types, spans, etc, but simply visit them and stop. +//! This avoids duplication with other visitors like `TypeFoldable`. +//! +//! ## Updating +//! +//! The code is written in a very deliberate style intended to minimize +//! the chance of things being overlooked. +//! +//! Use pattern matching to reference fields and ensure that all +//! matches are exhaustive. +//! +//! For this to work, ALL MATCHES MUST BE EXHAUSTIVE IN FIELDS AND VARIANTS. +//! That means you never write `..` to skip over fields, nor do you write `_` +//! to skip over variants in a `match`. +//! +//! The only place that `_` is acceptable is to match a field (or +//! variant argument) that does not require visiting.
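As a rough usage sketch of the visitor convention documented above and of the `Instance` conversions introduced in mono.rs (not part of the patch itself; the `StatementCounter` type and `count_statements` helper are illustrative names, and this assumes it runs inside a stable_mir driver where the `with` context is set up):

use stable_mir::mir::mono::Instance;
use stable_mir::mir::visit::{Location, MirVisitor};
use stable_mir::mir::Statement;
use stable_mir::CrateItem;

/// Hypothetical visitor: counts the statements in a body by overriding one
/// `visit_` method and letting the default `super_` traversal do the walking.
struct StatementCounter {
    count: usize,
}

impl MirVisitor for StatementCounter {
    fn visit_statement(&mut self, stmt: &Statement, location: Location) {
        self.count += 1;
        // Continue the traverse into the statement's sub-items.
        self.super_statement(stmt, location);
    }
}

/// Hypothetical helper: `Instance::try_from` fails for generic items, which
/// cannot be turned into a single monomorphic instance.
fn count_statements(item: CrateItem) -> Result<usize, stable_mir::Error> {
    let instance = Instance::try_from(item)?;
    let body = instance.body();
    let mut counter = StatementCounter { count: 0 };
    counter.visit_body(&body);
    Ok(counter.count)
}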
+ +use crate::mir::*; +use crate::ty::{Const, GenericArgs, Region, Ty}; +use crate::{Opaque, Span}; + +pub trait MirVisitor { + fn visit_body(&mut self, body: &Body) { + self.super_body(body) + } + + fn visit_basic_block(&mut self, bb: &BasicBlock) { + self.super_basic_block(bb) + } + + fn visit_ret_decl(&mut self, local: Local, decl: &LocalDecl) { + self.super_ret_decl(local, decl) + } + + fn visit_arg_decl(&mut self, local: Local, decl: &LocalDecl) { + self.super_arg_decl(local, decl) + } + + fn visit_local_decl(&mut self, local: Local, decl: &LocalDecl) { + self.super_local_decl(local, decl) + } + + fn visit_statement(&mut self, stmt: &Statement, location: Location) { + self.super_statement(stmt, location) + } + + fn visit_terminator(&mut self, term: &Terminator, location: Location) { + self.super_terminator(term, location) + } + + fn visit_span(&mut self, span: &Span) { + self.super_span(span) + } + + fn visit_place(&mut self, place: &Place, ptx: PlaceContext, location: Location) { + self.super_place(place, ptx, location) + } + + fn visit_local(&mut self, local: &Local, ptx: PlaceContext, location: Location) { + let _ = (local, ptx, location); + } + + fn visit_rvalue(&mut self, rvalue: &Rvalue, location: Location) { + self.super_rvalue(rvalue, location) + } + + fn visit_operand(&mut self, operand: &Operand, location: Location) { + self.super_operand(operand, location) + } + + fn visit_user_type_projection(&mut self, projection: &UserTypeProjection) { + self.super_user_type_projection(projection) + } + + fn visit_ty(&mut self, ty: &Ty, location: Location) { + let _ = location; + self.super_ty(ty) + } + + fn visit_constant(&mut self, constant: &Constant, location: Location) { + self.super_constant(constant, location) + } + + fn visit_const(&mut self, constant: &Const, location: Location) { + self.super_const(constant, location) + } + + fn visit_region(&mut self, region: &Region, location: Location) { + let _ = location; + self.super_region(region) + } + + fn visit_args(&mut self, args: &GenericArgs, location: Location) { + let _ = location; + self.super_args(args) + } + + fn visit_assert_msg(&mut self, msg: &AssertMessage, location: Location) { + self.super_assert_msg(msg, location) + } + + fn super_body(&mut self, body: &Body) { + let Body { blocks, locals: _, arg_count } = body; + + for bb in blocks { + self.visit_basic_block(bb); + } + + self.visit_ret_decl(RETURN_LOCAL, body.ret_local()); + + for (idx, arg) in body.arg_locals().iter().enumerate() { + self.visit_arg_decl(idx + 1, arg) + } + + let local_start = arg_count + 1; + for (idx, arg) in body.arg_locals().iter().enumerate() { + self.visit_local_decl(idx + local_start, arg) + } + } + + fn super_basic_block(&mut self, bb: &BasicBlock) { + let BasicBlock { statements, terminator } = bb; + for stmt in statements { + self.visit_statement(stmt, Location(stmt.span)); + } + self.visit_terminator(terminator, Location(terminator.span)); + } + + fn super_local_decl(&mut self, local: Local, decl: &LocalDecl) { + let _ = local; + let LocalDecl { ty, span } = decl; + self.visit_ty(ty, Location(*span)); + } + + fn super_ret_decl(&mut self, local: Local, decl: &LocalDecl) { + self.super_local_decl(local, decl) + } + + fn super_arg_decl(&mut self, local: Local, decl: &LocalDecl) { + self.super_local_decl(local, decl) + } + + fn super_statement(&mut self, stmt: &Statement, location: Location) { + let Statement { kind, span } = stmt; + self.visit_span(span); + match kind { + StatementKind::Assign(place, rvalue) => { + self.visit_place(place, 
PlaceContext::MUTATING, location); + self.visit_rvalue(rvalue, location); + } + StatementKind::FakeRead(_, place) => { + self.visit_place(place, PlaceContext::NON_MUTATING, location); + } + StatementKind::SetDiscriminant { place, .. } => { + self.visit_place(place, PlaceContext::MUTATING, location); + } + StatementKind::Deinit(place) => { + self.visit_place(place, PlaceContext::MUTATING, location); + } + StatementKind::StorageLive(local) => { + self.visit_local(local, PlaceContext::NON_USE, location); + } + StatementKind::StorageDead(local) => { + self.visit_local(local, PlaceContext::NON_USE, location); + } + StatementKind::Retag(_, place) => { + self.visit_place(place, PlaceContext::MUTATING, location); + } + StatementKind::PlaceMention(place) => { + self.visit_place(place, PlaceContext::NON_MUTATING, location); + } + StatementKind::AscribeUserType { place, projections, variance: _ } => { + self.visit_place(place, PlaceContext::NON_USE, location); + self.visit_user_type_projection(projections); + } + StatementKind::Coverage(coverage) => visit_opaque(coverage), + StatementKind::Intrinsic(intrisic) => match intrisic { + NonDivergingIntrinsic::Assume(operand) => { + self.visit_operand(operand, location); + } + NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping { + src, + dst, + count, + }) => { + self.visit_operand(src, location); + self.visit_operand(dst, location); + self.visit_operand(count, location); + } + }, + StatementKind::ConstEvalCounter => {} + StatementKind::Nop => {} + } + } + + fn super_terminator(&mut self, term: &Terminator, location: Location) { + let Terminator { kind, span } = term; + self.visit_span(&span); + match kind { + TerminatorKind::Goto { .. } + | TerminatorKind::Resume + | TerminatorKind::Abort + | TerminatorKind::Unreachable + | TerminatorKind::CoroutineDrop => {} + TerminatorKind::Assert { cond, expected: _, msg, target: _, unwind: _ } => { + self.visit_operand(cond, location); + self.visit_assert_msg(msg, location); + } + TerminatorKind::Drop { place, target: _, unwind: _ } => { + self.visit_place(place, PlaceContext::MUTATING, location); + } + TerminatorKind::Call { func, args, destination, target: _, unwind: _ } => { + self.visit_operand(func, location); + for arg in args { + self.visit_operand(arg, location); + } + self.visit_place(destination, PlaceContext::MUTATING, location); + } + TerminatorKind::InlineAsm { operands, .. 
} => { + for op in operands { + let InlineAsmOperand { in_value, out_place, raw_rpr: _ } = op; + if let Some(input) = in_value { + self.visit_operand(input, location); + } + if let Some(output) = out_place { + self.visit_place(output, PlaceContext::MUTATING, location); + } + } + } + TerminatorKind::Return => { + let local = RETURN_LOCAL; + self.visit_local(&local, PlaceContext::NON_MUTATING, location); + } + TerminatorKind::SwitchInt { discr, targets: _, otherwise: _ } => { + self.visit_operand(discr, location); + } + } + } + + fn super_span(&mut self, span: &Span) { + let _ = span; + } + + fn super_place(&mut self, place: &Place, ptx: PlaceContext, location: Location) { + let _ = location; + let _ = ptx; + visit_opaque(&Opaque(place.projection.clone())); + } + + fn super_rvalue(&mut self, rvalue: &Rvalue, location: Location) { + match rvalue { + Rvalue::AddressOf(mutability, place) => { + let pcx = PlaceContext { is_mut: *mutability == Mutability::Mut }; + self.visit_place(place, pcx, location); + } + Rvalue::Aggregate(_, operands) => { + for op in operands { + self.visit_operand(op, location); + } + } + Rvalue::BinaryOp(_, lhs, rhs) | Rvalue::CheckedBinaryOp(_, lhs, rhs) => { + self.visit_operand(lhs, location); + self.visit_operand(rhs, location); + } + Rvalue::Cast(_, op, ty) => { + self.visit_operand(op, location); + self.visit_ty(ty, location); + } + Rvalue::CopyForDeref(place) | Rvalue::Discriminant(place) | Rvalue::Len(place) => { + self.visit_place(place, PlaceContext::NON_MUTATING, location); + } + Rvalue::Ref(region, kind, place) => { + self.visit_region(region, location); + let pcx = PlaceContext { is_mut: matches!(kind, BorrowKind::Mut { .. }) }; + self.visit_place(place, pcx, location); + } + Rvalue::Repeat(op, constant) => { + self.visit_operand(op, location); + self.visit_const(constant, location); + } + Rvalue::ShallowInitBox(op, ty) => { + self.visit_ty(ty, location); + self.visit_operand(op, location) + } + Rvalue::ThreadLocalRef(_) => {} + Rvalue::NullaryOp(_, ty) => { + self.visit_ty(ty, location); + } + Rvalue::UnaryOp(_, op) | Rvalue::Use(op) => { + self.visit_operand(op, location); + } + } + } + + fn super_operand(&mut self, operand: &Operand, location: Location) { + match operand { + Operand::Copy(place) | Operand::Move(place) => { + self.visit_place(place, PlaceContext::NON_MUTATING, location) + } + Operand::Constant(constant) => { + self.visit_constant(constant, location); + } + } + } + + fn super_user_type_projection(&mut self, projection: &UserTypeProjection) { + // This is a no-op on mir::Visitor. 
+ let _ = projection; + } + + fn super_ty(&mut self, ty: &Ty) { + let _ = ty; + } + + fn super_constant(&mut self, constant: &Constant, location: Location) { + let Constant { span, user_ty: _, literal } = constant; + self.visit_span(span); + self.visit_const(literal, location); + } + + fn super_const(&mut self, constant: &Const, location: Location) { + let Const { kind: _, ty, id: _ } = constant; + self.visit_ty(ty, location); + } + + fn super_region(&mut self, region: &Region) { + let _ = region; + } + + fn super_args(&mut self, args: &GenericArgs) { + let _ = args; + } + + fn super_assert_msg(&mut self, msg: &AssertMessage, location: Location) { + match msg { + AssertMessage::BoundsCheck { len, index } => { + self.visit_operand(len, location); + self.visit_operand(index, location); + } + AssertMessage::Overflow(_, left, right) => { + self.visit_operand(left, location); + self.visit_operand(right, location); + } + AssertMessage::OverflowNeg(op) + | AssertMessage::DivisionByZero(op) + | AssertMessage::RemainderByZero(op) => { + self.visit_operand(op, location); + } + AssertMessage::ResumedAfterReturn(_) | AssertMessage::ResumedAfterPanic(_) => { //nothing to visit + } + AssertMessage::MisalignedPointerDereference { required, found } => { + self.visit_operand(required, location); + self.visit_operand(found, location); + } + } + } +} + +/// This function is a no-op that gets used to ensure this visitor is kept up-to-date. +/// +/// The idea is that whenever we replace an Opaque type by a real type, the compiler will fail +/// when trying to invoke `visit_opaque`. +/// +/// If you are here because your compilation is broken, replace the failing call to `visit_opaque()` +/// by a `visit_` for your construct. +fn visit_opaque(_: &Opaque) {} + +/// The location of a statement / terminator in the code and the CFG. +#[derive(Clone, Copy, PartialEq, Eq)] +pub struct Location(Span); + +impl Location { + pub fn span(&self) -> Span { + self.0 + } +} + +/// Information about a place's usage. +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct PlaceContext { + /// Whether the access is mutable or not. Keep this private so we can increment the type in a + /// backward compatible manner. 
+ is_mut: bool, +} + +impl PlaceContext { + const MUTATING: Self = PlaceContext { is_mut: true }; + const NON_MUTATING: Self = PlaceContext { is_mut: false }; + const NON_USE: Self = PlaceContext { is_mut: false }; + + pub fn is_mutating(&self) -> bool { + self.is_mut + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/mir.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,3 +1,6 @@ mod body; +pub mod mono; +pub mod visit; pub use body::*; +pub use visit::MirVisitor; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/ty.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/ty.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/ty.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/ty.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,10 +3,10 @@ mir::{Body, Mutability}, with, AllocId, DefId, Symbol, }; -use crate::Opaque; +use crate::{Filename, Opaque}; use std::fmt::{self, Debug, Formatter}; -#[derive(Copy, Clone)] +#[derive(Copy, Clone, Eq, PartialEq, Hash)] pub struct Ty(pub usize); impl Debug for Ty { @@ -21,26 +21,45 @@ } } -impl From for Ty { - fn from(value: TyKind) -> Self { - with(|context| context.mk_ty(value)) - } +/// Represents a constant in MIR or from the Type system. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct Const { + /// The constant kind. + pub(crate) kind: ConstantKind, + /// The constant type. + pub(crate) ty: Ty, + /// Used for internal tracking of the internal constant. + pub id: ConstId, } -#[derive(Debug, Clone)] -pub struct Const { - pub literal: ConstantKind, - pub ty: Ty, +impl Const { + /// Build a constant. Note that this should only be used by the compiler. + pub fn new(kind: ConstantKind, ty: Ty, id: ConstId) -> Const { + Const { kind, ty, id } + } + + /// Retrieve the constant kind. + pub fn kind(&self) -> &ConstantKind { + &self.kind + } + + /// Get the constant type. 
+ pub fn ty(&self) -> Ty { + self.ty + } } +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct ConstId(pub usize); + type Ident = Opaque; -#[derive(Debug, Clone)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct Region { pub kind: RegionKind, } -#[derive(Debug, Clone)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum RegionKind { ReEarlyBound(EarlyBoundRegion), ReLateBound(DebruijnIndex, BoundRegion), @@ -51,7 +70,7 @@ pub(crate) type DebruijnIndex = u32; -#[derive(Debug, Clone)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct EarlyBoundRegion { pub def_id: RegionDef, pub index: u32, @@ -60,7 +79,7 @@ pub(crate) type BoundVar = u32; -#[derive(Debug, Clone)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct BoundRegion { pub var: BoundVar, pub kind: BoundRegionKind, @@ -68,25 +87,47 @@ pub(crate) type UniverseIndex = u32; -#[derive(Debug, Clone)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct Placeholder<T> { pub universe: UniverseIndex, pub bound: T, } #[derive(Clone, Copy, PartialEq, Eq)] -pub struct Span(pub usize); +pub struct Span(usize); impl Debug for Span { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("Span") .field("id", &self.0) - .field("repr", &with(|cx| cx.print_span(*self))) + .field("repr", &with(|cx| cx.span_to_string(*self))) .finish() } } -#[derive(Clone, Debug)] +impl Span { + /// Return filename for diagnostic purposes + pub fn get_filename(&self) -> Filename { + with(|c| c.get_filename(self)) + } + + /// Return lines that corespond to this `Span` + pub fn get_lines(&self) -> LineInfo { + with(|c| c.get_lines(&self)) + } +} + +#[derive(Clone, Copy, Debug)] +/// Information you get from `Span` in a struct form. +/// Line and col start from 1. +pub struct LineInfo { + pub start_line: usize, + pub start_col: usize, + pub end_line: usize, + pub end_col: usize, +} + +#[derive(Clone, Debug, Eq, PartialEq)] pub enum TyKind { RigidTy(RigidTy), Alias(AliasKind, AliasTy), @@ -94,7 +135,7 @@ Bound(usize, BoundTy), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum RigidTy { Bool, Char, @@ -111,7 +152,7 @@ FnDef(FnDef, GenericArgs), FnPtr(PolyFnSig), Closure(ClosureDef, GenericArgs), - Generator(GeneratorDef, GenericArgs, Movability), + Coroutine(CoroutineDef, GenericArgs, Movability), Dynamic(Vec<Binder<ExistentialPredicate>>, Region, DynKind), Never, Tuple(Vec<Ty>), @@ -165,7 +206,7 @@ pub struct ClosureDef(pub DefId); #[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub struct GeneratorDef(pub DefId); +pub struct CoroutineDef(pub DefId); #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub struct ParamDef(pub DefId); @@ -194,7 +235,8 @@ #[derive(Clone, PartialEq, Eq, Debug)] pub struct RegionDef(pub DefId); -#[derive(Clone, Debug)] +/// A list of generic arguments.
+#[derive(Clone, Debug, Eq, PartialEq)] pub struct GenericArgs(pub Vec<GenericArgKind>); impl std::ops::Index for GenericArgs { @@ -213,7 +255,7 @@ } } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum GenericArgKind { Lifetime(Region), Type(Ty), @@ -242,13 +284,13 @@ } } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum TermKind { Type(Ty), Const(Const), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum AliasKind { Projection, Inherent, @@ -256,7 +298,7 @@ Weak, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct AliasTy { pub def_id: AliasDef, pub args: GenericArgs, @@ -264,7 +306,7 @@ pub type PolyFnSig = Binder<FnSig>; -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct FnSig { pub inputs_and_output: Vec<Ty>, pub c_variadic: bool, @@ -303,18 +345,18 @@ RiscvInterruptS, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct Binder<T> { pub value: T, pub bound_vars: Vec<BoundVariableKind>, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct EarlyBinder<T> { pub value: T, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum BoundVariableKind { Ty(BoundTyKind), Region(BoundRegionKind), @@ -327,46 +369,46 @@ Param(ParamDef, String), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum BoundRegionKind { BrAnon, BrNamed(BrNamedDef, String), BrEnv, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum DynKind { Dyn, DynStar, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum ExistentialPredicate { Trait(ExistentialTraitRef), Projection(ExistentialProjection), AutoTrait(TraitDef), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct ExistentialTraitRef { pub def_id: TraitDef, pub generic_args: GenericArgs, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct ExistentialProjection { pub def_id: TraitDef, pub generic_args: GenericArgs, pub term: TermKind, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct ParamTy { pub index: u32, pub name: String, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct BoundTy { pub var: usize, pub kind: BoundTyKind, @@ -382,14 +424,14 @@ pub type InitMaskMaterialized = Vec<u64>; /// Stores the provenance information of pointers stored in memory. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct ProvenanceMap { /// Provenance in this map applies from the given offset for an entire pointer-size worth of /// bytes. Two entries in this map are always at least a pointer size apart. pub ptrs: Vec<(Size, Prov)>, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct Allocation { pub bytes: Bytes, pub provenance: ProvenanceMap, @@ -397,20 +439,23 @@ pub mutability: Mutability, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum ConstantKind { Allocated(Allocation), Unevaluated(UnevaluatedConst), Param(ParamConst), + /// Store ZST constants. + /// We have to special handle these constants since its type might be generic.
+ ZeroSized, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct ParamConst { pub index: u32, pub name: String, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct UnevaluatedConst { pub def: ConstDef, pub args: GenericArgs, @@ -424,7 +469,7 @@ AlwaysApplicable, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct TraitDecl { pub def_id: TraitDef, pub unsafety: Safety, @@ -455,13 +500,13 @@ pub type ImplTrait = EarlyBinder<Ty>; -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct TraitRef { pub def_id: TraitDef, pub args: GenericArgs, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct Generics { pub parent: Option<GenericDef>, pub parent_count: usize, @@ -472,14 +517,14 @@ pub host_effect_index: Option<usize>, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum GenericParamDefKind { Lifetime, Type { has_default: bool, synthetic: bool }, Const { has_default: bool }, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct GenericParamDef { pub name: super::Symbol, pub def_id: GenericDef, @@ -493,7 +538,7 @@ pub predicates: Vec<(PredicateKind, Span)>, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum PredicateKind { Clause(ClauseKind), ObjectSafe(TraitDef), @@ -505,7 +550,7 @@ AliasRelate(TermKind, TermKind, AliasRelationDirection), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum ClauseKind { Trait(TraitPredicate), RegionOutlives(RegionOutlivesPredicate), @@ -516,52 +561,75 @@ ConstEvaluatable(Const), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum ClosureKind { Fn, FnMut, FnOnce, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct SubtypePredicate { pub a: Ty, pub b: Ty, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct CoercePredicate { pub a: Ty, pub b: Ty, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum AliasRelationDirection { Equate, Subtype, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct TraitPredicate { pub trait_ref: TraitRef, pub polarity: ImplPolarity, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct OutlivesPredicate<A, B>(pub A, pub B); pub type RegionOutlivesPredicate = OutlivesPredicate<Region, Region>; pub type TypeOutlivesPredicate = OutlivesPredicate<Ty, Region>; -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub struct ProjectionPredicate { pub projection_ty: AliasTy, pub term: TermKind, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, PartialEq)] pub enum ImplPolarity { Positive, Negative, Reservation, } + +pub trait IndexedVal { + fn to_val(index: usize) -> Self; + + fn to_index(&self) -> usize; +} + +macro_rules!
index_impl { + ($name:ident) => { + impl IndexedVal for $name { + fn to_val(index: usize) -> Self { + $name(index) + } + fn to_index(&self) -> usize { + self.0 + } + } + }; +} + +index_impl!(ConstId); +index_impl!(Ty); +index_impl!(Span); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/visitor.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/visitor.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/visitor.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/compiler/stable_mir/src/visitor.rs 2023-12-21 16:55:28.000000000 +0000 @@ -47,12 +47,12 @@ visitor.visit_const(self) } fn super_visit(&self, visitor: &mut V) -> ControlFlow { - match &self.literal { + match &self.kind() { super::ty::ConstantKind::Allocated(alloc) => alloc.visit(visitor)?, super::ty::ConstantKind::Unevaluated(uv) => uv.visit(visitor)?, - super::ty::ConstantKind::Param(_) => {} + super::ty::ConstantKind::Param(_) | super::ty::ConstantKind::ZeroSized => {} } - self.ty.visit(visitor) + self.ty().visit(visitor) } } @@ -148,7 +148,7 @@ RigidTy::FnDef(_, args) => args.visit(visitor), RigidTy::FnPtr(sig) => sig.visit(visitor), RigidTy::Closure(_, args) => args.visit(visitor), - RigidTy::Generator(_, args, _) => args.visit(visitor), + RigidTy::Coroutine(_, args, _) => args.visit(visitor), RigidTy::Dynamic(pred, r, _) => { pred.visit(visitor)?; r.visit(visitor) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/config.example.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/config.example.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/config.example.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/config.example.toml 2023-12-21 16:55:28.000000000 +0000 @@ -19,11 +19,18 @@ # Note that this has no default value (x.py uses the defaults in `config.example.toml`). #profile = -# Keeps track of the last version of `x.py` used. -# If `changelog-seen` does not match the version that is currently running, -# `x.py` will prompt you to update it and to read the changelog. -# See `src/bootstrap/CHANGELOG.md` for more information. -changelog-seen = 2 +# Keeps track of major changes made to this configuration. +# +# This value also represents ID of the PR that caused major changes. Meaning, +# you can visit github.com/rust-lang/rust/pull/{change-id} to check for more details. +# +# A 'major change' includes any of the following +# - A new option +# - A change in the default values +# +# If `change-id` does not match the version that is currently running, +# `x.py` will prompt you to update it and check the related PR for more details. +change-id = 116881 # ============================================================================= # Tweaking how LLVM is compiled @@ -35,11 +42,15 @@ # Unless you're developing for a target where Rust CI doesn't build a compiler # toolchain or changing LLVM locally, you probably want to leave this enabled. # -# All tier 1 targets are currently supported; set this to `"if-available"` if -# you are not sure whether you're on a tier 1 target. +# Set this to `"if-available"` if you are not sure whether you're on a tier 1 +# target. All tier 1 targets are currently supported; # # We also currently only support this when building LLVM for the build triple. # +# Set this to `"if-unchanged"` to only download if the llvm-project have not +# been modified. (If there are no changes or if built from tarball source, +# the logic is the same as "if-available") +# # Note that many of the LLVM options are not currently supported for # downloading. 
Currently only the "assertions" option can be toggled. #download-ci-llvm = if rust.channel == "dev" { "if-available" } else { false } @@ -370,6 +381,9 @@ # this is not intended to be used during local development. #metrics = false +# Specify the location of the Android NDK. Used when targeting Android. +#android-ndk = "/path/to/android-ndk-r25b" + # ============================================================================= # General install configuration options # ============================================================================= @@ -543,10 +557,11 @@ # Whether to always use incremental compilation when building rustc #incremental = false -# Build a multi-threaded rustc -# FIXME(#75760): Some UI tests fail when this option is enabled. -# NOTE: This option is NOT SUPPORTED. See #48685. -#parallel-compiler = false +# Build a multi-threaded rustc. This allows users to use parallel rustc +# via the unstable option `-Z threads=n`. +# Since stable/beta channels only allow using stable features, +# `parallel-compiler = false` should be set for these channels. +#parallel-compiler = true # The default linker that will be hard-coded into the generated # compiler for targets that don't specify a default linker explicitly @@ -749,12 +764,6 @@ # it must link to `libgcc_eh.a` to get a working output, and this option have no effect. #llvm-libunwind = 'no' if Linux, 'in-tree' if Fuchsia -# If this target is for Android, this option will be required to specify where -# the NDK for the target lives. This is used to find the C compiler to link and -# build native code. -# See `src/bootstrap/cc_detect.rs` for details. -#android-ndk = (path) - # Build the sanitizer runtimes for this target. # This option will override the same option under [build] section. 
#sanitizers = build.sanitizers (bool) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/changelog rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/changelog --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/changelog 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/changelog 2024-02-14 22:09:28.000000000 +0000 @@ -1,12 +1,25 @@ -rustc (1.74.1+dfsg0ubuntu1~bpo10-0ubuntu0.23.10) mantic; urgency=medium +rustc (1.75.0+dfsg0ubuntu1~bpo10-0ubuntu0.23.10) mantic; urgency=medium - * Backport to Mantic (LP: #2044036) + * Backport to Mantic (LP: #2047858) - d/p/ubuntu-backport-disable-newer-tests.patch: add a patch to disable newer tests that can't pass on older Ubuntu series - Re-enable libgit2 vendoring: - d/control: remove libgit2-dev and libhttp-parser-dev from B-D - -- Zixing Liu Wed, 17 Jan 2024 13:44:02 -0700 + -- Zixing Liu Wed, 14 Feb 2024 15:09:28 -0700 + +rustc (1.75.0+dfsg0ubuntu1-0ubuntu1) noble; urgency=medium + + * New upstream release 1.75.0 (LP: #2047858) + - d/patches: Refresh patches + - d/copyright: update the list of unused crates + - d/copyright: update copyright data + - d/libstd-rust-*: bump version to 1.75 + - d/patches: remove obsoleted patches + - d/s/lintian-overrides: update lintian overrides + - d/control: update vendored crate list + + -- Zixing Liu Tue, 02 Jan 2024 12:09:03 -0700 rustc (1.74.1+dfsg0ubuntu1-0ubuntu1) noble; urgency=medium diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/control rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/control --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/control 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/control 2024-02-14 22:09:28.000000000 +0000 @@ -15,9 +15,9 @@ dh-cargo (>= 28ubuntu1~), dpkg-dev (>= 1.17.14), python3:native, - cargo:native (>= 1.73.0+dfsg) , - rustc:native (>= 1.73.0+dfsg) , - rustc:native (<= 1.74.1++) , + cargo:native (>= 1.74.0+dfsg) , + rustc:native (>= 1.74.0+dfsg) , + rustc:native (<= 1.75.1++) , llvm-17-dev:native, llvm-17-tools:native, libclang-rt-17-dev (>= 1:17.0.2), @@ -59,7 +59,7 @@ Homepage: http://www.rust-lang.org/ Vcs-Git: https://salsa.debian.org/rust-team/rust.git Vcs-Browser: https://salsa.debian.org/rust-team/rust -XS-Vendored-Sources-Rust: addr2line@0.21.0, adler@1.0.2, ahash@0.8.3, aho-corasick@0.7.20, aho-corasick@1.0.2, allocator-api2@0.2.15, always-assert@0.1.3, ammonia@3.3.0, android-tzdata@0.1.1, android_system_properties@0.1.5, anes@0.1.6, annotate-snippets@0.9.1, ansi_term@0.12.1, anstream@0.6.3, anstyle-parse@0.2.1, anstyle-query@1.0.0, anstyle@1.0.4, anyhow@1.0.75, ar_archive_writer@0.1.5, arc-swap@1.6.0, array_tool@1.0.3, arrayvec@0.7.4, askama@0.12.0, askama_derive@0.12.1, askama_escape@0.10.3, atty@0.2.14, autocfg@1.1.0, backtrace@0.3.69, base16ct@0.2.0, base64@0.21.3, base64ct@1.6.0, basic-toml@0.1.2, bincode@1.3.3, bit-set@0.5.3, bit-vec@0.6.3, bitflags@1.3.2, bitflags@2.4.0, bitmaps@2.1.0, block-buffer@0.10.4, bstr@1.6.2, btoi@0.4.3, bumpalo@3.13.0, bytecount@0.6.3, byteorder@1.4.3, bytes@1.4.0, bytesize@1.3.0, byteyarn@0.2.3, camino@1.1.4, cargo-platform@0.1.2, cargo_metadata@0.15.4, cargo_metadata@0.17.0, cast@0.3.0, cc@1.0.79, cfg-if@1.0.0, chrono@0.4.26, ciborium-io@0.2.1, ciborium-ll@0.2.1, ciborium@0.2.1, clap@4.4.6, clap_builder@4.4.6, clap_complete@4.3.1, clap_derive@4.4.2, clap_lex@0.5.0, clru@0.6.1, cmake@0.1.48, color-eyre@0.6.2, color-print-proc-macro@0.3.4, color-print@0.3.4, color-spantrace@0.2.0, colorchoice@1.0.0, colored@2.0.4, comma@1.0.0, compiler_builtins@0.1.101, console@0.15.7, const-oid@0.9.2, 
content_inspector@0.2.4, convert_case@0.4.0, core-foundation-sys@0.8.4, core-foundation@0.9.3, countme@3.0.1, cov-mark@2.0.0-pre.1, cpufeatures@0.2.8, crc32fast@1.3.2, criterion-plot@0.5.0, criterion@0.5.1, crossbeam-channel@0.5.8, crossbeam-deque@0.8.3, crossbeam-epoch@0.9.15, crossbeam-utils@0.8.16, crypto-bigint@0.5.2, crypto-common@0.1.6, cstr@0.2.8, ct-codecs@1.1.1, curl-sys@0.4.68+curl-8.4.0, curl@0.4.44, darling@0.14.4, darling@0.20.3, darling_core@0.14.4, darling_core@0.20.3, darling_macro@0.14.4, darling_macro@0.20.3, dashmap@5.4.0, datafrog@2.0.1, der@0.7.6, deranged@0.3.8, derive_builder@0.12.0, derive_builder_core@0.12.0, derive_builder_macro@0.12.0, derive_more@0.99.17, derive_setters@0.1.6, diff@0.1.13, digest@0.10.7, dirs-next@2.0.0, dirs-sys-next@0.1.2, dirs-sys@0.3.7, dirs@4.0.0, displaydoc@0.2.4, dissimilar@1.0.6, dlmalloc@0.2.4, drop_bomb@0.1.5, dunce@1.0.4, ecdsa@0.16.7, ed25519-compact@2.0.4, either@1.8.1, elasticlunr-rs@3.0.2, elliptic-curve@0.13.5, elsa@1.7.1, ena@0.14.2, encoding_rs@0.8.33, env_logger@0.10.0, env_logger@0.7.1, equivalent@1.0.1, erased-serde@0.3.31, errno-dragonfly@0.1.2, errno@0.3.1, escargot@0.5.8, expect-test@1.4.1, eyre@0.6.8, fallible-iterator@0.3.0, faster-hex@0.8.1, fastrand@2.0.0, fd-lock@3.0.11, ff@0.13.0, fiat-crypto@0.1.20, field-offset@0.3.6, filetime@0.2.22, flate2@1.0.27, fluent-bundle@0.15.2, fluent-langneg@0.13.0, fluent-syntax@0.11.0, fnv@1.0.7, foreign-types-shared@0.1.1, foreign-types@0.3.2, form_urlencoded@1.2.0, fortanix-sgx-abi@0.5.0, fs-err@2.9.0, fs_extra@1.3.0, futf@0.1.5, futures-channel@0.3.28, futures-core@0.3.28, futures-executor@0.3.28, futures-io@0.3.28, futures-macro@0.3.28, futures-sink@0.3.28, futures-task@0.3.28, futures-util@0.3.28, futures@0.3.28, generic-array@0.14.7, getopts@0.2.21, getrandom@0.2.10, gimli@0.28.0, git2-curl@0.19.0, git2@0.18.0, gix-actor@0.27.0, gix-attributes@0.19.0, gix-bitmap@0.2.7, gix-chunk@0.4.4, gix-command@0.2.9, gix-commitgraph@0.21.0, gix-config-value@0.14.0, gix-config@0.30.0, gix-credentials@0.20.0, gix-date@0.8.0, gix-diff@0.36.0, gix-discover@0.25.0, gix-features@0.35.0, gix-filter@0.5.0, gix-fs@0.7.0, gix-glob@0.13.0, gix-hash@0.13.0, gix-hashtable@0.4.0, gix-ignore@0.8.0, gix-index@0.25.0, gix-lock@10.0.0, gix-macros@0.1.0, gix-negotiate@0.8.0, gix-object@0.37.0, gix-odb@0.53.0, gix-pack@0.43.0, gix-packetline-blocking@0.16.6, gix-packetline@0.16.6, gix-path@0.10.0, gix-pathspec@0.3.0, gix-prompt@0.7.0, gix-protocol@0.40.0, gix-quote@0.4.7, gix-ref@0.37.0, gix-refspec@0.18.0, gix-revision@0.22.0, gix-revwalk@0.8.0, gix-sec@0.10.0, gix-submodule@0.4.0, gix-tempfile@10.0.0, gix-trace@0.1.3, gix-transport@0.37.0, gix-traverse@0.33.0, gix-url@0.24.0, gix-utils@0.1.5, gix-validate@0.8.0, gix-worktree@0.26.0, gix@0.54.1, glob@0.3.1, globset@0.4.13, group@0.13.0, gsgdt@0.1.2, h2@0.3.19, half@1.8.2, handlebars@3.5.5, handlebars@4.3.7, hashbrown@0.12.3, hashbrown@0.14.0, heck@0.4.1, hermit-abi@0.1.19, hermit-abi@0.3.2, hex@0.4.3, hkdf@0.12.3, hmac@0.12.1, home@0.5.5, html5ever@0.26.0, http-auth@0.1.8, http-body@0.4.5, http@0.2.9, httparse@1.8.0, httpdate@1.0.2, humansize@2.1.3, humantime@1.3.0, humantime@2.1.0, hyper-tls@0.5.0, hyper@0.14.22, iana-time-zone-haiku@0.1.2, iana-time-zone@0.1.57, icu_list@1.2.0, icu_locid@1.2.0, icu_provider@1.2.0, icu_provider_adapters@1.2.0, icu_provider_macros@1.2.0, ident_case@1.0.1, idna@0.4.0, if_chain@1.0.2, ignore@0.4.20, im-rc@15.1.0, indenter@0.3.3, indexmap@1.9.3, indexmap@2.0.0, indicatif@0.17.6, indoc@1.0.9, instant@0.1.12, intl-memoizer@0.5.1, 
intl_pluralrules@7.0.2, io-lifetimes@1.0.9, ipnet@2.7.2, is-terminal@0.4.9, itertools@0.10.5, itoa@0.4.8, itoa@1.0.6, jobserver@0.1.26, jod-thread@0.1.2, js-sys@0.3.64, jsonpath_lib@0.2.6, la-arena@0.3.1, lazy_static@1.4.0, lazycell@1.3.0, leb128@0.2.5, levenshtein@1.0.5, libc@0.2.148, libgit2-sys@0.16.1+1.7.1, libloading@0.7.4, libloading@0.8.0, libm@0.1.4, libm@0.2.7, libssh2-sys@0.3.0, libz-sys@1.1.9, line-index@0.1.0-pre.1, linux-raw-sys@0.3.2, linux-raw-sys@0.4.7, litemap@0.7.0, lock_api@0.4.10, log@0.4.19, lsp-server@0.7.4, lsp-types@0.94.0, lzma-sys@0.1.20, mac@0.1.1, maplit@1.0.2, markup5ever@0.11.0, matchers@0.1.0, maybe-async@0.2.7, md-5@0.10.5, mdbook@0.4.31, measureme@10.1.1, memchr@2.6.2, memmap2@0.2.3, memmap2@0.5.10, memmap2@0.7.1, memoffset@0.8.0, memoffset@0.9.0, mime@0.3.17, mime_guess@2.0.4, minifier@0.2.2, minimal-lexical@0.2.1, miniz_oxide@0.7.1, mio@0.8.8, native-tls@0.2.11, new_debug_unreachable@1.0.4, nohash-hasher@0.2.0, nom@7.1.3, normalize-line-endings@0.3.0, nu-ansi-term@0.46.0, num-traits@0.2.15, num_cpus@1.16.0, num_threads@0.1.6, number_prefix@0.4.0, object@0.32.0, odht@0.3.1, once_cell@1.18.0, oorandom@11.1.3, opener@0.5.2, opener@0.6.1, openssl-macros@0.1.1, openssl-probe@0.1.5, openssl-sys@0.9.92, openssl@0.10.57, ordered-float@2.10.0, orion@0.17.4, os_info@3.7.0, overload@0.1.1, owo-colors@3.5.0, p384@0.13.0, packed_simd_2@0.3.8, pad@0.1.6, parking_lot@0.11.2, parking_lot@0.12.1, parking_lot_core@0.8.6, parking_lot_core@0.9.8, partial_ref@0.3.3, partial_ref_derive@0.3.3, pasetors@0.6.7, pathdiff@0.2.1, pem-rfc7468@0.7.0, percent-encoding@2.3.0, perf-event-open-sys@1.0.1, perf-event-open-sys@3.0.0, perf-event@0.4.7, pest@2.7.0, pest_derive@2.7.0, pest_generator@2.7.0, pest_meta@2.7.0, phf@0.10.1, phf_codegen@0.10.0, phf_generator@0.10.0, phf_shared@0.10.0, pin-project-lite@0.2.10, pin-utils@0.1.0, pkcs8@0.10.2, pkg-config@0.3.27, plotters-backend@0.3.4, plotters-svg@0.3.3, plotters@0.3.4, polonius-engine@0.13.0, portable-atomic@1.4.2, ppv-lite86@0.2.17, precomputed-hash@0.1.1, pretty_assertions@1.4.0, prettydiff@0.6.4, primeorder@0.13.2, proc-macro-hack@0.5.20+deprecated, proc-macro2@1.0.66, prodash@26.2.2, proptest@1.2.0, psm@0.1.21, pulldown-cmark@0.9.3, punycode@0.4.1, quick-error@1.2.3, quick-error@2.0.1, quine-mc_cluskey@0.2.4, quote@1.0.32, r-efi-alloc@1.0.0, r-efi@4.2.0, ra-ap-rustc_lexer@0.10.0, rand@0.8.5, rand_chacha@0.3.1, rand_core@0.6.4, rand_xorshift@0.3.0, rand_xoshiro@0.6.0, rayon-core@1.11.0, rayon@1.7.0, redox_syscall@0.2.16, redox_syscall@0.3.5, redox_users@0.4.3, regex-automata@0.1.10, regex-automata@0.2.0, regex-automata@0.3.8, regex-syntax@0.6.29, regex-syntax@0.7.2, regex@1.8.4, reqwest@0.11.18, rfc6979@0.4.0, rowan@0.15.11, rustc-demangle@0.1.23, rustc-hash@1.1.0, rustc-rayon-core@0.5.0, rustc-rayon@0.5.0, rustc-semver@1.1.0, rustc_apfloat@0.2.0+llvm-462a31f5a5ab, rustc_tools_util@0.3.0, rustc_version@0.4.0, rustfix@0.6.1, rustix@0.37.6, rustix@0.38.14, rustversion@1.0.12, rusty-fork@0.3.0, ruzstd@0.4.0, ryu@1.0.13, same-file@1.0.6, scoped-tls@1.0.1, scopeguard@1.1.0, sec1@0.7.2, security-framework-sys@2.9.0, security-framework@2.9.2, self_cell@0.10.2, semver@1.0.18, serde-untagged@0.1.1, serde-value@0.7.0, serde@1.0.188, serde_derive@1.0.188, serde_ignored@0.1.9, serde_json@1.0.105, serde_repr@0.1.12, serde_spanned@0.6.3, serde_urlencoded@0.7.1, sha1@0.10.5, sha1_smol@1.0.0, sha2@0.10.7, sharded-slab@0.1.4, shell-escape@0.1.5, shlex@1.1.0, signature@2.1.0, similar@2.2.1, siphasher@0.3.10, sized-chunks@0.6.5, slab@0.4.8, 
smallvec@1.11.0, smol_str@0.2.0, snap@1.1.0, snapbox-macros@0.3.6, snapbox@0.4.13, socket2@0.4.9, spdx-expression@0.5.2, spdx-rs@0.5.3, spki@0.7.2, stable_deref_trait@1.2.0, stacker@0.1.15, static_assertions@1.1.0, string_cache@0.8.7, string_cache_codegen@0.5.2, strsim@0.10.0, strum@0.24.1, strum_macros@0.24.3, subtle@2.5.0, syn@1.0.109, syn@2.0.29, synstructure@0.12.6, synstructure@0.13.0, sysinfo@0.26.7, sysinfo@0.29.2, tar@0.4.38, tar@0.4.40, tempfile@3.8.0, tendril@0.4.3, term@0.7.0, termcolor@1.2.0, terminal_size@0.3.0, termize@0.1.1, tester@0.9.1, text-size@1.1.0, thin-vec@0.2.12, thiserror-core-impl@1.0.38, thiserror-core@1.0.38, thiserror-impl@1.0.47, thiserror@1.0.47, thorin-dwp@0.7.0, thread_local@1.1.7, threadpool@1.8.1, time-core@0.1.2, time-macros@0.2.15, time@0.3.29, tinystr@0.7.1, tinytemplate@1.2.1, tinyvec@1.6.0, tinyvec_macros@0.1.1, tokio-native-tls@0.3.1, tokio-util@0.7.2, tokio@1.29.1, toml@0.5.11, toml@0.7.6, toml_datetime@0.6.3, toml_edit@0.19.14, topological-sort@0.2.2, tower-service@0.3.2, tracing-attributes@0.1.26, tracing-core@0.1.30, tracing-core@0.1.31, tracing-error@0.2.0, tracing-log@0.1.3, tracing-subscriber@0.3.17, tracing-tree@0.2.4, tracing@0.1.37, triomphe@0.1.8, try-lock@0.2.4, twox-hash@1.6.3, type-map@0.4.0, typenum@1.16.0, ucd-parse@0.1.10, ucd-trie@0.1.5, ui_test@0.20.0, unarray@0.1.4, ungrammar@1.16.1, unic-langid-impl@0.9.1, unic-langid-macros-impl@0.9.1, unic-langid-macros@0.9.1, unic-langid@0.9.1, unicase@2.7.0, unicode-bidi@0.3.13, unicode-bom@2.0.2, unicode-ident@1.0.9, unicode-normalization@0.1.22, unicode-properties@0.1.0, unicode-script@0.5.5, unicode-security@0.1.0, unicode-segmentation@1.10.1, unicode-width@0.1.10, unicode-xid@0.2.4, unicode_categories@0.1.1, unified-diff@0.2.1, url@2.4.1, utf-8@0.7.6, utf8parse@0.2.1, uuid@1.4.0, valuable@0.1.0, varisat-checker@0.2.2, varisat-dimacs@0.2.2, varisat-formula@0.2.2, varisat-internal-macros@0.2.2, varisat-internal-proof@0.2.2, varisat@0.2.2, vcpkg@0.2.15, vec_mut_scan@0.3.0, version_check@0.9.4, wait-timeout@0.2.0, walkdir@2.3.3, want@0.3.1, wasi@0.11.0+wasi-snapshot-preview1, wasm-bindgen-backend@0.2.87, wasm-bindgen-futures@0.4.34, wasm-bindgen-macro-support@0.2.87, wasm-bindgen-macro@0.2.87, wasm-bindgen-shared@0.2.87, wasm-bindgen@0.2.87, web-sys@0.3.64, winapi-i686-pc-windows-gnu@0.4.0, winapi-x86_64-pc-windows-gnu@0.4.0, winapi@0.3.9, winnow@0.5.15, write-json@0.1.2, writeable@0.5.2, xattr@0.2.3, xflags-macros@0.3.1, xflags@0.3.1, xshell-macros@0.2.3, xshell@0.2.3, xz2@0.1.7, xz@0.1.0, yansi-term@0.1.2, yansi@0.5.1, yoke-derive@0.7.1, yoke@0.7.1, zerofrom-derive@0.1.2, zerofrom@0.1.2, zeroize@1.6.0, zerovec-derive@0.9.4, zerovec@0.9.4, zip@0.6.6 +XS-Vendored-Sources-Rust: addr2line@0.21.0, adler@1.0.2, ahash@0.8.3, aho-corasick@0.7.20, aho-corasick@1.0.2, allocator-api2@0.2.15, always-assert@0.1.3, ammonia@3.3.0, android-tzdata@0.1.1, android_system_properties@0.1.5, anes@0.1.6, annotate-snippets@0.9.1, ansi_term@0.12.1, anstream@0.5.0, anstream@0.6.4, anstyle-parse@0.2.1, anstyle-query@1.0.0, anstyle@1.0.4, anyhow@1.0.75, ar_archive_writer@0.1.5, arc-swap@1.6.0, array_tool@1.0.3, arrayvec@0.7.4, askama@0.12.0, askama_derive@0.12.1, askama_escape@0.10.3, atty@0.2.14, autocfg@1.1.0, backtrace@0.3.69, base16ct@0.2.0, base64@0.21.5, base64ct@1.6.0, basic-toml@0.1.2, bincode@1.3.3, bit-set@0.5.3, bit-vec@0.6.3, bitflags@1.3.2, bitflags@2.4.1, bitmaps@2.1.0, block-buffer@0.10.4, bstr@1.6.2, btoi@0.4.3, bumpalo@3.14.0, bytecount@0.6.4, byteorder@1.4.3, bytes@1.4.0, bytesize@1.3.0, 
camino@1.1.4, cargo-platform@0.1.2, cargo_metadata@0.15.4, cargo_metadata@0.18.1, cast@0.3.0, cc@1.0.79, cfg-if@1.0.0, chrono@0.4.26, ciborium-io@0.2.1, ciborium-ll@0.2.1, ciborium@0.2.1, clap-cargo@0.12.0, clap@4.4.7, clap_builder@4.4.7, clap_complete@4.4.3, clap_derive@4.4.7, clap_lex@0.6.0, clru@0.6.1, cmake@0.1.48, color-eyre@0.6.2, color-print-proc-macro@0.3.5, color-print@0.3.5, color-spantrace@0.2.0, colorchoice@1.0.0, colored@2.0.4, comma@1.0.0, compiler_builtins@0.1.103, console@0.15.7, const-oid@0.9.2, content_inspector@0.2.4, convert_case@0.4.0, core-foundation-sys@0.8.4, core-foundation@0.9.3, countme@3.0.1, cov-mark@2.0.0-pre.1, cpufeatures@0.2.8, crc32fast@1.3.2, criterion-plot@0.5.0, criterion@0.5.1, crossbeam-channel@0.5.8, crossbeam-deque@0.8.3, crossbeam-epoch@0.9.15, crossbeam-utils@0.8.16, crypto-bigint@0.5.2, crypto-common@0.1.6, cstr@0.2.8, ct-codecs@1.1.1, curl-sys@0.4.68+curl-8.4.0, curl@0.4.44, darling@0.14.4, darling@0.20.3, darling_core@0.14.4, darling_core@0.20.3, darling_macro@0.14.4, darling_macro@0.20.3, dashmap@5.4.0, datafrog@2.0.1, der@0.7.6, deranged@0.3.8, derivative@2.2.0, derive_builder@0.12.0, derive_builder_core@0.12.0, derive_builder_macro@0.12.0, derive_more@0.99.17, derive_setters@0.1.6, diff@0.1.13, digest@0.10.7, dirs-next@2.0.0, dirs-sys-next@0.1.2, dirs-sys@0.3.7, dirs@4.0.0, displaydoc@0.2.4, dissimilar@1.0.6, dlmalloc@0.2.4, drop_bomb@0.1.5, dunce@1.0.4, ecdsa@0.16.7, ed25519-compact@2.0.4, either@1.8.1, elasticlunr-rs@3.0.2, elliptic-curve@0.13.5, elsa@1.7.1, ena@0.14.2, encoding_rs@0.8.33, env_logger@0.10.0, env_logger@0.7.1, equivalent@1.0.1, erased-serde@0.3.31, errno@0.3.5, escargot@0.5.8, expect-test@1.4.1, eyre@0.6.8, fallible-iterator@0.3.0, faster-hex@0.8.1, fastrand@2.0.0, fd-lock@3.0.13, ff@0.13.0, fiat-crypto@0.1.20, field-offset@0.3.6, filetime@0.2.22, flate2@1.0.28, fluent-bundle@0.15.2, fluent-langneg@0.13.0, fluent-syntax@0.11.0, fnv@1.0.7, foreign-types-shared@0.1.1, foreign-types@0.3.2, form_urlencoded@1.2.0, fortanix-sgx-abi@0.5.0, fs-err@2.9.0, fs_extra@1.3.0, futf@0.1.5, futures-channel@0.3.28, futures-core@0.3.28, futures-executor@0.3.28, futures-io@0.3.28, futures-macro@0.3.28, futures-sink@0.3.28, futures-task@0.3.28, futures-util@0.3.28, futures@0.3.28, generic-array@0.14.7, getopts@0.2.21, getrandom@0.2.10, gimli@0.28.0, git2-curl@0.19.0, git2@0.18.1, gix-actor@0.28.0, gix-attributes@0.20.0, gix-bitmap@0.2.7, gix-chunk@0.4.4, gix-command@0.2.10, gix-commitgraph@0.22.0, gix-config-value@0.14.0, gix-config@0.31.0, gix-credentials@0.21.0, gix-date@0.8.0, gix-diff@0.37.0, gix-discover@0.26.0, gix-features@0.35.0, gix-features@0.36.0, gix-filter@0.6.0, gix-fs@0.8.0, gix-glob@0.14.0, gix-hash@0.13.1, gix-hashtable@0.4.0, gix-ignore@0.9.0, gix-index@0.26.0, gix-lock@11.0.0, gix-macros@0.1.0, gix-negotiate@0.9.0, gix-object@0.38.0, gix-odb@0.54.0, gix-pack@0.44.0, gix-packetline-blocking@0.16.6, gix-packetline@0.16.7, gix-path@0.10.0, gix-pathspec@0.4.0, gix-prompt@0.7.0, gix-protocol@0.41.1, gix-quote@0.4.7, gix-ref@0.38.0, gix-refspec@0.19.0, gix-revision@0.23.0, gix-revwalk@0.9.0, gix-sec@0.10.0, gix-submodule@0.5.0, gix-tempfile@11.0.0, gix-trace@0.1.3, gix-transport@0.38.0, gix-traverse@0.34.0, gix-url@0.25.1, gix-utils@0.1.5, gix-validate@0.8.0, gix-worktree@0.27.0, gix@0.55.2, glob@0.3.1, globset@0.4.13, group@0.13.0, gsgdt@0.1.2, h2@0.3.19, half@1.8.2, handlebars@3.5.5, handlebars@4.3.7, hashbrown@0.12.3, hashbrown@0.14.2, heck@0.4.1, hermit-abi@0.1.19, hermit-abi@0.3.2, hex@0.4.3, hkdf@0.12.3, hmac@0.12.1, 
home@0.5.5, html5ever@0.26.0, http-auth@0.1.8, http-body@0.4.5, http@0.2.9, httparse@1.8.0, httpdate@1.0.2, humansize@2.1.3, humantime@1.3.0, humantime@2.1.0, hyper-tls@0.5.0, hyper@0.14.22, iana-time-zone-haiku@0.1.2, iana-time-zone@0.1.57, icu_list@1.3.2, icu_list_data@1.3.2, icu_locid@1.3.2, icu_locid_transform@1.3.2, icu_locid_transform_data@1.3.2, icu_provider@1.3.2, icu_provider_adapters@1.3.2, icu_provider_macros@1.3.2, ident_case@1.0.1, idna@0.4.0, if_chain@1.0.2, ignore@0.4.20, im-rc@15.1.0, indenter@0.3.3, indexmap@1.9.3, indexmap@2.0.0, indicatif@0.17.6, indoc@1.0.9, instant@0.1.12, intl-memoizer@0.5.1, intl_pluralrules@7.0.2, ipnet@2.7.2, is-terminal@0.4.9, itertools@0.10.5, itertools@0.11.0, itoa@0.4.8, itoa@1.0.6, jobserver@0.1.27, jod-thread@0.1.2, js-sys@0.3.64, jsonpath_lib@0.2.6, kstring@2.0.0, la-arena@0.3.1, lazy_static@1.4.0, lazycell@1.3.0, leb128@0.2.5, levenshtein@1.0.5, libc@0.2.150, libgit2-sys@0.16.1+1.7.1, libloading@0.7.4, libloading@0.8.1, libm@0.2.7, libssh2-sys@0.3.0, libz-sys@1.1.9, line-index@0.1.0-pre.1, linux-raw-sys@0.4.10, litemap@0.7.1, lock_api@0.4.10, log@0.4.20, lsp-server@0.7.4, lsp-types@0.94.0, lzma-sys@0.1.20, mac@0.1.1, maplit@1.0.2, markup5ever@0.11.0, matchers@0.1.0, maybe-async@0.2.7, md-5@0.10.5, mdbook@0.4.31, measureme@10.1.1, memchr@2.6.4, memmap2@0.2.3, memmap2@0.5.10, memmap2@0.7.1, memoffset@0.8.0, memoffset@0.9.0, mime@0.3.17, mime_guess@2.0.4, minifier@0.3.0, minimal-lexical@0.2.1, miniz_oxide@0.7.1, mio@0.8.8, native-tls@0.2.11, new_debug_unreachable@1.0.4, nohash-hasher@0.2.0, nom@7.1.3, normalize-line-endings@0.3.0, nu-ansi-term@0.46.0, num-traits@0.2.15, num_cpus@1.16.0, num_threads@0.1.6, number_prefix@0.4.0, object@0.32.0, odht@0.3.1, once_cell@1.18.0, oorandom@11.1.3, opener@0.5.2, opener@0.6.1, openssl-macros@0.1.1, openssl-probe@0.1.5, openssl-sys@0.9.92, openssl@0.10.57, ordered-float@2.10.0, orion@0.17.4, os_info@3.7.0, overload@0.1.1, owo-colors@3.5.0, p384@0.13.0, packed_simd@0.3.9, pad@0.1.6, papergrid@0.10.0, parking_lot@0.11.2, parking_lot@0.12.1, parking_lot_core@0.8.6, parking_lot_core@0.9.8, partial_ref@0.3.3, partial_ref_derive@0.3.3, pasetors@0.6.7, pathdiff@0.2.1, pem-rfc7468@0.7.0, percent-encoding@2.3.0, perf-event-open-sys@1.0.1, perf-event-open-sys@3.0.0, perf-event@0.4.7, pest@2.7.0, pest_derive@2.7.0, pest_generator@2.7.0, pest_meta@2.7.0, phf@0.10.1, phf_codegen@0.10.0, phf_generator@0.10.0, phf_shared@0.10.0, pin-project-lite@0.2.10, pin-utils@0.1.0, pkcs8@0.10.2, pkg-config@0.3.27, plotters-backend@0.3.4, plotters-svg@0.3.3, plotters@0.3.4, polonius-engine@0.13.0, portable-atomic@1.5.1, ppv-lite86@0.2.17, precomputed-hash@0.1.1, pretty_assertions@1.4.0, prettydiff@0.6.4, primeorder@0.13.2, proc-macro-hack@0.5.20+deprecated, proc-macro2@1.0.67, prodash@26.2.2, proptest@1.3.1, psm@0.1.21, pulldown-cmark@0.9.3, punycode@0.4.1, quick-error@1.2.3, quick-error@2.0.1, quine-mc_cluskey@0.2.4, quote@1.0.32, r-efi-alloc@1.0.0, r-efi@4.2.0, ra-ap-rustc_lexer@0.10.0, rand@0.8.5, rand_chacha@0.3.1, rand_core@0.6.4, rand_xorshift@0.3.0, rand_xoshiro@0.6.0, rayon-core@1.12.0, rayon@1.8.0, redox_syscall@0.2.16, redox_syscall@0.3.5, redox_syscall@0.4.1, redox_users@0.4.3, regex-automata@0.1.10, regex-automata@0.2.0, regex-automata@0.3.8, regex-syntax@0.6.29, regex-syntax@0.7.2, regex@1.8.4, reqwest@0.11.18, rfc6979@0.4.0, rowan@0.15.11, rustc-demangle@0.1.23, rustc-hash@1.1.0, rustc-rayon-core@0.5.0, rustc-rayon@0.5.0, rustc-semver@1.1.0, rustc_apfloat@0.2.0+llvm-462a31f5a5ab, rustc_tools_util@0.3.0, 
rustc_version@0.4.0, rustfix@0.6.1, rustix@0.38.21, rustversion@1.0.12, rusty-fork@0.3.0, ruzstd@0.4.0, ryu@1.0.13, same-file@1.0.6, scoped-tls@1.0.1, scopeguard@1.1.0, sec1@0.7.2, security-framework-sys@2.9.0, security-framework@2.9.2, self_cell@0.10.2, semver@1.0.20, serde-untagged@0.1.1, serde-value@0.7.0, serde@1.0.190, serde_derive@1.0.190, serde_ignored@0.1.9, serde_json@1.0.108, serde_repr@0.1.12, serde_spanned@0.6.4, serde_urlencoded@0.7.1, sha1@0.10.6, sha1_smol@1.0.0, sha2@0.10.8, sharded-slab@0.1.4, shell-escape@0.1.5, shlex@1.1.0, signature@2.1.0, similar@2.2.1, siphasher@0.3.10, sized-chunks@0.6.5, slab@0.4.8, smallvec@1.11.0, smol_str@0.2.0, snap@1.1.0, snapbox-macros@0.3.6, snapbox@0.4.14, socket2@0.4.9, spdx-expression@0.5.2, spdx-rs@0.5.3, spki@0.7.2, stable_deref_trait@1.2.0, stacker@0.1.15, static_assertions@1.1.0, string_cache@0.8.7, string_cache_codegen@0.5.2, strsim@0.10.0, strum@0.24.1, strum_macros@0.24.3, subtle@2.5.0, supports-hyperlinks@2.1.0, syn@1.0.109, syn@2.0.38, synstructure@0.12.6, synstructure@0.13.0, sysinfo@0.26.7, sysinfo@0.29.2, tabled@0.13.0, tar@0.4.38, tar@0.4.40, tempfile@3.8.1, tendril@0.4.3, term@0.7.0, termcolor@1.2.0, terminal_size@0.3.0, termize@0.1.1, tester@0.9.1, text-size@1.1.0, thin-vec@0.2.12, thiserror-core-impl@1.0.38, thiserror-core@1.0.38, thiserror-impl@1.0.50, thiserror@1.0.50, thorin-dwp@0.7.0, thread_local@1.1.7, threadpool@1.8.1, time-core@0.1.2, time-macros@0.2.15, time@0.3.29, tinystr@0.7.4, tinytemplate@1.2.1, tinyvec@1.6.0, tinyvec_macros@0.1.1, tokio-native-tls@0.3.1, tokio-util@0.7.2, tokio@1.29.1, toml@0.5.11, toml@0.7.5, toml@0.8.6, toml_datetime@0.6.5, toml_edit@0.19.11, toml_edit@0.20.7, topological-sort@0.2.2, tower-service@0.3.2, tracing-attributes@0.1.27, tracing-core@0.1.30, tracing-core@0.1.32, tracing-error@0.2.0, tracing-log@0.1.3, tracing-subscriber@0.3.17, tracing-tree@0.2.4, tracing@0.1.37, tracing@0.1.40, triomphe@0.1.8, try-lock@0.2.4, twox-hash@1.6.3, type-map@0.4.0, typenum@1.16.0, ucd-parse@0.1.10, ucd-trie@0.1.5, ui_test@0.21.2, unarray@0.1.4, ungrammar@1.16.1, unic-langid-impl@0.9.1, unic-langid-macros-impl@0.9.1, unic-langid-macros@0.9.1, unic-langid@0.9.1, unicase@2.7.0, unicode-bidi@0.3.13, unicode-bom@2.0.2, unicode-ident@1.0.9, unicode-normalization@0.1.22, unicode-properties@0.1.0, unicode-script@0.5.5, unicode-security@0.1.0, unicode-segmentation@1.10.1, unicode-width@0.1.11, unicode-xid@0.2.4, unified-diff@0.2.1, url@2.4.1, utf-8@0.7.6, utf8parse@0.2.1, uuid@1.4.0, valuable@0.1.0, varisat-checker@0.2.2, varisat-dimacs@0.2.2, varisat-formula@0.2.2, varisat-internal-macros@0.2.2, varisat-internal-proof@0.2.2, varisat@0.2.2, vcpkg@0.2.15, vec_mut_scan@0.3.0, version_check@0.9.4, wait-timeout@0.2.0, walkdir@2.4.0, want@0.3.1, wasi@0.11.0+wasi-snapshot-preview1, wasm-bindgen-backend@0.2.87, wasm-bindgen-futures@0.4.34, wasm-bindgen-macro-support@0.2.87, wasm-bindgen-macro@0.2.87, wasm-bindgen-shared@0.2.87, wasm-bindgen@0.2.87, web-sys@0.3.64, winapi-i686-pc-windows-gnu@0.4.0, winapi-x86_64-pc-windows-gnu@0.4.0, winapi@0.3.9, winnow@0.4.7, winnow@0.5.15, write-json@0.1.2, writeable@0.5.3, xattr@0.2.3, xflags-macros@0.3.1, xflags@0.3.1, xshell-macros@0.2.3, xshell@0.2.3, xz2@0.1.7, xz@0.1.0, yansi-term@0.1.2, yansi@0.5.1, yoke-derive@0.7.2, yoke@0.7.2, zerofrom-derive@0.1.3, zerofrom@0.1.3, zeroize@1.6.0, zerovec-derive@0.10.0, zerovec@0.10.0, zip@0.6.6 Package: rustc Architecture: any @@ -88,7 +88,7 @@ generic programming and meta-programming, in both static and dynamic styles. 
-Package: libstd-rust-1.74 +Package: libstd-rust-1.75 Section: libs Architecture: any Multi-Arch: same @@ -115,7 +115,7 @@ Architecture: any Multi-Arch: same Depends: ${shlibs:Depends}, ${misc:Depends}, - libstd-rust-1.74 (= ${binary:Version}), + libstd-rust-1.75 (= ${binary:Version}), Description: Rust standard libraries - development files Rust is a curly-brace, block-structured expression language. It visually resembles the C language family, but differs significantly @@ -222,7 +222,7 @@ Architecture: any Multi-Arch: allowed Depends: ${misc:Depends}, ${shlibs:Depends}, - libstd-rust-1.74 (= ${binary:Version}) + libstd-rust-1.75 (= ${binary:Version}) Recommends: cargo Description: Rust linter Rust is a curly-brace, block-structured expression language. It diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/copyright rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/copyright --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/copyright 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/copyright 2024-02-14 22:09:28.000000000 +0000 @@ -51,37 +51,35 @@ # unused dependencies, generated by debian/prune-unused-deps # DO NOT EDIT below, AUTOGENERATED vendor/addr2line-0.19.0 + vendor/aes vendor/aho-corasick-0.7.18 - vendor/anstream-0.5.0 vendor/anstyle-1.0.0 - vendor/anstyle-1.0.1 vendor/anstyle-parse-0.2.0 vendor/anstyle-wincon-2.1.0 vendor/anstyle-wincon - vendor/anyhow-1.0.66 vendor/anyhow-1.0.71 vendor/anymap vendor/arbitrary vendor/backtrace-0.3.67 vendor/base64-0.21.2 vendor/bitflags-2.3.2 - vendor/bitflags-2.3.3 + vendor/bitflags-2.4.0 vendor/block-buffer-0.10.2 vendor/bstr-0.2.17 vendor/bstr-1.5.0 - vendor/bumpalo-3.11.1 + vendor/bumpalo-3.13.0 + vendor/cargo_metadata-0.18.0 vendor/cc-1.0.73 vendor/chalk-derive vendor/chalk-ir vendor/chalk-recursive vendor/chalk-solve - vendor/clap-4.2.4 + vendor/cipher vendor/clap-4.4.4 - vendor/clap_builder-4.2.4 vendor/clap_builder-4.4.4 - vendor/clap_complete-4.2.2 - vendor/clap_derive-4.2.0 - vendor/clap_lex-0.4.1 + vendor/clap_complete-4.3.1 + vendor/clap_derive-4.4.2 + vendor/clap_lex-0.5.0 vendor/command-group vendor/core-foundation-sys-0.8.3 vendor/cpufeatures-0.2.5 @@ -98,7 +96,6 @@ vendor/cranelift-module vendor/cranelift-native vendor/cranelift-object - vendor/crossbeam-channel-0.5.6 vendor/crossbeam-deque-0.8.2 vendor/crossbeam-epoch-0.9.13 vendor/crossbeam-utils-0.8.14 @@ -109,13 +106,14 @@ vendor/diff-0.1.12 vendor/digest-0.10.3 vendor/directories + vendor/dirs-sys vendor/dot vendor/either-1.6.1 vendor/encode_unicode vendor/encoding_rs-0.8.32 vendor/equivalent-1.0.0 - vendor/errno-0.3.0 - vendor/fallible-iterator-0.2.0 + vendor/errno-0.3.1 + vendor/errno-dragonfly vendor/filetime-0.2.16 vendor/filetime-0.2.21 vendor/fixedbitset @@ -123,37 +121,42 @@ vendor/fsevent-sys vendor/fst vendor/generic-array-0.14.5 - vendor/gimli-0.27.2 vendor/gimli-0.27.3 vendor/globset-0.4.10 vendor/globset-0.4.8 vendor/hashbrown-0.13.2 + vendor/hashbrown-0.14.0 vendor/heck-0.3.3 vendor/hermit-abi-0.2.6 vendor/hkalbasi-rustc-ap-rustc_abi vendor/hkalbasi-rustc-ap-rustc_index + vendor/home-0.5.4 vendor/ignore-0.4.18 vendor/inotify-sys vendor/inotify + vendor/inout vendor/is-terminal-0.4.8 vendor/itoa-1.0.2 vendor/jemalloc-sys + vendor/jobserver-0.1.26 vendor/junction vendor/kqueue-sys vendor/kqueue - vendor/libc-0.2.138 - vendor/libc-0.2.140 vendor/libc-0.2.146 + vendor/libc-0.2.148 + vendor/libc-0.2.149 vendor/libffi-sys vendor/libffi + vendor/libloading-0.8.0 vendor/libmimalloc-sys vendor/libnghttp2-sys vendor/linked-hash-map - vendor/linux-raw-sys-0.4.5 
vendor/log-0.4.17 + vendor/log-0.4.19 vendor/lzma-sys-0.1.17 vendor/mach vendor/memchr-2.5.0 + vendor/memchr-2.6.3 vendor/memoffset-0.7.1 vendor/mimalloc vendor/miniz_oxide-0.6.2 @@ -165,15 +168,14 @@ vendor/notify vendor/ntapi-0.4.0 vendor/ntapi - vendor/num_cpus-1.13.1 vendor/num_cpus-1.15.0 vendor/object-0.30.4 vendor/once_cell-1.12.0 - vendor/once_cell-1.16.0 vendor/opener-0.5.0 vendor/openssl-0.10.55 vendor/openssl-src vendor/openssl-sys-0.9.90 + vendor/option-ext vendor/parking_lot_core-0.9.6 vendor/paste vendor/pest-2.6.0 @@ -193,15 +195,15 @@ vendor/quote-1.0.29 vendor/ra-ap-rustc_index vendor/ra-ap-rustc_parse_format - vendor/rayon-1.6.0 - vendor/rayon-core-1.10.1 + vendor/rayon-1.7.0 + vendor/rayon-core-1.11.0 vendor/redox_syscall-0.2.13 vendor/regalloc2 vendor/regex-1.5.6 vendor/regex-syntax-0.6.26 vendor/region vendor/rustc-build-sysroot - vendor/rustix-0.38.6 + vendor/rustix-0.38.19 vendor/ryu-1.0.10 vendor/salsa-macros vendor/salsa @@ -218,14 +220,21 @@ vendor/serde_json-1.0.81 vendor/serde_json-1.0.97 vendor/serde_json-1.0.99 + vendor/serde_spanned-0.6.3 + vendor/sha1-0.10.5 vendor/sha2-0.10.2 + vendor/sha2-0.10.7 vendor/slice-group-by vendor/smallvec-1.10.0 vendor/syn-2.0.18 + vendor/syn-2.0.29 vendor/syn-2.0.8 vendor/target-lexicon + vendor/tempfile-3.8.0 vendor/thiserror-1.0.40 + vendor/thiserror-1.0.47 vendor/thiserror-impl-1.0.40 + vendor/thiserror-impl-1.0.47 vendor/thread_local-1.1.4 vendor/tikv-jemalloc-ctl vendor/tikv-jemalloc-sys @@ -234,44 +243,54 @@ vendor/time-core-0.1.1 vendor/time-macros-0.2.9 vendor/toml-0.5.9 - vendor/toml-0.7.5 - vendor/toml_edit-0.19.11 + vendor/toml_datetime-0.6.3 + vendor/tracing-attributes-0.1.26 + vendor/tracing-core-0.1.31 vendor/tracing-tree-0.2.3 vendor/typed-arena vendor/typenum-1.15.0 - vendor/ui_test vendor/unicase-2.6.0 vendor/unicode-ident-1.0.0 + vendor/unicode-width-0.1.10 vendor/url-2.4.0 vendor/walkdir-2.3.2 + vendor/walkdir-2.3.3 vendor/wasmtime-jit-icache-coherence vendor/web-sys-0.3.61 vendor/winapi-util - vendor/windows-0.46.0 + vendor/windows-0.48.0 vendor/windows-bindgen + vendor/windows-core vendor/windows-metadata vendor/windows-sys-0.42.0 vendor/windows-sys-0.45.0 vendor/windows-sys vendor/windows-targets-0.42.2 vendor/windows-targets-0.48.0 + vendor/windows-targets-0.48.1 vendor/windows-targets vendor/windows vendor/windows_aarch64_gnullvm-0.42.2 + vendor/windows_aarch64_gnullvm-0.48.0 vendor/windows_aarch64_gnullvm vendor/windows_aarch64_msvc-0.42.2 + vendor/windows_aarch64_msvc-0.48.0 vendor/windows_aarch64_msvc vendor/windows_i686_gnu-0.42.2 + vendor/windows_i686_gnu-0.48.0 vendor/windows_i686_gnu vendor/windows_i686_msvc-0.42.2 + vendor/windows_i686_msvc-0.48.0 vendor/windows_i686_msvc vendor/windows_x86_64_gnu-0.42.2 + vendor/windows_x86_64_gnu-0.48.0 vendor/windows_x86_64_gnu vendor/windows_x86_64_gnullvm-0.42.2 + vendor/windows_x86_64_gnullvm-0.48.0 vendor/windows_x86_64_gnullvm vendor/windows_x86_64_msvc-0.42.2 + vendor/windows_x86_64_msvc-0.48.0 vendor/windows_x86_64_msvc - vendor/winnow-0.4.7 vendor/winreg vendor/xz2-0.1.6 vendor/yaml-merge-keys @@ -341,6 +360,7 @@ vendor/anstream/* vendor/anstyle/* vendor/anstyle-parse/* + vendor/anstream-0.5.0/* Copyright: 2023 Ed Page License: MIT or Apache-2.0 @@ -447,6 +467,7 @@ vendor/git2-curl/* vendor/socket2/* vendor/toml-0.5.11/* + vendor/toml-0.7.5/* vendor/tar-0.4.38/* Copyright: 2014-2023 Alex Crichton 2015-2023 The Rust Project Developers @@ -706,6 +727,7 @@ vendor/itertools/* vendor/maplit/* vendor/scopeguard/* + vendor/itertools-0.10.5/* Copyright: 
2014-2020 bluss License: MIT or Apache-2.0 Comment: @@ -718,7 +740,7 @@ Files: vendor/dirs/* - vendor/dirs-sys/* + vendor/dirs-sys-0.3.7/* Copyright: 2015-2020 Simon Ochsenreither 2015-2020 dirs-rs contributors License: MIT OR Apache-2.0 @@ -763,11 +785,6 @@ License: MIT or Apache-2.0 Comment: see https://github.com/lambda-fairy/rust-errno -Files: vendor/errno-dragonfly/* -Copyright: 2017-2021 Michael Neumann -License: MIT -Comment: see https://github.com/mneumann/errno-dragonfly-rs - Files: vendor/expect-test/* Copyright: 2020-2022 rust-analyzer developers License: MIT OR Apache-2.0 @@ -878,6 +895,7 @@ Files: vendor/humantime/* vendor/humantime-1*/* + vendor/humantime-1.3.0/* Copyright: 2016-2018 Paul Colomiets 2016 The humantime Developers @@ -924,11 +942,6 @@ License: Apache-2.0 or MIT Comment: see https://github.com/zbraniecki/pluralrules -Files: vendor/io-lifetimes/* -Copyright: 2021-2022 Dan Gohman -License: Apache-2.0 with LLVM exception OR Apache-2.0 OR MIT -Comment: see https://github.com/sunfishcode/io-lifetimes - Files: vendor/is-terminal/* Copyright: 2022-2023 softprops 2022-2023 Dan Gohman @@ -954,21 +967,18 @@ see https://github.com/Kimundi/owning-ref-rs Files: vendor/libloading/* - vendor/libloading-0.7.4/* vendor/libloading-0.*/* Copyright: 2015-2022 Simonas Kazlauskas License: ISC Comment: see https://github.com/nagisa/rust_libloading/ Files: vendor/libm/* - vendor/libm-0.*/* Copyright: 2018-2021 Jorge Aparicio License: MIT OR Apache-2.0 Comment: see https://github.com/rust-lang-nursery/libm Files: vendor/linux-raw-sys/* - vendor/linux-raw-sys-0.*/* Copyright: 2021-2022 Dan Gohman License: Apache-2.0 with LLVM exception OR Apache-2.0 OR MIT Comment: see https://github.com/sunfishcode/linux-raw-sys @@ -1103,12 +1113,6 @@ see https://github.com/Amanieu/thread_local-rs see https://github.com/Amanieu/parking_lot -Files: vendor/packed_simd_2/* -Copyright: 2018-2021 Gonzalo Brito Gadeschi - 2018-2021 Jubilee Young -License: MIT or Apache-2.0 -Comment: see https://github.com/rust-lang-nursery/packed_simd - Files: vendor/pathdiff/* Copyright: 2017-2020 Manish Goregaokar License: MIT or Apache-2.0 @@ -1212,6 +1216,7 @@ Files: vendor/redox_syscall/* vendor/redox_syscall-0.2.16/* + vendor/redox_syscall-0.3.5/* Copyright: 2016-2021 Jeremy Soller License: MIT Comment: @@ -1254,7 +1259,6 @@ Files: vendor/rustix/* - vendor/rustix-0.37.6/* Copyright: 2020-2023 Dan Gohman 2020-2023 Jakub Konka License: Apache-2.0 with LLVM exception OR Apache-2.0 OR MIT @@ -1472,6 +1476,7 @@ vendor/tracing-log/* vendor/tracing-subscriber/* vendor/tracing-core-0.1.30/* + vendor/tracing-0.1.37/* Copyright: 2018-2020 Eliza Weisman 2018-2020 Tokio Contributors @@ -1525,11 +1530,6 @@ License: MIT or Apache-2.0 Comment: see https://github.com/seanmonstar/unicase -Files: vendor/unicode_categories/* -Copyright: 2015-2016 Sean Gillespie -License: MIT OR Apache-2.0 -Comment: see https://github.com/swgillespie/unicode-categories - Files: vendor/unic-*/* Copyright: 2017-2022 The UNIC Project Developers License: MIT or Apache-2.0 @@ -2024,6 +2024,7 @@ vendor/gix-packetline-blocking/* vendor/gix-filter/* vendor/gix-trace/* + vendor/gix-features-0.35.0/* Copyright: 2023 Sebastian Thiel License: MIT OR Apache-2.0 Comment: see https://github.com/Byron/gitoxide @@ -2039,6 +2040,7 @@ Comment: see https://github.com/contain-rs/bit-vec Files: vendor/winnow/* + vendor/winnow-0.4.7/* Copyright: 2023 Ed Page License: MIT Comment: see https://github.com/winnow-rs/winnow @@ -2222,6 +2224,7 @@ Relevant discussion in 
https://github.com/rust-lang/rust/issues/11562 Files: vendor/toml_edit/* + vendor/toml_edit-0.19.11/* Copyright: 2017-2023 Andronik Ordian 2017-2023 Ed Page License: MIT OR Apache-2.0 @@ -2408,12 +2411,6 @@ License: Apache-2.0 OR MIT Comment: see https://github.com/paritytech/nohash-hasher -Files: vendor/ui_test-0.*/* -Copyright: 2022-2023 Oli Scherer - 2022-2023 Ralf Jung -License: MIT OR Apache-2.0 -Comment: see https://github.com/oli-obk/ui_test - Files: vendor/tracing-error/* Copyright: 2020-2021 Eliza Weisman 2020-2021 Jane Lusby @@ -2728,11 +2725,6 @@ License: MIT OR Apache-2.0 Comment: see https://github.com/colin-kiegel/rust-derive-builder -Files: vendor/byteyarn/* -Copyright: 2023 Miguel Young de la Sota -License: Apache-2.0 -Comment: see https://github.com/mcy/byteyarn - Files: vendor/pad/* Copyright: 2015-2019 Ben S License: MIT @@ -2793,6 +2785,65 @@ License: MIT OR Apache-2.0 Comment: see https://gitlab.com/yolenoyer/color-print +Files: vendor/clap-cargo/* +Copyright: 2019-2023 Ed Page +License: MIT OR Apache-2.0 +Comment: see https://github.com/crate-ci/clap-cargo + +Files: vendor/papergrid/* +Copyright: 2020-2023 Maxim Zhiburt +License: MIT +Comment: see https://github.com/zhiburt/tabled + +Files: vendor/tabled/* +Copyright: 2020-2023 Maxim Zhiburt +License: MIT +Comment: see https://github.com/zhiburt/tabled + +Files: vendor/icu_locid_transform/* +Copyright: 2022-2023 The ICU4X Project Developers +License: Unicode-DFS-2016 +Comment: see https://github.com/unicode-org/icu4x + +Files: vendor/ui_test/* +Copyright: 2022-2023 Oli Scherer + 2022-2023 Ralf Jung +License: MIT OR Apache-2.0 +Comment: see https://github.com/oli-obk/ui_test + +Files: vendor/icu_locid_transform_data/* +Copyright: 2023 The ICU4X Project Developers +License: Unicode-DFS-2016 +Comment: see https://github.com/unicode-org/icu4x + +Files: vendor/icu_list_data/* +Copyright: 2023 The ICU4X Project Developers +License: Unicode-DFS-2016 +Comment: see https://github.com/unicode-org/icu4x + +Files: vendor/supports-hyperlinks/* +Copyright: 2021-2023 Kat Marchán +License: Apache-2.0 +Comment: see https://github.com/zkat/supports-hyperlinks + +Files: vendor/derivative/* +Copyright: 2016-2021 mcarton +License: MIT OR Apache-2.0 +Comment: see https://github.com/mcarton/rust-derivative + +Files: vendor/kstring/* +Copyright: 2020-2022 Ed Page +License: MIT OR Apache-2.0 +Comment: see https://github.com/cobalt-org/kstring + +Files: vendor/packed_simd/* +Copyright: 2018-2023 gnzlbg + 2018-2023 rust-lang-owner + 2018-2023 Caleb Zulawski + 2018-2023 Jubilee +License: MIT OR Apache-2.0 +Comment: see https://github.com/rust-lang/packed_simd + License: 0BSD Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted. 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.74.install rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.74.install --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.74.install 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.74.install 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -usr/lib/${DEB_HOST_MULTIARCH}/* diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.74.lintian-overrides rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.74.lintian-overrides --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.74.lintian-overrides 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.74.lintian-overrides 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -# "libstd" just seemed too generic -libstd-rust-1.73 binary: package-name-doesnt-match-sonames -libstd-rust-1.73 binary: sharedobject-in-library-directory-missing-soname - -# Rust doesn't use dev shlib symlinks nor any of the other shlib support stuff -libstd-rust-1.73 binary: dev-pkg-without-shlib-symlink -libstd-rust-1.73 binary: shlib-without-versioned-soname -libstd-rust-1.73 binary: unused-shlib-entry-in-control-file - -# Libraries that use libc symbols (libterm, libstd, etc) *are* linked -# to libc. Lintian gets upset that some Rust libraries don't need -# libc, boo hoo! -libstd-rust-1.73 binary: library-not-linked-against-libc diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.75.install rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.75.install --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.75.install 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.75.install 2024-02-14 22:09:28.000000000 +0000 @@ -0,0 +1 @@ +usr/lib/${DEB_HOST_MULTIARCH}/* diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.75.lintian-overrides rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.75.lintian-overrides --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.75.lintian-overrides 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/libstd-rust-1.75.lintian-overrides 2024-02-14 22:09:28.000000000 +0000 @@ -0,0 +1,13 @@ +# "libstd" just seemed too generic +libstd-rust-1.75 binary: package-name-doesnt-match-sonames +libstd-rust-1.75 binary: sharedobject-in-library-directory-missing-soname + +# Rust doesn't use dev shlib symlinks nor any of the other shlib support stuff +libstd-rust-1.75 binary: dev-pkg-without-shlib-symlink +libstd-rust-1.75 binary: shlib-without-versioned-soname +libstd-rust-1.75 binary: unused-shlib-entry-in-control-file + +# Libraries that use libc symbols (libterm, libstd, etc) *are* linked +# to libc. Lintian gets upset that some Rust libraries don't need +# libc, boo hoo! 
+libstd-rust-1.75 binary: library-not-linked-against-libc diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-0000-ignore-removed-submodules.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-0000-ignore-removed-submodules.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-0000-ignore-removed-submodules.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-0000-ignore-removed-submodules.patch 2024-02-14 22:09:28.000000000 +0000 @@ -65,8 +65,8 @@ args.append("build-metrics") Index: rustc/src/bootstrap/builder.rs =================================================================== ---- rustc.orig/src/bootstrap/builder.rs -+++ rustc/src/bootstrap/builder.rs +--- rustc.orig/src/bootstrap/src/core/builder.rs ++++ rustc/src/bootstrap/src/core/builder.rs @@ -686,12 +686,8 @@ impl<'a> Builder<'a> { tool::Linkchecker, tool::CargoTest, @@ -177,8 +177,8 @@ Index: rustc/src/bootstrap/test.rs =================================================================== ---- rustc.orig/src/bootstrap/test.rs -+++ rustc/src/bootstrap/test.rs +--- rustc.orig/src/bootstrap/src/core/build_steps/test.rs ++++ rustc/src/bootstrap/src/core/build_steps/test.rs @@ -2167,17 +2167,7 @@ impl Step for RustcGuide { } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-0005-no-jemalloc.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-0005-no-jemalloc.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-0005-no-jemalloc.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-0005-no-jemalloc.patch 2024-02-14 22:09:28.000000000 +0000 @@ -11,9 +11,9 @@ =================================================================== --- rustc.orig/compiler/rustc/Cargo.toml +++ rustc/compiler/rustc/Cargo.toml -@@ -15,13 +15,7 @@ rustc_codegen_ssa = { path = "../rustc_c - rustc_smir = { path = "../rustc_smir" } +@@ -20,14 +20,8 @@ rustc_smir = { path = "../rustc_smir" } stable_mir = { path = "../stable_mir" } + # tidy-alphabetical-end -[dependencies.jemalloc-sys] -version = "0.5.0" @@ -21,6 +21,7 @@ -features = ['unprefixed_malloc_on_supported_platforms'] - [features] + # tidy-alphabetical-start -jemalloc = ['jemalloc-sys'] llvm = ['rustc_driver_impl/llvm'] max_level_info = ['rustc_driver_impl/max_level_info'] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-0020-remove-windows-dependencies.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-0020-remove-windows-dependencies.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-0020-remove-windows-dependencies.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-0020-remove-windows-dependencies.patch 2024-02-14 22:09:28.000000000 +0000 @@ -2,7 +2,7 @@ =================================================================== --- rustc.orig/src/tools/compiletest/Cargo.toml +++ rustc/src/tools/compiletest/Cargo.toml -@@ -28,13 +28,3 @@ home = "0.5.5" +@@ -29,13 +29,3 @@ home = "0.5.5" [target.'cfg(unix)'.dependencies] libc = "0.2" @@ -54,34 +54,26 @@ +++ rustc/src/tools/cargo/Cargo.toml @@ -24,8 +24,6 @@ bytesize = "1.3" cargo = { path = "" } - cargo-credential = { version = "0.4.0", path = "credential/cargo-credential" } - cargo-credential-libsecret = { version = "0.3.1", path = "credential/cargo-credential-libsecret" } --cargo-credential-wincred = { version = "0.3.0", path = "credential/cargo-credential-wincred" } --cargo-credential-macos-keychain = { version = "0.3.0", path = "credential/cargo-credential-macos-keychain" } + 
cargo-credential = { version = "0.4.1", path = "credential/cargo-credential" } + cargo-credential-libsecret = { version = "0.4.1", path = "credential/cargo-credential-libsecret" } +-cargo-credential-macos-keychain = { version = "0.4.1", path = "credential/cargo-credential-macos-keychain" } +-cargo-credential-wincred = { version = "0.4.1", path = "credential/cargo-credential-wincred" } cargo-platform = { path = "crates/cargo-platform", version = "0.1.4" } cargo-test-macro = { path = "crates/cargo-test-macro" } cargo-test-support = { path = "crates/cargo-test-support" } -@@ -101,7 +99,6 @@ unicode-xid = "0.2.4" - url = "2.4.1" - varisat = "0.2.2" - walkdir = "2.3.3" --windows-sys = "0.48" - - [package] - name = "cargo" -@@ -128,8 +125,6 @@ bytesize.workspace = true - cargo-platform.workspace = true - cargo-credential.workspace = true +@@ -189,27 +187,9 @@ walkdir.workspace = true + [target.'cfg(target_os = "linux")'.dependencies] cargo-credential-libsecret.workspace = true + +-[target.'cfg(target_os = "macos")'.dependencies] -cargo-credential-macos-keychain.workspace = true --cargo-credential-wincred.workspace = true - cargo-util.workspace = true - color-print.workspace = true - clap = { workspace = true, features = ["wrap_help"] } -@@ -189,18 +184,6 @@ walkdir.workspace = true +- [target.'cfg(not(windows))'.dependencies] openssl = { workspace = true, optional = true } +-[target.'cfg(windows)'.dependencies] +-cargo-credential-wincred.workspace = true +- -[target.'cfg(windows)'.dependencies.windows-sys] -workspace = true -features = [ @@ -124,7 +116,7 @@ =================================================================== --- rustc.orig/src/tools/cargo/crates/home/Cargo.toml +++ rustc/src/tools/cargo/crates/home/Cargo.toml -@@ -14,6 +14,3 @@ include = [ +@@ -15,6 +15,3 @@ include = [ license.workspace = true repository = "https://github.com/rust-lang/cargo" description = "Shared definitions of home directories." 
@@ -144,26 +136,11 @@ - [dev-dependencies] snapbox = { workspace = true, features = ["examples"] } -Index: rustc/src/tools/cargo/src/cargo/util/auth/mod.rs -=================================================================== ---- rustc.orig/src/tools/cargo/src/cargo/util/auth/mod.rs -+++ rustc/src/tools/cargo/src/cargo/util/auth/mod.rs -@@ -529,8 +529,8 @@ fn credential_action( - } - "cargo:paseto" => bail!("cargo:paseto requires -Zasymmetric-token"), - "cargo:token-from-stdout" => Box::new(BasicProcessCredential {}), -- "cargo:wincred" => Box::new(cargo_credential_wincred::WindowsCredential {}), -- "cargo:macos-keychain" => Box::new(cargo_credential_macos_keychain::MacKeychain {}), -+ "cargo:wincred" => unreachable!(), -+ "cargo:macos-keychain" => unreachable!(), - "cargo:libsecret" => Box::new(cargo_credential_libsecret::LibSecretCredential {}), - process => Box::new(CredentialProcessCredential::new(process)), - }; Index: rustc/compiler/rustc_codegen_ssa/Cargo.toml =================================================================== --- rustc.orig/compiler/rustc_codegen_ssa/Cargo.toml +++ rustc/compiler/rustc_codegen_ssa/Cargo.toml -@@ -45,7 +45,3 @@ libc = "0.2.50" +@@ -49,7 +49,3 @@ libc = "0.2.50" version = "0.32.0" default-features = false features = ["read_core", "elf", "macho", "pe", "xcoff", "unaligned", "archive", "write"] @@ -175,7 +152,7 @@ =================================================================== --- rustc.orig/compiler/rustc_data_structures/Cargo.toml +++ rustc/compiler/rustc_data_structures/Cargo.toml -@@ -37,16 +37,6 @@ itertools = "0.10.1" +@@ -32,16 +32,6 @@ tracing = "0.1" [dependencies.parking_lot] version = "0.12" @@ -190,15 +167,15 @@ -] - [target.'cfg(not(target_arch = "wasm32"))'.dependencies] + # tidy-alphabetical-start memmap2 = "0.2.1" - Index: rustc/compiler/rustc_driver_impl/Cargo.toml =================================================================== --- rustc.orig/compiler/rustc_driver_impl/Cargo.toml +++ rustc/compiler/rustc_driver_impl/Cargo.toml -@@ -57,12 +57,6 @@ rustc_mir_transform = { path = "../rustc - [target.'cfg(unix)'.dependencies] +@@ -58,12 +58,6 @@ tracing = { version = "0.1.35" } libc = "0.2" + # tidy-alphabetical-end -[target.'cfg(windows)'.dependencies.windows] -version = "0.48.0" @@ -207,15 +184,15 @@ -] - [features] + # tidy-alphabetical-start llvm = ['rustc_interface/llvm'] - max_level_info = ['rustc_log/max_level_info'] Index: rustc/compiler/rustc_errors/Cargo.toml =================================================================== --- rustc.orig/compiler/rustc_errors/Cargo.toml +++ rustc/compiler/rustc_errors/Cargo.toml -@@ -27,13 +27,5 @@ serde = { version = "1.0.125", features - serde_json = "1.0.59" - derive_setters = "0.1.6" +@@ -27,14 +27,6 @@ tracing = "0.1" + unicode-width = "0.1.4" + # tidy-alphabetical-end -[target.'cfg(windows)'.dependencies.windows] -version = "0.48.0" @@ -226,15 +203,17 @@ -] - [features] + # tidy-alphabetical-start rustc_use_parallel_compiler = ['rustc_error_messages/rustc_use_parallel_compiler'] Index: rustc/compiler/rustc_session/Cargo.toml =================================================================== --- rustc.orig/compiler/rustc_session/Cargo.toml +++ rustc/compiler/rustc_session/Cargo.toml -@@ -24,10 +24,3 @@ termize = "0.1.1" - +@@ -27,11 +27,3 @@ tracing = "0.1" [target.'cfg(unix)'.dependencies] + # tidy-alphabetical-start libc = "0.2" +-# tidy-alphabetical-end - -[target.'cfg(windows)'.dependencies.windows] -version = "0.48.0" @@ -246,9 +225,9 @@ 
=================================================================== --- rustc.orig/library/backtrace/Cargo.toml +++ rustc/library/backtrace/Cargo.toml -@@ -47,9 +47,6 @@ version = "0.32.0" +@@ -48,9 +48,6 @@ version = "0.32.0" default-features = false - features = ['read_core', 'elf', 'macho', 'pe', 'unaligned', 'archive'] + features = ['read_core', 'elf', 'macho', 'pe', 'xcoff', 'unaligned', 'archive'] -[target.'cfg(windows)'.dependencies] -winapi = { version = "0.3.9", optional = true } @@ -256,7 +235,7 @@ [build-dependencies] # Only needed for Android, but cannot be target dependent # https://github.com/rust-lang/cargo/issues/4932 -@@ -87,16 +84,6 @@ libbacktrace = [] +@@ -88,16 +85,6 @@ libbacktrace = [] libunwind = [] unix-backtrace = [] verify-winapi = [ @@ -277,15 +256,15 @@ =================================================================== --- rustc.orig/src/bootstrap/Cargo.toml +++ rustc/src/bootstrap/Cargo.toml -@@ -62,21 +62,6 @@ semver = "1.0.17" +@@ -66,21 +66,6 @@ sysinfo = { version = "0.26.0", optional [target.'cfg(not(target_os = "solaris"))'.dependencies] - fd-lock = "3.0.8" + fd-lock = "3.0.13" -[target.'cfg(windows)'.dependencies.junction] -version = "1.0.0" - -[target.'cfg(windows)'.dependencies.windows] --version = "0.46.0" +-version = "0.51.1" -features = [ - "Win32_Foundation", - "Win32_Security", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-0021-vendor-remove-windows-dependencies.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-0021-vendor-remove-windows-dependencies.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-0021-vendor-remove-windows-dependencies.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-0021-vendor-remove-windows-dependencies.patch 2024-02-14 22:09:28.000000000 +0000 @@ -216,25 +216,14 @@ -features = ["knownfolders", "objbase", "shlobj", "winbase", "winerror"] [badges.maintenance] status = "as-is" -Index: rustc/vendor/dirs-sys/Cargo.toml -=================================================================== ---- rustc.orig/vendor/dirs-sys/Cargo.toml -+++ rustc/vendor/dirs-sys/Cargo.toml -@@ -22,6 +22,3 @@ version = "0.4" - default-features = false - [target."cfg(unix)".dependencies.libc] - version = "0.2" --[target."cfg(windows)".dependencies.winapi] --version = "0.3" --features = ["knownfolders", "objbase", "shlobj", "winbase", "winerror"] Index: rustc/vendor/errno/Cargo.toml =================================================================== --- rustc.orig/vendor/errno/Cargo.toml +++ rustc/vendor/errno/Cargo.toml -@@ -40,10 +40,3 @@ version = "0.2" - +@@ -40,10 +40,3 @@ default-features = false [target."cfg(unix)".dependencies.libc] version = "0.2" + default-features = false - -[target."cfg(windows)".dependencies.windows-sys] -version = "0.48" @@ -248,11 +237,11 @@ +++ rustc/vendor/fd-lock/Cargo.toml @@ -43,11 +43,3 @@ version = "3.0.8" [target."cfg(unix)".dependencies.rustix] - version = "0.37.0" + version = "0.38.0" features = ["fs"] - -[target."cfg(windows)".dependencies.windows-sys] --version = "0.45.0" +-version = "0.48.0" -features = [ - "Win32_Foundation", - "Win32_Storage_FileSystem", @@ -343,34 +332,6 @@ - -[target."cfg(windows)".dependencies.winapi-util] -version = "0.1.2" -Index: rustc/vendor/io-lifetimes/Cargo.toml -=================================================================== ---- rustc.orig/vendor/io-lifetimes/Cargo.toml -+++ rustc/vendor/io-lifetimes/Cargo.toml -@@ -43,7 +43,6 @@ optional = true - close = [ - "libc", - "hermit-abi", -- "windows-sys", - 
] - default = ["close"] - -@@ -85,15 +84,3 @@ optional = true - [target."cfg(target_os = \"hermit\")".dependencies.hermit-abi] - version = "0.3" - optional = true -- --[target."cfg(windows)".dependencies.windows-sys] --version = "0.45.0" --features = [ -- "Win32_Foundation", -- "Win32_Storage_FileSystem", -- "Win32_Networking_WinSock", -- "Win32_Security", -- "Win32_System_IO", -- "Win32_System_Threading", --] --optional = true Index: rustc/vendor/is-terminal/Cargo.toml =================================================================== --- rustc.orig/vendor/is-terminal/Cargo.toml @@ -589,31 +550,11 @@ - -[target."cfg(windows)".dependencies.winreg] -version = "0.10" -Index: rustc/vendor/rustix-0.37.6/Cargo.toml -=================================================================== ---- rustc.orig/vendor/rustix-0.37.6/Cargo.toml -+++ rustc/vendor/rustix-0.37.6/Cargo.toml -@@ -233,15 +233,3 @@ package = "errno" - [target."cfg(any(target_os = \"android\", target_os = \"linux\"))".dependencies.once_cell] - version = "1.5.2" - optional = true -- --[target."cfg(windows)".dependencies.windows-sys] --version = "0.45.0" --features = [ -- "Win32_Foundation", -- "Win32_Networking_WinSock", -- "Win32_NetworkManagement_IpHelper", -- "Win32_System_Threading", --] -- --[target."cfg(windows)".dev-dependencies.ctor] --version = "0.1.21" Index: rustc/vendor/rustix/Cargo.toml =================================================================== --- rustc.orig/vendor/rustix/Cargo.toml +++ rustc/vendor/rustix/Cargo.toml -@@ -196,7 +196,6 @@ thread = ["linux-raw-sys/prctl"] +@@ -199,7 +199,6 @@ thread = ["linux-raw-sys/prctl"] time = [] use-explicitly-provided-auxv = [] use-libc = [ @@ -621,7 +562,7 @@ "libc", ] use-libc-auxv = [] -@@ -249,20 +248,3 @@ package = "errno" +@@ -252,20 +251,3 @@ package = "errno" [target."cfg(any(target_os = \"android\", target_os = \"linux\"))".dependencies.once_cell] version = "1.5.2" optional = true @@ -809,7 +750,7 @@ @@ -50,10 +50,3 @@ features = ["fs"] [target."cfg(target_os = \"redox\")".dependencies.redox_syscall] - version = "0.3" + version = "0.4" - -[target."cfg(windows)".dependencies.windows-sys] -version = "0.48" @@ -937,3 +878,28 @@ [badges.appveyor] repository = "BurntSushi/walkdir" +Index: rustc/vendor/anstream-0.5.0/Cargo.toml +=================================================================== +--- rustc.orig/vendor/anstream-0.5.0/Cargo.toml ++++ rustc/vendor/anstream-0.5.0/Cargo.toml +@@ -133,8 +133,4 @@ default = [ + "auto", + "wincon", + ] +-wincon = ["dep:anstyle-wincon"] +- +-[target."cfg(windows)".dependencies.anstyle-wincon] +-version = "2.0.0" +-optional = true ++wincon = [] +Index: rustc/vendor/dirs-sys-0.3.7/Cargo.toml +=================================================================== +--- rustc.orig/vendor/dirs-sys-0.3.7/Cargo.toml ++++ rustc/vendor/dirs-sys-0.3.7/Cargo.toml +@@ -22,6 +22,3 @@ version = "0.4" + default-features = false + [target."cfg(unix)".dependencies.libc] + version = "0.2" +-[target."cfg(windows)".dependencies.winapi] +-version = "0.3" +-features = ["knownfolders", "objbase", "shlobj", "winbase", "winerror"] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-armel-fix-lldb.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-armel-fix-lldb.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-armel-fix-lldb.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-armel-fix-lldb.patch 2024-02-14 22:09:28.000000000 +0000 @@ -2,8 +2,8 @@ diff --git a/src/bootstrap/test.rs 
b/src/bootstrap/test.rs index c0fa8c9acb..2b5559efc7 100644 ---- a/src/bootstrap/test.rs -+++ b/src/bootstrap/test.rs +--- a/src/bootstrap/src/core/build_steps/test.rs ++++ b/src/bootstrap/src/core/build_steps/test.rs @@ -1476,7 +1476,11 @@ note: if you're sure you want to do this, please open an issue as to why. In the .ok(); if let Some(ref vers) = lldb_version { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-cargo-check-cfg.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-cargo-check-cfg.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-cargo-check-cfg.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-cargo-check-cfg.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -our cargo doesn't know about the 'output' part yet, this patch can be dropped -with cargo >= 0.64 - -diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs -index 0ab4824ac0a..76c476f449b 100644 ---- a/src/bootstrap/builder.rs -+++ b/src/bootstrap/builder.rs -@@ -1472,9 +1472,9 @@ impl<'a> Builder<'a> { - // complete list of features, so for that reason we don't enable checking of - // features for std crates. - cargo.arg(if mode != Mode::Std { -- "-Zcheck-cfg=names,values,output,features" -+ "-Zcheck-cfg=names,values,features" - } else { -- "-Zcheck-cfg=names,values,output" -+ "-Zcheck-cfg=names,values" - }); - - // Add extra cfg not defined in/by rustc diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-disable-git.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-disable-git.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-disable-git.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-disable-git.patch 2024-02-14 22:09:28.000000000 +0000 @@ -10,11 +10,11 @@ src/bootstrap/dist.rs | 5 ++++- 2 files changed, 9 insertions(+), 2 deletions(-) -Index: rustc/src/bootstrap/channel.rs +Index: rustc/src/bootstrap/src/utils/channel.rs =================================================================== ---- rustc.orig/src/bootstrap/channel.rs -+++ rustc/src/bootstrap/channel.rs -@@ -37,7 +37,11 @@ pub struct Info { +--- rustc.orig/src/bootstrap/src/utils/channel.rs ++++ rustc/src/bootstrap/src/utils/channel.rs +@@ -36,7 +36,11 @@ pub struct Info { impl GitInfo { pub fn new(omit_git_hash: bool, dir: &Path) -> GitInfo { // See if this even begins to look like a git dir @@ -27,19 +27,18 @@ match read_commit_info_file(dir) { Some(info) => return GitInfo::RecordedForTarball(info), None => return GitInfo::Absent, -Index: rustc/src/bootstrap/dist.rs +Index: rustc/src/bootstrap/src/core/build_steps/dist.rs =================================================================== ---- rustc.orig/src/bootstrap/dist.rs -+++ rustc/src/bootstrap/dist.rs -@@ -997,7 +997,10 @@ impl Step for PlainSourceTarball { - } - - // If we're building from git sources, we need to vendor a complete distribution. -- if builder.rust_info().is_managed_git_subrepository() { -+ // -+ // Debian: short-circuited because the Debian package is also in a git -+ // repository, but cargo-vendor should not be installed or run. -+ if false && builder.rust_info().is_managed_git_subrepository() { - // Ensure we have the submodules checked out. 
- builder.update_submodule(Path::new("src/tools/cargo")); - builder.update_submodule(Path::new("src/tools/rust-analyzer")); +--- rustc.orig/src/bootstrap/src/core/build_steps/dist.rs ++++ rustc/src/bootstrap/src/core/build_steps/dist.rs +@@ -994,7 +994,9 @@ impl Step for PlainSourceTarball { + if builder.rust_info().is_managed_git_subrepository() + || builder.rust_info().is_from_tarball() + { +- if builder.rust_info().is_managed_git_subrepository() { ++ // Debian: short-circuited because the Debian package is also in a git ++ // repository, but cargo-vendor should not be installed or run. ++ if false && builder.rust_info().is_managed_git_subrepository() { + // Ensure we have the submodules checked out. + builder.update_submodule(Path::new("src/tools/cargo")); + } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-no-assume-tools.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-no-assume-tools.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-no-assume-tools.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-no-assume-tools.patch 2024-02-14 22:09:28.000000000 +0000 @@ -9,8 +9,8 @@ diff --git a/src/bootstrap/builder/tests.rs b/src/bootstrap/builder/tests.rs index 4ab502e..5ce7fc8 100644 ---- a/src/bootstrap/builder/tests.rs -+++ b/src/bootstrap/builder/tests.rs +--- a/src/bootstrap/src/tests/builder.rs ++++ b/src/bootstrap/src/tests/builder.rs @@ -364,9 +364,13 @@ mod dist { #[test] fn dist_only_cross_host() { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-permit-symlink-in-docs.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-permit-symlink-in-docs.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-permit-symlink-in-docs.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-permit-symlink-in-docs.patch 2024-02-14 22:09:28.000000000 +0000 @@ -2,8 +2,8 @@ diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index b1fae356d89..10ed8ffb714 100644 ---- a/src/bootstrap/dist.rs -+++ b/src/bootstrap/dist.rs +--- a/src/bootstrap/src/core/build_steps/dist.rs ++++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -83,6 +83,7 @@ impl Step for Docs { tarball.set_product_name("Rust Documentation"); tarball.add_bulk_dir(&builder.doc_out(host), dest); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-rustflags.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-rustflags.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-rustflags.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-rustflags.patch 2024-02-14 22:09:28.000000000 +0000 @@ -7,12 +7,12 @@ src/bootstrap/builder.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) -diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs -index 23ea2fe..b2b1c54 100644 ---- a/src/bootstrap/builder.rs -+++ b/src/bootstrap/builder.rs -@@ -1499,6 +1499,18 @@ impl<'a> Builder<'a> { - } +Index: rustc/src/bootstrap/src/core/builder.rs +=================================================================== +--- rustc.orig/src/bootstrap/src/core/builder.rs ++++ rustc/src/bootstrap/src/core/builder.rs +@@ -1452,6 +1452,18 @@ impl<'a> Builder<'a> { + hostflags.arg("--check-cfg=values(bootstrap)"); } + // Debian-specific stuff here diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-use-local-css.patch 
rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-use-local-css.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-use-local-css.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-bootstrap-use-local-css.patch 2024-02-14 22:09:28.000000000 +0000 @@ -7,8 +7,8 @@ src/bootstrap/doc.rs | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) ---- a/src/bootstrap/doc.rs -+++ b/src/bootstrap/doc.rs +--- a/src/bootstrap/src/core/build_steps/doc.rs ++++ b/src/bootstrap/src/core/build_steps/doc.rs @@ -350,7 +350,27 @@ .arg("--index-page") .arg(&builder.src.join("src/doc/index.md")) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-disable-download-tests.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-disable-download-tests.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-disable-download-tests.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-disable-download-tests.patch 2024-02-14 22:09:28.000000000 +0000 @@ -1,18 +1,18 @@ -Index: rust/src/bootstrap/config/tests.rs +Index: rustc/src/bootstrap/src/tests/config.rs =================================================================== ---- rust.orig/src/bootstrap/config/tests.rs -+++ rust/src/bootstrap/config/tests.rs -@@ -11,6 +11,9 @@ fn parse(config: &str) -> Config { +--- rustc.orig/src/bootstrap/src/tests/config.rs ++++ rustc/src/bootstrap/src/tests/config.rs +@@ -18,6 +18,9 @@ fn parse(config: &str) -> Config { #[test] fn download_ci_llvm() { + // Debian: this will attempt to download LLVM + return; + - if crate::llvm::is_ci_llvm_modified(&parse("")) { + if crate::core::build_steps::llvm::is_ci_llvm_modified(&parse("")) { eprintln!("Detected LLVM as non-available: running in CI and modified LLVM in this change"); return; -@@ -39,7 +42,11 @@ fn download_ci_llvm() { +@@ -46,7 +49,11 @@ fn download_ci_llvm() { // - https://github.com/rust-lang/rust/pull/109162#issuecomment-1496782487 #[test] fn detect_src_and_out() { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-fix-rustix-outline.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-fix-rustix-outline.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-fix-rustix-outline.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-fix-rustix-outline.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,60 +0,0 @@ -Always enable cc even if the feature is not enabled. - -Some Debian architectures need outline asm, and Debian does not ship pre-built -outline asm. 
- -Index: rustc/vendor/rustix-0.37.6/Cargo.toml -=================================================================== ---- rustc.orig/vendor/rustix-0.37.6/Cargo.toml -+++ rustc/vendor/rustix-0.37.6/Cargo.toml -@@ -125,9 +125,9 @@ version = "0.6" - [dev-dependencies.tempfile] - version = "3.4.0" - --[build-dependencies.cc] -+[build-dependencies.cc_dep] - version = "1.0.68" --optional = true -+package = "cc" - - [features] - all-apis = [ -@@ -187,6 +187,7 @@ use-libc = [ - "libc_errno", - "libc", - ] -+cc = [] - use-libc-auxv = ["libc"] - - [target."cfg(all(any(target_os = \"android\", target_os = \"linux\"), any(rustix_use_libc, miri, not(all(target_os = \"linux\", any(target_arch = \"x86\", all(target_arch = \"x86_64\", target_pointer_width = \"64\"), all(target_endian = \"little\", any(target_arch = \"arm\", all(target_arch = \"aarch64\", target_pointer_width = \"64\"), target_arch = \"powerpc64\", target_arch = \"riscv64\", target_arch = \"mips\", target_arch = \"mips64\"))))))))".dependencies.linux-raw-sys] -Index: rustc/vendor/rustix-0.37.6/build.rs -=================================================================== ---- rustc.orig/vendor/rustix-0.37.6/build.rs -+++ rustc/vendor/rustix-0.37.6/build.rs -@@ -1,5 +1,4 @@ --#[cfg(feature = "cc")] --use cc::Build; -+use cc_dep::Build; - use std::env::var; - use std::io::Write; - -@@ -158,16 +157,16 @@ fn link_in_librustix_outline(arch: &str, - println!("cargo:rerun-if-changed={}", to); - - // If "cc" is not enabled, use a pre-built library. -- #[cfg(not(feature = "cc"))] -+ /*#[cfg(not(feature = "cc"))] - { - let _ = asm_name; - println!("cargo:rustc-link-search={}/{}", OUTLINE_PATH, profile); - println!("cargo:rustc-link-lib=static={}", name); -- } -+ }*/ - - // If "cc" is enabled, build the library from source, update the pre-built - // version, and assert that the pre-built version is checked in. 
-- #[cfg(feature = "cc")] -+ //#[cfg(feature = "cc")] - { - let out_dir = var("OUT_DIR").unwrap(); - // Add `-gdwarf-3` so that we always get the same output, regardless of diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-rustc-i686-baseline.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-rustc-i686-baseline.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-rustc-i686-baseline.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-rustc-i686-baseline.patch 2024-02-14 22:09:28.000000000 +0000 @@ -7,23 +7,23 @@ compiler/rustc_target/src/spec/i686_unknown_linux_gnu.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -Index: rust/compiler/rustc_target/src/spec/i686_unknown_linux_gnu.rs +Index: rustc/compiler/rustc_target/src/spec/targets/i686_unknown_linux_gnu.rs =================================================================== ---- rust.orig/compiler/rustc_target/src/spec/i686_unknown_linux_gnu.rs -+++ rust/compiler/rustc_target/src/spec/i686_unknown_linux_gnu.rs -@@ -2,7 +2,7 @@ use crate::spec::{Cc, LinkerFlavor, Lld, +--- rustc.orig/compiler/rustc_target/src/spec/targets/i686_unknown_linux_gnu.rs ++++ rustc/compiler/rustc_target/src/spec/targets/i686_unknown_linux_gnu.rs +@@ -2,7 +2,7 @@ use crate::spec::{base, Cc, LinkerFlavor pub fn target() -> Target { - let mut base = super::linux_gnu_base::opts(); + let mut base = base::linux_gnu::opts(); - base.cpu = "pentium4".into(); + base.cpu = "pentiumpro".into(); base.max_atomic_width = Some(64); base.supported_sanitizers = SanitizerSet::ADDRESS; base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m32"]); -Index: rust/tests/ui/sse2.rs +Index: rustc/tests/ui/sse2.rs =================================================================== ---- rust.orig/tests/ui/sse2.rs -+++ rust/tests/ui/sse2.rs +--- rustc.orig/tests/ui/sse2.rs ++++ rustc/tests/ui/sse2.rs @@ -15,7 +15,7 @@ fn main() { } Err(_) => return, @@ -33,10 +33,10 @@ assert!(cfg!(target_feature = "sse2"), "SSE2 was not detected as available on an x86 platform"); } -Index: rust/tests/ui/abi/homogenous-floats-target-feature-mixup.rs +Index: rustc/tests/ui/abi/homogenous-floats-target-feature-mixup.rs =================================================================== ---- rust.orig/tests/ui/abi/homogenous-floats-target-feature-mixup.rs -+++ rust/tests/ui/abi/homogenous-floats-target-feature-mixup.rs +--- rustc.orig/tests/ui/abi/homogenous-floats-target-feature-mixup.rs ++++ rustc/tests/ui/abi/homogenous-floats-target-feature-mixup.rs @@ -24,7 +24,8 @@ fn main() { match std::env::var("TARGET") { Ok(s) => { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-rustc-windows-ssp.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-rustc-windows-ssp.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/d-rustc-windows-ssp.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/d-rustc-windows-ssp.patch 2024-02-14 22:09:28.000000000 +0000 @@ -9,8 +9,8 @@ diff --git a/compiler/rustc_target/src/spec/windows_gnu_base.rs b/compiler/rustc_target/src/spec/windows_gnu_base.rs index d11f1f7..137f8eb 100644 ---- a/compiler/rustc_target/src/spec/windows_gnu_base.rs -+++ b/compiler/rustc_target/src/spec/windows_gnu_base.rs +--- a/compiler/rustc_target/src/spec/base/windows_gnu.rs ++++ b/compiler/rustc_target/src/spec/base/windows_gnu.rs @@ -41,6 +41,8 @@ pub fn opts() -> TargetOptions { "-lmsvcrt", "-luser32", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/series 
rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/series --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/series 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/series 2024-02-14 22:09:28.000000000 +0000 @@ -11,7 +11,6 @@ # pending, or forwarded u-reproducible-build.patch #u-fix-get-toml-when-test.patch -u-disable-fp-precision-test-on-i386.patch # not forwarded, or forwarded but unlikely to be merged u-ignore-ppc-hangs.patch @@ -44,7 +43,6 @@ # d-bootstrap-custom-debuginfo-path.patch d-bootstrap-permit-symlink-in-docs.patch d-test-ignore-avx-44056.patch -d-bootstrap-cargo-check-cfg.patch d-armel-fix-lldb.patch # Work around for some porterboxes, keep this commented @@ -61,7 +59,6 @@ # Experimental patch not yet working #d-rustc-prefer-dynamic.patch d-rustdoc-disable-embedded-fonts.patch -d-fix-rustix-outline.patch # cherry-picked from ubuntu ubuntu-disable-ppc64el-asm-tests.patch diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/u-disable-fp-precision-test-on-i386.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/u-disable-fp-precision-test-on-i386.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/u-disable-fp-precision-test-on-i386.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/u-disable-fp-precision-test-on-i386.patch 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -From: liushuyu -Date: Tue, 25 Jul 2023 09:48:12 +0800 -Subject: [PATCH] core library: Disable fpmath tests for i386 ... - -This patch disables the floating-point epsilon test for i386 since -x87 registers are too imprecise and can't produce the expected -results. - -Forwarded: https://github.com/rust-lang/rust/pull/114042 ---- - library/core/src/num/f32.rs | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/library/core/src/num/f32.rs b/library/core/src/num/f32.rs -index d050d21c8c57..f5c2d1c6bf68 100644 ---- a/library/core/src/num/f32.rs -+++ b/library/core/src/num/f32.rs -@@ -800,7 +800,7 @@ impl f32 { - /// let angle = std::f32::consts::PI; - /// - /// let abs_difference = (angle.to_degrees() - 180.0).abs(); -- /// -+ /// # #[cfg(not(target_arch = "x86"))] - /// assert!(abs_difference <= f32::EPSILON); - /// ``` - #[must_use = "this returns the result of the operation, \ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/ubuntu-use-system-compiler-during-tests.patch rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/ubuntu-use-system-compiler-during-tests.patch --- rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/patches/ubuntu-use-system-compiler-during-tests.patch 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/patches/ubuntu-use-system-compiler-during-tests.patch 2024-02-14 22:09:28.000000000 +0000 @@ -7,12 +7,12 @@ Last-Update: 2023-07-18 --- This patch header follows DEP-3: http://dep.debian.net/deps/dep3/ -Index: rustc/src/bootstrap/config.rs +Index: rustc/src/bootstrap/src/core/config/config.rs =================================================================== ---- rustc.orig/src/bootstrap/config.rs -+++ rustc/src/bootstrap/config.rs -@@ -1261,7 +1261,6 @@ impl Config { - // config.check_build_rustc_version(&rustc); +--- rustc.orig/src/bootstrap/src/core/config/config.rs ++++ rustc/src/bootstrap/src/core/config/config.rs +@@ -1283,7 +1283,6 @@ impl Config { + } PathBuf::from(rustc) } else { - config.download_beta_toolchain(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/source/lintian-overrides rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/source/lintian-overrides --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/debian/source/lintian-overrides 2024-01-17 20:44:02.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/debian/source/lintian-overrides 2024-02-14 22:09:28.000000000 +0000 @@ -3,6 +3,7 @@ rustc source: source-is-missing [tests/rustdoc/notable-trait/doc-notable_trait.*.html] rustc source: source-is-missing [tests/rustdoc/notable-trait/doc-notable_trait-slice.bare_fn_matches.html] rustc source: source-is-missing [tests/rustdoc/notable-trait/spotlight-from-dependency.odd.html] +rustc source: source-is-missing [tests/rustdoc/decl-trailing-whitespace.declaration.html] rustc source: source-is-missing [vendor/html5ever/data/bench/*.html] rustc source: source-is-missing [vendor/minifier/tests/files/minified_main.js] rustc source: source-contains-prebuilt-windows-binary [vendor/libloading/tests/nagisa32.dll] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/git-commit-hash rustc-1.75.0+dfsg0ubuntu1~bpo10/git-commit-hash --- rustc-1.74.1+dfsg0ubuntu1~bpo10/git-commit-hash 2023-12-04 21:32:04.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/git-commit-hash 2023-12-21 18:26:55.000000000 +0000 @@ -1 +1 @@ -a28077b28a02b92985b3a3faecf92813155f1ea1 \ No newline at end of file +82e1608dfa6e0b5569232559e3d385fea5a93112 \ No newline at end of file diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/git-commit-info rustc-1.75.0+dfsg0ubuntu1~bpo10/git-commit-info --- rustc-1.74.1+dfsg0ubuntu1~bpo10/git-commit-info 2023-12-04 21:32:04.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/git-commit-info 2023-12-21 18:26:55.000000000 +0000 @@ -1,3 +1,3 @@ -a28077b28a02b92985b3a3faecf92813155f1ea1 -a28077b28 -2023-12-04 +82e1608dfa6e0b5569232559e3d385fea5a93112 +82e1608df +2023-12-21 diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -36,3 +36,5 @@ compiler-builtins-no-asm = ["compiler_builtins/no-asm"] compiler-builtins-mangled-names = ["compiler_builtins/mangled-names"] compiler-builtins-weak-intrinsics = ["compiler_builtins/weak-intrinsics"] +# Make panics and failed asserts immediately abort without formatting any message +panic_immediate_abort = [] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/alloc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/alloc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/alloc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/alloc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -377,13 +377,20 @@ panic!("allocation failed"); } + #[inline] fn rt_error(layout: Layout) -> ! { unsafe { __rust_alloc_error_handler(layout.size(), layout.align()); } } - unsafe { core::intrinsics::const_eval_select((layout,), ct_error, rt_error) } + #[cfg(not(feature = "panic_immediate_abort"))] + unsafe { + core::intrinsics::const_eval_select((layout,), ct_error, rt_error) + } + + #[cfg(feature = "panic_immediate_abort")] + ct_error(layout) } // For alloc test `std::alloc::handle_alloc_error` can be used directly. 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/borrow.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/borrow.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/borrow.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/borrow.rs 2023-12-21 16:55:28.000000000 +0000 @@ -55,6 +55,7 @@ /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[must_use = "cloning is often expensive and is not expected to have side effects"] + #[cfg_attr(not(test), rustc_diagnostic_item = "to_owned_method")] fn to_owned(&self) -> Self::Owned; /// Uses borrowed data to replace owned data, usually by cloning. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/boxed.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/boxed.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/boxed.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/boxed.rs 2023-12-21 16:55:28.000000000 +0000 @@ -159,7 +159,7 @@ use core::marker::Unsize; use core::mem::{self, SizedTypeProperties}; use core::ops::{ - CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver, + CoerceUnsized, Coroutine, CoroutineState, Deref, DerefMut, DispatchFromDyn, Receiver, }; use core::pin::Pin; use core::ptr::{self, NonNull, Unique}; @@ -207,7 +207,7 @@ /// ``` /// let five = Box::new(5); /// ``` - #[cfg(all(not(no_global_oom_handling)))] + #[cfg(not(no_global_oom_handling))] #[inline(always)] #[stable(feature = "rust1", since = "1.0.0")] #[must_use] @@ -2106,28 +2106,28 @@ #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Box where A: 'static {} -#[unstable(feature = "generator_trait", issue = "43122")] -impl + Unpin, R, A: Allocator> Generator for Box +#[unstable(feature = "coroutine_trait", issue = "43122")] +impl + Unpin, R, A: Allocator> Coroutine for Box where A: 'static, { type Yield = G::Yield; type Return = G::Return; - fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState { + fn resume(mut self: Pin<&mut Self>, arg: R) -> CoroutineState { G::resume(Pin::new(&mut *self), arg) } } -#[unstable(feature = "generator_trait", issue = "43122")] -impl, R, A: Allocator> Generator for Pin> +#[unstable(feature = "coroutine_trait", issue = "43122")] +impl, R, A: Allocator> Coroutine for Pin> where A: 'static, { type Yield = G::Yield; type Return = G::Return; - fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState { + fn resume(mut self: Pin<&mut Self>, arg: R) -> CoroutineState { G::resume((*self).as_mut(), arg) } } @@ -2444,4 +2444,8 @@ fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { core::error::Error::source(&**self) } + + fn provide<'b>(&'b self, request: &mut core::error::Request<'b>) { + core::error::Error::provide(&**self, request); + } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/collections/binary_heap/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/collections/binary_heap/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/collections/binary_heap/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/collections/binary_heap/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -434,8 +434,9 @@ /// heap.push(4); /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_binary_heap_constructor", issue = "112353")] #[must_use] - pub fn new() -> BinaryHeap { + pub const fn new() -> BinaryHeap { BinaryHeap { data: vec![] } } @@ -477,8 +478,9 @@ /// heap.push(4); /// 
``` #[unstable(feature = "allocator_api", issue = "32838")] + #[rustc_const_unstable(feature = "const_binary_heap_constructor", issue = "112353")] #[must_use] - pub fn new_in(alloc: A) -> BinaryHeap { + pub const fn new_in(alloc: A) -> BinaryHeap { BinaryHeap { data: Vec::new_in(alloc) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/dedup_sorted_iter.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/dedup_sorted_iter.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/dedup_sorted_iter.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/dedup_sorted_iter.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,6 @@ use core::iter::Peekable; -/// A iterator for deduping the key of a sorted iterator. +/// An iterator for deduping the key of a sorted iterator. /// When encountering the duplicated key, only the last key-value pair is yielded. /// /// Used by [`BTreeMap::bulk_build_from_sorted_iter`][1]. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/map.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/map.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/map.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/map.rs 2023-12-21 16:55:28.000000000 +0000 @@ -669,7 +669,7 @@ /// map.insert(1, "a"); /// ``` #[unstable(feature = "btreemap_alloc", issue = "32838")] - pub fn new_in(alloc: A) -> BTreeMap { + pub const fn new_in(alloc: A) -> BTreeMap { BTreeMap { root: None, length: 0, alloc: ManuallyDrop::new(alloc), _marker: PhantomData } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/set.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/set.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/set.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/collections/btree/set.rs 2023-12-21 16:55:28.000000000 +0000 @@ -358,7 +358,7 @@ /// let mut set: BTreeSet = BTreeSet::new_in(Global); /// ``` #[unstable(feature = "btreemap_alloc", issue = "32838")] - pub fn new_in(alloc: A) -> BTreeSet { + pub const fn new_in(alloc: A) -> BTreeSet { BTreeSet { map: BTreeMap::new_in(alloc) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/fmt.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/fmt.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/fmt.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/fmt.rs 2023-12-21 16:55:28.000000000 +0000 @@ -555,6 +555,8 @@ pub use core::fmt::Alignment; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::Error; +#[unstable(feature = "debug_closure_helpers", issue = "117729")] +pub use core::fmt::FormatterFn; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{write, Arguments}; #[stable(feature = "rust1", since = "1.0.0")] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -78,6 +78,8 @@ not(no_sync), target_has_atomic = "ptr" ))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![no_std] 
#![needs_allocator] // Lints: @@ -113,7 +115,6 @@ #![feature(const_eval_select)] #![feature(const_maybe_uninit_as_mut_ptr)] #![feature(const_maybe_uninit_write)] -#![feature(const_maybe_uninit_zeroed)] #![feature(const_pin)] #![feature(const_refs_to_cell)] #![feature(const_size_of_val)] @@ -139,7 +140,7 @@ #![feature(maybe_uninit_uninit_array)] #![feature(maybe_uninit_uninit_array_transpose)] #![feature(pattern)] -#![feature(pointer_byte_offsets)] +#![feature(ptr_addr_eq)] #![feature(ptr_internals)] #![feature(ptr_metadata)] #![feature(ptr_sub_ptr)] @@ -166,7 +167,7 @@ // // Language features: // tidy-alphabetical-start -#![cfg_attr(not(test), feature(generator_trait))] +#![cfg_attr(not(test), feature(coroutine_trait))] #![cfg_attr(test, feature(panic_update_hook))] #![cfg_attr(test, feature(test))] #![feature(allocator_internals)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/raw_vec.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/raw_vec.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/raw_vec.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/raw_vec.rs 2023-12-21 16:55:28.000000000 +0000 @@ -305,10 +305,13 @@ /// The same as `reserve`, but returns on errors instead of panicking or aborting. pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { if self.needs_to_grow(len, additional) { - self.grow_amortized(len, additional) - } else { - Ok(()) + self.grow_amortized(len, additional)?; + } + unsafe { + // Inform the optimizer that the reservation has succeeded or wasn't needed + core::intrinsics::assume(!self.needs_to_grow(len, additional)); } + Ok(()) } /// Ensures that the buffer contains at least enough space to hold `len + @@ -339,7 +342,14 @@ len: usize, additional: usize, ) -> Result<(), TryReserveError> { - if self.needs_to_grow(len, additional) { self.grow_exact(len, additional) } else { Ok(()) } + if self.needs_to_grow(len, additional) { + self.grow_exact(len, additional)?; + } + unsafe { + // Inform the optimizer that the reservation has succeeded or wasn't needed + core::intrinsics::assume(!self.needs_to_grow(len, additional)); + } + Ok(()) } /// Shrinks the buffer down to the specified capacity. If the given amount @@ -530,6 +540,7 @@ // ensure that the code generation related to these panics is minimal as there's // only one location which panics rather than a bunch throughout the module. #[cfg(not(no_global_oom_handling))] +#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))] fn capacity_overflow() -> ! 
{ panic!("capacity overflow"); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/rc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/rc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/rc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/rc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1304,7 +1304,7 @@ /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub fn into_raw(this: Self) -> *const T { let ptr = Self::as_ptr(&this); mem::forget(this); @@ -1328,7 +1328,7 @@ /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` #[stable(feature = "weak_into_raw", since = "1.45.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub fn as_ptr(this: &Self) -> *const T { let ptr: *mut RcBox = NonNull::as_ptr(this.ptr); @@ -1649,7 +1649,7 @@ /// assert!(!Rc::ptr_eq(&five, &other_five)); /// ``` pub fn ptr_eq(this: &Self, other: &Self) -> bool { - this.ptr.as_ptr() as *const () == other.ptr.as_ptr() as *const () + ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr()) } } @@ -2701,6 +2701,7 @@ /// /// [`upgrade`]: Weak::upgrade #[stable(feature = "rc_weak", since = "1.4.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "RcWeak")] pub struct Weak< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, @@ -3146,7 +3147,7 @@ #[must_use] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - ptr::eq(self.ptr.as_ptr() as *const (), other.ptr.as_ptr() as *const ()) + ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/string.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/string.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/string.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/string.rs 2023-12-21 16:55:28.000000000 +0000 @@ -714,6 +714,156 @@ .collect() } + /// Decode a UTF-16LE–encoded vector `v` into a `String`, returning [`Err`] + /// if `v` contains any invalid data. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(str_from_utf16_endian)] + /// // 𝄞music + /// let v = &[0x34, 0xD8, 0x1E, 0xDD, 0x6d, 0x00, 0x75, 0x00, + /// 0x73, 0x00, 0x69, 0x00, 0x63, 0x00]; + /// assert_eq!(String::from("𝄞music"), + /// String::from_utf16le(v).unwrap()); + /// + /// // 𝄞muic + /// let v = &[0x34, 0xD8, 0x1E, 0xDD, 0x6d, 0x00, 0x75, 0x00, + /// 0x00, 0xD8, 0x69, 0x00, 0x63, 0x00]; + /// assert!(String::from_utf16le(v).is_err()); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "str_from_utf16_endian", issue = "116258")] + pub fn from_utf16le(v: &[u8]) -> Result { + if v.len() % 2 != 0 { + return Err(FromUtf16Error(())); + } + match (cfg!(target_endian = "little"), unsafe { v.align_to::() }) { + (true, ([], v, [])) => Self::from_utf16(v), + _ => char::decode_utf16(v.array_chunks::<2>().copied().map(u16::from_le_bytes)) + .collect::>() + .map_err(|_| FromUtf16Error(())), + } + } + + /// Decode a UTF-16LE–encoded slice `v` into a `String`, replacing + /// invalid data with [the replacement character (`U+FFFD`)][U+FFFD]. + /// + /// Unlike [`from_utf8_lossy`] which returns a [`Cow<'a, str>`], + /// `from_utf16le_lossy` returns a `String` since the UTF-16 to UTF-8 + /// conversion requires a memory allocation. 
+ /// + /// [`from_utf8_lossy`]: String::from_utf8_lossy + /// [`Cow<'a, str>`]: crate::borrow::Cow "borrow::Cow" + /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(str_from_utf16_endian)] + /// // 𝄞music + /// let v = &[0x34, 0xD8, 0x1E, 0xDD, 0x6d, 0x00, 0x75, 0x00, + /// 0x73, 0x00, 0x1E, 0xDD, 0x69, 0x00, 0x63, 0x00, + /// 0x34, 0xD8]; + /// + /// assert_eq!(String::from("𝄞mus\u{FFFD}ic\u{FFFD}"), + /// String::from_utf16le_lossy(v)); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "str_from_utf16_endian", issue = "116258")] + pub fn from_utf16le_lossy(v: &[u8]) -> String { + match (cfg!(target_endian = "little"), unsafe { v.align_to::() }) { + (true, ([], v, [])) => Self::from_utf16_lossy(v), + (true, ([], v, [_remainder])) => Self::from_utf16_lossy(v) + "\u{FFFD}", + _ => { + let mut iter = v.array_chunks::<2>(); + let string = char::decode_utf16(iter.by_ref().copied().map(u16::from_le_bytes)) + .map(|r| r.unwrap_or(char::REPLACEMENT_CHARACTER)) + .collect(); + if iter.remainder().is_empty() { string } else { string + "\u{FFFD}" } + } + } + } + + /// Decode a UTF-16BE–encoded vector `v` into a `String`, returning [`Err`] + /// if `v` contains any invalid data. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(str_from_utf16_endian)] + /// // 𝄞music + /// let v = &[0xD8, 0x34, 0xDD, 0x1E, 0x00, 0x6d, 0x00, 0x75, + /// 0x00, 0x73, 0x00, 0x69, 0x00, 0x63]; + /// assert_eq!(String::from("𝄞music"), + /// String::from_utf16be(v).unwrap()); + /// + /// // 𝄞muic + /// let v = &[0xD8, 0x34, 0xDD, 0x1E, 0x00, 0x6d, 0x00, 0x75, + /// 0xD8, 0x00, 0x00, 0x69, 0x00, 0x63]; + /// assert!(String::from_utf16be(v).is_err()); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "str_from_utf16_endian", issue = "116258")] + pub fn from_utf16be(v: &[u8]) -> Result { + if v.len() % 2 != 0 { + return Err(FromUtf16Error(())); + } + match (cfg!(target_endian = "big"), unsafe { v.align_to::() }) { + (true, ([], v, [])) => Self::from_utf16(v), + _ => char::decode_utf16(v.array_chunks::<2>().copied().map(u16::from_be_bytes)) + .collect::>() + .map_err(|_| FromUtf16Error(())), + } + } + + /// Decode a UTF-16BE–encoded slice `v` into a `String`, replacing + /// invalid data with [the replacement character (`U+FFFD`)][U+FFFD]. + /// + /// Unlike [`from_utf8_lossy`] which returns a [`Cow<'a, str>`], + /// `from_utf16le_lossy` returns a `String` since the UTF-16 to UTF-8 + /// conversion requires a memory allocation. 
+ /// + /// [`from_utf8_lossy`]: String::from_utf8_lossy + /// [`Cow<'a, str>`]: crate::borrow::Cow "borrow::Cow" + /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(str_from_utf16_endian)] + /// // 𝄞music + /// let v = &[0xD8, 0x34, 0xDD, 0x1E, 0x00, 0x6d, 0x00, 0x75, + /// 0x00, 0x73, 0xDD, 0x1E, 0x00, 0x69, 0x00, 0x63, + /// 0xD8, 0x34]; + /// + /// assert_eq!(String::from("𝄞mus\u{FFFD}ic\u{FFFD}"), + /// String::from_utf16be_lossy(v)); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "str_from_utf16_endian", issue = "116258")] + pub fn from_utf16be_lossy(v: &[u8]) -> String { + match (cfg!(target_endian = "big"), unsafe { v.align_to::() }) { + (true, ([], v, [])) => Self::from_utf16_lossy(v), + (true, ([], v, [_remainder])) => Self::from_utf16_lossy(v) + "\u{FFFD}", + _ => { + let mut iter = v.array_chunks::<2>(); + let string = char::decode_utf16(iter.by_ref().copied().map(u16::from_be_bytes)) + .map(|r| r.unwrap_or(char::REPLACEMENT_CHARACTER)) + .collect(); + if iter.remainder().is_empty() { string } else { string + "\u{FFFD}" } + } + } + } + /// Decomposes a `String` into its raw components. /// /// Returns the raw pointer to the underlying data, the length of @@ -2435,6 +2585,7 @@ /// ``` #[rustc_conversion_suggestion] #[stable(feature = "rust1", since = "1.0.0")] + #[cfg_attr(not(test), rustc_diagnostic_item = "to_string_method")] fn to_string(&self) -> String; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/sync.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/sync.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/sync.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/sync.rs 2023-12-21 16:55:28.000000000 +0000 @@ -311,6 +311,7 @@ /// /// [`upgrade`]: Weak::upgrade #[stable(feature = "arc_weak", since = "1.4.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "ArcWeak")] pub struct Weak< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, @@ -1454,7 +1455,7 @@ /// ``` #[must_use = "losing the pointer will leak memory"] #[stable(feature = "rc_raw", since = "1.17.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub fn into_raw(this: Self) -> *const T { let ptr = Self::as_ptr(&this); mem::forget(this); @@ -1479,7 +1480,7 @@ /// ``` #[must_use] #[stable(feature = "rc_as_ptr", since = "1.45.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub fn as_ptr(this: &Self) -> *const T { let ptr: *mut ArcInner = NonNull::as_ptr(this.ptr); @@ -1778,7 +1779,7 @@ #[must_use] #[stable(feature = "ptr_eq", since = "1.17.0")] pub fn ptr_eq(this: &Self, other: &Self) -> bool { - this.ptr.as_ptr() as *const () == other.ptr.as_ptr() as *const () + ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr()) } } @@ -2900,7 +2901,7 @@ #[must_use] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - ptr::eq(self.ptr.as_ptr() as *const (), other.ptr.as_ptr() as *const ()) + ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/vec/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/vec/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/src/vec/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/src/vec/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1258,7 
+1258,7 @@ /// [`as_mut_ptr`]: Vec::as_mut_ptr /// [`as_ptr`]: Vec::as_ptr #[stable(feature = "vec_as_ptr", since = "1.37.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] #[inline] pub fn as_ptr(&self) -> *const T { // We shadow the slice method of the same name to avoid going through @@ -1318,7 +1318,7 @@ /// [`as_mut_ptr`]: Vec::as_mut_ptr /// [`as_ptr`]: Vec::as_ptr #[stable(feature = "vec_as_ptr", since = "1.37.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] #[inline] pub fn as_mut_ptr(&mut self) -> *mut T { // We shadow the slice method of the same name to avoid going through @@ -1447,7 +1447,8 @@ #[stable(feature = "rust1", since = "1.0.0")] pub fn swap_remove(&mut self, index: usize) -> T { #[cold] - #[inline(never)] + #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))] + #[track_caller] fn assert_failed(index: usize, len: usize) -> ! { panic!("swap_remove index (is {index}) should be < len (is {len})"); } @@ -1488,7 +1489,8 @@ #[stable(feature = "rust1", since = "1.0.0")] pub fn insert(&mut self, index: usize, element: T) { #[cold] - #[inline(never)] + #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))] + #[track_caller] fn assert_failed(index: usize, len: usize) -> ! { panic!("insertion index (is {index}) should be <= len (is {len})"); } @@ -1549,7 +1551,7 @@ #[track_caller] pub fn remove(&mut self, index: usize) -> T { #[cold] - #[inline(never)] + #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))] #[track_caller] fn assert_failed(index: usize, len: usize) -> ! { panic!("removal index (is {index}) should be < len (is {len})"); @@ -1956,6 +1958,7 @@ } else { unsafe { self.len -= 1; + core::intrinsics::assume(self.len < self.capacity()); Some(ptr::read(self.as_ptr().add(self.len()))) } } @@ -2147,7 +2150,8 @@ A: Clone, { #[cold] - #[inline(never)] + #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))] + #[track_caller] fn assert_failed(at: usize, len: usize) -> ! { panic!("`at` split index (is {at}) should be <= len (is {len})"); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/tests/autotraits.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/tests/autotraits.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/alloc/tests/autotraits.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/alloc/tests/autotraits.rs 2023-12-21 16:55:28.000000000 +0000 @@ -14,8 +14,8 @@ // // We test autotraits in this convoluted way, instead of a straightforward // `require_send_sync::()`, because the interaction with - // generators exposes some current limitations in rustc's ability to prove a - // lifetime bound on the erased generator witness types. See the above link. + // coroutines exposes some current limitations in rustc's ability to prove a + // lifetime bound on the erased coroutine witness types. See the above link. 
// // A typical way this would surface in real code is: // diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/.github/actions/build-with-patched-std/action.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/.github/actions/build-with-patched-std/action.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/.github/actions/build-with-patched-std/action.yml 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/.github/actions/build-with-patched-std/action.yml 2023-12-21 16:55:31.000000000 +0000 @@ -0,0 +1,48 @@ +# Github composite action to build a single-source-file test binary with an +# already-checked-out version of Rust's stdlib, that will be patched with a +# given revision of the backtrace crate. + +name: Build with patched std +description: > + Build a binary with a version of std that's had a specific revision of + backtrace patched in. +inputs: + backtrace-commit: + description: The git commit of backtrace to patch in to std + required: true + main-rs: + description: The (single) source code file to compile + required: true + rustc-dir: + description: The root directory of the rustc repo + required: true +outputs: + test-binary-size: + description: The size in bytes of the built test binary + value: ${{ steps.measure.outputs.test-binary-size }} +runs: + using: composite + steps: + - shell: bash + id: measure + env: + RUSTC_FLAGS: -Copt-level=3 -Cstrip=symbols + # This symlink is made by Build::new() in the bootstrap crate, using a + # symlink on Linux and a junction on Windows, so it will exist on both + # platforms. + RUSTC_BUILD_DIR: build/host + working-directory: ${{ inputs.rustc-dir }} + run: | + rm -rf "$RUSTC_BUILD_DIR/stage0-std" + + (cd library/backtrace && git checkout ${{ inputs.backtrace-commit }}) + git add library/backtrace + + python3 x.py build library --stage 0 + + TEMP_BUILD_OUTPUT=$(mktemp test-binary-XXXXXXXX) + "$RUSTC_BUILD_DIR/stage0-sysroot/bin/rustc" $RUSTC_FLAGS "${{ inputs.main-rs }}" -o "$TEMP_BUILD_OUTPUT" + BINARY_SIZE=$(stat -c '%s' "$TEMP_BUILD_OUTPUT") + rm "$TEMP_BUILD_OUTPUT" + + echo "test-binary-size=$BINARY_SIZE" >> "$GITHUB_OUTPUT" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/.github/actions/report-code-size-changes/action.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/.github/actions/report-code-size-changes/action.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/.github/actions/report-code-size-changes/action.yml 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/.github/actions/report-code-size-changes/action.yml 2023-12-21 16:55:31.000000000 +0000 @@ -0,0 +1,111 @@ +# Github composite action to report on code size changes across different +# platforms. + +name: Report binary size changes on PR +description: | + Report on code size changes across different platforms resulting from a PR. + The only input argument is the path to a directory containing a set of + "*.json" files (extension required), each file containing the keys: + + - platform: the platform that the code size change was measured on + - reference: the size in bytes of the reference binary (base of PR) + - updated: the size in bytes of the updated binary (head of PR) + + The size is reported as a comment on the PR (accessed via context). +inputs: + data-directory: + description: > + Path to directory containing size data as a set of "*.json" files. 
+ required: true +runs: + using: composite + steps: + - name: Post a PR comment if the size has changed + uses: actions/github-script@v6 + env: + DATA_DIRECTORY: ${{ inputs.data-directory }} + with: + script: | + const fs = require("fs"); + + const size_dir = process.env.DATA_DIRECTORY; + + // Map the set of all the *.json files into an array of objects. + const globber = await glob.create(`${size_dir}/*.json`); + const files = await globber.glob(); + const sizes = files.map(path => { + const contents = fs.readFileSync(path); + return JSON.parse(contents); + }); + + // Map each object into some text, but only if it shows any difference + // to report. + const size_reports = sizes.flatMap(size_data => { + const platform = size_data["platform"]; + const reference = size_data["reference"]; + const updated = size_data["updated"]; + + if (!(reference > 0)) { + core.setFailed(`Reference size invalid: ${reference}`); + return; + } + + if (!(updated > 0)) { + core.setFailed(`Updated size invalid: ${updated}`); + return; + } + + const formatter = Intl.NumberFormat("en", { + useGrouping: "always" + }); + + const updated_str = formatter.format(updated); + const reference_str = formatter.format(reference); + + const diff = updated - reference; + const diff_pct = (updated / reference) - 1; + + const diff_str = Intl.NumberFormat("en", { + useGrouping: "always", + sign: "exceptZero" + }).format(diff); + + const diff_pct_str = Intl.NumberFormat("en", { + style: "percent", + useGrouping: "always", + sign: "exceptZero", + maximumFractionDigits: 2 + }).format(diff_pct); + + if (diff !== 0) { + // The body is created here and wrapped so "weirdly" to avoid whitespace at the start of the lines, + // which is interpreted as a code block by Markdown. + const report = `On platform \`${platform}\`: + + - Original binary size: **${reference_str} B** + - Updated binary size: **${updated_str} B** + - Difference: **${diff_str} B** (${diff_pct_str}) + + `; + + return [report]; + } else { + return []; + } + }); + + // If there are any size changes to report, format a comment and post + // it. + if (size_reports.length > 0) { + const comment_sizes = size_reports.join(""); + const body = `Code size changes for a hello-world Rust program linked with libstd with backtrace: + + ${comment_sizes}`; + + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body + }); + } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/.github/workflows/check-binary-size.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/.github/workflows/check-binary-size.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/.github/workflows/check-binary-size.yml 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/.github/workflows/check-binary-size.yml 2023-12-21 16:55:31.000000000 +0000 @@ -9,75 +9,143 @@ branches: - master +# Both the "measure" and "report" jobs need to know this. +env: + SIZE_DATA_DIR: sizes + +# Responsibility is divided between two jobs "measure" and "report", so that the +# job that builds (and potentnially runs) untrusted code does not have PR write +# permission, and vice-versa. jobs: - test: + measure: name: Check binary size - runs-on: ubuntu-latest + strategy: + matrix: + platform: [ubuntu-latest, windows-latest] + runs-on: ${{ matrix.platform }} permissions: - pull-requests: write + contents: read + env: + # This cannot be used as a context variable in the 'uses' key later. 
If it + # changes, update those steps too. + BACKTRACE_DIR: backtrace + RUSTC_DIR: rustc + TEST_MAIN_RS: foo.rs + BASE_COMMIT: ${{ github.event.pull_request.base.sha }} + HEAD_COMMIT: ${{ github.event.pull_request.head.sha }} + SIZE_DATA_FILE: size-${{ strategy.job-index }}.json steps: - name: Print info + shell: bash run: | - echo "Current SHA: ${{ github.event.pull_request.head.sha }}" - echo "Base SHA: ${{ github.event.pull_request.base.sha }}" + echo "Current SHA: $HEAD_COMMIT" + echo "Base SHA: $BASE_COMMIT" + # Note: the backtrace source that's cloned here is NOT the version to be + # patched in to std. It's cloned here to access the Github action for + # building and measuring the test binary. + - name: Clone backtrace to access Github action + uses: actions/checkout@v3 + with: + path: ${{ env.BACKTRACE_DIR }} - name: Clone Rustc uses: actions/checkout@v3 with: repository: rust-lang/rust - fetch-depth: 1 - - name: Fetch backtrace - run: git submodule update --init library/backtrace - - name: Create hello world program that uses backtrace - run: printf "fn main() { panic!(); }" > foo.rs - - name: Build binary with base version of backtrace + path: ${{ env.RUSTC_DIR }} + - name: Set up std repository and backtrace submodule for size test + shell: bash + working-directory: ${{ env.RUSTC_DIR }} + env: + PR_SOURCE_REPO: ${{ github.event.pull_request.head.repo.full_name }} run: | - printf "[llvm]\ndownload-ci-llvm = true\n\n[rust]\nincremental = false\n" > config.toml + # Bootstrap config + cat < config.toml + [llvm] + download-ci-llvm = true + [rust] + incremental = false + EOF + + # Test program source + cat < $TEST_MAIN_RS + fn main() { + panic!(); + } + EOF + + git submodule update --init library/backtrace + cd library/backtrace - git remote add head-pr https://github.com/${{ github.event.pull_request.head.repo.full_name }} + git remote add head-pr "https://github.com/$PR_SOURCE_REPO" git fetch --all - git checkout ${{ github.event.pull_request.base.sha }} - cd ../.. - git add library/backtrace - python3 x.py build library --stage 0 - ./build/x86_64-unknown-linux-gnu/stage0-sysroot/bin/rustc -O foo.rs -o binary-reference + - name: Build binary with base version of backtrace + uses: ./backtrace/.github/actions/build-with-patched-std + with: + backtrace-commit: ${{ env.BASE_COMMIT }} + main-rs: ${{ env.TEST_MAIN_RS }} + rustc-dir: ${{ env.RUSTC_DIR }} + id: size-reference - name: Build binary with PR version of backtrace - run: | - cd library/backtrace - git checkout ${{ github.event.pull_request.head.sha }} - cd ../.. - git add library/backtrace - rm -rf build/x86_64-unknown-linux-gnu/stage0-std - python3 x.py build library --stage 0 - ./build/x86_64-unknown-linux-gnu/stage0-sysroot/bin/rustc -O foo.rs -o binary-updated - - name: Display binary size - run: | - ls -la binary-* - echo "SIZE_REFERENCE=$(stat -c '%s' binary-reference)" >> "$GITHUB_ENV" - echo "SIZE_UPDATED=$(stat -c '%s' binary-updated)" >> "$GITHUB_ENV" - - name: Post a PR comment if the size has changed + uses: ./backtrace/.github/actions/build-with-patched-std + with: + backtrace-commit: ${{ env.HEAD_COMMIT }} + main-rs: ${{ env.TEST_MAIN_RS }} + rustc-dir: ${{ env.RUSTC_DIR }} + id: size-updated + # There is no built-in way to "collect" all the outputs of a set of jobs + # run with a matrix strategy. Subsequent jobs that have a "needs" + # dependency on this one will be run once, when the last matrix job is + # run. Appending data to a single file within a matrix is subject to race + # conditions. 
So we write the size data to files with distinct names + # generated from the job index. + - name: Write sizes to file uses: actions/github-script@v6 + env: + SIZE_REFERENCE: ${{ steps.size-reference.outputs.test-binary-size }} + SIZE_UPDATED: ${{ steps.size-updated.outputs.test-binary-size }} + PLATFORM: ${{ matrix.platform }} with: script: | - const reference = process.env.SIZE_REFERENCE; - const updated = process.env.SIZE_UPDATED; - const diff = updated - reference; - const plus = diff > 0 ? "+" : ""; - const diff_str = `${plus}${diff}B`; - - if (diff !== 0) { - const percent = (((updated / reference) - 1) * 100).toFixed(2); - // The body is created here and wrapped so "weirdly" to avoid whitespace at the start of the lines, - // which is interpreted as a code block by Markdown. - const body = `Below is the size of a hello-world Rust program linked with libstd with backtrace. - - Original binary size: **${reference}B** - Updated binary size: **${updated}B** - Difference: **${diff_str}** (${percent}%)`; - - github.rest.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body - }) - } + const fs = require("fs"); + const path = require("path"); + + fs.mkdirSync(process.env.SIZE_DATA_DIR, {recursive: true}); + + const output_data = JSON.stringify({ + platform: process.env.PLATFORM, + reference: process.env.SIZE_REFERENCE, + updated: process.env.SIZE_UPDATED, + }); + + // The "wx" flag makes this fail if the file exists, which we want, + // because there should be no collisions. + fs.writeFileSync( + path.join(process.env.SIZE_DATA_DIR, process.env.SIZE_DATA_FILE), + output_data, + { flag: "wx" }, + ); + - name: Upload size data + uses: actions/upload-artifact@v3 + with: + name: size-files + path: ${{ env.SIZE_DATA_DIR }}/${{ env.SIZE_DATA_FILE }} + retention-days: 1 + if-no-files-found: error + report: + name: Report binary size changes + runs-on: ubuntu-latest + needs: measure + permissions: + pull-requests: write + steps: + # Clone backtrace to access Github composite actions to report size. + - uses: actions/checkout@v3 + - name: Download size data + uses: actions/download-artifact@v3 + with: + name: size-files + path: ${{ env.SIZE_DATA_DIR }} + - name: Analyze and report size changes + uses: ./.github/actions/report-code-size-changes + with: + data-directory: ${{ env.SIZE_DATA_DIR }} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/Cargo.lock rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/Cargo.lock --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/Cargo.lock 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/Cargo.lock 2023-12-21 16:55:31.000000000 +0000 @@ -0,0 +1,221 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "as-if-std" +version = "0.1.0" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "backtrace" +version = "0.3.69" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "cpp_demangle", + "dylib-dep", + "libc", + "libloading", + "miniz_oxide", + "object", + "rustc-demangle", + "rustc-serialize", + "serde", + "winapi", +] + +[[package]] +name = "cc" +version = "1.0.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +dependencies = [ + "libc", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cpp_demangle" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8227005286ec39567949b33df9896bcadfa6051bccca2488129f108ca23119" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "cpp_smoke_test" +version = "0.1.0" +dependencies = [ + "backtrace", + "cc", +] + +[[package]] +name = "dylib-dep" +version = "0.1.0" + +[[package]] +name = "gimli" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" + +[[package]] +name = "libc" +version = "0.2.147" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" + +[[package]] +name = "libloading" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if", + "winapi", +] + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "object" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe" +dependencies = [ + "memchr", +] + +[[package]] +name = "proc-macro2" +version = "1.0.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc-serialize" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" + +[[package]] +name = "serde" +version = "1.0.188" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.188" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "syn" +version = "2.0.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "unicode-ident" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/Cargo.toml 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/Cargo.toml 2023-12-21 16:55:31.000000000 +0000 @@ -13,8 +13,9 @@ """ autoexamples = true autotests = true -edition = "2018" +edition = "2021" exclude = ["/ci/"] +rust-version = "1.65.0" [workspace] members = ['crates/cpp_smoke_test', 'crates/as-if-std'] @@ -45,7 +46,7 @@ [target.'cfg(not(all(windows, target_env = "msvc", not(target_vendor = "uwp"))))'.dependencies.object] version = "0.32.0" default-features = false -features = ['read_core', 'elf', 'macho', 'pe', 'unaligned', 'archive'] +features = ['read_core', 'elf', 'macho', 'pe', 'xcoff', 'unaligned', 'archive'] [target.'cfg(windows)'.dependencies] winapi = { version = "0.3.9", optional = true } @@ -118,12 +119,12 @@ [[test]] name = "smoke" required-features = ["std"] -edition = '2018' +edition = '2021' [[test]] name = "accuracy" required-features = ["std"] -edition = '2018' +edition = '2021' [[test]] name = "concurrent-panics" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/build.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/build.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/build.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/build.rs 2023-12-21 16:55:31.000000000 +0000 @@ -11,17 +11,27 @@ } } 
+// Used to detect the value of the `__ANDROID_API__` +// builtin #define +const MARKER: &str = "BACKTRACE_RS_ANDROID_APIVERSION"; +const ANDROID_API_C: &str = " +BACKTRACE_RS_ANDROID_APIVERSION __ANDROID_API__ +"; + fn build_android() { - // Resolve `src/android-api.c` relative to this file. + // Create `android-api.c` on demand. // Required to support calling this from the `std` build script. - let android_api_c = Path::new(file!()) - .parent() - .unwrap() - .join("src/android-api.c"); - let expansion = match cc::Build::new().file(android_api_c).try_expand() { + let out_dir = env::var_os("OUT_DIR").unwrap(); + let android_api_c = Path::new(&out_dir).join("android-api.c"); + std::fs::write(&android_api_c, ANDROID_API_C).unwrap(); + + let expansion = match cc::Build::new().file(&android_api_c).try_expand() { Ok(result) => result, Err(e) => { - println!("failed to run C compiler: {}", e); + eprintln!( + "warning: android version detection failed while running C compiler: {}", + e + ); return; } }; @@ -29,13 +39,12 @@ Ok(s) => s, Err(_) => return, }; - println!("expanded android version detection:\n{}", expansion); - let marker = "APIVERSION"; - let i = match expansion.find(marker) { + eprintln!("expanded android version detection:\n{}", expansion); + let i = match expansion.find(MARKER) { Some(i) => i, None => return, }; - let version = match expansion[i + marker.len() + 1..].split_whitespace().next() { + let version = match expansion[i + MARKER.len() + 1..].split_whitespace().next() { Some(s) => s, None => return, }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/as-if-std/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/as-if-std/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/as-if-std/Cargo.toml 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/as-if-std/Cargo.toml 2023-12-21 16:55:31.000000000 +0000 @@ -2,7 +2,7 @@ name = "as-if-std" version = "0.1.0" authors = ["Alex Crichton "] -edition = "2018" +edition = "2021" publish = false [lib] @@ -24,7 +24,7 @@ version = "0.32.0" default-features = false optional = true -features = ['read_core', 'elf', 'macho', 'pe', 'unaligned', 'archive'] +features = ['read_core', 'elf', 'macho', 'pe', 'xcoff', 'unaligned', 'archive'] [build-dependencies] # Dependency of the `backtrace` crate diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/debuglink/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/debuglink/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/debuglink/Cargo.toml 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/debuglink/Cargo.toml 2023-12-21 16:55:31.000000000 +0000 @@ -1,7 +1,7 @@ [package] name = "debuglink" version = "0.1.0" -edition = "2018" +edition = "2021" [dependencies] backtrace = { path = "../.." 
} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/dylib-dep/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/dylib-dep/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/dylib-dep/Cargo.toml 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/dylib-dep/Cargo.toml 2023-12-21 16:55:31.000000000 +0000 @@ -1,7 +1,7 @@ [package] name = "dylib-dep" version = "0.1.0" -edition = "2018" +edition = "2021" authors = [] publish = false diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/line-tables-only/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/line-tables-only/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/line-tables-only/Cargo.toml 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/line-tables-only/Cargo.toml 2023-12-21 16:55:31.000000000 +0000 @@ -1,7 +1,7 @@ [package] name = "line-tables-only" version = "0.1.0" -edition = "2018" +edition = "2021" [build-dependencies] cc = "1.0" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/line-tables-only/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/line-tables-only/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/line-tables-only/src/lib.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/line-tables-only/src/lib.rs 2023-12-21 16:55:31.000000000 +0000 @@ -1,8 +1,8 @@ #[cfg(test)] mod tests { - use std::path::Path; use backtrace::Backtrace; use libc::c_void; + use std::path::Path; pub type Callback = extern "C" fn(data: *mut c_void); @@ -15,11 +15,12 @@ unsafe { *(data as *mut Option) = Some(bt) }; } - fn assert_contains(backtrace: &Backtrace, - expected_name: &str, - expected_file: &str, - expected_line: u32) { - + fn assert_contains( + backtrace: &Backtrace, + expected_name: &str, + expected_file: &str, + expected_line: u32, + ) { let expected_file = Path::new(expected_file); for frame in backtrace.frames() { @@ -34,7 +35,7 @@ } } - panic!("symbol {:?} not found in backtrace: {:?}", expected_name, backtrace); + panic!("symbol {expected_name:?} not found in backtrace: {backtrace:?}"); } /// Verifies that when debug info includes only lines tables the generated diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/macos_frames_test/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/macos_frames_test/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/macos_frames_test/Cargo.toml 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/macos_frames_test/Cargo.toml 2023-12-21 16:55:31.000000000 +0000 @@ -2,7 +2,7 @@ name = "macos_frames_test" version = "0.1.0" authors = ["Aaron Hill "] -edition = "2018" +edition = "2021" [dependencies.backtrace] path = "../.." diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/without_debuginfo/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/without_debuginfo/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/crates/without_debuginfo/Cargo.toml 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/crates/without_debuginfo/Cargo.toml 2023-12-21 16:55:31.000000000 +0000 @@ -2,7 +2,7 @@ name = "without_debuginfo" version = "0.1.0" authors = ["Alex Crichton "] -edition = "2018" +edition = "2021" [dependencies.backtrace] path = "../.." 
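The library/backtrace/build.rs change earlier in this diff stops shipping a checked-in `src/android-api.c` and instead writes the file into `OUT_DIR`, preprocesses it with the C compiler, and scans the expansion for the `BACKTRACE_RS_ANDROID_APIVERSION` marker to recover the value of `__ANDROID_API__`. The following is a simplified, standalone sketch of just that parsing step; the `parse_android_api` helper and the sample expansion string are illustrative and not taken from the patch.

```rust
// Standalone sketch of the marker-based parsing the patched build.rs performs
// on the C preprocessor output: locate the marker, then read the next
// whitespace-separated token as the Android API level.
const MARKER: &str = "BACKTRACE_RS_ANDROID_APIVERSION";

fn parse_android_api(expansion: &str) -> Option<u32> {
    let start = expansion.find(MARKER)? + MARKER.len();
    expansion[start..].split_whitespace().next()?.parse().ok()
}

fn main() {
    // Hypothetical stand-in for the preprocessed output of android-api.c
    // produced by an NDK toolchain where __ANDROID_API__ expands to 21.
    let expansion = "# 1 \"android-api.c\"\nBACKTRACE_RS_ANDROID_APIVERSION 21\n";
    assert_eq!(parse_android_api(expansion), Some(21));
    println!("detected __ANDROID_API__ = {:?}", parse_android_api(expansion));
}
```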
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/android-api.c rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/android-api.c --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/android-api.c 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/android-api.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -// Used from the build script to detect the value of the `__ANDROID_API__` -// builtin #define - -APIVERSION __ANDROID_API__ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/dbghelp.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/dbghelp.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/dbghelp.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/dbghelp.rs 2023-12-21 16:55:31.000000000 +0000 @@ -1,30 +1,32 @@ //! Backtrace strategy for MSVC platforms. //! -//! This module contains the ability to generate a backtrace on MSVC using one -//! of two possible methods. The `StackWalkEx` function is primarily used if -//! possible, but not all systems have that. Failing that the `StackWalk64` -//! function is used instead. Note that `StackWalkEx` is favored because it -//! handles debuginfo internally and returns inline frame information. +//! This module contains the ability to capture a backtrace on MSVC using one +//! of three possible methods. For `x86_64` and `aarch64`, we use `RtlVirtualUnwind` +//! to walk the stack one frame at a time. This function is much faster than using +//! `dbghelp!StackWalk*` because it does not load debug info to report inlined frames. +//! We still report inlined frames during symbolization by consulting the appropriate +//! `dbghelp` functions. +//! +//! For all other platforms, primarily `i686`, the `StackWalkEx` function is used if +//! possible, but not all systems have that. Failing that the `StackWalk64` function +//! is used instead. Note that `StackWalkEx` is favored because it handles debuginfo +//! internally and returns inline frame information. //! //! Note that all dbghelp support is loaded dynamically, see `src/dbghelp.rs` //! for more information about that. 
#![allow(bad_style)] -use super::super::{dbghelp, windows::*}; +use super::super::windows::*; use core::ffi::c_void; -use core::mem; - -#[derive(Clone, Copy)] -pub enum StackFrame { - New(STACKFRAME_EX), - Old(STACKFRAME64), -} #[derive(Clone, Copy)] pub struct Frame { - pub(crate) stack_frame: StackFrame, base_address: *mut c_void, + ip: *mut c_void, + sp: *mut c_void, + #[cfg(not(target_env = "gnu"))] + inline_context: Option, } // we're just sending around raw pointers and reading them, never interpreting @@ -34,62 +36,144 @@ impl Frame { pub fn ip(&self) -> *mut c_void { - self.addr_pc().Offset as *mut _ + self.ip } pub fn sp(&self) -> *mut c_void { - self.addr_stack().Offset as *mut _ + self.sp } pub fn symbol_address(&self) -> *mut c_void { - self.ip() + self.ip } pub fn module_base_address(&self) -> Option<*mut c_void> { Some(self.base_address) } - fn addr_pc(&self) -> &ADDRESS64 { - match self.stack_frame { - StackFrame::New(ref new) => &new.AddrPC, - StackFrame::Old(ref old) => &old.AddrPC, - } + #[cfg(not(target_env = "gnu"))] + pub fn inline_context(&self) -> Option { + self.inline_context } +} - fn addr_pc_mut(&mut self) -> &mut ADDRESS64 { - match self.stack_frame { - StackFrame::New(ref mut new) => &mut new.AddrPC, - StackFrame::Old(ref mut old) => &mut old.AddrPC, - } +#[repr(C, align(16))] // required by `CONTEXT`, is a FIXME in winapi right now +struct MyContext(CONTEXT); + +#[cfg(target_arch = "x86_64")] +impl MyContext { + #[inline(always)] + fn ip(&self) -> DWORD64 { + self.0.Rip } - fn addr_frame_mut(&mut self) -> &mut ADDRESS64 { - match self.stack_frame { - StackFrame::New(ref mut new) => &mut new.AddrFrame, - StackFrame::Old(ref mut old) => &mut old.AddrFrame, - } + #[inline(always)] + fn sp(&self) -> DWORD64 { + self.0.Rsp } +} - fn addr_stack(&self) -> &ADDRESS64 { - match self.stack_frame { - StackFrame::New(ref new) => &new.AddrStack, - StackFrame::Old(ref old) => &old.AddrStack, - } +#[cfg(target_arch = "aarch64")] +impl MyContext { + #[inline(always)] + fn ip(&self) -> DWORD64 { + self.0.Pc } - fn addr_stack_mut(&mut self) -> &mut ADDRESS64 { - match self.stack_frame { - StackFrame::New(ref mut new) => &mut new.AddrStack, - StackFrame::Old(ref mut old) => &mut old.AddrStack, - } + #[inline(always)] + fn sp(&self) -> DWORD64 { + self.0.Sp } } -#[repr(C, align(16))] // required by `CONTEXT`, is a FIXME in winapi right now -struct MyContext(CONTEXT); +#[cfg(target_arch = "x86")] +impl MyContext { + #[inline(always)] + fn ip(&self) -> DWORD { + self.0.Eip + } + + #[inline(always)] + fn sp(&self) -> DWORD { + self.0.Esp + } + + #[inline(always)] + fn fp(&self) -> DWORD { + self.0.Ebp + } +} + +#[cfg(target_arch = "arm")] +impl MyContext { + #[inline(always)] + fn ip(&self) -> DWORD { + self.0.Pc + } + + #[inline(always)] + fn sp(&self) -> DWORD { + self.0.Sp + } + + #[inline(always)] + fn fp(&self) -> DWORD { + self.0.R11 + } +} + +#[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))] +#[inline(always)] +pub unsafe fn trace(cb: &mut dyn FnMut(&super::Frame) -> bool) { + use core::ptr; + + let mut context = core::mem::zeroed::(); + RtlCaptureContext(&mut context.0); + + // Call `RtlVirtualUnwind` to find the previous stack frame, walking until we hit ip = 0. 
+ while context.ip() != 0 { + let mut base = 0; + + let fn_entry = RtlLookupFunctionEntry(context.ip(), &mut base, ptr::null_mut()); + if fn_entry.is_null() { + break; + } + + let frame = super::Frame { + inner: Frame { + base_address: fn_entry as *mut c_void, + ip: context.ip() as *mut c_void, + sp: context.sp() as *mut c_void, + #[cfg(not(target_env = "gnu"))] + inline_context: None, + }, + }; + + if !cb(&frame) { + break; + } + + let mut handler_data = 0usize; + let mut establisher_frame = 0; + RtlVirtualUnwind( + 0, + base, + context.ip(), + fn_entry, + &mut context.0, + &mut handler_data as *mut usize as *mut PVOID, + &mut establisher_frame, + ptr::null_mut(), + ); + } +} + +#[cfg(any(target_arch = "x86", target_arch = "arm"))] #[inline(always)] pub unsafe fn trace(cb: &mut dyn FnMut(&super::Frame) -> bool) { + use core::mem; + // Allocate necessary structures for doing the stack walk let process = GetCurrentProcess(); let thread = GetCurrentThread(); @@ -98,65 +182,40 @@ RtlCaptureContext(&mut context.0); // Ensure this process's symbols are initialized - let dbghelp = match dbghelp::init() { + let dbghelp = match super::super::dbghelp::init() { Ok(dbghelp) => dbghelp, Err(()) => return, // oh well... }; - // On x86_64 and ARM64 we opt to not use the default `Sym*` functions from - // dbghelp for getting the function table and module base. Instead we use - // the `RtlLookupFunctionEntry` function in kernel32 which will account for - // JIT compiler frames as well. These should be equivalent, but using - // `Rtl*` allows us to backtrace through JIT frames. - // - // Note that `RtlLookupFunctionEntry` only works for in-process backtraces, - // but that's all we support anyway, so it all lines up well. - cfg_if::cfg_if! { - if #[cfg(target_pointer_width = "64")] { - use core::ptr; - - unsafe extern "system" fn function_table_access(_process: HANDLE, addr: DWORD64) -> PVOID { - let mut base = 0; - RtlLookupFunctionEntry(addr, &mut base, ptr::null_mut()).cast() - } - - unsafe extern "system" fn get_module_base(_process: HANDLE, addr: DWORD64) -> DWORD64 { - let mut base = 0; - RtlLookupFunctionEntry(addr, &mut base, ptr::null_mut()); - base - } - } else { - let function_table_access = dbghelp.SymFunctionTableAccess64(); - let get_module_base = dbghelp.SymGetModuleBase64(); - } - } + let function_table_access = dbghelp.SymFunctionTableAccess64(); + let get_module_base = dbghelp.SymGetModuleBase64(); let process_handle = GetCurrentProcess(); + #[cfg(target_arch = "x86")] + let image = IMAGE_FILE_MACHINE_I386; + #[cfg(target_arch = "arm")] + let image = IMAGE_FILE_MACHINE_ARMNT; + // Attempt to use `StackWalkEx` if we can, but fall back to `StackWalk64` // since it's in theory supported on more systems. 
match (*dbghelp.dbghelp()).StackWalkEx() { Some(StackWalkEx) => { - let mut inner: STACKFRAME_EX = mem::zeroed(); - inner.StackFrameSize = mem::size_of::() as DWORD; - let mut frame = super::Frame { - inner: Frame { - stack_frame: StackFrame::New(inner), - base_address: 0 as _, - }, - }; - let image = init_frame(&mut frame.inner, &context.0); - let frame_ptr = match &mut frame.inner.stack_frame { - StackFrame::New(ptr) => ptr as *mut STACKFRAME_EX, - _ => unreachable!(), - }; + let mut stack_frame_ex: STACKFRAME_EX = mem::zeroed(); + stack_frame_ex.StackFrameSize = mem::size_of::() as DWORD; + stack_frame_ex.AddrPC.Offset = context.ip() as u64; + stack_frame_ex.AddrPC.Mode = AddrModeFlat; + stack_frame_ex.AddrStack.Offset = context.sp() as u64; + stack_frame_ex.AddrStack.Mode = AddrModeFlat; + stack_frame_ex.AddrFrame.Offset = context.fp() as u64; + stack_frame_ex.AddrFrame.Mode = AddrModeFlat; while StackWalkEx( image as DWORD, process, thread, - frame_ptr, - &mut context.0 as *mut CONTEXT as *mut _, + &mut stack_frame_ex, + &mut context.0 as *mut CONTEXT as PVOID, None, Some(function_table_access), Some(get_module_base), @@ -164,7 +223,16 @@ 0, ) == TRUE { - frame.inner.base_address = get_module_base(process_handle, frame.ip() as _) as _; + let frame = super::Frame { + inner: Frame { + base_address: get_module_base(process_handle, stack_frame_ex.AddrPC.Offset) + as *mut c_void, + ip: stack_frame_ex.AddrPC.Offset as *mut c_void, + sp: stack_frame_ex.AddrStack.Offset as *mut c_void, + #[cfg(not(target_env = "gnu"))] + inline_context: Some(stack_frame_ex.InlineFrameContext), + }, + }; if !cb(&frame) { break; @@ -172,31 +240,36 @@ } } None => { - let mut frame = super::Frame { - inner: Frame { - stack_frame: StackFrame::Old(mem::zeroed()), - base_address: 0 as _, - }, - }; - let image = init_frame(&mut frame.inner, &context.0); - let frame_ptr = match &mut frame.inner.stack_frame { - StackFrame::Old(ptr) => ptr as *mut STACKFRAME64, - _ => unreachable!(), - }; + let mut stack_frame64: STACKFRAME64 = mem::zeroed(); + stack_frame64.AddrPC.Offset = context.ip() as u64; + stack_frame64.AddrPC.Mode = AddrModeFlat; + stack_frame64.AddrStack.Offset = context.sp() as u64; + stack_frame64.AddrStack.Mode = AddrModeFlat; + stack_frame64.AddrFrame.Offset = context.fp() as u64; + stack_frame64.AddrFrame.Mode = AddrModeFlat; while dbghelp.StackWalk64()( image as DWORD, process, thread, - frame_ptr, - &mut context.0 as *mut CONTEXT as *mut _, + &mut stack_frame64, + &mut context.0 as *mut CONTEXT as PVOID, None, Some(function_table_access), Some(get_module_base), None, ) == TRUE { - frame.inner.base_address = get_module_base(process_handle, frame.ip() as _) as _; + let frame = super::Frame { + inner: Frame { + base_address: get_module_base(process_handle, stack_frame64.AddrPC.Offset) + as *mut c_void, + ip: stack_frame64.AddrPC.Offset as *mut c_void, + sp: stack_frame64.AddrStack.Offset as *mut c_void, + #[cfg(not(target_env = "gnu"))] + inline_context: None, + }, + }; if !cb(&frame) { break; @@ -205,53 +278,3 @@ } } } - -#[cfg(target_arch = "x86_64")] -fn init_frame(frame: &mut Frame, ctx: &CONTEXT) -> WORD { - frame.addr_pc_mut().Offset = ctx.Rip as u64; - frame.addr_pc_mut().Mode = AddrModeFlat; - frame.addr_stack_mut().Offset = ctx.Rsp as u64; - frame.addr_stack_mut().Mode = AddrModeFlat; - frame.addr_frame_mut().Offset = ctx.Rbp as u64; - frame.addr_frame_mut().Mode = AddrModeFlat; - - IMAGE_FILE_MACHINE_AMD64 -} - -#[cfg(target_arch = "x86")] -fn init_frame(frame: &mut Frame, ctx: &CONTEXT) -> WORD { 
- frame.addr_pc_mut().Offset = ctx.Eip as u64; - frame.addr_pc_mut().Mode = AddrModeFlat; - frame.addr_stack_mut().Offset = ctx.Esp as u64; - frame.addr_stack_mut().Mode = AddrModeFlat; - frame.addr_frame_mut().Offset = ctx.Ebp as u64; - frame.addr_frame_mut().Mode = AddrModeFlat; - - IMAGE_FILE_MACHINE_I386 -} - -#[cfg(target_arch = "aarch64")] -fn init_frame(frame: &mut Frame, ctx: &CONTEXT) -> WORD { - frame.addr_pc_mut().Offset = ctx.Pc as u64; - frame.addr_pc_mut().Mode = AddrModeFlat; - frame.addr_stack_mut().Offset = ctx.Sp as u64; - frame.addr_stack_mut().Mode = AddrModeFlat; - unsafe { - frame.addr_frame_mut().Offset = ctx.u.s().Fp as u64; - } - frame.addr_frame_mut().Mode = AddrModeFlat; - IMAGE_FILE_MACHINE_ARM64 -} - -#[cfg(target_arch = "arm")] -fn init_frame(frame: &mut Frame, ctx: &CONTEXT) -> WORD { - frame.addr_pc_mut().Offset = ctx.Pc as u64; - frame.addr_pc_mut().Mode = AddrModeFlat; - frame.addr_stack_mut().Offset = ctx.Sp as u64; - frame.addr_stack_mut().Mode = AddrModeFlat; - unsafe { - frame.addr_frame_mut().Offset = ctx.R11 as u64; - } - frame.addr_frame_mut().Mode = AddrModeFlat; - IMAGE_FILE_MACHINE_ARMNT -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/libunwind.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/libunwind.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/libunwind.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/libunwind.rs 2023-12-21 16:55:31.000000000 +0000 @@ -40,7 +40,18 @@ Frame::Raw(ctx) => ctx, Frame::Cloned { ip, .. } => return ip, }; - unsafe { uw::_Unwind_GetIP(ctx) as *mut c_void } + #[allow(unused_mut)] + let mut ip = unsafe { uw::_Unwind_GetIP(ctx) as *mut c_void }; + + // To reduce TCB size in SGX enclaves, we do not want to implement + // symbol resolution functionality. Rather, we can print the offset of + // the address here, which could be later mapped to correct function. + #[cfg(all(target_env = "sgx", target_vendor = "fortanix"))] + { + let image_base = super::get_image_base(); + ip = usize::wrapping_sub(ip as usize, image_base as _) as _; + } + ip } pub fn sp(&self) -> *mut c_void { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/mod.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/backtrace/mod.rs 2023-12-21 16:55:31.000000000 +0000 @@ -125,6 +125,39 @@ } } +#[cfg(all(target_env = "sgx", target_vendor = "fortanix", not(feature = "std")))] +mod sgx_no_std_image_base { + use core::ffi::c_void; + use core::sync::atomic::{AtomicUsize, Ordering::SeqCst}; + + static IMAGE_BASE: AtomicUsize = AtomicUsize::new(0); + + /// Set the image base address. This is only available for Fortanix SGX + /// target when the `std` feature is not enabled. This can be used in the + /// standard library to set the correct base address. 
+ #[doc(hidden)] + pub fn set_image_base(base_addr: *mut c_void) { + IMAGE_BASE.store(base_addr as _, SeqCst); + } + + pub(crate) fn get_image_base() -> *mut c_void { + IMAGE_BASE.load(SeqCst) as _ + } +} + +#[cfg(all(target_env = "sgx", target_vendor = "fortanix", not(feature = "std")))] +pub use self::sgx_no_std_image_base::set_image_base; + +#[cfg(all(target_env = "sgx", target_vendor = "fortanix", not(feature = "std")))] +#[deny(unused)] +pub(crate) use self::sgx_no_std_image_base::get_image_base; + +#[cfg(all(target_env = "sgx", target_vendor = "fortanix", feature = "std"))] +#[deny(unused)] +pub(crate) fn get_image_base() -> *mut c_void { + std::os::fortanix_sgx::mem::image_base() as _ +} + cfg_if::cfg_if! { // This needs to come first, to ensure that // Miri takes priority over the host platform @@ -153,8 +186,6 @@ mod dbghelp; use self::dbghelp::trace as trace_imp; pub(crate) use self::dbghelp::Frame as FrameImp; - #[cfg(target_env = "msvc")] // only used in dbghelp symbolize - pub(crate) use self::dbghelp::StackFrame; } else { mod noop; use self::noop::trace as trace_imp; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/dbghelp.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/dbghelp.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/dbghelp.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/dbghelp.rs 2023-12-21 16:55:31.000000000 +0000 @@ -34,8 +34,8 @@ mod dbghelp { use crate::windows::*; pub use winapi::um::dbghelp::{ - StackWalk64, StackWalkEx, SymCleanup, SymFromAddrW, SymFunctionTableAccess64, - SymGetLineFromAddrW64, SymGetModuleBase64, SymGetOptions, SymInitializeW, SymSetOptions, + StackWalk64, StackWalkEx, SymFromAddrW, SymFunctionTableAccess64, SymGetLineFromAddrW64, + SymGetModuleBase64, SymGetOptions, SymInitializeW, SymSetOptions, }; extern "system" { @@ -55,6 +55,16 @@ pdwDisplacement: PDWORD, Line: PIMAGEHLP_LINEW64, ) -> BOOL; + pub fn SymAddrIncludeInlineTrace(hProcess: HANDLE, Address: DWORD64) -> DWORD; + pub fn SymQueryInlineTrace( + hProcess: HANDLE, + StartAddress: DWORD64, + StartContext: DWORD, + StartRetAddress: DWORD64, + CurAddress: DWORD64, + CurContext: LPDWORD, + CurFrameIndex: LPDWORD, + ) -> BOOL; } pub fn assert_equal_types(a: T, _b: T) -> T { @@ -164,7 +174,6 @@ path: PCWSTR, invade: BOOL ) -> BOOL; - fn SymCleanup(handle: HANDLE) -> BOOL; fn StackWalk64( MachineType: DWORD, hProcess: HANDLE, @@ -184,18 +193,6 @@ hProcess: HANDLE, AddrBase: DWORD64 ) -> DWORD64; - fn SymFromAddrW( - hProcess: HANDLE, - Address: DWORD64, - Displacement: PDWORD64, - Symbol: PSYMBOL_INFOW - ) -> BOOL; - fn SymGetLineFromAddrW64( - hProcess: HANDLE, - dwAddr: DWORD64, - pdwDisplacement: PDWORD, - Line: PIMAGEHLP_LINEW64 - ) -> BOOL; fn StackWalkEx( MachineType: DWORD, hProcess: HANDLE, @@ -223,6 +220,31 @@ pdwDisplacement: PDWORD, Line: PIMAGEHLP_LINEW64 ) -> BOOL; + fn SymAddrIncludeInlineTrace( + hProcess: HANDLE, + Address: DWORD64 + ) -> DWORD; + fn SymQueryInlineTrace( + hProcess: HANDLE, + StartAddress: DWORD64, + StartContext: DWORD, + StartRetAddress: DWORD64, + CurAddress: DWORD64, + CurContext: LPDWORD, + CurFrameIndex: LPDWORD + ) -> BOOL; + fn SymFromAddrW( + hProcess: HANDLE, + Address: DWORD64, + Displacement: PDWORD64, + Symbol: PSYMBOL_INFOW + ) -> BOOL; + fn SymGetLineFromAddrW64( + hProcess: HANDLE, + dwAddr: DWORD64, + pdwDisplacement: PDWORD, + Line: PIMAGEHLP_LINEW64 + ) -> BOOL; } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/lib.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/lib.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/lib.rs 2023-12-21 16:55:31.000000000 +0000 @@ -134,6 +134,12 @@ } } +cfg_if::cfg_if! { + if #[cfg(all(target_env = "sgx", target_vendor = "fortanix", not(feature = "std")))] { + pub use self::backtrace::set_image_base; + } +} + #[allow(dead_code)] struct Bomb { enabled: bool, @@ -186,7 +192,14 @@ } } -#[cfg(all(windows, not(target_vendor = "uwp")))] +#[cfg(all( + windows, + any( + target_env = "msvc", + all(target_env = "gnu", any(target_arch = "x86", target_arch = "arm")) + ), + not(target_vendor = "uwp") +))] mod dbghelp; #[cfg(windows)] mod windows; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/print.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/print.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/print.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/print.rs 2023-12-21 16:55:31.000000000 +0000 @@ -219,7 +219,7 @@ #[allow(unused_mut)] fn print_raw_generic( &mut self, - mut frame_ip: *mut c_void, + frame_ip: *mut c_void, symbol_name: Option>, filename: Option>, lineno: Option, @@ -233,15 +233,6 @@ } } - // To reduce TCB size in Sgx enclave, we do not want to implement symbol - // resolution functionality. Rather, we can print the offset of the - // address here, which could be later mapped to correct function. - #[cfg(all(feature = "std", target_env = "sgx", target_vendor = "fortanix"))] - { - let image_base = std::os::fortanix_sgx::mem::image_base(); - frame_ip = usize::wrapping_sub(frame_ip as usize, image_base as _) as _; - } - // Print the index of the frame as well as the optional instruction // pointer of the frame. If we're beyond the first symbol of this frame // though we just print appropriate whitespace. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/dbghelp.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/dbghelp.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/dbghelp.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/dbghelp.rs 2023-12-21 16:55:31.000000000 +0000 @@ -17,7 +17,7 @@ #![allow(bad_style)] -use super::super::{backtrace::StackFrame, dbghelp, windows::*}; +use super::super::{dbghelp, windows::*}; use super::{BytesOrWideString, ResolveWhat, SymbolName}; use core::char; use core::ffi::c_void; @@ -78,54 +78,103 @@ Err(()) => return, // oh well... }; + let resolve_inner = if (*dbghelp.dbghelp()).SymAddrIncludeInlineTrace().is_some() { + // We are on a version of dbghelp 6.2+, which contains the more modern + // Inline APIs. + resolve_with_inline + } else { + // We are on an older version of dbghelp which doesn't contain the Inline + // APIs. 
+ resolve_legacy + }; match what { - ResolveWhat::Address(_) => resolve_without_inline(&dbghelp, what.address_or_ip(), cb), - ResolveWhat::Frame(frame) => match &frame.inner.stack_frame { - StackFrame::New(frame) => resolve_with_inline(&dbghelp, frame, cb), - StackFrame::Old(_) => resolve_without_inline(&dbghelp, frame.ip(), cb), - }, + ResolveWhat::Address(_) => resolve_inner(&dbghelp, what.address_or_ip(), None, cb), + ResolveWhat::Frame(frame) => { + resolve_inner(&dbghelp, frame.ip(), frame.inner.inline_context(), cb) + } } } -unsafe fn resolve_with_inline( +/// Resolve the address using the legacy dbghelp API. +/// +/// This should work all the way down to Windows XP. The inline context is +/// ignored, since this concept was only introduced in dbghelp 6.2+. +unsafe fn resolve_legacy( dbghelp: &dbghelp::Init, - frame: &STACKFRAME_EX, + addr: *mut c_void, + _inline_context: Option, cb: &mut dyn FnMut(&super::Symbol), ) { + let addr = super::adjust_ip(addr) as DWORD64; do_resolve( - |info| { - dbghelp.SymFromInlineContextW()( - GetCurrentProcess(), - super::adjust_ip(frame.AddrPC.Offset as *mut _) as u64, - frame.InlineFrameContext, - &mut 0, - info, - ) - }, - |line| { - dbghelp.SymGetLineFromInlineContextW()( - GetCurrentProcess(), - super::adjust_ip(frame.AddrPC.Offset as *mut _) as u64, - frame.InlineFrameContext, - 0, - &mut 0, - line, - ) - }, + |info| dbghelp.SymFromAddrW()(GetCurrentProcess(), addr, &mut 0, info), + |line| dbghelp.SymGetLineFromAddrW64()(GetCurrentProcess(), addr, &mut 0, line), cb, ) } -unsafe fn resolve_without_inline( +/// Resolve the address using the modern dbghelp APIs. +/// +/// Note that calling this function requires having dbghelp 6.2+ loaded - and +/// will panic otherwise. +unsafe fn resolve_with_inline( dbghelp: &dbghelp::Init, addr: *mut c_void, + inline_context: Option, cb: &mut dyn FnMut(&super::Symbol), ) { - do_resolve( - |info| dbghelp.SymFromAddrW()(GetCurrentProcess(), addr as DWORD64, &mut 0, info), - |line| dbghelp.SymGetLineFromAddrW64()(GetCurrentProcess(), addr as DWORD64, &mut 0, line), - cb, - ) + let current_process = GetCurrentProcess(); + + let addr = super::adjust_ip(addr) as DWORD64; + + let (inlined_frame_count, inline_context) = if let Some(ic) = inline_context { + (0, ic) + } else { + let mut inlined_frame_count = dbghelp.SymAddrIncludeInlineTrace()(current_process, addr); + + let mut inline_context = 0; + + // If there is are inlined frames but we can't load them for some reason OR if there are no + // inlined frames, then we disregard inlined_frame_count and inline_context. 
+ if (inlined_frame_count > 0 + && dbghelp.SymQueryInlineTrace()( + current_process, + addr, + 0, + addr, + addr, + &mut inline_context, + &mut 0, + ) != TRUE) + || inlined_frame_count == 0 + { + inlined_frame_count = 0; + inline_context = 0; + } + + (inlined_frame_count, inline_context) + }; + + let last_inline_context = inline_context + 1 + inlined_frame_count; + + for inline_context in inline_context..last_inline_context { + do_resolve( + |info| { + dbghelp.SymFromInlineContextW()(current_process, addr, inline_context, &mut 0, info) + }, + |line| { + dbghelp.SymGetLineFromInlineContextW()( + current_process, + addr, + inline_context, + 0, + &mut 0, + line, + ) + }, + cb, + ); + } } unsafe fn do_resolve( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/elf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/elf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/elf.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/elf.rs 2023-12-21 16:55:31.000000000 +0000 @@ -308,7 +308,7 @@ fn debug_path_exists() -> bool { cfg_if::cfg_if! { - if #[cfg(any(target_os = "freebsd", target_os = "linux"))] { + if #[cfg(any(target_os = "freebsd", target_os = "hurd", target_os = "linux"))] { use core::sync::atomic::{AtomicU8, Ordering}; static DEBUG_PATH_EXISTS: AtomicU8 = AtomicU8::new(0); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/libs_aix.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/libs_aix.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/libs_aix.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/libs_aix.rs 2023-12-21 16:55:31.000000000 +0000 @@ -0,0 +1,74 @@ +use super::mystd::borrow::ToOwned; +use super::mystd::env; +use super::mystd::ffi::{CStr, OsStr}; +use super::mystd::io::Error; +use super::mystd::os::unix::prelude::*; +use super::xcoff; +use super::{Library, LibrarySegment, Vec}; +use alloc::vec; +use core::mem; + +const EXE_IMAGE_BASE: u64 = 0x100000000; + +/// On AIX, we use `loadquery` with `L_GETINFO` flag to query libraries mmapped. +/// See https://www.ibm.com/docs/en/aix/7.2?topic=l-loadquery-subroutine for +/// detailed information of `loadquery`. +pub(super) fn native_libraries() -> Vec { + let mut ret = Vec::new(); + unsafe { + let mut buffer = vec![mem::zeroed::(); 64]; + loop { + if libc::loadquery( + libc::L_GETINFO, + buffer.as_mut_ptr() as *mut libc::c_char, + (mem::size_of::() * buffer.len()) as u32, + ) != -1 + { + break; + } else { + match Error::last_os_error().raw_os_error() { + Some(libc::ENOMEM) => { + buffer.resize(buffer.len() * 2, mem::zeroed::()); + } + Some(_) => { + // If other error occurs, return empty libraries. 
+ return Vec::new(); + } + _ => unreachable!(), + } + } + } + let mut current = buffer.as_mut_ptr(); + loop { + let text_base = (*current).ldinfo_textorg as usize; + let filename_ptr: *const libc::c_char = &(*current).ldinfo_filename[0]; + let bytes = CStr::from_ptr(filename_ptr).to_bytes(); + let member_name_ptr = filename_ptr.offset((bytes.len() + 1) as isize); + let mut filename = OsStr::from_bytes(bytes).to_owned(); + if text_base == EXE_IMAGE_BASE as usize { + if let Ok(exe) = env::current_exe() { + filename = exe.into_os_string(); + } + } + let bytes = CStr::from_ptr(member_name_ptr).to_bytes(); + let member_name = OsStr::from_bytes(bytes).to_owned(); + if let Some(image) = xcoff::parse_image(filename.as_ref(), &member_name) { + ret.push(Library { + name: filename, + member_name, + segments: vec![LibrarySegment { + stated_virtual_memory_address: image.base as usize, + len: image.size, + }], + bias: (text_base + image.offset).wrapping_sub(image.base as usize), + }); + } + if (*current).ldinfo_next == 0 { + break; + } + current = (current as *mut libc::c_char).offset((*current).ldinfo_next as isize) + as *mut libc::ld_info; + } + } + return ret; +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/libs_dl_iterate_phdr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/libs_dl_iterate_phdr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/libs_dl_iterate_phdr.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/libs_dl_iterate_phdr.rs 2023-12-21 16:55:31.000000000 +0000 @@ -18,14 +18,18 @@ } fn infer_current_exe(base_addr: usize) -> OsString { - if let Ok(entries) = super::parse_running_mmaps::parse_maps() { - let opt_path = entries - .iter() - .find(|e| e.ip_matches(base_addr) && e.pathname().len() > 0) - .map(|e| e.pathname()) - .cloned(); - if let Some(path) = opt_path { - return path; + cfg_if::cfg_if! 
{ + if #[cfg(not(target_os = "hurd"))] { + if let Ok(entries) = super::parse_running_mmaps::parse_maps() { + let opt_path = entries + .iter() + .find(|e| e.ip_matches(base_addr) && e.pathname().len() > 0) + .map(|e| e.pathname()) + .cloned(); + if let Some(path) = opt_path { + return path; + } + } } } env::current_exe().map(|e| e.into()).unwrap_or_default() diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/xcoff.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/xcoff.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/xcoff.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli/xcoff.rs 2023-12-21 16:55:31.000000000 +0000 @@ -0,0 +1,186 @@ +use super::mystd::ffi::{OsStr, OsString}; +use super::mystd::os::unix::ffi::OsStrExt; +use super::mystd::str; +use super::{gimli, Context, Endian, EndianSlice, Mapping, Path, Stash, Vec}; +use alloc::sync::Arc; +use core::ops::Deref; +use object::read::archive::ArchiveFile; +use object::read::xcoff::{FileHeader, SectionHeader, XcoffFile, XcoffSymbol}; +use object::Object as _; +use object::ObjectSection as _; +use object::ObjectSymbol as _; +use object::SymbolFlags; + +#[cfg(target_pointer_width = "32")] +type Xcoff = object::xcoff::FileHeader32; +#[cfg(target_pointer_width = "64")] +type Xcoff = object::xcoff::FileHeader64; + +impl Mapping { + pub fn new(path: &Path, member_name: &OsString) -> Option { + let map = super::mmap(path)?; + Mapping::mk(map, |data, stash| { + if member_name.is_empty() { + Context::new(stash, Object::parse(data)?, None, None) + } else { + let archive = ArchiveFile::parse(data).ok()?; + for member in archive + .members() + .filter_map(|m| m.ok()) + .filter(|m| OsStr::from_bytes(m.name()) == member_name) + { + let member_data = member.data(data).ok()?; + if let Some(obj) = Object::parse(member_data) { + return Context::new(stash, obj, None, None); + } + } + None + } + }) + } +} + +struct ParsedSym<'a> { + address: u64, + size: u64, + name: &'a str, +} + +pub struct Object<'a> { + syms: Vec>, + file: XcoffFile<'a, Xcoff>, +} + +pub struct Image { + pub offset: usize, + pub base: u64, + pub size: usize, +} + +pub fn parse_xcoff(data: &[u8]) -> Option { + let mut offset = 0; + let header = Xcoff::parse(data, &mut offset).ok()?; + let _ = header.aux_header(data, &mut offset).ok()?; + let sections = header.sections(data, &mut offset).ok()?; + if let Some(section) = sections.iter().find(|s| { + if let Ok(name) = str::from_utf8(&s.s_name()[0..5]) { + name == ".text" + } else { + false + } + }) { + Some(Image { + offset: section.s_scnptr() as usize, + base: section.s_paddr() as u64, + size: section.s_size() as usize, + }) + } else { + None + } +} + +pub fn parse_image(path: &Path, member_name: &OsString) -> Option { + let map = super::mmap(path)?; + let data = map.deref(); + if member_name.is_empty() { + return parse_xcoff(data); + } else { + let archive = ArchiveFile::parse(data).ok()?; + for member in archive + .members() + .filter_map(|m| m.ok()) + .filter(|m| OsStr::from_bytes(m.name()) == member_name) + { + let member_data = member.data(data).ok()?; + if let Some(image) = parse_xcoff(member_data) { + return Some(image); + } + } + None + } +} + +impl<'a> Object<'a> { + fn get_concrete_size(file: &XcoffFile<'a, Xcoff>, sym: &XcoffSymbol<'a, '_, Xcoff>) -> u64 { + match sym.flags() { + SymbolFlags::Xcoff { + n_sclass: _, + x_smtyp: _, + x_smclas: _, + containing_csect: Some(index), + } => { + if let 
Ok(tgt_sym) = file.symbol_by_index(index) { + Self::get_concrete_size(file, &tgt_sym) + } else { + 0 + } + } + _ => sym.size(), + } + } + + fn parse(data: &'a [u8]) -> Option> { + let file = XcoffFile::parse(data).ok()?; + let mut syms = file + .symbols() + .filter_map(|sym| { + let name = sym.name().map_or("", |v| v); + let address = sym.address(); + let size = Self::get_concrete_size(&file, &sym); + if name == ".text" || name == ".data" { + // We don't want to include ".text" and ".data" symbols. + // If they are included, since their ranges cover other + // symbols, when searching a symbol for a given address, + // ".text" or ".data" is returned. That's not what we expect. + None + } else { + Some(ParsedSym { + address, + size, + name, + }) + } + }) + .collect::>(); + syms.sort_by_key(|s| s.address); + Some(Object { syms, file }) + } + + pub fn section(&self, _: &Stash, name: &str) -> Option<&'a [u8]> { + Some(self.file.section_by_name(name)?.data().ok()?) + } + + pub fn search_symtab<'b>(&'b self, addr: u64) -> Option<&'b [u8]> { + // Symbols, except ".text" and ".data", are sorted and are not overlapped each other, + // so we can just perform a binary search here. + let i = match self.syms.binary_search_by_key(&addr, |sym| sym.address) { + Ok(i) => i, + Err(i) => i.checked_sub(1)?, + }; + let sym = self.syms.get(i)?; + if (sym.address..sym.address + sym.size).contains(&addr) { + // On AIX, for a function call, for example, `foo()`, we have + // two symbols `foo` and `.foo`. `foo` references the function + // descriptor and `.foo` references the function entry. + // See https://www.ibm.com/docs/en/xl-fortran-aix/16.1.0?topic=calls-linkage-convention-function + // for more information. + // We trim the prefix `.` here, so that the rust demangler can work + // properly. 
+ Some(sym.name.trim_start_matches(".").as_bytes()) + } else { + None + } + } + + pub(super) fn search_object_map(&self, _addr: u64) -> Option<(&Context<'_>, u64)> { + None + } +} + +pub(super) fn handle_split_dwarf<'data>( + _package: Option<&gimli::DwarfPackage>>, + _stash: &'data Stash, + _load: addr2line::SplitDwarfLoad>, +) -> Option>>> { + None +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/symbolize/gimli.rs 2023-12-21 16:55:31.000000000 +0000 @@ -35,12 +35,14 @@ target_os = "freebsd", target_os = "fuchsia", target_os = "haiku", + target_os = "hurd", target_os = "ios", target_os = "linux", target_os = "macos", target_os = "openbsd", target_os = "solaris", target_os = "illumos", + target_os = "aix", ))] { #[path = "gimli/mmap_unix.rs"] mod mmap; @@ -116,8 +118,17 @@ dwp: Option>, ) -> Option> { let mut sections = gimli::Dwarf::load(|id| -> Result<_, ()> { - let data = object.section(stash, id.name()).unwrap_or(&[]); - Ok(EndianSlice::new(data, Endian)) + if cfg!(not(target_os = "aix")) { + let data = object.section(stash, id.name()).unwrap_or(&[]); + Ok(EndianSlice::new(data, Endian)) + } else { + if let Some(name) = id.xcoff_name() { + let data = object.section(stash, name).unwrap_or(&[]); + Ok(EndianSlice::new(data, Endian)) + } else { + Ok(EndianSlice::new(&[], Endian)) + } + } }) .ok()?; @@ -192,6 +203,9 @@ ))] { mod macho; use self::macho::{handle_split_dwarf, Object}; + } else if #[cfg(target_os = "aix")] { + mod xcoff; + use self::xcoff::{handle_split_dwarf, Object}; } else { mod elf; use self::elf::{handle_split_dwarf, Object}; @@ -218,6 +232,7 @@ target_os = "linux", target_os = "fuchsia", target_os = "freebsd", + target_os = "hurd", target_os = "openbsd", target_os = "netbsd", all(target_os = "android", feature = "dl_iterate_phdr"), @@ -234,6 +249,9 @@ } else if #[cfg(target_os = "haiku")] { mod libs_haiku; use libs_haiku::native_libraries; + } else if #[cfg(target_os = "aix")] { + mod libs_aix; + use libs_aix::native_libraries; } else { // Everything else should doesn't know how to load native libraries. fn native_libraries() -> Vec { @@ -261,6 +279,13 @@ struct Library { name: OsString, + #[cfg(target_os = "aix")] + /// On AIX, the library mmapped can be a member of a big-archive file. + /// For example, with a big-archive named libfoo.a containing libbar.so, + /// one can use `dlopen("libfoo.a(libbar.so)", RTLD_MEMBER | RTLD_LAZY)` + /// to use the `libbar.so` library. In this case, only `libbar.so` is + /// mmapped, not the whole `libfoo.a`. + member_name: OsString, /// Segments of this library loaded into memory, and where they're loaded. segments: Vec, /// The "bias" of this library, typically where it's loaded into memory. 
@@ -280,6 +305,19 @@ len: usize, } +#[cfg(target_os = "aix")] +fn create_mapping(lib: &Library) -> Option { + let name = &lib.name; + let member_name = &lib.member_name; + Mapping::new(name.as_ref(), member_name) +} + +#[cfg(not(target_os = "aix"))] +fn create_mapping(lib: &Library) -> Option { + let name = &lib.name; + Mapping::new(name.as_ref()) +} + // unsafe because this is required to be externally synchronized pub unsafe fn clear_symbol_cache() { Cache::with_global(|cache| cache.mappings.clear()); @@ -360,8 +398,7 @@ // When the mapping is not in the cache, create a new mapping, // insert it into the front of the cache, and evict the oldest cache // entry if necessary. - let name = &self.libraries[lib].name; - let mapping = Mapping::new(name.as_ref())?; + let mapping = create_mapping(&self.libraries[lib])?; if self.mappings.len() == MAPPINGS_CACHE_SIZE { self.mappings.pop(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/windows.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/windows.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/src/windows.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/src/windows.rs 2023-12-21 16:55:31.000000000 +0000 @@ -19,6 +19,9 @@ pub use self::winapi::PUNWIND_HISTORY_TABLE; #[cfg(target_pointer_width = "64")] pub use self::winapi::PRUNTIME_FUNCTION; + pub use self::winapi::PEXCEPTION_ROUTINE; + #[cfg(target_pointer_width = "64")] + pub use self::winapi::PKNONVOLATILE_CONTEXT_POINTERS; mod winapi { pub use winapi::ctypes::*; @@ -35,6 +38,22 @@ pub use winapi::um::tlhelp32::*; pub use winapi::um::winbase::*; pub use winapi::um::winnt::*; + + // Work around winapi not having this function on aarch64. + #[cfg(target_arch = "aarch64")] + #[link(name = "kernel32")] + extern "system" { + pub fn RtlVirtualUnwind( + HandlerType: ULONG, + ImageBase: ULONG64, + ControlPc: ULONG64, + FunctionEntry: PRUNTIME_FUNCTION, + ContextRecord: PCONTEXT, + HandlerData: *mut PVOID, + EstablisherFrame: PULONG64, + ContextPointers: PKNONVOLATILE_CONTEXT_POINTERS + ) -> PEXCEPTION_ROUTINE; + } } } else { pub use core::ffi::c_void; @@ -45,6 +64,9 @@ pub type PRUNTIME_FUNCTION = *mut c_void; #[cfg(target_pointer_width = "64")] pub type PUNWIND_HISTORY_TABLE = *mut c_void; + pub type PEXCEPTION_ROUTINE = *mut c_void; + #[cfg(target_pointer_width = "64")] + pub type PKNONVOLATILE_CONTEXT_POINTERS = *mut c_void; } } @@ -359,6 +381,7 @@ pub type LPCSTR = *const i8; pub type PWSTR = *mut u16; pub type WORD = u16; + pub type USHORT = u16; pub type ULONG = u32; pub type ULONG64 = u64; pub type WCHAR = u16; @@ -370,6 +393,8 @@ pub type LPVOID = *mut c_void; pub type LPCVOID = *const c_void; pub type LPMODULEENTRY32W = *mut MODULEENTRY32W; + pub type PULONG = *mut ULONG; + pub type PULONG64 = *mut ULONG64; #[link(name = "kernel32")] extern "system" { @@ -435,6 +460,33 @@ lpme: LPMODULEENTRY32W, ) -> BOOL; } + + #[link(name = "ntdll")] + extern "system" { + pub fn RtlCaptureStackBackTrace( + FramesToSkip: ULONG, + FramesToCapture: ULONG, + BackTrace: *mut PVOID, + BackTraceHash: PULONG, + ) -> USHORT; + } +} + +#[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))] +ffi! 
{ + #[link(name = "kernel32")] + extern "system" { + pub fn RtlVirtualUnwind( + HandlerType: ULONG, + ImageBase: ULONG64, + ControlPc: ULONG64, + FunctionEntry: PRUNTIME_FUNCTION, + ContextRecord: PCONTEXT, + HandlerData: *mut PVOID, + EstablisherFrame: PULONG64, + ContextPointers: PKNONVOLATILE_CONTEXT_POINTERS + ) -> PEXCEPTION_ROUTINE; + } } #[cfg(target_pointer_width = "64")] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/tests/accuracy/main.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/tests/accuracy/main.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/tests/accuracy/main.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/tests/accuracy/main.rs 2023-12-21 16:55:31.000000000 +0000 @@ -31,6 +31,8 @@ dir.push("dylib_dep.dll"); } else if cfg!(target_os = "macos") { dir.push("libdylib_dep.dylib"); + } else if cfg!(target_os = "aix") { + dir.push("libdylib_dep.a"); } else { dir.push("libdylib_dep.so"); } @@ -103,7 +105,7 @@ loop { let sym = match symbols.next() { Some(sym) => sym, - None => panic!("failed to find {}:{}", file, line), + None => panic!("failed to find {file}:{line}"), }; if let Some(filename) = sym.filename() { if let Some(lineno) = sym.lineno() { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/tests/sgx-image-base.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/tests/sgx-image-base.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/tests/sgx-image-base.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/tests/sgx-image-base.rs 2023-12-21 16:55:31.000000000 +0000 @@ -0,0 +1,56 @@ +#![cfg(all(target_env = "sgx", target_vendor = "fortanix"))] +#![feature(sgx_platform)] + +#[cfg(feature = "std")] +#[test] +fn sgx_image_base_with_std() { + use backtrace::trace; + + let image_base = std::os::fortanix_sgx::mem::image_base(); + + let mut frame_ips = Vec::new(); + trace(|frame| { + frame_ips.push(frame.ip()); + true + }); + + assert!(frame_ips.len() > 0); + for ip in frame_ips { + let ip: u64 = ip as _; + assert!(ip < image_base); + } +} + +#[cfg(not(feature = "std"))] +#[test] +fn sgx_image_base_no_std() { + use backtrace::trace_unsynchronized; + + fn guess_image_base() -> u64 { + let mut top_frame_ip = None; + unsafe { + trace_unsynchronized(|frame| { + top_frame_ip = Some(frame.ip()); + false + }); + } + top_frame_ip.unwrap() as u64 & 0xFFFFFF000000 + } + + let image_base = guess_image_base(); + backtrace::set_image_base(image_base as _); + + let mut frame_ips = Vec::new(); + unsafe { + trace_unsynchronized(|frame| { + frame_ips.push(frame.ip()); + true + }); + } + + assert!(frame_ips.len() > 0); + for ip in frame_ips { + let ip: u64 = ip as _; + assert!(ip < image_base); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/tests/smoke.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/tests/smoke.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/backtrace/tests/smoke.rs 2023-12-04 19:48:36.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/backtrace/tests/smoke.rs 2023-12-21 16:55:31.000000000 +0000 @@ -1,6 +1,27 @@ use backtrace::Frame; use std::thread; +fn get_actual_fn_pointer(fp: usize) -> usize { + // On AIX, the function name references a function descriptor. + // A function descriptor consists of (See https://reviews.llvm.org/D62532) + // * The address of the entry point of the function. + // * The TOC base address for the function. + // * The environment pointer. 
+ // Deref `fp` directly so that we can get the address of `fp`'s + // entry point in text section. + // + // For TOC, one can find more information in + // https://www.ibm.com/docs/en/aix/7.2?topic=program-understanding-programming-toc + if cfg!(target_os = "aix") { + unsafe { + let actual_fn_entry = *(fp as *const usize); + actual_fn_entry + } + } else { + fp + } +} + #[test] // FIXME: shouldn't ignore this test on i686-msvc, unsure why it's failing #[cfg_attr(all(target_arch = "x86", target_env = "msvc"), ignore)] @@ -20,7 +41,7 @@ // Various platforms have various bits of weirdness about their // backtraces. To find a good starting spot let's search through the // frames - let target = frame_4 as usize; + let target = get_actual_fn_pointer(frame_4 as usize); let offset = v .iter() .map(|frame| frame.symbol_address() as usize) @@ -39,7 +60,7 @@ assert_frame( frames.next().unwrap(), - frame_4 as usize, + get_actual_fn_pointer(frame_4 as usize), "frame_4", "tests/smoke.rs", start_line + 6, @@ -47,7 +68,7 @@ ); assert_frame( frames.next().unwrap(), - frame_3 as usize, + get_actual_fn_pointer(frame_3 as usize), "frame_3", "tests/smoke.rs", start_line + 3, @@ -55,7 +76,7 @@ ); assert_frame( frames.next().unwrap(), - frame_2 as usize, + get_actual_fn_pointer(frame_2 as usize), "frame_2", "tests/smoke.rs", start_line + 2, @@ -63,7 +84,7 @@ ); assert_frame( frames.next().unwrap(), - frame_1 as usize, + get_actual_fn_pointer(frame_1 as usize), "frame_1", "tests/smoke.rs", start_line + 1, @@ -71,7 +92,7 @@ ); assert_frame( frames.next().unwrap(), - smoke_test_frames as usize, + get_actual_fn_pointer(smoke_test_frames as usize), "smoke_test_frames", "", 0, @@ -150,9 +171,7 @@ if cfg!(debug_assertions) { assert!( name.contains(expected_name), - "didn't find `{}` in `{}`", - expected_name, - name + "didn't find `{expected_name}` in `{name}`" ); } @@ -164,18 +183,13 @@ if !expected_file.is_empty() { assert!( file.ends_with(expected_file), - "{:?} didn't end with {:?}", - file, - expected_file + "{file:?} didn't end with {expected_file:?}" ); } if expected_line != 0 { assert!( line == expected_line, - "bad line number on frame for `{}`: {} != {}", - expected_name, - line, - expected_line + "bad line number on frame for `{expected_name}`: {line} != {expected_line}" ); } @@ -185,10 +199,7 @@ if expected_col != 0 { assert!( col == expected_col, - "bad column number on frame for `{}`: {} != {}", - expected_name, - col, - expected_col + "bad column number on frame for `{expected_name}`: {col} != {expected_col}", ); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/alloc/layout.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/alloc/layout.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/alloc/layout.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/alloc/layout.rs 2023-12-21 16:55:28.000000000 +0000 @@ -130,6 +130,8 @@ } /// The minimum byte alignment for a memory block of this layout. + /// + /// The returned alignment is guaranteed to be a power of two. 
#[stable(feature = "alloc_layout", since = "1.28.0")] #[rustc_const_stable(feature = "const_alloc_layout_size_align", since = "1.50.0")] #[must_use = "this returns the minimum alignment, \ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/arch.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/arch.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/arch.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/arch.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,5 +1,6 @@ #![doc = include_str!("../../stdarch/crates/core_arch/src/core_arch_docs.md")] +#[allow(unused_imports)] #[stable(feature = "simd_arch", since = "1.27.0")] pub use crate::core_arch::arch::*; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/array/iter.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/array/iter.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/array/iter.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/array/iter.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,7 +4,7 @@ use crate::{ fmt, intrinsics::transmute_unchecked, - iter::{self, ExactSizeIterator, FusedIterator, TrustedLen}, + iter::{self, ExactSizeIterator, FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce}, mem::MaybeUninit, ops::{IndexRange, Range}, ptr, @@ -13,6 +13,7 @@ /// A by-value [array] iterator. #[stable(feature = "array_value_iter", since = "1.51.0")] #[rustc_insignificant_dtor] +#[rustc_diagnostic_item = "ArrayIntoIter"] pub struct IntoIter { /// This is the array we are iterating over. /// @@ -293,6 +294,12 @@ NonZeroUsize::new(remaining).map_or(Ok(()), Err) } + + #[inline] + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { + // SAFETY: The caller must provide an idx that is in bound of the remainder. 
+ unsafe { self.data.as_ptr().add(self.alive.start()).add(idx).cast::().read() } + } } #[stable(feature = "array_value_iter_impls", since = "1.40.0")] @@ -374,6 +381,25 @@ #[stable(feature = "array_value_iter_impls", since = "1.40.0")] unsafe impl TrustedLen for IntoIter {} +#[doc(hidden)] +#[unstable(issue = "none", feature = "std_internals")] +#[rustc_unsafe_specialization_marker] +pub trait NonDrop {} + +// T: Copy as approximation for !Drop since get_unchecked does not advance self.alive +// and thus we can't implement drop-handling +#[unstable(issue = "none", feature = "std_internals")] +impl NonDrop for T {} + +#[doc(hidden)] +#[unstable(issue = "none", feature = "std_internals")] +unsafe impl TrustedRandomAccessNoCoerce for IntoIter +where + T: NonDrop, +{ + const MAY_HAVE_SIDE_EFFECT: bool = false; +} + #[stable(feature = "array_value_iter_impls", since = "1.40.0")] impl Clone for IntoIter { fn clone(&self) -> Self { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/cell/once.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/cell/once.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/cell/once.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/cell/once.rs 2023-12-21 16:55:28.000000000 +0000 @@ -87,10 +87,40 @@ #[inline] #[stable(feature = "once_cell", since = "1.70.0")] pub fn set(&self, value: T) -> Result<(), T> { - // SAFETY: Safe because we cannot have overlapping mutable borrows - let slot = unsafe { &*self.inner.get() }; - if slot.is_some() { - return Err(value); + match self.try_insert(value) { + Ok(_) => Ok(()), + Err((_, value)) => Err(value), + } + } + + /// Sets the contents of the cell to `value` if the cell was empty, then + /// returns a reference to it. + /// + /// # Errors + /// + /// This method returns `Ok(&value)` if the cell was empty and + /// `Err(¤t_value, value)` if it was full. + /// + /// # Examples + /// + /// ``` + /// #![feature(once_cell_try_insert)] + /// + /// use std::cell::OnceCell; + /// + /// let cell = OnceCell::new(); + /// assert!(cell.get().is_none()); + /// + /// assert_eq!(cell.try_insert(92), Ok(&92)); + /// assert_eq!(cell.try_insert(62), Err((&92, 62))); + /// + /// assert!(cell.get().is_some()); + /// ``` + #[inline] + #[unstable(feature = "once_cell_try_insert", issue = "116693")] + pub fn try_insert(&self, value: T) -> Result<&T, (&T, T)> { + if let Some(old) = self.get() { + return Err((old, value)); } // SAFETY: This is the only place where we set the slot, no races @@ -98,8 +128,7 @@ // checked that slot is currently `None`, so this write // maintains the `inner`'s invariant. let slot = unsafe { &mut *self.inner.get() }; - *slot = Some(value); - Ok(()) + Ok(slot.insert(value)) } /// Gets the contents of the cell, initializing it with `f` @@ -183,10 +212,9 @@ let val = outlined_call(f)?; // Note that *some* forms of reentrant initialization might lead to // UB (see `reentrant_init` test). I believe that just removing this - // `assert`, while keeping `set/get` would be sound, but it seems + // `panic`, while keeping `try_insert` would be sound, but it seems // better to panic, rather than to silently use an old value. - assert!(self.set(val).is_ok(), "reentrant init"); - Ok(self.get().unwrap()) + if let Ok(val) = self.try_insert(val) { Ok(val) } else { panic!("reentrant init") } } /// Consumes the cell, returning the wrapped value. 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/cell.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/cell.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/cell.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/cell.rs 2023-12-21 16:55:28.000000000 +0000 @@ -556,7 +556,7 @@ #[inline] #[stable(feature = "cell_as_ptr", since = "1.12.0")] #[rustc_const_stable(feature = "const_cell_as_ptr", since = "1.32.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub const fn as_ptr(&self) -> *mut T { self.value.get() } @@ -755,7 +755,7 @@ } // This ensures the panicking code is outlined from `borrow_mut` for `RefCell`. -#[inline(never)] +#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))] #[track_caller] #[cold] fn panic_already_borrowed(err: BorrowMutError) -> ! { @@ -763,7 +763,7 @@ } // This ensures the panicking code is outlined from `borrow` for `RefCell`. -#[inline(never)] +#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))] #[track_caller] #[cold] fn panic_already_mutably_borrowed(err: BorrowError) -> ! { @@ -1112,7 +1112,7 @@ /// ``` #[inline] #[stable(feature = "cell_as_ptr", since = "1.12.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub fn as_ptr(&self) -> *mut T { self.value.get() } @@ -1423,6 +1423,7 @@ /// See the [module-level documentation](self) for more. #[stable(feature = "rust1", since = "1.0.0")] #[must_not_suspend = "holding a Ref across suspend points can cause BorrowErrors"] +#[rustc_diagnostic_item = "RefCellRef"] pub struct Ref<'b, T: ?Sized + 'b> { // NB: we use a pointer instead of `&'b T` to avoid `noalias` violations, because a // `Ref` argument doesn't hold immutability for its whole scope, only until it drops. @@ -1804,6 +1805,7 @@ /// See the [module-level documentation](self) for more. #[stable(feature = "rust1", since = "1.0.0")] #[must_not_suspend = "holding a RefMut across suspend points can cause BorrowErrors"] +#[rustc_diagnostic_item = "RefCellRefMut"] pub struct RefMut<'b, T: ?Sized + 'b> { // NB: we use a pointer instead of `&'b mut T` to avoid `noalias` violations, because a // `RefMut` argument doesn't hold exclusivity for its whole scope, only until it drops. @@ -2107,7 +2109,7 @@ #[inline(always)] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_unsafecell_get", since = "1.32.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub const fn get(&self) -> *mut T { // We can just cast the pointer from `UnsafeCell` to `T` because of // #[repr(transparent)]. 
This exploits std's special status, there is @@ -2251,7 +2253,7 @@ /// when casting to `&mut T`, and ensure that there are no mutations /// or mutable aliases going on when casting to `&T` #[inline] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub const fn get(&self) -> *mut T { self.value.get() } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/char/decode.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/char/decode.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/char/decode.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/char/decode.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,6 +2,7 @@ use crate::error::Error; use crate::fmt; +use crate::iter::FusedIterator; /// An iterator that decodes UTF-16 encoded code points from an iterator of `u16`s. /// @@ -105,6 +106,9 @@ } } +#[stable(feature = "decode_utf16_fused_iterator", since = "1.75.0")] +impl + FusedIterator> FusedIterator for DecodeUtf16 {} + impl DecodeUtf16Error { /// Returns the unpaired surrogate which caused this error. #[must_use] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/char/methods.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/char/methods.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/char/methods.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/char/methods.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1450,7 +1450,7 @@ #[rustc_const_stable(feature = "const_ascii_ctype_on_intrinsics", since = "1.47.0")] #[inline] pub const fn is_ascii_alphanumeric(&self) -> bool { - matches!(*self, '0'..='9' | 'A'..='Z' | 'a'..='z') + matches!(*self, '0'..='9') | matches!(*self, 'A'..='Z') | matches!(*self, 'a'..='z') } /// Checks if the value is an ASCII decimal digit: @@ -1553,7 +1553,7 @@ #[rustc_const_stable(feature = "const_ascii_ctype_on_intrinsics", since = "1.47.0")] #[inline] pub const fn is_ascii_hexdigit(&self) -> bool { - matches!(*self, '0'..='9' | 'A'..='F' | 'a'..='f') + matches!(*self, '0'..='9') | matches!(*self, 'A'..='F') | matches!(*self, 'a'..='f') } /// Checks if the value is an ASCII punctuation character: @@ -1591,7 +1591,10 @@ #[rustc_const_stable(feature = "const_ascii_ctype_on_intrinsics", since = "1.47.0")] #[inline] pub const fn is_ascii_punctuation(&self) -> bool { - matches!(*self, '!'..='/' | ':'..='@' | '['..='`' | '{'..='~') + matches!(*self, '!'..='/') + | matches!(*self, ':'..='@') + | matches!(*self, '['..='`') + | matches!(*self, '{'..='~') } /// Checks if the value is an ASCII graphic character: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/cmp.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/cmp.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/cmp.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/cmp.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,14 +3,17 @@ //! This module contains various tools for comparing and ordering values. In //! summary: //! -//! * [`Eq`] and [`PartialEq`] are traits that allow you to define total and -//! partial equality between values, respectively. Implementing them overloads -//! the `==` and `!=` operators. +//! * [`PartialEq`] overloads the `==` and `!=` operators. In cases where +//! `Rhs` (the right hand side's type) is `Self`, this trait corresponds to a +//! partial equivalence relation. +//! * [`Eq`] indicates that the overloaded `==` operator corresponds to an +//! equivalence relation. //! 
* [`Ord`] and [`PartialOrd`] are traits that allow you to define total and //! partial orderings between values, respectively. Implementing them overloads //! the `<`, `<=`, `>`, and `>=` operators. //! * [`Ordering`] is an enum returned by the main functions of [`Ord`] and -//! [`PartialOrd`], and describes an ordering. +//! [`PartialOrd`], and describes an ordering of two values (less, equal, or +//! greater). //! * [`Reverse`] is a struct that allows you to easily reverse an ordering. //! * [`max`] and [`min`] are functions that build off of [`Ord`] and allow you //! to find the maximum or minimum of two values. @@ -27,16 +30,21 @@ use self::Ordering::*; -/// Trait for equality comparisons. +/// Trait for comparisons using the equality operator. +/// +/// Implementing this trait for types provides the `==` and `!=` operators for +/// those types. /// /// `x.eq(y)` can also be written `x == y`, and `x.ne(y)` can be written `x != y`. /// We use the easier-to-read infix notation in the remainder of this documentation. /// -/// This trait allows for partial equality, for types that do not have a full -/// equivalence relation. For example, in floating point numbers `NaN != NaN`, -/// so floating point types implement `PartialEq` but not [`trait@Eq`]. -/// Formally speaking, when `Rhs == Self`, this trait corresponds to a [partial equivalence -/// relation](https://en.wikipedia.org/wiki/Partial_equivalence_relation). +/// This trait allows for comparisons using the equality operator, for types +/// that do not have a full equivalence relation. For example, in floating point +/// numbers `NaN != NaN`, so floating point types implement `PartialEq` but not +/// [`trait@Eq`]. Formally speaking, when `Rhs == Self`, this trait corresponds +/// to a [partial equivalence relation]. +/// +/// [partial equivalence relation]: https://en.wikipedia.org/wiki/Partial_equivalence_relation /// /// Implementations must ensure that `eq` and `ne` are consistent with each other: /// @@ -242,15 +250,15 @@ /* compiler built-in */ } -/// Trait for equality comparisons which are [equivalence relations]( +/// Trait for comparisons corresponding to [equivalence relations]( /// https://en.wikipedia.org/wiki/Equivalence_relation). /// -/// This means, that in addition to `a == b` and `a != b` being strict inverses, the equality must -/// be (for all `a`, `b` and `c`): +/// This means, that in addition to `a == b` and `a != b` being strict inverses, +/// the relation must be (for all `a`, `b` and `c`): /// /// - reflexive: `a == a`; -/// - symmetric: `a == b` implies `b == a`; and -/// - transitive: `a == b` and `b == c` implies `a == c`. +/// - symmetric: `a == b` implies `b == a` (required by `PartialEq` as well); and +/// - transitive: `a == b` and `b == c` implies `a == c` (required by `PartialEq` as well). /// /// This property cannot be checked by the compiler, and therefore `Eq` implies /// [`PartialEq`], and has no extra methods. @@ -260,6 +268,10 @@ /// undefined behavior. This means that `unsafe` code **must not** rely on the correctness of these /// methods. /// +/// Implement `Eq` in addition to `PartialEq` if it's guaranteed that +/// `PartialEq::eq(a, a)` always returns `true` (reflexivity), in addition to +/// the symmetric and transitive properties already required by `PartialEq`. +/// /// ## Derivable /// /// This trait can be used with `#[derive]`. When `derive`d, because `Eq` has @@ -299,8 +311,7 @@ // // This should never be implemented by hand. 
#[doc(hidden)] - #[cfg_attr(bootstrap, no_coverage)] // rust-lang/rust#84605 - #[cfg_attr(not(bootstrap), coverage(off))] // + #[coverage(off)] #[inline] #[stable(feature = "rust1", since = "1.0.0")] fn assert_receiver_is_total_eq(&self) {} @@ -310,8 +321,7 @@ #[rustc_builtin_macro] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow_internal_unstable(core_intrinsics, derive_eq, structural_match)] -#[cfg_attr(bootstrap, allow_internal_unstable(no_coverage))] -#[cfg_attr(not(bootstrap), allow_internal_unstable(coverage_attribute))] +#[allow_internal_unstable(coverage_attribute)] pub macro Eq($item:item) { /* compiler built-in */ } @@ -676,12 +686,19 @@ /// /// ## Corollaries /// -/// From the above and the requirements of `PartialOrd`, it follows that `<` defines a strict total order. -/// This means that for all `a`, `b` and `c`: +/// From the above and the requirements of `PartialOrd`, it follows that for +/// all `a`, `b` and `c`: /// /// - exactly one of `a < b`, `a == b` or `a > b` is true; and /// - `<` is transitive: `a < b` and `b < c` implies `a < c`. The same must hold for both `==` and `>`. /// +/// Mathematically speaking, the `<` operator defines a strict [weak order]. In +/// cases where `==` conforms to mathematical equality, it also defines a +/// strict [total order]. +/// +/// [weak order]: https://en.wikipedia.org/wiki/Weak_ordering +/// [total order]: https://en.wikipedia.org/wiki/Total_order +/// /// ## Derivable /// /// This trait can be used with `#[derive]`. @@ -723,7 +740,7 @@ /// - Two sequences are compared element by element. /// - The first mismatching element defines which sequence is lexicographically less or greater than the other. /// - If one sequence is a prefix of another, the shorter sequence is lexicographically less than the other. -/// - If two sequence have equivalent elements and are of the same length, then the sequences are lexicographically equal. +/// - If two sequences have equivalent elements and are of the same length, then the sequences are lexicographically equal. /// - An empty sequence is lexicographically less than any non-empty sequence. /// - Two empty sequences are lexicographically equal. /// @@ -790,6 +807,7 @@ /// ``` #[must_use] #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_diagnostic_item = "ord_cmp_method"] fn cmp(&self, other: &Self) -> Ordering; /// Compares and returns the maximum of two values. @@ -920,6 +938,20 @@ /// - transitivity of `>`: if `a > b` and `b > c` then `a > c` /// - duality of `partial_cmp`: `partial_cmp(a, b) == partial_cmp(b, a).map(Ordering::reverse)` /// +/// ## Strict and non-strict partial orders +/// +/// The `<` and `>` operators behave according to a *strict* partial order. +/// However, `<=` and `>=` do **not** behave according to a *non-strict* +/// partial order. +/// That is because mathematically, a non-strict partial order would require +/// reflexivity, i.e. `a <= a` would need to be true for every `a`. This isn't +/// always the case for types that implement `PartialOrd`, for example: +/// +/// ``` +/// let a = f64::sqrt(-1.0); +/// assert_eq!(a <= a, false); +/// ``` +/// /// ## Derivable /// /// This trait can be used with `#[derive]`. 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/convert/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/convert/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/convert/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/convert/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -100,6 +100,7 @@ #[stable(feature = "convert_id", since = "1.33.0")] #[rustc_const_stable(feature = "const_identity", since = "1.33.0")] #[inline(always)] +#[rustc_diagnostic_item = "convert_identity"] pub const fn identity(x: T) -> T { x } @@ -137,7 +138,7 @@ /// /// [dereferenceable types]: core::ops::Deref /// [pointed-to value]: core::ops::Deref::Target -/// ['`Deref` coercion']: core::ops::Deref#more-on-deref-coercion +/// ['`Deref` coercion']: core::ops::Deref#deref-coercion /// /// ``` /// let x = Box::new(5i32); @@ -243,7 +244,7 @@ /// /// [mutably dereferenceable types]: core::ops::DerefMut /// [pointed-to value]: core::ops::Deref::Target -/// ['`Deref` coercion']: core::ops::DerefMut#more-on-deref-coercion +/// ['`Deref` coercion']: core::ops::DerefMut#mutable-deref-coercion /// /// ``` /// let mut x = Box::new(5i32); @@ -478,6 +479,46 @@ /// - `From for U` implies [`Into`]` for T` /// - `From` is reflexive, which means that `From for T` is implemented /// +/// # When to implement `From` +/// +/// While there's no technical restrictions on which conversions can be done using +/// a `From` implementation, the general expectation is that the conversions +/// should typically be restricted as follows: +/// +/// * The conversion is *infallible*: if the conversion can fail, use [`TryFrom`] +/// instead; don't provide a `From` impl that panics. +/// +/// * The conversion is *lossless*: semantically, it should not lose or discard +/// information. For example, `i32: From` exists, where the original +/// value can be recovered using `u16: TryFrom`. And `String: From<&str>` +/// exists, where you can get something equivalent to the original value via +/// `Deref`. But `From` cannot be used to convert from `u32` to `u16`, since +/// that cannot succeed in a lossless way. (There's some wiggle room here for +/// information not considered semantically relevant. For example, +/// `Box<[T]>: From>` exists even though it might not preserve capacity, +/// like how two vectors can be equal despite differing capacities.) +/// +/// * The conversion is *value-preserving*: the conceptual kind and meaning of +/// the resulting value is the same, even though the Rust type and technical +/// representation might be different. For example `-1_i8 as u8` is *lossless*, +/// since `as` casting back can recover the original value, but that conversion +/// is *not* available via `From` because `-1` and `255` are different conceptual +/// values (despite being identical bit patterns technically). But +/// `f32: From` *is* available because `1_i16` and `1.0_f32` are conceptually +/// the same real number (despite having very different bit patterns technically). +/// `String: From` is available because they're both *text*, but +/// `String: From` is *not* available, since `1` (a number) and `"1"` +/// (text) are too different. (Converting values to text is instead covered +/// by the [`Display`](crate::fmt::Display) trait.) +/// +/// * The conversion is *obvious*: it's the only reasonable conversion between +/// the two types. 
Otherwise it's better to have it be a named method or +/// constructor, like how [`str::as_bytes`] is a method and how integers have +/// methods like [`u32::from_ne_bytes`], [`u32::from_le_bytes`], and +/// [`u32::from_be_bytes`], none of which are `From` implementations. Whereas +/// there's only one reasonable way to wrap an [`Ipv6Addr`](crate::net::Ipv6Addr) +/// into an [`IpAddr`](crate::net::IpAddr), thus `IpAddr: From` exists. +/// /// # Examples /// /// [`String`] implements `From<&str>`: @@ -532,7 +573,7 @@ #[rustc_diagnostic_item = "From"] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented(on( - all(_Self = "&str", any(T = "alloc::string::String", T = "std::string::String")), + all(_Self = "&str", T = "alloc::string::String"), note = "to coerce a `{T}` into a `{Self}`, use `&*` as a prefix", ))] pub trait From: Sized { @@ -577,12 +618,11 @@ /// For example, there is no way to convert an [`i64`] into an [`i32`] /// using the [`From`] trait, because an [`i64`] may contain a value /// that an [`i32`] cannot represent and so the conversion would lose data. -/// This might be handled by truncating the [`i64`] to an [`i32`] (essentially -/// giving the [`i64`]'s value modulo [`i32::MAX`]) or by simply returning -/// [`i32::MAX`], or by some other method. The [`From`] trait is intended -/// for perfect conversions, so the `TryFrom` trait informs the -/// programmer when a type conversion could go bad and lets them -/// decide how to handle it. +/// This might be handled by truncating the [`i64`] to an [`i32`] or by +/// simply returning [`i32::MAX`], or by some other method. The [`From`] +/// trait is intended for perfect conversions, so the `TryFrom` trait +/// informs the programmer when a type conversion could go bad and lets +/// them decide how to handle it. /// /// # Generic Implementations /// @@ -642,6 +682,7 @@ /// Performs the conversion. #[stable(feature = "try_from", since = "1.34.0")] + #[rustc_diagnostic_item = "try_from_fn"] fn try_from(value: T) -> Result; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/default.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/default.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/default.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/default.rs 2023-12-21 16:55:28.000000000 +0000 @@ -130,6 +130,7 @@ /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_diagnostic_item = "default_fn"] fn default() -> Self; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/error.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/error.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/error.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/error.rs 2023-12-21 16:55:28.000000000 +0000 @@ -439,10 +439,10 @@ /// * A Producer initializes the value of one of its fields of a specific type. (or is otherwise /// prepared to generate a value requested). eg, `backtrace::Backtrace` or /// `std::backtrace::Backtrace` -/// * A Consumer requests an object of a specific type (say `std::backtrace::Backtrace). In the case -/// of a `dyn Error` trait object (the Producer), there are methods called `request_ref` and -/// `request_value` are available to simplify obtaining an ``Option`` for a given type. * The -/// Producer, when requested, populates the given Request object which is given as a mutable +/// * A Consumer requests an object of a specific type (say `std::backtrace::Backtrace`). 
In the +/// case of a `dyn Error` trait object (the Producer), there are functions called `request_ref` and +/// `request_value` to simplify obtaining an `Option` for a given type. +/// * The Producer, when requested, populates the given Request object which is given as a mutable /// reference. /// * The Consumer extracts a value or reference to the requested type from the `Request` object /// wrapped in an `Option`; in the case of `dyn Error` the aforementioned `request_ref` and ` diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ffi/c_str.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ffi/c_str.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ffi/c_str.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ffi/c_str.rs 2023-12-21 16:55:28.000000000 +0000 @@ -487,7 +487,7 @@ #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_str_as_ptr", since = "1.32.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub const fn as_ptr(&self) -> *const c_char { self.inner.as_ptr() } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ffi/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ffi/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ffi/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ffi/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,7 +6,7 @@ //! match those defined by C, so that code that interacts with C will //! refer to the correct types. -#![stable(feature = "", since = "1.30.0")] +#![stable(feature = "core_ffi", since = "1.30.0")] #![allow(non_camel_case_types)] use crate::fmt; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/fmt/builders.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/fmt/builders.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/fmt/builders.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/fmt/builders.rs 2023-12-21 16:55:28.000000000 +0000 @@ -84,6 +84,7 @@ #[must_use = "must eventually call `finish()` on Debug builders"] #[allow(missing_debug_implementations)] #[stable(feature = "debug_builders", since = "1.2.0")] +#[rustc_diagnostic_item = "DebugStruct"] pub struct DebugStruct<'a, 'b: 'a> { fmt: &'a mut fmt::Formatter<'b>, result: fmt::Result, @@ -129,6 +130,18 @@ /// ``` #[stable(feature = "debug_builders", since = "1.2.0")] pub fn field(&mut self, name: &str, value: &dyn fmt::Debug) -> &mut Self { + self.field_with(name, |f| value.fmt(f)) + } + + /// Adds a new field to the generated struct output. + /// + /// This method is equivalent to [`DebugStruct::field`], but formats the + /// value using a provided closure rather than by calling [`Debug::fmt`]. 
+ #[unstable(feature = "debug_closure_helpers", issue = "117729")] + pub fn field_with(&mut self, name: &str, value_fmt: F) -> &mut Self + where + F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result, + { self.result = self.result.and_then(|_| { if self.is_pretty() { if !self.has_fields { @@ -139,14 +152,14 @@ let mut writer = PadAdapter::wrap(self.fmt, &mut slot, &mut state); writer.write_str(name)?; writer.write_str(": ")?; - value.fmt(&mut writer)?; + value_fmt(&mut writer)?; writer.write_str(",\n") } else { let prefix = if self.has_fields { ", " } else { " { " }; self.fmt.write_str(prefix)?; self.fmt.write_str(name)?; self.fmt.write_str(": ")?; - value.fmt(self.fmt) + value_fmt(self.fmt) } }); @@ -314,6 +327,18 @@ /// ``` #[stable(feature = "debug_builders", since = "1.2.0")] pub fn field(&mut self, value: &dyn fmt::Debug) -> &mut Self { + self.field_with(|f| value.fmt(f)) + } + + /// Adds a new field to the generated tuple struct output. + /// + /// This method is equivalent to [`DebugTuple::field`], but formats the + /// value using a provided closure rather than by calling [`Debug::fmt`]. + #[unstable(feature = "debug_closure_helpers", issue = "117729")] + pub fn field_with(&mut self, value_fmt: F) -> &mut Self + where + F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result, + { self.result = self.result.and_then(|_| { if self.is_pretty() { if self.fields == 0 { @@ -322,12 +347,12 @@ let mut slot = None; let mut state = Default::default(); let mut writer = PadAdapter::wrap(self.fmt, &mut slot, &mut state); - value.fmt(&mut writer)?; + value_fmt(&mut writer)?; writer.write_str(",\n") } else { let prefix = if self.fields == 0 { "(" } else { ", " }; self.fmt.write_str(prefix)?; - value.fmt(self.fmt) + value_fmt(self.fmt) } }); @@ -384,7 +409,10 @@ } impl<'a, 'b: 'a> DebugInner<'a, 'b> { - fn entry(&mut self, entry: &dyn fmt::Debug) { + fn entry_with(&mut self, entry_fmt: F) + where + F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result, + { self.result = self.result.and_then(|_| { if self.is_pretty() { if !self.has_fields { @@ -393,13 +421,13 @@ let mut slot = None; let mut state = Default::default(); let mut writer = PadAdapter::wrap(self.fmt, &mut slot, &mut state); - entry.fmt(&mut writer)?; + entry_fmt(&mut writer)?; writer.write_str(",\n") } else { if self.has_fields { self.fmt.write_str(", ")? } - entry.fmt(self.fmt) + entry_fmt(self.fmt) } }); @@ -474,7 +502,20 @@ /// ``` #[stable(feature = "debug_builders", since = "1.2.0")] pub fn entry(&mut self, entry: &dyn fmt::Debug) -> &mut Self { - self.inner.entry(entry); + self.inner.entry_with(|f| entry.fmt(f)); + self + } + + /// Adds a new entry to the set output. + /// + /// This method is equivalent to [`DebugSet::entry`], but formats the + /// entry using a provided closure rather than by calling [`Debug::fmt`]. + #[unstable(feature = "debug_closure_helpers", issue = "117729")] + pub fn entry_with(&mut self, entry_fmt: F) -> &mut Self + where + F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result, + { + self.inner.entry_with(entry_fmt); self } @@ -604,7 +645,20 @@ /// ``` #[stable(feature = "debug_builders", since = "1.2.0")] pub fn entry(&mut self, entry: &dyn fmt::Debug) -> &mut Self { - self.inner.entry(entry); + self.inner.entry_with(|f| entry.fmt(f)); + self + } + + /// Adds a new entry to the list output. + /// + /// This method is equivalent to [`DebugList::entry`], but formats the + /// entry using a provided closure rather than by calling [`Debug::fmt`]. 
+ #[unstable(feature = "debug_closure_helpers", issue = "117729")] + pub fn entry_with(&mut self, entry_fmt: F) -> &mut Self + where + F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result, + { + self.inner.entry_with(entry_fmt); self } @@ -774,6 +828,18 @@ /// ``` #[stable(feature = "debug_map_key_value", since = "1.42.0")] pub fn key(&mut self, key: &dyn fmt::Debug) -> &mut Self { + self.key_with(|f| key.fmt(f)) + } + + /// Adds the key part of a new entry to the map output. + /// + /// This method is equivalent to [`DebugMap::key`], but formats the + /// key using a provided closure rather than by calling [`Debug::fmt`]. + #[unstable(feature = "debug_closure_helpers", issue = "117729")] + pub fn key_with(&mut self, key_fmt: F) -> &mut Self + where + F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result, + { self.result = self.result.and_then(|_| { assert!( !self.has_key, @@ -788,13 +854,13 @@ let mut slot = None; self.state = Default::default(); let mut writer = PadAdapter::wrap(self.fmt, &mut slot, &mut self.state); - key.fmt(&mut writer)?; + key_fmt(&mut writer)?; writer.write_str(": ")?; } else { if self.has_fields { self.fmt.write_str(", ")? } - key.fmt(self.fmt)?; + key_fmt(self.fmt)?; self.fmt.write_str(": ")?; } @@ -838,16 +904,28 @@ /// ``` #[stable(feature = "debug_map_key_value", since = "1.42.0")] pub fn value(&mut self, value: &dyn fmt::Debug) -> &mut Self { + self.value_with(|f| value.fmt(f)) + } + + /// Adds the value part of a new entry to the map output. + /// + /// This method is equivalent to [`DebugMap::value`], but formats the + /// value using a provided closure rather than by calling [`Debug::fmt`]. + #[unstable(feature = "debug_closure_helpers", issue = "117729")] + pub fn value_with(&mut self, value_fmt: F) -> &mut Self + where + F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result, + { self.result = self.result.and_then(|_| { assert!(self.has_key, "attempted to format a map value before its key"); if self.is_pretty() { let mut slot = None; let mut writer = PadAdapter::wrap(self.fmt, &mut slot, &mut self.state); - value.fmt(&mut writer)?; + value_fmt(&mut writer)?; writer.write_str(",\n")?; } else { - value.fmt(self.fmt)?; + value_fmt(self.fmt)?; } self.has_key = false; @@ -935,3 +1013,44 @@ self.fmt.alternate() } } + +/// Implements [`fmt::Debug`] and [`fmt::Display`] using a function. 
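The `field_with`, `entry_with`, `key_with` and `value_with` builder methods added above (unstable `debug_closure_helpers`, issue 117729) let a `Debug` implementation format a field through a closure instead of requiring the field's type to implement `Debug`. A nightly-only usage sketch with a made-up `Secret` type, assuming the signatures exactly as introduced in this file:

```rust
#![feature(debug_closure_helpers)]
use std::fmt;

struct Secret {
    id: u32,
    token: String,
}

impl fmt::Debug for Secret {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Secret")
            .field("id", &self.id)
            // Format the token through a closure instead of a `Debug` impl,
            // so the real value is never printed.
            .field_with("token", |f| write!(f, "[redacted; {} bytes]", self.token.len()))
            .finish()
    }
}

fn main() {
    let s = Secret { id: 7, token: String::from("hunter2") };
    println!("{s:?}");
}
```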
+/// +/// # Examples +/// +/// ``` +/// #![feature(debug_closure_helpers)] +/// use std::fmt; +/// +/// let value = 'a'; +/// assert_eq!(format!("{}", value), "a"); +/// assert_eq!(format!("{:?}", value), "'a'"); +/// +/// let wrapped = fmt::FormatterFn(|f| write!(f, "{:?}", &value)); +/// assert_eq!(format!("{}", wrapped), "'a'"); +/// assert_eq!(format!("{:?}", wrapped), "'a'"); +/// ``` +#[unstable(feature = "debug_closure_helpers", issue = "117729")] +pub struct FormatterFn(pub F) +where + F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result; + +#[unstable(feature = "debug_closure_helpers", issue = "117729")] +impl fmt::Debug for FormatterFn +where + F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (self.0)(f) + } +} + +#[unstable(feature = "debug_closure_helpers", issue = "117729")] +impl fmt::Display for FormatterFn +where + F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (self.0)(f) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/fmt/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/fmt/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/fmt/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/fmt/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -39,6 +39,9 @@ #[stable(feature = "debug_builders", since = "1.2.0")] pub use self::builders::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple}; +#[unstable(feature = "debug_closure_helpers", issue = "117729")] +pub use self::builders::FormatterFn; + /// The type returned by formatter methods. /// /// # Examples @@ -239,6 +242,7 @@ /// documentation of the methods defined on `Formatter` below. #[allow(missing_debug_implementations)] #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_diagnostic_item = "Formatter"] pub struct Formatter<'a> { flags: u32, fill: char, @@ -791,8 +795,10 @@ /// assert_eq!(format!("l as binary is: {l:b}"), "l as binary is: 1101011"); /// /// assert_eq!( -/// format!("l as binary is: {l:#032b}"), -/// "l as binary is: 0b000000000000000000000001101011" +/// // Note that the `0b` prefix added by `#` is included in the total width, so we +/// // need to add two to correctly display all 32 bits. +/// format!("l as binary is: {l:#034b}"), +/// "l as binary is: 0b00000000000000000000000001101011" /// ); /// ``` #[stable(feature = "rust1", since = "1.0.0")] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/fmt/rt.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/fmt/rt.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/fmt/rt.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/fmt/rt.rs 2023-12-21 16:55:28.000000000 +0000 @@ -133,6 +133,10 @@ Self::new(x, USIZE_MARKER) } + // FIXME: Transmuting formatter in new and indirectly branching to/calling + // it here is an explicit CFI violation. 
+ #[allow(inline_no_sanitize)] + #[no_sanitize(cfi, kcfi)] #[inline(always)] pub(super) fn fmt(&self, f: &mut Formatter<'_>) -> Result { (self.formatter)(self.value, f) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/future/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/future/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/future/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/future/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -38,7 +38,7 @@ /// This type is needed because: /// -/// a) Generators cannot implement `for<'a, 'b> Generator<&'a mut Context<'b>>`, so we need to pass +/// a) Coroutines cannot implement `for<'a, 'b> Coroutine<&'a mut Context<'b>>`, so we need to pass /// a raw pointer (see ). /// b) Raw pointers and `NonNull` aren't `Send` or `Sync`, so that would make every single future /// non-Send/Sync as well, and we don't want that. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/hint.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/hint.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/hint.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/hint.rs 2023-12-21 16:55:28.000000000 +0000 @@ -277,7 +277,7 @@ /// - Treats the call to `contains` and its result as volatile: the body of `benchmark` cannot /// optimize this away /// -/// This makes our benchmark much more realistic to how the function would be used in situ, where +/// This makes our benchmark much more realistic to how the function would actually be used, where /// arguments are usually not known at compile time and the result is used in some way. #[inline] #[stable(feature = "bench_black_box", since = "1.66.0")] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/intrinsics/mir.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/intrinsics/mir.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/intrinsics/mir.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/intrinsics/mir.rs 2023-12-21 16:55:28.000000000 +0000 @@ -12,8 +12,7 @@ //! //! Typical usage will look like this: //! -#![cfg_attr(bootstrap, doc = "```rust,ignore")] -#![cfg_attr(not(bootstrap), doc = "```rust")] +//! ```rust //! #![feature(core_intrinsics, custom_mir)] //! #![allow(internal_features)] //! @@ -63,8 +62,7 @@ //! //! # Examples //! -#![cfg_attr(bootstrap, doc = "```rust,ignore")] -#![cfg_attr(not(bootstrap), doc = "```rust")] +//! ```rust //! #![feature(core_intrinsics, custom_mir)] //! #![allow(internal_features)] //! @@ -106,7 +104,6 @@ //! } //! //! #[custom_mir(dialect = "runtime", phase = "optimized")] -#![cfg_attr(bootstrap, doc = "#[cfg(any())]")] // disable the following function in doctests when `bootstrap` is set //! fn push_and_pop(v: &mut Vec, value: T) { //! mir!( //! 
let _unused; @@ -319,8 +316,7 @@ /// /// # Examples /// - #[cfg_attr(bootstrap, doc = "```rust,ignore")] - #[cfg_attr(not(bootstrap), doc = "```rust")] + /// ```rust /// #![allow(internal_features)] /// #![feature(custom_mir, core_intrinsics)] /// diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/intrinsics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/intrinsics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/intrinsics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/intrinsics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1072,7 +1072,7 @@ /// zero-initialization: This will statically either panic, or do nothing. /// /// This intrinsic does not have a stable counterpart. - #[rustc_const_unstable(feature = "const_assert_type2", issue = "none")] + #[rustc_const_stable(feature = "const_assert_type2", since = "1.75.0")] #[rustc_safe_intrinsic] #[rustc_nounwind] pub fn assert_zero_valid(); @@ -1080,7 +1080,7 @@ /// A guard for `std::mem::uninitialized`. This will statically either panic, or do nothing. /// /// This intrinsic does not have a stable counterpart. - #[rustc_const_unstable(feature = "const_assert_type2", issue = "none")] + #[rustc_const_stable(feature = "const_assert_type2", since = "1.75.0")] #[rustc_safe_intrinsic] #[rustc_nounwind] pub fn assert_mem_uninitialized_valid(); @@ -1509,12 +1509,14 @@ /// /// This intrinsic does not have a stable counterpart. #[rustc_nounwind] + #[rustc_diagnostic_item = "intrinsics_unaligned_volatile_load"] pub fn unaligned_volatile_load(src: *const T) -> T; /// Performs a volatile store to the `dst` pointer. /// The pointer is not required to be aligned. /// /// This intrinsic does not have a stable counterpart. #[rustc_nounwind] + #[rustc_diagnostic_item = "intrinsics_unaligned_volatile_store"] pub fn unaligned_volatile_store(dst: *mut T, val: T); /// Returns the square root of an `f32` @@ -2277,7 +2279,7 @@ /// any safety invariants. /// /// The stabilized version of this intrinsic is [`core::mem::discriminant`]. 
- #[rustc_const_unstable(feature = "const_discriminant", issue = "69821")] + #[rustc_const_stable(feature = "const_discriminant", since = "1.75.0")] #[rustc_safe_intrinsic] #[rustc_nounwind] pub fn discriminant_value(v: &T) -> ::Discriminant; @@ -2666,6 +2668,7 @@ #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces +#[rustc_diagnostic_item = "ptr_copy_nonoverlapping"] pub const unsafe fn copy_nonoverlapping(src: *const T, dst: *mut T, count: usize) { extern "rust-intrinsic" { #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")] @@ -2761,6 +2764,7 @@ #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces +#[rustc_diagnostic_item = "ptr_copy"] pub const unsafe fn copy(src: *const T, dst: *mut T, count: usize) { extern "rust-intrinsic" { #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")] @@ -2834,6 +2838,7 @@ #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces +#[rustc_diagnostic_item = "ptr_write_bytes"] pub const unsafe fn write_bytes(dst: *mut T, val: u8, count: usize) { extern "rust-intrinsic" { #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/io/borrowed_buf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/io/borrowed_buf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/io/borrowed_buf.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/io/borrowed_buf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,301 @@ +#![unstable(feature = "core_io_borrowed_buf", issue = "117693")] + +use crate::fmt::{self, Debug, Formatter}; +use crate::mem::{self, MaybeUninit}; +use crate::{cmp, ptr}; + +/// A borrowed byte buffer which is incrementally filled and initialized. +/// +/// This type is a sort of "double cursor". It tracks three regions in the buffer: a region at the beginning of the +/// buffer that has been logically filled with data, a region that has been initialized at some point but not yet +/// logically filled, and a region at the end that is fully uninitialized. The filled region is guaranteed to be a +/// subset of the initialized region. +/// +/// In summary, the contents of the buffer can be visualized as: +/// ```not_rust +/// [ capacity ] +/// [ filled | unfilled ] +/// [ initialized | uninitialized ] +/// ``` +/// +/// A `BorrowedBuf` is created around some existing data (or capacity for data) via a unique reference +/// (`&mut`). The `BorrowedBuf` can be configured (e.g., using `clear` or `set_init`), but cannot be +/// directly written. To write into the buffer, use `unfilled` to create a `BorrowedCursor`. The cursor +/// has write-only access to the unfilled portion of the buffer (you can think of it as a +/// write-only iterator). +/// +/// The lifetime `'data` is a bound on the lifetime of the underlying data. +pub struct BorrowedBuf<'data> { + /// The buffer's underlying data. + buf: &'data mut [MaybeUninit], + /// The length of `self.buf` which is known to be filled. + filled: usize, + /// The length of `self.buf` which is known to be initialized. 
+ init: usize, +} + +impl Debug for BorrowedBuf<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("BorrowedBuf") + .field("init", &self.init) + .field("filled", &self.filled) + .field("capacity", &self.capacity()) + .finish() + } +} + +/// Create a new `BorrowedBuf` from a fully initialized slice. +impl<'data> From<&'data mut [u8]> for BorrowedBuf<'data> { + #[inline] + fn from(slice: &'data mut [u8]) -> BorrowedBuf<'data> { + let len = slice.len(); + + BorrowedBuf { + // SAFETY: initialized data never becoming uninitialized is an invariant of BorrowedBuf + buf: unsafe { (slice as *mut [u8]).as_uninit_slice_mut().unwrap() }, + filled: 0, + init: len, + } + } +} + +/// Create a new `BorrowedBuf` from an uninitialized buffer. +/// +/// Use `set_init` if part of the buffer is known to be already initialized. +impl<'data> From<&'data mut [MaybeUninit]> for BorrowedBuf<'data> { + #[inline] + fn from(buf: &'data mut [MaybeUninit]) -> BorrowedBuf<'data> { + BorrowedBuf { buf, filled: 0, init: 0 } + } +} + +impl<'data> BorrowedBuf<'data> { + /// Returns the total capacity of the buffer. + #[inline] + pub fn capacity(&self) -> usize { + self.buf.len() + } + + /// Returns the length of the filled part of the buffer. + #[inline] + pub fn len(&self) -> usize { + self.filled + } + + /// Returns the length of the initialized part of the buffer. + #[inline] + pub fn init_len(&self) -> usize { + self.init + } + + /// Returns a shared reference to the filled portion of the buffer. + #[inline] + pub fn filled(&self) -> &[u8] { + // SAFETY: We only slice the filled part of the buffer, which is always valid + unsafe { MaybeUninit::slice_assume_init_ref(&self.buf[0..self.filled]) } + } + + /// Returns a mutable reference to the filled portion of the buffer. + #[inline] + pub fn filled_mut(&mut self) -> &mut [u8] { + // SAFETY: We only slice the filled part of the buffer, which is always valid + unsafe { MaybeUninit::slice_assume_init_mut(&mut self.buf[0..self.filled]) } + } + + /// Returns a cursor over the unfilled part of the buffer. + #[inline] + pub fn unfilled<'this>(&'this mut self) -> BorrowedCursor<'this> { + BorrowedCursor { + start: self.filled, + // SAFETY: we never assign into `BorrowedCursor::buf`, so treating its + // lifetime covariantly is safe. + buf: unsafe { + mem::transmute::<&'this mut BorrowedBuf<'data>, &'this mut BorrowedBuf<'this>>(self) + }, + } + } + + /// Clears the buffer, resetting the filled region to empty. + /// + /// The number of initialized bytes is not changed, and the contents of the buffer are not modified. + #[inline] + pub fn clear(&mut self) -> &mut Self { + self.filled = 0; + self + } + + /// Asserts that the first `n` bytes of the buffer are initialized. + /// + /// `BorrowedBuf` assumes that bytes are never de-initialized, so this method does nothing when called with fewer + /// bytes than are already known to be initialized. + /// + /// # Safety + /// + /// The caller must ensure that the first `n` unfilled bytes of the buffer have already been initialized. + #[inline] + pub unsafe fn set_init(&mut self, n: usize) -> &mut Self { + self.init = cmp::max(self.init, n); + self + } +} + +/// A writeable view of the unfilled portion of a [`BorrowedBuf`](BorrowedBuf). +/// +/// Provides access to the initialized and uninitialized parts of the underlying `BorrowedBuf`. 
+/// Data can be written directly to the cursor by using [`append`](BorrowedCursor::append) or +/// indirectly by getting a slice of part or all of the cursor and writing into the slice. In the +/// indirect case, the caller must call [`advance`](BorrowedCursor::advance) after writing to inform +/// the cursor how many bytes have been written. +/// +/// Once data is written to the cursor, it becomes part of the filled portion of the underlying +/// `BorrowedBuf` and can no longer be accessed or re-written by the cursor. I.e., the cursor tracks +/// the unfilled part of the underlying `BorrowedBuf`. +/// +/// The lifetime `'a` is a bound on the lifetime of the underlying buffer (which means it is a bound +/// on the data in that buffer by transitivity). +#[derive(Debug)] +pub struct BorrowedCursor<'a> { + /// The underlying buffer. + // Safety invariant: we treat the type of buf as covariant in the lifetime of `BorrowedBuf` when + // we create a `BorrowedCursor`. This is only safe if we never replace `buf` by assigning into + // it, so don't do that! + buf: &'a mut BorrowedBuf<'a>, + /// The length of the filled portion of the underlying buffer at the time of the cursor's + /// creation. + start: usize, +} + +impl<'a> BorrowedCursor<'a> { + /// Reborrow this cursor by cloning it with a smaller lifetime. + /// + /// Since a cursor maintains unique access to its underlying buffer, the borrowed cursor is + /// not accessible while the new cursor exists. + #[inline] + pub fn reborrow<'this>(&'this mut self) -> BorrowedCursor<'this> { + BorrowedCursor { + // SAFETY: we never assign into `BorrowedCursor::buf`, so treating its + // lifetime covariantly is safe. + buf: unsafe { + mem::transmute::<&'this mut BorrowedBuf<'a>, &'this mut BorrowedBuf<'this>>( + self.buf, + ) + }, + start: self.start, + } + } + + /// Returns the available space in the cursor. + #[inline] + pub fn capacity(&self) -> usize { + self.buf.capacity() - self.buf.filled + } + + /// Returns the number of bytes written to this cursor since it was created from a `BorrowedBuf`. + /// + /// Note that if this cursor is a reborrowed clone of another, then the count returned is the + /// count written via either cursor, not the count since the cursor was reborrowed. + #[inline] + pub fn written(&self) -> usize { + self.buf.filled - self.start + } + + /// Returns a shared reference to the initialized portion of the cursor. + #[inline] + pub fn init_ref(&self) -> &[u8] { + // SAFETY: We only slice the initialized part of the buffer, which is always valid + unsafe { MaybeUninit::slice_assume_init_ref(&self.buf.buf[self.buf.filled..self.buf.init]) } + } + + /// Returns a mutable reference to the initialized portion of the cursor. + #[inline] + pub fn init_mut(&mut self) -> &mut [u8] { + // SAFETY: We only slice the initialized part of the buffer, which is always valid + unsafe { + MaybeUninit::slice_assume_init_mut(&mut self.buf.buf[self.buf.filled..self.buf.init]) + } + } + + /// Returns a mutable reference to the uninitialized part of the cursor. + /// + /// It is safe to uninitialize any of these bytes. + #[inline] + pub fn uninit_mut(&mut self) -> &mut [MaybeUninit] { + &mut self.buf.buf[self.buf.init..] + } + + /// Returns a mutable reference to the whole cursor. + /// + /// # Safety + /// + /// The caller must not uninitialize any bytes in the initialized portion of the cursor. + #[inline] + pub unsafe fn as_mut(&mut self) -> &mut [MaybeUninit] { + &mut self.buf.buf[self.buf.filled..] 
+ } + + /// Advance the cursor by asserting that `n` bytes have been filled. + /// + /// After advancing, the `n` bytes are no longer accessible via the cursor and can only be + /// accessed via the underlying buffer. I.e., the buffer's filled portion grows by `n` elements + /// and its unfilled portion (and the capacity of this cursor) shrinks by `n` elements. + /// + /// # Safety + /// + /// The caller must ensure that the first `n` bytes of the cursor have been properly + /// initialised. + #[inline] + pub unsafe fn advance(&mut self, n: usize) -> &mut Self { + self.buf.filled += n; + self.buf.init = cmp::max(self.buf.init, self.buf.filled); + self + } + + /// Initializes all bytes in the cursor. + #[inline] + pub fn ensure_init(&mut self) -> &mut Self { + let uninit = self.uninit_mut(); + // SAFETY: 0 is a valid value for MaybeUninit and the length matches the allocation + // since it is comes from a slice reference. + unsafe { + ptr::write_bytes(uninit.as_mut_ptr(), 0, uninit.len()); + } + self.buf.init = self.buf.capacity(); + + self + } + + /// Asserts that the first `n` unfilled bytes of the cursor are initialized. + /// + /// `BorrowedBuf` assumes that bytes are never de-initialized, so this method does nothing when + /// called with fewer bytes than are already known to be initialized. + /// + /// # Safety + /// + /// The caller must ensure that the first `n` bytes of the buffer have already been initialized. + #[inline] + pub unsafe fn set_init(&mut self, n: usize) -> &mut Self { + self.buf.init = cmp::max(self.buf.init, self.buf.filled + n); + self + } + + /// Appends data to the cursor, advancing position within its buffer. + /// + /// # Panics + /// + /// Panics if `self.capacity()` is less than `buf.len()`. + #[inline] + pub fn append(&mut self, buf: &[u8]) { + assert!(self.capacity() >= buf.len()); + + // SAFETY: we do not de-initialize any of the elements of the slice + unsafe { + MaybeUninit::write_slice(&mut self.as_mut()[..buf.len()], buf); + } + + // SAFETY: We just added the entire contents of buf to the filled section. + unsafe { + self.set_init(buf.len()); + } + self.buf.filled += buf.len(); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/io/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/io/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/io/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/io/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,6 @@ +//! Traits, helpers, and type definitions for core I/O functionality. + +mod borrowed_buf; + +#[unstable(feature = "core_io_borrowed_buf", issue = "117693")] +pub use self::borrowed_buf::{BorrowedBuf, BorrowedCursor}; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/adapters/peekable.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/adapters/peekable.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/adapters/peekable.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/adapters/peekable.rs 2023-12-21 16:55:28.000000000 +0000 @@ -12,6 +12,7 @@ #[derive(Clone, Debug)] #[must_use = "iterators are lazy and do nothing unless consumed"] #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_diagnostic_item = "IterPeekable"] pub struct Peekable { iter: I, /// Remember a peeked value, even if it was None. 
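The new `BorrowedBuf`/`BorrowedCursor` types above (unstable `core_io_borrowed_buf`, issue 117693) track the filled and initialized regions of a caller-provided buffer. A nightly-only usage sketch, assuming the API exactly as introduced in `library/core/src/io/borrowed_buf.rs`:

```rust
#![feature(core_io_borrowed_buf)]
use core::io::BorrowedBuf;
use core::mem::MaybeUninit;

fn main() {
    // Caller-provided, fully uninitialized storage.
    let mut storage = [MaybeUninit::<u8>::uninit(); 16];
    let mut buf: BorrowedBuf<'_> = storage.as_mut_slice().into();

    // The cursor only exposes the unfilled region; `append` advances the
    // filled region and marks the written bytes as initialized.
    let mut cursor = buf.unfilled();
    cursor.append(b"hello");
    assert_eq!(cursor.written(), 5);

    assert_eq!(buf.filled(), b"hello");
    assert_eq!(buf.init_len(), 5);
}
```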
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/adapters/zip.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/adapters/zip.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/adapters/zip.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/adapters/zip.rs 2023-12-21 16:55:28.000000000 +0000 @@ -95,6 +95,14 @@ } #[inline] + fn fold(self, init: Acc, f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, + { + ZipImpl::fold(self, init, f) + } + + #[inline] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item where Self: TrustedRandomAccessNoCoerce, @@ -129,6 +137,9 @@ where A: DoubleEndedIterator + ExactSizeIterator, B: DoubleEndedIterator + ExactSizeIterator; + fn fold(self, init: Acc, f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc; // This has the same safety requirements as `Iterator::__iterator_get_unchecked` unsafe fn get_unchecked(&mut self, idx: usize) -> ::Item where @@ -228,6 +239,14 @@ { unreachable!("Always specialized"); } + + #[inline] + default fn fold(self, init: Acc, f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, + { + SpecFold::spec_fold(self, init, f) + } } #[doc(hidden)] @@ -251,6 +270,24 @@ // `Iterator::__iterator_get_unchecked`. unsafe { (self.a.__iterator_get_unchecked(idx), self.b.__iterator_get_unchecked(idx)) } } + + #[inline] + fn fold(mut self, init: Acc, mut f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, + { + let mut accum = init; + let len = ZipImpl::size_hint(&self).0; + for i in 0..len { + // SAFETY: since Self: TrustedRandomAccessNoCoerce we can trust the size-hint to + // calculate the length and then use that to do unchecked iteration. + // fold consumes the iterator so we don't need to fixup any state. 
+ unsafe { + accum = f(accum, self.get_unchecked(i)); + } + } + accum + } } #[doc(hidden)] @@ -590,3 +627,56 @@ unsafe { self.__iterator_get_unchecked(index) } } } + +trait SpecFold: Iterator { + fn spec_fold(self, init: B, f: F) -> B + where + Self: Sized, + F: FnMut(B, Self::Item) -> B; +} + +impl SpecFold for Zip { + // Adapted from default impl from the Iterator trait + #[inline] + default fn spec_fold(mut self, init: Acc, mut f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, + { + let mut accum = init; + while let Some(x) = ZipImpl::next(&mut self) { + accum = f(accum, x); + } + accum + } +} + +impl SpecFold for Zip { + #[inline] + fn spec_fold(mut self, init: Acc, mut f: F) -> Acc + where + F: FnMut(Acc, Self::Item) -> Acc, + { + let mut accum = init; + loop { + let (upper, more) = if let Some(upper) = ZipImpl::size_hint(&self).1 { + (upper, false) + } else { + // Per TrustedLen contract a None upper bound means more than usize::MAX items + (usize::MAX, true) + }; + + for _ in 0..upper { + let pair = + // SAFETY: TrustedLen guarantees that at least `upper` many items are available + // therefore we know they can't be None + unsafe { (self.a.next().unwrap_unchecked(), self.b.next().unwrap_unchecked()) }; + accum = f(accum, pair); + } + + if !more { + break; + } + } + accum + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -391,11 +391,11 @@ pub use self::range::Step; #[unstable( - feature = "iter_from_generator", + feature = "iter_from_coroutine", issue = "43122", - reason = "generators are unstable" + reason = "coroutines are unstable" )] -pub use self::sources::from_generator; +pub use self::sources::from_coroutine; #[stable(feature = "iter_empty", since = "1.2.0")] pub use self::sources::{empty, Empty}; #[stable(feature = "iter_from_fn", since = "1.34.0")] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/empty.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/empty.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/empty.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/empty.rs 2023-12-21 16:55:28.000000000 +0000 @@ -27,6 +27,7 @@ /// This `struct` is created by the [`empty()`] function. See its documentation for more. #[must_use = "iterators are lazy and do nothing unless consumed"] #[stable(feature = "iter_empty", since = "1.2.0")] +#[rustc_diagnostic_item = "IterEmpty"] pub struct Empty(marker::PhantomData T>); #[stable(feature = "core_impl_debug", since = "1.9.0")] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/from_coroutine.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/from_coroutine.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/from_coroutine.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/from_coroutine.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,59 @@ +use crate::fmt; +use crate::ops::{Coroutine, CoroutineState}; +use crate::pin::Pin; + +/// Creates a new iterator where each iteration calls the provided coroutine. +/// +/// Similar to [`iter::from_fn`]. 
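The `Zip::fold` specialization above only changes how the standard library iterates internally; ordinary stable code such as the dot product below is the kind of `zip` + `fold` chain it is meant to accelerate. The counted-loop fast path is an internal detail rather than a guaranteed behavior:

```rust
fn main() {
    let xs = [1.0f64, 2.0, 3.0, 4.0];
    let ys = [10.0f64, 20.0, 30.0, 40.0];

    // Pairwise reduction over two slices; with the specialization this can
    // take an indexed fast path instead of repeated calls to `Zip::next`.
    let dot = xs.iter().zip(ys.iter()).fold(0.0, |acc, (x, y)| acc + x * y);
    assert_eq!(dot, 300.0);
}
```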
+/// +/// [`iter::from_fn`]: crate::iter::from_fn +/// +/// # Examples +/// +/// ``` +/// #![cfg_attr(bootstrap, feature(generators))] +/// #![cfg_attr(not(bootstrap), feature(coroutines))] +/// #![feature(iter_from_coroutine)] +/// +/// let it = std::iter::from_coroutine(|| { +/// yield 1; +/// yield 2; +/// yield 3; +/// }); +/// let v: Vec<_> = it.collect(); +/// assert_eq!(v, [1, 2, 3]); +/// ``` +#[inline] +#[unstable(feature = "iter_from_coroutine", issue = "43122", reason = "coroutines are unstable")] +pub fn from_coroutine + Unpin>(coroutine: G) -> FromCoroutine { + FromCoroutine(coroutine) +} + +/// An iterator over the values yielded by an underlying coroutine. +/// +/// This `struct` is created by the [`iter::from_coroutine()`] function. See its documentation for +/// more. +/// +/// [`iter::from_coroutine()`]: from_coroutine +#[unstable(feature = "iter_from_coroutine", issue = "43122", reason = "coroutines are unstable")] +#[derive(Clone)] +pub struct FromCoroutine(G); + +#[unstable(feature = "iter_from_coroutine", issue = "43122", reason = "coroutines are unstable")] +impl + Unpin> Iterator for FromCoroutine { + type Item = G::Yield; + + fn next(&mut self) -> Option { + match Pin::new(&mut self.0).resume(()) { + CoroutineState::Yielded(n) => Some(n), + CoroutineState::Complete(()) => None, + } + } +} + +#[unstable(feature = "iter_from_coroutine", issue = "43122", reason = "coroutines are unstable")] +impl fmt::Debug for FromCoroutine { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("FromCoroutine").finish() + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/from_generator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/from_generator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/from_generator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/from_generator.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,58 +0,0 @@ -use crate::fmt; -use crate::ops::{Generator, GeneratorState}; -use crate::pin::Pin; - -/// Creates a new iterator where each iteration calls the provided generator. -/// -/// Similar to [`iter::from_fn`]. -/// -/// [`iter::from_fn`]: crate::iter::from_fn -/// -/// # Examples -/// -/// ``` -/// #![feature(generators)] -/// #![feature(iter_from_generator)] -/// -/// let it = std::iter::from_generator(|| { -/// yield 1; -/// yield 2; -/// yield 3; -/// }); -/// let v: Vec<_> = it.collect(); -/// assert_eq!(v, [1, 2, 3]); -/// ``` -#[inline] -#[unstable(feature = "iter_from_generator", issue = "43122", reason = "generators are unstable")] -pub fn from_generator + Unpin>(generator: G) -> FromGenerator { - FromGenerator(generator) -} - -/// An iterator over the values yielded by an underlying generator. -/// -/// This `struct` is created by the [`iter::from_generator()`] function. See its documentation for -/// more. 
-/// -/// [`iter::from_generator()`]: from_generator -#[unstable(feature = "iter_from_generator", issue = "43122", reason = "generators are unstable")] -#[derive(Clone)] -pub struct FromGenerator(G); - -#[unstable(feature = "iter_from_generator", issue = "43122", reason = "generators are unstable")] -impl + Unpin> Iterator for FromGenerator { - type Item = G::Yield; - - fn next(&mut self) -> Option { - match Pin::new(&mut self.0).resume(()) { - GeneratorState::Yielded(n) => Some(n), - GeneratorState::Complete(()) => None, - } - } -} - -#[unstable(feature = "iter_from_generator", issue = "43122", reason = "generators are unstable")] -impl fmt::Debug for FromGenerator { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("FromGenerator").finish() - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/once.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/once.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/once.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/once.rs 2023-12-21 16:55:28.000000000 +0000 @@ -61,6 +61,7 @@ /// This `struct` is created by the [`once()`] function. See its documentation for more. #[derive(Clone, Debug)] #[stable(feature = "iter_once", since = "1.2.0")] +#[rustc_diagnostic_item = "IterOnce"] pub struct Once { inner: crate::option::IntoIter, } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/once_with.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/once_with.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/once_with.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/once_with.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,7 +4,7 @@ /// Creates an iterator that lazily generates a value exactly once by invoking /// the provided closure. /// -/// This is commonly used to adapt a single value generator into a [`chain()`] of +/// This is commonly used to adapt a single value coroutine into a [`chain()`] of /// other kinds of iteration. Maybe you have an iterator that covers almost /// everything, but you need an extra special case. Maybe you have a function /// which works on iterators, but you only need to process one value. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/successors.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/successors.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/successors.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources/successors.rs 2023-12-21 16:55:28.000000000 +0000 @@ -17,7 +17,7 @@ F: FnMut(&T) -> Option, { // If this function returned `impl Iterator` - // it could be based on `unfold` and not need a dedicated type. + // it could be based on `from_fn` and not need a dedicated type. // However having a named `Successors` type allows it to be `Clone` when `T` and `F` are. 
Successors { next: first, succ } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/sources.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/sources.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,6 @@ mod empty; +mod from_coroutine; mod from_fn; -mod from_generator; mod once; mod once_with; mod repeat; @@ -27,11 +27,11 @@ pub use self::from_fn::{from_fn, FromFn}; #[unstable( - feature = "iter_from_generator", + feature = "iter_from_coroutine", issue = "43122", - reason = "generators are unstable" + reason = "coroutines are unstable" )] -pub use self::from_generator::from_generator; +pub use self::from_coroutine::from_coroutine; #[stable(feature = "iter_successors", since = "1.34.0")] pub use self::successors::{successors, Successors}; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/traits/collect.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/traits/collect.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/traits/collect.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/traits/collect.rs 2023-12-21 16:55:28.000000000 +0000 @@ -146,6 +146,7 @@ /// assert_eq!(v, vec![5, 5, 5, 5, 5]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_diagnostic_item = "from_iter_fn"] fn from_iter>(iter: T) -> Self; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/traits/iterator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/traits/iterator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/iter/traits/iterator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/iter/traits/iterator.rs 2023-12-21 16:55:28.000000000 +0000 @@ -27,13 +27,13 @@ #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( on( - any(_Self = "core::ops::RangeTo", _Self = "std::ops::RangeTo"), + _Self = "core::ops::range::RangeTo", label = "if you meant to iterate until a value, add a starting value", note = "`..end` is a `RangeTo`, which cannot be iterated on; you might have meant to have a \ bounded `Range`: `0..end`" ), on( - any(_Self = "core::ops::RangeToInclusive", _Self = "std::ops::RangeToInclusive"), + _Self = "core::ops::range::RangeToInclusive", label = "if you meant to iterate until a value (including it), add a starting value", note = "`..=end` is a `RangeToInclusive`, which cannot be iterated on; you might have meant \ to have a bounded `RangeInclusive`: `0..=end`" @@ -44,7 +44,7 @@ ), on(_Self = "&[]", label = "`{Self}` is not an iterator; try calling `.iter()`"), on( - any(_Self = "alloc::vec::Vec", _Self = "std::vec::Vec"), + _Self = "alloc::vec::Vec", label = "`{Self}` is not an iterator; try calling `.into_iter()` or `.iter()`" ), on( @@ -52,7 +52,7 @@ label = "`{Self}` is not an iterator; try calling `.chars()` or `.bytes()`" ), on( - any(_Self = "alloc::string::String", _Self = "std::string::String"), + _Self = "alloc::string::String", label = "`{Self}` is not an iterator; try calling `.chars()` or `.bytes()`" ), on( @@ -69,6 +69,7 @@ message = "`{Self}` is not an iterator" )] #[doc(notable_trait)] +#[cfg_attr(not(bootstrap), lang = "iterator")] #[rustc_diagnostic_item = "Iterator"] #[must_use = "iterators are lazy and do nothing unless consumed"] pub trait Iterator { @@ -2141,7 +2142,7 @@ /// passed collection. 
The collection is then returned, so the call chain /// can be continued. /// - /// This is useful when you already have a collection and wants to add + /// This is useful when you already have a collection and want to add /// the iterator items to it. /// /// This method is a convenience method to call [Extend::extend](trait.Extend.html), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -26,7 +26,8 @@ //! assumptions about their semantics: For `memcpy`, `memmove`, `memset`, `memcmp`, and `bcmp`, if //! the `n` parameter is 0, the function is assumed to not be UB. Furthermore, for `memcpy`, if //! source and target pointer are equal, the function is assumed to not be UB. -//! (Note that these are [standard assumptions](https://reviews.llvm.org/D86993) among compilers.) +//! (Note that these are standard assumptions among compilers: +//! [clang](https://reviews.llvm.org/D86993) and [GCC](https://gcc.gnu.org/bugzilla/show_bug.cgi?id=32667) do the same.) //! These functions are often provided by the system libc, but can also be provided by the //! [compiler-builtins crate](https://crates.io/crates/compiler_builtins). //! Note that the library does not guarantee that it will always make these assumptions, so Rust @@ -68,6 +69,7 @@ test(no_crate_inject, attr(deny(warnings))), test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))) )] +#![cfg_attr(not(bootstrap), doc(rust_logo))] #![doc(cfg_hide( not(test), any(not(feature = "miri-test-libstd"), test, doctest), @@ -110,8 +112,6 @@ // // Library features: // tidy-alphabetical-start -#![cfg_attr(bootstrap, feature(no_coverage))] // rust-lang/rust#84605 -#![cfg_attr(not(bootstrap), feature(coverage_attribute))] // rust-lang/rust#84605 #![feature(char_indices_offset)] #![feature(const_align_of_val)] #![feature(const_align_of_val_raw)] @@ -126,7 +126,6 @@ #![feature(const_caller_location)] #![feature(const_cell_into_inner)] #![feature(const_char_from_u32_unchecked)] -#![feature(const_discriminant)] #![feature(const_eval_select)] #![feature(const_exact_div)] #![feature(const_float_bits_conv)] @@ -135,7 +134,6 @@ #![feature(const_hash)] #![feature(const_heap)] #![feature(const_index_range_slice_index)] -#![feature(const_inherent_unchecked_arith)] #![feature(const_int_unchecked_arith)] #![feature(const_intrinsic_forget)] #![feature(const_ipv4)] @@ -149,7 +147,6 @@ #![feature(const_option)] #![feature(const_option_ext)] #![feature(const_pin)] -#![feature(const_pointer_byte_offsets)] #![feature(const_pointer_is_aligned)] #![feature(const_ptr_as_ref)] #![feature(const_ptr_is_null)] @@ -173,6 +170,7 @@ #![feature(const_unsafecell_get_mut)] #![feature(const_waker)] #![feature(core_panic)] +#![feature(coverage_attribute)] #![feature(duration_consts_float)] #![feature(internal_impls_macro)] #![feature(ip)] @@ -189,6 +187,8 @@ #![feature(str_split_inclusive_remainder)] #![feature(str_split_remainder)] #![feature(strict_provenance)] +#![feature(unchecked_math)] +#![feature(unchecked_shifts)] #![feature(utf16_extra)] #![feature(utf16_extra_const)] #![feature(variant_count)] @@ -237,6 +237,7 @@ #![feature(negative_impls)] #![feature(never_type)] #![feature(no_core)] +#![feature(no_sanitize)] #![feature(platform_intrinsics)] #![feature(prelude_import)] #![feature(repr_simd)] @@ -252,6 
+253,7 @@ #![feature(try_blocks)] #![feature(unboxed_closures)] #![feature(unsized_fn_params)] +#![feature(with_negative_coherence)] // tidy-alphabetical-end // // Target features: @@ -367,6 +369,8 @@ pub mod cell; pub mod char; pub mod ffi; +#[unstable(feature = "core_io_borrowed_buf", issue = "117693")] +pub mod io; pub mod iter; pub mod net; pub mod option; @@ -414,7 +418,8 @@ dead_code, unused_imports, unsafe_op_in_unsafe_fn, - ambiguous_glob_reexports + ambiguous_glob_reexports, + deprecated_in_future )] #[allow(rustdoc::bare_urls)] // FIXME: This annotation should be moved into rust-lang/stdarch after clashing_extern_declarations is diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/macros/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/macros/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/macros/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/macros/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -718,7 +718,8 @@ /// The difference between `unimplemented!` and [`todo!`] is that while `todo!` /// conveys an intent of implementing the functionality later and the message is "not yet /// implemented", `unimplemented!` makes no such claims. Its message is "not implemented". -/// Also some IDEs will mark `todo!`s. +/// +/// Also, some IDEs will mark `todo!`s. /// /// # Panics /// @@ -804,11 +805,15 @@ /// The difference between [`unimplemented!`] and `todo!` is that while `todo!` conveys /// an intent of implementing the functionality later and the message is "not yet /// implemented", `unimplemented!` makes no such claims. Its message is "not implemented". -/// Also some IDEs will mark `todo!`s. +/// +/// Also, some IDEs will mark `todo!`s. /// /// # Panics /// -/// This will always [`panic!`]. +/// This will always [`panic!`] because `todo!` is just a shorthand for `panic!` with a +/// fixed, specific message. +/// +/// Like `panic!`, this macro has a second form for displaying custom values. /// /// # Examples /// @@ -816,38 +821,47 @@ /// /// ``` /// trait Foo { -/// fn bar(&self); +/// fn bar(&self) -> u8; /// fn baz(&self); +/// fn qux(&self) -> Result; /// } /// ``` /// /// We want to implement `Foo` on one of our types, but we also want to work on /// just `bar()` first. In order for our code to compile, we need to implement -/// `baz()`, so we can use `todo!`: +/// `baz()` and `qux()`, so we can use `todo!`: /// /// ``` /// # trait Foo { -/// # fn bar(&self); +/// # fn bar(&self) -> u8; /// # fn baz(&self); +/// # fn qux(&self) -> Result; /// # } /// struct MyStruct; /// /// impl Foo for MyStruct { -/// fn bar(&self) { -/// // implementation goes here +/// fn bar(&self) -> u8 { +/// 1 + 1 /// } /// /// fn baz(&self) { -/// // let's not worry about implementing baz() for now +/// // Let's not worry about implementing baz() for now /// todo!(); /// } +/// +/// fn qux(&self) -> Result { +/// // We can add a message to todo! to display our omission. +/// // This will display: +/// // "thread 'main' panicked at 'not yet implemented: MyStruct is not yet quxable'". +/// todo!("MyStruct is not yet quxable"); +/// } /// } /// /// fn main() { /// let s = MyStruct; /// s.bar(); /// -/// // we aren't even using baz(), so this is fine. +/// // We aren't even using baz() or qux(), so this is fine. /// } /// ``` #[macro_export] @@ -1030,6 +1044,7 @@ #[stable(feature = "rust1", since = "1.0.0")] #[rustc_builtin_macro] #[macro_export] + #[rustc_diagnostic_item = "env_macro"] // useful for external lints macro_rules! 
env { ($name:expr $(,)?) => {{ /* compiler built-in */ }}; ($name:expr, $error_msg:expr $(,)?) => {{ /* compiler built-in */ }}; @@ -1060,6 +1075,7 @@ #[stable(feature = "rust1", since = "1.0.0")] #[rustc_builtin_macro] #[macro_export] + #[rustc_diagnostic_item = "option_env_macro"] // useful for external lints macro_rules! option_env { ($name:expr $(,)?) => {{ /* compiler built-in */ }}; } @@ -1465,6 +1481,7 @@ #[stable(feature = "rust1", since = "1.0.0")] #[rustc_builtin_macro] #[macro_export] + #[rustc_diagnostic_item = "include_macro"] // useful for external lints macro_rules! include { ($file:expr $(,)?) => {{ /* compiler built-in */ }}; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/marker.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/marker.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/marker.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/marker.rs 2023-12-21 16:55:28.000000000 +0000 @@ -155,12 +155,18 @@ /// Those implementations are: /// /// - Arrays `[T; N]` implement `Unsize<[T]>`. -/// - Types implementing a trait `Trait` also implement `Unsize`. -/// - Structs `Foo<..., T, ...>` implement `Unsize>` if all of these conditions -/// are met: -/// - `T: Unsize`. -/// - Only the last field of `Foo` has a type involving `T`. -/// - `Bar: Unsize>`, where `Bar` stands for the actual type of that last field. +/// - A type implements `Unsize` if all of these conditions are met: +/// - The type implements `Trait`. +/// - `Trait` is object safe. +/// - The type is sized. +/// - The type outlives `'a`. +/// - Structs `Foo<..., T1, ..., Tn, ...>` implement `Unsize>` +/// where any number of (type and const) parameters may be changed if all of these conditions +/// are met: +/// - Only the last field of `Foo` has a type involving the parameters `T1`, ..., `Tn`. +/// - All other parameters of the struct are equal. +/// - `Field: Unsize>`, where `Field<...>` stands for the actual +/// type of the struct's last field. 
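The struct-unsizing conditions restated in the `Unsize` docs above can be seen with a small stable-Rust sketch; `Wrapper` is a hypothetical type used only for illustration:

```rust
// Only the last field's type involves the unsized parameter, so
// `Wrapper<[u8; 3]>: Unsize<Wrapper<[u8]>>` holds automatically.
struct Wrapper<T: ?Sized> {
    id: u32,     // non-last fields keep their exact types
    payload: T,  // only the last field participates in the coercion
}

fn main() {
    let concrete: Box<Wrapper<[u8; 3]>> = Box::new(Wrapper { id: 7, payload: [1, 2, 3] });
    // `Box` implements `CoerceUnsized`, so this is an ordinary implicit coercion.
    let erased: Box<Wrapper<[u8]>> = concrete;
    assert_eq!(erased.id, 7);
    assert_eq!(erased.payload.len(), 3);
}
```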
/// /// `Unsize` is used along with [`ops::CoerceUnsized`] to allow /// "user-defined" containers such as [`Rc`] to contain dynamically-sized @@ -247,6 +253,7 @@ /// /// const CFN: Wrap = Wrap(higher_order); /// +/// #[allow(pointer_structural_match)] /// fn main() { /// match CFN { /// CFN => {} @@ -573,59 +580,72 @@ #[lang = "sync"] #[rustc_on_unimplemented( on( - any(_Self = "core::cell:OnceCell", _Self = "std::cell::OnceCell"), + _Self = "core::cell::once::OnceCell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::OnceLock` instead" ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + _Self = "core::cell::Cell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` or `std::sync::atomic::AtomicU8` instead", ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + _Self = "core::cell::Cell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` or `std::sync::atomic::AtomicU16` instead", ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + _Self = "core::cell::Cell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` or `std::sync::atomic::AtomicU32` instead", ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + _Self = "core::cell::Cell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` or `std::sync::atomic::AtomicU64` instead", ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + _Self = "core::cell::Cell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` or `std::sync::atomic::AtomicUsize` instead", ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + _Self = "core::cell::Cell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` or `std::sync::atomic::AtomicI8` instead", ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + _Self = "core::cell::Cell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` or `std::sync::atomic::AtomicI16` instead", ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + _Self = "core::cell::Cell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` or `std::sync::atomic::AtomicI32` instead", ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + _Self = "core::cell::Cell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` or `std::sync::atomic::AtomicI64` instead", ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + _Self = "core::cell::Cell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` or `std::sync::atomic::AtomicIsize` instead", ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + _Self = "core::cell::Cell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` or `std::sync::atomic::AtomicBool` instead", ), on( - any(_Self = "core::cell::Cell", _Self = "std::cell::Cell"), + all( + _Self = "core::cell::Cell", + not(_Self = "core::cell::Cell"), + not(_Self = "core::cell::Cell"), + not(_Self = "core::cell::Cell"), + not(_Self = "core::cell::Cell"), + not(_Self = "core::cell::Cell"), + not(_Self = "core::cell::Cell"), 
+ not(_Self = "core::cell::Cell"), + not(_Self = "core::cell::Cell"), + not(_Self = "core::cell::Cell"), + not(_Self = "core::cell::Cell"), + not(_Self = "core::cell::Cell") + ), note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock`", ), on( - any(_Self = "core::cell::RefCell", _Self = "std::cell::RefCell"), + _Self = "core::cell::RefCell", note = "if you want to do aliasing and mutation between multiple threads, use `std::sync::RwLock` instead", ), message = "`{Self}` cannot be shared between threads safely", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/mem/manually_drop.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/mem/manually_drop.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/mem/manually_drop.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/mem/manually_drop.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,12 +4,12 @@ /// A wrapper to inhibit compiler from automatically calling `T`’s destructor. /// This wrapper is 0-cost. /// -/// `ManuallyDrop` is guaranteed to have the same layout as `T`, and is subject -/// to the same layout optimizations as `T`. As a consequence, it has *no effect* -/// on the assumptions that the compiler makes about its contents. For example, -/// initializing a `ManuallyDrop<&mut T>` with [`mem::zeroed`] is undefined -/// behavior. If you need to handle uninitialized data, use [`MaybeUninit`] -/// instead. +/// `ManuallyDrop` is guaranteed to have the same layout and bit validity as +/// `T`, and is subject to the same layout optimizations as `T`. As a consequence, +/// it has *no effect* on the assumptions that the compiler makes about its +/// contents. For example, initializing a `ManuallyDrop<&mut T>` with [`mem::zeroed`] +/// is undefined behavior. If you need to handle uninitialized data, use +/// [`MaybeUninit`] instead. /// /// Note that accessing the value inside a `ManuallyDrop` is safe. /// This means that a `ManuallyDrop` whose content has been dropped must not diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/mem/maybe_uninit.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/mem/maybe_uninit.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/mem/maybe_uninit.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/mem/maybe_uninit.rs 2023-12-21 16:55:28.000000000 +0000 @@ -242,7 +242,7 @@ /// the same size, alignment, and ABI as `T`; it's just that the way `MaybeUninit` implements that /// guarantee may evolve. #[stable(feature = "maybe_uninit", since = "1.36.0")] -// Lang item so we can wrap other types in it. This is useful for generators. +// Lang item so we can wrap other types in it. This is useful for coroutines. #[lang = "maybe_uninit"] #[derive(Copy)] #[repr(transparent)] @@ -374,6 +374,9 @@ /// assert_eq!(x, (0, false)); /// ``` /// + /// This can be used in const contexts, such as to indicate the end of static arrays for + /// plugin registration. + /// /// *Incorrect* usage of this function: calling `x.zeroed().assume_init()` /// when `0` is not a valid bit-pattern for the type: /// @@ -387,17 +390,19 @@ /// // Inside a pair, we create a `NotZero` that does not have a valid discriminant. /// // This is undefined behavior. 
⚠️ /// ``` - #[stable(feature = "maybe_uninit", since = "1.36.0")] - #[rustc_const_unstable(feature = "const_maybe_uninit_zeroed", issue = "91850")] - #[must_use] #[inline] + #[must_use] #[rustc_diagnostic_item = "maybe_uninit_zeroed"] + #[stable(feature = "maybe_uninit", since = "1.36.0")] + // These are OK to allow since we do not leak &mut to user-visible API + #[rustc_allow_const_fn_unstable(const_mut_refs)] + #[rustc_allow_const_fn_unstable(const_ptr_write)] + #[rustc_allow_const_fn_unstable(const_maybe_uninit_as_mut_ptr)] + #[rustc_const_stable(feature = "const_maybe_uninit_zeroed", since = "1.75.0")] pub const fn zeroed() -> MaybeUninit { let mut u = MaybeUninit::::uninit(); // SAFETY: `u.as_mut_ptr()` points to allocated memory. - unsafe { - u.as_mut_ptr().write_bytes(0u8, 1); - } + unsafe { u.as_mut_ptr().write_bytes(0u8, 1) }; u } @@ -686,7 +691,10 @@ /// // they both get dropped! /// ``` #[stable(feature = "maybe_uninit_extra", since = "1.60.0")] - #[rustc_const_unstable(feature = "const_maybe_uninit_assume_init_read", issue = "63567")] + #[rustc_const_stable( + feature = "const_maybe_uninit_assume_init_read", + since = "1.75.0" + )] #[inline(always)] #[track_caller] pub const unsafe fn assume_init_read(&self) -> T { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/mem/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/mem/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/mem/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/mem/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -647,7 +647,8 @@ #[allow(deprecated)] #[rustc_diagnostic_item = "mem_zeroed"] #[track_caller] -pub unsafe fn zeroed() -> T { +#[rustc_const_stable(feature = "const_mem_zeroed", since = "1.75.0")] +pub const unsafe fn zeroed() -> T { // SAFETY: the caller must guarantee that an all-zero value is valid for `T`. unsafe { intrinsics::assert_zero_valid::(); @@ -723,15 +724,12 @@ #[inline] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_swap", issue = "83163")] +#[rustc_diagnostic_item = "mem_swap"] pub const fn swap(x: &mut T, y: &mut T) { // NOTE(eddyb) SPIR-V's Logical addressing model doesn't allow for arbitrary // reinterpretation of values as (chunkable) byte arrays, and the loop in the // block optimization in `swap_slice` is hard to rewrite back // into the (unoptimized) direct swapping implementation, so we disable it. - // FIXME(eddyb) the block optimization also prevents MIR optimizations from - // understanding `mem::replace`, `Option::take`, etc. - a better overall - // solution might be to make `ptr::swap_nonoverlapping` into an intrinsic, which - // a backend can choose to implement using the block optimization, or not. #[cfg(not(any(target_arch = "spirv")))] { // For types that are larger multiples of their alignment, the simple way @@ -768,11 +766,14 @@ // And LLVM actually optimizes it to 3×memcpy if called with // a type larger than it's willing to keep in a register. // Having typed reads and writes in MIR here is also good as - // it lets MIRI and CTFE understand them better, including things + // it lets Miri and CTFE understand them better, including things // like enforcing type validity for them. // Importantly, read+copy_nonoverlapping+write introduces confusing // asymmetry to the behaviour where one value went through read+write // whereas the other was copied over by the intrinsic (see #94371). 
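// Illustrative note (not part of the upstream patch): the hunks above make
// `MaybeUninit::zeroed` and `mem::zeroed` callable in const contexts as of
// Rust 1.75. A minimal sketch, assuming a type whose all-zero bit pattern is
// valid (integers and arrays of them); the constant names are invented.
use std::mem::{self, MaybeUninit};

// SAFETY: the all-zero bit pattern is a valid `[u8; 4]`.
const ZEROED_ARRAY: [u8; 4] = unsafe { MaybeUninit::<[u8; 4]>::zeroed().assume_init() };
// SAFETY: the all-zero bit pattern is a valid `u32`.
const ZEROED_U32: u32 = unsafe { mem::zeroed() };

fn main() {
    assert_eq!(ZEROED_ARRAY, [0u8; 4]);
    assert_eq!(ZEROED_U32, 0);
}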
+ // Furthermore, using only read+write here benefits limited backends + // such as SPIR-V that work on an underlying *typed* view of memory, + // and thus have trouble with Rust's untyped memory operations. // SAFETY: exclusive references are always valid to read/write, // including being aligned, and nothing here panics so it's drop-safe. @@ -909,6 +910,10 @@ #[rustc_const_unstable(feature = "const_replace", issue = "83164")] #[cfg_attr(not(test), rustc_diagnostic_item = "mem_replace")] pub const fn replace(dest: &mut T, src: T) -> T { + // It may be tempting to use `swap` to avoid `unsafe` here. Don't! + // The compiler optimizes the implementation below to two `memcpy`s + // while `swap` would require at least three. See PR#83022 for details. + // SAFETY: We read from `dest` but directly write `src` into it afterwards, // such that the old value is not duplicated. Nothing is dropped and // nothing here can panic. @@ -930,7 +935,7 @@ /// This function is not magic; it is literally defined as /// /// ``` -/// pub fn drop(_x: T) { } +/// pub fn drop(_x: T) {} /// ``` /// /// Because `_x` is moved into the function, it is automatically dropped before @@ -1050,6 +1055,7 @@ /// ``` #[inline] #[must_use] +#[track_caller] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_transmute_copy", since = "1.74.0")] pub const unsafe fn transmute_copy(src: &Src) -> Dst { @@ -1204,7 +1210,7 @@ /// // assert_eq!(0, unsafe { std::mem::transmute::<_, u8>(std::mem::discriminant(&unit_like)) }); /// ``` #[stable(feature = "discriminant_value", since = "1.21.0")] -#[rustc_const_unstable(feature = "const_discriminant", issue = "69821")] +#[rustc_const_stable(feature = "const_discriminant", since = "1.75.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "mem_discriminant")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const fn discriminant(v: &T) -> Discriminant { @@ -1290,16 +1296,71 @@ /// Expands to the offset in bytes of a field from the beginning of the given type. /// -/// Only structs, unions and tuples are supported. +/// Structs, enums, unions and tuples are supported. +/// +/// Nested field accesses may be used, but not array indexes. +/// +/// Enum variants may be traversed as if they were fields. Variants themselves do +/// not have an offset. +/// +/// Visibility is respected - all types and fields must be visible to the call site: +/// +/// ``` +/// #![feature(offset_of)] +/// +/// mod nested { +/// #[repr(C)] +/// pub struct Struct { +/// private: u8, +/// } +/// } +/// +/// // assert_eq!(mem::offset_of!(nested::Struct, private), 0); +/// // ^^^ error[E0616]: field `private` of struct `Struct` is private +/// ``` +/// +/// Note that type layout is, in general, [subject to change and +/// platform-specific](https://doc.rust-lang.org/reference/type-layout.html). If +/// layout stability is required, consider using an [explicit `repr` attribute]. +/// +/// Rust guarantees that the offset of a given field within a given type will not +/// change over the lifetime of the program. However, two different compilations of +/// the same program may result in different layouts. Also, even within a single +/// program execution, no guarantees are made about types which are *similar* but +/// not *identical*, e.g.: /// -/// Nested field accesses may be used, but not array indexes like in `C`'s `offsetof`. +/// ``` +/// #![feature(offset_of)] /// -/// Note that the output of this macro is not stable, except for `#[repr(C)]` types. 
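// Illustrative note (not part of the upstream patch): the hunk above marks
// `mem::discriminant` const-stable as of 1.75. A small sketch; the `Shape`
// enum and constant names are invented for the example.
use std::mem::{discriminant, Discriminant};

#[allow(dead_code)]
enum Shape {
    Circle(f64),
    Square(f64),
}

const CIRCLE: Shape = Shape::Circle(1.0);
// Evaluated at compile time now that `discriminant` is a stable const fn.
const CIRCLE_TAG: Discriminant<Shape> = discriminant(&CIRCLE);

fn main() {
    assert_eq!(CIRCLE_TAG, discriminant(&Shape::Circle(2.0)));
    assert_ne!(CIRCLE_TAG, discriminant(&Shape::Square(2.0)));
}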
+/// struct Wrapper(T, U); +/// +/// type A = Wrapper; +/// type B = Wrapper; +/// +/// // Not necessarily identical even though `u8` and `i8` have the same layout! +/// // assert!(mem::offset_of!(A, 1), mem::offset_of!(B, 1)); +/// +/// #[repr(transparent)] +/// struct U8(u8); +/// +/// type C = Wrapper; +/// +/// // Not necessarily identical even though `u8` and `U8` have the same layout! +/// // assert!(mem::offset_of!(A, 1), mem::offset_of!(C, 1)); +/// +/// struct Empty(core::marker::PhantomData); +/// +/// // Not necessarily identical even though `PhantomData` always has the same layout! +/// // assert!(mem::offset_of!(Empty, 0), mem::offset_of!(Empty, 0)); +/// ``` +/// +/// [explicit `repr` attribute]: https://doc.rust-lang.org/reference/type-layout.html#representations /// /// # Examples /// /// ``` /// #![feature(offset_of)] +/// # #![cfg_attr(not(bootstrap), feature(offset_of_enum))] /// /// use std::mem; /// #[repr(C)] @@ -1322,6 +1383,20 @@ /// struct NestedB(u8); /// /// assert_eq!(mem::offset_of!(NestedA, b.0), 0); +/// +/// #[repr(u8)] +/// enum Enum { +/// A(u8, u16), +/// B { one: u8, two: u16 }, +/// } +/// +/// # #[cfg(not(bootstrap))] +/// assert_eq!(mem::offset_of!(Enum, A.0), 1); +/// # #[cfg(not(bootstrap))] +/// assert_eq!(mem::offset_of!(Enum, B.two), 2); +/// +/// # #[cfg(not(bootstrap))] +/// assert_eq!(mem::offset_of!(Option<&u8>, Some.0), 0); /// ``` #[unstable(feature = "offset_of", issue = "106655")] #[allow_internal_unstable(builtin_syntax, hint_must_use)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/net/ip_addr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/net/ip_addr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/net/ip_addr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/net/ip_addr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,8 @@ use crate::cmp::Ordering; use crate::fmt::{self, Write}; +use crate::iter; use crate::mem::transmute; +use crate::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, Not}; use super::display_buffer::DisplayBuffer; @@ -410,9 +412,12 @@ /// # Examples /// /// ``` - /// #![feature(ip)] /// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; /// + /// let localhost_v4 = Ipv4Addr::new(127, 0, 0, 1); + /// + /// assert_eq!(IpAddr::V4(localhost_v4).to_canonical(), localhost_v4); + /// assert_eq!(IpAddr::V6(localhost_v4.to_ipv6_mapped()).to_canonical(), localhost_v4); /// assert_eq!(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)).to_canonical().is_loopback(), true); /// assert_eq!(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0x7f00, 0x1)).is_loopback(), false); /// assert_eq!(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0x7f00, 0x1)).to_canonical().is_loopback(), true); @@ -420,11 +425,11 @@ #[inline] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[rustc_const_unstable(feature = "const_ip", issue = "76205")] - #[unstable(feature = "ip", issue = "27709")] + #[stable(feature = "ip_to_canonical", since = "1.75.0")] + #[rustc_const_stable(feature = "ip_to_canonical", since = "1.75.0")] pub const fn to_canonical(&self) -> IpAddr { match self { - &v4 @ IpAddr::V4(_) => v4, + IpAddr::V4(_) => *self, IpAddr::V6(v6) => v6.to_canonical(), } } @@ -1748,11 +1753,11 @@ /// Some(Ipv4Addr::new(192, 10, 2, 255))); /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1).to_ipv4_mapped(), None); /// ``` - #[rustc_const_unstable(feature = "const_ipv6", issue = "76205")] - #[stable(feature = "ipv6_to_ipv4_mapped", since = "1.63.0")] + #[inline] 
#[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline] + #[stable(feature = "ipv6_to_ipv4_mapped", since = "1.63.0")] + #[rustc_const_stable(feature = "const_ipv6_to_ipv4_mapped", since = "1.75.0")] pub const fn to_ipv4_mapped(&self) -> Option { match self.octets() { [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, a, b, c, d] => { @@ -1817,11 +1822,11 @@ /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0x7f00, 0x1).is_loopback(), false); /// assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0x7f00, 0x1).to_canonical().is_loopback(), true); /// ``` - #[rustc_const_unstable(feature = "const_ipv6", issue = "76205")] - #[unstable(feature = "ip", issue = "27709")] + #[inline] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[inline] + #[stable(feature = "ip_to_canonical", since = "1.75.0")] + #[rustc_const_stable(feature = "ip_to_canonical", since = "1.75.0")] pub const fn to_canonical(&self) -> IpAddr { if let Some(mapped) = self.to_ipv4_mapped() { return IpAddr::V4(mapped); @@ -2122,3 +2127,132 @@ IpAddr::V6(Ipv6Addr::from(segments)) } } + +#[stable(feature = "ip_bitops", since = "1.75.0")] +impl Not for Ipv4Addr { + type Output = Ipv4Addr; + + #[inline] + fn not(mut self) -> Ipv4Addr { + for octet in &mut self.octets { + *octet = !*octet; + } + self + } +} + +#[stable(feature = "ip_bitops", since = "1.75.0")] +impl Not for &'_ Ipv4Addr { + type Output = Ipv4Addr; + + #[inline] + fn not(self) -> Ipv4Addr { + !*self + } +} + +#[stable(feature = "ip_bitops", since = "1.75.0")] +impl Not for Ipv6Addr { + type Output = Ipv6Addr; + + #[inline] + fn not(mut self) -> Ipv6Addr { + for octet in &mut self.octets { + *octet = !*octet; + } + self + } +} + +#[stable(feature = "ip_bitops", since = "1.75.0")] +impl Not for &'_ Ipv6Addr { + type Output = Ipv6Addr; + + #[inline] + fn not(self) -> Ipv6Addr { + !*self + } +} + +macro_rules! bitop_impls { + ($( + $(#[$attr:meta])* + impl ($BitOp:ident, $BitOpAssign:ident) for $ty:ty = ($bitop:ident, $bitop_assign:ident); + )*) => { + $( + $(#[$attr])* + impl $BitOpAssign for $ty { + fn $bitop_assign(&mut self, rhs: $ty) { + for (lhs, rhs) in iter::zip(&mut self.octets, rhs.octets) { + lhs.$bitop_assign(rhs); + } + } + } + + $(#[$attr])* + impl $BitOpAssign<&'_ $ty> for $ty { + fn $bitop_assign(&mut self, rhs: &'_ $ty) { + self.$bitop_assign(*rhs); + } + } + + $(#[$attr])* + impl $BitOp for $ty { + type Output = $ty; + + #[inline] + fn $bitop(mut self, rhs: $ty) -> $ty { + self.$bitop_assign(rhs); + self + } + } + + $(#[$attr])* + impl $BitOp<&'_ $ty> for $ty { + type Output = $ty; + + #[inline] + fn $bitop(mut self, rhs: &'_ $ty) -> $ty { + self.$bitop_assign(*rhs); + self + } + } + + $(#[$attr])* + impl $BitOp<$ty> for &'_ $ty { + type Output = $ty; + + #[inline] + fn $bitop(self, rhs: $ty) -> $ty { + let mut lhs = *self; + lhs.$bitop_assign(rhs); + lhs + } + } + + $(#[$attr])* + impl $BitOp<&'_ $ty> for &'_ $ty { + type Output = $ty; + + #[inline] + fn $bitop(self, rhs: &'_ $ty) -> $ty { + let mut lhs = *self; + lhs.$bitop_assign(*rhs); + lhs + } + } + )* + }; +} + +bitop_impls! 
{ + #[stable(feature = "ip_bitops", since = "1.75.0")] + impl (BitAnd, BitAndAssign) for Ipv4Addr = (bitand, bitand_assign); + #[stable(feature = "ip_bitops", since = "1.75.0")] + impl (BitOr, BitOrAssign) for Ipv4Addr = (bitor, bitor_assign); + + #[stable(feature = "ip_bitops", since = "1.75.0")] + impl (BitAnd, BitAndAssign) for Ipv6Addr = (bitand, bitand_assign); + #[stable(feature = "ip_bitops", since = "1.75.0")] + impl (BitOr, BitOrAssign) for Ipv6Addr = (bitor, bitor_assign); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/net/socket_addr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/net/socket_addr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/net/socket_addr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/net/socket_addr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,4 @@ -use crate::cmp::Ordering; use crate::fmt::{self, Write}; -use crate::hash; use crate::net::{IpAddr, Ipv4Addr, Ipv6Addr}; use super::display_buffer::DisplayBuffer; @@ -63,7 +61,7 @@ /// assert_eq!(socket.ip(), &Ipv4Addr::new(127, 0, 0, 1)); /// assert_eq!(socket.port(), 8080); /// ``` -#[derive(Copy, Clone, Eq, PartialEq)] +#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[stable(feature = "rust1", since = "1.0.0")] pub struct SocketAddrV4 { ip: Ipv4Addr, @@ -96,7 +94,7 @@ /// assert_eq!(socket.ip(), &Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 1)); /// assert_eq!(socket.port(), 8080); /// ``` -#[derive(Copy, Clone, Eq, PartialEq)] +#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[stable(feature = "rust1", since = "1.0.0")] pub struct SocketAddrV6 { ip: Ipv6Addr, @@ -644,48 +642,3 @@ fmt::Display::fmt(self, fmt) } } - -#[stable(feature = "socketaddr_ordering", since = "1.45.0")] -impl PartialOrd for SocketAddrV4 { - #[inline] - fn partial_cmp(&self, other: &SocketAddrV4) -> Option { - Some(self.cmp(other)) - } -} - -#[stable(feature = "socketaddr_ordering", since = "1.45.0")] -impl PartialOrd for SocketAddrV6 { - #[inline] - fn partial_cmp(&self, other: &SocketAddrV6) -> Option { - Some(self.cmp(other)) - } -} - -#[stable(feature = "socketaddr_ordering", since = "1.45.0")] -impl Ord for SocketAddrV4 { - #[inline] - fn cmp(&self, other: &SocketAddrV4) -> Ordering { - self.ip().cmp(other.ip()).then(self.port().cmp(&other.port())) - } -} - -#[stable(feature = "socketaddr_ordering", since = "1.45.0")] -impl Ord for SocketAddrV6 { - #[inline] - fn cmp(&self, other: &SocketAddrV6) -> Ordering { - self.ip().cmp(other.ip()).then(self.port().cmp(&other.port())) - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl hash::Hash for SocketAddrV4 { - fn hash(&self, s: &mut H) { - (self.port, self.ip).hash(s) - } -} -#[stable(feature = "rust1", since = "1.0.0")] -impl hash::Hash for SocketAddrV6 { - fn hash(&self, s: &mut H) { - (self.port, &self.ip, self.flowinfo, self.scope_id).hash(s) - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/f32.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/f32.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/f32.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/f32.rs 2023-12-21 16:55:28.000000000 +0000 @@ -377,6 +377,13 @@ pub const MANTISSA_DIGITS: u32 = 24; /// Approximate number of significant digits in base 10. + /// + /// This is the maximum x such that any decimal number with x + /// significant digits can be converted to `f32` and back without loss. 
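// Illustrative note (not part of the upstream patch): the hunks above stabilise
// `to_canonical` (as a const fn) and add bitwise operators for IP addresses in
// 1.75. A short sketch; the addresses are arbitrary examples.
use std::net::{IpAddr, Ipv4Addr};

fn main() {
    // An IPv4-mapped IPv6 address canonicalises back to its IPv4 form.
    let v4 = Ipv4Addr::new(127, 0, 0, 1);
    let mapped = IpAddr::V6(v4.to_ipv6_mapped());
    assert_eq!(mapped.to_canonical(), IpAddr::V4(v4));

    // Octet-by-octet bitwise masking via the new `ip_bitops` impls.
    let addr = Ipv4Addr::new(192, 168, 1, 17);
    let mask = Ipv4Addr::new(255, 255, 255, 0);
    assert_eq!(addr & mask, Ipv4Addr::new(192, 168, 1, 0));
    assert_eq!(addr | !mask, Ipv4Addr::new(192, 168, 1, 255));
}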
+ /// + /// Equal to floor(log10 2[`MANTISSA_DIGITS`] − 1). + /// + /// [`MANTISSA_DIGITS`]: f32::MANTISSA_DIGITS #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const DIGITS: u32 = 6; @@ -384,31 +391,62 @@ /// /// This is the difference between `1.0` and the next larger representable number. /// + /// Equal to 21 − [`MANTISSA_DIGITS`]. + /// /// [Machine epsilon]: https://en.wikipedia.org/wiki/Machine_epsilon + /// [`MANTISSA_DIGITS`]: f32::MANTISSA_DIGITS #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const EPSILON: f32 = 1.19209290e-07_f32; /// Smallest finite `f32` value. + /// + /// Equal to −[`MAX`]. + /// + /// [`MAX`]: f32::MAX #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MIN: f32 = -3.40282347e+38_f32; /// Smallest positive normal `f32` value. + /// + /// Equal to 2[`MIN_EXP`] − 1. + /// + /// [`MIN_EXP`]: f32::MIN_EXP #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MIN_POSITIVE: f32 = 1.17549435e-38_f32; /// Largest finite `f32` value. + /// + /// Equal to + /// (1 − 2−[`MANTISSA_DIGITS`]) 2[`MAX_EXP`]. + /// + /// [`MANTISSA_DIGITS`]: f32::MANTISSA_DIGITS + /// [`MAX_EXP`]: f32::MAX_EXP #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MAX: f32 = 3.40282347e+38_f32; /// One greater than the minimum possible normal power of 2 exponent. + /// + /// If x = `MIN_EXP`, then normal numbers + /// ≥ 0.5 × 2x. #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MIN_EXP: i32 = -125; /// Maximum possible power of 2 exponent. + /// + /// If x = `MAX_EXP`, then normal numbers + /// < 1 × 2x. #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MAX_EXP: i32 = 128; - /// Minimum possible normal power of 10 exponent. + /// Minimum x for which 10x is normal. + /// + /// Equal to ceil(log10 [`MIN_POSITIVE`]). + /// + /// [`MIN_POSITIVE`]: f32::MIN_POSITIVE #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MIN_10_EXP: i32 = -37; - /// Maximum possible power of 10 exponent. + /// Maximum x for which 10x is normal. + /// + /// Equal to floor(log10 [`MAX`]). + /// + /// [`MAX`]: f32::MAX #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MAX_10_EXP: i32 = 38; @@ -820,7 +858,7 @@ /// let angle = std::f32::consts::PI; /// /// let abs_difference = (angle.to_degrees() - 180.0).abs(); - /// + /// # #[cfg(any(not(target_arch = "x86"), target_feature = "sse2"))] /// assert!(abs_difference <= f32::EPSILON); /// ``` #[must_use = "this returns the result of the operation, \ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/f64.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/f64.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/f64.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/f64.rs 2023-12-21 16:55:28.000000000 +0000 @@ -376,6 +376,13 @@ #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MANTISSA_DIGITS: u32 = 53; /// Approximate number of significant digits in base 10. + /// + /// This is the maximum x such that any decimal number with x + /// significant digits can be converted to `f64` and back without loss. + /// + /// Equal to floor(log10 2[`MANTISSA_DIGITS`] − 1). + /// + /// [`MANTISSA_DIGITS`]: f64::MANTISSA_DIGITS #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const DIGITS: u32 = 15; @@ -383,31 +390,62 @@ /// /// This is the difference between `1.0` and the next larger representable number. /// + /// Equal to 21 − [`MANTISSA_DIGITS`]. 
+ /// /// [Machine epsilon]: https://en.wikipedia.org/wiki/Machine_epsilon + /// [`MANTISSA_DIGITS`]: f64::MANTISSA_DIGITS #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const EPSILON: f64 = 2.2204460492503131e-16_f64; /// Smallest finite `f64` value. + /// + /// Equal to −[`MAX`]. + /// + /// [`MAX`]: f64::MAX #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MIN: f64 = -1.7976931348623157e+308_f64; /// Smallest positive normal `f64` value. + /// + /// Equal to 2[`MIN_EXP`] − 1. + /// + /// [`MIN_EXP`]: f64::MIN_EXP #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MIN_POSITIVE: f64 = 2.2250738585072014e-308_f64; /// Largest finite `f64` value. + /// + /// Equal to + /// (1 − 2−[`MANTISSA_DIGITS`]) 2[`MAX_EXP`]. + /// + /// [`MANTISSA_DIGITS`]: f64::MANTISSA_DIGITS + /// [`MAX_EXP`]: f64::MAX_EXP #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MAX: f64 = 1.7976931348623157e+308_f64; /// One greater than the minimum possible normal power of 2 exponent. + /// + /// If x = `MIN_EXP`, then normal numbers + /// ≥ 0.5 × 2x. #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MIN_EXP: i32 = -1021; /// Maximum possible power of 2 exponent. + /// + /// If x = `MAX_EXP`, then normal numbers + /// < 1 × 2x. #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MAX_EXP: i32 = 1024; - /// Minimum possible normal power of 10 exponent. + /// Minimum x for which 10x is normal. + /// + /// Equal to ceil(log10 [`MIN_POSITIVE`]). + /// + /// [`MIN_POSITIVE`]: f64::MIN_POSITIVE #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MIN_10_EXP: i32 = -307; - /// Maximum possible power of 10 exponent. + /// Maximum x for which 10x is normal. + /// + /// Equal to floor(log10 [`MAX`]). 
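// Illustrative note (not part of the upstream patch): the identities spelled
// out by the new doc comments above can be spot-checked numerically.
fn main() {
    // EPSILON = 2^(1 - MANTISSA_DIGITS)
    assert_eq!(f32::EPSILON, 2f32.powi(1 - f32::MANTISSA_DIGITS as i32));
    assert_eq!(f64::EPSILON, 2f64.powi(1 - f64::MANTISSA_DIGITS as i32));
    // MIN = -MAX
    assert_eq!(f32::MIN, -f32::MAX);
    // MIN_POSITIVE = 2^(MIN_EXP - 1)
    assert_eq!(f64::MIN_POSITIVE, 2f64.powi(f64::MIN_EXP - 1));
    // DIGITS = floor(log10(2^(MANTISSA_DIGITS - 1))) = 15 for f64
    assert_eq!(f64::DIGITS, 15);
}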
+ /// + /// [`MAX`]: f64::MAX #[stable(feature = "assoc_int_consts", since = "1.43.0")] pub const MAX_10_EXP: i32 = 308; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/int_macros.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/int_macros.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/int_macros.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/int_macros.rs 2023-12-21 16:55:28.000000000 +0000 @@ -471,7 +471,7 @@ )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] + #[rustc_const_unstable(feature = "unchecked_math", issue = "85122")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn unchecked_add(self, rhs: Self) -> Self { @@ -539,7 +539,7 @@ )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] + #[rustc_const_unstable(feature = "unchecked_math", issue = "85122")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self { @@ -607,7 +607,7 @@ )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] + #[rustc_const_unstable(feature = "unchecked_math", issue = "85122")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn unchecked_mul(self, rhs: Self) -> Self { @@ -740,6 +740,31 @@ if unlikely!(b) {None} else {Some(a)} } + /// Unchecked negation. Computes `-self`, assuming overflow cannot occur. + /// + /// # Safety + /// + /// This results in undefined behavior when + #[doc = concat!("`self == ", stringify!($SelfT), "::MIN`,")] + /// i.e. when [`checked_neg`] would return `None`. + /// + #[doc = concat!("[`checked_neg`]: ", stringify!($SelfT), "::checked_neg")] + #[unstable( + feature = "unchecked_neg", + reason = "niche optimization path", + issue = "85122", + )] + #[must_use = "this returns the result of the operation, \ + without modifying the original"] + #[rustc_const_unstable(feature = "unchecked_neg", issue = "85122")] + #[inline(always)] + #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + pub const unsafe fn unchecked_neg(self) -> Self { + // SAFETY: the caller must uphold the safety contract for + // `unchecked_neg`. + unsafe { intrinsics::unchecked_sub(0, self) } + } + /// Checked shift left. Computes `self << rhs`, returning `None` if `rhs` is larger /// than or equal to the number of bits in `self`. 
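// Illustrative note (not part of the upstream patch): the hunk above adds the
// unstable `unchecked_neg` (feature `unchecked_neg`, tracking issue 85122).
// A nightly-only sketch with an arbitrary value.
#![feature(unchecked_neg)]

fn main() {
    let x: i32 = 5;
    // SAFETY: 5 != i32::MIN, so negation cannot overflow.
    let y = unsafe { x.unchecked_neg() };
    assert_eq!(y, -5);
}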
/// @@ -772,13 +797,13 @@ /// #[doc = concat!("[`checked_shl`]: ", stringify!($SelfT), "::checked_shl")] #[unstable( - feature = "unchecked_math", + feature = "unchecked_shifts", reason = "niche optimization path", issue = "85122", )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] + #[rustc_const_unstable(feature = "unchecked_shifts", issue = "85122")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self { @@ -820,13 +845,13 @@ /// #[doc = concat!("[`checked_shr`]: ", stringify!($SelfT), "::checked_shr")] #[unstable( - feature = "unchecked_math", + feature = "unchecked_shifts", reason = "niche optimization path", issue = "85122", )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] + #[rustc_const_unstable(feature = "unchecked_shifts", issue = "85122")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self { @@ -1404,7 +1429,7 @@ #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline(always)] - #[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)] + #[rustc_allow_const_fn_unstable(unchecked_shifts)] pub const fn wrapping_shl(self, rhs: u32) -> Self { // SAFETY: the masking by the bitsize of the type ensures that we do not shift // out of bounds @@ -1434,7 +1459,7 @@ #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline(always)] - #[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)] + #[rustc_allow_const_fn_unstable(unchecked_shifts)] pub const fn wrapping_shr(self, rhs: u32) -> Self { // SAFETY: the masking by the bitsize of the type ensures that we do not shift // out of bounds diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -114,7 +114,7 @@ without modifying the original"] #[inline] pub const fn midpoint(self, rhs: $SelfT) -> $SelfT { - // Use the well known branchless algorthim from Hacker's Delight to compute + // Use the well known branchless algorithm from Hacker's Delight to compute // `(a + b) / 2` without overflowing: `((a ^ b) >> 1) + (a & b)`. 
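// Illustrative note (not part of the upstream patch): the comment above refers
// to the Hacker's Delight identity used by `midpoint`. A standalone sketch with
// a hypothetical helper name, showing why it avoids the overflow of `(a + b) / 2`.
fn midpoint_u32(a: u32, b: u32) -> u32 {
    // Branchless floor((a + b) / 2) without ever computing a + b.
    ((a ^ b) >> 1) + (a & b)
}

fn main() {
    assert_eq!(midpoint_u32(7, 3), 5);
    // The naive (a + b) / 2 would overflow for these operands.
    assert_eq!(midpoint_u32(u32::MAX, u32::MAX - 2), u32::MAX - 1);
}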
((self ^ rhs) >> 1) + (self & rhs) } @@ -791,7 +791,7 @@ #[rustc_const_stable(feature = "const_ascii_ctype_on_intrinsics", since = "1.47.0")] #[inline] pub const fn is_ascii_alphanumeric(&self) -> bool { - matches!(*self, b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z') + matches!(*self, b'0'..=b'9') | matches!(*self, b'A'..=b'Z') | matches!(*self, b'a'..=b'z') } /// Checks if the value is an ASCII decimal digit: @@ -894,7 +894,7 @@ #[rustc_const_stable(feature = "const_ascii_ctype_on_intrinsics", since = "1.47.0")] #[inline] pub const fn is_ascii_hexdigit(&self) -> bool { - matches!(*self, b'0'..=b'9' | b'A'..=b'F' | b'a'..=b'f') + matches!(*self, b'0'..=b'9') | matches!(*self, b'A'..=b'F') | matches!(*self, b'a'..=b'f') } /// Checks if the value is an ASCII punctuation character: @@ -932,7 +932,10 @@ #[rustc_const_stable(feature = "const_ascii_ctype_on_intrinsics", since = "1.47.0")] #[inline] pub const fn is_ascii_punctuation(&self) -> bool { - matches!(*self, b'!'..=b'/' | b':'..=b'@' | b'['..=b'`' | b'{'..=b'~') + matches!(*self, b'!'..=b'/') + | matches!(*self, b':'..=b'@') + | matches!(*self, b'['..=b'`') + | matches!(*self, b'{'..=b'~') } /// Checks if the value is an ASCII graphic character: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/saturating.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/saturating.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/saturating.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/saturating.rs 2023-12-21 16:55:28.000000000 +0000 @@ -35,9 +35,7 @@ #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Hash)] #[repr(transparent)] #[rustc_diagnostic_item = "Saturating"] -pub struct Saturating( - #[stable(feature = "saturating_int_impl", since = "1.74.0")] pub T, -); +pub struct Saturating(#[stable(feature = "saturating_int_impl", since = "1.74.0")] pub T); #[stable(feature = "saturating_int_impl", since = "1.74.0")] impl fmt::Debug for Saturating { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/uint_macros.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/uint_macros.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/num/uint_macros.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/num/uint_macros.rs 2023-12-21 16:55:28.000000000 +0000 @@ -479,7 +479,7 @@ )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] + #[rustc_const_unstable(feature = "unchecked_math", issue = "85122")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn unchecked_add(self, rhs: Self) -> Self { @@ -548,7 +548,7 @@ )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] + #[rustc_const_unstable(feature = "unchecked_math", issue = "85122")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self { @@ -595,7 +595,7 @@ )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] + #[rustc_const_unstable(feature = "unchecked_math", issue = "85122")] #[inline(always)] #[cfg_attr(miri, 
track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn unchecked_mul(self, rhs: Self) -> Self { @@ -926,13 +926,13 @@ /// #[doc = concat!("[`checked_shl`]: ", stringify!($SelfT), "::checked_shl")] #[unstable( - feature = "unchecked_math", + feature = "unchecked_shifts", reason = "niche optimization path", issue = "85122", )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] + #[rustc_const_unstable(feature = "unchecked_shifts", issue = "85122")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self { @@ -974,13 +974,13 @@ /// #[doc = concat!("[`checked_shr`]: ", stringify!($SelfT), "::checked_shr")] #[unstable( - feature = "unchecked_math", + feature = "unchecked_shifts", reason = "niche optimization path", issue = "85122", )] #[must_use = "this returns the result of the operation, \ without modifying the original"] - #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] + #[rustc_const_unstable(feature = "unchecked_shifts", issue = "85122")] #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self { @@ -1418,7 +1418,7 @@ #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline(always)] - #[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)] + #[rustc_allow_const_fn_unstable(unchecked_shifts)] pub const fn wrapping_shl(self, rhs: u32) -> Self { // SAFETY: the masking by the bitsize of the type ensures that we do not shift // out of bounds @@ -1451,7 +1451,7 @@ #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline(always)] - #[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)] + #[rustc_allow_const_fn_unstable(unchecked_shifts)] pub const fn wrapping_shr(self, rhs: u32) -> Self { // SAFETY: the masking by the bitsize of the type ensures that we do not shift // out of bounds diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/coroutine.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/coroutine.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/coroutine.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/coroutine.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,139 @@ +use crate::marker::Unpin; +use crate::pin::Pin; + +/// The result of a coroutine resumption. +/// +/// This enum is returned from the `Coroutine::resume` method and indicates the +/// possible return values of a coroutine. Currently this corresponds to either +/// a suspension point (`Yielded`) or a termination point (`Complete`). +#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] +#[cfg_attr(bootstrap, lang = "generator_state")] +#[cfg_attr(not(bootstrap), lang = "coroutine_state")] +#[unstable(feature = "coroutine_trait", issue = "43122")] +pub enum CoroutineState { + /// The coroutine suspended with a value. + /// + /// This state indicates that a coroutine has been suspended, and typically + /// corresponds to a `yield` statement. The value provided in this variant + /// corresponds to the expression passed to `yield` and allows coroutines to + /// provide a value each time they yield. 
+ Yielded(Y), + + /// The coroutine completed with a return value. + /// + /// This state indicates that a coroutine has finished execution with the + /// provided value. Once a coroutine has returned `Complete` it is + /// considered a programmer error to call `resume` again. + Complete(R), +} + +/// The trait implemented by builtin coroutine types. +/// +/// Coroutines are currently an +/// experimental language feature in Rust. Added in [RFC 2033] coroutines are +/// currently intended to primarily provide a building block for async/await +/// syntax but will likely extend to also providing an ergonomic definition for +/// iterators and other primitives. +/// +/// The syntax and semantics for coroutines is unstable and will require a +/// further RFC for stabilization. At this time, though, the syntax is +/// closure-like: +/// +/// ```rust +/// #![cfg_attr(bootstrap, feature(generators))] +/// #![cfg_attr(not(bootstrap), feature(coroutines))] +/// #![feature(coroutine_trait)] +/// +/// use std::ops::{Coroutine, CoroutineState}; +/// use std::pin::Pin; +/// +/// fn main() { +/// let mut coroutine = || { +/// yield 1; +/// "foo" +/// }; +/// +/// match Pin::new(&mut coroutine).resume(()) { +/// CoroutineState::Yielded(1) => {} +/// _ => panic!("unexpected return from resume"), +/// } +/// match Pin::new(&mut coroutine).resume(()) { +/// CoroutineState::Complete("foo") => {} +/// _ => panic!("unexpected return from resume"), +/// } +/// } +/// ``` +/// +/// More documentation of coroutines can be found in the [unstable book]. +/// +/// [RFC 2033]: https://github.com/rust-lang/rfcs/pull/2033 +/// [unstable book]: ../../unstable-book/language-features/coroutines.html +#[cfg_attr(bootstrap, lang = "generator")] +#[cfg_attr(not(bootstrap), lang = "coroutine")] +#[unstable(feature = "coroutine_trait", issue = "43122")] +#[fundamental] +pub trait Coroutine { + /// The type of value this coroutine yields. + /// + /// This associated type corresponds to the `yield` expression and the + /// values which are allowed to be returned each time a coroutine yields. + /// For example an iterator-as-a-coroutine would likely have this type as + /// `T`, the type being iterated over. + type Yield; + + /// The type of value this coroutine returns. + /// + /// This corresponds to the type returned from a coroutine either with a + /// `return` statement or implicitly as the last expression of a coroutine + /// literal. For example futures would use this as `Result` as it + /// represents a completed future. + type Return; + + /// Resumes the execution of this coroutine. + /// + /// This function will resume execution of the coroutine or start execution + /// if it hasn't already. This call will return back into the coroutine's + /// last suspension point, resuming execution from the latest `yield`. The + /// coroutine will continue executing until it either yields or returns, at + /// which point this function will return. + /// + /// # Return value + /// + /// The `CoroutineState` enum returned from this function indicates what + /// state the coroutine is in upon returning. If the `Yielded` variant is + /// returned then the coroutine has reached a suspension point and a value + /// has been yielded out. Coroutines in this state are available for + /// resumption at a later point. + /// + /// If `Complete` is returned then the coroutine has completely finished + /// with the value provided. It is invalid for the coroutine to be resumed + /// again. 
+ /// + /// # Panics + /// + /// This function may panic if it is called after the `Complete` variant has + /// been returned previously. While coroutine literals in the language are + /// guaranteed to panic on resuming after `Complete`, this is not guaranteed + /// for all implementations of the `Coroutine` trait. + fn resume(self: Pin<&mut Self>, arg: R) -> CoroutineState; +} + +#[unstable(feature = "coroutine_trait", issue = "43122")] +impl, R> Coroutine for Pin<&mut G> { + type Yield = G::Yield; + type Return = G::Return; + + fn resume(mut self: Pin<&mut Self>, arg: R) -> CoroutineState { + G::resume((*self).as_mut(), arg) + } +} + +#[unstable(feature = "coroutine_trait", issue = "43122")] +impl + Unpin, R> Coroutine for &mut G { + type Yield = G::Yield; + type Return = G::Return; + + fn resume(mut self: Pin<&mut Self>, arg: R) -> CoroutineState { + G::resume(Pin::new(&mut *self), arg) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/deref.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/deref.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/deref.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/deref.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,40 +3,107 @@ /// In addition to being used for explicit dereferencing operations with the /// (unary) `*` operator in immutable contexts, `Deref` is also used implicitly /// by the compiler in many circumstances. This mechanism is called -/// ['`Deref` coercion'][more]. In mutable contexts, [`DerefMut`] is used. +/// ["`Deref` coercion"][coercion]. In mutable contexts, [`DerefMut`] is used and +/// mutable deref coercion similarly occurs. /// -/// Implementing `Deref` for smart pointers makes accessing the data behind them -/// convenient, which is why they implement `Deref`. On the other hand, the -/// rules regarding `Deref` and [`DerefMut`] were designed specifically to -/// accommodate smart pointers. Because of this, **`Deref` should only be -/// implemented for smart pointers** to avoid confusion. +/// **Warning:** Deref coercion is a powerful language feature which has +/// far-reaching implications for every type that implements `Deref`. The +/// compiler will silently insert calls to `Deref::deref`. For this reason, one +/// should be careful about implementing `Deref` and only do so when deref +/// coercion is desirable. See [below][implementing] for advice on when this is +/// typically desirable or undesirable. +/// +/// Types that implement `Deref` or `DerefMut` are often called "smart +/// pointers" and the mechanism of deref coercion has been specifically designed +/// to facilitate the pointer-like behaviour that name suggests. Often, the +/// purpose of a "smart pointer" type is to change the ownership semantics +/// of a contained value (for example, [`Rc`][rc] or [`Cow`][cow]) or the +/// storage semantics of a contained value (for example, [`Box`][box]). /// -/// For similar reasons, **this trait should never fail**. Failure during -/// dereferencing can be extremely confusing when `Deref` is invoked implicitly. +/// # Deref coercion /// -/// Violating these requirements is a logic error. The behavior resulting from a logic error is not -/// specified, but users of the trait must ensure that such logic errors do *not* result in -/// undefined behavior. This means that `unsafe` code **must not** rely on the correctness of this -/// method. 
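// Illustrative note (not part of the upstream patch): the `Coroutine` trait
// documented above is unstable (feature `coroutine_trait`, issue 43122). A
// nightly-only sketch of a hand-written implementor; the `Countdown` type is
// invented for the example.
#![feature(coroutine_trait)]

use std::ops::{Coroutine, CoroutineState};
use std::pin::Pin;

struct Countdown(u32);

impl Coroutine<()> for Countdown {
    type Yield = u32;
    type Return = &'static str;

    fn resume(mut self: Pin<&mut Self>, _arg: ()) -> CoroutineState<u32, &'static str> {
        if self.0 == 0 {
            CoroutineState::Complete("done")
        } else {
            self.0 -= 1;
            CoroutineState::Yielded(self.0)
        }
    }
}

fn main() {
    let mut c = Countdown(2);
    assert_eq!(Pin::new(&mut c).resume(()), CoroutineState::Yielded(1));
    assert_eq!(Pin::new(&mut c).resume(()), CoroutineState::Yielded(0));
    assert_eq!(Pin::new(&mut c).resume(()), CoroutineState::Complete("done"));
}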
+/// If `T` implements `Deref`, and `v` is a value of type `T`, then: /// -/// # More on `Deref` coercion -/// -/// If `T` implements `Deref`, and `x` is a value of type `T`, then: -/// -/// * In immutable contexts, `*x` (where `T` is neither a reference nor a raw pointer) -/// is equivalent to `*Deref::deref(&x)`. +/// * In immutable contexts, `*v` (where `T` is neither a reference nor a raw +/// pointer) is equivalent to `*Deref::deref(&v)`. /// * Values of type `&T` are coerced to values of type `&U` -/// * `T` implicitly implements all the (immutable) methods of the type `U`. +/// * `T` implicitly implements all the methods of the type `U` which take the +/// `&self` receiver. /// /// For more details, visit [the chapter in *The Rust Programming Language*][book] /// as well as the reference sections on [the dereference operator][ref-deref-op], -/// [method resolution] and [type coercions]. +/// [method resolution], and [type coercions]. +/// +/// # When to implement `Deref` or `DerefMut` +/// +/// The same advice applies to both deref traits. In general, deref traits +/// **should** be implemented if: +/// +/// 1. a value of the type transparently behaves like a value of the target +/// type; +/// 1. the implementation of the deref function is cheap; and +/// 1. users of the type will not be surprised by any deref coercion behaviour. +/// +/// In general, deref traits **should not** be implemented if: +/// +/// 1. the deref implementations could fail unexpectedly; or +/// 1. the type has methods that are likely to collide with methods on the +/// target type; or +/// 1. committing to deref coercion as part of the public API is not desirable. +/// +/// Note that there's a large difference between implementing deref traits +/// generically over many target types, and doing so only for specific target +/// types. +/// +/// Generic implementations, such as for [`Box`][box] (which is generic over +/// every type and dereferences to `T`) should be careful to provide few or no +/// methods, since the target type is unknown and therefore every method could +/// collide with one on the target type, causing confusion for users. +/// `impl Box` has no methods (though several associated functions), +/// partly for this reason. +/// +/// Specific implementations, such as for [`String`][string] (whose `Deref` +/// implementation has `Target = str`) can have many methods, since avoiding +/// collision is much easier. `String` and `str` both have many methods, and +/// `String` additionally behaves as if it has every method of `str` because of +/// deref coercion. The implementing type may also be generic while the +/// implementation is still specific in this sense; for example, [`Vec`][vec] +/// dereferences to `[T]`, so methods of `T` are not applicable. +/// +/// Consider also that deref coericion means that deref traits are a much larger +/// part of a type's public API than any other trait as it is implicitly called +/// by the compiler. Therefore, it is advisable to consider whether this is +/// something you are comfortable supporting as a public API. +/// +/// The [`AsRef`] and [`Borrow`][core::borrow::Borrow] traits have very similar +/// signatures to `Deref`. It may be desirable to implement either or both of +/// these, whether in addition to or rather than deref traits. See their +/// documentation for details. +/// +/// # Fallibility +/// +/// **This trait's method should never unexpectedly fail**. 
Deref coercion means +/// the compiler will often insert calls to `Deref::deref` implicitly. Failure +/// during dereferencing can be extremely confusing when `Deref` is invoked +/// implicitly. In the majority of uses it should be infallible, though it may +/// be acceptable to panic if the type is misused through programmer error, for +/// example. +/// +/// However, infallibility is not enforced and therefore not guaranteed. +/// As such, `unsafe` code should not rely on infallibility in general for +/// soundness. /// /// [book]: ../../book/ch15-02-deref.html -/// [more]: #more-on-deref-coercion +/// [coercion]: #deref-coercion +/// [implementing]: #when-to-implement-deref-or-derefmut /// [ref-deref-op]: ../../reference/expressions/operator-expr.html#the-dereference-operator /// [method resolution]: ../../reference/expressions/method-call-expr.html /// [type coercions]: ../../reference/type-coercions.html +/// [box]: ../../alloc/boxed/struct.Box.html +/// [string]: ../../alloc/string/struct.String.html +/// [vec]: ../../alloc/vec/struct.Vec.html +/// [rc]: ../../alloc/rc/struct.Rc.html +/// [cow]: ../../alloc/borrow/enum.Cow.html /// /// # Examples /// @@ -107,30 +174,29 @@ /// In addition to being used for explicit dereferencing operations with the /// (unary) `*` operator in mutable contexts, `DerefMut` is also used implicitly /// by the compiler in many circumstances. This mechanism is called -/// ['`Deref` coercion'][more]. In immutable contexts, [`Deref`] is used. +/// ["mutable deref coercion"][coercion]. In immutable contexts, [`Deref`] is used. /// -/// Implementing `DerefMut` for smart pointers makes mutating the data behind -/// them convenient, which is why they implement `DerefMut`. On the other hand, -/// the rules regarding [`Deref`] and `DerefMut` were designed specifically to -/// accommodate smart pointers. Because of this, **`DerefMut` should only be -/// implemented for smart pointers** to avoid confusion. -/// -/// For similar reasons, **this trait should never fail**. Failure during -/// dereferencing can be extremely confusing when `DerefMut` is invoked -/// implicitly. -/// -/// Violating these requirements is a logic error. The behavior resulting from a logic error is not -/// specified, but users of the trait must ensure that such logic errors do *not* result in -/// undefined behavior. This means that `unsafe` code **must not** rely on the correctness of this -/// method. +/// **Warning:** Deref coercion is a powerful language feature which has +/// far-reaching implications for every type that implements `DerefMut`. The +/// compiler will silently insert calls to `DerefMut::deref_mut`. For this +/// reason, one should be careful about implementing `DerefMut` and only do so +/// when mutable deref coercion is desirable. See [the `Deref` docs][implementing] +/// for advice on when this is typically desirable or undesirable. +/// +/// Types that implement `DerefMut` or `Deref` are often called "smart +/// pointers" and the mechanism of deref coercion has been specifically designed +/// to facilitate the pointer-like behaviour that name suggests. Often, the +/// purpose of a "smart pointer" type is to change the ownership semantics +/// of a contained value (for example, [`Rc`][rc] or [`Cow`][cow]) or the +/// storage semantics of a contained value (for example, [`Box`][box]). 
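// Illustrative note (not part of the upstream patch): a minimal sketch of the
// deref-coercion behaviour the rewritten docs above describe, using an invented
// newtype for which "transparently behaves like the target type" holds.
use std::ops::{Deref, DerefMut};

struct Label(String);

impl Deref for Label {
    type Target = str;

    fn deref(&self) -> &str {
        &self.0
    }
}

impl DerefMut for Label {
    fn deref_mut(&mut self) -> &mut str {
        &mut self.0
    }
}

fn takes_str(s: &str) -> usize {
    s.len()
}

fn main() {
    let mut l = Label(String::from("hello"));
    // &Label coerces to &str, and str methods resolve on Label directly.
    assert_eq!(takes_str(&l), 5);
    assert!(l.starts_with("he"));
    // Mutable deref coercion: &mut Label -> &mut str.
    l.make_ascii_uppercase();
    assert_eq!(&*l, "HELLO");
}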
/// -/// # More on `Deref` coercion +/// # Mutable deref coercion /// -/// If `T` implements `DerefMut`, and `x` is a value of type `T`, +/// If `T` implements `DerefMut`, and `v` is a value of type `T`, /// then: /// -/// * In mutable contexts, `*x` (where `T` is neither a reference nor a raw pointer) -/// is equivalent to `*DerefMut::deref_mut(&mut x)`. +/// * In mutable contexts, `*v` (where `T` is neither a reference nor a raw pointer) +/// is equivalent to `*DerefMut::deref_mut(&mut v)`. /// * Values of type `&mut T` are coerced to values of type `&mut U` /// * `T` implicitly implements all the (mutable) methods of the type `U`. /// @@ -138,11 +204,29 @@ /// as well as the reference sections on [the dereference operator][ref-deref-op], /// [method resolution] and [type coercions]. /// +/// # Fallibility +/// +/// **This trait's method should never unexpectedly fail**. Deref coercion means +/// the compiler will often insert calls to `DerefMut::deref_mut` implicitly. +/// Failure during dereferencing can be extremely confusing when `DerefMut` is +/// invoked implicitly. In the majority of uses it should be infallible, though +/// it may be acceptable to panic if the type is misused through programmer +/// error, for example. +/// +/// However, infallibility is not enforced and therefore not guaranteed. +/// As such, `unsafe` code should not rely on infallibility in general for +/// soundness. +/// /// [book]: ../../book/ch15-02-deref.html -/// [more]: #more-on-deref-coercion +/// [coercion]: #mutable-deref-coercion +/// [implementing]: Deref#when-to-implement-deref-or-derefmut /// [ref-deref-op]: ../../reference/expressions/operator-expr.html#the-dereference-operator /// [method resolution]: ../../reference/expressions/method-call-expr.html /// [type coercions]: ../../reference/type-coercions.html +/// [box]: ../../alloc/boxed/struct.Box.html +/// [string]: ../../alloc/string/struct.String.html +/// [rc]: ../../alloc/rc/struct.Rc.html +/// [cow]: ../../alloc/borrow/enum.Cow.html /// /// # Examples /// @@ -180,6 +264,7 @@ pub trait DerefMut: Deref { /// Mutably dereferences the value. 
#[stable(feature = "rust1", since = "1.0.0")] + #[rustc_diagnostic_item = "deref_mut_method"] fn deref_mut(&mut self) -> &mut Self::Target; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/function.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/function.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/function.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/function.rs 2023-12-21 16:55:28.000000000 +0000 @@ -56,7 +56,7 @@ #[lang = "fn"] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_paren_sugar] -#[rustc_on_unimplemented( +#[cfg_attr(not(bootstrap), rustc_on_unimplemented( on( Args = "()", note = "wrap the `{Self}` in a closure with no arguments: `|| {{ /* code */ }}`" @@ -67,9 +67,9 @@ // SAFETY: tidy is not smart enough to tell that the below unsafe block is a string label = "call the function in a closure: `|| unsafe {{ /* code */ }}`" ), - message = "expected a `{Fn}<{Args}>` closure, found `{Self}`", - label = "expected an `Fn<{Args}>` closure, found `{Self}`" -)] + message = "expected a `{Trait}` closure, found `{Self}`", + label = "expected an `{Trait}` closure, found `{Self}`" +))] #[fundamental] // so that regex can rely that `&str: !FnMut` #[must_use = "closures are lazy and do nothing unless called"] // FIXME(effects) #[const_trait] @@ -143,7 +143,7 @@ #[lang = "fn_mut"] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_paren_sugar] -#[rustc_on_unimplemented( +#[cfg_attr(not(bootstrap), rustc_on_unimplemented( on( Args = "()", note = "wrap the `{Self}` in a closure with no arguments: `|| {{ /* code */ }}`" @@ -154,9 +154,9 @@ // SAFETY: tidy is not smart enough to tell that the below unsafe block is a string label = "call the function in a closure: `|| unsafe {{ /* code */ }}`" ), - message = "expected a `{FnMut}<{Args}>` closure, found `{Self}`", - label = "expected an `FnMut<{Args}>` closure, found `{Self}`" -)] + message = "expected a `{Trait}` closure, found `{Self}`", + label = "expected an `{Trait}` closure, found `{Self}`" +))] #[fundamental] // so that regex can rely that `&str: !FnMut` #[must_use = "closures are lazy and do nothing unless called"] // FIXME(effects) #[const_trait] @@ -222,7 +222,7 @@ #[lang = "fn_once"] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_paren_sugar] -#[rustc_on_unimplemented( +#[cfg_attr(not(bootstrap), rustc_on_unimplemented( on( Args = "()", note = "wrap the `{Self}` in a closure with no arguments: `|| {{ /* code */ }}`" @@ -233,9 +233,9 @@ // SAFETY: tidy is not smart enough to tell that the below unsafe block is a string label = "call the function in a closure: `|| unsafe {{ /* code */ }}`" ), - message = "expected a `{FnOnce}<{Args}>` closure, found `{Self}`", - label = "expected an `FnOnce<{Args}>` closure, found `{Self}`" -)] + message = "expected a `{Trait}` closure, found `{Self}`", + label = "expected an `{Trait}` closure, found `{Self}`" +))] #[fundamental] // so that regex can rely that `&str: !FnMut` #[must_use = "closures are lazy and do nothing unless called"] // FIXME(effects) #[const_trait] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/generator.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/generator.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/generator.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/generator.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,135 +0,0 @@ -use crate::marker::Unpin; -use crate::pin::Pin; - -/// The result of 
a generator resumption. -/// -/// This enum is returned from the `Generator::resume` method and indicates the -/// possible return values of a generator. Currently this corresponds to either -/// a suspension point (`Yielded`) or a termination point (`Complete`). -#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] -#[lang = "generator_state"] -#[unstable(feature = "generator_trait", issue = "43122")] -pub enum GeneratorState { - /// The generator suspended with a value. - /// - /// This state indicates that a generator has been suspended, and typically - /// corresponds to a `yield` statement. The value provided in this variant - /// corresponds to the expression passed to `yield` and allows generators to - /// provide a value each time they yield. - Yielded(Y), - - /// The generator completed with a return value. - /// - /// This state indicates that a generator has finished execution with the - /// provided value. Once a generator has returned `Complete` it is - /// considered a programmer error to call `resume` again. - Complete(R), -} - -/// The trait implemented by builtin generator types. -/// -/// Generators, also commonly referred to as coroutines, are currently an -/// experimental language feature in Rust. Added in [RFC 2033] generators are -/// currently intended to primarily provide a building block for async/await -/// syntax but will likely extend to also providing an ergonomic definition for -/// iterators and other primitives. -/// -/// The syntax and semantics for generators is unstable and will require a -/// further RFC for stabilization. At this time, though, the syntax is -/// closure-like: -/// -/// ```rust -/// #![feature(generators, generator_trait)] -/// -/// use std::ops::{Generator, GeneratorState}; -/// use std::pin::Pin; -/// -/// fn main() { -/// let mut generator = || { -/// yield 1; -/// "foo" -/// }; -/// -/// match Pin::new(&mut generator).resume(()) { -/// GeneratorState::Yielded(1) => {} -/// _ => panic!("unexpected return from resume"), -/// } -/// match Pin::new(&mut generator).resume(()) { -/// GeneratorState::Complete("foo") => {} -/// _ => panic!("unexpected return from resume"), -/// } -/// } -/// ``` -/// -/// More documentation of generators can be found in the [unstable book]. -/// -/// [RFC 2033]: https://github.com/rust-lang/rfcs/pull/2033 -/// [unstable book]: ../../unstable-book/language-features/generators.html -#[lang = "generator"] -#[unstable(feature = "generator_trait", issue = "43122")] -#[fundamental] -pub trait Generator { - /// The type of value this generator yields. - /// - /// This associated type corresponds to the `yield` expression and the - /// values which are allowed to be returned each time a generator yields. - /// For example an iterator-as-a-generator would likely have this type as - /// `T`, the type being iterated over. - type Yield; - - /// The type of value this generator returns. - /// - /// This corresponds to the type returned from a generator either with a - /// `return` statement or implicitly as the last expression of a generator - /// literal. For example futures would use this as `Result` as it - /// represents a completed future. - type Return; - - /// Resumes the execution of this generator. - /// - /// This function will resume execution of the generator or start execution - /// if it hasn't already. This call will return back into the generator's - /// last suspension point, resuming execution from the latest `yield`. 
The - /// generator will continue executing until it either yields or returns, at - /// which point this function will return. - /// - /// # Return value - /// - /// The `GeneratorState` enum returned from this function indicates what - /// state the generator is in upon returning. If the `Yielded` variant is - /// returned then the generator has reached a suspension point and a value - /// has been yielded out. Generators in this state are available for - /// resumption at a later point. - /// - /// If `Complete` is returned then the generator has completely finished - /// with the value provided. It is invalid for the generator to be resumed - /// again. - /// - /// # Panics - /// - /// This function may panic if it is called after the `Complete` variant has - /// been returned previously. While generator literals in the language are - /// guaranteed to panic on resuming after `Complete`, this is not guaranteed - /// for all implementations of the `Generator` trait. - fn resume(self: Pin<&mut Self>, arg: R) -> GeneratorState; -} - -#[unstable(feature = "generator_trait", issue = "43122")] -impl, R> Generator for Pin<&mut G> { - type Yield = G::Yield; - type Return = G::Return; - - fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState { - G::resume((*self).as_mut(), arg) - } -} - -#[unstable(feature = "generator_trait", issue = "43122")] -impl + Unpin, R> Generator for &mut G { - type Yield = G::Yield; - type Return = G::Return; - - fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState { - G::resume(Pin::new(&mut *self), arg) - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/index.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/index.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/index.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/index.rs 2023-12-21 16:55:28.000000000 +0000 @@ -153,7 +153,7 @@ see chapter in The Book " ), on( - any(_Self = "alloc::string::String", _Self = "std::string::String"), + _Self = "alloc::string::String", note = "you can use `.chars().nth()` or `.bytes().nth()` see chapter in The Book " ), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,8 +8,8 @@ //! trait, but since the assignment operator (`=`) has no backing trait, there //! is no way of overloading its semantics. Additionally, this module does not //! provide any mechanism to create new operators. If traitless overloading or -//! custom operators are required, you should look toward macros or compiler -//! plugins to extend Rust's syntax. +//! custom operators are required, you should look toward macros to extend +//! Rust's syntax. //! //! Implementations of operator traits should be unsurprising in their //! 
respective contexts, keeping in mind their usual meanings and @@ -141,10 +141,10 @@ mod arith; mod bit; mod control_flow; +mod coroutine; mod deref; mod drop; mod function; -mod generator; mod index; mod index_range; mod range; @@ -198,8 +198,8 @@ pub(crate) use self::try_trait::{ChangeOutputType, NeverShortCircuit}; -#[unstable(feature = "generator_trait", issue = "43122")] -pub use self::generator::{Generator, GeneratorState}; +#[unstable(feature = "coroutine_trait", issue = "43122")] +pub use self::coroutine::{Coroutine, CoroutineState}; #[unstable(feature = "coerce_unsized", issue = "18598")] pub use self::unsize::CoerceUnsized; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/range.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/range.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/range.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/range.rs 2023-12-21 16:55:28.000000000 +0000 @@ -115,6 +115,7 @@ /// assert!(!(0.0..f32::NAN).contains(&0.5)); /// assert!(!(f32::NAN..1.0).contains(&0.5)); /// ``` + #[inline] #[stable(feature = "range_contains", since = "1.35.0")] pub fn contains(&self, item: &U) -> bool where @@ -141,6 +142,7 @@ /// assert!( (3.0..f32::NAN).is_empty()); /// assert!( (f32::NAN..5.0).is_empty()); /// ``` + #[inline] #[stable(feature = "range_is_empty", since = "1.47.0")] pub fn is_empty(&self) -> bool { !(self.start < self.end) @@ -213,6 +215,7 @@ /// assert!(!(0.0..).contains(&f32::NAN)); /// assert!(!(f32::NAN..).contains(&0.5)); /// ``` + #[inline] #[stable(feature = "range_contains", since = "1.35.0")] pub fn contains(&self, item: &U) -> bool where @@ -294,6 +297,7 @@ /// assert!(!(..1.0).contains(&f32::NAN)); /// assert!(!(..f32::NAN).contains(&0.5)); /// ``` + #[inline] #[stable(feature = "range_contains", since = "1.35.0")] pub fn contains(&self, item: &U) -> bool where @@ -500,6 +504,7 @@ /// // Precise field values are unspecified here /// assert!(!r.contains(&3) && !r.contains(&5)); /// ``` + #[inline] #[stable(feature = "range_contains", since = "1.35.0")] pub fn contains(&self, item: &U) -> bool where @@ -613,6 +618,7 @@ /// assert!(!(..=1.0).contains(&f32::NAN)); /// assert!(!(..=f32::NAN).contains(&0.5)); /// ``` + #[inline] #[stable(feature = "range_contains", since = "1.35.0")] pub fn contains(&self, item: &U) -> bool where @@ -758,6 +764,7 @@ /// `RangeBounds` is implemented by Rust's built-in range types, produced /// by range syntax like `..`, `a..`, `..b`, `..=c`, `d..e`, or `f..=g`. #[stable(feature = "collections_range", since = "1.28.0")] +#[rustc_diagnostic_item = "RangeBounds"] pub trait RangeBounds { /// Start index bound. 
/// @@ -807,6 +814,7 @@ /// assert!(!(0.0..1.0).contains(&f32::NAN)); /// assert!(!(0.0..f32::NAN).contains(&0.5)); /// assert!(!(f32::NAN..1.0).contains(&0.5)); + #[inline] #[stable(feature = "range_contains", since = "1.35.0")] fn contains(&self, item: &U) -> bool where diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/try_trait.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/try_trait.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ops/try_trait.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ops/try_trait.rs 2023-12-21 16:55:28.000000000 +0000 @@ -226,14 +226,8 @@ on( all( from_desugaring = "QuestionMark", - any( - _Self = "core::result::Result", - _Self = "std::result::Result", - ), - any( - R = "core::option::Option", - R = "std::option::Option", - ) + _Self = "core::result::Result", + R = "core::option::Option", ), message = "the `?` operator can only be used on `Result`s, not `Option`s, \ in {ItemContext} that returns `Result`", @@ -243,10 +237,7 @@ on( all( from_desugaring = "QuestionMark", - any( - _Self = "core::result::Result", - _Self = "std::result::Result", - ) + _Self = "core::result::Result", ), // There's a special error message in the trait selection code for // `From` in `?`, so this is not shown for result-in-result errors, @@ -259,14 +250,8 @@ on( all( from_desugaring = "QuestionMark", - any( - _Self = "core::option::Option", - _Self = "std::option::Option", - ), - any( - R = "core::result::Result", - R = "std::result::Result", - ) + _Self = "core::option::Option", + R = "core::result::Result", ), message = "the `?` operator can only be used on `Option`s, not `Result`s, \ in {ItemContext} that returns `Option`", @@ -276,10 +261,7 @@ on( all( from_desugaring = "QuestionMark", - any( - _Self = "core::option::Option", - _Self = "std::option::Option", - ) + _Self = "core::option::Option", ), // `Option`-in-`Option` always works, as there's only one possible // residual, so this can also be phrased strongly. @@ -291,14 +273,8 @@ on( all( from_desugaring = "QuestionMark", - any( - _Self = "core::ops::ControlFlow", - _Self = "std::ops::ControlFlow", - ), - any( - R = "core::ops::ControlFlow", - R = "std::ops::ControlFlow", - ) + _Self = "core::ops::control_flow::ControlFlow", + R = "core::ops::control_flow::ControlFlow", ), message = "the `?` operator in {ItemContext} that returns `ControlFlow` \ can only be used on other `ControlFlow`s (with the same Break type)", @@ -309,10 +285,7 @@ on( all( from_desugaring = "QuestionMark", - any( - _Self = "core::ops::ControlFlow", - _Self = "std::ops::ControlFlow", - ) + _Self = "core::ops::control_flow::ControlFlow", // `R` is not a `ControlFlow`, as that case was matched previously ), message = "the `?` operator can only be used on `ControlFlow`s \ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/option.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/option.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/option.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/option.rs 2023-12-21 16:55:28.000000000 +0000 @@ -119,15 +119,21 @@ //! # Representation //! //! Rust guarantees to optimize the following types `T` such that -//! [`Option`] has the same size and alignment as `T`: -//! -//! * [`Box`] -//! * `&U` -//! * `&mut U` -//! * `fn`, `extern "C" fn`[^extern_fn] -//! * [`num::NonZero*`] -//! * [`ptr::NonNull`] -//! * `#[repr(transparent)]` struct around one of the types in this list. +//! 
[`Option`] has the same size and alignment as `T`. In some +//! of these cases, Rust further guarantees that +//! `transmute::<_, Option>([0u8; size_of::()])` is sound and +//! produces `Option::::None`. These cases are identified by the +//! second column: +//! +//! | `T` | `transmute::<_, Option>([0u8; size_of::()])` sound? | +//! |---------------------------------------------------------------------|----------------------------------------------------------------------| +//! | [`Box`] | when `U: Sized` | +//! | `&U` | when `U: Sized` | +//! | `&mut U` | when `U: Sized` | +//! | `fn`, `extern "C" fn`[^extern_fn] | always | +//! | [`num::NonZero*`] | always | +//! | [`ptr::NonNull`] | when `U: Sized` | +//! | `#[repr(transparent)]` struct around one of the types in this list. | when it holds for the inner type | //! //! [^extern_fn]: this remains true for any other ABI: `extern "abi" fn` (_e.g._, `extern "system" fn`) //! @@ -743,8 +749,6 @@ /// # Examples /// /// ```rust - /// #![feature(option_as_slice)] - /// /// assert_eq!( /// [Some(1234).as_slice(), None.as_slice()], /// [&[1234][..], &[][..]], @@ -755,15 +759,13 @@ /// borrowing) [`[_]::first`](slice::first): /// /// ```rust - /// #![feature(option_as_slice)] - /// /// for i in [Some(1234_u16), None] { /// assert_eq!(i.as_ref(), i.as_slice().first()); /// } /// ``` #[inline] #[must_use] - #[unstable(feature = "option_as_slice", issue = "108545")] + #[stable(feature = "option_as_slice", since = "1.75.0")] pub fn as_slice(&self) -> &[T] { // SAFETY: When the `Option` is `Some`, we're using the actual pointer // to the payload, with a length of 1, so this is equivalent to @@ -794,8 +796,6 @@ /// # Examples /// /// ```rust - /// #![feature(option_as_slice)] - /// /// assert_eq!( /// [Some(1234).as_mut_slice(), None.as_mut_slice()], /// [&mut [1234][..], &mut [][..]], @@ -806,8 +806,6 @@ /// our original `Option`: /// /// ```rust - /// #![feature(option_as_slice)] - /// /// let mut x = Some(1234); /// x.as_mut_slice()[0] += 1; /// assert_eq!(x, Some(1235)); @@ -817,13 +815,11 @@ /// is [`[_]::first_mut`](slice::first_mut): /// /// ```rust - /// #![feature(option_as_slice)] - /// /// assert_eq!(Some(123).as_mut_slice().first_mut(), Some(&mut 123)) /// ``` #[inline] #[must_use] - #[unstable(feature = "option_as_slice", issue = "108545")] + #[stable(feature = "option_as_slice", since = "1.75.0")] pub fn as_mut_slice(&mut self) -> &mut [T] { // SAFETY: When the `Option` is `Some`, we're using the actual pointer // to the payload, with a length of 1, so this is equivalent to @@ -969,6 +965,7 @@ /// assert_eq!(None.unwrap_or_else(|| 2 * k), 20); /// ``` #[inline] + #[track_caller] #[stable(feature = "rust1", since = "1.0.0")] pub fn unwrap_or_else(self, f: F) -> T where @@ -1485,7 +1482,7 @@ #[stable(feature = "rust1", since = "1.0.0")] pub fn or(self, optb: Option) -> Option { match self { - Some(x) => Some(x), + x @ Some(_) => x, None => optb, } } @@ -1510,7 +1507,7 @@ F: FnOnce() -> Option, { match self { - Some(x) => Some(x), + x @ Some(_) => x, None => f(), } } @@ -1540,8 +1537,8 @@ #[stable(feature = "option_xor", since = "1.37.0")] pub fn xor(self, optb: Option) -> Option { match (self, optb) { - (Some(a), None) => Some(a), - (None, Some(b)) => Some(b), + (a @ Some(_), None) => a, + (None, b @ Some(_)) => b, _ => None, } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/panic/unwind_safe.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/panic/unwind_safe.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/panic/unwind_safe.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/panic/unwind_safe.rs 2023-12-21 16:55:28.000000000 +0000 @@ -267,6 +267,7 @@ impl R> FnOnce<()> for AssertUnwindSafe { type Output = R; + #[inline] extern "rust-call" fn call_once(self, _args: ()) -> R { (self.0)() } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/panic.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/panic.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/panic.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/panic.rs 2023-12-21 16:55:28.000000000 +0000 @@ -47,6 +47,7 @@ #[allow_internal_unstable(core_panic, const_format_args)] #[rustc_diagnostic_item = "core_panic_2021_macro"] #[rustc_macro_transparency = "semitransparent"] +#[cfg(any(bootstrap, feature = "panic_immediate_abort"))] pub macro panic_2021 { () => ( $crate::panicking::panic("explicit panic") @@ -57,6 +58,50 @@ }), ($($t:tt)+) => ({ // Semicolon to prevent temporaries inside the formatting machinery from + // being considered alive in the caller after the panic_fmt call. + $crate::panicking::panic_fmt($crate::const_format_args!($($t)+)); + }), +} + +#[doc(hidden)] +#[unstable(feature = "edition_panic", issue = "none", reason = "use panic!() instead")] +#[allow_internal_unstable( + core_panic, + core_intrinsics, + const_dispatch, + const_eval_select, + const_format_args, + rustc_attrs +)] +#[rustc_diagnostic_item = "core_panic_2021_macro"] +#[rustc_macro_transparency = "semitransparent"] +#[cfg(not(any(bootstrap, feature = "panic_immediate_abort")))] +pub macro panic_2021 { + () => ({ + // Create a function so that the argument for `track_caller` + // can be moved inside if possible. + #[cold] + #[track_caller] + #[inline(never)] + const fn panic_cold_explicit() -> ! { + $crate::panicking::panic_explicit() + } + panic_cold_explicit(); + }), + // Special-case the single-argument case for const_panic. + ("{}", $arg:expr $(,)?) => ({ + #[cold] + #[track_caller] + #[inline(never)] + #[rustc_const_panic_str] // enforce a &&str argument in const-check and hook this by const-eval + #[rustc_do_not_const_check] // hooked by const-eval + const fn panic_cold_display(arg: &T) -> ! { + $crate::panicking::panic_display(arg) + } + panic_cold_display(&$arg); + }), + ($($t:tt)+) => ({ + // Semicolon to prevent temporaries inside the formatting machinery from // being considered alive in the caller after the panic_fmt call. $crate::panicking::panic_fmt($crate::const_format_args!($($t)+)); }), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/panicking.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/panicking.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/panicking.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/panicking.rs 2023-12-21 16:55:28.000000000 +0000 @@ -152,6 +152,14 @@ panic_display(&expr); } +#[track_caller] +#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)] +#[cfg_attr(feature = "panic_immediate_abort", inline)] +#[rustc_const_unstable(feature = "core_panic", issue = "none")] +pub const fn panic_explicit() -> ! 
{ + panic_display(&"explicit panic"); +} + #[inline] #[track_caller] #[rustc_diagnostic_item = "unreachable_display"] // needed for `non-fmt-panics` lint @@ -161,8 +169,10 @@ #[inline] #[track_caller] -#[lang = "panic_display"] // needed for const-evaluated panics #[rustc_do_not_const_check] // hooked by const-eval +#[cfg_attr(bootstrap, lang = "panic_display")] +// enforce a &&str argument in const-check and hook this by const-eval +#[cfg_attr(not(bootstrap), rustc_const_panic_str)] #[rustc_const_unstable(feature = "core_panic", issue = "none")] pub const fn panic_display(x: &T) -> ! { panic_fmt(format_args!("{}", *x)); @@ -219,7 +229,6 @@ /// pass to `panic_nounwind`. /// This function is called directly by the codegen backend, and must not have /// any extra arguments (including those synthesized by track_caller). -#[cfg(not(bootstrap))] #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)] #[cfg_attr(feature = "panic_immediate_abort", inline)] #[lang = "panic_in_cleanup"] // needed by codegen for panic in nounwind function diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/pin.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/pin.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/pin.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/pin.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1085,17 +1085,19 @@ /// # assert_eq!(42, block_on(async { 42 })); /// ``` /// -/// ### With `Generator`s +/// ### With `Coroutine`s /// /// ```rust -/// #![feature(generators, generator_trait)] +/// #![cfg_attr(bootstrap, feature(generators))] +/// #![cfg_attr(not(bootstrap), feature(coroutines))] +/// #![feature(coroutine_trait)] /// use core::{ -/// ops::{Generator, GeneratorState}, +/// ops::{Coroutine, CoroutineState}, /// pin::pin, /// }; /// -/// fn generator_fn() -> impl Generator /* not Unpin */ { -/// // Allow generator to be self-referential (not `Unpin`) +/// fn coroutine_fn() -> impl Coroutine /* not Unpin */ { +/// // Allow coroutine to be self-referential (not `Unpin`) /// // vvvvvv so that locals can cross yield points. /// static || { /// let foo = String::from("foo"); @@ -1107,18 +1109,18 @@ /// } /// /// fn main() { -/// let mut generator = pin!(generator_fn()); -/// match generator.as_mut().resume(()) { -/// GeneratorState::Yielded(0) => {}, +/// let mut coroutine = pin!(coroutine_fn()); +/// match coroutine.as_mut().resume(()) { +/// CoroutineState::Yielded(0) => {}, /// _ => unreachable!(), /// } -/// match generator.as_mut().resume(()) { -/// GeneratorState::Yielded(3) => {}, +/// match coroutine.as_mut().resume(()) { +/// CoroutineState::Yielded(3) => {}, /// _ => unreachable!(), /// } -/// match generator.resume(()) { -/// GeneratorState::Yielded(_) => unreachable!(), -/// GeneratorState::Complete(()) => {}, +/// match coroutine.resume(()) { +/// CoroutineState::Yielded(_) => unreachable!(), +/// CoroutineState::Complete(()) => {}, /// } /// } /// ``` diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/primitive_docs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/primitive_docs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/primitive_docs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/primitive_docs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -283,7 +283,7 @@ /// `char` type. For technical reasons, there is additional, separate /// documentation in [the `std::char` module](char/index.html) as well. 
/// -/// # Validity +/// # Validity and Layout /// /// A `char` is a '[Unicode scalar value]', which is any '[Unicode code point]' /// other than a [surrogate code point]. This has a fixed numerical definition: @@ -291,7 +291,7 @@ /// Surrogate code points, used by UTF-16, are in the range 0xD800 to 0xDFFF. /// /// No `char` may be constructed, whether as a literal or at runtime, that is not a -/// Unicode scalar value: +/// Unicode scalar value. Violating this rule causes undefined behavior. /// /// ```compile_fail /// // Each of these is a compiler error @@ -308,9 +308,10 @@ /// let _ = unsafe { char::from_u32_unchecked(0x110000) }; /// ``` /// -/// USVs are also the exact set of values that may be encoded in UTF-8. Because -/// `char` values are USVs and `str` values are valid UTF-8, it is safe to store -/// any `char` in a `str` or read any character from a `str` as a `char`. +/// Unicode scalar values are also the exact set of values that may be encoded in UTF-8. Because +/// `char` values are Unicode scalar values and functions may assume [incoming `str` values are +/// valid UTF-8](primitive.str.html#invariant), it is safe to store any `char` in a `str` or read +/// any character from a `str` as a `char`. /// /// The gap in valid `char` values is understood by the compiler, so in the /// below example the two ranges are understood to cover the whole range of @@ -324,11 +325,17 @@ /// }; /// ``` /// -/// All USVs are valid `char` values, but not all of them represent a real -/// character. Many USVs are not currently assigned to a character, but may be -/// in the future ("reserved"); some will never be a character -/// ("noncharacters"); and some may be given different meanings by different -/// users ("private use"). +/// All Unicode scalar values are valid `char` values, but not all of them represent a real +/// character. Many Unicode scalar values are not currently assigned to a character, but may be in +/// the future ("reserved"); some will never be a character ("noncharacters"); and some may be given +/// different meanings by different users ("private use"). +/// +/// `char` is guaranteed to have the same size and alignment as `u32` on all +/// platforms. +/// ``` +/// use std::alloc::Layout; +/// assert_eq!(Layout::new::(), Layout::new::()); +/// ``` /// /// [Unicode code point]: https://www.unicode.org/glossary/#code_point /// [Unicode scalar value]: https://www.unicode.org/glossary/#unicode_scalar_value @@ -887,8 +894,6 @@ /// type. It is usually seen in its borrowed form, `&str`. It is also the type /// of string literals, `&'static str`. /// -/// String slices are always valid UTF-8. -/// /// # Basic Usage /// /// String literals are string slices: @@ -942,6 +947,14 @@ /// Note: This example shows the internals of `&str`. `unsafe` should not be /// used to get a string slice under normal circumstances. Use `as_str` /// instead. +/// +/// # Invariant +/// +/// Rust libraries may assume that string slices are always valid UTF-8. +/// +/// Constructing a non-UTF-8 string slice is not immediate undefined behavior, but any function +/// called on a string slice may assume that it is valid UTF-8, which means that a non-UTF-8 string +/// slice can lead to undefined behavior down the road. #[stable(feature = "rust1", since = "1.0.0")] mod prim_str {} @@ -1077,26 +1090,6 @@ #[doc(hidden)] impl (T,) {} -// Fake impl that's only really used for docs. 
-#[cfg(doc)] -#[stable(feature = "rust1", since = "1.0.0")] -#[doc(fake_variadic)] -/// This trait is implemented on arbitrary-length tuples. -impl Clone for (T,) { - fn clone(&self) -> Self { - loop {} - } -} - -// Fake impl that's only really used for docs. -#[cfg(doc)] -#[stable(feature = "rust1", since = "1.0.0")] -#[doc(fake_variadic)] -/// This trait is implemented on arbitrary-length tuples. -impl Copy for (T,) { - // empty -} - #[rustc_doc_primitive = "f32"] /// A 32-bit floating point type (specifically, the "binary32" type defined in IEEE 754-2008). /// @@ -1142,10 +1135,9 @@ /// surprising results upon inspecting the bit patterns, /// as the same calculations might produce NaNs with different bit patterns. /// -/// When the number resulting from a primitive operation (addition, -/// subtraction, multiplication, or division) on this type is not exactly -/// representable as `f32`, it is rounded according to the roundTiesToEven -/// direction defined in IEEE 754-2008. That means: +/// When a primitive operation (addition, subtraction, multiplication, or +/// division) is performed on this type, the result is rounded according to the +/// roundTiesToEven direction defined in IEEE 754-2008. That means: /// /// - The result is the representable value closest to the true value, if there /// is a unique closest representable value. @@ -1154,6 +1146,9 @@ /// - If the true value's magnitude is ≥ `f32::MAX` + 2(`f32::MAX_EXP` − /// `f32::MANTISSA_DIGITS` − 1), the result is ∞ or −∞ (preserving the /// true value's sign). +/// - If the result of a sum exactly equals zero, the outcome is +0.0 unless +/// both arguments were negative, then it is -0.0. Subtraction `a - b` is +/// regarded as a sum `a + (-b)`. /// /// For more information on floating point numbers, see [Wikipedia][wikipedia]. /// diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ptr/const_ptr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ptr/const_ptr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ptr/const_ptr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ptr/const_ptr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -480,8 +480,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset`. @@ -560,8 +561,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] pub const fn wrapping_byte_offset(self, count: isize) -> Self { self.cast::().wrapping_offset(count).with_metadata_of(self) } @@ -726,8 +728,9 @@ /// For non-`Sized` pointees this operation considers only the data pointers, /// ignoring the metadata. 
#[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn byte_offset_from(self, origin: *const U) -> isize { // SAFETY: the caller must uphold the safety contract for `offset_from`. @@ -842,7 +845,7 @@ where T: Sized, { - match intrinsics::ptr_guaranteed_cmp(self as _, other as _) { + match intrinsics::ptr_guaranteed_cmp(self, other) { 2 => None, other => Some(other == 1), } @@ -952,8 +955,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add`. @@ -1045,8 +1049,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub`. @@ -1125,8 +1130,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] pub const fn wrapping_byte_add(self, count: usize) -> Self { self.cast::().wrapping_add(count).with_metadata_of(self) } @@ -1203,8 +1209,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] pub const fn wrapping_byte_sub(self, count: usize) -> Self { self.cast::().wrapping_sub(count).with_metadata_of(self) } @@ -1372,7 +1379,6 @@ /// /// ``` /// #![feature(pointer_is_aligned)] - /// #![feature(pointer_byte_offsets)] /// /// // On some platforms, the alignment of i32 is less than 4. /// #[repr(align(4))] @@ -1494,7 +1500,6 @@ /// /// ``` /// #![feature(pointer_is_aligned)] - /// #![feature(pointer_byte_offsets)] /// /// // On some platforms, the alignment of i32 is less than 4. 
/// #[repr(align(4))] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ptr/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ptr/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ptr/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ptr/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -494,6 +494,7 @@ #[stable(feature = "drop_in_place", since = "1.8.0")] #[lang = "drop_in_place"] #[allow(unconditional_recursion)] +#[rustc_diagnostic_item = "ptr_drop_in_place"] pub unsafe fn drop_in_place(to_drop: *mut T) { // Code here does not matter - this is replaced by the // real drop glue by the compiler. @@ -504,6 +505,10 @@ /// Creates a null raw pointer. /// +/// This function is equivalent to zero-initializing the pointer: +/// `MaybeUninit::<*const T>::zeroed().assume_init()`. +/// The resulting pointer has the address 0. +/// /// # Examples /// /// ``` @@ -511,6 +516,7 @@ /// /// let p: *const i32 = ptr::null(); /// assert!(p.is_null()); +/// assert_eq!(p as usize, 0); // this pointer has the address 0 /// ``` #[inline(always)] #[must_use] @@ -525,6 +531,10 @@ /// Creates a null mutable raw pointer. /// +/// This function is equivalent to zero-initializing the pointer: +/// `MaybeUninit::<*mut T>::zeroed().assume_init()`. +/// The resulting pointer has the address 0. +/// /// # Examples /// /// ``` @@ -532,6 +542,7 @@ /// /// let p: *mut i32 = ptr::null_mut(); /// assert!(p.is_null()); +/// assert_eq!(p as usize, 0); // this pointer has the address 0 /// ``` #[inline(always)] #[must_use] @@ -698,7 +709,7 @@ #[inline(always)] #[must_use] #[unstable(feature = "ptr_from_ref", issue = "106116")] -#[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] +#[rustc_never_returns_null_ptr] #[rustc_diagnostic_item = "ptr_from_ref"] pub const fn from_ref(r: &T) -> *const T { r @@ -711,7 +722,7 @@ #[inline(always)] #[must_use] #[unstable(feature = "ptr_from_ref", issue = "106116")] -#[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] +#[rustc_never_returns_null_ptr] pub const fn from_mut(r: &mut T) -> *mut T { r } @@ -740,6 +751,7 @@ #[stable(feature = "slice_from_raw_parts", since = "1.42.0")] #[rustc_const_stable(feature = "const_slice_from_raw_parts", since = "1.64.0")] #[rustc_allow_const_fn_unstable(ptr_metadata)] +#[rustc_diagnostic_item = "ptr_slice_from_raw_parts"] pub const fn slice_from_raw_parts(data: *const T, len: usize) -> *const [T] { from_raw_parts(data.cast(), len) } @@ -772,6 +784,7 @@ #[inline] #[stable(feature = "slice_from_raw_parts", since = "1.42.0")] #[rustc_const_unstable(feature = "const_slice_from_raw_parts_mut", issue = "67456")] +#[rustc_diagnostic_item = "ptr_slice_from_raw_parts_mut"] pub const fn slice_from_raw_parts_mut(data: *mut T, len: usize) -> *mut [T] { from_raw_parts_mut(data.cast(), len) } @@ -850,6 +863,7 @@ #[inline] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_swap", issue = "83163")] +#[rustc_diagnostic_item = "ptr_swap"] pub const unsafe fn swap(x: *mut T, y: *mut T) { // Give ourselves some scratch space to work with. // We do not have to worry about drops: `MaybeUninit` does nothing when dropped. @@ -911,6 +925,7 @@ #[inline] #[stable(feature = "swap_nonoverlapping", since = "1.27.0")] #[rustc_const_unstable(feature = "const_swap", issue = "83163")] +#[rustc_diagnostic_item = "ptr_swap_nonoverlapping"] pub const unsafe fn swap_nonoverlapping(x: *mut T, y: *mut T, count: usize) { #[allow(unused)] macro_rules! 
attempt_swap_as_chunks { @@ -1022,6 +1037,7 @@ #[inline] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_replace", issue = "83164")] +#[rustc_diagnostic_item = "ptr_replace"] pub const unsafe fn replace(dst: *mut T, mut src: T) -> T { // SAFETY: the caller must guarantee that `dst` is valid to be // cast to a mutable reference (valid for writes, aligned, initialized), @@ -1147,6 +1163,7 @@ #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")] #[rustc_allow_const_fn_unstable(const_mut_refs, const_maybe_uninit_as_mut_ptr)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces +#[rustc_diagnostic_item = "ptr_read"] pub const unsafe fn read(src: *const T) -> T { // It would be semantically correct to implement this via `copy_nonoverlapping` // and `MaybeUninit`, as was done before PR #109035. Calling `assume_init` @@ -1264,6 +1281,7 @@ #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")] #[rustc_allow_const_fn_unstable(const_mut_refs, const_maybe_uninit_as_mut_ptr)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces +#[rustc_diagnostic_item = "ptr_read_unaligned"] pub const unsafe fn read_unaligned(src: *const T) -> T { let mut tmp = MaybeUninit::::uninit(); // SAFETY: the caller must guarantee that `src` is valid for reads. @@ -1539,6 +1557,7 @@ #[inline] #[stable(feature = "volatile", since = "1.9.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces +#[rustc_diagnostic_item = "ptr_read_volatile"] pub unsafe fn read_volatile(src: *const T) -> T { // SAFETY: the caller must uphold the safety contract for `volatile_load`. unsafe { @@ -1864,10 +1883,35 @@ /// ``` #[stable(feature = "ptr_eq", since = "1.17.0")] #[inline(always)] +#[must_use = "pointer comparison produces a value"] +#[rustc_diagnostic_item = "ptr_eq"] pub fn eq(a: *const T, b: *const T) -> bool { a == b } +/// Compares the *addresses* of the two pointers for equality, +/// ignoring any metadata in fat pointers. +/// +/// If the arguments are thin pointers of the same type, +/// then this is the same as [`eq`]. +/// +/// # Examples +/// +/// ``` +/// #![feature(ptr_addr_eq)] +/// +/// let whole: &[i32; 3] = &[1, 2, 3]; +/// let first: &i32 = &whole[0]; +/// assert!(std::ptr::addr_eq(whole, first)); +/// assert!(!std::ptr::eq::(whole, first)); +/// ``` +#[unstable(feature = "ptr_addr_eq", issue = "116324")] +#[inline(always)] +#[must_use = "pointer comparison produces a value"] +pub fn addr_eq(p: *const T, q: *const U) -> bool { + (p as *const ()) == (q as *const ()) +} + /// Hash a raw pointer. /// /// This can be used to hash a `&T` reference (which coerces to `*const T` implicitly) @@ -1955,9 +1999,18 @@ /// as all other references. This macro can create a raw pointer *without* creating /// a reference first. /// -/// Note, however, that the `expr` in `addr_of!(expr)` is still subject to all -/// the usual rules. In particular, `addr_of!(*ptr::null())` is Undefined -/// Behavior because it dereferences a null pointer. +/// The `expr` in `addr_of!(expr)` is evaluated as a place expression, but never loads +/// from the place or requires the place to be dereferenceable. This means that +/// `addr_of!(*ptr)` is defined behavior even if `ptr` is null, dangling, or misaligned. +/// Note however that `addr_of!((*ptr).field)` still requires the projection to +/// `field` to be in-bounds, using the same rules as [`offset`]. 
+/// +/// Note that `Deref`/`Index` coercions (and their mutable counterparts) are applied inside +/// `addr_of!` like everywhere else, in which case a reference is created to call `Deref::deref` or +/// `Index::index`, respectively. The statements above only apply when no such coercions are +/// applied. +/// +/// [`offset`]: pointer::offset /// /// # Example /// @@ -1995,9 +2048,18 @@ /// as all other references. This macro can create a raw pointer *without* creating /// a reference first. /// -/// Note, however, that the `expr` in `addr_of_mut!(expr)` is still subject to all -/// the usual rules. In particular, `addr_of_mut!(*ptr::null_mut())` is Undefined -/// Behavior because it dereferences a null pointer. +/// The `expr` in `addr_of_mut!(expr)` is evaluated as a place expression, but never loads +/// from the place or requires the place to be dereferenceable. This means that +/// `addr_of_mut!(*ptr)` is defined behavior even if `ptr` is null, dangling, or misaligned. +/// Note however that `addr_of_mut!((*ptr).field)` still requires the projection to +/// `field` to be in-bounds, using the same rules as [`offset`]. +/// +/// Note that `Deref`/`Index` coercions (and their mutable counterparts) are applied inside +/// `addr_of_mut!` like everywhere else, in which case a reference is created to call `Deref::deref` +/// or `Index::index`, respectively. The statements above only apply when no such coercions are +/// applied. +/// +/// [`offset`]: pointer::offset /// /// # Examples /// diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ptr/mut_ptr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ptr/mut_ptr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ptr/mut_ptr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ptr/mut_ptr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -495,8 +495,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset`. @@ -574,8 +575,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] pub const fn wrapping_byte_offset(self, count: isize) -> Self { self.cast::().wrapping_offset(count).with_metadata_of(self) } @@ -898,8 +900,9 @@ /// For non-`Sized` pointees this operation considers only the data pointers, /// ignoring the metadata. 
#[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn byte_offset_from(self, origin: *const U) -> isize { // SAFETY: the caller must uphold the safety contract for `offset_from`. @@ -1053,8 +1056,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add`. @@ -1146,8 +1150,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub`. @@ -1226,8 +1231,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] pub const fn wrapping_byte_add(self, count: usize) -> Self { self.cast::().wrapping_add(count).with_metadata_of(self) } @@ -1304,8 +1310,9 @@ /// leaving the metadata untouched. #[must_use] #[inline(always)] - #[unstable(feature = "pointer_byte_offsets", issue = "96283")] - #[rustc_const_unstable(feature = "const_pointer_byte_offsets", issue = "96283")] + #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] + #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] + #[rustc_allow_const_fn_unstable(set_ptr_value)] pub const fn wrapping_byte_sub(self, count: usize) -> Self { self.cast::().wrapping_sub(count).with_metadata_of(self) } @@ -1639,7 +1646,6 @@ /// /// ``` /// #![feature(pointer_is_aligned)] - /// #![feature(pointer_byte_offsets)] /// /// // On some platforms, the alignment of i32 is less than 4. /// #[repr(align(4))] @@ -1763,7 +1769,6 @@ /// /// ``` /// #![feature(pointer_is_aligned)] - /// #![feature(pointer_byte_offsets)] /// /// // On some platforms, the alignment of i32 is less than 4. 
/// #[repr(align(4))] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ptr/non_null.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ptr/non_null.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/ptr/non_null.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/ptr/non_null.rs 2023-12-21 16:55:28.000000000 +0000 @@ -68,6 +68,7 @@ #[repr(transparent)] #[rustc_layout_scalar_valid_range_start(1)] #[rustc_nonnull_optimization_guaranteed] +#[rustc_diagnostic_item = "NonNull"] pub struct NonNull { pointer: *const T, } @@ -338,7 +339,7 @@ /// ``` #[stable(feature = "nonnull", since = "1.25.0")] #[rustc_const_stable(feature = "const_nonnull_as_ptr", since = "1.32.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] #[must_use] #[inline(always)] pub const fn as_ptr(self) -> *mut T { @@ -598,7 +599,7 @@ #[must_use] #[unstable(feature = "slice_ptr_get", issue = "74265")] #[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub const fn as_mut_ptr(self) -> *mut T { self.as_non_null_ptr().as_ptr() } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/result.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/result.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/result.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/result.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1422,6 +1422,7 @@ /// assert_eq!(Err("foo").unwrap_or_else(count), 3); /// ``` #[inline] + #[track_caller] #[stable(feature = "rust1", since = "1.0.0")] pub fn unwrap_or_else T>(self, op: F) -> T { match self { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/slice/index.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/slice/index.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/slice/index.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/slice/index.rs 2023-12-21 16:55:28.000000000 +0000 @@ -152,10 +152,7 @@ #[rustc_on_unimplemented( on(T = "str", label = "string indices are ranges of `usize`",), on( - all( - any(T = "str", T = "&str", T = "alloc::string::String", T = "std::string::String"), - _Self = "{integer}" - ), + all(any(T = "str", T = "&str", T = "alloc::string::String"), _Self = "{integer}"), note = "you can use `.chars().nth()` or `.bytes().nth()`\n\ for more information, see chapter 8 in The Book: \ " diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/slice/iter.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/slice/iter.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/slice/iter.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/slice/iter.rs 2023-12-21 16:55:28.000000000 +0000 @@ -59,6 +59,7 @@ /// [slices]: slice #[stable(feature = "rust1", since = "1.0.0")] #[must_use = "iterators are lazy and do nothing unless consumed"] +#[rustc_diagnostic_item = "SliceIter"] pub struct Iter<'a, T: 'a> { /// The pointer to the next element to return, or the past-the-end location /// if the iterator is empty. 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/slice/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/slice/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/slice/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/slice/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -640,6 +640,11 @@ /// Calling this method with an out-of-bounds index is *[undefined behavior]* /// even if the resulting reference is not used. /// + /// You can think of this like `.get(index).unwrap_unchecked()`. It's UB + /// to call `.get_unchecked(len)`, even if you immediately convert to a + /// pointer. And it's UB to call `.get_unchecked(..len + 1)`, + /// `.get_unchecked(..=len)`, or similar. + /// /// [`get`]: slice::get /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html /// @@ -675,6 +680,11 @@ /// Calling this method with an out-of-bounds index is *[undefined behavior]* /// even if the resulting reference is not used. /// + /// You can think of this like `.get_mut(index).unwrap_unchecked()`. It's + /// UB to call `.get_unchecked_mut(len)`, even if you immediately convert + /// to a pointer. And it's UB to call `.get_unchecked_mut(..len + 1)`, + /// `.get_unchecked_mut(..=len)`, or similar. + /// /// [`get_mut`]: slice::get_mut /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html /// @@ -730,7 +740,7 @@ /// [`as_mut_ptr`]: slice::as_mut_ptr #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_slice_as_ptr", since = "1.32.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] #[inline(always)] #[must_use] pub const fn as_ptr(&self) -> *const T { @@ -761,7 +771,7 @@ #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")] #[rustc_allow_const_fn_unstable(const_mut_refs)] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] #[inline(always)] #[must_use] pub const fn as_mut_ptr(&mut self) -> *mut T { @@ -2482,6 +2492,62 @@ RSplitNMut::new(self.rsplit_mut(pred), n) } + /// Splits the slice on the first element that matches the specified + /// predicate. + /// + /// If any matching elements are resent in the slice, returns the prefix + /// before the match and suffix after. The matching element itself is not + /// included. If no elements match, returns `None`. + /// + /// # Examples + /// + /// ``` + /// #![feature(slice_split_once)] + /// let s = [1, 2, 3, 2, 4]; + /// assert_eq!(s.split_once(|&x| x == 2), Some(( + /// &[1][..], + /// &[3, 2, 4][..] + /// ))); + /// assert_eq!(s.split_once(|&x| x == 0), None); + /// ``` + #[unstable(feature = "slice_split_once", reason = "newly added", issue = "112811")] + #[inline] + pub fn split_once(&self, pred: F) -> Option<(&[T], &[T])> + where + F: FnMut(&T) -> bool, + { + let index = self.iter().position(pred)?; + Some((&self[..index], &self[index + 1..])) + } + + /// Splits the slice on the last element that matches the specified + /// predicate. + /// + /// If any matching elements are resent in the slice, returns the prefix + /// before the match and suffix after. The matching element itself is not + /// included. If no elements match, returns `None`. + /// + /// # Examples + /// + /// ``` + /// #![feature(slice_split_once)] + /// let s = [1, 2, 3, 2, 4]; + /// assert_eq!(s.rsplit_once(|&x| x == 2), Some(( + /// &[1, 2, 3][..], + /// &[4][..] 
+ /// ))); + /// assert_eq!(s.rsplit_once(|&x| x == 0), None); + /// ``` + #[unstable(feature = "slice_split_once", reason = "newly added", issue = "112811")] + #[inline] + pub fn rsplit_once(&self, pred: F) -> Option<(&[T], &[T])> + where + F: FnMut(&T) -> bool, + { + let index = self.iter().rposition(pred)?; + Some((&self[..index], &self[index + 1..])) + } + /// Returns `true` if the slice contains an element with the given value. /// /// This operation is *O*(*n*). diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/slice/raw.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/slice/raw.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/slice/raw.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/slice/raw.rs 2023-12-21 16:55:28.000000000 +0000 @@ -90,6 +90,7 @@ #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_slice_from_raw_parts", since = "1.64.0")] #[must_use] +#[rustc_diagnostic_item = "slice_from_raw_parts"] pub const unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] { // SAFETY: the caller must uphold the safety contract for `from_raw_parts`. unsafe { @@ -136,6 +137,7 @@ #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_slice_from_raw_parts_mut", issue = "67456")] #[must_use] +#[rustc_diagnostic_item = "slice_from_raw_parts_mut"] pub const unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] { // SAFETY: the caller must uphold the safety contract for `from_raw_parts_mut`. unsafe { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/slice/sort.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/slice/sort.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/slice/sort.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/slice/sort.rs 2023-12-21 16:55:28.000000000 +0000 @@ -628,9 +628,14 @@ let _pivot_guard = InsertionHole { src: &*tmp, dest: pivot }; let pivot = &*tmp; + let len = v.len(); + if len == 0 { + return 0; + } + // Now partition the slice. let mut l = 0; - let mut r = v.len(); + let mut r = len; loop { // SAFETY: The unsafety below involves indexing an array. // For the first one: We already do the bounds checking here with `l < r`. @@ -643,8 +648,11 @@ } // Find the last element equal to the pivot. - while l < r && is_less(pivot, v.get_unchecked(r - 1)) { + loop { r -= 1; + if l >= r || !is_less(pivot, v.get_unchecked(r)) { + break; + } } // Are we done? @@ -653,7 +661,6 @@ } // Swap the found pair of out-of-order elements. 
- r -= 1; let ptr = v.as_mut_ptr(); ptr::swap(ptr.add(l), ptr.add(r)); l += 1; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/str/iter.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/str/iter.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/str/iter.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/str/iter.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1360,7 +1360,7 @@ } #[stable(feature = "split_inclusive", since = "1.51.0")] -impl<'a, P: Pattern<'a, Searcher: ReverseSearcher<'a>>> DoubleEndedIterator +impl<'a, P: Pattern<'a, Searcher: DoubleEndedSearcher<'a>>> DoubleEndedIterator for SplitInclusive<'a, P> { #[inline] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/str/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/str/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/str/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/str/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -81,6 +81,7 @@ #[cold] #[track_caller] #[rustc_allow_const_fn_unstable(const_eval_select)] +#[cfg(not(feature = "panic_immediate_abort"))] const fn slice_error_fail(s: &str, begin: usize, end: usize) -> ! { // SAFETY: panics for both branches unsafe { @@ -92,6 +93,11 @@ } } +#[cfg(feature = "panic_immediate_abort")] +const fn slice_error_fail(s: &str, begin: usize, end: usize) -> ! { + slice_error_fail_ct(s, begin, end) +} + #[track_caller] const fn slice_error_fail_ct(_: &str, _: usize, _: usize) -> ! { panic!("failed to slice string"); @@ -386,7 +392,7 @@ /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "rustc_str_as_ptr", since = "1.32.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] #[must_use] #[inline(always)] pub const fn as_ptr(&self) -> *const u8 { @@ -402,7 +408,7 @@ /// It is your responsibility to make sure that the string slice only gets /// modified in a way that it remains valid UTF-8. #[stable(feature = "str_as_mut_ptr", since = "1.36.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] #[must_use] #[inline(always)] pub fn as_mut_ptr(&mut self) -> *mut u8 { @@ -808,7 +814,7 @@ /// assert_eq!(Some((0, 'y')), char_indices.next()); // not (0, 'y̆') /// assert_eq!(Some((1, '\u{0306}')), char_indices.next()); /// - /// // note the 3 here - the last character took up two bytes + /// // note the 3 here - the previous character took up two bytes /// assert_eq!(Some((3, 'e')), char_indices.next()); /// assert_eq!(Some((4, 's')), char_indices.next()); /// diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/str/pattern.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/str/pattern.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/str/pattern.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/str/pattern.rs 2023-12-21 16:55:28.000000000 +0000 @@ -806,6 +806,8 @@ searcher_methods!(reverse); } +impl<'a, const N: usize> DoubleEndedSearcher<'a> for CharArraySearcher<'a, N> {} + /// Searches for chars that are equal to any of the [`char`]s in the array. 
/// /// # Examples @@ -826,6 +828,8 @@ searcher_methods!(reverse); } +impl<'a, 'b, const N: usize> DoubleEndedSearcher<'a> for CharArrayRefSearcher<'a, 'b, N> {} + ///////////////////////////////////////////////////////////////////////////// // Impl for &[char] ///////////////////////////////////////////////////////////////////////////// diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/str/traits.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/str/traits.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/str/traits.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/str/traits.rs 2023-12-21 16:55:28.000000000 +0000 @@ -624,6 +624,7 @@ /// assert_eq!(5, x); /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_diagnostic_item = "from_str_method"] fn from_str(s: &str) -> Result; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/sync/atomic.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/sync/atomic.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/sync/atomic.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/sync/atomic.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,26 +4,12 @@ //! threads, and are the building blocks of other concurrent //! types. //! -//! Rust atomics currently follow the same rules as [C++20 atomics][cpp], specifically `atomic_ref`. -//! Basically, creating a *shared reference* to one of the Rust atomic types corresponds to creating -//! an `atomic_ref` in C++; the `atomic_ref` is destroyed when the lifetime of the shared reference -//! ends. (A Rust atomic type that is exclusively owned or behind a mutable reference does *not* -//! correspond to an "atomic object" in C++, since it can be accessed via non-atomic operations.) -//! //! This module defines atomic versions of a select number of primitive //! types, including [`AtomicBool`], [`AtomicIsize`], [`AtomicUsize`], //! [`AtomicI8`], [`AtomicU16`], etc. //! Atomic types present operations that, when used correctly, synchronize //! updates between threads. //! -//! Each method takes an [`Ordering`] which represents the strength of -//! the memory barrier for that operation. These orderings are the -//! same as the [C++20 atomic orderings][1]. For more information see the [nomicon][2]. -//! -//! [cpp]: https://en.cppreference.com/w/cpp/atomic -//! [1]: https://en.cppreference.com/w/cpp/atomic/memory_order -//! [2]: ../../../nomicon/atomics.html -//! //! Atomic variables are safe to share between threads (they implement [`Sync`]) //! but they do not themselves provide the mechanism for sharing and follow the //! [threading model](../../../std/thread/index.html#the-threading-model) of Rust. @@ -36,6 +22,75 @@ //! the constant initializers like [`AtomicBool::new`]. Atomic statics //! are often used for lazy global initialization. //! +//! ## Memory model for atomic accesses +//! +//! Rust atomics currently follow the same rules as [C++20 atomics][cpp], specifically `atomic_ref`. +//! Basically, creating a *shared reference* to one of the Rust atomic types corresponds to creating +//! an `atomic_ref` in C++; the `atomic_ref` is destroyed when the lifetime of the shared reference +//! ends. (A Rust atomic type that is exclusively owned or behind a mutable reference does *not* +//! correspond to an "atomic object" in C++, since it can be accessed via non-atomic operations.) +//! +//! [cpp]: https://en.cppreference.com/w/cpp/atomic +//! +//! 
Each method takes an [`Ordering`] which represents the strength of +//! the memory barrier for that operation. These orderings are the +//! same as the [C++20 atomic orderings][1]. For more information see the [nomicon][2]. +//! +//! [1]: https://en.cppreference.com/w/cpp/atomic/memory_order +//! [2]: ../../../nomicon/atomics.html +//! +//! Since C++ does not support mixing atomic and non-atomic accesses, or non-synchronized +//! different-sized accesses to the same data, Rust does not support those operations either. +//! Note that both of those restrictions only apply if the accesses are non-synchronized. +//! +//! ```rust,no_run undefined_behavior +//! use std::sync::atomic::{AtomicU16, AtomicU8, Ordering}; +//! use std::mem::transmute; +//! use std::thread; +//! +//! let atomic = AtomicU16::new(0); +//! +//! thread::scope(|s| { +//! // This is UB: mixing atomic and non-atomic accesses +//! s.spawn(|| atomic.store(1, Ordering::Relaxed)); +//! s.spawn(|| unsafe { atomic.as_ptr().write(2) }); +//! }); +//! +//! thread::scope(|s| { +//! // This is UB: even reads are not allowed to be mixed +//! s.spawn(|| atomic.load(Ordering::Relaxed)); +//! s.spawn(|| unsafe { atomic.as_ptr().read() }); +//! }); +//! +//! thread::scope(|s| { +//! // This is fine, `join` synchronizes the code in a way such that atomic +//! // and non-atomic accesses can't happen "at the same time" +//! let handle = s.spawn(|| atomic.store(1, Ordering::Relaxed)); +//! handle.join().unwrap(); +//! s.spawn(|| unsafe { atomic.as_ptr().write(2) }); +//! }); +//! +//! thread::scope(|s| { +//! // This is UB: using different-sized atomic accesses to the same data +//! s.spawn(|| atomic.store(1, Ordering::Relaxed)); +//! s.spawn(|| unsafe { +//! let differently_sized = transmute::<&AtomicU16, &AtomicU8>(&atomic); +//! differently_sized.store(2, Ordering::Relaxed); +//! }); +//! }); +//! +//! thread::scope(|s| { +//! // This is fine, `join` synchronizes the code in a way such that +//! // differently-sized accesses can't happen "at the same time" +//! let handle = s.spawn(|| atomic.store(1, Ordering::Relaxed)); +//! handle.join().unwrap(); +//! s.spawn(|| unsafe { +//! let differently_sized = transmute::<&AtomicU16, &AtomicU8>(&atomic); +//! differently_sized.store(2, Ordering::Relaxed); +//! }); +//! }); +//! ``` +//! //! # Portability //! //! All atomic types in this module are guaranteed to be [lock-free] if they're @@ -79,6 +134,40 @@ //! //! [lock-free]: https://en.wikipedia.org/wiki/Non-blocking_algorithm //! +//! # Atomic accesses to read-only memory +//! +//! In general, *all* atomic accesses on read-only memory are Undefined Behavior. For instance, attempting +//! to do a `compare_exchange` that will definitely fail (making it conceptually a read-only +//! operation) can still cause a page fault if the underlying memory page is mapped read-only. Since +//! atomic `load`s might be implemented using compare-exchange operations, even a `load` can fault +//! on read-only memory. +//! +//! For the purpose of this section, "read-only memory" is defined as memory that is read-only in +//! the underlying target, i.e., the pages are mapped with a read-only flag and any attempt to write +//! will cause a page fault. In particular, an `&u128` reference that points to memory that is +//! read-write mapped is *not* considered to point to "read-only memory". In Rust, almost all memory +//! is read-write; the only exceptions are memory created by `const` items or `static` items without +//! 
interior mutability, and memory that was specifically marked as read-only by the operating +//! system via platform-specific APIs. +//! +//! As an exception from the general rule stated above, "sufficiently small" atomic loads with +//! `Ordering::Relaxed` are implemented in a way that works on read-only memory, and are hence not +//! Undefined Behavior. The exact size limit for what makes a load "sufficiently small" varies +//! depending on the target: +//! +//! | `target_arch` | Size limit | +//! |---------------|---------| +//! | `x86`, `arm`, `mips`, `mips32r6`, `powerpc`, `riscv32`, `sparc`, `hexagon` | 4 bytes | +//! | `x86_64`, `aarch64`, `loongarch64`, `mips64`, `mips64r6`, `powerpc64`, `riscv64`, `sparc64`, `s390x` | 8 bytes | +//! +//! Atomics loads that are larger than this limit as well as atomic loads with ordering other +//! than `Relaxed`, as well as *all* atomic loads on targets not listed in the table, might still be +//! read-only under certain conditions, but that is not a stable guarantee and should not be relied +//! upon. +//! +//! If you need to do an acquire load on read-only memory, you can do a relaxed load followed by an +//! acquire fence instead. +//! //! # Examples //! //! A simple spinlock: @@ -319,7 +408,7 @@ /// # Examples /// /// ``` - /// #![feature(atomic_from_ptr, pointer_is_aligned)] + /// #![feature(pointer_is_aligned)] /// use std::sync::atomic::{self, AtomicBool}; /// use std::mem::align_of; /// @@ -346,13 +435,17 @@ /// /// # Safety /// - /// * `ptr` must be aligned to `align_of::()` (note that on some platforms this can be bigger than `align_of::()`). + /// * `ptr` must be aligned to `align_of::()` (note that on some platforms this can + /// be bigger than `align_of::()`). /// * `ptr` must be [valid] for both reads and writes for the whole lifetime `'a`. - /// * The value behind `ptr` must not be accessed through non-atomic operations for the whole lifetime `'a`. + /// * You must adhere to the [Memory model for atomic accesses]. In particular, it is not + /// allowed to mix atomic and non-atomic accesses, or atomic accesses of different sizes, + /// without synchronization. /// /// [valid]: crate::ptr#safety - #[unstable(feature = "atomic_from_ptr", issue = "108652")] - #[rustc_const_unstable(feature = "atomic_from_ptr", issue = "108652")] + /// [Memory model for atomic accesses]: self#memory-model-for-atomic-accesses + #[stable(feature = "atomic_from_ptr", since = "1.75.0")] + #[rustc_const_unstable(feature = "const_atomic_from_ptr", issue = "108652")] pub const unsafe fn from_ptr<'a>(ptr: *mut bool) -> &'a AtomicBool { // SAFETY: guaranteed by the caller unsafe { &*ptr.cast() } @@ -1018,7 +1111,7 @@ #[inline] #[stable(feature = "atomic_as_ptr", since = "1.70.0")] #[rustc_const_stable(feature = "atomic_as_ptr", since = "1.70.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub const fn as_ptr(&self) -> *mut bool { self.v.get().cast() } @@ -1113,7 +1206,7 @@ /// # Examples /// /// ``` - /// #![feature(atomic_from_ptr, pointer_is_aligned)] + /// #![feature(pointer_is_aligned)] /// use std::sync::atomic::{self, AtomicPtr}; /// use std::mem::align_of; /// @@ -1140,13 +1233,17 @@ /// /// # Safety /// - /// * `ptr` must be aligned to `align_of::>()` (note that on some platforms this can be bigger than `align_of::<*mut T>()`). + /// * `ptr` must be aligned to `align_of::>()` (note that on some platforms this + /// can be bigger than `align_of::<*mut T>()`). 
/// * `ptr` must be [valid] for both reads and writes for the whole lifetime `'a`. - /// * The value behind `ptr` must not be accessed through non-atomic operations for the whole lifetime `'a`. + /// * You must adhere to the [Memory model for atomic accesses]. In particular, it is not + /// allowed to mix atomic and non-atomic accesses, or atomic accesses of different sizes, + /// without synchronization. /// /// [valid]: crate::ptr#safety - #[unstable(feature = "atomic_from_ptr", issue = "108652")] - #[rustc_const_unstable(feature = "atomic_from_ptr", issue = "108652")] + /// [Memory model for atomic accesses]: self#memory-model-for-atomic-accesses + #[stable(feature = "atomic_from_ptr", since = "1.75.0")] + #[rustc_const_unstable(feature = "const_atomic_from_ptr", issue = "108652")] pub const unsafe fn from_ptr<'a>(ptr: *mut *mut T) -> &'a AtomicPtr { // SAFETY: guaranteed by the caller unsafe { &*ptr.cast() } @@ -1954,7 +2051,7 @@ #[inline] #[stable(feature = "atomic_as_ptr", since = "1.70.0")] #[rustc_const_stable(feature = "atomic_as_ptr", since = "1.70.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub const fn as_ptr(&self) -> *mut *mut T { self.p.get() } @@ -2083,7 +2180,7 @@ /// # Examples /// /// ``` - /// #![feature(atomic_from_ptr, pointer_is_aligned)] + /// #![feature(pointer_is_aligned)] #[doc = concat!($extra_feature, "use std::sync::atomic::{self, ", stringify!($atomic_type), "};")] /// use std::mem::align_of; /// @@ -2111,14 +2208,18 @@ /// /// # Safety /// - /// * `ptr` must be aligned to `align_of::()` (note that on some platforms this can be bigger than `align_of::()`). - #[doc = concat!(" * `ptr` must be aligned to `align_of::<", stringify!($atomic_type), ">()` (note that on some platforms this can be bigger than `align_of::<", stringify!($int_type), ">()`).")] + #[doc = concat!(" * `ptr` must be aligned to \ + `align_of::<", stringify!($atomic_type), ">()` (note that on some platforms this \ + can be bigger than `align_of::<", stringify!($int_type), ">()`).")] /// * `ptr` must be [valid] for both reads and writes for the whole lifetime `'a`. - /// * The value behind `ptr` must not be accessed through non-atomic operations for the whole lifetime `'a`. + /// * You must adhere to the [Memory model for atomic accesses]. In particular, it is not + /// allowed to mix atomic and non-atomic accesses, or atomic accesses of different sizes, + /// without synchronization. 
/// /// [valid]: crate::ptr#safety - #[unstable(feature = "atomic_from_ptr", issue = "108652")] - #[rustc_const_unstable(feature = "atomic_from_ptr", issue = "108652")] + /// [Memory model for atomic accesses]: self#memory-model-for-atomic-accesses + #[stable(feature = "atomic_from_ptr", since = "1.75.0")] + #[rustc_const_unstable(feature = "const_atomic_from_ptr", issue = "108652")] pub const unsafe fn from_ptr<'a>(ptr: *mut $int_type) -> &'a $atomic_type { // SAFETY: guaranteed by the caller unsafe { &*ptr.cast() } @@ -2893,7 +2994,7 @@ #[inline] #[stable(feature = "atomic_as_ptr", since = "1.70.0")] #[rustc_const_stable(feature = "atomic_as_ptr", since = "1.70.0")] - #[cfg_attr(not(bootstrap), rustc_never_returns_null_ptr)] + #[rustc_never_returns_null_ptr] pub const fn as_ptr(&self) -> *mut $int_type { self.v.get() } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/sync/exclusive.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/sync/exclusive.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/sync/exclusive.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/sync/exclusive.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,7 +3,7 @@ use core::fmt; use core::future::Future; use core::marker::Tuple; -use core::ops::{Generator, GeneratorState}; +use core::ops::{Coroutine, CoroutineState}; use core::pin::Pin; use core::task::{Context, Poll}; @@ -206,16 +206,16 @@ } } -#[unstable(feature = "generator_trait", issue = "43122")] // also #98407 -impl Generator for Exclusive +#[unstable(feature = "coroutine_trait", issue = "43122")] // also #98407 +impl Coroutine for Exclusive where - G: Generator + ?Sized, + G: Coroutine + ?Sized, { type Yield = G::Yield; type Return = G::Return; #[inline] - fn resume(self: Pin<&mut Self>, arg: R) -> GeneratorState { + fn resume(self: Pin<&mut Self>, arg: R) -> CoroutineState { G::resume(self.get_pin_mut(), arg) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/task/wake.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/task/wake.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/task/wake.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/task/wake.rs 2023-12-21 16:55:28.000000000 +0000 @@ -231,6 +231,10 @@ /// this might be done to wake a future when a blocking function call completes on another /// thread. /// +/// Note that it is preferable to use `waker.clone_from(&new_waker)` instead +/// of `*waker = new_waker.clone()`, as the former will avoid cloning the waker +/// unnecessarily if the two wakers [wake the same task](Self::will_wake). +/// /// [`Future::poll()`]: core::future::Future::poll /// [`Poll::Pending`]: core::task::Poll::Pending #[cfg_attr(not(doc), repr(transparent))] // work around https://github.com/rust-lang/rust/issues/66401 @@ -302,7 +306,9 @@ /// when the `Waker`s would awaken the same task. However, if this function /// returns `true`, it is guaranteed that the `Waker`s will awaken the same task. /// - /// This function is primarily used for optimization purposes. + /// This function is primarily used for optimization purposes — for example, + /// this type's [`clone_from`](Self::clone_from) implementation uses it to + /// avoid cloning the waker when they would wake the same task anyway. 
#[inline] #[must_use] #[stable(feature = "futures_api", since = "1.36.0")] @@ -382,6 +388,13 @@ waker: unsafe { (self.waker.vtable.clone)(self.waker.data) }, } } + + #[inline] + fn clone_from(&mut self, source: &Self) { + if !self.will_wake(source) { + *self = source.clone(); + } + } } #[stable(feature = "futures_api", since = "1.36.0")] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/time.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/time.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/src/time.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/src/time.rs 2023-12-21 16:55:28.000000000 +0000 @@ -910,6 +910,7 @@ impl Add for Duration { type Output = Duration; + #[inline] fn add(self, rhs: Duration) -> Duration { self.checked_add(rhs).expect("overflow when adding durations") } @@ -917,6 +918,7 @@ #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl AddAssign for Duration { + #[inline] fn add_assign(&mut self, rhs: Duration) { *self = *self + rhs; } @@ -926,6 +928,7 @@ impl Sub for Duration { type Output = Duration; + #[inline] fn sub(self, rhs: Duration) -> Duration { self.checked_sub(rhs).expect("overflow when subtracting durations") } @@ -933,6 +936,7 @@ #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl SubAssign for Duration { + #[inline] fn sub_assign(&mut self, rhs: Duration) { *self = *self - rhs; } @@ -942,6 +946,7 @@ impl Mul<u32> for Duration { type Output = Duration; + #[inline] fn mul(self, rhs: u32) -> Duration { self.checked_mul(rhs).expect("overflow when multiplying duration by scalar") } @@ -951,6 +956,7 @@ impl Mul<Duration> for u32 { type Output = Duration; + #[inline] fn mul(self, rhs: Duration) -> Duration { rhs * self } @@ -958,6 +964,7 @@ #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl MulAssign<u32> for Duration { + #[inline] fn mul_assign(&mut self, rhs: u32) { *self = *self * rhs; } @@ -967,6 +974,7 @@ impl Div<u32> for Duration { type Output = Duration; + #[inline] fn div(self, rhs: u32) -> Duration { self.checked_div(rhs).expect("divide by zero error when dividing duration by scalar") } @@ -974,6 +982,7 @@ #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl DivAssign<u32> for Duration { + #[inline] fn div_assign(&mut self, rhs: u32) { *self = *self / rhs; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/array.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/array.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/array.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/array.rs 2023-12-21 16:55:28.000000000 +0000 @@ -663,7 +663,7 @@ #[test] fn array_into_iter_fold() { - // Strings to help MIRI catch if we double-free or something + // Strings to help Miri catch if we double-free or something let a = ["Aa".to_string(), "Bb".to_string(), "Cc".to_string()]; let mut s = "s".to_string(); a.into_iter().for_each(|b| s += &b); @@ -679,7 +679,7 @@ #[test] fn array_into_iter_rfold() { - // Strings to help MIRI catch if we double-free or something + // Strings to help Miri catch if we double-free or something let a = ["Aa".to_string(), "Bb".to_string(), "Cc".to_string()]; let mut s = "s".to_string(); a.into_iter().rev().for_each(|b| s += &b); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/io/borrowed_buf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/io/borrowed_buf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/io/borrowed_buf.rs 1970-01-01 00:00:00.000000000 +0000 +++ 
rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/io/borrowed_buf.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,175 @@ +use core::io::BorrowedBuf; +use core::mem::MaybeUninit; + +/// Test that BorrowedBuf has the correct numbers when created with new +#[test] +fn new() { + let buf: &mut [_] = &mut [0; 16]; + let mut rbuf: BorrowedBuf<'_> = buf.into(); + + assert_eq!(rbuf.filled().len(), 0); + assert_eq!(rbuf.init_len(), 16); + assert_eq!(rbuf.capacity(), 16); + assert_eq!(rbuf.unfilled().capacity(), 16); +} + +/// Test that BorrowedBuf has the correct numbers when created with uninit +#[test] +fn uninit() { + let buf: &mut [_] = &mut [MaybeUninit::uninit(); 16]; + let mut rbuf: BorrowedBuf<'_> = buf.into(); + + assert_eq!(rbuf.filled().len(), 0); + assert_eq!(rbuf.init_len(), 0); + assert_eq!(rbuf.capacity(), 16); + assert_eq!(rbuf.unfilled().capacity(), 16); +} + +#[test] +fn initialize_unfilled() { + let buf: &mut [_] = &mut [MaybeUninit::uninit(); 16]; + let mut rbuf: BorrowedBuf<'_> = buf.into(); + + rbuf.unfilled().ensure_init(); + + assert_eq!(rbuf.init_len(), 16); +} + +#[test] +fn advance_filled() { + let buf: &mut [_] = &mut [0; 16]; + let mut rbuf: BorrowedBuf<'_> = buf.into(); + + unsafe { + rbuf.unfilled().advance(1); + } + + assert_eq!(rbuf.filled().len(), 1); + assert_eq!(rbuf.unfilled().capacity(), 15); +} + +#[test] +fn clear() { + let buf: &mut [_] = &mut [255; 16]; + let mut rbuf: BorrowedBuf<'_> = buf.into(); + + unsafe { + rbuf.unfilled().advance(16); + } + + assert_eq!(rbuf.filled().len(), 16); + assert_eq!(rbuf.unfilled().capacity(), 0); + + rbuf.clear(); + + assert_eq!(rbuf.filled().len(), 0); + assert_eq!(rbuf.unfilled().capacity(), 16); + + assert_eq!(rbuf.unfilled().init_ref(), [255; 16]); +} + +#[test] +fn set_init() { + let buf: &mut [_] = &mut [MaybeUninit::uninit(); 16]; + let mut rbuf: BorrowedBuf<'_> = buf.into(); + + unsafe { + rbuf.set_init(8); + } + + assert_eq!(rbuf.init_len(), 8); + + unsafe { + rbuf.unfilled().advance(4); + } + + unsafe { + rbuf.set_init(2); + } + + assert_eq!(rbuf.init_len(), 8); + + unsafe { + rbuf.set_init(8); + } + + assert_eq!(rbuf.init_len(), 8); +} + +#[test] +fn append() { + let buf: &mut [_] = &mut [MaybeUninit::new(255); 16]; + let mut rbuf: BorrowedBuf<'_> = buf.into(); + + rbuf.unfilled().append(&[0; 8]); + + assert_eq!(rbuf.init_len(), 8); + assert_eq!(rbuf.filled().len(), 8); + assert_eq!(rbuf.filled(), [0; 8]); + + rbuf.clear(); + + rbuf.unfilled().append(&[1; 16]); + + assert_eq!(rbuf.init_len(), 16); + assert_eq!(rbuf.filled().len(), 16); + assert_eq!(rbuf.filled(), [1; 16]); +} + +#[test] +fn reborrow_written() { + let buf: &mut [_] = &mut [MaybeUninit::new(0); 32]; + let mut buf: BorrowedBuf<'_> = buf.into(); + + let mut cursor = buf.unfilled(); + cursor.append(&[1; 16]); + + let mut cursor2 = cursor.reborrow(); + cursor2.append(&[2; 16]); + + assert_eq!(cursor2.written(), 32); + assert_eq!(cursor.written(), 32); + + assert_eq!(buf.unfilled().written(), 0); + assert_eq!(buf.init_len(), 32); + assert_eq!(buf.filled().len(), 32); + let filled = buf.filled(); + assert_eq!(&filled[..16], [1; 16]); + assert_eq!(&filled[16..], [2; 16]); +} + +#[test] +fn cursor_set_init() { + let buf: &mut [_] = &mut [MaybeUninit::uninit(); 16]; + let mut rbuf: BorrowedBuf<'_> = buf.into(); + + unsafe { + rbuf.unfilled().set_init(8); + } + + assert_eq!(rbuf.init_len(), 8); + assert_eq!(rbuf.unfilled().init_ref().len(), 8); + assert_eq!(rbuf.unfilled().init_mut().len(), 8); + assert_eq!(rbuf.unfilled().uninit_mut().len(), 8); + 
assert_eq!(unsafe { rbuf.unfilled().as_mut() }.len(), 16); + + unsafe { + rbuf.unfilled().advance(4); + } + + unsafe { + rbuf.unfilled().set_init(2); + } + + assert_eq!(rbuf.init_len(), 8); + + unsafe { + rbuf.unfilled().set_init(8); + } + + assert_eq!(rbuf.init_len(), 12); + assert_eq!(rbuf.unfilled().init_ref().len(), 8); + assert_eq!(rbuf.unfilled().init_mut().len(), 8); + assert_eq!(rbuf.unfilled().uninit_mut().len(), 4); + assert_eq!(unsafe { rbuf.unfilled().as_mut() }.len(), 12); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/io/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/io/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/io/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/io/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1 @@ +mod borrowed_buf; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/iter/adapters/zip.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/iter/adapters/zip.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/iter/adapters/zip.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/iter/adapters/zip.rs 2023-12-21 16:55:28.000000000 +0000 @@ -184,7 +184,11 @@ let it = xs.iter_mut().map(|x| *x = 1).enumerate().zip(&ys); it.count(); } - assert_eq!(&xs, &[1, 1, 1, 1, 1, 0]); + let length_aware = &xs == &[1, 1, 1, 1, 0, 0]; + let probe_first = &xs == &[1, 1, 1, 1, 1, 0]; + + // either implementation is valid according to zip documentation + assert!(length_aware || probe_first); } #[test] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -15,9 +15,7 @@ #![feature(const_hash)] #![feature(const_heap)] #![feature(const_maybe_uninit_as_mut_ptr)] -#![feature(const_maybe_uninit_assume_init_read)] #![feature(const_nonnull_new)] -#![feature(const_pointer_byte_offsets)] #![feature(const_pointer_is_aligned)] #![feature(const_ptr_as_ref)] #![feature(const_ptr_write)] @@ -25,6 +23,7 @@ #![feature(const_likely)] #![feature(const_location_fields)] #![feature(core_intrinsics)] +#![feature(core_io_borrowed_buf)] #![feature(core_private_bignum)] #![feature(core_private_diy_float)] #![feature(dec2flt)] @@ -49,6 +48,7 @@ #![feature(sort_internals)] #![feature(slice_take)] #![feature(slice_from_ptr_range)] +#![feature(slice_split_once)] #![feature(split_as_slice)] #![feature(maybe_uninit_uninit_array)] #![feature(maybe_uninit_write_slice)] @@ -87,7 +87,6 @@ #![feature(const_waker)] #![feature(never_type)] #![feature(unwrap_infallible)] -#![feature(pointer_byte_offsets)] #![feature(pointer_is_aligned)] #![feature(portable_simd)] #![feature(ptr_metadata)] @@ -120,8 +119,6 @@ #![deny(unsafe_op_in_unsafe_fn)] #![deny(fuzzy_provenance_casts)] -extern crate test; - mod alloc; mod any; mod array; @@ -139,6 +136,7 @@ mod future; mod hash; mod intrinsics; +mod io; mod iter; mod lazy; #[cfg(test)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/mem.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/mem.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/mem.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/mem.rs 2023-12-21 16:55:28.000000000 +0000 @@ -565,3 +565,24 @@ assert_eq!(ptr::addr_of!(base).addr() + 
offset_of!(Foo, z.0), ptr::addr_of!(base.z.0).addr()); assert_eq!(ptr::addr_of!(base).addr() + offset_of!(Foo, z.1), ptr::addr_of!(base.z.1).addr()); } + +#[test] +fn const_maybe_uninit_zeroed() { + // Sanity check for `MaybeUninit::zeroed` in a realistic const situation (plugin array term) + #[repr(C)] + struct Foo { + a: Option<&'static str>, + b: Bar, + c: f32, + d: *const u8, + } + #[repr(C)] + struct Bar(usize); + struct FooPtr(*const Foo); + unsafe impl Sync for FooPtr {} + + static UNINIT: FooPtr = FooPtr([unsafe { MaybeUninit::zeroed().assume_init() }].as_ptr()); + const SIZE: usize = size_of::<Foo>(); + + assert_eq!(unsafe { (*UNINIT.0.cast::<[[u8; SIZE]; 1]>())[0] }, [0u8; SIZE]); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/net/socket_addr.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/net/socket_addr.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/net/socket_addr.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/net/socket_addr.rs 2023-12-21 16:55:28.000000000 +0000 @@ -199,6 +199,9 @@ let v6_1 = "[2001:db8:f00::1002]:23456".parse::<SocketAddrV6>().unwrap(); let v6_2 = "[2001:db8:f00::2001]:12345".parse::<SocketAddrV6>().unwrap(); let v6_3 = "[2001:db8:f00::2001]:23456".parse::<SocketAddrV6>().unwrap(); + let v6_4 = "[2001:db8:f00::2001%42]:23456".parse::<SocketAddrV6>().unwrap(); + let mut v6_5 = "[2001:db8:f00::2001]:23456".parse::<SocketAddrV6>().unwrap(); + v6_5.set_flowinfo(17); // equality assert_eq!(v4_1, v4_1); @@ -207,6 +210,8 @@ assert_eq!(SocketAddr::V6(v6_1), SocketAddr::V6(v6_1)); assert!(v4_1 != v4_2); assert!(v6_1 != v6_2); + assert!(v6_3 != v6_4); + assert!(v6_3 != v6_5); // compare different addresses assert!(v4_1 < v4_2); @@ -226,6 +231,12 @@ assert!(v4_3 > v4_1); assert!(v6_3 > v6_1); + // compare the same address with different scope_id + assert!(v6_3 < v6_4); + + // compare the same address with different flowinfo + assert!(v6_3 < v6_5); + // compare with an inferred right-hand side assert_eq!(v4_1, "224.120.45.1:23456".parse().unwrap()); assert_eq!(v6_1, "[2001:db8:f00::1002]:23456".parse().unwrap()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/num/flt2dec/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/num/flt2dec/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/num/flt2dec/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/num/flt2dec/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -8,8 +8,6 @@ }; use core::num::fmt::{Formatted, Part}; -pub use test::Bencher; - mod estimator; mod strategy { mod dragon; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/slice.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/slice.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/core/tests/slice.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/core/tests/slice.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2476,6 +2476,26 @@ let _ = v.rsplit_array_mut::<7>(); } +#[test] +fn slice_split_once() { + let v = &[1, 2, 3, 2, 4][..]; + + assert_eq!(v.split_once(|&x| x == 2), Some((&[1][..], &[3, 2, 4][..]))); + assert_eq!(v.split_once(|&x| x == 1), Some((&[][..], &[2, 3, 2, 4][..]))); + assert_eq!(v.split_once(|&x| x == 4), Some((&[1, 2, 3, 2][..], &[][..]))); + assert_eq!(v.split_once(|&x| x == 0), None); +} + +#[test] +fn slice_rsplit_once() { + let v = &[1, 2, 3, 2, 4][..]; + + assert_eq!(v.rsplit_once(|&x| x == 2), Some((&[1, 2, 3][..], &[4][..]))); + assert_eq!(v.rsplit_once(|&x| x == 1), Some((&[][..], &[2, 3, 2, 4][..]))); + 
assert_eq!(v.rsplit_once(|&x| x == 4), Some((&[1, 2, 3, 2][..], &[][..]))); + assert_eq!(v.rsplit_once(|&x| x == 0), None); +} + macro_rules! take_tests { (slice: &[], $($tts:tt)*) => { take_tests!(ty: &[()], slice: &[], $($tts)*); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/panic_unwind/src/gcc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/panic_unwind/src/gcc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/panic_unwind/src/gcc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/panic_unwind/src/gcc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -63,7 +63,7 @@ _uwe: uw::_Unwind_Exception { exception_class: rust_exception_class(), exception_cleanup, - private: [0; uw::unwinder_private_data_size], + private: [core::ptr::null(); uw::unwinder_private_data_size], }, canary: &CANARY, cause: data, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/portable-simd/crates/core_simd/src/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/portable-simd/crates/core_simd/src/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/portable-simd/crates/core_simd/src/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/portable-simd/crates/core_simd/src/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -35,6 +35,5 @@ pub use crate::core_simd::masks::*; pub use crate::core_simd::ord::*; pub use crate::core_simd::swizzle::*; - pub use crate::core_simd::swizzle_dyn::*; pub use crate::core_simd::vector::*; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/proc_macro/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/proc_macro/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/proc_macro/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/proc_macro/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -17,6 +17,8 @@ test(no_crate_inject, attr(deny(warnings))), test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))) )] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] // This library is copied into rust-analyzer to allow loading rustc compiled proc macros. // Please avoid unstable features where possible to minimize the amount of changes necessary // to make it compile with rust-analyzer on stable. 
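The new `split_once` and `rsplit_once` slice methods exercised by the `core/tests/slice.rs` hunk above remain unstable in this release (feature `slice_split_once`, tracking issue #112811). A minimal sketch of how calling code might use them on a nightly toolchain; the byte values are illustrative and not taken from this patch:

```rust
// Illustrative sketch only: requires a nightly toolchain with the
// unstable `slice_split_once` feature enabled.
#![feature(slice_split_once)]

fn main() {
    let record: &[u8] = b"key=value=more";

    // `split_once` splits around the first element matching the predicate.
    assert_eq!(
        record.split_once(|&b| b == b'='),
        Some((&b"key"[..], &b"value=more"[..]))
    );

    // `rsplit_once` splits around the last matching element instead.
    assert_eq!(
        record.rsplit_once(|&b| b == b'='),
        Some((&b"key=value"[..], &b"more"[..]))
    );

    // With no matching element, both return `None`.
    assert_eq!(record.split_once(|&b| b == b','), None);
}
```

Unlike `str::split_once`, which takes a pattern, the slice versions take a predicate, in line with the rest of the `slice::split` family.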
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -17,8 +17,8 @@ panic_unwind = { path = "../panic_unwind", optional = true } panic_abort = { path = "../panic_abort" } core = { path = "../core", public = true } -libc = { version = "0.2.148", default-features = false, features = ['rustc-dep-of-std'], public = true } -compiler_builtins = { version = "0.1.100" } +libc = { version = "0.2.150", default-features = false, features = ['rustc-dep-of-std'], public = true } +compiler_builtins = { version = "0.1.103" } profiler_builtins = { path = "../profiler_builtins", optional = true } unwind = { path = "../unwind" } hashbrown = { version = "0.14", default-features = false, features = ['rustc-dep-of-std'] } @@ -72,7 +72,7 @@ system-llvm-libunwind = ["unwind/system-llvm-libunwind"] # Make panics and failed asserts immediately abort without formatting any message -panic_immediate_abort = ["core/panic_immediate_abort"] +panic_immediate_abort = ["core/panic_immediate_abort", "alloc/panic_immediate_abort"] # Enable std_detect default features for stdarch/crates/std_detect: # https://github.com/rust-lang/stdarch/blob/master/crates/std_detect/Cargo.toml diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/build.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/build.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/build.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/build.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,17 +3,11 @@ fn main() { println!("cargo:rerun-if-changed=build.rs"); let target = env::var("TARGET").expect("TARGET was not set"); - if target.contains("freebsd") { - if env::var("RUST_STD_FREEBSD_12_ABI").is_ok() { - println!("cargo:rustc-cfg=freebsd12"); - } else if env::var("RUST_STD_FREEBSD_13_ABI").is_ok() { - println!("cargo:rustc-cfg=freebsd12"); - println!("cargo:rustc-cfg=freebsd13"); - } - } else if target.contains("linux") + if target.contains("linux") || target.contains("netbsd") || target.contains("dragonfly") || target.contains("openbsd") + || target.contains("freebsd") || target.contains("solaris") || target.contains("illumos") || target.contains("apple-darwin") @@ -36,6 +30,7 @@ || target.contains("solid") || target.contains("nintendo-3ds") || target.contains("vita") + || target.contains("aix") || target.contains("nto") || target.contains("xous") || target.contains("hurd") diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/collections/hash/map.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/collections/hash/map.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/collections/hash/map.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/collections/hash/map.rs 2023-12-21 16:55:28.000000000 +0000 @@ -24,7 +24,7 @@ /// reasonable best-effort is made to generate this seed from a high quality, /// secure source of randomness provided by the host without blocking the /// program. Because of this, the randomness of the seed depends on the output -/// quality of the system's random number generator when the seed is created. +/// quality of the system's random number coroutine when the seed is created. 
/// In particular, seeds generated when the system's entropy pool is abnormally /// low such as during system boot may be of a lower quality. /// diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/collections/hash/set.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/collections/hash/set.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/collections/hash/set.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/collections/hash/set.rs 2023-12-21 16:55:28.000000000 +0000 @@ -144,7 +144,7 @@ #[must_use] #[stable(feature = "rust1", since = "1.0.0")] pub fn with_capacity(capacity: usize) -> HashSet { - HashSet { base: base::HashSet::with_capacity_and_hasher(capacity, Default::default()) } + HashSet::with_capacity_and_hasher(capacity, Default::default()) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/fs/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/fs/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/fs/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/fs/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1707,3 +1707,89 @@ assert_eq!(metadata.created().unwrap(), created); } } + +#[test] +#[cfg(any(target_os = "macos", target_os = "ios", target_os = "tvos", target_os = "watchos"))] +fn test_file_times_pre_epoch_with_nanos() { + #[cfg(target_os = "ios")] + use crate::os::ios::fs::FileTimesExt; + #[cfg(target_os = "macos")] + use crate::os::macos::fs::FileTimesExt; + #[cfg(target_os = "tvos")] + use crate::os::tvos::fs::FileTimesExt; + #[cfg(target_os = "watchos")] + use crate::os::watchos::fs::FileTimesExt; + + let tmp = tmpdir(); + let file = File::create(tmp.join("foo")).unwrap(); + + for (accessed, modified, created) in [ + // The first round is to set filetimes to something we know works, but this time + // it's validated with nanoseconds as well which probe the numeric boundary. + ( + SystemTime::UNIX_EPOCH + Duration::new(12345, 1), + SystemTime::UNIX_EPOCH + Duration::new(54321, 100_000_000), + SystemTime::UNIX_EPOCH + Duration::new(32123, 999_999_999), + ), + // The second rounds uses pre-epoch dates along with nanoseconds that probe + // the numeric boundary. + ( + SystemTime::UNIX_EPOCH - Duration::new(1, 1), + SystemTime::UNIX_EPOCH - Duration::new(60, 100_000_000), + SystemTime::UNIX_EPOCH - Duration::new(3600, 999_999_999), + ), + ] { + let mut times = FileTimes::new(); + times = times.set_accessed(accessed).set_modified(modified).set_created(created); + file.set_times(times).unwrap(); + + let metadata = file.metadata().unwrap(); + assert_eq!(metadata.accessed().unwrap(), accessed); + assert_eq!(metadata.modified().unwrap(), modified); + assert_eq!(metadata.created().unwrap(), created); + } +} + +#[test] +#[cfg(windows)] +fn windows_unix_socket_exists() { + use crate::sys::{c, net}; + use crate::{mem, ptr}; + + let tmp = tmpdir(); + let socket_path = tmp.join("socket"); + + // std doesn't currently support Unix sockets on Windows so manually create one here. + net::init(); + unsafe { + let socket = c::WSASocketW( + c::AF_UNIX as i32, + c::SOCK_STREAM, + 0, + ptr::null_mut(), + 0, + c::WSA_FLAG_OVERLAPPED | c::WSA_FLAG_NO_HANDLE_INHERIT, + ); + // AF_UNIX is not supported on earlier versions of Windows, + // so skip this test if it's unsupported and we're not in CI. 
+ if socket == c::INVALID_SOCKET { + let error = c::WSAGetLastError(); + if env::var_os("CI").is_none() && error == c::WSAEAFNOSUPPORT { + return; + } else { + panic!("Creating AF_UNIX socket failed (OS error {error})"); + } + } + let mut addr = c::SOCKADDR_UN { sun_family: c::AF_UNIX, sun_path: mem::zeroed() }; + let bytes = socket_path.as_os_str().as_encoded_bytes(); + addr.sun_path[..bytes.len()].copy_from_slice(bytes); + let len = mem::size_of_val(&addr) as i32; + let result = c::bind(socket, ptr::addr_of!(addr).cast::(), len); + c::closesocket(socket); + assert_eq!(result, 0); + } + // Make sure all ways of testing a file exist work for a Unix socket. + assert_eq!(socket_path.exists(), true); + assert_eq!(socket_path.try_exists().unwrap(), true); + assert_eq!(socket_path.metadata().is_ok(), true); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/fs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/fs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/fs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/fs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -184,11 +184,12 @@ /// ``` #[derive(Clone, Debug)] #[stable(feature = "rust1", since = "1.0.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "FsOpenOptions")] pub struct OpenOptions(fs_imp::OpenOptions); /// Representation of the various timestamps on a file. #[derive(Copy, Clone, Debug, Default)] -#[unstable(feature = "file_set_times", issue = "98245")] +#[stable(feature = "file_set_times", since = "1.75.0")] pub struct FileTimes(fs_imp::FileTimes); /// Representation of the various permissions on a file. @@ -201,6 +202,7 @@ /// [`PermissionsExt`]: crate::os::unix::fs::PermissionsExt #[derive(Clone, PartialEq, Eq, Debug)] #[stable(feature = "rust1", since = "1.0.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "FsPermissions")] pub struct Permissions(fs_imp::FilePermissions); /// A structure representing a type of file with accessors for each file type. @@ -674,8 +676,6 @@ /// # Examples /// /// ```no_run - /// #![feature(file_set_times)] - /// /// fn main() -> std::io::Result<()> { /// use std::fs::{self, File, FileTimes}; /// @@ -688,7 +688,7 @@ /// Ok(()) /// } /// ``` - #[unstable(feature = "file_set_times", issue = "98245")] + #[stable(feature = "file_set_times", since = "1.75.0")] #[doc(alias = "futimens")] #[doc(alias = "futimes")] #[doc(alias = "SetFileTime")] @@ -699,7 +699,7 @@ /// Changes the modification time of the underlying file. /// /// This is an alias for `set_times(FileTimes::new().set_modified(time))`. - #[unstable(feature = "file_set_times", issue = "98245")] + #[stable(feature = "file_set_times", since = "1.75.0")] #[inline] pub fn set_modified(&self, time: SystemTime) -> io::Result<()> { self.set_times(FileTimes::new().set_modified(time)) @@ -1413,20 +1413,20 @@ /// Create a new `FileTimes` with no times set. /// /// Using the resulting `FileTimes` in [`File::set_times`] will not modify any timestamps. - #[unstable(feature = "file_set_times", issue = "98245")] + #[stable(feature = "file_set_times", since = "1.75.0")] pub fn new() -> Self { Self::default() } /// Set the last access time of a file. - #[unstable(feature = "file_set_times", issue = "98245")] + #[stable(feature = "file_set_times", since = "1.75.0")] pub fn set_accessed(mut self, t: SystemTime) -> Self { self.0.set_accessed(t.into_inner()); self } /// Set the last modified time of a file. 
- #[unstable(feature = "file_set_times", issue = "98245")] + #[stable(feature = "file_set_times", since = "1.75.0")] pub fn set_modified(mut self, t: SystemTime) -> Self { self.0.set_modified(t.into_inner()); self @@ -1440,7 +1440,7 @@ } // For implementing OS extension traits in `std::os` -#[unstable(feature = "file_set_times", issue = "98245")] +#[stable(feature = "file_set_times", since = "1.75.0")] impl Sealed for FileTimes {} impl Permissions { @@ -2241,6 +2241,7 @@ /// ``` #[doc(alias = "mkdir")] #[stable(feature = "rust1", since = "1.0.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "fs_create_dir")] pub fn create_dir>(path: P) -> io::Result<()> { DirBuilder::new().create(path.as_ref()) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/buffered/bufreader.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/buffered/bufreader.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/buffered/bufreader.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/buffered/bufreader.rs 2023-12-21 16:55:28.000000000 +0000 @@ -2,7 +2,8 @@ use crate::fmt; use crate::io::{ - self, BorrowedCursor, BufRead, IoSliceMut, Read, Seek, SeekFrom, SizeHint, DEFAULT_BUF_SIZE, + self, uninlined_slow_read_byte, BorrowedCursor, BufRead, IoSliceMut, Read, Seek, SeekFrom, + SizeHint, SpecReadByte, DEFAULT_BUF_SIZE, }; use buffer::Buffer; @@ -259,6 +260,22 @@ } } +impl SpecReadByte for BufReader +where + Self: Read, +{ + #[inline] + fn spec_read_byte(&mut self) -> Option> { + let mut byte = 0; + if self.buf.consume_with(1, |claimed| byte = claimed[0]) { + return Some(Ok(byte)); + } + + // Fallback case, only reached once per buffer refill. + uninlined_slow_read_byte(self) + } +} + #[stable(feature = "rust1", since = "1.0.0")] impl Read for BufReader { fn read(&mut self, buf: &mut [u8]) -> io::Result { @@ -269,10 +286,8 @@ self.discard_buffer(); return self.inner.read(buf); } - let nread = { - let mut rem = self.fill_buf()?; - rem.read(buf)? 
- }; + let mut rem = self.fill_buf()?; + let nread = rem.read(buf)?; self.consume(nread); Ok(nread) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/copy/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/copy/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/copy/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/copy/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -81,6 +81,18 @@ } #[test] +fn copy_specializes_to_vec() { + let cap = 123456; + let mut source = ShortReader { cap, observed_buffer: 0, read_size: 1337 }; + let mut sink = Vec::new(); + assert_eq!(cap as u64, io::copy(&mut source, &mut sink).unwrap()); + assert!( + source.observed_buffer > DEFAULT_BUF_SIZE, + "expected a large buffer to be provided to the reader" + ); +} + +#[test] fn copy_specializes_from_vecdeque() { let mut source = VecDeque::with_capacity(100 * 1024); for _ in 0..20 * 1024 { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/copy.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/copy.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/copy.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/copy.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,5 +1,7 @@ use super::{BorrowedBuf, BufReader, BufWriter, Read, Result, Write, DEFAULT_BUF_SIZE}; use crate::alloc::Allocator; +use crate::cmp; +use crate::cmp::min; use crate::collections::VecDeque; use crate::io::IoSlice; use crate::mem::MaybeUninit; @@ -252,6 +254,78 @@ } } } +} + +impl BufferedWriterSpec for Vec { + fn buffer_size(&self) -> usize { + cmp::max(DEFAULT_BUF_SIZE, self.capacity() - self.len()) + } + + fn copy_from(&mut self, reader: &mut R) -> Result { + let mut bytes = 0; + + // avoid inflating empty/small vecs before we have determined that there's anything to read + if self.capacity() < DEFAULT_BUF_SIZE { + let stack_read_limit = DEFAULT_BUF_SIZE as u64; + bytes = stack_buffer_copy(&mut reader.take(stack_read_limit), self)?; + // fewer bytes than requested -> EOF reached + if bytes < stack_read_limit { + return Ok(bytes); + } + } + + // don't immediately offer the vec's whole spare capacity, otherwise + // we might have to fully initialize it if the reader doesn't have a custom read_buf() impl + let mut max_read_size = DEFAULT_BUF_SIZE; + + loop { + self.reserve(DEFAULT_BUF_SIZE); + let mut initialized_spare_capacity = 0; + + loop { + let buf = self.spare_capacity_mut(); + let read_size = min(max_read_size, buf.len()); + let mut buf = BorrowedBuf::from(&mut buf[..read_size]); + // SAFETY: init is either 0 or the init_len from the previous iteration. + unsafe { + buf.set_init(initialized_spare_capacity); + } + match reader.read_buf(buf.unfilled()) { + Ok(()) => { + let bytes_read = buf.len(); + + // EOF + if bytes_read == 0 { + return Ok(bytes); + } + + // the reader is returning short reads but it doesn't call ensure_init() + if buf.init_len() < buf.capacity() { + max_read_size = usize::MAX; + } + // the reader hasn't returned short reads so far + if bytes_read == buf.capacity() { + max_read_size *= 2; + } + + initialized_spare_capacity = buf.init_len() - bytes_read; + bytes += bytes_read as u64; + // SAFETY: BorrowedBuf guarantees all of its filled bytes are init + // and the number of read bytes can't exceed the spare capacity since + // that's what the buffer is borrowing from. 
+ unsafe { self.set_len(self.len() + bytes_read) }; + + // spare capacity full, reserve more + if self.len() == self.capacity() { + break; + } + } + Err(e) if e.is_interrupted() => continue, + Err(e) => return Err(e), + } + } + } + } } fn stack_buffer_copy( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/impls.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/impls.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/impls.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/impls.rs 2023-12-21 16:55:28.000000000 +0000 @@ -475,6 +475,24 @@ } } +/// BufRead is implemented for `VecDeque` by reading bytes from the front of the `VecDeque`. +#[stable(feature = "vecdeque_buf_read", since = "1.75.0")] +impl BufRead for VecDeque { + /// Returns the contents of the "front" slice as returned by + /// [`as_slices`][`VecDeque::as_slices`]. If the contained byte slices of the `VecDeque` are + /// discontiguous, multiple calls to `fill_buf` will be needed to read the entire content. + #[inline] + fn fill_buf(&mut self) -> io::Result<&[u8]> { + let (front, _) = self.as_slices(); + Ok(front) + } + + #[inline] + fn consume(&mut self, amt: usize) { + self.drain(..amt); + } +} + /// Write is implemented for `VecDeque` by appending to the `VecDeque`, growing it as needed. #[stable(feature = "vecdeque_read_write", since = "1.63.0")] impl Write for VecDeque { @@ -506,6 +524,20 @@ } #[inline] + fn flush(&mut self) -> io::Result<()> { + Ok(()) + } +} + +#[unstable(feature = "read_buf", issue = "78485")] +impl<'a> io::Write for core::io::BorrowedCursor<'a> { + fn write(&mut self, buf: &[u8]) -> io::Result { + let amt = cmp::min(buf.len(), self.capacity()); + self.append(&buf[..amt]); + Ok(amt) + } + + #[inline] fn flush(&mut self) -> io::Result<()> { Ok(()) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -317,6 +317,7 @@ #[stable(feature = "is_terminal", since = "1.70.0")] pub use self::stdio::IsTerminal; #[unstable(feature = "print_internals", issue = "none")] +#[doc(hidden)] pub use self::stdio::{_eprint, _print}; #[stable(feature = "rust1", since = "1.0.0")] pub use self::{ @@ -329,7 +330,7 @@ }; #[unstable(feature = "read_buf", issue = "78485")] -pub use self::readbuf::{BorrowedBuf, BorrowedCursor}; +pub use core::io::{BorrowedBuf, BorrowedCursor}; pub(crate) use error::const_io_error; mod buffered; @@ -338,7 +339,6 @@ mod error; mod impls; pub mod prelude; -mod readbuf; mod stdio; mod util; @@ -513,8 +513,7 @@ match this.read(buf) { Ok(0) => break, Ok(n) => { - let tmp = buf; - buf = &mut tmp[n..]; + buf = &mut buf[n..]; } Err(ref e) if e.is_interrupted() => {} Err(e) => return Err(e), @@ -1141,10 +1140,10 @@ #[repr(transparent)] pub struct IoSliceMut<'a>(sys::io::IoSliceMut<'a>); -#[stable(feature = "iovec-send-sync", since = "1.44.0")] +#[stable(feature = "iovec_send_sync", since = "1.44.0")] unsafe impl<'a> Send for IoSliceMut<'a> {} -#[stable(feature = "iovec-send-sync", since = "1.44.0")] +#[stable(feature = "iovec_send_sync", since = "1.44.0")] unsafe impl<'a> Sync for IoSliceMut<'a> {} #[stable(feature = "iovec", since = "1.36.0")] @@ -1284,10 +1283,10 @@ #[repr(transparent)] pub struct IoSlice<'a>(sys::io::IoSlice<'a>); -#[stable(feature = "iovec-send-sync", since = 
"1.44.0")] +#[stable(feature = "iovec_send_sync", since = "1.44.0")] unsafe impl<'a> Send for IoSlice<'a> {} -#[stable(feature = "iovec-send-sync", since = "1.44.0")] +#[stable(feature = "iovec_send_sync", since = "1.44.0")] unsafe impl<'a> Sync for IoSlice<'a> {} #[stable(feature = "iovec", since = "1.36.0")] @@ -1830,6 +1829,7 @@ /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "IoSeek")] pub trait Seek { /// Seek to an offset, in bytes, in a stream. /// @@ -2777,23 +2777,55 @@ impl Iterator for Bytes { type Item = Result; + // Not `#[inline]`. This function gets inlined even without it, but having + // the inline annotation can result in worse code generation. See #116785. fn next(&mut self) -> Option> { - let mut byte = 0; - loop { - return match self.inner.read(slice::from_mut(&mut byte)) { - Ok(0) => None, - Ok(..) => Some(Ok(byte)), - Err(ref e) if e.is_interrupted() => continue, - Err(e) => Some(Err(e)), - }; - } + SpecReadByte::spec_read_byte(&mut self.inner) } + #[inline] fn size_hint(&self) -> (usize, Option) { SizeHint::size_hint(&self.inner) } } +/// For the specialization of `Bytes::next`. +trait SpecReadByte { + fn spec_read_byte(&mut self) -> Option>; +} + +impl SpecReadByte for R +where + Self: Read, +{ + #[inline] + default fn spec_read_byte(&mut self) -> Option> { + inlined_slow_read_byte(self) + } +} + +/// Read a single byte in a slow, generic way. This is used by the default +/// `spec_read_byte`. +#[inline] +fn inlined_slow_read_byte(reader: &mut R) -> Option> { + let mut byte = 0; + loop { + return match reader.read(slice::from_mut(&mut byte)) { + Ok(0) => None, + Ok(..) => Some(Ok(byte)), + Err(ref e) if e.is_interrupted() => continue, + Err(e) => Some(Err(e)), + }; + } +} + +// Used by `BufReader::spec_read_byte`, for which the `inline(ever)` is +// important. 
+#[inline(never)] +fn uninlined_slow_read_byte(reader: &mut R) -> Option> { + inlined_slow_read_byte(reader) +} + trait SizeHint { fn lower_bound(&self) -> usize; @@ -2893,6 +2925,7 @@ /// [`lines`]: BufRead::lines #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug)] +#[cfg_attr(not(test), rustc_diagnostic_item = "IoLines")] pub struct Lines { buf: B, } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/readbuf/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/readbuf/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/readbuf/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/readbuf/tests.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,175 +0,0 @@ -use super::BorrowedBuf; -use crate::mem::MaybeUninit; - -/// Test that BorrowedBuf has the correct numbers when created with new -#[test] -fn new() { - let buf: &mut [_] = &mut [0; 16]; - let mut rbuf: BorrowedBuf<'_> = buf.into(); - - assert_eq!(rbuf.filled().len(), 0); - assert_eq!(rbuf.init_len(), 16); - assert_eq!(rbuf.capacity(), 16); - assert_eq!(rbuf.unfilled().capacity(), 16); -} - -/// Test that BorrowedBuf has the correct numbers when created with uninit -#[test] -fn uninit() { - let buf: &mut [_] = &mut [MaybeUninit::uninit(); 16]; - let mut rbuf: BorrowedBuf<'_> = buf.into(); - - assert_eq!(rbuf.filled().len(), 0); - assert_eq!(rbuf.init_len(), 0); - assert_eq!(rbuf.capacity(), 16); - assert_eq!(rbuf.unfilled().capacity(), 16); -} - -#[test] -fn initialize_unfilled() { - let buf: &mut [_] = &mut [MaybeUninit::uninit(); 16]; - let mut rbuf: BorrowedBuf<'_> = buf.into(); - - rbuf.unfilled().ensure_init(); - - assert_eq!(rbuf.init_len(), 16); -} - -#[test] -fn advance_filled() { - let buf: &mut [_] = &mut [0; 16]; - let mut rbuf: BorrowedBuf<'_> = buf.into(); - - unsafe { - rbuf.unfilled().advance(1); - } - - assert_eq!(rbuf.filled().len(), 1); - assert_eq!(rbuf.unfilled().capacity(), 15); -} - -#[test] -fn clear() { - let buf: &mut [_] = &mut [255; 16]; - let mut rbuf: BorrowedBuf<'_> = buf.into(); - - unsafe { - rbuf.unfilled().advance(16); - } - - assert_eq!(rbuf.filled().len(), 16); - assert_eq!(rbuf.unfilled().capacity(), 0); - - rbuf.clear(); - - assert_eq!(rbuf.filled().len(), 0); - assert_eq!(rbuf.unfilled().capacity(), 16); - - assert_eq!(rbuf.unfilled().init_ref(), [255; 16]); -} - -#[test] -fn set_init() { - let buf: &mut [_] = &mut [MaybeUninit::uninit(); 16]; - let mut rbuf: BorrowedBuf<'_> = buf.into(); - - unsafe { - rbuf.set_init(8); - } - - assert_eq!(rbuf.init_len(), 8); - - unsafe { - rbuf.unfilled().advance(4); - } - - unsafe { - rbuf.set_init(2); - } - - assert_eq!(rbuf.init_len(), 8); - - unsafe { - rbuf.set_init(8); - } - - assert_eq!(rbuf.init_len(), 8); -} - -#[test] -fn append() { - let buf: &mut [_] = &mut [MaybeUninit::new(255); 16]; - let mut rbuf: BorrowedBuf<'_> = buf.into(); - - rbuf.unfilled().append(&[0; 8]); - - assert_eq!(rbuf.init_len(), 8); - assert_eq!(rbuf.filled().len(), 8); - assert_eq!(rbuf.filled(), [0; 8]); - - rbuf.clear(); - - rbuf.unfilled().append(&[1; 16]); - - assert_eq!(rbuf.init_len(), 16); - assert_eq!(rbuf.filled().len(), 16); - assert_eq!(rbuf.filled(), [1; 16]); -} - -#[test] -fn reborrow_written() { - let buf: &mut [_] = &mut [MaybeUninit::new(0); 32]; - let mut buf: BorrowedBuf<'_> = buf.into(); - - let mut cursor = buf.unfilled(); - cursor.append(&[1; 16]); - - let mut cursor2 = cursor.reborrow(); - cursor2.append(&[2; 16]); - - assert_eq!(cursor2.written(), 32); - 
assert_eq!(cursor.written(), 32); - - assert_eq!(buf.unfilled().written(), 0); - assert_eq!(buf.init_len(), 32); - assert_eq!(buf.filled().len(), 32); - let filled = buf.filled(); - assert_eq!(&filled[..16], [1; 16]); - assert_eq!(&filled[16..], [2; 16]); -} - -#[test] -fn cursor_set_init() { - let buf: &mut [_] = &mut [MaybeUninit::uninit(); 16]; - let mut rbuf: BorrowedBuf<'_> = buf.into(); - - unsafe { - rbuf.unfilled().set_init(8); - } - - assert_eq!(rbuf.init_len(), 8); - assert_eq!(rbuf.unfilled().init_ref().len(), 8); - assert_eq!(rbuf.unfilled().init_mut().len(), 8); - assert_eq!(rbuf.unfilled().uninit_mut().len(), 8); - assert_eq!(unsafe { rbuf.unfilled().as_mut() }.len(), 16); - - unsafe { - rbuf.unfilled().advance(4); - } - - unsafe { - rbuf.unfilled().set_init(2); - } - - assert_eq!(rbuf.init_len(), 8); - - unsafe { - rbuf.unfilled().set_init(8); - } - - assert_eq!(rbuf.init_len(), 12); - assert_eq!(rbuf.unfilled().init_ref().len(), 8); - assert_eq!(rbuf.unfilled().init_mut().len(), 8); - assert_eq!(rbuf.unfilled().uninit_mut().len(), 4); - assert_eq!(unsafe { rbuf.unfilled().as_mut() }.len(), 12); -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/readbuf.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/readbuf.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/readbuf.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/readbuf.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,317 +0,0 @@ -#![unstable(feature = "read_buf", issue = "78485")] - -#[cfg(test)] -mod tests; - -use crate::fmt::{self, Debug, Formatter}; -use crate::io::{Result, Write}; -use crate::mem::{self, MaybeUninit}; -use crate::{cmp, ptr}; - -/// A borrowed byte buffer which is incrementally filled and initialized. -/// -/// This type is a sort of "double cursor". It tracks three regions in the buffer: a region at the beginning of the -/// buffer that has been logically filled with data, a region that has been initialized at some point but not yet -/// logically filled, and a region at the end that is fully uninitialized. The filled region is guaranteed to be a -/// subset of the initialized region. -/// -/// In summary, the contents of the buffer can be visualized as: -/// ```not_rust -/// [ capacity ] -/// [ filled | unfilled ] -/// [ initialized | uninitialized ] -/// ``` -/// -/// A `BorrowedBuf` is created around some existing data (or capacity for data) via a unique reference -/// (`&mut`). The `BorrowedBuf` can be configured (e.g., using `clear` or `set_init`), but cannot be -/// directly written. To write into the buffer, use `unfilled` to create a `BorrowedCursor`. The cursor -/// has write-only access to the unfilled portion of the buffer (you can think of it as a -/// write-only iterator). -/// -/// The lifetime `'data` is a bound on the lifetime of the underlying data. -pub struct BorrowedBuf<'data> { - /// The buffer's underlying data. - buf: &'data mut [MaybeUninit], - /// The length of `self.buf` which is known to be filled. - filled: usize, - /// The length of `self.buf` which is known to be initialized. - init: usize, -} - -impl Debug for BorrowedBuf<'_> { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - f.debug_struct("BorrowedBuf") - .field("init", &self.init) - .field("filled", &self.filled) - .field("capacity", &self.capacity()) - .finish() - } -} - -/// Create a new `BorrowedBuf` from a fully initialized slice. 
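Editor's note: `readbuf.rs` and its tests are deleted from std in this release because `BorrowedBuf`/`BorrowedCursor` now live in `core::io` and are re-exported (see the `core_io_borrowed_buf` feature added in the `lib.rs` hunk later in this diff). A minimal, nightly-only sketch of how the API is used follows; it is not part of the diff, and the function name, buffer size, and input data are illustrative only.

```rust
// Nightly-only sketch (not part of this diff): filling a BorrowedBuf built over
// uninitialized storage via Read::read_buf. Only the std-local copy of the type
// is removed here; the API itself is unchanged.
#![feature(read_buf)]

use std::io::{BorrowedBuf, Read, Result};
use std::mem::MaybeUninit;

fn read_some<R: Read>(mut reader: R) -> Result<Vec<u8>> {
    let mut storage = [MaybeUninit::<u8>::uninit(); 64];
    let mut buf: BorrowedBuf<'_> = storage.as_mut_slice().into();
    // read_buf writes through a BorrowedCursor, so filled bytes are never
    // re-exposed and uninitialized bytes are never read.
    reader.read_buf(buf.unfilled())?;
    Ok(buf.filled().to_vec())
}

fn main() -> Result<()> {
    assert_eq!(read_some([1u8, 2, 3].as_slice())?, [1, 2, 3]);
    Ok(())
}
```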
-impl<'data> From<&'data mut [u8]> for BorrowedBuf<'data> { - #[inline] - fn from(slice: &'data mut [u8]) -> BorrowedBuf<'data> { - let len = slice.len(); - - BorrowedBuf { - // SAFETY: initialized data never becoming uninitialized is an invariant of BorrowedBuf - buf: unsafe { (slice as *mut [u8]).as_uninit_slice_mut().unwrap() }, - filled: 0, - init: len, - } - } -} - -/// Create a new `BorrowedBuf` from an uninitialized buffer. -/// -/// Use `set_init` if part of the buffer is known to be already initialized. -impl<'data> From<&'data mut [MaybeUninit]> for BorrowedBuf<'data> { - #[inline] - fn from(buf: &'data mut [MaybeUninit]) -> BorrowedBuf<'data> { - BorrowedBuf { buf, filled: 0, init: 0 } - } -} - -impl<'data> BorrowedBuf<'data> { - /// Returns the total capacity of the buffer. - #[inline] - pub fn capacity(&self) -> usize { - self.buf.len() - } - - /// Returns the length of the filled part of the buffer. - #[inline] - pub fn len(&self) -> usize { - self.filled - } - - /// Returns the length of the initialized part of the buffer. - #[inline] - pub fn init_len(&self) -> usize { - self.init - } - - /// Returns a shared reference to the filled portion of the buffer. - #[inline] - pub fn filled(&self) -> &[u8] { - // SAFETY: We only slice the filled part of the buffer, which is always valid - unsafe { MaybeUninit::slice_assume_init_ref(&self.buf[0..self.filled]) } - } - - /// Returns a mutable reference to the filled portion of the buffer. - #[inline] - pub fn filled_mut(&mut self) -> &mut [u8] { - // SAFETY: We only slice the filled part of the buffer, which is always valid - unsafe { MaybeUninit::slice_assume_init_mut(&mut self.buf[0..self.filled]) } - } - - /// Returns a cursor over the unfilled part of the buffer. - #[inline] - pub fn unfilled<'this>(&'this mut self) -> BorrowedCursor<'this> { - BorrowedCursor { - start: self.filled, - // SAFETY: we never assign into `BorrowedCursor::buf`, so treating its - // lifetime covariantly is safe. - buf: unsafe { - mem::transmute::<&'this mut BorrowedBuf<'data>, &'this mut BorrowedBuf<'this>>(self) - }, - } - } - - /// Clears the buffer, resetting the filled region to empty. - /// - /// The number of initialized bytes is not changed, and the contents of the buffer are not modified. - #[inline] - pub fn clear(&mut self) -> &mut Self { - self.filled = 0; - self - } - - /// Asserts that the first `n` bytes of the buffer are initialized. - /// - /// `BorrowedBuf` assumes that bytes are never de-initialized, so this method does nothing when called with fewer - /// bytes than are already known to be initialized. - /// - /// # Safety - /// - /// The caller must ensure that the first `n` unfilled bytes of the buffer have already been initialized. - #[inline] - pub unsafe fn set_init(&mut self, n: usize) -> &mut Self { - self.init = cmp::max(self.init, n); - self - } -} - -/// A writeable view of the unfilled portion of a [`BorrowedBuf`](BorrowedBuf). -/// -/// Provides access to the initialized and uninitialized parts of the underlying `BorrowedBuf`. -/// Data can be written directly to the cursor by using [`append`](BorrowedCursor::append) or -/// indirectly by getting a slice of part or all of the cursor and writing into the slice. In the -/// indirect case, the caller must call [`advance`](BorrowedCursor::advance) after writing to inform -/// the cursor how many bytes have been written. 
-/// -/// Once data is written to the cursor, it becomes part of the filled portion of the underlying -/// `BorrowedBuf` and can no longer be accessed or re-written by the cursor. I.e., the cursor tracks -/// the unfilled part of the underlying `BorrowedBuf`. -/// -/// The lifetime `'a` is a bound on the lifetime of the underlying buffer (which means it is a bound -/// on the data in that buffer by transitivity). -#[derive(Debug)] -pub struct BorrowedCursor<'a> { - /// The underlying buffer. - // Safety invariant: we treat the type of buf as covariant in the lifetime of `BorrowedBuf` when - // we create a `BorrowedCursor`. This is only safe if we never replace `buf` by assigning into - // it, so don't do that! - buf: &'a mut BorrowedBuf<'a>, - /// The length of the filled portion of the underlying buffer at the time of the cursor's - /// creation. - start: usize, -} - -impl<'a> BorrowedCursor<'a> { - /// Reborrow this cursor by cloning it with a smaller lifetime. - /// - /// Since a cursor maintains unique access to its underlying buffer, the borrowed cursor is - /// not accessible while the new cursor exists. - #[inline] - pub fn reborrow<'this>(&'this mut self) -> BorrowedCursor<'this> { - BorrowedCursor { - // SAFETY: we never assign into `BorrowedCursor::buf`, so treating its - // lifetime covariantly is safe. - buf: unsafe { - mem::transmute::<&'this mut BorrowedBuf<'a>, &'this mut BorrowedBuf<'this>>( - self.buf, - ) - }, - start: self.start, - } - } - - /// Returns the available space in the cursor. - #[inline] - pub fn capacity(&self) -> usize { - self.buf.capacity() - self.buf.filled - } - - /// Returns the number of bytes written to this cursor since it was created from a `BorrowedBuf`. - /// - /// Note that if this cursor is a reborrowed clone of another, then the count returned is the - /// count written via either cursor, not the count since the cursor was reborrowed. - #[inline] - pub fn written(&self) -> usize { - self.buf.filled - self.start - } - - /// Returns a shared reference to the initialized portion of the cursor. - #[inline] - pub fn init_ref(&self) -> &[u8] { - // SAFETY: We only slice the initialized part of the buffer, which is always valid - unsafe { MaybeUninit::slice_assume_init_ref(&self.buf.buf[self.buf.filled..self.buf.init]) } - } - - /// Returns a mutable reference to the initialized portion of the cursor. - #[inline] - pub fn init_mut(&mut self) -> &mut [u8] { - // SAFETY: We only slice the initialized part of the buffer, which is always valid - unsafe { - MaybeUninit::slice_assume_init_mut(&mut self.buf.buf[self.buf.filled..self.buf.init]) - } - } - - /// Returns a mutable reference to the uninitialized part of the cursor. - /// - /// It is safe to uninitialize any of these bytes. - #[inline] - pub fn uninit_mut(&mut self) -> &mut [MaybeUninit] { - &mut self.buf.buf[self.buf.init..] - } - - /// Returns a mutable reference to the whole cursor. - /// - /// # Safety - /// - /// The caller must not uninitialize any bytes in the initialized portion of the cursor. - #[inline] - pub unsafe fn as_mut(&mut self) -> &mut [MaybeUninit] { - &mut self.buf.buf[self.buf.filled..] - } - - /// Advance the cursor by asserting that `n` bytes have been filled. - /// - /// After advancing, the `n` bytes are no longer accessible via the cursor and can only be - /// accessed via the underlying buffer. I.e., the buffer's filled portion grows by `n` elements - /// and its unfilled portion (and the capacity of this cursor) shrinks by `n` elements. 
- /// - /// # Safety - /// - /// The caller must ensure that the first `n` bytes of the cursor have been properly - /// initialised. - #[inline] - pub unsafe fn advance(&mut self, n: usize) -> &mut Self { - self.buf.filled += n; - self.buf.init = cmp::max(self.buf.init, self.buf.filled); - self - } - - /// Initializes all bytes in the cursor. - #[inline] - pub fn ensure_init(&mut self) -> &mut Self { - let uninit = self.uninit_mut(); - // SAFETY: 0 is a valid value for MaybeUninit and the length matches the allocation - // since it is comes from a slice reference. - unsafe { - ptr::write_bytes(uninit.as_mut_ptr(), 0, uninit.len()); - } - self.buf.init = self.buf.capacity(); - - self - } - - /// Asserts that the first `n` unfilled bytes of the cursor are initialized. - /// - /// `BorrowedBuf` assumes that bytes are never de-initialized, so this method does nothing when - /// called with fewer bytes than are already known to be initialized. - /// - /// # Safety - /// - /// The caller must ensure that the first `n` bytes of the buffer have already been initialized. - #[inline] - pub unsafe fn set_init(&mut self, n: usize) -> &mut Self { - self.buf.init = cmp::max(self.buf.init, self.buf.filled + n); - self - } - - /// Appends data to the cursor, advancing position within its buffer. - /// - /// # Panics - /// - /// Panics if `self.capacity()` is less than `buf.len()`. - #[inline] - pub fn append(&mut self, buf: &[u8]) { - assert!(self.capacity() >= buf.len()); - - // SAFETY: we do not de-initialize any of the elements of the slice - unsafe { - MaybeUninit::write_slice(&mut self.as_mut()[..buf.len()], buf); - } - - // SAFETY: We just added the entire contents of buf to the filled section. - unsafe { - self.set_init(buf.len()); - } - self.buf.filled += buf.len(); - } -} - -impl<'a> Write for BorrowedCursor<'a> { - fn write(&mut self, buf: &[u8]) -> Result { - self.append(buf); - Ok(buf.len()) - } - - #[inline] - fn flush(&mut self) -> Result<()> { - Ok(()) - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/stdio.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/stdio.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/io/stdio.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/io/stdio.rs 2023-12-21 16:55:28.000000000 +0000 @@ -611,6 +611,7 @@ /// ``` #[must_use] #[stable(feature = "rust1", since = "1.0.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "io_stdout")] pub fn stdout() -> Stdout { Stdout { inner: STDOUT @@ -847,6 +848,7 @@ /// ``` #[must_use] #[stable(feature = "rust1", since = "1.0.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "io_stderr")] pub fn stderr() -> Stderr { // Note that unlike `stdout()` we don't use `at_exit` here to register a // destructor. 
Stderr is not buffered, so there's no need to run a diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -227,6 +227,7 @@ test(no_crate_inject, attr(deny(warnings))), test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))) )] +#![cfg_attr(not(bootstrap), doc(rust_logo))] #![doc(cfg_hide( not(test), not(any(test, bootstrap)), @@ -259,7 +260,7 @@ all(target_vendor = "fortanix", target_env = "sgx"), feature(slice_index_methods, coerce_unsized, sgx_platform) )] -#![cfg_attr(windows, feature(round_char_boundary))] +#![cfg_attr(any(windows, target_os = "uefi"), feature(round_char_boundary))] #![cfg_attr(target_os = "xous", feature(slice_ptr_len))] // // Language features: @@ -270,6 +271,7 @@ #![feature(allow_internal_unstable)] #![feature(c_unwind)] #![feature(cfg_target_thread_local)] +#![feature(cfi_encoding)] #![feature(concat_idents)] #![feature(const_mut_refs)] #![feature(const_trait_impl)] @@ -292,6 +294,7 @@ #![feature(needs_panic_runtime)] #![feature(negative_impls)] #![feature(never_type)] +#![feature(no_sanitize)] #![feature(platform_intrinsics)] #![feature(prelude_import)] #![feature(rustc_attrs)] @@ -307,6 +310,7 @@ // tidy-alphabetical-start #![feature(char_internals)] #![feature(core_intrinsics)] +#![feature(core_io_borrowed_buf)] #![feature(duration_constants)] #![feature(error_generic_member_access)] #![feature(error_in_core)] @@ -328,7 +332,6 @@ #![feature(panic_can_unwind)] #![feature(panic_info_message)] #![feature(panic_internals)] -#![feature(pointer_byte_offsets)] #![feature(pointer_is_aligned)] #![feature(portable_simd)] #![feature(prelude_2024)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/net/udp.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/net/udp.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/net/udp.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/net/udp.rs 2023-12-21 16:55:28.000000000 +0000 @@ -99,6 +99,16 @@ /// /// let socket = UdpSocket::bind("127.0.0.1:0").unwrap(); /// ``` + /// + /// Note that `bind` declares the scope of your network connection. + /// You can only receive datagrams from and send datagrams to + /// participants in that view of the network. + /// For instance, binding to a loopback address as in the example + /// above will prevent you from sending datagrams to another device + /// in your local network. + /// + /// In order to limit your view of the network the least, `bind` to + /// [`Ipv4Addr::UNSPECIFIED`] or [`Ipv6Addr::UNSPECIFIED`]. #[stable(feature = "rust1", since = "1.0.0")] pub fn bind(addr: A) -> io::Result { super::each_addr(addr, net_imp::UdpSocket::bind).map(UdpSocket) @@ -157,7 +167,9 @@ } /// Sends data on the socket to the given address. On success, returns the - /// number of bytes written. + /// number of bytes written. Note that the operating system may refuse + /// buffers larger than 65507. However, partial writes are not possible + /// until buffer sizes above `i32::MAX`. /// /// Address type can be any implementor of [`ToSocketAddrs`] trait. See its /// documentation for concrete examples. 
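Editor's note: the `udp.rs` hunks above add documentation about the scope implied by `bind` and about the practical 65507-byte datagram limit. A small sketch of that behaviour follows; the peer address, port, and sizes are illustrative only.

```rust
// Sketch of the behaviour the new UdpSocket docs describe: binding to the
// unspecified address keeps the widest network view, and oversized datagrams
// are refused by the OS rather than partially written on most platforms.
use std::net::{Ipv4Addr, UdpSocket};

fn main() -> std::io::Result<()> {
    // Bind to UNSPECIFIED so the socket is not limited to loopback peers.
    let socket = UdpSocket::bind((Ipv4Addr::UNSPECIFIED, 0))?;
    socket.send_to(b"ping", ("127.0.0.1", 9))?; // small datagrams are fine

    let too_big = vec![0u8; 70_000];
    // On most platforms this fails (e.g. "message too long") instead of
    // performing a partial write.
    assert!(socket.send_to(&too_big, ("127.0.0.1", 9)).is_err());
    Ok(())
}
```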
@@ -652,12 +664,19 @@ /// function of a UDP socket is not a useful thing to do: The OS will be /// unable to determine whether something is listening on the remote /// address without the application sending data. + /// + /// If your first `connect` is to a loopback address, subsequent + /// `connect`s to non-loopback addresses might fail, depending + /// on the platform. #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn connect(&self, addr: A) -> io::Result<()> { super::each_addr(addr, |addr| self.0.connect(addr)) } /// Sends data on the socket to the remote address to which it is connected. + /// On success, returns the number of bytes written. Note that the operating + /// system may refuse buffers larger than 65507. However, partial writes are + /// not possible until buffer sizes above `i32::MAX`. /// /// [`UdpSocket::connect`] will connect this socket to a remote address. This /// method will fail if the socket is not connected. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/aix/fs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/aix/fs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/aix/fs.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/aix/fs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,348 @@ +//! AIX specific extensions to primitives in the [`std::fs`] module. +//! +//! [`std::fs`]: crate::fs + +#![stable(feature = "metadata_ext", since = "1.1.0")] + +use crate::fs::Metadata; +use crate::sys_common::AsInner; + +/// OS-specific extensions to [`fs::Metadata`]. +/// +/// [`fs::Metadata`]: crate::fs::Metadata +#[stable(feature = "metadata_ext", since = "1.1.0")] +pub trait MetadataExt { + /// Returns the device ID on which this file resides. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_dev()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_dev(&self) -> u64; + /// Returns the inode number. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_ino()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_ino(&self) -> u64; + /// Returns the file type and mode. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_mode()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_mode(&self) -> u32; + /// Returns the number of hard links to file. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_nlink()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_nlink(&self) -> u64; + /// Returns the user ID of the file owner. 
+ /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_uid()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_uid(&self) -> u32; + /// Returns the group ID of the file owner. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_gid()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_gid(&self) -> u32; + /// Returns the device ID that this file represents. Only relevant for special file. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_rdev()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_rdev(&self) -> u64; + /// Returns the size of the file (if it is a regular file or a symbolic link) in bytes. + /// + /// The size of a symbolic link is the length of the pathname it contains, + /// without a terminating null byte. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_size()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_size(&self) -> u64; + /// Returns the last access time of the file, in seconds since Unix Epoch. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_atime()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_atime(&self) -> i64; + /// Returns the last access time of the file, in nanoseconds since [`st_atime`]. + /// + /// [`st_atime`]: Self::st_atime + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_atime_nsec()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_atime_nsec(&self) -> i64; + /// Returns the last modification time of the file, in seconds since Unix Epoch. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_mtime()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_mtime(&self) -> i64; + /// Returns the last modification time of the file, in nanoseconds since [`st_mtime`]. 
+ /// + /// [`st_mtime`]: Self::st_mtime + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_mtime_nsec()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_mtime_nsec(&self) -> i64; + /// Returns the last status change time of the file, in seconds since Unix Epoch. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_ctime()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_ctime(&self) -> i64; + /// Returns the last status change time of the file, in nanoseconds since [`st_ctime`]. + /// + /// [`st_ctime`]: Self::st_ctime + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_ctime_nsec()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_ctime_nsec(&self) -> i64; + /// Returns the "preferred" block size for efficient filesystem I/O. + /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_blksize()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_blksize(&self) -> u64; + /// Returns the number of blocks allocated to the file, 512-byte units. 
+ /// + /// # Examples + /// + /// ```no_run + /// use std::fs; + /// use std::io; + /// use std::os::aix::fs::MetadataExt; + /// + /// fn main() -> io::Result<()> { + /// let meta = fs::metadata("some_file")?; + /// println!("{}", meta.st_blocks()); + /// Ok(()) + /// } + /// ``` + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_blocks(&self) -> u64; +} + +#[stable(feature = "metadata_ext", since = "1.1.0")] +impl MetadataExt for Metadata { + fn st_dev(&self) -> u64 { + self.as_inner().as_inner().st_dev as u64 + } + fn st_ino(&self) -> u64 { + self.as_inner().as_inner().st_ino as u64 + } + fn st_mode(&self) -> u32 { + self.as_inner().as_inner().st_mode as u32 + } + fn st_nlink(&self) -> u64 { + self.as_inner().as_inner().st_nlink as u64 + } + fn st_uid(&self) -> u32 { + self.as_inner().as_inner().st_uid as u32 + } + fn st_gid(&self) -> u32 { + self.as_inner().as_inner().st_gid as u32 + } + fn st_rdev(&self) -> u64 { + self.as_inner().as_inner().st_rdev as u64 + } + fn st_size(&self) -> u64 { + self.as_inner().as_inner().st_size as u64 + } + fn st_atime(&self) -> i64 { + self.as_inner().as_inner().st_atime.tv_sec as i64 + } + fn st_atime_nsec(&self) -> i64 { + self.as_inner().as_inner().st_atime.tv_nsec as i64 + } + fn st_mtime(&self) -> i64 { + self.as_inner().as_inner().st_mtime.tv_sec as i64 + } + fn st_mtime_nsec(&self) -> i64 { + self.as_inner().as_inner().st_mtime.tv_nsec as i64 + } + fn st_ctime(&self) -> i64 { + self.as_inner().as_inner().st_ctime.tv_sec as i64 + } + fn st_ctime_nsec(&self) -> i64 { + self.as_inner().as_inner().st_ctime.tv_nsec as i64 + } + fn st_blksize(&self) -> u64 { + self.as_inner().as_inner().st_blksize as u64 + } + fn st_blocks(&self) -> u64 { + self.as_inner().as_inner().st_blocks as u64 + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/aix/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/aix/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/aix/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/aix/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,6 @@ +//! AIX specific definitions. + +#![stable(feature = "raw_ext", since = "1.1.0")] + +pub mod fs; +pub mod raw; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/aix/raw.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/aix/raw.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/aix/raw.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/aix/raw.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,9 @@ +//! AIX specific raw type definitions. + +#![stable(feature = "raw_ext", since = "1.1.0")] + +#[stable(feature = "pthread_t", since = "1.8.0")] +pub use libc::pthread_t; + +#[stable(feature = "raw_ext", since = "1.1.0")] +pub use libc::{blkcnt_t, blksize_t, dev_t, ino_t, mode_t, nlink_t, off_t, stat, time_t}; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/fd/owned.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/fd/owned.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/fd/owned.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/fd/owned.rs 2023-12-21 16:55:28.000000000 +0000 @@ -97,14 +97,14 @@ // We want to atomically duplicate this file descriptor and set the // CLOEXEC flag, and currently that's done via F_DUPFD_CLOEXEC. This // is a POSIX flag that was added to Linux in 2.6.24. 
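Editor's note: the `fd/owned.rs` change that follows extends the plain `F_DUPFD` fallback (previously ESP-IDF only) to the `vita` target. For context, a hedged sketch of the user-facing call this code path backs, `BorrowedFd::try_clone_to_owned`, is shown below; the file path is illustrative only.

```rust
// Unix-only sketch of the API implemented by the hunk below: duplicating a
// descriptor with close-on-exec set (F_DUPFD_CLOEXEC where available, plain
// F_DUPFD on targets such as ESP-IDF and, with this change, Vita).
#[cfg(unix)]
fn main() -> std::io::Result<()> {
    use std::fs::File;
    use std::os::fd::{AsFd, OwnedFd};

    let file = File::open("/etc/hostname")?; // illustrative path
    let dup: OwnedFd = file.as_fd().try_clone_to_owned()?;
    drop(file); // the duplicate stays valid after the original is closed
    drop(dup);
    Ok(())
}

#[cfg(not(unix))]
fn main() {}
```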
- #[cfg(not(target_os = "espidf"))] + #[cfg(not(any(target_os = "espidf", target_os = "vita")))] let cmd = libc::F_DUPFD_CLOEXEC; // For ESP-IDF, F_DUPFD is used instead, because the CLOEXEC semantics // will never be supported, as this is a bare metal framework with // no capabilities for multi-process execution. While F_DUPFD is also // not supported yet, it might be (currently it returns ENOSYS). - #[cfg(target_os = "espidf")] + #[cfg(any(target_os = "espidf", target_os = "vita"))] let cmd = libc::F_DUPFD; // Avoid using file descriptors below 3 as they are used for stdio @@ -119,7 +119,7 @@ pub fn try_clone_to_owned(&self) -> crate::io::Result { Err(crate::io::const_io_error!( crate::io::ErrorKind::Unsupported, - "operation not supported on WASI yet", + "operation not supported on this platform", )) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/freebsd/fs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/freebsd/fs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/freebsd/fs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/freebsd/fs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -76,12 +76,7 @@ fn as_raw_stat(&self) -> &raw::stat { // The methods below use libc::stat, so they work fine when libc is built with FreeBSD 12 ABI. // This method would just return nonsense. - #[cfg(freebsd12)] panic!("as_raw_stat not supported with FreeBSD 12 ABI"); - #[cfg(not(freebsd12))] - unsafe { - &*(self.as_inner().as_inner() as *const libc::stat as *const raw::stat) - } } fn st_dev(&self) -> u64 { self.as_inner().as_inner().st_dev as u64 @@ -143,12 +138,7 @@ fn st_flags(&self) -> u32 { self.as_inner().as_inner().st_flags as u32 } - #[cfg(freebsd12)] fn st_lspare(&self) -> u32 { panic!("st_lspare not supported with FreeBSD 12 ABI"); } - #[cfg(not(freebsd12))] - fn st_lspare(&self) -> u32 { - self.as_inner().as_inner().st_lspare as u32 - } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/ios/fs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/ios/fs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/ios/fs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/ios/fs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -144,14 +144,14 @@ } /// OS-specific extensions to [`fs::FileTimes`]. -#[unstable(feature = "file_set_times", issue = "98245")] +#[stable(feature = "file_set_times", since = "1.75.0")] pub trait FileTimesExt: Sealed { /// Set the creation time of a file. 
- #[unstable(feature = "file_set_times", issue = "98245")] + #[stable(feature = "file_set_times", since = "1.75.0")] fn set_created(self, t: SystemTime) -> Self; } -#[unstable(feature = "file_set_times", issue = "98245")] +#[stable(feature = "file_set_times", since = "1.75.0")] impl FileTimesExt for fs::FileTimes { fn set_created(mut self, t: SystemTime) -> Self { self.as_inner_mut().set_created(t.into_inner()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/linux/fs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/linux/fs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/linux/fs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/linux/fs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -329,7 +329,14 @@ impl MetadataExt for Metadata { #[allow(deprecated)] fn as_raw_stat(&self) -> &raw::stat { - unsafe { &*(self.as_inner().as_inner() as *const libc::stat64 as *const raw::stat) } + #[cfg(target_env = "musl")] + unsafe { + &*(self.as_inner().as_inner() as *const libc::stat as *const raw::stat) + } + #[cfg(not(target_env = "musl"))] + unsafe { + &*(self.as_inner().as_inner() as *const libc::stat64 as *const raw::stat) + } } fn st_dev(&self) -> u64 { self.as_inner().as_inner().st_dev as u64 diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/macos/fs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/macos/fs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/macos/fs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/macos/fs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -150,14 +150,14 @@ } /// OS-specific extensions to [`fs::FileTimes`]. -#[unstable(feature = "file_set_times", issue = "98245")] +#[stable(feature = "file_set_times", since = "1.75.0")] pub trait FileTimesExt: Sealed { /// Set the creation time of a file. - #[unstable(feature = "file_set_times", issue = "98245")] + #[stable(feature = "file_set_times", since = "1.75.0")] fn set_created(self, t: SystemTime) -> Self; } -#[unstable(feature = "file_set_times", issue = "98245")] +#[stable(feature = "file_set_times", since = "1.75.0")] impl FileTimesExt for fs::FileTimes { fn set_created(mut self, t: SystemTime) -> Self { self.as_inner_mut().set_created(t.into_inner()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -97,6 +97,8 @@ pub mod windows; // Others. 
+#[cfg(target_os = "aix")] +pub mod aix; #[cfg(target_os = "android")] pub mod android; #[cfg(target_os = "dragonfly")] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/unix/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/unix/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/unix/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/unix/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -37,6 +37,8 @@ #[cfg(not(doc))] mod platform { + #[cfg(target_os = "aix")] + pub use crate::os::aix::*; #[cfg(target_os = "android")] pub use crate::os::android::*; #[cfg(target_os = "dragonfly")] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/watchos/fs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/watchos/fs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/watchos/fs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/watchos/fs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -144,14 +144,14 @@ } /// OS-specific extensions to [`fs::FileTimes`]. -#[unstable(feature = "file_set_times", issue = "98245")] +#[stable(feature = "file_set_times", since = "1.75.0")] pub trait FileTimesExt: Sealed { /// Set the creation time of a file. - #[unstable(feature = "file_set_times", issue = "98245")] + #[stable(feature = "file_set_times", since = "1.75.0")] fn set_created(self, t: SystemTime) -> Self; } -#[unstable(feature = "file_set_times", issue = "98245")] +#[stable(feature = "file_set_times", since = "1.75.0")] impl FileTimesExt for fs::FileTimes { fn set_created(mut self, t: SystemTime) -> Self { self.as_inner_mut().set_created(t.into_inner()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/windows/fs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/windows/fs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/os/windows/fs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/os/windows/fs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -528,14 +528,14 @@ } /// Windows-specific extensions to [`fs::FileTimes`]. -#[unstable(feature = "file_set_times", issue = "98245")] +#[stable(feature = "file_set_times", since = "1.75.0")] pub trait FileTimesExt: Sealed { /// Set the creation time of a file. - #[unstable(feature = "file_set_times", issue = "98245")] + #[stable(feature = "file_set_times", since = "1.75.0")] fn set_created(self, t: SystemTime) -> Self; } -#[unstable(feature = "file_set_times", issue = "98245")] +#[stable(feature = "file_set_times", since = "1.75.0")] impl FileTimesExt for fs::FileTimes { fn set_created(mut self, t: SystemTime) -> Self { self.as_inner_mut().set_created(t.into_inner()); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/panicking.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/panicking.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/panicking.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/panicking.rs 2023-12-21 16:55:28.000000000 +0000 @@ -295,12 +295,53 @@ #[cfg(not(test))] #[doc(hidden)] +#[cfg(feature = "panic_immediate_abort")] +#[unstable(feature = "update_panic_count", issue = "none")] +pub mod panic_count { + /// A reason for forcing an immediate abort on panic. 
+ #[derive(Debug)] + pub enum MustAbort { + AlwaysAbort, + PanicInHook, + } + + #[inline] + pub fn increase(run_panic_hook: bool) -> Option { + None + } + + #[inline] + pub fn finished_panic_hook() {} + + #[inline] + pub fn decrease() {} + + #[inline] + pub fn set_always_abort() {} + + // Disregards ALWAYS_ABORT_FLAG + #[inline] + #[must_use] + pub fn get_count() -> usize { + 0 + } + + #[must_use] + #[inline] + pub fn count_is_zero() -> bool { + true + } +} + +#[cfg(not(test))] +#[doc(hidden)] +#[cfg(not(feature = "panic_immediate_abort"))] #[unstable(feature = "update_panic_count", issue = "none")] pub mod panic_count { use crate::cell::Cell; use crate::sync::atomic::{AtomicUsize, Ordering}; - pub const ALWAYS_ABORT_FLAG: usize = 1 << (usize::BITS - 1); + const ALWAYS_ABORT_FLAG: usize = 1 << (usize::BITS - 1); /// A reason for forcing an immediate abort on panic. #[derive(Debug)] @@ -421,6 +462,13 @@ pub use realstd::rt::panic_count; /// Invoke a closure, capturing the cause of an unwinding panic if one occurs. +#[cfg(feature = "panic_immediate_abort")] +pub unsafe fn r#try R>(f: F) -> Result> { + Ok(f()) +} + +/// Invoke a closure, capturing the cause of an unwinding panic if one occurs. +#[cfg(not(feature = "panic_immediate_abort"))] pub unsafe fn r#try R>(f: F) -> Result> { union Data { f: ManuallyDrop, @@ -755,6 +803,7 @@ /// This is the entry point for `resume_unwind`. /// It just forwards the payload to the panic runtime. +#[cfg_attr(feature = "panic_immediate_abort", inline)] pub fn rust_panic_without_hook(payload: Box) -> ! { panic_count::increase(false); @@ -777,7 +826,16 @@ /// yer breakpoints. #[inline(never)] #[cfg_attr(not(test), rustc_std_internal_symbol)] +#[cfg(not(feature = "panic_immediate_abort"))] fn rust_panic(msg: &mut dyn PanicPayload) -> ! { let code = unsafe { __rust_start_panic(msg) }; rtabort!("failed to initiate panic, error {code}") } + +#[cfg_attr(not(test), rustc_std_internal_symbol)] +#[cfg(feature = "panic_immediate_abort")] +fn rust_panic(_: &mut dyn PanicPayload) -> ! { + unsafe { + crate::intrinsics::abort(); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/process.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/process.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/process.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/process.rs 2023-12-21 16:55:28.000000000 +0000 @@ -526,6 +526,7 @@ /// list_dir.status().expect("process failed to execute"); /// ``` #[stable(feature = "process", since = "1.0.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "Command")] pub struct Command { inner: imp::Command, } @@ -607,7 +608,7 @@ /// /// Note that the argument is not passed through a shell, but given /// literally to the program. This means that shell syntax like quotes, - /// escaped characters, word splitting, glob patterns, substitution, etc. + /// escaped characters, word splitting, glob patterns, variable substitution, etc. /// have no effect. /// /// # Examples @@ -637,7 +638,7 @@ /// /// Note that the arguments are not passed through a shell, but given /// literally to the program. This means that shell syntax like quotes, - /// escaped characters, word splitting, glob patterns, substitution, etc. + /// escaped characters, word splitting, glob patterns, variable substitution, etc. /// have no effect. /// /// # Examples @@ -1593,7 +1594,7 @@ pub struct ExitStatus(imp::ExitStatus); /// The default value is one which indicates successful completion. 
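Editor's note: the `process.rs` changes shown just below fix the stability attribute on `Default for ExitStatus` and add a `Default` impl for `ExitCode` equal to `ExitCode::SUCCESS` (stabilized in 1.75). A minimal sketch of the latter follows; the `run` helper is hypothetical.

```rust
// Minimal sketch of the `Default for ExitCode` impl added below: the default
// value is ExitCode::SUCCESS.
use std::process::ExitCode;

fn run() -> Result<(), String> {
    Ok(()) // illustrative work
}

fn main() -> ExitCode {
    match run() {
        Ok(()) => ExitCode::default(), // same as ExitCode::SUCCESS
        Err(msg) => {
            eprintln!("error: {msg}");
            ExitCode::FAILURE
        }
    }
}
```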
-#[stable(feature = "process-exitcode-default", since = "1.73.0")] +#[stable(feature = "process_exitstatus_default", since = "1.73.0")] impl Default for ExitStatus { fn default() -> Self { // Ideally this would be done by ExitCode::default().into() but that is complicated. @@ -1959,6 +1960,14 @@ } } +/// The default value is [`ExitCode::SUCCESS`] +#[stable(feature = "process_exitcode_default", since = "1.75.0")] +impl Default for ExitCode { + fn default() -> Self { + ExitCode::SUCCESS + } +} + #[stable(feature = "process_exitcode", since = "1.61.0")] impl From for ExitCode { /// Construct an `ExitCode` from an arbitrary u8 value. @@ -2196,6 +2205,7 @@ /// process::exit(0x0100); /// ``` #[stable(feature = "rust1", since = "1.0.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "process_exit")] pub fn exit(code: i32) -> ! { crate::rt::cleanup(); crate::sys::os::exit(code) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/rt.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/rt.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/rt.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/rt.rs 2023-12-21 16:55:28.000000000 +0000 @@ -155,6 +155,7 @@ } #[cfg(not(test))] +#[inline(never)] #[lang = "start"] fn lang_start( main: fn() -> T, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sync/once.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sync/once.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sync/once.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sync/once.rs 2023-12-21 16:55:28.000000000 +0000 @@ -125,7 +125,7 @@ /// /// # Panics /// - /// The closure `f` will only be executed once if this is called + /// The closure `f` will only be executed once even if this is called /// concurrently amongst many threads. If that closure panics, however, then /// it will *poison* this [`Once`] instance, causing all future invocations of /// `call_once` to also panic. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sync/once_lock.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sync/once_lock.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sync/once_lock.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sync/once_lock.rs 2023-12-21 16:55:28.000000000 +0000 @@ -126,11 +126,48 @@ #[inline] #[stable(feature = "once_cell", since = "1.70.0")] pub fn set(&self, value: T) -> Result<(), T> { + match self.try_insert(value) { + Ok(_) => Ok(()), + Err((_, value)) => Err(value), + } + } + + /// Sets the contents of this cell to `value` if the cell was empty, then + /// returns a reference to it. + /// + /// May block if another thread is currently attempting to initialize the cell. The cell is + /// guaranteed to contain a value when set returns, though not necessarily the one provided. + /// + /// Returns `Ok(&value)` if the cell was empty and `Err(¤t_value, value)` if it was full. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(once_cell_try_insert)] + /// + /// use std::sync::OnceLock; + /// + /// static CELL: OnceLock = OnceLock::new(); + /// + /// fn main() { + /// assert!(CELL.get().is_none()); + /// + /// std::thread::spawn(|| { + /// assert_eq!(CELL.try_insert(92), Ok(&92)); + /// }).join().unwrap(); + /// + /// assert_eq!(CELL.try_insert(62), Err((&92, 62))); + /// assert_eq!(CELL.get(), Some(&92)); + /// } + /// ``` + #[inline] + #[unstable(feature = "once_cell_try_insert", issue = "116693")] + pub fn try_insert(&self, value: T) -> Result<&T, (&T, T)> { let mut value = Some(value); - self.get_or_init(|| value.take().unwrap()); + let res = self.get_or_init(|| value.take().unwrap()); match value { - None => Ok(()), - Some(value) => Err(value), + None => Ok(res), + Some(value) => Err((res, value)), } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sync/rwlock.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sync/rwlock.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sync/rwlock.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sync/rwlock.rs 2023-12-21 16:55:28.000000000 +0000 @@ -380,7 +380,7 @@ /// /// If the lock is poisoned, it will remain poisoned until this function is called. This allows /// recovering from a poisoned state and marking that it has recovered. For example, if the - /// value is overwritten by a known-good value, then the mutex can be marked as un-poisoned. Or + /// value is overwritten by a known-good value, then the lock can be marked as un-poisoned. Or /// possibly, the value could be inspected to determine if it is in a consistent state, and if /// so the poison is removed. /// @@ -397,7 +397,7 @@ /// /// let _ = thread::spawn(move || { /// let _lock = c_lock.write().unwrap(); - /// panic!(); // the mutex gets poisoned + /// panic!(); // the lock gets poisoned /// }).join(); /// /// assert_eq!(lock.is_poisoned(), true); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/common/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/common/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/common/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/common/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -12,6 +12,7 @@ pub mod alloc; pub mod small_c_string; +#[allow(unused_imports)] pub mod thread_local; #[cfg(test)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/hermit/net.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/hermit/net.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/hermit/net.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/hermit/net.rs 2023-12-21 16:55:28.000000000 +0000 @@ -56,6 +56,12 @@ unimplemented!() } + pub fn connect(&self, addr: &SocketAddr) -> io::Result<()> { + let (addr, len) = addr.into_inner(); + cvt_r(|| unsafe { netc::connect(self.as_raw_fd(), addr.as_ptr(), len) })?; + Ok(()) + } + pub fn connect_timeout(&self, addr: &SocketAddr, timeout: Duration) -> io::Result<()> { self.set_nonblocking(true)?; let r = unsafe { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/hermit/thread_local_dtor.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/hermit/thread_local_dtor.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/hermit/thread_local_dtor.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/hermit/thread_local_dtor.rs 2023-12-21 
16:55:28.000000000 +0000 @@ -5,23 +5,25 @@ // The this solution works like the implementation of macOS and // doesn't additional OS support -use crate::mem; +use crate::cell::RefCell; #[thread_local] -static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); +static DTORS: RefCell> = RefCell::new(Vec::new()); pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { - let list = &mut DTORS; - list.push((t, dtor)); + match DTORS.try_borrow_mut() { + Ok(mut dtors) => dtors.push((t, dtor)), + Err(_) => rtabort!("global allocator may not use TLS"), + } } // every thread call this function to run through all possible destructors pub unsafe fn run_dtors() { - let mut list = mem::take(&mut DTORS); + let mut list = DTORS.take(); while !list.is_empty() { for (ptr, dtor) in list { dtor(ptr); } - list = mem::take(&mut DTORS); + list = DTORS.take(); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/personality/dwarf/eh.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/personality/dwarf/eh.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/personality/dwarf/eh.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/personality/dwarf/eh.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,7 @@ //! Parsing of GCC-style Language-Specific Data Area (LSDA) //! For details see: //! * +//! * //! * //! * //! * @@ -37,17 +38,19 @@ #[derive(Copy, Clone)] pub struct EHContext<'a> { - pub ip: usize, // Current instruction pointer - pub func_start: usize, // Address of the current function - pub get_text_start: &'a dyn Fn() -> usize, // Get address of the code section - pub get_data_start: &'a dyn Fn() -> usize, // Get address of the data section + pub ip: *const u8, // Current instruction pointer + pub func_start: *const u8, // Pointer to the current function + pub get_text_start: &'a dyn Fn() -> *const u8, // Get pointer to the code section + pub get_data_start: &'a dyn Fn() -> *const u8, // Get pointer to the data section } +/// Landing pad. +type LPad = *const u8; pub enum EHAction { None, - Cleanup(usize), - Catch(usize), - Filter(usize), + Cleanup(LPad), + Catch(LPad), + Filter(LPad), Terminate, } @@ -81,22 +84,24 @@ let ip = context.ip; if !USING_SJLJ_EXCEPTIONS { + // read the callsite table while reader.ptr < action_table { - let cs_start = read_encoded_pointer(&mut reader, context, call_site_encoding)?; - let cs_len = read_encoded_pointer(&mut reader, context, call_site_encoding)?; - let cs_lpad = read_encoded_pointer(&mut reader, context, call_site_encoding)?; + // these are offsets rather than pointers; + let cs_start = read_encoded_offset(&mut reader, call_site_encoding)?; + let cs_len = read_encoded_offset(&mut reader, call_site_encoding)?; + let cs_lpad = read_encoded_offset(&mut reader, call_site_encoding)?; let cs_action_entry = reader.read_uleb128(); // Callsite table is sorted by cs_start, so if we've passed the ip, we // may stop searching. 
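Editor's note: the `eh.rs` changes above switch `EHContext` fields and landing-pad values from `usize` addresses to `*const u8`, and apply call-site table offsets with pointer arithmetic. The helper below is hypothetical (not from the diff) and only illustrates that pattern.

```rust
// Hypothetical helper showing the pattern the hunk adopts: keep code addresses
// as *const u8 and apply table offsets with wrapping_add, so pointer provenance
// is preserved instead of round-tripping through usize.
fn in_call_site(ip: *const u8, func_start: *const u8, cs_start: usize, cs_len: usize) -> bool {
    let start = func_start.wrapping_add(cs_start);
    let end = start.wrapping_add(cs_len);
    // Mirrors the diff's comparisons of `ip` against `func_start.wrapping_add(...)`.
    ip >= start && ip < end
}

fn main() {
    let code = [0u8; 64]; // stand-in for a function's code range
    let base = code.as_ptr();
    assert!(in_call_site(base.wrapping_add(10), base, 8, 16));
    assert!(!in_call_site(base.wrapping_add(40), base, 8, 16));
}
```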
- if ip < func_start + cs_start { + if ip < func_start.wrapping_add(cs_start) { break; } - if ip < func_start + cs_start + cs_len { + if ip < func_start.wrapping_add(cs_start + cs_len) { if cs_lpad == 0 { return Ok(EHAction::None); } else { - let lpad = lpad_base + cs_lpad; - return Ok(interpret_cs_action(action_table as *mut u8, cs_action_entry, lpad)); + let lpad = lpad_base.wrapping_add(cs_lpad); + return Ok(interpret_cs_action(action_table, cs_action_entry, lpad)); } } } @@ -106,12 +111,12 @@ // SjLj version: // The "IP" is an index into the call-site table, with two exceptions: // -1 means 'no-action', and 0 means 'terminate'. - match ip as isize { + match ip.addr() as isize { -1 => return Ok(EHAction::None), 0 => return Ok(EHAction::Terminate), _ => (), } - let mut idx = ip; + let mut idx = ip.addr(); loop { let cs_lpad = reader.read_uleb128(); let cs_action_entry = reader.read_uleb128(); @@ -119,17 +124,18 @@ if idx == 0 { // Can never have null landing pad for sjlj -- that would have // been indicated by a -1 call site index. - let lpad = (cs_lpad + 1) as usize; - return Ok(interpret_cs_action(action_table as *mut u8, cs_action_entry, lpad)); + // FIXME(strict provenance) + let lpad = ptr::from_exposed_addr((cs_lpad + 1) as usize); + return Ok(interpret_cs_action(action_table, cs_action_entry, lpad)); } } } } unsafe fn interpret_cs_action( - action_table: *mut u8, + action_table: *const u8, cs_action_entry: u64, - lpad: usize, + lpad: LPad, ) -> EHAction { if cs_action_entry == 0 { // If cs_action_entry is 0 then this is a cleanup (Drop::drop). We run these @@ -138,7 +144,7 @@ } else { // If lpad != 0 and cs_action_entry != 0, we have to check ttype_index. // If ttype_index == 0 under the condition, we take cleanup action. - let action_record = (action_table as *mut u8).offset(cs_action_entry as isize - 1); + let action_record = action_table.offset(cs_action_entry as isize - 1); let mut action_reader = DwarfReader::new(action_record); let ttype_index = action_reader.read_sleb128(); if ttype_index == 0 { @@ -157,22 +163,24 @@ if align.is_power_of_two() { Ok((unrounded + align - 1) & !(align - 1)) } else { Err(()) } } -unsafe fn read_encoded_pointer( - reader: &mut DwarfReader, - context: &EHContext<'_>, - encoding: u8, -) -> Result { - if encoding == DW_EH_PE_omit { +/// Read a offset (`usize`) from `reader` whose encoding is described by `encoding`. +/// +/// `encoding` must be a [DWARF Exception Header Encoding as described by the LSB spec][LSB-dwarf-ext]. +/// In addition the upper ("application") part must be zero. +/// +/// # Errors +/// Returns `Err` if `encoding` +/// * is not a valid DWARF Exception Header Encoding, +/// * is `DW_EH_PE_omit`, or +/// * has a non-zero application part. 
+/// +/// [LSB-dwarf-ext]: https://refspecs.linuxfoundation.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/dwarfext.html +unsafe fn read_encoded_offset(reader: &mut DwarfReader, encoding: u8) -> Result { + if encoding == DW_EH_PE_omit || encoding & 0xF0 != 0 { return Err(()); } - - // DW_EH_PE_aligned implies it's an absolute pointer value - if encoding == DW_EH_PE_aligned { - reader.ptr = reader.ptr.with_addr(round_up(reader.ptr.addr(), mem::size_of::())?); - return Ok(reader.read::()); - } - - let mut result = match encoding & 0x0F { + let result = match encoding & 0x0F { + // despite the name, LLVM also uses absptr for offsets instead of pointers DW_EH_PE_absptr => reader.read::(), DW_EH_PE_uleb128 => reader.read_uleb128() as usize, DW_EH_PE_udata2 => reader.read::() as usize, @@ -184,25 +192,66 @@ DW_EH_PE_sdata8 => reader.read::() as usize, _ => return Err(()), }; + Ok(result) +} + +/// Read a pointer from `reader` whose encoding is described by `encoding`. +/// +/// `encoding` must be a [DWARF Exception Header Encoding as described by the LSB spec][LSB-dwarf-ext]. +/// +/// # Errors +/// Returns `Err` if `encoding` +/// * is not a valid DWARF Exception Header Encoding, +/// * is `DW_EH_PE_omit`, or +/// * combines `DW_EH_PE_absptr` or `DW_EH_PE_aligned` application part with an integer encoding +/// (not `DW_EH_PE_absptr`) in the value format part. +/// +/// [LSB-dwarf-ext]: https://refspecs.linuxfoundation.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/dwarfext.html +unsafe fn read_encoded_pointer( + reader: &mut DwarfReader, + context: &EHContext<'_>, + encoding: u8, +) -> Result<*const u8, ()> { + if encoding == DW_EH_PE_omit { + return Err(()); + } - result += match encoding & 0x70 { - DW_EH_PE_absptr => 0, + let base_ptr = match encoding & 0x70 { + DW_EH_PE_absptr => core::ptr::null(), // relative to address of the encoded value, despite the name - DW_EH_PE_pcrel => reader.ptr.expose_addr(), + DW_EH_PE_pcrel => reader.ptr, DW_EH_PE_funcrel => { - if context.func_start == 0 { + if context.func_start.is_null() { return Err(()); } context.func_start } DW_EH_PE_textrel => (*context.get_text_start)(), DW_EH_PE_datarel => (*context.get_data_start)(), + // aligned means the value is aligned to the size of a pointer + DW_EH_PE_aligned => { + reader.ptr = + reader.ptr.with_addr(round_up(reader.ptr.addr(), mem::size_of::<*const u8>())?); + core::ptr::null() + } _ => return Err(()), }; + let mut ptr = if base_ptr.is_null() { + // any value encoding other than absptr would be nonsensical here; + // there would be no source of pointer provenance + if encoding & 0x0F != DW_EH_PE_absptr { + return Err(()); + } + reader.read::<*const u8>() + } else { + let offset = read_encoded_offset(reader, encoding & 0x0F)?; + base_ptr.wrapping_add(offset) + }; + if encoding & DW_EH_PE_indirect != 0 { - result = *ptr::from_exposed_addr::(result); + ptr = *(ptr.cast::<*const u8>()); } - Ok(result) + Ok(ptr) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/personality/gcc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/personality/gcc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/personality/gcc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/personality/gcc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -38,7 +38,6 @@ use super::dwarf::eh::{self, EHAction, EHContext}; use crate::ffi::c_int; -use libc::uintptr_t; use unwind as uw; // Register ids were lifted from LLVM's TargetLowering::getExceptionPointerRegister() @@ -95,7 +94,7 @@ 
cfg_if::cfg_if! { if #[cfg(all(target_arch = "arm", not(target_os = "ios"), not(target_os = "tvos"), not(target_os = "watchos"), not(target_os = "netbsd")))] { // ARM EHABI personality routine. - // https://infocenter.arm.com/help/topic/com.arm.doc.ihi0038b/IHI0038B_ehabi.pdf + // https://web.archive.org/web/20190728160938/https://infocenter.arm.com/help/topic/com.arm.doc.ihi0038b/IHI0038B_ehabi.pdf // // iOS uses the default routine instead since it uses SjLj unwinding. #[lang = "eh_personality"] @@ -160,9 +159,9 @@ uw::_Unwind_SetGR( context, UNWIND_DATA_REG.0, - exception_object as uintptr_t, + exception_object as uw::_Unwind_Ptr, ); - uw::_Unwind_SetGR(context, UNWIND_DATA_REG.1, 0); + uw::_Unwind_SetGR(context, UNWIND_DATA_REG.1, core::ptr::null()); uw::_Unwind_SetIP(context, lpad); return uw::_URC_INSTALL_CONTEXT; } @@ -222,9 +221,9 @@ uw::_Unwind_SetGR( context, UNWIND_DATA_REG.0, - exception_object as uintptr_t, + exception_object.cast(), ); - uw::_Unwind_SetGR(context, UNWIND_DATA_REG.1, 0); + uw::_Unwind_SetGR(context, UNWIND_DATA_REG.1, core::ptr::null()); uw::_Unwind_SetIP(context, lpad); uw::_URC_INSTALL_CONTEXT } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/sgx/abi/usercalls/alloc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/sgx/abi/usercalls/alloc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/sgx/abi/usercalls/alloc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/sgx/abi/usercalls/alloc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,6 +3,8 @@ use crate::arch::asm; use crate::cell::UnsafeCell; use crate::cmp; +use crate::convert::TryInto; +use crate::intrinsics; use crate::mem; use crate::ops::{CoerceUnsized, Deref, DerefMut, Index, IndexMut}; use crate::ptr::{self, NonNull}; @@ -306,20 +308,35 @@ } } -// Split a memory region ptr..ptr + len into three parts: -// +--------+ -// | small0 | Chunk smaller than 8 bytes -// +--------+ -// | big | Chunk 8-byte aligned, and size a multiple of 8 bytes -// +--------+ -// | small1 | Chunk smaller than 8 bytes -// +--------+ -fn region_as_aligned_chunks(ptr: *const u8, len: usize) -> (usize, usize, usize) { - let small0_size = if ptr.is_aligned_to(8) { 0 } else { 8 - ptr.addr() % 8 }; - let small1_size = (len - small0_size) % 8; - let big_size = len - small0_size - small1_size; +/// Divide the slice `(ptr, len)` into three parts, where the middle part is +/// aligned to `u64`. +/// +/// The return values `(prefix_len, mid_len, suffix_len)` add back up to `len`. +/// The return values are such that the memory region `(ptr + prefix_len, +/// mid_len)` is the largest possible region where `ptr + prefix_len` is aligned +/// to `u64` and `mid_len` is a multiple of the byte size of `u64`. This means +/// that `prefix_len` and `suffix_len` are guaranteed to be less than the byte +/// size of `u64`, and that `(ptr, prefix_len)` and `(ptr + prefix_len + +/// mid_len, suffix_len)` don't straddle an alignment boundary. +// Standard Rust functions such as `<[u8]>::align_to::` and +// `<*const u8>::align_offset` aren't _guaranteed_ to compute the largest +// possible middle region, and as such can't be used. 
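Editor's note: the SGX helper documented above (and defined just below) returns `(prefix_len, mid_len, suffix_len)` with an 8-byte-aligned middle region. The standalone sketch below, with the hypothetical name `split`, restates that computation for illustration and checks the documented invariants.

```rust
// Illustration only (not the std code): the three lengths add back up to `len`,
// the middle region starts 8-byte aligned and is a multiple of 8 bytes, and the
// prefix and suffix are each shorter than 8 bytes.
fn split(ptr: *const u8, mut len: usize) -> (usize, usize, usize) {
    const Q: usize = core::mem::size_of::<u64>();
    let offset = ptr as usize % Q;
    let prefix = if offset > 0 { Q - offset } else { 0 };
    len = match len.checked_sub(prefix) {
        Some(rest) => rest,
        None => return (len, 0, 0), // region ends before the first aligned byte
    };
    let suffix = len % Q;
    (prefix, len - suffix, suffix)
}

fn main() {
    let buf = [0u8; 64];
    let ptr = buf[3..].as_ptr(); // arbitrary, possibly misaligned start
    let len = 29;
    let (p, m, s) = split(ptr, len);
    assert_eq!(p + m + s, len);
    assert_eq!((ptr as usize + p) % 8, 0);
    assert_eq!(m % 8, 0);
    assert!(p < 8 && s < 8);
}
```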
+fn u64_align_to_guaranteed(ptr: *const u8, mut len: usize) -> (usize, usize, usize) { + const QWORD_SIZE: usize = mem::size_of::(); + + let offset = ptr as usize % QWORD_SIZE; + + let prefix_len = if intrinsics::unlikely(offset > 0) { QWORD_SIZE - offset } else { 0 }; + + len = match len.checked_sub(prefix_len) { + Some(remaining_len) => remaining_len, + None => return (len, 0, 0), + }; + + let suffix_len = len % QWORD_SIZE; + len -= suffix_len; - (small0_size, big_size, small1_size) + (prefix_len, len, suffix_len) } unsafe fn copy_quadwords(src: *const u8, dst: *mut u8, len: usize) { @@ -352,7 +369,13 @@ /// - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00615.html /// - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/technical-documentation/processor-mmio-stale-data-vulnerabilities.html#inpage-nav-3-2-2 pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize) { - unsafe fn copy_bytewise_to_userspace(src: *const u8, dst: *mut u8, len: usize) { + /// Like `ptr::copy(src, dst, len)`, except it uses the Intel-recommended + /// instruction sequence for unaligned writes. + unsafe fn write_bytewise_to_userspace(src: *const u8, dst: *mut u8, len: usize) { + if intrinsics::likely(len == 0) { + return; + } + unsafe { let mut seg_sel: u16 = 0; for off in 0..len { @@ -380,41 +403,15 @@ assert!(!src.addr().overflowing_add(len).1); assert!(!dst.addr().overflowing_add(len).1); - if len < 8 { - // Can't align on 8 byte boundary: copy safely byte per byte - unsafe { - copy_bytewise_to_userspace(src, dst, len); - } - } else if len % 8 == 0 && dst.is_aligned_to(8) { - // Copying 8-byte aligned quadwords: copy quad word per quad word - unsafe { - copy_quadwords(src, dst, len); - } - } else { - // Split copies into three parts: - // +--------+ - // | small0 | Chunk smaller than 8 bytes - // +--------+ - // | big | Chunk 8-byte aligned, and size a multiple of 8 bytes - // +--------+ - // | small1 | Chunk smaller than 8 bytes - // +--------+ - let (small0_size, big_size, small1_size) = region_as_aligned_chunks(dst, len); - - unsafe { - // Copy small0 - copy_bytewise_to_userspace(src, dst, small0_size); - - // Copy big - let big_src = src.add(small0_size); - let big_dst = dst.add(small0_size); - copy_quadwords(big_src, big_dst, big_size); - - // Copy small1 - let small1_src = src.add(big_size + small0_size); - let small1_dst = dst.add(big_size + small0_size); - copy_bytewise_to_userspace(small1_src, small1_dst, small1_size); - } + unsafe { + let (len1, len2, len3) = u64_align_to_guaranteed(dst, len); + let (src1, dst1) = (src, dst); + let (src2, dst2) = (src1.add(len1), dst1.add(len1)); + let (src3, dst3) = (src2.add(len2), dst2.add(len2)); + + write_bytewise_to_userspace(src1, dst1, len1); + copy_quadwords(src2, dst2, len2); + write_bytewise_to_userspace(src3, dst3, len3); } } @@ -434,45 +431,33 @@ /// - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00657.html /// - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/advisory-guidance/stale-data-read-from-xapic.html pub(crate) unsafe fn copy_from_userspace(src: *const u8, dst: *mut u8, len: usize) { - // Copies memory region `src..src + len` to the enclave at `dst`. 
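Illustrative aside (not from the patch): the contract documented for u64_align_to_guaranteed above is easiest to check with concrete numbers. This sketch repeats the same arithmetic over a plain address (the names are mine, not the patch's) and asserts the documented invariants:

    /// Same splitting arithmetic as `u64_align_to_guaranteed`, but over a plain
    /// address so it can run anywhere (illustrative only).
    fn split_for_u64(addr: usize, len: usize) -> (usize, usize, usize) {
        const QWORD: usize = std::mem::size_of::<u64>();
        let misalignment = addr % QWORD;
        let prefix = if misalignment > 0 { QWORD - misalignment } else { 0 };
        // If the whole region is shorter than the prefix, everything is "prefix".
        let Some(rest) = len.checked_sub(prefix) else { return (len, 0, 0) };
        let suffix = rest % QWORD;
        (prefix, rest - suffix, suffix)
    }

    fn main() {
        let (prefix, mid, suffix) = split_for_u64(0x1003, 29);
        assert_eq!((prefix, mid, suffix), (5, 24, 0));
        // The three parts always add back up to the original length...
        assert_eq!(prefix + mid + suffix, 29);
        // ...and the middle region starts 8-byte aligned with an 8-byte-multiple size.
        assert_eq!((0x1003 + prefix) % 8, 0);
        assert_eq!(mid % 8, 0);
    }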
The source memory region - // is: - // - strictly less than 8 bytes in size and may be - // - located at a misaligned memory location - fn copy_misaligned_chunk_to_enclave(src: *const u8, dst: *mut u8, len: usize) { - let mut tmp_buff = [0u8; 16]; + /// Like `ptr::copy(src, dst, len)`, except it uses only u64-aligned reads. + /// + /// # Safety + /// The source memory region must not straddle an alignment boundary. + unsafe fn read_misaligned_from_userspace(src: *const u8, dst: *mut u8, len: usize) { + if intrinsics::likely(len == 0) { + return; + } unsafe { - // Compute an aligned memory region to read from - // +--------+ <-- aligned_src + aligned_len (8B-aligned) - // | pad1 | - // +--------+ <-- src + len (misaligned) - // | | - // | | - // | | - // +--------+ <-- src (misaligned) - // | pad0 | - // +--------+ <-- aligned_src (8B-aligned) - let pad0_size = src as usize % 8; - let aligned_src = src.sub(pad0_size); - - let pad1_size = 8 - (src.add(len) as usize % 8); - let aligned_len = pad0_size + len + pad1_size; - - debug_assert!(len < 8); - debug_assert_eq!(aligned_src as usize % 8, 0); - debug_assert_eq!(aligned_len % 8, 0); - debug_assert!(aligned_len <= 16); - - // Copy the aligned buffer to a temporary buffer - // Note: copying from a slightly different memory location is a bit odd. In this case it - // can't lead to page faults or inadvertent copying from the enclave as we only ensured - // that the `src` pointer is aligned at an 8 byte boundary. As pages are 4096 bytes - // aligned, `aligned_src` must be on the same page as `src`. A similar argument can be made - // for `src + len` - copy_quadwords(aligned_src as _, tmp_buff.as_mut_ptr(), aligned_len); - - // Copy the correct parts of the temporary buffer to the destination - ptr::copy(tmp_buff.as_ptr().add(pad0_size), dst, len); + let offset: usize; + let data: u64; + // doing a memory read that's potentially out of bounds for `src`, + // this isn't supported by Rust, so have to use assembly + asm!(" + movl {src:e}, {offset:e} + andl $7, {offset:e} + andq $-8, {src} + movq ({src}), {dst} + ", + src = inout(reg) src => _, + offset = out(reg) offset, + dst = out(reg) data, + options(nostack, att_syntax, readonly, pure) + ); + let data = data.to_le_bytes(); + ptr::copy_nonoverlapping(data.as_ptr().add(offset), dst, len); } } @@ -480,41 +465,19 @@ assert!(!dst.is_null()); assert!(is_user_range(src, len)); assert!(is_enclave_range(dst, len)); - assert!(!(src as usize).overflowing_add(len + 8).1); - assert!(!(dst as usize).overflowing_add(len + 8).1); - - if len < 8 { - copy_misaligned_chunk_to_enclave(src, dst, len); - } else if len % 8 == 0 && src as usize % 8 == 0 { - // Copying 8-byte aligned quadwords: copy quad word per quad word - unsafe { - copy_quadwords(src, dst, len); - } - } else { - // Split copies into three parts: - // +--------+ - // | small0 | Chunk smaller than 8 bytes - // +--------+ - // | big | Chunk 8-byte aligned, and size a multiple of 8 bytes - // +--------+ - // | small1 | Chunk smaller than 8 bytes - // +--------+ - let (small0_size, big_size, small1_size) = region_as_aligned_chunks(dst, len); - - unsafe { - // Copy small0 - copy_misaligned_chunk_to_enclave(src, dst, small0_size); + assert!(len < isize::MAX as usize); + assert!(!(src as usize).overflowing_add(len).1); + assert!(!(dst as usize).overflowing_add(len).1); - // Copy big - let big_src = src.add(small0_size); - let big_dst = dst.add(small0_size); - copy_quadwords(big_src, big_dst, big_size); - - // Copy small1 - let small1_src = src.add(big_size 
+ small0_size); - let small1_dst = dst.add(big_size + small0_size); - copy_misaligned_chunk_to_enclave(small1_src, small1_dst, small1_size); - } + unsafe { + let (len1, len2, len3) = u64_align_to_guaranteed(src, len); + let (src1, dst1) = (src, dst); + let (src2, dst2) = (src1.add(len1), dst1.add(len1)); + let (src3, dst3) = (src2.add(len2), dst2.add(len2)); + + read_misaligned_from_userspace(src1, dst1, len1); + copy_quadwords(src2, dst2, len2); + read_misaligned_from_userspace(src3, dst3, len3); } } @@ -609,9 +572,9 @@ /// Copies the value from user memory into enclave memory. pub fn to_enclave(&self) -> T { unsafe { - let mut data: T = mem::MaybeUninit::uninit().assume_init(); - copy_from_userspace(self.0.get() as _, &mut data as *mut T as _, mem::size_of::()); - data + let mut data = mem::MaybeUninit::uninit(); + copy_from_userspace(self.0.get() as _, data.as_mut_ptr() as _, mem::size_of::()); + data.assume_init() } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/sgx/waitqueue/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/sgx/waitqueue/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/sgx/waitqueue/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/sgx/waitqueue/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -18,6 +18,7 @@ use crate::num::NonZeroUsize; use crate::ops::{Deref, DerefMut}; +use crate::panic::{self, AssertUnwindSafe}; use crate::time::Duration; use super::abi::thread; @@ -147,7 +148,8 @@ /// Adds the calling thread to the `WaitVariable`'s wait queue, then wait /// until a wakeup event. /// - /// This function does not return until this thread has been awoken. + /// This function does not return until this thread has been awoken. When `before_wait` panics, + /// this function will abort. pub fn wait(mut guard: SpinMutexGuard<'_, WaitVariable>, before_wait: F) { // very unsafe: check requirements of UnsafeList::push unsafe { @@ -157,8 +159,13 @@ })); let entry = guard.queue.inner.push(&mut entry); drop(guard); - before_wait(); + if let Err(_e) = panic::catch_unwind(AssertUnwindSafe(|| before_wait())) { + rtabort!("Panic before wait on wakeup event") + } while !entry.lock().wake { + // `entry.wake` is only set in `notify_one` and `notify_all` functions. Both ensure + // the entry is removed from the queue _before_ setting this bool. There are no + // other references to `entry`. // don't panic, this would invalidate `entry` during unwinding let eventset = rtunwrap!(Ok, usercalls::wait(EV_UNPARK, WAIT_INDEFINITE)); rtassert!(eventset & EV_UNPARK == EV_UNPARK); @@ -169,6 +176,7 @@ /// Adds the calling thread to the `WaitVariable`'s wait queue, then wait /// until a wakeup event or timeout. If event was observed, returns true. /// If not, it will remove the calling thread from the wait queue. + /// When `before_wait` panics, this function will abort. pub fn wait_timeout( lock: &SpinMutex>, timeout: Duration, @@ -181,9 +189,13 @@ wake: false, })); let entry_lock = lock.lock().queue.inner.push(&mut entry); - before_wait(); + if let Err(_e) = panic::catch_unwind(AssertUnwindSafe(|| before_wait())) { + rtabort!("Panic before wait on wakeup event or timeout") + } usercalls::wait_timeout(EV_UNPARK, timeout, || entry_lock.lock().wake); - // acquire the wait queue's lock first to avoid deadlock. 
+ // acquire the wait queue's lock first to avoid deadlock + // and ensure no other function can simultaneously access the list + // (e.g., `notify_one` or `notify_all`) let mut guard = lock.lock(); let success = entry_lock.lock().wake; if !success { @@ -204,8 +216,8 @@ ) -> Result, SpinMutexGuard<'_, WaitVariable>> { // SAFETY: lifetime of the pop() return value is limited to the map // closure (The closure return value is 'static). The underlying - // stack frame won't be freed until after the WaitGuard created below - // is dropped. + // stack frame won't be freed until after the lock on the queue is released + // (i.e., `guard` is dropped). unsafe { let tcs = guard.queue.inner.pop().map(|entry| -> Tcs { let mut entry_guard = entry.lock(); @@ -231,7 +243,7 @@ ) -> Result, SpinMutexGuard<'_, WaitVariable>> { // SAFETY: lifetime of the pop() return values are limited to the // while loop body. The underlying stack frames won't be freed until - // after the WaitGuard created below is dropped. + // after the lock on the queue is released (i.e., `guard` is dropped). unsafe { let mut count = 0; while let Some(entry) = guard.queue.inner.pop() { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/solid/net.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/solid/net.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/solid/net.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/solid/net.rs 2023-12-21 16:55:28.000000000 +0000 @@ -233,12 +233,15 @@ } } + pub fn connect(&self, addr: &SocketAddr) -> io::Result<()> { + let (addr, len) = addr.into_inner(); + cvt(unsafe { netc::connect(self.0.raw(), addr.as_ptr(), len) })?; + Ok(()) + } + pub fn connect_timeout(&self, addr: &SocketAddr, timeout: Duration) -> io::Result<()> { self.set_nonblocking(true)?; - let r = unsafe { - let (addr, len) = addr.into_inner(); - cvt(netc::connect(self.0.raw(), addr.as_ptr(), len)) - }; + let r = self.connect(addr); self.set_nonblocking(false)?; match r { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/solid/thread_local_dtor.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/solid/thread_local_dtor.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/solid/thread_local_dtor.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/solid/thread_local_dtor.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,14 +4,13 @@ // Simplify dtor registration by using a list of destructors. 
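Illustrative aside (not from the patch): the waitqueue change above uses the usual pattern of running a caller-supplied callback under catch_unwind so a panic cannot unwind into code that must not observe a half-registered wait entry; std then aborts via rtabort!. A stand-alone sketch of the same pattern, with plain process::abort standing in for rtabort!:

    use std::panic::{self, AssertUnwindSafe};
    use std::process;

    /// Runs `before_wait`, turning any panic into a process abort instead of
    /// letting it unwind into code that must not see a half-finished state.
    fn run_abort_on_panic<F: FnOnce()>(before_wait: F) {
        if panic::catch_unwind(AssertUnwindSafe(before_wait)).is_err() {
            // std uses rtabort! here; plain code can only abort the process.
            process::abort();
        }
    }

    fn main() {
        run_abort_on_panic(|| println!("callback ran without panicking"));
    }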
use super::{abi, itron::task}; -use crate::cell::Cell; -use crate::mem; +use crate::cell::{Cell, RefCell}; #[thread_local] static REGISTERED: Cell = Cell::new(false); #[thread_local] -static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); +static DTORS: RefCell> = RefCell::new(Vec::new()); pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { if !REGISTERED.get() { @@ -22,18 +21,20 @@ REGISTERED.set(true); } - let list = unsafe { &mut DTORS }; - list.push((t, dtor)); + match DTORS.try_borrow_mut() { + Ok(mut dtors) => dtors.push((t, dtor)), + Err(_) => rtabort!("global allocator may not use TLS"), + } } pub unsafe fn run_dtors() { - let mut list = mem::take(unsafe { &mut DTORS }); + let mut list = DTORS.take(); while !list.is_empty() { for (ptr, dtor) in list { unsafe { dtor(ptr) }; } - list = mem::take(unsafe { &mut DTORS }); + list = DTORS.take(); } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/alloc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/alloc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/alloc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/alloc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,13 +1,17 @@ //! Global Allocator for UEFI. //! Uses [r-efi-alloc](https://crates.io/crates/r-efi-alloc) -use crate::alloc::{GlobalAlloc, Layout, System}; +use r_efi::protocols::loaded_image; -const MEMORY_TYPE: u32 = r_efi::efi::LOADER_DATA; +use crate::alloc::{GlobalAlloc, Layout, System}; +use crate::sync::OnceLock; +use crate::sys::uefi::helpers; #[stable(feature = "alloc_system_type", since = "1.28.0")] unsafe impl GlobalAlloc for System { unsafe fn alloc(&self, layout: Layout) -> *mut u8 { + static EFI_MEMORY_TYPE: OnceLock = OnceLock::new(); + // Return null pointer if boot services are not available if crate::os::uefi::env::boot_services().is_none() { return crate::ptr::null_mut(); @@ -15,8 +19,20 @@ // If boot services is valid then SystemTable is not null. let system_table = crate::os::uefi::env::system_table().as_ptr().cast(); + + // Each loaded image has an image handle that supports `EFI_LOADED_IMAGE_PROTOCOL`. Thus, this + // will never fail. + let mem_type = EFI_MEMORY_TYPE.get_or_init(|| { + let protocol = helpers::image_handle_protocol::( + loaded_image::PROTOCOL_GUID, + ) + .unwrap(); + // Gives allocations the memory type that the data sections were loaded as. 
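Illustrative aside (not from the patch): the SOLID thread-local-destructor change above replaces a `static mut` list with a RefCell, so a re-entrant registration (for example from a global allocator running during the push) is detected and aborts instead of being undefined behaviour. A portable sketch of the same idea using the thread_local! macro (std itself uses #[thread_local] statics and rtabort!):

    use std::cell::RefCell;

    thread_local! {
        // (data pointer, destructor) pairs registered by this thread.
        static DTORS: RefCell<Vec<(*mut u8, unsafe extern "C" fn(*mut u8))>> =
            RefCell::new(Vec::new());
    }

    pub fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) {
        DTORS.with(|dtors| match dtors.try_borrow_mut() {
            Ok(mut list) => list.push((t, dtor)),
            // A failed borrow means we were re-entered mid-push; std aborts here.
            Err(_) => std::process::abort(),
        });
    }

    pub fn run_dtors() {
        // Keep draining: a destructor may register further destructors.
        let mut list = DTORS.with(|d| d.take());
        while !list.is_empty() {
            for (ptr, dtor) in list {
                unsafe { dtor(ptr) };
            }
            list = DTORS.with(|d| d.take());
        }
    }

    fn main() {
        unsafe extern "C" fn noop(_: *mut u8) {}
        register_dtor(std::ptr::null_mut(), noop);
        run_dtors();
    }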
+ unsafe { (*protocol.as_ptr()).image_data_type } + }); + // The caller must ensure non-0 layout - unsafe { r_efi_alloc::raw::alloc(system_table, layout, MEMORY_TYPE) } + unsafe { r_efi_alloc::raw::alloc(system_table, layout, *mem_type) } } unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/args.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/args.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/args.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/args.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,158 @@ +use r_efi::protocols::loaded_image; + +use crate::env::current_exe; +use crate::ffi::OsString; +use crate::fmt; +use crate::iter::Iterator; +use crate::mem::size_of; +use crate::sys::uefi::helpers; +use crate::vec; + +pub struct Args { + parsed_args_list: vec::IntoIter, +} + +pub fn args() -> Args { + let lazy_current_exe = || Vec::from([current_exe().map(Into::into).unwrap_or_default()]); + + // Each loaded image has an image handle that supports `EFI_LOADED_IMAGE_PROTOCOL`. Thus, this + // will never fail. + let protocol = + helpers::image_handle_protocol::(loaded_image::PROTOCOL_GUID) + .unwrap(); + + let lp_size = unsafe { (*protocol.as_ptr()).load_options_size } as usize; + // Break if we are sure that it cannot be UTF-16 + if lp_size < size_of::() || lp_size % size_of::() != 0 { + return Args { parsed_args_list: lazy_current_exe().into_iter() }; + } + let lp_size = lp_size / size_of::(); + + let lp_cmd_line = unsafe { (*protocol.as_ptr()).load_options as *const u16 }; + if !lp_cmd_line.is_aligned() { + return Args { parsed_args_list: lazy_current_exe().into_iter() }; + } + let lp_cmd_line = unsafe { crate::slice::from_raw_parts(lp_cmd_line, lp_size) }; + + Args { + parsed_args_list: parse_lp_cmd_line(lp_cmd_line) + .unwrap_or_else(lazy_current_exe) + .into_iter(), + } +} + +impl fmt::Debug for Args { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.parsed_args_list.as_slice().fmt(f) + } +} + +impl Iterator for Args { + type Item = OsString; + + fn next(&mut self) -> Option { + self.parsed_args_list.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.parsed_args_list.size_hint() + } +} + +impl ExactSizeIterator for Args { + fn len(&self) -> usize { + self.parsed_args_list.len() + } +} + +impl DoubleEndedIterator for Args { + fn next_back(&mut self) -> Option { + self.parsed_args_list.next_back() + } +} + +/// Implements the UEFI command-line argument parsing algorithm. +/// +/// This implementation is based on what is defined in Section 3.4 of +/// [UEFI Shell Specification](https://uefi.org/sites/default/files/resources/UEFI_Shell_Spec_2_0.pdf) +/// +/// Return None in the following cases: +/// - Invalid UTF-16 (unpaired surrogate) +/// - Empty/improper arguments +fn parse_lp_cmd_line(code_units: &[u16]) -> Option> { + const QUOTE: char = '"'; + const SPACE: char = ' '; + const CARET: char = '^'; + const NULL: char = '\0'; + + let mut ret_val = Vec::new(); + let mut code_units_iter = char::decode_utf16(code_units.iter().cloned()).peekable(); + + // The executable name at the beginning is special. 
+ let mut in_quotes = false; + let mut cur = String::new(); + while let Some(w) = code_units_iter.next() { + let w = w.ok()?; + match w { + // break on NULL + NULL => break, + // A quote mark always toggles `in_quotes` no matter what because + // there are no escape characters when parsing the executable name. + QUOTE => in_quotes = !in_quotes, + // If not `in_quotes` then whitespace ends argv[0]. + SPACE if !in_quotes => break, + // In all other cases the code unit is taken literally. + _ => cur.push(w), + } + } + + // If exe name is missing, the cli args are invalid + if cur.is_empty() { + return None; + } + + ret_val.push(OsString::from(cur)); + // Skip whitespace. + while code_units_iter.next_if_eq(&Ok(SPACE)).is_some() {} + + // Parse the arguments according to these rules: + // * All code units are taken literally except space, quote and caret. + // * When not `in_quotes`, space separate arguments. Consecutive spaces are + // treated as a single separator. + // * A space `in_quotes` is taken literally. + // * A quote toggles `in_quotes` mode unless it's escaped. An escaped quote is taken literally. + // * A quote can be escaped if preceded by caret. + // * A caret can be escaped if preceded by caret. + let mut cur = String::new(); + let mut in_quotes = false; + while let Some(w) = code_units_iter.next() { + let w = w.ok()?; + match w { + // break on NULL + NULL => break, + // If not `in_quotes`, a space or tab ends the argument. + SPACE if !in_quotes => { + ret_val.push(OsString::from(&cur[..])); + cur.truncate(0); + + // Skip whitespace. + while code_units_iter.next_if_eq(&Ok(SPACE)).is_some() {} + } + // Caret can escape quotes or carets + CARET if in_quotes => { + if let Some(x) = code_units_iter.next() { + cur.push(x.ok()?); + } + } + // If quote then flip `in_quotes` + QUOTE => in_quotes = !in_quotes, + // Everything else is always taken literally. + _ => cur.push(w), + } + } + // Push the final argument, if any. + if !cur.is_empty() || in_quotes { + ret_val.push(OsString::from(cur)); + } + Some(ret_val) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/helpers.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/helpers.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/helpers.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/helpers.rs 2023-12-21 16:55:28.000000000 +0000 @@ -139,3 +139,10 @@ if r.is_error() { Err(crate::io::Error::from_raw_os_error(r.as_usize())) } else { Ok(()) } } + +/// Get the Protocol for current system handle. +/// Note: Some protocols need to be manually freed. It is the callers responsibility to do so. +pub(crate) fn image_handle_protocol(protocol_guid: Guid) -> Option> { + let system_handle = uefi::env::try_image_handle()?; + open_protocol(system_handle, protocol_guid).ok() +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -13,7 +13,6 @@ //! 
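Illustrative aside (not from the patch): the parsing rules listed in the comments above are easiest to read off concrete inputs. A hypothetical test, assuming it sits in the same module so parse_lp_cmd_line is in scope:

    #[test]
    fn parse_lp_cmd_line_examples() {
        use crate::ffi::OsString;

        let to_utf16 = |s: &str| s.encode_utf16().collect::<Vec<u16>>();

        // Quotes group words; a caret escapes a quote only while inside quotes.
        let parsed = parse_lp_cmd_line(&to_utf16(r#"app.efi one "two words" "a^"b""#));
        let expected: Vec<OsString> =
            ["app.efi", "one", "two words", "a\"b"].iter().map(OsString::from).collect();
        assert_eq!(parsed, Some(expected));

        // A missing executable name makes the whole command line invalid.
        assert_eq!(parse_lp_cmd_line(&to_utf16("")), None);
    }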
[`OsString`]: crate::ffi::OsString pub mod alloc; -#[path = "../unsupported/args.rs"] pub mod args; #[path = "../unix/cmath.rs"] pub mod cmath; @@ -36,7 +35,6 @@ pub mod pipe; #[path = "../unsupported/process.rs"] pub mod process; -#[path = "../unsupported/stdio.rs"] pub mod stdio; #[path = "../unsupported/thread.rs"] pub mod thread; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/stdio.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/stdio.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/stdio.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/uefi/stdio.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,162 @@ +use crate::io; +use crate::iter::Iterator; +use crate::mem::MaybeUninit; +use crate::os::uefi; +use crate::ptr::NonNull; + +const MAX_BUFFER_SIZE: usize = 8192; + +pub struct Stdin; +pub struct Stdout; +pub struct Stderr; + +impl Stdin { + pub const fn new() -> Stdin { + Stdin + } +} + +impl io::Read for Stdin { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + let st: NonNull = uefi::env::system_table().cast(); + let stdin = unsafe { (*st.as_ptr()).con_in }; + + // Try reading any pending data + let inp = match read_key_stroke(stdin) { + Ok(x) => x, + Err(e) if e == r_efi::efi::Status::NOT_READY => { + // Wait for keypress for new data + wait_stdin(stdin)?; + read_key_stroke(stdin).map_err(|x| io::Error::from_raw_os_error(x.as_usize()))? + } + Err(e) => { + return Err(io::Error::from_raw_os_error(e.as_usize())); + } + }; + + // Check if the key is printiable character + if inp.scan_code != 0x00 { + return Err(io::const_io_error!(io::ErrorKind::Interrupted, "Special Key Press")); + } + + // SAFETY: Iterator will have only 1 character since we are reading only 1 Key + // SAFETY: This character will always be UCS-2 and thus no surrogates. 
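Illustrative aside (not from the patch): the two SAFETY comments above rely on the fact that a single UCS-2 code unit decodes to a scalar value of at most U+FFFF, which needs at most three bytes of UTF-8; that is also why STDIN_BUF_SIZE is 3 a little further down. A small stand-alone check:

    fn main() {
        // One UTF-16 unit, no surrogate pair: U+20AC '€'.
        let ch = char::decode_utf16([0x20ACu16]).next().unwrap().unwrap();
        assert_eq!(ch, '€');
        // Any UCS-2 character needs at most 3 bytes of UTF-8.
        assert_eq!(ch.len_utf8(), 3);

        let mut buf = [0u8; 3];
        ch.encode_utf8(&mut buf);
        assert_eq!(&buf[..], "€".as_bytes());
    }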
+ let ch: char = char::decode_utf16([inp.unicode_char]).next().unwrap().unwrap(); + if ch.len_utf8() > buf.len() { + return Ok(0); + } + + ch.encode_utf8(buf); + + Ok(ch.len_utf8()) + } +} + +impl Stdout { + pub const fn new() -> Stdout { + Stdout + } +} + +impl io::Write for Stdout { + fn write(&mut self, buf: &[u8]) -> io::Result { + let st: NonNull = uefi::env::system_table().cast(); + let stdout = unsafe { (*st.as_ptr()).con_out }; + + write(stdout, buf) + } + + fn flush(&mut self) -> io::Result<()> { + Ok(()) + } +} + +impl Stderr { + pub const fn new() -> Stderr { + Stderr + } +} + +impl io::Write for Stderr { + fn write(&mut self, buf: &[u8]) -> io::Result { + let st: NonNull = uefi::env::system_table().cast(); + let stderr = unsafe { (*st.as_ptr()).std_err }; + + write(stderr, buf) + } + + fn flush(&mut self) -> io::Result<()> { + Ok(()) + } +} + +// UCS-2 character should occupy 3 bytes at most in UTF-8 +pub const STDIN_BUF_SIZE: usize = 3; + +pub fn is_ebadf(_err: &io::Error) -> bool { + true +} + +pub fn panic_output() -> Option { + uefi::env::try_system_table().map(|_| Stderr::new()) +} + +fn write( + protocol: *mut r_efi::protocols::simple_text_output::Protocol, + buf: &[u8], +) -> io::Result { + let mut utf16 = [0; MAX_BUFFER_SIZE / 2]; + + // Get valid UTF-8 buffer + let utf8 = match crate::str::from_utf8(buf) { + Ok(x) => x, + Err(e) => unsafe { crate::str::from_utf8_unchecked(&buf[..e.valid_up_to()]) }, + }; + // Clip UTF-8 buffer to max UTF-16 buffer we support + let utf8 = &utf8[..utf8.floor_char_boundary(utf16.len() - 1)]; + + for (i, ch) in utf8.encode_utf16().enumerate() { + utf16[i] = ch; + } + + unsafe { simple_text_output(protocol, &mut utf16) }?; + + Ok(utf8.len()) +} + +unsafe fn simple_text_output( + protocol: *mut r_efi::protocols::simple_text_output::Protocol, + buf: &mut [u16], +) -> io::Result<()> { + let res = unsafe { ((*protocol).output_string)(protocol, buf.as_mut_ptr()) }; + if res.is_error() { Err(io::Error::from_raw_os_error(res.as_usize())) } else { Ok(()) } +} + +fn wait_stdin(stdin: *mut r_efi::protocols::simple_text_input::Protocol) -> io::Result<()> { + let boot_services: NonNull = + uefi::env::boot_services().unwrap().cast(); + let wait_for_event = unsafe { (*boot_services.as_ptr()).wait_for_event }; + let wait_for_key_event = unsafe { (*stdin).wait_for_key }; + + let r = { + let mut x: usize = 0; + (wait_for_event)(1, [wait_for_key_event].as_mut_ptr(), &mut x) + }; + if r.is_error() { Err(io::Error::from_raw_os_error(r.as_usize())) } else { Ok(()) } +} + +fn read_key_stroke( + stdin: *mut r_efi::protocols::simple_text_input::Protocol, +) -> Result { + let mut input_key: MaybeUninit = + MaybeUninit::uninit(); + + let r = unsafe { ((*stdin).read_key_stroke)(stdin, input_key.as_mut_ptr()) }; + + if r.is_error() { + Err(r) + } else { + let input_key = unsafe { input_key.assume_init() }; + Ok(input_key) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/args.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/args.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/args.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/args.rs 2023-12-21 16:55:28.000000000 +0000 @@ -70,6 +70,7 @@ target_os = "redox", target_os = "vxworks", target_os = "horizon", + target_os = "aix", target_os = "nto", target_os = "hurd", ))] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/env.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/env.rs --- 
rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/env.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/env.rs 2023-12-21 16:55:28.000000000 +0000 @@ -261,3 +261,14 @@ pub const EXE_SUFFIX: &str = ""; pub const EXE_EXTENSION: &str = ""; } + +#[cfg(target_os = "aix")] +pub mod os { + pub const FAMILY: &str = "unix"; + pub const OS: &str = "aix"; + pub const DLL_PREFIX: &str = "lib"; + pub const DLL_SUFFIX: &str = ".a"; + pub const DLL_EXTENSION: &str = "a"; + pub const EXE_SUFFIX: &str = ""; + pub const EXE_EXTENSION: &str = ""; +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/fd.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/fd.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/fd.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/fd.rs 2023-12-21 16:55:28.000000000 +0000 @@ -126,9 +126,17 @@ } pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result { - #[cfg(not(any(target_os = "linux", target_os = "android", target_os = "hurd")))] + #[cfg(not(any( + all(target_os = "linux", not(target_env = "musl")), + target_os = "android", + target_os = "hurd" + )))] use libc::pread as pread64; - #[cfg(any(target_os = "linux", target_os = "android", target_os = "hurd"))] + #[cfg(any( + all(target_os = "linux", not(target_env = "musl")), + target_os = "android", + target_os = "hurd" + ))] use libc::pread64; unsafe { @@ -285,9 +293,17 @@ } pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result { - #[cfg(not(any(target_os = "linux", target_os = "android", target_os = "hurd")))] + #[cfg(not(any( + all(target_os = "linux", not(target_env = "musl")), + target_os = "android", + target_os = "hurd" + )))] use libc::pwrite as pwrite64; - #[cfg(any(target_os = "linux", target_os = "android", target_os = "hurd"))] + #[cfg(any( + all(target_os = "linux", not(target_env = "musl")), + target_os = "android", + target_os = "hurd" + ))] use libc::pwrite64; unsafe { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/fs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/fs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/fs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/fs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -40,13 +40,17 @@ ))] use libc::c_char; #[cfg(any( - target_os = "linux", + all(target_os = "linux", not(target_env = "musl")), target_os = "emscripten", target_os = "android", - target_os = "hurd", + target_os = "hurd" ))] use libc::dirfd; -#[cfg(any(target_os = "linux", target_os = "emscripten", target_os = "hurd"))] +#[cfg(any( + all(target_os = "linux", not(target_env = "musl")), + target_os = "emscripten", + target_os = "hurd" +))] use libc::fstatat64; #[cfg(any( target_os = "android", @@ -54,11 +58,13 @@ target_os = "fuchsia", target_os = "redox", target_os = "illumos", + target_os = "aix", target_os = "nto", target_os = "vita", + all(target_os = "linux", target_env = "musl"), ))] use libc::readdir as readdir64; -#[cfg(any(target_os = "linux", target_os = "hurd"))] +#[cfg(any(all(target_os = "linux", not(target_env = "musl")), target_os = "hurd"))] use libc::readdir64; #[cfg(any(target_os = "emscripten", target_os = "l4re"))] use libc::readdir64_r; @@ -71,6 +77,7 @@ target_os = "l4re", target_os = "fuchsia", target_os = "redox", + target_os = "aix", target_os = "nto", target_os = "vita", target_os = "hurd", @@ -82,7 +89,7 @@ lstat as lstat64, 
off64_t, open as open64, stat as stat64, }; #[cfg(not(any( - target_os = "linux", + all(target_os = "linux", not(target_env = "musl")), target_os = "emscripten", target_os = "l4re", target_os = "android", @@ -93,7 +100,7 @@ lstat as lstat64, off_t as off64_t, open as open64, stat as stat64, }; #[cfg(any( - target_os = "linux", + all(target_os = "linux", not(target_env = "musl")), target_os = "emscripten", target_os = "l4re", target_os = "hurd" @@ -288,6 +295,7 @@ target_os = "illumos", target_os = "fuchsia", target_os = "redox", + target_os = "aix", target_os = "nto", target_os = "vita", target_os = "hurd", @@ -311,6 +319,7 @@ target_os = "illumos", target_os = "fuchsia", target_os = "redox", + target_os = "aix", target_os = "nto", target_os = "vita", target_os = "hurd", @@ -320,8 +329,9 @@ #[cfg(not(any( target_os = "solaris", target_os = "illumos", + target_os = "aix", target_os = "nto", - target_os = "vita" + target_os = "vita", )))] d_type: u8, } @@ -333,6 +343,7 @@ target_os = "illumos", target_os = "fuchsia", target_os = "redox", + target_os = "aix", target_os = "nto", target_os = "vita", target_os = "hurd", @@ -464,7 +475,22 @@ } } -#[cfg(not(any(target_os = "netbsd", target_os = "nto")))] +#[cfg(target_os = "aix")] +impl FileAttr { + pub fn modified(&self) -> io::Result { + Ok(SystemTime::new(self.stat.st_mtime.tv_sec as i64, self.stat.st_mtime.tv_nsec as i64)) + } + + pub fn accessed(&self) -> io::Result { + Ok(SystemTime::new(self.stat.st_atime.tv_sec as i64, self.stat.st_atime.tv_nsec as i64)) + } + + pub fn created(&self) -> io::Result { + Ok(SystemTime::new(self.stat.st_ctime.tv_sec as i64, self.stat.st_ctime.tv_nsec as i64)) + } +} + +#[cfg(not(any(target_os = "netbsd", target_os = "nto", target_os = "aix")))] impl FileAttr { #[cfg(not(any( target_os = "vxworks", @@ -671,6 +697,7 @@ target_os = "fuchsia", target_os = "redox", target_os = "illumos", + target_os = "aix", target_os = "nto", target_os = "vita", target_os = "hurd", @@ -748,6 +775,7 @@ #[cfg(not(any( target_os = "solaris", target_os = "illumos", + target_os = "aix", target_os = "nto", )))] d_type: *offset_ptr!(entry_ptr, d_type) as u8, @@ -772,6 +800,7 @@ target_os = "fuchsia", target_os = "redox", target_os = "illumos", + target_os = "aix", target_os = "nto", target_os = "vita", target_os = "hurd", @@ -829,10 +858,10 @@ #[cfg(all( any( - target_os = "linux", + all(target_os = "linux", not(target_env = "musl")), target_os = "emscripten", target_os = "android", - target_os = "hurd", + target_os = "hurd" ), not(miri) ))] @@ -858,7 +887,7 @@ #[cfg(any( not(any( - target_os = "linux", + all(target_os = "linux", not(target_env = "musl")), target_os = "emscripten", target_os = "android", target_os = "hurd", @@ -874,6 +903,7 @@ target_os = "illumos", target_os = "haiku", target_os = "vxworks", + target_os = "aix", target_os = "nto", target_os = "vita", ))] @@ -886,6 +916,7 @@ target_os = "illumos", target_os = "haiku", target_os = "vxworks", + target_os = "aix", target_os = "nto", target_os = "vita", )))] @@ -920,6 +951,7 @@ target_os = "espidf", target_os = "horizon", target_os = "vita", + target_os = "aix", target_os = "nto", target_os = "hurd", ))] @@ -977,6 +1009,7 @@ target_os = "illumos", target_os = "fuchsia", target_os = "redox", + target_os = "aix", target_os = "nto", target_os = "vita", target_os = "hurd", @@ -991,6 +1024,7 @@ target_os = "illumos", target_os = "fuchsia", target_os = "redox", + target_os = "aix", target_os = "nto", target_os = "vita", target_os = "hurd", @@ -1391,6 +1425,7 @@ } impl AsFd for 
File { + #[inline] fn as_fd(&self) -> BorrowedFd<'_> { self.0.as_fd() } @@ -2025,6 +2060,7 @@ target_os = "illumos", target_os = "haiku", target_os = "vxworks", + target_os = "aix", ))] fn is_dir(_ent: &DirEntry) -> Option { None @@ -2035,6 +2071,7 @@ target_os = "illumos", target_os = "haiku", target_os = "vxworks", + target_os = "aix", )))] fn is_dir(ent: &DirEntry) -> Option { match ent.entry.d_type { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -241,6 +241,7 @@ #[cfg(target_os = "android")] pub use crate::sys::android::signal; +#[allow(unused_imports)] #[cfg(not(target_os = "android"))] pub use libc::signal; @@ -278,6 +279,7 @@ libc::ENETUNREACH => NetworkUnreachable, libc::ENOTCONN => NotConnected, libc::ENOTDIR => NotADirectory, + #[cfg(not(target_os = "aix"))] libc::ENOTEMPTY => DirectoryNotEmpty, libc::EPIPE => BrokenPipe, libc::EROFS => ReadOnlyFilesystem, @@ -413,7 +415,6 @@ } else if #[cfg(any(target_os = "ios", target_os = "tvos", target_os = "watchos"))] { #[link(name = "System")] #[link(name = "objc")] - #[link(name = "Security", kind = "framework")] #[link(name = "Foundation", kind = "framework")] extern "C" {} } else if #[cfg(target_os = "fuchsia")] { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/net.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/net.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/net.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/net.rs 2023-12-21 16:55:28.000000000 +0000 @@ -6,6 +6,7 @@ use crate::os::unix::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, RawFd}; use crate::str; use crate::sys::fd::FileDesc; +use crate::sys::unix::IsMinusOne; use crate::sys_common::net::{getsockopt, setsockopt, sockaddr_to_addr}; use crate::sys_common::{AsInner, FromInner, IntoInner}; use crate::time::{Duration, Instant}; @@ -103,7 +104,7 @@ } } - #[cfg(not(any(target_os = "vxworks", target_os = "vita")))] + #[cfg(not(target_os = "vxworks"))] pub fn new_pair(fam: c_int, ty: c_int) -> io::Result<(Socket, Socket)> { unsafe { let mut fds = [0, 0]; @@ -135,11 +136,27 @@ } } - #[cfg(any(target_os = "vxworks", target_os = "vita"))] + #[cfg(target_os = "vxworks")] pub fn new_pair(_fam: c_int, _ty: c_int) -> io::Result<(Socket, Socket)> { unimplemented!() } + pub fn connect(&self, addr: &SocketAddr) -> io::Result<()> { + let (addr, len) = addr.into_inner(); + loop { + let result = unsafe { libc::connect(self.as_raw_fd(), addr.as_ptr(), len) }; + if result.is_minus_one() { + let err = crate::sys::os::errno(); + match err { + libc::EINTR => continue, + libc::EISCONN => return Ok(()), + _ => return Err(io::Error::from_raw_os_error(err)), + } + } + return Ok(()); + } + } + pub fn connect_timeout(&self, addr: &SocketAddr, timeout: Duration) -> io::Result<()> { self.set_nonblocking(true)?; let r = unsafe { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/os.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/os.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/os.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/os.rs 2023-12-21 16:55:28.000000000 +0000 @@ -74,6 +74,7 @@ link_name = "__error" 
)] #[cfg_attr(target_os = "haiku", link_name = "_errnop")] + #[cfg_attr(target_os = "aix", link_name = "_Errno")] fn errno_location() -> *mut c_int; } @@ -254,6 +255,41 @@ } } +#[cfg(target_os = "aix")] +pub fn current_exe() -> io::Result { + use crate::io::ErrorKind; + + #[cfg(test)] + use realstd::env; + + #[cfg(not(test))] + use crate::env; + + let exe_path = env::args().next().ok_or(io::const_io_error!( + ErrorKind::NotFound, + "an executable path was not found because no arguments were provided through argv" + ))?; + let path = PathBuf::from(exe_path); + if path.is_absolute() { + return path.canonicalize(); + } + // Search PWD to infer current_exe. + if let Some(pstr) = path.to_str() && pstr.contains("/") { + return getcwd().map(|cwd| cwd.join(path))?.canonicalize(); + } + // Search PATH to infer current_exe. + if let Some(p) = getenv(OsStr::from_bytes("PATH".as_bytes())) { + for search_path in split_paths(&p) { + let pb = search_path.join(&path); + if pb.is_file() && let Ok(metadata) = crate::fs::metadata(&pb) && + metadata.permissions().mode() & 0o111 != 0 { + return pb.canonicalize(); + } + } + } + Err(io::const_io_error!(ErrorKind::NotFound, "an executable path was not found")) +} + #[cfg(any(target_os = "freebsd", target_os = "dragonfly"))] pub fn current_exe() -> io::Result { unsafe { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,11 +1,13 @@ pub use self::process_common::{Command, CommandArgs, ExitCode, Stdio, StdioPipes}; pub use self::process_inner::{ExitStatus, ExitStatusError, Process}; pub use crate::ffi::OsString as EnvKey; -pub use crate::sys_common::process::CommandEnvs; #[cfg_attr(any(target_os = "espidf", target_os = "horizon"), allow(unused))] mod process_common; +#[cfg(any(target_os = "espidf", target_os = "horizon", target_os = "vita"))] +mod process_unsupported; + cfg_if::cfg_if! { if #[cfg(target_os = "fuchsia")] { #[path = "process_fuchsia.rs"] @@ -15,8 +17,9 @@ #[path = "process_vxworks.rs"] mod process_inner; } else if #[cfg(any(target_os = "espidf", target_os = "horizon", target_os = "vita"))] { - #[path = "process_unsupported.rs"] - mod process_inner; + mod process_inner { + pub use super::process_unsupported::*; + } } else { #[path = "process_unix.rs"] mod process_inner; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_common/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_common/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_common/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_common/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -159,3 +159,36 @@ ); } } + +// Test that Rust std handles wait status values (`ExitStatus`) the way that Unix does, +// at least for the values which represent a Unix exit status (`ExitCode`). +// Should work on every #[cfg(unix)] platform. However: +#[cfg(not(any( + // Fuchsia is not Unix and has totally broken std::os::unix. 
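Illustrative aside (not from the patch): the AIX current_exe added above resolves argv[0] in three steps: an absolute path is canonicalized directly, a relative path containing '/' is resolved against the current directory, and a bare name is searched for on PATH. A simplified, stable-Rust sketch of the same lookup (it omits the executable-permission check the real code performs):

    use std::env;
    use std::path::PathBuf;

    /// Simplified argv[0]-based executable lookup (illustrative only).
    fn resolve_argv0(argv0: &str) -> Option<PathBuf> {
        let path = PathBuf::from(argv0);
        if path.is_absolute() {
            return path.canonicalize().ok();
        }
        if argv0.contains('/') {
            // Relative to the current working directory.
            return env::current_dir().ok()?.join(path).canonicalize().ok();
        }
        // Bare name: search each PATH entry for a matching file.
        for dir in env::split_paths(&env::var_os("PATH")?) {
            let candidate = dir.join(&path);
            if candidate.is_file() {
                return candidate.canonicalize().ok();
            }
        }
        None
    }

    fn main() {
        println!("{:?}", resolve_argv0("ls"));
    }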
+ // https://github.com/rust-lang/rust/issues/58590#issuecomment-836535609 + target_os = "fuchsia", +)))] +#[test] +fn unix_exit_statuses() { + use crate::num::NonZeroI32; + use crate::os::unix::process::ExitStatusExt; + use crate::process::*; + + for exit_code in 0..=0xff { + // FIXME impl From for ExitStatus and then test that here too; + // the two ExitStatus values should be the same + let raw_wait_status = exit_code << 8; + let exit_status = ExitStatus::from_raw(raw_wait_status); + + assert_eq!(exit_status.code(), Some(exit_code)); + + if let Ok(nz) = NonZeroI32::try_from(exit_code) { + assert!(!exit_status.success()); + let es_error = exit_status.exit_ok().unwrap_err(); + assert_eq!(es_error.code().unwrap(), i32::from(nz)); + } else { + assert!(exit_status.success()); + assert_eq!(exit_status.exit_ok(), Ok(())); + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_common.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_common.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_common.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_common.rs 2023-12-21 16:55:28.000000000 +0000 @@ -75,6 +75,7 @@ return 0; } } else { + #[allow(unused_imports)] pub use libc::{sigemptyset, sigaddset}; } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unix.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unix.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unix.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unix.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1074,3 +1074,8 @@ #[cfg(test)] #[path = "process_unix/tests.rs"] mod tests; + +// See [`process_unsupported_wait_status::compare_with_linux`]; +#[cfg(all(test, target_os = "linux"))] +#[path = "process_unsupported/wait_status.rs"] +mod process_unsupported_wait_status; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported/wait_status/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported/wait_status/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported/wait_status/tests.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported/wait_status/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,36 @@ +// Note that tests in this file are run on Linux as well as on platforms using process_unsupported + +// Test that our emulation exactly matches Linux +// +// This test runs *on Linux* but it tests +// the implementation used on non-Unix `#[cfg(unix)]` platforms. +// +// I.e. we're using Linux as a proxy for "trad unix". +#[cfg(target_os = "linux")] +#[test] +fn compare_with_linux() { + use super::ExitStatus as Emulated; + use crate::os::unix::process::ExitStatusExt as _; + use crate::process::ExitStatus as Real; + + // Check that we handle out-of-range values similarly, too. + for wstatus in -0xf_ffff..0xf_ffff { + let emulated = Emulated::from(wstatus); + let real = Real::from_raw(wstatus); + + macro_rules! 
compare { { $method:ident } => { + assert_eq!( + emulated.$method(), + real.$method(), + "{wstatus:#x}.{}()", + stringify!($method), + ); + } } + compare!(code); + compare!(signal); + compare!(core_dumped); + compare!(stopped_signal); + compare!(continued); + compare!(into_raw); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported/wait_status.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported/wait_status.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported/wait_status.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported/wait_status.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,84 @@ +//! Emulated wait status for non-Unix #[cfg(unix) platforms +//! +//! Separate module to facilitate testing against a real Unix implementation. +use core::ffi::NonZero_c_int; + +use crate::ffi::c_int; +use crate::fmt; + +use super::ExitStatusError; + +/// Emulated wait status for use by `process_unsupported.rs` +/// +/// Uses the "traditional unix" encoding. For use on platfors which are `#[cfg(unix)]` +/// but do not actually support subprocesses at all. +/// +/// These platforms aren't Unix, but are simply pretending to be for porting convenience. +/// So, we provide a faithful pretence here. +#[derive(PartialEq, Eq, Clone, Copy, Debug, Default)] +pub struct ExitStatus { + wait_status: c_int, +} + +/// Converts a raw `c_int` to a type-safe `ExitStatus` by wrapping it +impl From for ExitStatus { + fn from(wait_status: c_int) -> ExitStatus { + ExitStatus { wait_status } + } +} + +impl fmt::Display for ExitStatus { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "emulated wait status: {}", self.wait_status) + } +} + +impl ExitStatus { + pub fn code(&self) -> Option { + // Linux and FreeBSD both agree that values linux 0x80 + // count as "WIFEXITED" even though this is quite mad. + // Likewise the macros disregard all the high bits, so are happy to declare + // out-of-range values to be WIFEXITED, WIFSTOPPED, etc. + let w = self.wait_status; + if (w & 0x7f) == 0 { Some((w & 0xff00) >> 8) } else { None } + } + + #[allow(unused)] + pub fn exit_ok(&self) -> Result<(), ExitStatusError> { + // This assumes that WIFEXITED(status) && WEXITSTATUS==0 corresponds to status==0. This is + // true on all actual versions of Unix, is widely assumed, and is specified in SuS + // https://pubs.opengroup.org/onlinepubs/9699919799/functions/wait.html. If it is not + // true for a platform pretending to be Unix, the tests (our doctests, and also + // process_unix/tests.rs) will spot it. `ExitStatusError::code` assumes this too. 
+ match NonZero_c_int::try_from(self.wait_status) { + /* was nonzero */ Ok(failure) => Err(ExitStatusError(failure)), + /* was zero, couldn't convert */ Err(_) => Ok(()), + } + } + + pub fn signal(&self) -> Option { + let signal = self.wait_status & 0x007f; + if signal > 0 && signal < 0x7f { Some(signal) } else { None } + } + + pub fn core_dumped(&self) -> bool { + self.signal().is_some() && (self.wait_status & 0x80) != 0 + } + + pub fn stopped_signal(&self) -> Option { + let w = self.wait_status; + if (w & 0xff) == 0x7f { Some((w & 0xff00) >> 8) } else { None } + } + + pub fn continued(&self) -> bool { + self.wait_status == 0xffff + } + + pub fn into_raw(&self) -> c_int { + self.wait_status + } +} + +#[cfg(test)] +#[path = "wait_status/tests.rs"] // needed because of strange layout of process_unsupported +mod tests; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/process/process_unsupported.rs 2023-12-21 16:55:28.000000000 +0000 @@ -55,68 +55,20 @@ } } -#[derive(PartialEq, Eq, Clone, Copy, Debug, Default)] -pub struct ExitStatus(c_int); - -impl ExitStatus { - #[cfg_attr(target_os = "horizon", allow(unused))] - pub fn success(&self) -> bool { - self.code() == Some(0) - } - - pub fn exit_ok(&self) -> Result<(), ExitStatusError> { - Err(ExitStatusError(1.try_into().unwrap())) - } - - pub fn code(&self) -> Option { - None - } - - pub fn signal(&self) -> Option { - None - } - - pub fn core_dumped(&self) -> bool { - false - } - - pub fn stopped_signal(&self) -> Option { - None - } - - pub fn continued(&self) -> bool { - false - } - - pub fn into_raw(&self) -> c_int { - 0 - } -} - -/// Converts a raw `c_int` to a type-safe `ExitStatus` by wrapping it without copying. 
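Illustrative aside (not from the patch): the emulated ExitStatus above uses the traditional Unix wait-status bit layout, and a few worked values make the bit tests concrete. A hypothetical companion test, assuming it lives next to the emulation like the tests module referenced above:

    #[test]
    fn wait_status_worked_examples() {
        use super::ExitStatus;

        // Normal exit: the exit code lives in bits 8..16 and the low 7 bits are 0.
        let exited = ExitStatus::from(3 << 8);
        assert_eq!(exited.code(), Some(3));
        assert_eq!(exited.signal(), None);

        // Killed by SIGKILL (9) with a core dump: the low 7 bits hold the signal,
        // bit 7 is the core-dump flag.
        let killed = ExitStatus::from(9 | 0x80);
        assert_eq!(killed.code(), None);
        assert_eq!(killed.signal(), Some(9));
        assert!(killed.core_dumped());

        // Stopped by signal 20 (SIGTSTP on Linux): low byte is 0x7f,
        // the stop signal sits in bits 8..16.
        let stopped = ExitStatus::from(20 << 8 | 0x7f);
        assert_eq!(stopped.stopped_signal(), Some(20));
    }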
-impl From for ExitStatus { - fn from(a: c_int) -> ExitStatus { - ExitStatus(a as i32) - } -} - -impl fmt::Display for ExitStatus { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "exit code: {}", self.0) - } -} +mod wait_status; +pub use wait_status::ExitStatus; #[derive(PartialEq, Eq, Clone, Copy, Debug)] pub struct ExitStatusError(NonZero_c_int); impl Into for ExitStatusError { fn into(self) -> ExitStatus { - ExitStatus(self.0.into()) + ExitStatus::from(c_int::from(self.0)) } } impl ExitStatusError { pub fn code(self) -> Option { - ExitStatus(self.0.into()).code().map(|st| st.try_into().unwrap()) + ExitStatus::from(c_int::from(self.0)).code().map(|st| st.try_into().unwrap()) } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/rand.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/rand.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/rand.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/rand.rs 2023-12-21 16:55:28.000000000 +0000 @@ -62,18 +62,15 @@ unsafe { getrandom(buf.as_mut_ptr().cast(), buf.len(), libc::GRND_NONBLOCK) } } - #[cfg(any(target_os = "espidf", target_os = "horizon"))] + #[cfg(any(target_os = "espidf", target_os = "horizon", target_os = "freebsd"))] fn getrandom(buf: &mut [u8]) -> libc::ssize_t { - unsafe { libc::getrandom(buf.as_mut_ptr().cast(), buf.len(), 0) } - } - - #[cfg(target_os = "freebsd")] - fn getrandom(buf: &mut [u8]) -> libc::ssize_t { - // FIXME: using the above when libary std's libc is updated + #[cfg(not(target_os = "freebsd"))] + use libc::getrandom; + #[cfg(target_os = "freebsd")] extern "C" { fn getrandom( - buffer: *mut libc::c_void, - length: libc::size_t, + buf: *mut libc::c_void, + buflen: libc::size_t, flags: libc::c_uint, ) -> libc::ssize_t; } @@ -154,40 +151,65 @@ } } -#[cfg(target_os = "macos")] +#[cfg(target_vendor = "apple")] mod imp { - use crate::fs::File; - use crate::io::Read; - use crate::sys::os::errno; - use crate::sys::weak::weak; + use crate::io; use libc::{c_int, c_void, size_t}; - fn getentropy_fill_bytes(v: &mut [u8]) -> bool { - weak!(fn getentropy(*mut c_void, size_t) -> c_int); + #[inline(always)] + fn random_failure() -> ! 
{ + panic!("unexpected random generation error: {}", io::Error::last_os_error()); + } - getentropy - .get() - .map(|f| { - // getentropy(2) permits a maximum buffer size of 256 bytes - for s in v.chunks_mut(256) { - let ret = unsafe { f(s.as_mut_ptr() as *mut c_void, s.len()) }; - if ret == -1 { - panic!("unexpected getentropy error: {}", errno()); - } - } - true - }) - .unwrap_or(false) + #[cfg(target_os = "macos")] + fn getentropy_fill_bytes(v: &mut [u8]) { + extern "C" { + fn getentropy(bytes: *mut c_void, count: size_t) -> c_int; + } + + // getentropy(2) permits a maximum buffer size of 256 bytes + for s in v.chunks_mut(256) { + let ret = unsafe { getentropy(s.as_mut_ptr().cast(), s.len()) }; + if ret == -1 { + random_failure() + } + } } - pub fn fill_bytes(v: &mut [u8]) { - if getentropy_fill_bytes(v) { - return; + #[cfg(not(target_os = "macos"))] + fn ccrandom_fill_bytes(v: &mut [u8]) { + extern "C" { + fn CCRandomGenerateBytes(bytes: *mut c_void, count: size_t) -> c_int; } - // for older macos which doesn't support getentropy - let mut file = File::open("/dev/urandom").expect("failed to open /dev/urandom"); - file.read_exact(v).expect("failed to read /dev/urandom") + let ret = unsafe { CCRandomGenerateBytes(v.as_mut_ptr().cast(), v.len()) }; + if ret == -1 { + random_failure() + } + } + + pub fn fill_bytes(v: &mut [u8]) { + // All supported versions of macOS (10.12+) support getentropy. + // + // `getentropy` is measurably faster (via Divan) then the other alternatives so its preferred + // when usable. + #[cfg(target_os = "macos")] + getentropy_fill_bytes(v); + + // On Apple platforms, `CCRandomGenerateBytes` and `SecRandomCopyBytes` simply + // call into `CCRandomCopyBytes` with `kCCRandomDefault`. `CCRandomCopyBytes` + // manages a CSPRNG which is seeded from the kernel's CSPRNG and which runs on + // its own thread accessed via GCD. This seems needlessly heavyweight for our purposes + // so we only use it on non-Mac OSes where the better entrypoints are blocked. + // + // `CCRandomGenerateBytes` is used instead of `SecRandomCopyBytes` because the former is accessible + // via `libSystem` (libc) while the other needs to link to `Security.framework`. + // + // Note that while `getentropy` has a available attribute in the macOS headers, the lack + // of a header in the iOS (and others) SDK means that its can cause app store rejections. + // Just use `CCRandomGenerateBytes` instead. + #[cfg(not(target_os = "macos"))] + ccrandom_fill_bytes(v); } } @@ -206,36 +228,7 @@ } } -// On iOS and MacOS `SecRandomCopyBytes` calls `CCRandomCopyBytes` with -// `kCCRandomDefault`. `CCRandomCopyBytes` manages a CSPRNG which is seeded -// from `/dev/random` and which runs on its own thread accessed via GCD. -// This seems needlessly heavyweight for the purposes of generating two u64s -// once per thread in `hashmap_random_keys`. Therefore `SecRandomCopyBytes` is -// only used on iOS where direct access to `/dev/urandom` is blocked by the -// sandbox. 
-#[cfg(any(target_os = "ios", target_os = "tvos", target_os = "watchos"))] -mod imp { - use crate::io; - use crate::ptr; - use libc::{c_int, size_t}; - - enum SecRandom {} - - #[allow(non_upper_case_globals)] - const kSecRandomDefault: *const SecRandom = ptr::null(); - - extern "C" { - fn SecRandomCopyBytes(rnd: *const SecRandom, count: size_t, bytes: *mut u8) -> c_int; - } - - pub fn fill_bytes(v: &mut [u8]) { - let ret = unsafe { SecRandomCopyBytes(kSecRandomDefault, v.len(), v.as_mut_ptr()) }; - if ret == -1 { - panic!("couldn't generate random bytes: {}", io::Error::last_os_error()); - } - } -} - +// FIXME: once the 10.x release becomes the minimum, this can be dropped for simplification. #[cfg(target_os = "netbsd")] mod imp { use crate::ptr; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/stack_overflow.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/stack_overflow.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/stack_overflow.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/stack_overflow.rs 2023-12-21 16:55:28.000000000 +0000 @@ -134,9 +134,19 @@ // OpenBSD requires this flag for stack mapping // otherwise the said mapping will fail as a no-op on most systems // and has a different meaning on FreeBSD - #[cfg(any(target_os = "openbsd", target_os = "netbsd", target_os = "linux",))] + #[cfg(any( + target_os = "openbsd", + target_os = "netbsd", + target_os = "linux", + target_os = "dragonfly", + ))] let flags = MAP_PRIVATE | MAP_ANON | libc::MAP_STACK; - #[cfg(not(any(target_os = "openbsd", target_os = "netbsd", target_os = "linux",)))] + #[cfg(not(any( + target_os = "openbsd", + target_os = "netbsd", + target_os = "linux", + target_os = "dragonfly", + )))] let flags = MAP_PRIVATE | MAP_ANON; let stackp = mmap64(ptr::null_mut(), SIGSTKSZ + page_size(), PROT_READ | PROT_WRITE, flags, -1, 0); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/thread.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/thread.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/thread.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/thread.rs 2023-12-21 16:55:28.000000000 +0000 @@ -207,7 +207,9 @@ pub fn set_name(name: &CStr) { unsafe { let thread_self = libc::find_thread(ptr::null_mut()); - libc::rename_thread(thread_self, name.as_ptr()); + let res = libc::rename_thread(thread_self, name.as_ptr()); + // We have no good way of propagating errors here, but in debug-builds let's check that this actually worked. + debug_assert_eq!(res, libc::B_OK); } } @@ -218,6 +220,7 @@ target_os = "redox", target_os = "vxworks", target_os = "hurd", + target_os = "aix", ))] pub fn set_name(_name: &CStr) { // Newlib, Emscripten, and VxWorks have no way to set a thread name. 
@@ -317,6 +320,7 @@ target_os = "macos", target_os = "solaris", target_os = "illumos", + target_os = "aix", ))] { #[allow(unused_assignments)] #[allow(unused_mut)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/thread_local_dtor.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/thread_local_dtor.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/thread_local_dtor.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/thread_local_dtor.rs 2023-12-21 16:55:28.000000000 +0000 @@ -11,28 +11,47 @@ // Note, however, that we run on lots older linuxes, as well as cross // compiling from a newer linux to an older linux, so we also have a // fallback implementation to use as well. +#[allow(unexpected_cfgs)] #[cfg(any(target_os = "linux", target_os = "fuchsia", target_os = "redox", target_os = "hurd"))] +// FIXME: The Rust compiler currently omits weakly function definitions (i.e., +// __cxa_thread_atexit_impl) and its metadata from LLVM IR. +#[no_sanitize(cfi, kcfi)] pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { use crate::mem; use crate::sys_common::thread_local_dtor::register_dtor_fallback; + /// This is necessary because the __cxa_thread_atexit_impl implementation + /// std links to by default may be a C or C++ implementation that was not + /// compiled using the Clang integer normalization option. + #[cfg(not(sanitizer_cfi_normalize_integers))] + #[cfi_encoding = "i"] + #[repr(transparent)] + pub struct c_int(pub libc::c_int); + extern "C" { #[linkage = "extern_weak"] static __dso_handle: *mut u8; #[linkage = "extern_weak"] - static __cxa_thread_atexit_impl: *const libc::c_void; + static __cxa_thread_atexit_impl: Option< + extern "C" fn( + unsafe extern "C" fn(*mut libc::c_void), + *mut libc::c_void, + *mut libc::c_void, + ) -> c_int, + >; } - if !__cxa_thread_atexit_impl.is_null() { - type F = unsafe extern "C" fn( - dtor: unsafe extern "C" fn(*mut u8), - arg: *mut u8, - dso_handle: *mut u8, - ) -> libc::c_int; - mem::transmute::<*const libc::c_void, F>(__cxa_thread_atexit_impl)( - dtor, - t, - &__dso_handle as *const _ as *mut _, - ); + + if let Some(f) = __cxa_thread_atexit_impl { + unsafe { + f( + mem::transmute::< + unsafe extern "C" fn(*mut u8), + unsafe extern "C" fn(*mut libc::c_void), + >(dtor), + t.cast(), + &__dso_handle as *const _ as *mut _, + ); + } return; } register_dtor_fallback(t, dtor); @@ -48,17 +67,16 @@ // workaround below is to register, via _tlv_atexit, a custom DTOR list once per // thread. thread_local dtors are pushed to the DTOR list without calling // _tlv_atexit. 
-#[cfg(any(target_os = "macos", target_os = "ios", target_os = "watchos"))] +#[cfg(any(target_os = "macos", target_os = "ios", target_os = "watchos", target_os = "tvos"))] pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { - use crate::cell::Cell; - use crate::mem; + use crate::cell::{Cell, RefCell}; use crate::ptr; #[thread_local] static REGISTERED: Cell = Cell::new(false); #[thread_local] - static mut DTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); + static DTORS: RefCell> = RefCell::new(Vec::new()); if !REGISTERED.get() { _tlv_atexit(run_dtors, ptr::null_mut()); @@ -69,21 +87,28 @@ fn _tlv_atexit(dtor: unsafe extern "C" fn(*mut u8), arg: *mut u8); } - let list = &mut DTORS; - list.push((t, dtor)); + match DTORS.try_borrow_mut() { + Ok(mut dtors) => dtors.push((t, dtor)), + Err(_) => rtabort!("global allocator may not use TLS"), + } unsafe extern "C" fn run_dtors(_: *mut u8) { - let mut list = mem::take(&mut DTORS); + let mut list = DTORS.take(); while !list.is_empty() { for (ptr, dtor) in list { dtor(ptr); } - list = mem::take(&mut DTORS); + list = DTORS.take(); } } } -#[cfg(any(target_os = "vxworks", target_os = "horizon", target_os = "emscripten"))] +#[cfg(any( + target_os = "vxworks", + target_os = "horizon", + target_os = "emscripten", + target_os = "aix" +))] #[cfg_attr(target_family = "wasm", allow(unused))] // might remain unused depending on target details (e.g. wasm32-unknown-emscripten) pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { use crate::sys_common::thread_local_dtor::register_dtor_fallback; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/time.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/time.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/time.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/unix/time.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,8 +1,6 @@ use crate::fmt; use crate::time::Duration; -pub use self::inner::Instant; - const NSEC_PER_SEC: u64 = 1_000_000_000; pub const UNIX_EPOCH: SystemTime = SystemTime { t: Timespec::zero() }; #[allow(dead_code)] // Used for pthread condvar timeouts @@ -40,6 +38,10 @@ SystemTime { t: Timespec::new(tv_sec, tv_nsec) } } + pub fn now() -> SystemTime { + SystemTime { t: Timespec::now(libc::CLOCK_REALTIME) } + } + pub fn sub_time(&self, other: &SystemTime) -> Result { self.t.sub_timespec(&other.t) } @@ -74,11 +76,65 @@ } const fn new(tv_sec: i64, tv_nsec: i64) -> Timespec { + // On Apple OS, dates before epoch are represented differently than on other + // Unix platforms: e.g. 1/10th of a second before epoch is represented as `seconds=-1` + // and `nanoseconds=100_000_000` on other platforms, but is `seconds=0` and + // `nanoseconds=-900_000_000` on Apple OS. + // + // To compensate, we first detect this special case by checking if both + // seconds and nanoseconds are in range, and then correct the value for seconds + // and nanoseconds to match the common unix representation. + // + // Please note that Apple OS nonetheless accepts the standard unix format when + // setting file times, which makes this compensation round-trippable and generally + // transparent. 
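A self-contained sketch (not part of the patch; the function name and example values are illustrative) of the compensation just described, which borrows one second so the nanoseconds field lands back in the conventional 0..1_000_000_000 range:

    fn normalize_apple_timespec(tv_sec: i64, tv_nsec: i64) -> (i64, i64) {
        // Only pre-epoch values with a negative nanoseconds field need correcting.
        if (tv_sec <= 0 && tv_sec > i64::MIN) && (tv_nsec < 0 && tv_nsec > -1_000_000_000) {
            (tv_sec - 1, tv_nsec + 1_000_000_000)
        } else {
            (tv_sec, tv_nsec)
        }
    }

    fn main() {
        // 0.9s before the epoch: Apple reports (0, -900_000_000); normalized, that is (-1, 100_000_000).
        assert_eq!(normalize_apple_timespec(0, -900_000_000), (-1, 100_000_000));
        // Times at or after the epoch pass through unchanged.
        assert_eq!(normalize_apple_timespec(1, 500_000_000), (1, 500_000_000));
    }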
+ #[cfg(any( + target_os = "macos", + target_os = "ios", + target_os = "tvos", + target_os = "watchos" + ))] + let (tv_sec, tv_nsec) = + if (tv_sec <= 0 && tv_sec > i64::MIN) && (tv_nsec < 0 && tv_nsec > -1_000_000_000) { + (tv_sec - 1, tv_nsec + 1_000_000_000) + } else { + (tv_sec, tv_nsec) + }; assert!(tv_nsec >= 0 && tv_nsec < NSEC_PER_SEC as i64); // SAFETY: The assert above checks tv_nsec is within the valid range Timespec { tv_sec, tv_nsec: unsafe { Nanoseconds(tv_nsec as u32) } } } + pub fn now(clock: libc::clockid_t) -> Timespec { + use crate::mem::MaybeUninit; + use crate::sys::cvt; + + // Try to use 64-bit time in preparation for Y2038. + #[cfg(all( + target_os = "linux", + target_env = "gnu", + target_pointer_width = "32", + not(target_arch = "riscv32") + ))] + { + use crate::sys::weak::weak; + + // __clock_gettime64 was added to 32-bit arches in glibc 2.34, + // and it handles both vDSO calls and ENOSYS fallbacks itself. + weak!(fn __clock_gettime64(libc::clockid_t, *mut __timespec64) -> libc::c_int); + + if let Some(clock_gettime64) = __clock_gettime64.get() { + let mut t = MaybeUninit::uninit(); + cvt(unsafe { clock_gettime64(clock, t.as_mut_ptr()) }).unwrap(); + return Timespec::from(unsafe { t.assume_init() }); + } + } + + let mut t = MaybeUninit::uninit(); + cvt(unsafe { libc::clock_gettime(clock, t.as_mut_ptr()) }).unwrap(); + Timespec::from(unsafe { t.assume_init() }) + } + pub fn sub_timespec(&self, other: &Timespec) -> Result { if self >= other { // NOTE(eddyb) two aspects of this `if`-`else` are required for LLVM @@ -216,209 +272,59 @@ } } -#[cfg(any( - all(target_os = "macos", any(not(target_arch = "aarch64"))), - target_os = "ios", - target_os = "watchos", - target_os = "tvos" -))] -mod inner { - use crate::sync::atomic::{AtomicU64, Ordering}; - use crate::sys::cvt; - use crate::sys_common::mul_div_u64; - use crate::time::Duration; - - use super::{SystemTime, Timespec, NSEC_PER_SEC}; - - #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] - pub struct Instant { - t: u64, - } - - #[repr(C)] - #[derive(Copy, Clone)] - struct mach_timebase_info { - numer: u32, - denom: u32, - } - type mach_timebase_info_t = *mut mach_timebase_info; - type kern_return_t = libc::c_int; - - impl Instant { - pub fn now() -> Instant { - extern "C" { - fn mach_absolute_time() -> u64; - } - Instant { t: unsafe { mach_absolute_time() } } - } - - pub fn checked_sub_instant(&self, other: &Instant) -> Option { - let diff = self.t.checked_sub(other.t)?; - let info = info(); - let nanos = mul_div_u64(diff, info.numer as u64, info.denom as u64); - Some(Duration::new(nanos / NSEC_PER_SEC, (nanos % NSEC_PER_SEC) as u32)) - } - - pub fn checked_add_duration(&self, other: &Duration) -> Option { - Some(Instant { t: self.t.checked_add(checked_dur2intervals(other)?)? }) - } - - pub fn checked_sub_duration(&self, other: &Duration) -> Option { - Some(Instant { t: self.t.checked_sub(checked_dur2intervals(other)?)? 
}) - } - } - - impl SystemTime { - pub fn now() -> SystemTime { - use crate::ptr; - - let mut s = libc::timeval { tv_sec: 0, tv_usec: 0 }; - cvt(unsafe { libc::gettimeofday(&mut s, ptr::null_mut()) }).unwrap(); - return SystemTime::from(s); - } - } - - impl From for Timespec { - fn from(t: libc::timeval) -> Timespec { - Timespec::new(t.tv_sec as i64, 1000 * t.tv_usec as i64) - } - } - - impl From for SystemTime { - fn from(t: libc::timeval) -> SystemTime { - SystemTime { t: Timespec::from(t) } - } - } - - fn checked_dur2intervals(dur: &Duration) -> Option { - let nanos = - dur.as_secs().checked_mul(NSEC_PER_SEC)?.checked_add(dur.subsec_nanos() as u64)?; - let info = info(); - Some(mul_div_u64(nanos, info.denom as u64, info.numer as u64)) - } - - fn info() -> mach_timebase_info { - // INFO_BITS conceptually is an `Option`. We can do - // this in 64 bits because we know 0 is never a valid value for the - // `denom` field. - // - // Encoding this as a single `AtomicU64` allows us to use `Relaxed` - // operations, as we are only interested in the effects on a single - // memory location. - static INFO_BITS: AtomicU64 = AtomicU64::new(0); - - // If a previous thread has initialized `INFO_BITS`, use it. - let info_bits = INFO_BITS.load(Ordering::Relaxed); - if info_bits != 0 { - return info_from_bits(info_bits); - } - - // ... otherwise learn for ourselves ... - extern "C" { - fn mach_timebase_info(info: mach_timebase_info_t) -> kern_return_t; - } - - let mut info = info_from_bits(0); - unsafe { - mach_timebase_info(&mut info); - } - INFO_BITS.store(info_to_bits(info), Ordering::Relaxed); - info - } - - #[inline] - fn info_to_bits(info: mach_timebase_info) -> u64 { - ((info.denom as u64) << 32) | (info.numer as u64) - } - - #[inline] - fn info_from_bits(bits: u64) -> mach_timebase_info { - mach_timebase_info { numer: bits as u32, denom: (bits >> 32) as u32 } - } +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Instant { + t: Timespec, } -#[cfg(not(any( - all(target_os = "macos", any(not(target_arch = "aarch64"))), - target_os = "ios", - target_os = "watchos", - target_os = "tvos" -)))] -mod inner { - use crate::fmt; - use crate::mem::MaybeUninit; - use crate::sys::cvt; - use crate::time::Duration; - - use super::{SystemTime, Timespec}; - - #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] - pub struct Instant { - t: Timespec, +impl Instant { + pub fn now() -> Instant { + // https://www.manpagez.com/man/3/clock_gettime/ + // + // CLOCK_UPTIME_RAW clock that increments monotonically, in the same man- + // ner as CLOCK_MONOTONIC_RAW, but that does not incre- + // ment while the system is asleep. The returned value + // is identical to the result of mach_absolute_time() + // after the appropriate mach_timebase conversion is + // applied. + // + // Instant on macos was historically implemented using mach_absolute_time; + // we preserve this value domain out of an abundance of caution. 
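A standalone sketch (not std code) of the clock selection described above: the unified `Instant::now` is just a `clock_gettime` call whose clock id differs per target. It assumes the `libc` crate exposes `CLOCK_UPTIME_RAW` on Apple targets and `CLOCK_MONOTONIC` elsewhere; the helper name is invented.

    fn raw_monotonic_now() -> libc::timespec {
        // CLOCK_UPTIME_RAW matches the old mach_absolute_time domain and does not advance while asleep.
        #[cfg(target_vendor = "apple")]
        const CLOCK_ID: libc::clockid_t = libc::CLOCK_UPTIME_RAW;
        #[cfg(not(target_vendor = "apple"))]
        const CLOCK_ID: libc::clockid_t = libc::CLOCK_MONOTONIC;

        let mut t: libc::timespec = unsafe { std::mem::zeroed() };
        let ret = unsafe { libc::clock_gettime(CLOCK_ID, &mut t) };
        assert_eq!(ret, 0, "clock_gettime failed: {}", std::io::Error::last_os_error());
        t
    }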
+ #[cfg(any( + target_os = "macos", + target_os = "ios", + target_os = "watchos", + target_os = "tvos" + ))] + const clock_id: libc::clockid_t = libc::CLOCK_UPTIME_RAW; + #[cfg(not(any( + target_os = "macos", + target_os = "ios", + target_os = "watchos", + target_os = "tvos" + )))] + const clock_id: libc::clockid_t = libc::CLOCK_MONOTONIC; + Instant { t: Timespec::now(clock_id) } } - impl Instant { - pub fn now() -> Instant { - #[cfg(target_os = "macos")] - const clock_id: libc::clockid_t = libc::CLOCK_UPTIME_RAW; - #[cfg(not(target_os = "macos"))] - const clock_id: libc::clockid_t = libc::CLOCK_MONOTONIC; - Instant { t: Timespec::now(clock_id) } - } - - pub fn checked_sub_instant(&self, other: &Instant) -> Option { - self.t.sub_timespec(&other.t).ok() - } - - pub fn checked_add_duration(&self, other: &Duration) -> Option { - Some(Instant { t: self.t.checked_add_duration(other)? }) - } - - pub fn checked_sub_duration(&self, other: &Duration) -> Option { - Some(Instant { t: self.t.checked_sub_duration(other)? }) - } + pub fn checked_sub_instant(&self, other: &Instant) -> Option { + self.t.sub_timespec(&other.t).ok() } - impl fmt::Debug for Instant { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Instant") - .field("tv_sec", &self.t.tv_sec) - .field("tv_nsec", &self.t.tv_nsec.0) - .finish() - } + pub fn checked_add_duration(&self, other: &Duration) -> Option { + Some(Instant { t: self.t.checked_add_duration(other)? }) } - impl SystemTime { - pub fn now() -> SystemTime { - SystemTime { t: Timespec::now(libc::CLOCK_REALTIME) } - } + pub fn checked_sub_duration(&self, other: &Duration) -> Option { + Some(Instant { t: self.t.checked_sub_duration(other)? }) } +} - impl Timespec { - pub fn now(clock: libc::clockid_t) -> Timespec { - // Try to use 64-bit time in preparation for Y2038. - #[cfg(all( - target_os = "linux", - target_env = "gnu", - target_pointer_width = "32", - not(target_arch = "riscv32") - ))] - { - use crate::sys::weak::weak; - - // __clock_gettime64 was added to 32-bit arches in glibc 2.34, - // and it handles both vDSO calls and ENOSYS fallbacks itself. - weak!(fn __clock_gettime64(libc::clockid_t, *mut super::__timespec64) -> libc::c_int); - - if let Some(clock_gettime64) = __clock_gettime64.get() { - let mut t = MaybeUninit::uninit(); - cvt(unsafe { clock_gettime64(clock, t.as_mut_ptr()) }).unwrap(); - return Timespec::from(unsafe { t.assume_init() }); - } - } - - let mut t = MaybeUninit::uninit(); - cvt(unsafe { libc::clock_gettime(clock, t.as_mut_ptr()) }).unwrap(); - Timespec::from(unsafe { t.assume_init() }) - } +impl fmt::Debug for Instant { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Instant") + .field("tv_sec", &self.t.tv_sec) + .field("tv_nsec", &self.t.tv_nsec.0) + .finish() } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/wasi/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/wasi/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/wasi/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/wasi/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -82,31 +82,99 @@ } pub fn decode_error_kind(errno: i32) -> std_io::ErrorKind { - use std_io::ErrorKind::*; - if errno > u16::MAX as i32 || errno < 0 { - return Uncategorized; + use std_io::ErrorKind; + + let Ok(errno) = u16::try_from(errno) else { + return ErrorKind::Uncategorized; + }; + + macro_rules! 
match_errno { + ($($($errno:ident)|+ => $errkind:ident),*, _ => $wildcard:ident $(,)?) => { + match errno { + $(e if $(e == ::wasi::$errno.raw())||+ => ErrorKind::$errkind),*, + _ => ErrorKind::$wildcard, + } + }; } - match errno { - e if e == wasi::ERRNO_CONNREFUSED.raw().into() => ConnectionRefused, - e if e == wasi::ERRNO_CONNRESET.raw().into() => ConnectionReset, - e if e == wasi::ERRNO_PERM.raw().into() || e == wasi::ERRNO_ACCES.raw().into() => { - PermissionDenied - } - e if e == wasi::ERRNO_PIPE.raw().into() => BrokenPipe, - e if e == wasi::ERRNO_NOTCONN.raw().into() => NotConnected, - e if e == wasi::ERRNO_CONNABORTED.raw().into() => ConnectionAborted, - e if e == wasi::ERRNO_ADDRNOTAVAIL.raw().into() => AddrNotAvailable, - e if e == wasi::ERRNO_ADDRINUSE.raw().into() => AddrInUse, - e if e == wasi::ERRNO_NOENT.raw().into() => NotFound, - e if e == wasi::ERRNO_INTR.raw().into() => Interrupted, - e if e == wasi::ERRNO_INVAL.raw().into() => InvalidInput, - e if e == wasi::ERRNO_TIMEDOUT.raw().into() => TimedOut, - e if e == wasi::ERRNO_EXIST.raw().into() => AlreadyExists, - e if e == wasi::ERRNO_AGAIN.raw().into() => WouldBlock, - e if e == wasi::ERRNO_NOSYS.raw().into() => Unsupported, - e if e == wasi::ERRNO_NOMEM.raw().into() => OutOfMemory, - _ => Uncategorized, + match_errno! { + ERRNO_2BIG => ArgumentListTooLong, + ERRNO_ACCES => PermissionDenied, + ERRNO_ADDRINUSE => AddrInUse, + ERRNO_ADDRNOTAVAIL => AddrNotAvailable, + ERRNO_AFNOSUPPORT => Unsupported, + ERRNO_AGAIN => WouldBlock, + // ALREADY => "connection already in progress", + // BADF => "bad file descriptor", + // BADMSG => "bad message", + ERRNO_BUSY => ResourceBusy, + // CANCELED => "operation canceled", + // CHILD => "no child processes", + ERRNO_CONNABORTED => ConnectionAborted, + ERRNO_CONNREFUSED => ConnectionRefused, + ERRNO_CONNRESET => ConnectionReset, + ERRNO_DEADLK => Deadlock, + // DESTADDRREQ => "destination address required", + ERRNO_DOM => InvalidInput, + // DQUOT => /* reserved */, + ERRNO_EXIST => AlreadyExists, + // FAULT => "bad address", + ERRNO_FBIG => FileTooLarge, + ERRNO_HOSTUNREACH => HostUnreachable, + // IDRM => "identifier removed", + // ILSEQ => "illegal byte sequence", + // INPROGRESS => "operation in progress", + ERRNO_INTR => Interrupted, + ERRNO_INVAL => InvalidInput, + ERRNO_IO => Uncategorized, + // ISCONN => "socket is connected", + ERRNO_ISDIR => IsADirectory, + ERRNO_LOOP => FilesystemLoop, + // MFILE => "file descriptor value too large", + ERRNO_MLINK => TooManyLinks, + // MSGSIZE => "message too large", + // MULTIHOP => /* reserved */, + ERRNO_NAMETOOLONG => InvalidFilename, + ERRNO_NETDOWN => NetworkDown, + // NETRESET => "connection aborted by network", + ERRNO_NETUNREACH => NetworkUnreachable, + // NFILE => "too many files open in system", + // NOBUFS => "no buffer space available", + ERRNO_NODEV => NotFound, + ERRNO_NOENT => NotFound, + // NOEXEC => "executable file format error", + // NOLCK => "no locks available", + // NOLINK => /* reserved */, + ERRNO_NOMEM => OutOfMemory, + // NOMSG => "no message of the desired type", + // NOPROTOOPT => "protocol not available", + ERRNO_NOSPC => StorageFull, + ERRNO_NOSYS => Unsupported, + ERRNO_NOTCONN => NotConnected, + ERRNO_NOTDIR => NotADirectory, + ERRNO_NOTEMPTY => DirectoryNotEmpty, + // NOTRECOVERABLE => "state not recoverable", + // NOTSOCK => "not a socket", + ERRNO_NOTSUP => Unsupported, + // NOTTY => "inappropriate I/O control operation", + ERRNO_NXIO => NotFound, + // OVERFLOW => "value too large to be stored in data type", 
+ // OWNERDEAD => "previous owner died", + ERRNO_PERM => PermissionDenied, + ERRNO_PIPE => BrokenPipe, + // PROTO => "protocol error", + ERRNO_PROTONOSUPPORT => Unsupported, + // PROTOTYPE => "protocol wrong type for socket", + // RANGE => "result too large", + ERRNO_ROFS => ReadOnlyFilesystem, + ERRNO_SPIPE => NotSeekable, + ERRNO_SRCH => NotFound, + // STALE => /* reserved */, + ERRNO_TIMEDOUT => TimedOut, + ERRNO_TXTBSY => ResourceBusy, + ERRNO_XDEV => CrossesDevices, + ERRNO_NOTCAPABLE => PermissionDenied, + _ => Uncategorized, } } @@ -124,6 +192,7 @@ return ret; } +#[inline] fn err2io(err: wasi::Errno) -> std_io::Error { std_io::Error::from_raw_os_error(err.raw().into()) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/api.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/api.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/api.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/api.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,157 @@ +//! # Safe(r) wrappers around Windows API functions. +//! +//! This module contains fairly thin wrappers around Windows API functions, +//! aimed at centralising safety instead of having unsafe blocks spread +//! throughout higher level code. This makes it much easier to audit FFI safety. +//! +//! Not all functions can be made completely safe without more context but in +//! such cases we should still endeavour to reduce the caller's burden of safety +//! as much as possible. +//! +//! ## Guidelines for wrappers +//! +//! Items here should be named similarly to their raw Windows API name, except +//! that they follow Rust's case conventions. E.g. function names are +//! lower_snake_case. The idea here is that it should be easy for a Windows +//! C/C++ programmer to identify the underlying function that's being wrapped +//! while not looking too out of place in Rust code. +//! +//! Every use of an `unsafe` block must have a related SAFETY comment, even if +//! it's trivially safe (for example, see `get_last_error`). Public unsafe +//! functions must document what the caller has to do to call them safely. +//! +//! Avoid unchecked `as` casts. For integers, either assert that the integer +//! is in range or use `try_into` instead. For pointers, prefer to use +//! `ptr.cast::()` when possible. +//! +//! This module must only depend on core and not on std types as the eventual +//! hope is to have std depend on sys and not the other way around. +//! However, some amount of glue code may currently be necessary so such code +//! should go in sys/windows/mod.rs rather than here. See `IoResult` as an example. + +use core::ffi::c_void; +use core::ptr::addr_of; + +use super::c; + +/// Helper method for getting the size of `T` as a u32. +/// Errors at compile time if the size would overflow. +/// +/// While a type larger than u32::MAX is unlikely, it is possible if only because of a bug. +/// However, one key motivation for this function is to avoid the temptation to +/// use frequent `as` casts. This is risky because they are too powerful. +/// For example, the following will compile today: +/// +/// `std::mem::size_of:: as u32` +/// +/// Note that `size_of` is never actually called, instead a function pointer is +/// converted to a `u32`. Clippy would warn about this but, alas, it's not run +/// on the standard library. +const fn win32_size_of() -> u32 { + // Const assert that the size is less than u32::MAX. 
+ // Uses a trait to workaround restriction on using generic types in inner items. + trait Win32SizeOf: Sized { + const WIN32_SIZE_OF: u32 = { + let size = core::mem::size_of::(); + assert!(size <= u32::MAX as usize); + size as u32 + }; + } + impl Win32SizeOf for T {} + + T::WIN32_SIZE_OF +} + +/// The `SetFileInformationByHandle` function takes a generic parameter by +/// making the user specify the type (class), a pointer to the data and its +/// size. This trait allows attaching that information to a Rust type so that +/// [`set_file_information_by_handle`] can be called safely. +/// +/// This trait is designed so that it can support variable sized types. +/// However, currently Rust's std only uses fixed sized structures. +/// +/// # Safety +/// +/// * `as_ptr` must return a pointer to memory that is readable up to `size` bytes. +/// * `CLASS` must accurately reflect the type pointed to by `as_ptr`. E.g. +/// the `FILE_BASIC_INFO` structure has the class `FileBasicInfo`. +pub unsafe trait SetFileInformation { + /// The type of information to set. + const CLASS: i32; + /// A pointer to the file information to set. + fn as_ptr(&self) -> *const c_void; + /// The size of the type pointed to by `as_ptr`. + fn size(&self) -> u32; +} +/// Helper trait for implementing `SetFileInformation` for statically sized types. +unsafe trait SizedSetFileInformation: Sized { + const CLASS: i32; +} +unsafe impl SetFileInformation for T { + const CLASS: i32 = T::CLASS; + fn as_ptr(&self) -> *const c_void { + addr_of!(*self).cast::() + } + fn size(&self) -> u32 { + win32_size_of::() + } +} + +// SAFETY: FILE_BASIC_INFO, FILE_END_OF_FILE_INFO, FILE_ALLOCATION_INFO, +// FILE_DISPOSITION_INFO, FILE_DISPOSITION_INFO_EX and FILE_IO_PRIORITY_HINT_INFO +// are all plain `repr(C)` structs that only contain primitive types. +// The given information classes correctly match with the struct. +unsafe impl SizedSetFileInformation for c::FILE_BASIC_INFO { + const CLASS: i32 = c::FileBasicInfo; +} +unsafe impl SizedSetFileInformation for c::FILE_END_OF_FILE_INFO { + const CLASS: i32 = c::FileEndOfFileInfo; +} +unsafe impl SizedSetFileInformation for c::FILE_ALLOCATION_INFO { + const CLASS: i32 = c::FileAllocationInfo; +} +unsafe impl SizedSetFileInformation for c::FILE_DISPOSITION_INFO { + const CLASS: i32 = c::FileDispositionInfo; +} +unsafe impl SizedSetFileInformation for c::FILE_DISPOSITION_INFO_EX { + const CLASS: i32 = c::FileDispositionInfoEx; +} +unsafe impl SizedSetFileInformation for c::FILE_IO_PRIORITY_HINT_INFO { + const CLASS: i32 = c::FileIoPriorityHintInfo; +} + +#[inline] +pub fn set_file_information_by_handle( + handle: c::HANDLE, + info: &T, +) -> Result<(), WinError> { + unsafe fn set_info( + handle: c::HANDLE, + class: i32, + info: *const c_void, + size: u32, + ) -> Result<(), WinError> { + let result = c::SetFileInformationByHandle(handle, class, info, size); + (result != 0).then_some(()).ok_or_else(|| get_last_error()) + } + // SAFETY: The `SetFileInformation` trait ensures that this is safe. + unsafe { set_info(handle, T::CLASS, info.as_ptr(), info.size()) } +} + +/// Gets the error from the last function. +/// This must be called immediately after the function that sets the error to +/// avoid the risk of another function overwriting it. +pub fn get_last_error() -> WinError { + // SAFETY: This just returns a thread-local u32 and has no other effects. + unsafe { WinError { code: c::GetLastError() } } +} + +/// An error code as returned by [`get_last_error`]. 
+/// +/// This is usually a 16-bit Win32 error code but may be a 32-bit HRESULT or NTSTATUS. +/// Check the documentation of the Windows API function being called for expected errors. +#[derive(Clone, Copy, PartialEq, Eq)] +#[repr(transparent)] +pub struct WinError { + pub code: u32, +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c/windows_sys.lst rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c/windows_sys.lst --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c/windows_sys.lst 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c/windows_sys.lst 2023-12-21 16:55:28.000000000 +0000 @@ -1964,6 +1964,7 @@ Windows.Win32.Networking.WinSock.ADDRINFOA Windows.Win32.Networking.WinSock.AF_INET Windows.Win32.Networking.WinSock.AF_INET6 +Windows.Win32.Networking.WinSock.AF_UNIX Windows.Win32.Networking.WinSock.AF_UNSPEC Windows.Win32.Networking.WinSock.bind Windows.Win32.Networking.WinSock.closesocket @@ -2058,6 +2059,7 @@ Windows.Win32.Networking.WinSock.SOCK_SEQPACKET Windows.Win32.Networking.WinSock.SOCK_STREAM Windows.Win32.Networking.WinSock.SOCKADDR +Windows.Win32.Networking.WinSock.SOCKADDR_UN Windows.Win32.Networking.WinSock.SOCKET Windows.Win32.Networking.WinSock.SOCKET_ERROR Windows.Win32.Networking.WinSock.SOL_SOCKET @@ -2222,6 +2224,7 @@ Windows.Win32.Storage.FileSystem.FILE_ADD_FILE Windows.Win32.Storage.FileSystem.FILE_ADD_SUBDIRECTORY Windows.Win32.Storage.FileSystem.FILE_ALL_ACCESS +Windows.Win32.Storage.FileSystem.FILE_ALLOCATION_INFO Windows.Win32.Storage.FileSystem.FILE_APPEND_DATA Windows.Win32.Storage.FileSystem.FILE_ATTRIBUTE_ARCHIVE Windows.Win32.Storage.FileSystem.FILE_ATTRIBUTE_COMPRESSED @@ -2282,6 +2285,7 @@ Windows.Win32.Storage.FileSystem.FILE_GENERIC_WRITE Windows.Win32.Storage.FileSystem.FILE_ID_BOTH_DIR_INFO Windows.Win32.Storage.FileSystem.FILE_INFO_BY_HANDLE_CLASS +Windows.Win32.Storage.FileSystem.FILE_IO_PRIORITY_HINT_INFO Windows.Win32.Storage.FileSystem.FILE_LIST_DIRECTORY Windows.Win32.Storage.FileSystem.FILE_NAME_NORMALIZED Windows.Win32.Storage.FileSystem.FILE_NAME_OPENED @@ -2503,9 +2507,12 @@ Windows.Win32.System.Threading.CREATE_SHARED_WOW_VDM Windows.Win32.System.Threading.CREATE_SUSPENDED Windows.Win32.System.Threading.CREATE_UNICODE_ENVIRONMENT +Windows.Win32.System.Threading.CREATE_WAITABLE_TIMER_HIGH_RESOLUTION +Windows.Win32.System.Threading.CREATE_WAITABLE_TIMER_MANUAL_RESET Windows.Win32.System.Threading.CreateEventW Windows.Win32.System.Threading.CreateProcessW Windows.Win32.System.Threading.CreateThread +Windows.Win32.System.Threading.CreateWaitableTimerExW Windows.Win32.System.Threading.DEBUG_ONLY_THIS_PROCESS Windows.Win32.System.Threading.DEBUG_PROCESS Windows.Win32.System.Threading.DeleteProcThreadAttributeList @@ -2542,6 +2549,7 @@ Windows.Win32.System.Threading.ReleaseSRWLockExclusive Windows.Win32.System.Threading.ReleaseSRWLockShared Windows.Win32.System.Threading.SetThreadStackGuarantee +Windows.Win32.System.Threading.SetWaitableTimer Windows.Win32.System.Threading.Sleep Windows.Win32.System.Threading.SleepConditionVariableSRW Windows.Win32.System.Threading.SleepEx @@ -2568,6 +2576,8 @@ Windows.Win32.System.Threading.THREAD_CREATE_RUN_IMMEDIATELY Windows.Win32.System.Threading.THREAD_CREATE_SUSPENDED Windows.Win32.System.Threading.THREAD_CREATION_FLAGS +Windows.Win32.System.Threading.TIMER_ALL_ACCESS +Windows.Win32.System.Threading.TIMER_MODIFY_STATE Windows.Win32.System.Threading.TLS_OUT_OF_INDEXES Windows.Win32.System.Threading.TlsAlloc 
Windows.Win32.System.Threading.TlsFree diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c/windows_sys.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c/windows_sys.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c/windows_sys.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c/windows_sys.rs 2023-12-21 16:55:28.000000000 +0000 @@ -152,6 +152,15 @@ } #[link(name = "kernel32")] extern "system" { + pub fn CreateWaitableTimerExW( + lptimerattributes: *const SECURITY_ATTRIBUTES, + lptimername: PCWSTR, + dwflags: u32, + dwdesiredaccess: u32, + ) -> HANDLE; +} +#[link(name = "kernel32")] +extern "system" { pub fn DeleteFileW(lpfilename: PCWSTR) -> BOOL; } #[link(name = "kernel32")] @@ -509,6 +518,17 @@ } #[link(name = "kernel32")] extern "system" { + pub fn SetWaitableTimer( + htimer: HANDLE, + lpduetime: *const i64, + lperiod: i32, + pfncompletionroutine: PTIMERAPCROUTINE, + lpargtocompletionroutine: *const ::core::ffi::c_void, + fresume: BOOL, + ) -> BOOL; +} +#[link(name = "kernel32")] +extern "system" { pub fn Sleep(dwmilliseconds: u32) -> (); } #[link(name = "kernel32")] @@ -847,6 +867,7 @@ } pub const AF_INET: ADDRESS_FAMILY = 2u16; pub const AF_INET6: ADDRESS_FAMILY = 23u16; +pub const AF_UNIX: u16 = 1u16; pub const AF_UNSPEC: ADDRESS_FAMILY = 0u16; pub const ALL_PROCESSOR_GROUPS: u32 = 65535u32; #[repr(C)] @@ -1164,6 +1185,8 @@ pub const CREATE_SHARED_WOW_VDM: PROCESS_CREATION_FLAGS = 4096u32; pub const CREATE_SUSPENDED: PROCESS_CREATION_FLAGS = 4u32; pub const CREATE_UNICODE_ENVIRONMENT: PROCESS_CREATION_FLAGS = 1024u32; +pub const CREATE_WAITABLE_TIMER_HIGH_RESOLUTION: u32 = 2u32; +pub const CREATE_WAITABLE_TIMER_MANUAL_RESET: u32 = 1u32; pub const CSTR_EQUAL: COMPARESTRING_RESULT = 2i32; pub const CSTR_GREATER_THAN: COMPARESTRING_RESULT = 3i32; pub const CSTR_LESS_THAN: COMPARESTRING_RESULT = 1i32; @@ -3106,6 +3129,16 @@ pub type FILE_ACCESS_RIGHTS = u32; pub const FILE_ADD_FILE: FILE_ACCESS_RIGHTS = 2u32; pub const FILE_ADD_SUBDIRECTORY: FILE_ACCESS_RIGHTS = 4u32; +#[repr(C)] +pub struct FILE_ALLOCATION_INFO { + pub AllocationSize: i64, +} +impl ::core::marker::Copy for FILE_ALLOCATION_INFO {} +impl ::core::clone::Clone for FILE_ALLOCATION_INFO { + fn clone(&self) -> Self { + *self + } +} pub const FILE_ALL_ACCESS: FILE_ACCESS_RIGHTS = 2032127u32; pub const FILE_APPEND_DATA: FILE_ACCESS_RIGHTS = 4u32; pub const FILE_ATTRIBUTE_ARCHIVE: FILE_FLAGS_AND_ATTRIBUTES = 32u32; @@ -3247,6 +3280,16 @@ } } pub type FILE_INFO_BY_HANDLE_CLASS = i32; +#[repr(C)] +pub struct FILE_IO_PRIORITY_HINT_INFO { + pub PriorityHint: PRIORITY_HINT, +} +impl ::core::marker::Copy for FILE_IO_PRIORITY_HINT_INFO {} +impl ::core::clone::Clone for FILE_IO_PRIORITY_HINT_INFO { + fn clone(&self) -> Self { + *self + } +} pub const FILE_LIST_DIRECTORY: FILE_ACCESS_RIGHTS = 1u32; pub const FILE_NAME_NORMALIZED: GETFINALPATHNAMEBYHANDLE_FLAGS = 0u32; pub const FILE_NAME_OPENED: GETFINALPATHNAMEBYHANDLE_FLAGS = 8u32; @@ -3752,6 +3795,7 @@ pub const PIPE_TYPE_BYTE: NAMED_PIPE_MODE = 0u32; pub const PIPE_TYPE_MESSAGE: NAMED_PIPE_MODE = 4u32; pub const PIPE_WAIT: NAMED_PIPE_MODE = 0u32; +pub type PRIORITY_HINT = i32; pub type PROCESSOR_ARCHITECTURE = u16; pub type PROCESS_CREATION_FLAGS = u32; #[repr(C)] @@ -3774,6 +3818,13 @@ pub const PROFILE_USER: PROCESS_CREATION_FLAGS = 268435456u32; pub const PROGRESS_CONTINUE: u32 = 0u32; pub type PSTR = *mut u8; +pub type PTIMERAPCROUTINE = ::core::option::Option< + unsafe 
extern "system" fn( + lpargtocompletionroutine: *const ::core::ffi::c_void, + dwtimerlowvalue: u32, + dwtimerhighvalue: u32, + ) -> (), +>; pub type PWSTR = *mut u16; pub const READ_CONTROL: FILE_ACCESS_RIGHTS = 131072u32; pub const REALTIME_PRIORITY_CLASS: PROCESS_CREATION_FLAGS = 256u32; @@ -3813,6 +3864,17 @@ *self } } +#[repr(C)] +pub struct SOCKADDR_UN { + pub sun_family: ADDRESS_FAMILY, + pub sun_path: [u8; 108], +} +impl ::core::marker::Copy for SOCKADDR_UN {} +impl ::core::clone::Clone for SOCKADDR_UN { + fn clone(&self) -> Self { + *self + } +} pub type SOCKET = usize; pub const SOCKET_ERROR: i32 = -1i32; pub const SOCK_DGRAM: WINSOCK_SOCKET_TYPE = 2i32; @@ -3910,6 +3972,7 @@ pub const SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE: SYMBOLIC_LINK_FLAGS = 2u32; pub const SYMBOLIC_LINK_FLAG_DIRECTORY: SYMBOLIC_LINK_FLAGS = 1u32; pub const SYMLINK_FLAG_RELATIVE: u32 = 1u32; +pub type SYNCHRONIZATION_ACCESS_RIGHTS = u32; pub const SYNCHRONIZE: FILE_ACCESS_RIGHTS = 1048576u32; #[repr(C)] pub struct SYSTEM_INFO { @@ -3956,6 +4019,8 @@ pub const THREAD_CREATE_RUN_IMMEDIATELY: THREAD_CREATION_FLAGS = 0u32; pub const THREAD_CREATE_SUSPENDED: THREAD_CREATION_FLAGS = 4u32; pub type THREAD_CREATION_FLAGS = u32; +pub const TIMER_ALL_ACCESS: SYNCHRONIZATION_ACCESS_RIGHTS = 2031619u32; +pub const TIMER_MODIFY_STATE: SYNCHRONIZATION_ACCESS_RIGHTS = 2u32; #[repr(C)] pub struct TIMEVAL { pub tv_sec: i32, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/c.rs 2023-12-21 16:55:28.000000000 +0000 @@ -46,6 +46,10 @@ pub use LINGER as linger; pub use TIMEVAL as timeval; +// https://learn.microsoft.com/en-us/cpp/c-runtime-library/exit-success-exit-failure?view=msvc-170 +pub const EXIT_SUCCESS: u32 = 0; +pub const EXIT_FAILURE: u32 = 1; + pub const CONDITION_VARIABLE_INIT: CONDITION_VARIABLE = CONDITION_VARIABLE { Ptr: ptr::null_mut() }; pub const SRWLOCK_INIT: SRWLOCK = SRWLOCK { Ptr: ptr::null_mut() }; pub const INIT_ONCE_STATIC_INIT: INIT_ONCE = INIT_ONCE { Ptr: ptr::null_mut() }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/cmath.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/cmath.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/cmath.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/cmath.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,6 @@ #![cfg(not(test))] -use libc::{c_double, c_float, c_int}; +use core::ffi::{c_double, c_float, c_int}; extern "C" { pub fn acos(n: c_double) -> c_double; @@ -33,7 +33,7 @@ #[cfg(not(all(target_env = "msvc", target_arch = "x86")))] mod shims { - use libc::c_float; + use core::ffi::c_float; extern "C" { pub fn acosf(n: c_float) -> c_float; @@ -52,7 +52,7 @@ // back to f32. While not precisely correct should be "correct enough" for now. 
#[cfg(all(target_env = "msvc", target_arch = "x86"))] mod shims { - use libc::c_float; + use core::ffi::c_float; #[inline] pub unsafe fn acosf(n: c_float) -> c_float { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/fs.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/fs.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/fs.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/fs.rs 2023-12-21 16:55:28.000000000 +0000 @@ -16,8 +16,10 @@ use crate::sys_common::{AsInner, FromInner, IntoInner}; use crate::thread; +use core::ffi::c_void; + use super::path::maybe_verbatim; -use super::to_u16s; +use super::{api, to_u16s, IoResult}; pub struct File { handle: Handle, @@ -121,7 +123,7 @@ let mut wfd = mem::zeroed(); loop { if c::FindNextFileW(self.handle.0, &mut wfd) == 0 { - if c::GetLastError() == c::ERROR_NO_MORE_FILES { + if api::get_last_error().code == c::ERROR_NO_MORE_FILES { return None; } else { return Some(Err(Error::last_os_error())); @@ -316,17 +318,8 @@ } pub fn truncate(&self, size: u64) -> io::Result<()> { - let mut info = c::FILE_END_OF_FILE_INFO { EndOfFile: size as c::LARGE_INTEGER }; - let size = mem::size_of_val(&info); - cvt(unsafe { - c::SetFileInformationByHandle( - self.handle.as_raw_handle(), - c::FileEndOfFileInfo, - &mut info as *mut _ as *mut _, - size as c::DWORD, - ) - })?; - Ok(()) + let info = c::FILE_END_OF_FILE_INFO { EndOfFile: size as i64 }; + api::set_file_information_by_handle(self.handle.as_raw_handle(), &info).io_result() } #[cfg(not(target_vendor = "uwp"))] @@ -371,7 +364,7 @@ cvt(c::GetFileInformationByHandleEx( self.handle.as_raw_handle(), c::FileBasicInfo, - &mut info as *mut _ as *mut libc::c_void, + &mut info as *mut _ as *mut c_void, size as c::DWORD, ))?; let mut attr = FileAttr { @@ -399,7 +392,7 @@ cvt(c::GetFileInformationByHandleEx( self.handle.as_raw_handle(), c::FileStandardInfo, - &mut info as *mut _ as *mut libc::c_void, + &mut info as *mut _ as *mut c_void, size as c::DWORD, ))?; attr.file_size = info.AllocationSize as u64; @@ -563,23 +556,14 @@ } pub fn set_permissions(&self, perm: FilePermissions) -> io::Result<()> { - let mut info = c::FILE_BASIC_INFO { + let info = c::FILE_BASIC_INFO { CreationTime: 0, LastAccessTime: 0, LastWriteTime: 0, ChangeTime: 0, FileAttributes: perm.attrs, }; - let size = mem::size_of_val(&info); - cvt(unsafe { - c::SetFileInformationByHandle( - self.handle.as_raw_handle(), - c::FileBasicInfo, - &mut info as *mut _ as *mut _, - size as c::DWORD, - ) - })?; - Ok(()) + api::set_file_information_by_handle(self.handle.as_raw_handle(), &info).io_result() } pub fn set_times(&self, times: FileTimes) -> io::Result<()> { @@ -624,7 +608,7 @@ cvt(c::GetFileInformationByHandleEx( self.handle.as_raw_handle(), c::FileBasicInfo, - &mut info as *mut _ as *mut libc::c_void, + &mut info as *mut _ as *mut c_void, size as c::DWORD, ))?; Ok(info) @@ -639,38 +623,20 @@ /// If the operation is not supported for this filesystem or OS version /// then errors will be `ERROR_NOT_SUPPORTED` or `ERROR_INVALID_PARAMETER`. 
fn posix_delete(&self) -> io::Result<()> { - let mut info = c::FILE_DISPOSITION_INFO_EX { + let info = c::FILE_DISPOSITION_INFO_EX { Flags: c::FILE_DISPOSITION_FLAG_DELETE | c::FILE_DISPOSITION_FLAG_POSIX_SEMANTICS | c::FILE_DISPOSITION_FLAG_IGNORE_READONLY_ATTRIBUTE, }; - let size = mem::size_of_val(&info); - cvt(unsafe { - c::SetFileInformationByHandle( - self.handle.as_raw_handle(), - c::FileDispositionInfoEx, - &mut info as *mut _ as *mut _, - size as c::DWORD, - ) - })?; - Ok(()) + api::set_file_information_by_handle(self.handle.as_raw_handle(), &info).io_result() } /// Delete a file using win32 semantics. The file won't actually be deleted /// until all file handles are closed. However, marking a file for deletion /// will prevent anyone from opening a new handle to the file. fn win32_delete(&self) -> io::Result<()> { - let mut info = c::FILE_DISPOSITION_INFO { DeleteFile: c::TRUE as _ }; - let size = mem::size_of_val(&info); - cvt(unsafe { - c::SetFileInformationByHandle( - self.handle.as_raw_handle(), - c::FileDispositionInfo, - &mut info as *mut _ as *mut _, - size as c::DWORD, - ) - })?; - Ok(()) + let info = c::FILE_DISPOSITION_INFO { DeleteFile: c::TRUE as _ }; + api::set_file_information_by_handle(self.handle.as_raw_handle(), &info).io_result() } /// Fill the given buffer with as many directory entries as will fit. @@ -1064,6 +1030,14 @@ } pub fn readdir(p: &Path) -> io::Result { + // We push a `*` to the end of the path which cause the empty path to be + // treated as the current directory. So, for consistency with other platforms, + // we explicitly error on the empty path. + if p.as_os_str().is_empty() { + // Return an error code consistent with other ways of opening files. + // E.g. fs::metadata or File::open. + return Err(io::Error::from_raw_os_error(c::ERROR_PATH_NOT_FOUND as i32)); + } let root = p.to_path_buf(); let star = p.join("*"); let path = maybe_verbatim(&star)?; @@ -1513,6 +1487,13 @@ // as the file existing. _ if e.raw_os_error() == Some(c::ERROR_SHARING_VIOLATION as i32) => Ok(true), + // `ERROR_CANT_ACCESS_FILE` means that a file exists but that the + // reparse point could not be handled by `CreateFile`. + // This can happen for special files such as: + // * Unix domain sockets which you need to `connect` to + // * App exec links which require using `CreateProcess` + _ if e.raw_os_error() == Some(c::ERROR_CANT_ACCESS_FILE as i32) => Ok(true), + // Other errors such as `ERROR_ACCESS_DENIED` may indicate that the // file exists. However, these types of errors are usually more // permanent so we report them here. 
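The fs.rs call sites above all funnel through the new `api::set_file_information_by_handle` wrapper introduced earlier in this diff. A compressed sketch of that pattern follows; it is not the actual std code, the FFI declaration uses simplified parameter types, and the information-class constant (6 for FileEndOfFileInfo) is written out by hand:

    use core::ffi::c_void;

    // Stand-in for the real Win32 import; BOOL-returning, per the SetFileInformationByHandle docs.
    extern "system" {
        fn SetFileInformationByHandle(h: *mut c_void, class: i32, info: *const c_void, size: u32) -> i32;
    }

    /// SAFETY contract: `CLASS` must match the struct layout, and `as_ptr`/`size`
    /// must describe readable memory holding that struct.
    unsafe trait SetFileInformation {
        const CLASS: i32;
        fn as_ptr(&self) -> *const c_void;
        fn size(&self) -> u32;
    }

    #[repr(C)]
    struct FileEndOfFileInfo {
        end_of_file: i64,
    }

    // SAFETY: plain repr(C) struct; 6 is the FileEndOfFileInfo information class.
    unsafe impl SetFileInformation for FileEndOfFileInfo {
        const CLASS: i32 = 6;
        fn as_ptr(&self) -> *const c_void {
            (self as *const Self).cast()
        }
        fn size(&self) -> u32 {
            core::mem::size_of::<Self>() as u32
        }
    }

    fn set_file_information<T: SetFileInformation>(handle: *mut c_void, info: &T) -> Result<(), ()> {
        // SAFETY: the trait contract guarantees that class, pointer and size agree.
        let ok = unsafe { SetFileInformationByHandle(handle, T::CLASS, info.as_ptr(), info.size()) };
        if ok != 0 { Ok(()) } else { Err(()) }
    }

The point of the trait is that only this one call site needs an unsafe block; each information struct carries its own class and size, so mismatches are caught at the impl rather than at every caller.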
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/io.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/io.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/io.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/io.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,7 +3,7 @@ use crate::os::windows::io::{AsHandle, AsRawHandle, BorrowedHandle}; use crate::slice; use crate::sys::c; -use libc; +use core::ffi::c_void; #[derive(Copy, Clone)] #[repr(transparent)] @@ -136,7 +136,7 @@ let res = c::GetFileInformationByHandleEx( handle, c::FileNameInfo, - &mut name_info as *mut _ as *mut libc::c_void, + &mut name_info as *mut _ as *mut c_void, size_of::() as u32, ); if res == 0 { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -44,6 +44,18 @@ } } +mod api; + +/// Map a Result to io::Result. +trait IoResult { + fn io_result(self) -> crate::io::Result; +} +impl IoResult for Result { + fn io_result(self) -> crate::io::Result { + self.map_err(|e| crate::io::Error::from_raw_os_error(e.code as i32)) + } +} + // SAFETY: must be called only once during runtime initialization. // NOTE: this is not guaranteed to run, for example when Rust code is called externally. pub unsafe fn init(_argc: isize, _argv: *const *const u8, _sigpipe: u8) { @@ -241,11 +253,11 @@ // not an actual error. c::SetLastError(0); let k = match f1(buf.as_mut_ptr().cast::(), n as c::DWORD) { - 0 if c::GetLastError() == 0 => 0, + 0 if api::get_last_error().code == 0 => 0, 0 => return Err(crate::io::Error::last_os_error()), n => n, } as usize; - if k == n && c::GetLastError() == c::ERROR_INSUFFICIENT_BUFFER { + if k == n && api::get_last_error().code == c::ERROR_INSUFFICIENT_BUFFER { n = n.saturating_mul(2).min(c::DWORD::MAX as usize); } else if k > n { n = k; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/net.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/net.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/net.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/net.rs 2023-12-21 16:55:28.000000000 +0000 @@ -15,7 +15,7 @@ use crate::sys_common::{AsInner, FromInner, IntoInner}; use crate::time::Duration; -use libc::{c_int, c_long, c_ulong, c_ushort}; +use core::ffi::{c_int, c_long, c_ulong, c_ushort}; pub type wrlen_t = i32; @@ -140,13 +140,15 @@ } } + pub fn connect(&self, addr: &SocketAddr) -> io::Result<()> { + let (addr, len) = addr.into_inner(); + let result = unsafe { c::connect(self.as_raw(), addr.as_ptr(), len) }; + cvt(result).map(drop) + } + pub fn connect_timeout(&self, addr: &SocketAddr, timeout: Duration) -> io::Result<()> { self.set_nonblocking(true)?; - let result = { - let (addr, len) = addr.into_inner(); - let result = unsafe { c::connect(self.as_raw(), addr.as_ptr(), len) }; - cvt(result).map(drop) - }; + let result = self.connect(addr); self.set_nonblocking(false)?; match result { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/os.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/os.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/os.rs 2023-12-04 19:48:34.000000000 
+0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/os.rs 2023-12-21 16:55:28.000000000 +0000 @@ -17,10 +17,10 @@ use crate::slice; use crate::sys::{c, cvt}; -use super::to_u16s; +use super::{api, to_u16s}; pub fn errno() -> i32 { - unsafe { c::GetLastError() as i32 } + api::get_last_error().code as i32 } /// Gets a detailed string description for the given error number. @@ -336,7 +336,7 @@ super::fill_utf16_buf( |buf, mut sz| { match c::GetUserProfileDirectoryW(token, buf, &mut sz) { - 0 if c::GetLastError() != c::ERROR_INSUFFICIENT_BUFFER => 0, + 0 if api::get_last_error().code != c::ERROR_INSUFFICIENT_BUFFER => 0, 0 => sz, _ => sz - 1, // sz includes the null terminator } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/process.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/process.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/process.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/process.rs 2023-12-21 16:55:28.000000000 +0000 @@ -19,8 +19,7 @@ use crate::ptr; use crate::sync::Mutex; use crate::sys::args::{self, Arg}; -use crate::sys::c; -use crate::sys::c::NonZeroDWORD; +use crate::sys::c::{self, NonZeroDWORD, EXIT_FAILURE, EXIT_SUCCESS}; use crate::sys::cvt; use crate::sys::fs::{File, OpenOptions}; use crate::sys::handle::Handle; @@ -30,7 +29,7 @@ use crate::sys_common::process::{CommandEnv, CommandEnvs}; use crate::sys_common::IntoInner; -use libc::{c_void, EXIT_FAILURE, EXIT_SUCCESS}; +use core::ffi::c_void; //////////////////////////////////////////////////////////////////////////////// // Command diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/stack_overflow.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/stack_overflow.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/stack_overflow.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/stack_overflow.rs 2023-12-21 16:55:28.000000000 +0000 @@ -3,6 +3,8 @@ use crate::sys::c; use crate::thread; +use super::api; + pub struct Handler; impl Handler { @@ -10,7 +12,7 @@ // This API isn't available on XP, so don't panic in that case and just // pray it works out ok. if c::SetThreadStackGuarantee(&mut 0x5000) == 0 - && c::GetLastError() as u32 != c::ERROR_CALL_NOT_IMPLEMENTED as u32 + && api::get_last_error().code != c::ERROR_CALL_NOT_IMPLEMENTED { panic!("failed to reserve stack space for exception handling"); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/stdio.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/stdio.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/stdio.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/stdio.rs 2023-12-21 16:55:28.000000000 +0000 @@ -9,6 +9,7 @@ use crate::sys::c; use crate::sys::cvt; use crate::sys::handle::Handle; +use crate::sys::windows::api; use core::str::utf8_char_width; #[cfg(test)] @@ -369,7 +370,7 @@ // ReadConsoleW returns success with ERROR_OPERATION_ABORTED for Ctrl-C or Ctrl-Break. // Explicitly check for that case here and try again. 
- if amount == 0 && unsafe { c::GetLastError() } == c::ERROR_OPERATION_ABORTED { + if amount == 0 && api::get_last_error().code == c::ERROR_OPERATION_ABORTED { continue; } break; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/thread.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/thread.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/thread.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/thread.rs 2023-12-21 16:55:28.000000000 +0000 @@ -10,8 +10,9 @@ use crate::sys_common::FromInner; use crate::time::Duration; -use libc::c_void; +use core::ffi::c_void; +use super::time::WaitableTimer; use super::to_u16s; pub const DEFAULT_MIN_STACK_SIZE: usize = 2 * 1024 * 1024; @@ -87,7 +88,17 @@ } pub fn sleep(dur: Duration) { - unsafe { c::Sleep(super::dur2timeout(dur)) } + fn high_precision_sleep(dur: Duration) -> Result<(), ()> { + let timer = WaitableTimer::high_resolution()?; + timer.set(dur)?; + timer.wait() + } + // Attempt to use high-precision sleep (Windows 10, version 1803+). + // On error fallback to the standard `Sleep` function. + // Also preserves the zero duration behaviour of `Sleep`. + if dur.is_zero() || high_precision_sleep(dur).is_err() { + unsafe { c::Sleep(super::dur2timeout(dur)) } + } } pub fn handle(&self) -> &Handle { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/thread_local_key.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/thread_local_key.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/thread_local_key.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/thread_local_key.rs 2023-12-21 16:55:28.000000000 +0000 @@ -16,14 +16,19 @@ // Using a per-thread list avoids the problems in synchronizing global state. #[thread_local] #[cfg(target_thread_local)] -static mut DESTRUCTORS: Vec<(*mut u8, unsafe extern "C" fn(*mut u8))> = Vec::new(); +static DESTRUCTORS: crate::cell::RefCell> = + crate::cell::RefCell::new(Vec::new()); // Ensure this can never be inlined because otherwise this may break in dylibs. // See #44391. #[inline(never)] #[cfg(target_thread_local)] pub unsafe fn register_keyless_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { - DESTRUCTORS.push((t, dtor)); + match DESTRUCTORS.try_borrow_mut() { + Ok(mut dtors) => dtors.push((t, dtor)), + Err(_) => rtabort!("global allocator may not use TLS"), + } + HAS_DTORS.store(true, Relaxed); } @@ -37,11 +42,17 @@ // the case that this loop always terminates because we provide the // guarantee that a TLS key cannot be set after it is flagged for // destruction. - while let Some((ptr, dtor)) = DESTRUCTORS.pop() { + loop { + // Use a let-else binding to ensure the `RefCell` guard is dropped + // immediately. Otherwise, a panic would occur if a TLS destructor + // tries to access the list. + let Some((ptr, dtor)) = DESTRUCTORS.borrow_mut().pop() else { + break; + }; (dtor)(ptr); } // We're done so free the memory. 
- DESTRUCTORS = Vec::new(); + DESTRUCTORS.replace(Vec::new()); } type Key = c::DWORD; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/time.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/time.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/time.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys/windows/time.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,11 +1,13 @@ use crate::cmp::Ordering; use crate::fmt; use crate::mem; +use crate::ptr::{null, null_mut}; use crate::sys::c; use crate::sys_common::IntoInner; use crate::time::Duration; use core::hash::{Hash, Hasher}; +use core::ops::Neg; const NANOS_PER_SEC: u64 = 1_000_000_000; const INTERVALS_PER_SEC: u64 = NANOS_PER_SEC / 100; @@ -222,3 +224,39 @@ qpc_value } } + +/// A timer you can wait on. +pub(super) struct WaitableTimer { + handle: c::HANDLE, +} +impl WaitableTimer { + /// Create a high-resolution timer. Will fail before Windows 10, version 1803. + pub fn high_resolution() -> Result { + let handle = unsafe { + c::CreateWaitableTimerExW( + null(), + null(), + c::CREATE_WAITABLE_TIMER_HIGH_RESOLUTION, + c::TIMER_ALL_ACCESS, + ) + }; + if handle != null_mut() { Ok(Self { handle }) } else { Err(()) } + } + pub fn set(&self, duration: Duration) -> Result<(), ()> { + // Convert the Duration to a format similar to FILETIME. + // Negative values are relative times whereas positive values are absolute. + // Therefore we negate the relative duration. + let time = checked_dur2intervals(&duration).ok_or(())?.neg(); + let result = unsafe { c::SetWaitableTimer(self.handle, &time, 0, None, null(), c::FALSE) }; + if result != 0 { Ok(()) } else { Err(()) } + } + pub fn wait(&self) -> Result<(), ()> { + let result = unsafe { c::WaitForSingleObject(self.handle, c::INFINITE) }; + if result != c::WAIT_FAILED { Ok(()) } else { Err(()) } + } +} +impl Drop for WaitableTimer { + fn drop(&mut self) { + unsafe { c::CloseHandle(self.handle) }; + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys_common/net.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys_common/net.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys_common/net.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys_common/net.rs 2023-12-21 16:55:28.000000000 +0000 @@ -226,9 +226,7 @@ init(); let sock = Socket::new(addr, c::SOCK_STREAM)?; - - let (addr, len) = addr.into_inner(); - cvt_r(|| unsafe { c::connect(sock.as_raw(), addr.as_ptr(), len) })?; + sock.connect(addr)?; Ok(TcpStream { inner: sock }) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys_common/thread_local_dtor.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys_common/thread_local_dtor.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/sys_common/thread_local_dtor.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/sys_common/thread_local_dtor.rs 2023-12-21 16:55:28.000000000 +0000 @@ -13,6 +13,7 @@ #![unstable(feature = "thread_local_internals", issue = "none")] #![allow(dead_code)] +use crate::cell::RefCell; use crate::ptr; use crate::sys_common::thread_local_key::StaticKey; @@ -28,17 +29,23 @@ // flagged for destruction. static DTORS: StaticKey = StaticKey::new(Some(run_dtors)); - type List = Vec<(*mut u8, unsafe extern "C" fn(*mut u8))>; + // FIXME(joboet): integrate RefCell into pointer to avoid infinite recursion + // when the global allocator tries to register a destructor and just panic + // instead. 
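One detail of the windows/time.rs hunk above that is easy to miss: `SetWaitableTimer` takes the due time in 100-nanosecond intervals and interprets negative values as relative to now, which is why the converted duration is negated. A small illustrative helper (not std code; the name and the Option-based error handling are made up):

    use std::time::Duration;

    fn dur_to_relative_filetime_intervals(dur: Duration) -> Option<i64> {
        const INTERVALS_PER_SEC: u64 = 10_000_000; // 100ns units per second
        let intervals = dur
            .as_secs()
            .checked_mul(INTERVALS_PER_SEC)?
            .checked_add(u64::from(dur.subsec_nanos()) / 100)?;
        // Negative due times mean "relative to now" for SetWaitableTimer.
        i64::try_from(intervals).ok().map(|i| -i)
    }

    fn main() {
        // 1.5s -> 15_000_000 intervals, negated to signal a relative due time.
        assert_eq!(dur_to_relative_filetime_intervals(Duration::from_millis(1500)), Some(-15_000_000));
    }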
+ type List = RefCell>; if DTORS.get().is_null() { - let v: Box = Box::new(Vec::new()); + let v: Box = Box::new(RefCell::new(Vec::new())); DTORS.set(Box::into_raw(v) as *mut u8); } - let list: &mut List = &mut *(DTORS.get() as *mut List); - list.push((t, dtor)); + let list = &*(DTORS.get() as *const List); + match list.try_borrow_mut() { + Ok(mut dtors) => dtors.push((t, dtor)), + Err(_) => rtabort!("global allocator may not use TLS"), + } unsafe extern "C" fn run_dtors(mut ptr: *mut u8) { while !ptr.is_null() { - let list: Box = Box::from_raw(ptr as *mut List); + let list = Box::from_raw(ptr as *mut List).into_inner(); for (ptr, dtor) in list.into_iter() { dtor(ptr); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/thread/local.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/thread/local.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/thread/local.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/thread/local.rs 2023-12-21 16:55:28.000000000 +0000 @@ -29,9 +29,9 @@ /// within a thread, and values that implement [`Drop`] get destructed when a /// thread exits. Some caveats apply, which are explained below. /// -/// A `LocalKey`'s initializer cannot recursively depend on itself, and using -/// a `LocalKey` in this way will cause the initializer to infinitely recurse -/// on the first call to `with`. +/// A `LocalKey`'s initializer cannot recursively depend on itself. Using a +/// `LocalKey` in this way may cause panics, aborts or infinite recursion on +/// the first call to `with`. /// /// # Examples /// diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/thread/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/thread/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/thread/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/thread/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -545,6 +545,15 @@ scope_data.increment_num_running_threads(); } + let main = Box::new(main); + // SAFETY: dynamic size and alignment of the Box remain the same. See below for why the + // lifetime change is justified. + #[cfg(bootstrap)] + let main = + unsafe { mem::transmute::, Box>(main) }; + #[cfg(not(bootstrap))] + let main = unsafe { Box::from_raw(Box::into_raw(main) as *mut (dyn FnOnce() + 'static)) }; + Ok(JoinInner { // SAFETY: // @@ -559,14 +568,7 @@ // Similarly, the `sys` implementation must guarantee that no references to the closure // exist after the thread has terminated, which is signaled by `Thread::join` // returning. - native: unsafe { - imp::Thread::new( - stack_size, - mem::transmute::, Box>( - Box::new(main), - ), - )? - }, + native: unsafe { imp::Thread::new(stack_size, main)? }, thread: my_thread, packet: my_packet, }) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/time.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/time.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/src/time.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/src/time.rs 2023-12-21 16:55:28.000000000 +0000 @@ -111,7 +111,7 @@ /// |-----------|----------------------------------------------------------------------| /// | SGX | [`insecure_time` usercall]. 
More information on [timekeeping in SGX] | /// | UNIX | [clock_gettime (Monotonic Clock)] | -/// | Darwin | [mach_absolute_time] | +/// | Darwin | [clock_gettime (Monotonic Clock)] | /// | VXWorks | [clock_gettime (Monotonic Clock)] | /// | SOLID | `get_tim` | /// | WASI | [__wasi_clock_time_get (Monotonic Clock)] | @@ -123,7 +123,6 @@ /// [timekeeping in SGX]: https://edp.fortanix.com/docs/concepts/rust-std/#codestdtimecode /// [__wasi_clock_time_get (Monotonic Clock)]: https://github.com/WebAssembly/WASI/blob/main/legacy/preview1/docs.md#clock_time_get /// [clock_gettime (Monotonic Clock)]: https://linux.die.net/man/3/clock_gettime -/// [mach_absolute_time]: https://developer.apple.com/library/archive/documentation/Darwin/Conceptual/KernelProgramming/services/services.html /// /// **Disclaimer:** These system calls might change over time. /// @@ -153,6 +152,7 @@ /// #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[stable(feature = "time2", since = "1.8.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "Instant")] pub struct Instant(time::Instant); /// A measurement of the system clock, useful for talking to @@ -223,7 +223,7 @@ /// |-----------|----------------------------------------------------------------------| /// | SGX | [`insecure_time` usercall]. More information on [timekeeping in SGX] | /// | UNIX | [clock_gettime (Realtime Clock)] | -/// | Darwin | [gettimeofday] | +/// | Darwin | [clock_gettime (Realtime Clock)] | /// | VXWorks | [clock_gettime (Realtime Clock)] | /// | SOLID | `SOLID_RTC_ReadTime` | /// | WASI | [__wasi_clock_time_get (Realtime Clock)] | @@ -232,7 +232,6 @@ /// [currently]: crate::io#platform-specific-behavior /// [`insecure_time` usercall]: https://edp.fortanix.com/docs/api/fortanix_sgx_abi/struct.Usercalls.html#method.insecure_time /// [timekeeping in SGX]: https://edp.fortanix.com/docs/concepts/rust-std/#codestdtimecode -/// [gettimeofday]: https://man7.org/linux/man-pages/man2/gettimeofday.2.html /// [clock_gettime (Realtime Clock)]: https://linux.die.net/man/3/clock_gettime /// [__wasi_clock_time_get (Realtime Clock)]: https://github.com/WebAssembly/WASI/blob/main/legacy/preview1/docs.md#clock_time_get /// [GetSystemTimePreciseAsFileTime]: https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getsystemtimepreciseasfiletime diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/tests/switch-stdout.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/tests/switch-stdout.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/std/tests/switch-stdout.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/std/tests/switch-stdout.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,32 +5,48 @@ mod common; +#[cfg(windows)] +use std::os::windows::io::OwnedHandle; + #[cfg(unix)] -fn switch_stdout_to(file: File) { +use std::os::fd::OwnedFd; + +#[cfg(unix)] +fn switch_stdout_to(file: OwnedFd) -> OwnedFd { use std::os::unix::prelude::*; extern "C" { + fn dup(old: i32) -> i32; fn dup2(old: i32, new: i32) -> i32; } unsafe { + let orig_fd = dup(1); + assert_ne!(orig_fd, -1); + let res = OwnedFd::from_raw_fd(orig_fd); assert_eq!(dup2(file.as_raw_fd(), 1), 1); + res } } #[cfg(windows)] -fn switch_stdout_to(file: File) { +fn switch_stdout_to(file: OwnedHandle) -> OwnedHandle { use std::os::windows::prelude::*; extern "system" { + fn GetStdHandle(nStdHandle: u32) -> *mut u8; fn SetStdHandle(nStdHandle: u32, handle: *mut u8) -> i32; } const STD_OUTPUT_HANDLE: u32 = (-11i32) as u32; + const INVALID_HANDLE_VALUE: *mut u8 = !0 as *mut 
u8; unsafe { + let orig_hdl = GetStdHandle(STD_OUTPUT_HANDLE); + assert!(!orig_hdl.is_null() && orig_hdl != INVALID_HANDLE_VALUE); let rc = SetStdHandle(STD_OUTPUT_HANDLE, file.into_raw_handle() as *mut _); assert!(rc != 0); + OwnedHandle::from_raw_handle(orig_hdl as _) } } @@ -43,10 +59,12 @@ let mut stdout = std::io::stdout(); stdout.write(b"foo\n").unwrap(); stdout.flush().unwrap(); - switch_stdout_to(f); + let orig_hdl = switch_stdout_to(f.into()); stdout.write(b"bar\n").unwrap(); stdout.flush().unwrap(); + switch_stdout_to(orig_hdl); + let mut contents = String::new(); File::open(&path).unwrap().read_to_string(&mut contents).unwrap(); assert_eq!(contents, "bar\n"); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/ci/docker/riscv64gc-unknown-linux-gnu/Dockerfile rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/ci/docker/riscv64gc-unknown-linux-gnu/Dockerfile --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/ci/docker/riscv64gc-unknown-linux-gnu/Dockerfile 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/ci/docker/riscv64gc-unknown-linux-gnu/Dockerfile 2023-12-21 16:55:32.000000000 +0000 @@ -2,8 +2,12 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gcc libc6-dev qemu-user ca-certificates \ - gcc-riscv64-linux-gnu libc6-dev-riscv64-cross + gcc-riscv64-linux-gnu libc6-dev-riscv64-cross \ + llvm ENV CARGO_TARGET_RISCV64GC_UNKNOWN_LINUX_GNU_LINKER=riscv64-linux-gnu-gcc \ - CARGO_TARGET_RISCV64GC_UNKNOWN_LINUX_GNU_RUNNER="qemu-riscv64 -L /usr/riscv64-linux-gnu -cpu rv64,zk=true,zbb=true,zbc=true" \ - OBJDUMP=riscv64-linux-gnu-objdump + CARGO_TARGET_RISCV64GC_UNKNOWN_LINUX_GNU_RUNNER="qemu-riscv64 \ + -L /usr/riscv64-linux-gnu \ + -cpu rv64,zk=true,zks=true,zbb=true,zbc=true \ + " \ + OBJDUMP=llvm-objdump diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/ci/run.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/ci/run.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/ci/run.sh 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/ci/run.sh 2023-12-21 16:55:32.000000000 +0000 @@ -33,6 +33,11 @@ i686-* | i586-*) export RUSTFLAGS="${RUSTFLAGS} -C relocation-model=static -Z plt=yes" ;; + # Some x86_64 targets enable by default more features beyond SSE2, + # which cause some instruction assertion checks to fail. + x86_64-*) + export RUSTFLAGS="${RUSTFLAGS} -C target-feature=-sse3" + ;; #Unoptimized build uses fast-isel which breaks with msa mips-* | mipsel-*) export RUSTFLAGS="${RUSTFLAGS} -C llvm-args=-fast-isel=false" @@ -47,7 +52,7 @@ # Some of our test dependencies use the deprecated `gcc` crates which # doesn't detect RISC-V compilers automatically, so do it manually here. 
riscv64*) - export RUSTFLAGS="${RUSTFLAGS} -Ctarget-feature=+zk,+zbb,+zbc" + export RUSTFLAGS="${RUSTFLAGS} -Ctarget-feature=+zk,+zks,+zbb,+zbc" export TARGET_CC="riscv64-linux-gnu-gcc" ;; esac diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/assert-instr-macro/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/assert-instr-macro/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/assert-instr-macro/Cargo.toml 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/assert-instr-macro/Cargo.toml 2023-12-21 16:55:32.000000000 +0000 @@ -11,4 +11,4 @@ [dependencies] proc-macro2 = "1.0" quote = "1.0" -syn = { version = "1.0", features = ["full"] } +syn = { version = "2.0", features = ["full"] } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/assert-instr-macro/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/assert-instr-macro/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/assert-instr-macro/src/lib.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/assert-instr-macro/src/lib.rs 2023-12-21 16:55:32.000000000 +0000 @@ -35,6 +35,15 @@ let instr = &invoc.instr; let name = &func.sig.ident; + let maybe_allow_deprecated = if func + .attrs + .iter() + .any(|attr| attr.path().is_ident("deprecated")) + { + quote! { #[allow(deprecated)] } + } else { + quote! {} + }; // Disable assert_instr for x86 targets compiled with avx enabled, which // causes LLVM to generate different intrinsics that the ones we are @@ -108,7 +117,7 @@ .attrs .iter() .filter(|attr| { - attr.path + attr.path() .segments .first() .expect("attr.path.segments.first() failed") @@ -135,6 +144,7 @@ let to_test = if disable_dedup_guard { quote! 
{ #attrs + #maybe_allow_deprecated #[no_mangle] #[inline(never)] pub unsafe extern #abi fn #shim_name(#(#inputs),*) #ret { @@ -147,6 +157,7 @@ const #shim_name_ptr : *const u8 = #shim_name_str.as_ptr(); #attrs + #maybe_allow_deprecated #[no_mangle] #[inline(never)] pub unsafe extern #abi fn #shim_name(#(#inputs),*) #ret { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/arm_shared/barrier/cp15.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/arm_shared/barrier/cp15.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/arm_shared/barrier/cp15.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/arm_shared/barrier/cp15.rs 2023-12-21 16:55:32.000000000 +0000 @@ -11,7 +11,8 @@ #[inline(always)] unsafe fn __dmb(&self) { asm!( - "mcr p15, 0, r0, c7, c10, 5", + "mcr p15, 0, {}, c7, c10, 5", + in(reg) 0_u32, options(preserves_flags, nostack) ) } @@ -21,7 +22,8 @@ #[inline(always)] unsafe fn __dsb(&self) { asm!( - "mcr p15, 0, r0, c7, c10, 4", + "mcr p15, 0, {}, c7, c10, 4", + in(reg) 0_u32, options(preserves_flags, nostack) ) } @@ -31,7 +33,8 @@ #[inline(always)] unsafe fn __isb(&self) { asm!( - "mcr p15, 0, r0, c7, c5, 4", + "mcr p15, 0, {}, c7, c5, 4", + in(reg) 0_u32, options(preserves_flags, nostack) ) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/arm_shared/neon/shift_and_insert_tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/arm_shared/neon/shift_and_insert_tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/arm_shared/neon/shift_and_insert_tests.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/arm_shared/neon/shift_and_insert_tests.rs 2023-12-21 16:55:32.000000000 +0000 @@ -22,7 +22,7 @@ let a = [$($a as $t),*]; let b = [$($b as $t),*]; let n_bit_mask: $t = (1 << $n) - 1; - let e = [$(($a as $t & n_bit_mask) | ($b as $t << $n)),*]; + let e = [$(($a as $t & n_bit_mask) | (($b as $t) << $n)),*]; let r = $fn_id::<$n>(transmute(a), transmute(b)); let mut d = e; d = transmute(r); @@ -60,7 +60,7 @@ unsafe fn $test_id() { let a = [$($a as $t),*]; let b = [$($b as $t),*]; - let n_bit_mask = ((1 as $t << $n) - 1).rotate_right($n); + let n_bit_mask = (((1 as $t) << $n) - 1).rotate_right($n); let e = [$(($a as $t & n_bit_mask) | (($b as $t >> $n) & !n_bit_mask)),*]; let r = $fn_id::<$n>(transmute(a), transmute(b)); let mut d = e; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv64/zk.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv64/zk.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv64/zk.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv64/zk.rs 2023-12-21 16:55:32.000000000 +0000 @@ -20,6 +20,9 @@ #[link_name = "llvm.riscv.aes64ks2"] fn _aes64ks2(rs1: i64, rs2: i64) -> i64; + #[link_name = "llvm.riscv.aes64im"] + fn _aes64im(rs1: i64) -> i64; + #[link_name = "llvm.riscv.sha512sig0"] fn _sha512sig0(rs1: i64) -> i64; @@ -50,8 +53,7 @@ /// /// This function is safe to use if the `zkne` target feature is present. 
#[target_feature(enable = "zkne")] -// See #1464 -// #[cfg_attr(test, assert_instr(aes64es))] +#[cfg_attr(test, assert_instr(aes64es))] #[inline] pub unsafe fn aes64es(rs1: u64, rs2: u64) -> u64 { _aes64es(rs1 as i64, rs2 as i64) as u64 @@ -74,8 +76,7 @@ /// /// This function is safe to use if the `zkne` target feature is present. #[target_feature(enable = "zkne")] -// See #1464 -// #[cfg_attr(test, assert_instr(aes64esm))] +#[cfg_attr(test, assert_instr(aes64esm))] #[inline] pub unsafe fn aes64esm(rs1: u64, rs2: u64) -> u64 { _aes64esm(rs1 as i64, rs2 as i64) as u64 @@ -98,8 +99,7 @@ /// /// This function is safe to use if the `zknd` target feature is present. #[target_feature(enable = "zknd")] -// See #1464 -// #[cfg_attr(test, assert_instr(aes64ds))] +#[cfg_attr(test, assert_instr(aes64ds))] #[inline] pub unsafe fn aes64ds(rs1: u64, rs2: u64) -> u64 { _aes64ds(rs1 as i64, rs2 as i64) as u64 @@ -122,8 +122,7 @@ /// /// This function is safe to use if the `zknd` target feature is present. #[target_feature(enable = "zknd")] -// See #1464 -// #[cfg_attr(test, assert_instr(aes64dsm))] +#[cfg_attr(test, assert_instr(aes64dsm))] #[inline] pub unsafe fn aes64dsm(rs1: u64, rs2: u64) -> u64 { _aes64dsm(rs1 as i64, rs2 as i64) as u64 @@ -152,8 +151,7 @@ /// This function is safe to use if the `zkne` or `zknd` target feature is present. #[target_feature(enable = "zkne", enable = "zknd")] #[rustc_legacy_const_generics(1)] -// See #1464 -// #[cfg_attr(test, assert_instr(aes64ks1i, RNUM = 0))] +#[cfg_attr(test, assert_instr(aes64ks1i, RNUM = 0))] #[inline] pub unsafe fn aes64ks1i(rs1: u64) -> u64 { static_assert!(RNUM <= 10); @@ -177,13 +175,36 @@ /// /// This function is safe to use if the `zkne` or `zknd` target feature is present. #[target_feature(enable = "zkne", enable = "zknd")] -// See #1464 -// #[cfg_attr(test, assert_instr(aes64ks2))] +#[cfg_attr(test, assert_instr(aes64ks2))] #[inline] pub unsafe fn aes64ks2(rs1: u64, rs2: u64) -> u64 { _aes64ks2(rs1 as i64, rs2 as i64) as u64 } +/// This instruction accelerates the inverse MixColumns step of the AES Block Cipher, and is used to aid creation of +/// the decryption KeySchedule. +/// +/// The instruction applies the inverse MixColumns transformation to two columns of the state array, packed +/// into a single 64-bit register. It is used to create the inverse cipher KeySchedule, according to the equivalent +/// inverse cipher construction in (Page 23, Section 5.3.5). This instruction must always be implemented +/// such that its execution latency does not depend on the data being operated on. +/// +/// Source: RISC-V Cryptography Extensions Volume I: Scalar & Entropy Source Instructions +/// +/// Version: v1.0.1 +/// +/// Section: 3.9 +/// +/// # Safety +/// +/// This function is safe to use if the `zkne` or `zknd` target feature is present. +#[target_feature(enable = "zkne", enable = "zknd")] +#[cfg_attr(test, assert_instr(aes64im))] +#[inline] +pub unsafe fn aes64im(rs1: u64) -> u64 { + _aes64im(rs1 as i64) as u64 +} + /// Implements the Sigma0 transformation function as used in the SHA2-512 hash function \[49\] /// (Section 4.1.3). /// @@ -201,8 +222,7 @@ /// /// This function is safe to use if the `zknh` target feature is present. 
#[target_feature(enable = "zknh")] -// See #1464 -// #[cfg_attr(test, assert_instr(sha512sig0))] +#[cfg_attr(test, assert_instr(sha512sig0))] #[inline] pub unsafe fn sha512sig0(rs1: u64) -> u64 { _sha512sig0(rs1 as i64) as u64 @@ -225,8 +245,7 @@ /// /// This function is safe to use if the `zknh` target feature is present. #[target_feature(enable = "zknh")] -// See #1464 -// #[cfg_attr(test, assert_instr(sha512sig1))] +#[cfg_attr(test, assert_instr(sha512sig1))] #[inline] pub unsafe fn sha512sig1(rs1: u64) -> u64 { _sha512sig1(rs1 as i64) as u64 @@ -249,8 +268,7 @@ /// /// This function is safe to use if the `zknh` target feature is present. #[target_feature(enable = "zknh")] -// See #1464 -// #[cfg_attr(test, assert_instr(sha512sum0))] +#[cfg_attr(test, assert_instr(sha512sum0))] #[inline] pub unsafe fn sha512sum0(rs1: u64) -> u64 { _sha512sum0(rs1 as i64) as u64 @@ -273,8 +291,7 @@ /// /// This function is safe to use if the `zknh` target feature is present. #[target_feature(enable = "zknh")] -// See #1464 -// #[cfg_attr(test, assert_instr(sha512sum1))] +#[cfg_attr(test, assert_instr(sha512sum1))] #[inline] pub unsafe fn sha512sum1(rs1: u64) -> u64 { _sha512sum1(rs1 as i64) as u64 diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv_shared/zb.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv_shared/zb.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv_shared/zb.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv_shared/zb.rs 2023-12-21 16:55:32.000000000 +0000 @@ -47,8 +47,7 @@ /// /// This function is safe to use if the `zbb` target feature is present. #[target_feature(enable = "zbb")] -// See #1464 -// #[cfg_attr(test, assert_instr(orc.b))] +#[cfg_attr(test, assert_instr(orc.b))] #[inline] pub unsafe fn orc_b(rs: usize) -> usize { #[cfg(target_arch = "riscv32")] @@ -76,8 +75,7 @@ /// /// This function is safe to use if the `zbc` target feature is present. #[target_feature(enable = "zbc")] -// See #1464 -// #[cfg_attr(test, assert_instr(clmul))] +#[cfg_attr(test, assert_instr(clmul))] #[inline] pub unsafe fn clmul(rs1: usize, rs2: usize) -> usize { #[cfg(target_arch = "riscv32")] @@ -105,8 +103,7 @@ /// /// This function is safe to use if the `zbc` target feature is present. #[target_feature(enable = "zbc")] -// See #1464 -// #[cfg_attr(test, assert_instr(clmulh))] +#[cfg_attr(test, assert_instr(clmulh))] #[inline] pub unsafe fn clmulh(rs1: usize, rs2: usize) -> usize { #[cfg(target_arch = "riscv32")] @@ -134,8 +131,7 @@ /// /// This function is safe to use if the `zbc` target feature is present. #[target_feature(enable = "zbc")] -// See #1464 -// #[cfg_attr(test, assert_instr(clmulr))] +#[cfg_attr(test, assert_instr(clmulr))] #[inline] pub unsafe fn clmulr(rs1: usize, rs2: usize) -> usize { #[cfg(target_arch = "riscv32")] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv_shared/zk.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv_shared/zk.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv_shared/zk.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/riscv_shared/zk.rs 2023-12-21 16:55:32.000000000 +0000 @@ -62,8 +62,7 @@ /// /// This function is safe to use if the `zbkx` target feature is present. 
#[target_feature(enable = "zbkx")] -// See #1464 -// #[cfg_attr(test, assert_instr(xperm8))] +#[cfg_attr(test, assert_instr(xperm8))] #[inline] pub unsafe fn xperm8(rs1: usize, rs2: usize) -> usize { #[cfg(target_arch = "riscv32")] @@ -94,8 +93,7 @@ /// /// This function is safe to use if the `zbkx` target feature is present. #[target_feature(enable = "zbkx")] -// See #1464 -// #[cfg_attr(test, assert_instr(xperm4))] +#[cfg_attr(test, assert_instr(xperm4))] #[inline] pub unsafe fn xperm4(rs1: usize, rs2: usize) -> usize { #[cfg(target_arch = "riscv32")] @@ -129,8 +127,7 @@ /// /// This function is safe to use if the `zknh` target feature is present. #[target_feature(enable = "zknh")] -// See #1464 -// #[cfg_attr(test, assert_instr(sha256sig0))] +#[cfg_attr(test, assert_instr(sha256sig0))] #[inline] pub unsafe fn sha256sig0(rs1: u32) -> u32 { _sha256sig0(rs1 as i32) as u32 @@ -156,8 +153,7 @@ /// /// This function is safe to use if the `zknh` target feature is present. #[target_feature(enable = "zknh")] -// See #1464 -// #[cfg_attr(test, assert_instr(sha256sig1))] +#[cfg_attr(test, assert_instr(sha256sig1))] #[inline] pub unsafe fn sha256sig1(rs1: u32) -> u32 { _sha256sig1(rs1 as i32) as u32 @@ -183,8 +179,7 @@ /// /// This function is safe to use if the `zknh` target feature is present. #[target_feature(enable = "zknh")] -// See #1464 -// #[cfg_attr(test, assert_instr(sha256sum0))] +#[cfg_attr(test, assert_instr(sha256sum0))] #[inline] pub unsafe fn sha256sum0(rs1: u32) -> u32 { _sha256sum0(rs1 as i32) as u32 @@ -210,8 +205,7 @@ /// /// This function is safe to use if the `zknh` target feature is present. #[target_feature(enable = "zknh")] -// See #1464 -// #[cfg_attr(test, assert_instr(sha256sum1))] +#[cfg_attr(test, assert_instr(sha256sum1))] #[inline] pub unsafe fn sha256sum1(rs1: u32) -> u32 { _sha256sum1(rs1 as i32) as u32 @@ -288,8 +282,7 @@ /// ``` #[target_feature(enable = "zksed")] #[rustc_legacy_const_generics(2)] -// See #1464 -// #[cfg_attr(test, assert_instr(sm4ed, BS = 0))] +#[cfg_attr(test, assert_instr(sm4ed, BS = 0))] #[inline] pub unsafe fn sm4ed(rs1: u32, rs2: u32) -> u32 { static_assert!(BS < 4); @@ -368,8 +361,7 @@ /// ``` #[target_feature(enable = "zksed")] #[rustc_legacy_const_generics(2)] -// See #1464 -// #[cfg_attr(test, assert_instr(sm4ks, BS = 0))] +#[cfg_attr(test, assert_instr(sm4ks, BS = 0))] #[inline] pub unsafe fn sm4ks(rs1: u32, rs2: u32) -> u32 { static_assert!(BS < 4); @@ -409,8 +401,7 @@ /// compression function `CF` uses the intermediate value `TT2` to calculate /// the variable `E` in one iteration for subsequent processes. 
#[target_feature(enable = "zksh")] -// See #1464 -// #[cfg_attr(test, assert_instr(sm3p0))] +#[cfg_attr(test, assert_instr(sm3p0))] #[inline] pub unsafe fn sm3p0(rs1: u32) -> u32 { _sm3p0(rs1 as i32) as u32 @@ -454,8 +445,7 @@ /// ENDFOR /// ``` #[target_feature(enable = "zksh")] -// See #1464 -// #[cfg_attr(test, assert_instr(sm3p1))] +#[cfg_attr(test, assert_instr(sm3p1))] #[inline] pub unsafe fn sm3p1(rs1: u32) -> u32 { _sm3p1(rs1 as i32) as u32 diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx.rs 2023-12-21 16:55:32.000000000 +0000 @@ -268,7 +268,11 @@ #[cfg_attr(test, assert_instr(vaddsubpd))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_addsub_pd(a: __m256d, b: __m256d) -> __m256d { - addsubpd256(a, b) + let a = a.as_f64x4(); + let b = b.as_f64x4(); + let add = simd_add(a, b); + let sub = simd_sub(a, b); + simd_shuffle!(add, sub, [4, 1, 6, 3]) } /// Alternatively adds and subtracts packed single-precision (32-bit) @@ -280,7 +284,11 @@ #[cfg_attr(test, assert_instr(vaddsubps))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_addsub_ps(a: __m256, b: __m256) -> __m256 { - addsubps256(a, b) + let a = a.as_f32x8(); + let b = b.as_f32x8(); + let add = simd_add(a, b); + let sub = simd_sub(a, b); + simd_shuffle!(add, sub, [8, 1, 10, 3, 12, 5, 14, 7]) } /// Subtracts packed double-precision (64-bit) floating-point elements in `b` @@ -511,7 +519,8 @@ #[cfg_attr(test, assert_instr(vblendvpd))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_blendv_pd(a: __m256d, b: __m256d, c: __m256d) -> __m256d { - vblendvpd(a, b, c) + let mask: i64x4 = simd_lt(transmute::<_, i64x4>(c), i64x4::splat(0)); + transmute(simd_select(mask, b.as_f64x4(), a.as_f64x4())) } /// Blends packed single-precision (32-bit) floating-point elements from @@ -523,7 +532,8 @@ #[cfg_attr(test, assert_instr(vblendvps))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_blendv_ps(a: __m256, b: __m256, c: __m256) -> __m256 { - vblendvps(a, b, c) + let mask: i32x8 = simd_lt(transmute::<_, i32x8>(c), i32x8::splat(0)); + transmute(simd_select(mask, b.as_f32x8(), a.as_f32x8())) } /// Conditionally multiplies the packed single-precision (32-bit) floating-point @@ -2056,7 +2066,10 @@ #[cfg_attr(test, assert_instr(vmovmskpd))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_movemask_pd(a: __m256d) -> i32 { - movmskpd256(a) + // Propagate the highest bit to the rest, because simd_bitmask + // requires all-1 or all-0. + let mask: i64x4 = simd_lt(transmute(a), i64x4::splat(0)); + simd_bitmask::(mask).into() } /// Sets each bit of the returned mask based on the most significant bit of the @@ -2069,7 +2082,10 @@ #[cfg_attr(test, assert_instr(vmovmskps))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_movemask_ps(a: __m256) -> i32 { - movmskps256(a) + // Propagate the highest bit to the rest, because simd_bitmask + // requires all-1 or all-0. + let mask: i32x8 = simd_lt(transmute(a), i32x8::splat(0)); + simd_bitmask::(mask).into() } /// Returns vector of type __m256d with all elements set to zero. 
@@ -2904,20 +2920,12 @@ // LLVM intrinsics used in the above functions #[allow(improper_ctypes)] extern "C" { - #[link_name = "llvm.x86.avx.addsub.pd.256"] - fn addsubpd256(a: __m256d, b: __m256d) -> __m256d; - #[link_name = "llvm.x86.avx.addsub.ps.256"] - fn addsubps256(a: __m256, b: __m256) -> __m256; #[link_name = "llvm.x86.avx.round.pd.256"] fn roundpd256(a: __m256d, b: i32) -> __m256d; #[link_name = "llvm.x86.avx.round.ps.256"] fn roundps256(a: __m256, b: i32) -> __m256; #[link_name = "llvm.x86.avx.sqrt.ps.256"] fn sqrtps256(a: __m256) -> __m256; - #[link_name = "llvm.x86.avx.blendv.pd.256"] - fn vblendvpd(a: __m256d, b: __m256d, c: __m256d) -> __m256d; - #[link_name = "llvm.x86.avx.blendv.ps.256"] - fn vblendvps(a: __m256, b: __m256, c: __m256) -> __m256; #[link_name = "llvm.x86.avx.dp.ps.256"] fn vdpps(a: __m256, b: __m256, imm8: i32) -> __m256; #[link_name = "llvm.x86.avx.hadd.pd.256"] @@ -3026,10 +3034,6 @@ fn vtestcps(a: __m128, b: __m128) -> i32; #[link_name = "llvm.x86.avx.vtestnzc.ps"] fn vtestnzcps(a: __m128, b: __m128) -> i32; - #[link_name = "llvm.x86.avx.movmsk.pd.256"] - fn movmskpd256(a: __m256d) -> i32; - #[link_name = "llvm.x86.avx.movmsk.ps.256"] - fn movmskps256(a: __m256) -> i32; #[link_name = "llvm.x86.avx.min.ps.256"] fn vminps(a: __m256, b: __m256) -> __m256; #[link_name = "llvm.x86.avx.max.ps.256"] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx2.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx2.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx2.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx2.rs 2023-12-21 16:55:32.000000000 +0000 @@ -344,7 +344,10 @@ #[cfg_attr(test, assert_instr(vpavgw))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_avg_epu16(a: __m256i, b: __m256i) -> __m256i { - transmute(pavgw(a.as_u16x16(), b.as_u16x16())) + let a = simd_cast::<_, u32x16>(a.as_u16x16()); + let b = simd_cast::<_, u32x16>(b.as_u16x16()); + let r = simd_shr(simd_add(simd_add(a, b), u32x16::splat(1)), u32x16::splat(1)); + transmute(simd_cast::<_, u16x16>(r)) } /// Averages packed unsigned 8-bit integers in `a` and `b`. @@ -355,7 +358,10 @@ #[cfg_attr(test, assert_instr(vpavgb))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_avg_epu8(a: __m256i, b: __m256i) -> __m256i { - transmute(pavgb(a.as_u8x32(), b.as_u8x32())) + let a = simd_cast::<_, u16x32>(a.as_u8x32()); + let b = simd_cast::<_, u16x32>(b.as_u8x32()); + let r = simd_shr(simd_add(simd_add(a, b), u16x32::splat(1)), u16x32::splat(1)); + transmute(simd_cast::<_, u8x32>(r)) } /// Blends packed 32-bit integers from `a` and `b` using control mask `IMM4`. 
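For reference, the _mm256_avg_epu16/_mm256_avg_epu8 rewrite above computes the same rounding average as the removed llvm.x86.avx2.pavg.* intrinsics, just spelled with widening casts, an add of 1 and a shift right by 1. A minimal scalar sketch of that formula (the helper name and test values are illustrative only):

// Rounding average of two u8 lanes, computed in a wider type so the
// intermediate sum cannot overflow: (a + b + 1) >> 1.
fn avg_epu8_ref(a: u8, b: u8) -> u8 {
    ((a as u16 + b as u16 + 1) >> 1) as u8
}

fn main() {
    assert_eq!(avg_epu8_ref(0, 1), 1);       // rounds up on ties
    assert_eq!(avg_epu8_ref(254, 255), 255); // widening avoids overflow
    assert_eq!(avg_epu8_ref(10, 20), 15);
}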
@@ -458,7 +464,8 @@ #[cfg_attr(test, assert_instr(vpblendvb))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_blendv_epi8(a: __m256i, b: __m256i, mask: __m256i) -> __m256i { - transmute(pblendvb(a.as_i8x32(), b.as_i8x32(), mask.as_i8x32())) + let mask: i8x32 = simd_lt(mask.as_i8x32(), i8x32::splat(0)); + transmute(simd_select(mask, b.as_i8x32(), a.as_i8x32())) } /// Broadcasts the low packed 8-bit integer from `a` to all elements of @@ -2060,7 +2067,9 @@ #[cfg_attr(test, assert_instr(vpmuldq))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_mul_epi32(a: __m256i, b: __m256i) -> __m256i { - transmute(pmuldq(a.as_i32x8(), b.as_i32x8())) + let a = simd_cast::<_, i64x4>(simd_cast::<_, i32x4>(a.as_i64x4())); + let b = simd_cast::<_, i64x4>(simd_cast::<_, i32x4>(b.as_i64x4())); + transmute(simd_mul(a, b)) } /// Multiplies the low unsigned 32-bit integers from each packed 64-bit @@ -2074,7 +2083,10 @@ #[cfg_attr(test, assert_instr(vpmuludq))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_mul_epu32(a: __m256i, b: __m256i) -> __m256i { - transmute(pmuludq(a.as_u32x8(), b.as_u32x8())) + let a = a.as_u64x4(); + let b = b.as_u64x4(); + let mask = u64x4::splat(u32::MAX.into()); + transmute(simd_mul(simd_and(a, mask), simd_and(b, mask))) } /// Multiplies the packed 16-bit integers in `a` and `b`, producing @@ -2087,7 +2099,10 @@ #[cfg_attr(test, assert_instr(vpmulhw))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_mulhi_epi16(a: __m256i, b: __m256i) -> __m256i { - transmute(pmulhw(a.as_i16x16(), b.as_i16x16())) + let a = simd_cast::<_, i32x16>(a.as_i16x16()); + let b = simd_cast::<_, i32x16>(b.as_i16x16()); + let r = simd_shr(simd_mul(a, b), i32x16::splat(16)); + transmute(simd_cast::<i32x16, i16x16>(r)) } /// Multiplies the packed unsigned 16-bit integers in `a` and `b`, producing @@ -2100,7 +2115,10 @@ #[cfg_attr(test, assert_instr(vpmulhuw))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm256_mulhi_epu16(a: __m256i, b: __m256i) -> __m256i { - transmute(pmulhuw(a.as_u16x16(), b.as_u16x16())) + let a = simd_cast::<_, u32x16>(a.as_u16x16()); + let b = simd_cast::<_, u32x16>(b.as_u16x16()); + let r = simd_shr(simd_mul(a, b), u32x16::splat(16)); + transmute(simd_cast::<u32x16, u16x16>(r)) } /// Multiplies the packed 16-bit integers in `a` and `b`, producing @@ -3629,12 +3647,6 @@ fn pabsw(a: i16x16) -> u16x16; #[link_name = "llvm.x86.avx2.pabs.d"] fn pabsd(a: i32x8) -> u32x8; - #[link_name = "llvm.x86.avx2.pavg.b"] - fn pavgb(a: u8x32, b: u8x32) -> u8x32; - #[link_name = "llvm.x86.avx2.pavg.w"] - fn pavgw(a: u16x16, b: u16x16) -> u16x16; - #[link_name = "llvm.x86.avx2.pblendvb"] - fn pblendvb(a: i8x32, b: i8x32, mask: i8x32) -> i8x32; #[link_name = "llvm.x86.avx2.phadd.w"] fn phaddw(a: i16x16, b: i16x16) -> i16x16; #[link_name = "llvm.x86.avx2.phadd.d"] @@ -3669,14 +3681,6 @@ fn maskstoreq256(mem_addr: *mut i8, mask: i64x4, a: i64x4); #[link_name = "llvm.x86.avx2.mpsadbw"] fn mpsadbw(a: u8x32, b: u8x32, imm8: i32) -> u16x16; - #[link_name = "llvm.x86.avx2.pmulhu.w"] - fn pmulhuw(a: u16x16, b: u16x16) -> u16x16; - #[link_name = "llvm.x86.avx2.pmulh.w"] - fn pmulhw(a: i16x16, b: i16x16) -> i16x16; - #[link_name = "llvm.x86.avx2.pmul.dq"] - fn pmuldq(a: i32x8, b: i32x8) -> i64x4; - #[link_name = "llvm.x86.avx2.pmulu.dq"] - fn pmuludq(a: u32x8, b: u32x8) -> u64x4; #[link_name = "llvm.x86.avx2.pmul.hr.sw"] fn pmulhrsw(a: i16x16, b: i16x16) -> i16x16; #[link_name = "llvm.x86.avx2.packsswb"] diff -Nru
rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512bitalg.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512bitalg.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512bitalg.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512bitalg.rs 2023-12-21 16:55:32.000000000 +0000 @@ -311,7 +311,7 @@ #[target_feature(enable = "avx512bitalg")] #[cfg_attr(test, assert_instr(vpshufbitqmb))] pub unsafe fn _mm512_bitshuffle_epi64_mask(b: __m512i, c: __m512i) -> __mmask64 { - transmute(bitshuffle_512(b.as_i8x64(), c.as_i8x64(), !0)) + bitshuffle_512(b.as_i8x64(), c.as_i8x64(), !0) } /// Considers the input `b` as packed 64-bit integers and `c` as packed 8-bit integers. @@ -326,7 +326,7 @@ #[target_feature(enable = "avx512bitalg")] #[cfg_attr(test, assert_instr(vpshufbitqmb))] pub unsafe fn _mm512_mask_bitshuffle_epi64_mask(k: __mmask64, b: __m512i, c: __m512i) -> __mmask64 { - transmute(bitshuffle_512(b.as_i8x64(), c.as_i8x64(), k)) + bitshuffle_512(b.as_i8x64(), c.as_i8x64(), k) } /// Considers the input `b` as packed 64-bit integers and `c` as packed 8-bit integers. @@ -338,7 +338,7 @@ #[target_feature(enable = "avx512bitalg,avx512vl")] #[cfg_attr(test, assert_instr(vpshufbitqmb))] pub unsafe fn _mm256_bitshuffle_epi64_mask(b: __m256i, c: __m256i) -> __mmask32 { - transmute(bitshuffle_256(b.as_i8x32(), c.as_i8x32(), !0)) + bitshuffle_256(b.as_i8x32(), c.as_i8x32(), !0) } /// Considers the input `b` as packed 64-bit integers and `c` as packed 8-bit integers. @@ -353,7 +353,7 @@ #[target_feature(enable = "avx512bitalg,avx512vl")] #[cfg_attr(test, assert_instr(vpshufbitqmb))] pub unsafe fn _mm256_mask_bitshuffle_epi64_mask(k: __mmask32, b: __m256i, c: __m256i) -> __mmask32 { - transmute(bitshuffle_256(b.as_i8x32(), c.as_i8x32(), k)) + bitshuffle_256(b.as_i8x32(), c.as_i8x32(), k) } /// Considers the input `b` as packed 64-bit integers and `c` as packed 8-bit integers. @@ -365,7 +365,7 @@ #[target_feature(enable = "avx512bitalg,avx512vl")] #[cfg_attr(test, assert_instr(vpshufbitqmb))] pub unsafe fn _mm_bitshuffle_epi64_mask(b: __m128i, c: __m128i) -> __mmask16 { - transmute(bitshuffle_128(b.as_i8x16(), c.as_i8x16(), !0)) + bitshuffle_128(b.as_i8x16(), c.as_i8x16(), !0) } /// Considers the input `b` as packed 64-bit integers and `c` as packed 8-bit integers. 
@@ -380,7 +380,7 @@ #[target_feature(enable = "avx512bitalg,avx512vl")] #[cfg_attr(test, assert_instr(vpshufbitqmb))] pub unsafe fn _mm_mask_bitshuffle_epi64_mask(k: __mmask16, b: __m128i, c: __m128i) -> __mmask16 { - transmute(bitshuffle_128(b.as_i8x16(), c.as_i8x16(), k)) + bitshuffle_128(b.as_i8x16(), c.as_i8x16(), k) } #[cfg(test)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512bw.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512bw.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512bw.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512bw.rs 2023-12-21 16:55:32.000000000 +0000 @@ -3703,8 +3703,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u16x32(); let b = b.as_u16x32(); - let r = vpcmpuw(a, b, IMM8, 0b11111111_11111111_11111111_11111111); - transmute(r) + vpcmpuw(a, b, IMM8, 0b11111111_11111111_11111111_11111111) } /// Compare packed unsigned 16-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -3722,8 +3721,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u16x32(); let b = b.as_u16x32(); - let r = vpcmpuw(a, b, IMM8, k1); - transmute(r) + vpcmpuw(a, b, IMM8, k1) } /// Compare packed unsigned 16-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k. @@ -3737,8 +3735,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u16x16(); let b = b.as_u16x16(); - let r = vpcmpuw256(a, b, IMM8, 0b11111111_11111111); - transmute(r) + vpcmpuw256(a, b, IMM8, 0b11111111_11111111) } /// Compare packed unsigned 16-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -3756,8 +3753,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u16x16(); let b = b.as_u16x16(); - let r = vpcmpuw256(a, b, IMM8, k1); - transmute(r) + vpcmpuw256(a, b, IMM8, k1) } /// Compare packed unsigned 16-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k. @@ -3771,8 +3767,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u16x8(); let b = b.as_u16x8(); - let r = vpcmpuw128(a, b, IMM8, 0b11111111); - transmute(r) + vpcmpuw128(a, b, IMM8, 0b11111111) } /// Compare packed unsigned 16-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -3790,8 +3785,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u16x8(); let b = b.as_u16x8(); - let r = vpcmpuw128(a, b, IMM8, k1); - transmute(r) + vpcmpuw128(a, b, IMM8, k1) } /// Compare packed unsigned 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k. 
@@ -3805,13 +3799,12 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u8x64(); let b = b.as_u8x64(); - let r = vpcmpub( + vpcmpub( a, b, IMM8, 0b11111111_11111111_11111111_11111111_11111111_11111111_11111111_11111111, - ); - transmute(r) + ) } /// Compare packed unsigned 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -3829,8 +3822,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u8x64(); let b = b.as_u8x64(); - let r = vpcmpub(a, b, IMM8, k1); - transmute(r) + vpcmpub(a, b, IMM8, k1) } /// Compare packed unsigned 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k. @@ -3844,8 +3836,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u8x32(); let b = b.as_u8x32(); - let r = vpcmpub256(a, b, IMM8, 0b11111111_11111111_11111111_11111111); - transmute(r) + vpcmpub256(a, b, IMM8, 0b11111111_11111111_11111111_11111111) } /// Compare packed unsigned 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -3863,8 +3854,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u8x32(); let b = b.as_u8x32(); - let r = vpcmpub256(a, b, IMM8, k1); - transmute(r) + vpcmpub256(a, b, IMM8, k1) } /// Compare packed unsigned 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k. @@ -3878,8 +3868,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u8x16(); let b = b.as_u8x16(); - let r = vpcmpub128(a, b, IMM8, 0b11111111_11111111); - transmute(r) + vpcmpub128(a, b, IMM8, 0b11111111_11111111) } /// Compare packed unsigned 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -3897,8 +3886,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_u8x16(); let b = b.as_u8x16(); - let r = vpcmpub128(a, b, IMM8, k1); - transmute(r) + vpcmpub128(a, b, IMM8, k1) } /// Compare packed signed 16-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k. @@ -3912,8 +3900,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i16x32(); let b = b.as_i16x32(); - let r = vpcmpw(a, b, IMM8, 0b11111111_11111111_11111111_11111111); - transmute(r) + vpcmpw(a, b, IMM8, 0b11111111_11111111_11111111_11111111) } /// Compare packed signed 16-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -3931,8 +3918,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i16x32(); let b = b.as_i16x32(); - let r = vpcmpw(a, b, IMM8, k1); - transmute(r) + vpcmpw(a, b, IMM8, k1) } /// Compare packed signed 16-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k. 
@@ -3946,8 +3932,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i16x16(); let b = b.as_i16x16(); - let r = vpcmpw256(a, b, IMM8, 0b11111111_11111111); - transmute(r) + vpcmpw256(a, b, IMM8, 0b11111111_11111111) } /// Compare packed signed 16-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -3965,8 +3950,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i16x16(); let b = b.as_i16x16(); - let r = vpcmpw256(a, b, IMM8, k1); - transmute(r) + vpcmpw256(a, b, IMM8, k1) } /// Compare packed signed 16-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k. @@ -3980,8 +3964,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i16x8(); let b = b.as_i16x8(); - let r = vpcmpw128(a, b, IMM8, 0b11111111); - transmute(r) + vpcmpw128(a, b, IMM8, 0b11111111) } /// Compare packed signed 16-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -3999,8 +3982,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i16x8(); let b = b.as_i16x8(); - let r = vpcmpw128(a, b, IMM8, k1); - transmute(r) + vpcmpw128(a, b, IMM8, k1) } /// Compare packed signed 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k. @@ -4014,13 +3996,12 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i8x64(); let b = b.as_i8x64(); - let r = vpcmpb( + vpcmpb( a, b, IMM8, 0b11111111_11111111_11111111_11111111_11111111_11111111_11111111_11111111, - ); - transmute(r) + ) } /// Compare packed signed 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -4038,8 +4019,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i8x64(); let b = b.as_i8x64(); - let r = vpcmpb(a, b, IMM8, k1); - transmute(r) + vpcmpb(a, b, IMM8, k1) } /// Compare packed signed 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k. @@ -4053,8 +4033,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i8x32(); let b = b.as_i8x32(); - let r = vpcmpb256(a, b, IMM8, 0b11111111_11111111_11111111_11111111); - transmute(r) + vpcmpb256(a, b, IMM8, 0b11111111_11111111_11111111_11111111) } /// Compare packed signed 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -4072,8 +4051,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i8x32(); let b = b.as_i8x32(); - let r = vpcmpb256(a, b, IMM8, k1); - transmute(r) + vpcmpb256(a, b, IMM8, k1) } /// Compare packed signed 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k. 
@@ -4087,8 +4065,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i8x16(); let b = b.as_i8x16(); - let r = vpcmpb128(a, b, IMM8, 0b11111111_11111111); - transmute(r) + vpcmpb128(a, b, IMM8, 0b11111111_11111111) } /// Compare packed signed 8-bit integers in a and b based on the comparison operand specified by imm8, and store the results in mask vector k using zeromask k1 (elements are zeroed out when the corresponding mask bit is not set). @@ -4106,8 +4083,7 @@ static_assert_uimm_bits!(IMM8, 3); let a = a.as_i8x16(); let b = b.as_i8x16(); - let r = vpcmpb128(a, b, IMM8, k1); - transmute(r) + vpcmpb128(a, b, IMM8, k1) } /// Load 512-bits (composed of 32 packed 16-bit integers) from memory into dst. mem_addr does not need to be aligned on any particular boundary. @@ -8566,7 +8542,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kadd_mask32(a: __mmask32, b: __mmask32) -> __mmask32 { - transmute(a + b) + a + b } /// Add 64-bit masks in a and b, and store the result in k. @@ -8575,7 +8551,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kadd_mask64(a: __mmask64, b: __mmask64) -> __mmask64 { - transmute(a + b) + a + b } /// Compute the bitwise AND of 32-bit masks a and b, and store the result in k. @@ -8584,7 +8560,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kand_mask32(a: __mmask32, b: __mmask32) -> __mmask32 { - transmute(a & b) + a & b } /// Compute the bitwise AND of 64-bit masks a and b, and store the result in k. @@ -8593,7 +8569,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kand_mask64(a: __mmask64, b: __mmask64) -> __mmask64 { - transmute(a & b) + a & b } /// Compute the bitwise NOT of 32-bit mask a, and store the result in k. @@ -8602,7 +8578,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _knot_mask32(a: __mmask32) -> __mmask32 { - transmute(a ^ 0b11111111_11111111_11111111_11111111) + a ^ 0b11111111_11111111_11111111_11111111 } /// Compute the bitwise NOT of 64-bit mask a, and store the result in k. @@ -8611,7 +8587,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _knot_mask64(a: __mmask64) -> __mmask64 { - transmute(a ^ 0b11111111_11111111_11111111_11111111_11111111_11111111_11111111_11111111) + a ^ 0b11111111_11111111_11111111_11111111_11111111_11111111_11111111_11111111 } /// Compute the bitwise NOT of 32-bit masks a and then AND with b, and store the result in k. @@ -8620,7 +8596,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kandn_mask32(a: __mmask32, b: __mmask32) -> __mmask32 { - transmute(_knot_mask32(a) & b) + _knot_mask32(a) & b } /// Compute the bitwise NOT of 64-bit masks a and then AND with b, and store the result in k. @@ -8629,7 +8605,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kandn_mask64(a: __mmask64, b: __mmask64) -> __mmask64 { - transmute(_knot_mask64(a) & b) + _knot_mask64(a) & b } /// Compute the bitwise OR of 32-bit masks a and b, and store the result in k. @@ -8638,7 +8614,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kor_mask32(a: __mmask32, b: __mmask32) -> __mmask32 { - transmute(a | b) + a | b } /// Compute the bitwise OR of 64-bit masks a and b, and store the result in k. @@ -8647,7 +8623,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kor_mask64(a: __mmask64, b: __mmask64) -> __mmask64 { - transmute(a | b) + a | b } /// Compute the bitwise XOR of 32-bit masks a and b, and store the result in k. 
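The _kadd/_kand/_knot/_kandn/_kor/_kxor mask changes in the hunks just above and continuing below drop transmute calls that were converting a value to its own type: the AVX-512 mask types are plain integer aliases (for example, __mmask32 is a u32 in core::arch), so these operations are ordinary integer arithmetic. A tiny sketch under that assumption (the Mask32 alias and helper name are made up for illustration):

// Stand-in for core::arch::x86_64::__mmask32, which is a plain u32.
type Mask32 = u32;

// What _kand_mask32 boils down to once the no-op transmute is gone.
fn kand_mask32_ref(a: Mask32, b: Mask32) -> Mask32 {
    a & b
}

fn main() {
    assert_eq!(kand_mask32_ref(0b1010, 0b0110), 0b0010);
    // The knot variant is the same thing as XOR with an all-ones mask.
    assert_eq!(0b1010u32 ^ u32::MAX, !0b1010u32);
}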
@@ -8656,7 +8632,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kxor_mask32(a: __mmask32, b: __mmask32) -> __mmask32 { - transmute(a ^ b) + a ^ b } /// Compute the bitwise XOR of 64-bit masks a and b, and store the result in k. @@ -8665,7 +8641,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kxor_mask64(a: __mmask64, b: __mmask64) -> __mmask64 { - transmute(a ^ b) + a ^ b } /// Compute the bitwise XNOR of 32-bit masks a and b, and store the result in k. @@ -8674,7 +8650,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kxnor_mask32(a: __mmask32, b: __mmask32) -> __mmask32 { - transmute(_knot_mask32(a ^ b)) + _knot_mask32(a ^ b) } /// Compute the bitwise XNOR of 64-bit masks a and b, and store the result in k. @@ -8683,7 +8659,7 @@ #[inline] #[target_feature(enable = "avx512bw")] pub unsafe fn _kxnor_mask64(a: __mmask64, b: __mmask64) -> __mmask64 { - transmute(_knot_mask64(a ^ b)) + _knot_mask64(a ^ b) } /// Convert packed 16-bit integers in a to packed 8-bit integers with truncation, and store the results in dst. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512f.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512f.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512f.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/avx512f.rs 2023-12-21 16:55:32.000000000 +0000 @@ -17144,7 +17144,7 @@ if IMM8 >= 32 { _mm512_setzero_si512() } else { - transmute(simd_shl(a.as_u32x16(), u32x16::splat(IMM8 as u32))) + transmute(simd_shl(a.as_u32x16(), u32x16::splat(IMM8))) } } @@ -20132,7 +20132,7 @@ #[target_feature(enable = "avx512f,avx512vl")] #[cfg_attr(test, assert_instr(vperm))] //should be vpermd pub unsafe fn _mm256_permutexvar_epi32(idx: __m256i, a: __m256i) -> __m256i { - transmute(_mm256_permutevar8x32_epi32(a, idx)) // llvm use llvm.x86.avx2.permd + _mm256_permutevar8x32_epi32(a, idx) // llvm use llvm.x86.avx2.permd } /// Shuffle 32-bit integers in a across lanes using the corresponding index in idx, and store the results in dst using writemask k (elements are copied from src when the corresponding mask bit is not set). @@ -20284,7 +20284,7 @@ #[target_feature(enable = "avx512f,avx512vl")] #[cfg_attr(test, assert_instr(vpermps))] pub unsafe fn _mm256_permutexvar_ps(idx: __m256i, a: __m256) -> __m256 { - transmute(_mm256_permutevar8x32_ps(a, idx)) //llvm.x86.avx2.permps + _mm256_permutevar8x32_ps(a, idx) //llvm.x86.avx2.permps } /// Shuffle single-precision (32-bit) floating-point elements in a across lanes using the corresponding index in idx, and store the results in dst using writemask k (elements are copied from src when the corresponding mask bit is not set). @@ -23943,7 +23943,7 @@ #[cfg_attr(all(test, not(target_os = "windows")), assert_instr(vmovd))] pub unsafe fn _mm512_cvtsi512_si32(a: __m512i) -> i32 { let extract: i32 = simd_extract(a.as_i32x16(), 0); - transmute(extract) + extract } /// Broadcast the low packed 32-bit integer from a to all elements of dst. @@ -25744,7 +25744,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(and))] // generate normal and code instead of kandw pub unsafe fn _kand_mask16(a: __mmask16, b: __mmask16) -> __mmask16 { - transmute(a & b) + a & b } /// Compute the bitwise AND of 16-bit masks a and b, and store the result in k. 
@@ -25754,7 +25754,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(and))] // generate normal and code instead of kandw pub unsafe fn _mm512_kand(a: __mmask16, b: __mmask16) -> __mmask16 { - transmute(a & b) + a & b } /// Compute the bitwise OR of 16-bit masks a and b, and store the result in k. @@ -25764,7 +25764,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(or))] // generate normal or code instead of korw pub unsafe fn _kor_mask16(a: __mmask16, b: __mmask16) -> __mmask16 { - transmute(a | b) + a | b } /// Compute the bitwise OR of 16-bit masks a and b, and store the result in k. @@ -25774,7 +25774,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(or))] // generate normal or code instead of korw pub unsafe fn _mm512_kor(a: __mmask16, b: __mmask16) -> __mmask16 { - transmute(a | b) + a | b } /// Compute the bitwise XOR of 16-bit masks a and b, and store the result in k. @@ -25784,7 +25784,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(xor))] // generate normal xor code instead of kxorw pub unsafe fn _kxor_mask16(a: __mmask16, b: __mmask16) -> __mmask16 { - transmute(a ^ b) + a ^ b } /// Compute the bitwise XOR of 16-bit masks a and b, and store the result in k. @@ -25794,7 +25794,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(xor))] // generate normal xor code instead of kxorw pub unsafe fn _mm512_kxor(a: __mmask16, b: __mmask16) -> __mmask16 { - transmute(a ^ b) + a ^ b } /// Compute the bitwise NOT of 16-bit mask a, and store the result in k. @@ -25803,7 +25803,7 @@ #[inline] #[target_feature(enable = "avx512f")] pub unsafe fn _knot_mask16(a: __mmask16) -> __mmask16 { - transmute(a ^ 0b11111111_11111111) + a ^ 0b11111111_11111111 } /// Compute the bitwise NOT of 16-bit mask a, and store the result in k. @@ -25812,7 +25812,7 @@ #[inline] #[target_feature(enable = "avx512f")] pub unsafe fn _mm512_knot(a: __mmask16) -> __mmask16 { - transmute(a ^ 0b11111111_11111111) + a ^ 0b11111111_11111111 } /// Compute the bitwise NOT of 16-bit masks a and then AND with b, and store the result in k. @@ -25862,8 +25862,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(mov))] // generate normal and code instead of kmovw pub unsafe fn _mm512_kmov(a: __mmask16) -> __mmask16 { - let r: u16 = a; - transmute(r) + a } /// Converts integer mask into bitmask, storing the result in dst. @@ -25872,8 +25871,7 @@ #[inline] #[target_feature(enable = "avx512f")] // generate normal and code instead of kmovw pub unsafe fn _mm512_int2mask(mask: i32) -> __mmask16 { - let r: u16 = mask as u16; - transmute(r) + mask as u16 } /// Converts bit mask k1 into an integer value, storing the results in dst. @@ -25883,8 +25881,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(mov))] // generate normal and code instead of kmovw pub unsafe fn _mm512_mask2int(k1: __mmask16) -> i32 { - let r: i32 = k1 as i32; - transmute(r) + k1 as i32 } /// Unpack and interleave 8 bits from masks a and b, and store the 16-bit result in k. @@ -25896,7 +25893,7 @@ pub unsafe fn _mm512_kunpackb(a: __mmask16, b: __mmask16) -> __mmask16 { let a = a & 0b00000000_11111111; let b = b & 0b11111111_00000000; - transmute(a | b) + a | b } /// Performs bitwise OR between k1 and k2, storing the result in dst. CF flag is set if dst consists of all 1's. 
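The hunks that follow simplify `let r = simd_insert(a, 0, x); transmute(r)` to a plain `simd_insert(a, 0, x)`; the writemask behaviour described in the surrounding doc comments is unchanged: bit 0 of k selects whether lane 0 holds the computed result or the value from src, and the upper lanes are copied from a. A scalar sketch of that selection, assuming the _mm_mask_add_ss-style semantics described in those comments (the function name and array types here are illustrative, not the std implementation):

// Writemask model: lane 0 is a[0] + b[0] if bit 0 of k is set, else src[0];
// lanes 1..4 are always copied from a.
fn mask_add_ss_ref(src: [f32; 4], k: u8, a: [f32; 4], b: [f32; 4]) -> [f32; 4] {
    let mut r = a;
    r[0] = if (k & 1) != 0 { a[0] + b[0] } else { src[0] };
    r
}

fn main() {
    let src = [9.0, 9.0, 9.0, 9.0];
    let a = [1.0, 2.0, 3.0, 4.0];
    let b = [10.0, 0.0, 0.0, 0.0];
    assert_eq!(mask_add_ss_ref(src, 1, a, b), [11.0, 2.0, 3.0, 4.0]);
    assert_eq!(mask_add_ss_ref(src, 0, a, b), [9.0, 2.0, 3.0, 4.0]);
}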
@@ -32352,8 +32349,7 @@ if (k & 0b00000001) != 0 { mov = simd_extract(b, 0); } - let r = simd_insert(a, 0, mov); - transmute(r) + simd_insert(a, 0, mov) } /// Move the lower single-precision (32-bit) floating-point element from b to the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -32367,8 +32363,7 @@ if (k & 0b00000001) != 0 { mov = simd_extract(b, 0); } - let r = simd_insert(a, 0, mov); - transmute(r) + simd_insert(a, 0, mov) } /// Move the lower double-precision (64-bit) floating-point element from b to the lower element of dst using writemask k (the element is copied from src when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -32383,8 +32378,7 @@ if (k & 0b00000001) != 0 { mov = simd_extract(b, 0); } - let r = simd_insert(a, 0, mov); - transmute(r) + simd_insert(a, 0, mov) } /// Move the lower double-precision (64-bit) floating-point element from b to the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -32398,8 +32392,7 @@ if (k & 0b00000001) != 0 { mov = simd_extract(b, 0); } - let r = simd_insert(a, 0, mov); - transmute(r) + simd_insert(a, 0, mov) } /// Add the lower single-precision (32-bit) floating-point element in a and b, store the result in the lower element of dst using writemask k (the element is copied from src when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -32416,8 +32409,7 @@ let extractb: f32 = simd_extract(b, 0); add = extracta + extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Add the lower single-precision (32-bit) floating-point element in a and b, store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -32433,8 +32425,7 @@ let extractb: f32 = simd_extract(b, 0); add = extracta + extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Add the lower double-precision (64-bit) floating-point element in a and b, store the result in the lower element of dst using writemask k (the element is copied from src when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -32451,8 +32442,7 @@ let extractb: f64 = simd_extract(b, 0); add = extracta + extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Add the lower double-precision (64-bit) floating-point element in a and b, store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -32468,8 +32458,7 @@ let extractb: f64 = simd_extract(b, 0); add = extracta + extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Subtract the lower single-precision (32-bit) floating-point element in b from the lower single-precision (32-bit) floating-point element in a, store the result in the lower element of dst using writemask k (the element is copied from src when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. 
@@ -32486,8 +32475,7 @@ let extractb: f32 = simd_extract(b, 0); add = extracta - extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Subtract the lower single-precision (32-bit) floating-point element in b from the lower single-precision (32-bit) floating-point element in a, store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -32503,8 +32491,7 @@ let extractb: f32 = simd_extract(b, 0); add = extracta - extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Subtract the lower double-precision (64-bit) floating-point element in b from the lower double-precision (64-bit) floating-point element in a, store the result in the lower element of dst using writemask k (the element is copied from src when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -32521,8 +32508,7 @@ let extractb: f64 = simd_extract(b, 0); add = extracta - extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Subtract the lower double-precision (64-bit) floating-point element in b from the lower double-precision (64-bit) floating-point element in a, store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -32538,8 +32524,7 @@ let extractb: f64 = simd_extract(b, 0); add = extracta - extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Multiply the lower single-precision (32-bit) floating-point element in a and b, store the result in the lower element of dst using writemask k (the element is copied from src when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -32556,8 +32541,7 @@ let extractb: f32 = simd_extract(b, 0); add = extracta * extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Multiply the lower single-precision (32-bit) floating-point element in a and b, store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -32573,8 +32557,7 @@ let extractb: f32 = simd_extract(b, 0); add = extracta * extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Multiply the lower double-precision (64-bit) floating-point element in a and b, store the result in the lower element of dst using writemask k (the element is copied from src when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -32591,8 +32574,7 @@ let extractb: f64 = simd_extract(b, 0); add = extracta * extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Multiply the lower double-precision (64-bit) floating-point element in a and b, store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. 
@@ -32608,8 +32590,7 @@ let extractb: f64 = simd_extract(b, 0); add = extracta * extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Divide the lower single-precision (32-bit) floating-point element in a by the lower single-precision (32-bit) floating-point element in b, store the result in the lower element of dst using writemask k (the element is copied from src when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -32626,8 +32607,7 @@ let extractb: f32 = simd_extract(b, 0); add = extracta / extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Divide the lower single-precision (32-bit) floating-point element in a by the lower single-precision (32-bit) floating-point element in b, store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -32643,8 +32623,7 @@ let extractb: f32 = simd_extract(b, 0); add = extracta / extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Divide the lower double-precision (64-bit) floating-point element in a by the lower double-precision (64-bit) floating-point element in b, store the result in the lower element of dst using writemask k (the element is copied from src when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -32661,8 +32640,7 @@ let extractb: f64 = simd_extract(b, 0); add = extracta / extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Divide the lower double-precision (64-bit) floating-point element in a by the lower double-precision (64-bit) floating-point element in b, store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -32678,8 +32656,7 @@ let extractb: f64 = simd_extract(b, 0); add = extracta / extractb; } - let r = simd_insert(a, 0, add); - transmute(r) + simd_insert(a, 0, add) } /// Compare the lower single-precision (32-bit) floating-point elements in a and b, store the maximum value in the lower element of dst using writemask k (the element is copied from src when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -33587,8 +33564,7 @@ let extractc: f32 = simd_extract(c, 0); fmadd = vfmadd132ss(fmadd, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fmadd); - transmute(r) + simd_insert(a, 0, fmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -33605,8 +33581,7 @@ let extractc: f32 = simd_extract(c, 0); fmadd = vfmadd132ss(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fmadd); - transmute(r) + simd_insert(a, 0, fmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. 
Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper 3 packed elements from c to the upper elements of dst. @@ -33622,8 +33597,7 @@ let extractb: f32 = simd_extract(b, 0); fmadd = vfmadd132ss(extracta, extractb, fmadd, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(c, 0, fmadd); - transmute(r) + simd_insert(c, 0, fmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from a when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -33639,8 +33613,7 @@ let extractc: f64 = simd_extract(c, 0); fmadd = vfmadd132sd(fmadd, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fmadd); - transmute(r) + simd_insert(a, 0, fmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -33657,8 +33630,7 @@ let extractc: f64 = simd_extract(c, 0); fmadd = vfmadd132sd(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fmadd); - transmute(r) + simd_insert(a, 0, fmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper element from c to the upper element of dst. @@ -33674,8 +33646,7 @@ let extractb: f64 = simd_extract(b, 0); fmadd = vfmadd132sd(extracta, extractb, fmadd, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(c, 0, fmadd); - transmute(r) + simd_insert(c, 0, fmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst, and copy the upper 3 packed elements from a to the upper elements of dst. @@ -33692,8 +33663,7 @@ let extractc = -extractc; fmsub = vfmadd132ss(fmsub, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fmsub); - transmute(r) + simd_insert(a, 0, fmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -33711,8 +33681,7 @@ let extractc = -extractc; fmsub = vfmadd132ss(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fmsub); - transmute(r) + simd_insert(a, 0, fmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper 3 packed elements from c to the upper elements of dst. 
@@ -33729,8 +33698,7 @@ let extractc = -fmsub; fmsub = vfmadd132ss(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(c, 0, fmsub); - transmute(r) + simd_insert(c, 0, fmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from a when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -33747,8 +33715,7 @@ let extractc = -extractc; fmsub = vfmadd132sd(fmsub, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fmsub); - transmute(r) + simd_insert(a, 0, fmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -33766,8 +33733,7 @@ let extractc = -extractc; fmsub = vfmadd132sd(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fmsub); - transmute(r) + simd_insert(a, 0, fmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper element from c to the upper element of dst. @@ -33784,8 +33750,7 @@ let extractc = -fmsub; fmsub = vfmadd132sd(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(c, 0, fmsub); - transmute(r) + simd_insert(c, 0, fmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from a when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -33802,8 +33767,7 @@ let extractc: f32 = simd_extract(c, 0); fnmadd = vfmadd132ss(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fnmadd); - transmute(r) + simd_insert(a, 0, fnmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -33821,8 +33785,7 @@ let extractc: f32 = simd_extract(c, 0); fnmadd = vfmadd132ss(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fnmadd); - transmute(r) + simd_insert(a, 0, fnmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper 3 packed elements from c to the upper elements of dst. 
@@ -33839,8 +33802,7 @@ let extractb: f32 = simd_extract(b, 0); fnmadd = vfmadd132ss(extracta, extractb, fnmadd, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(c, 0, fnmadd); - transmute(r) + simd_insert(c, 0, fnmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from a when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -33857,8 +33819,7 @@ let extractc: f64 = simd_extract(c, 0); fnmadd = vfmadd132sd(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fnmadd); - transmute(r) + simd_insert(a, 0, fnmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -33876,8 +33837,7 @@ let extractc: f64 = simd_extract(c, 0); fnmadd = vfmadd132sd(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fnmadd); - transmute(r) + simd_insert(a, 0, fnmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper element from c to the upper element of dst. @@ -33894,8 +33854,7 @@ let extractb: f64 = simd_extract(b, 0); fnmadd = vfmadd132sd(extracta, extractb, fnmadd, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(c, 0, fnmadd); - transmute(r) + simd_insert(c, 0, fnmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -33913,8 +33872,7 @@ let extractc = -extractc; fnmsub = vfmadd132ss(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fnmsub); - transmute(r) + simd_insert(a, 0, fnmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst. @@ -33933,8 +33891,7 @@ let extractc = -extractc; fnmsub = vfmadd132ss(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fnmsub); - transmute(r) + simd_insert(a, 0, fnmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper 3 packed elements from c to the upper elements of dst. 
@@ -33952,8 +33909,7 @@ let extractc = -fnmsub; fnmsub = vfmadd132ss(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(c, 0, fnmsub); - transmute(r) + simd_insert(c, 0, fnmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -33971,8 +33927,7 @@ let extractc = -extractc; fnmsub = vfmadd132sd(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fnmsub); - transmute(r) + simd_insert(a, 0, fnmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst. @@ -33991,8 +33946,7 @@ let extractc = -extractc; fnmsub = vfmadd132sd(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(a, 0, fnmsub); - transmute(r) + simd_insert(a, 0, fnmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper element from c to the upper element of dst. @@ -34010,8 +33964,7 @@ let extractc = -fnmsub; fnmsub = vfmadd132sd(extracta, extractb, extractc, _MM_FROUND_CUR_DIRECTION); } - let r = simd_insert(c, 0, fnmsub); - transmute(r) + simd_insert(c, 0, fnmsub) } /// Add the lower single-precision (32-bit) floating-point element in a and b, store the result in the lower element of dst, and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -35705,8 +35658,7 @@ let extractb: f32 = simd_extract(b, 0); let extractc: f32 = simd_extract(c, 0); let r = vfmadd132ss(extracta, extractb, extractc, ROUNDING); - let r = simd_insert(a, 0, r); - transmute(r) + simd_insert(a, 0, r) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from a when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -35736,8 +35688,7 @@ let extractc: f32 = simd_extract(c, 0); fmadd = vfmadd132ss(fmadd, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fmadd); - transmute(r) + simd_insert(a, 0, fmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -35768,8 +35719,7 @@ let extractc: f32 = simd_extract(c, 0); fmadd = vfmadd132ss(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fmadd); - transmute(r) + simd_insert(a, 0, fmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. 
Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper 3 packed elements from c to the upper elements of dst.\ @@ -35799,8 +35749,7 @@ let extractb: f32 = simd_extract(b, 0); fmadd = vfmadd132ss(extracta, extractb, fmadd, ROUNDING); } - let r = simd_insert(c, 0, fmadd); - transmute(r) + simd_insert(c, 0, fmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. Store the result in the lower element of dst, and copy the upper element from a to the upper element of dst.\ @@ -35827,8 +35776,7 @@ let extractb: f64 = simd_extract(b, 0); let extractc: f64 = simd_extract(c, 0); let fmadd = vfmadd132sd(extracta, extractb, extractc, ROUNDING); - let r = simd_insert(a, 0, fmadd); - transmute(r) + simd_insert(a, 0, fmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from a when mask bit 0 is not set), and copy the upper element from a to the upper element of dst.\ @@ -35858,8 +35806,7 @@ let extractc: f64 = simd_extract(c, 0); fmadd = vfmadd132sd(fmadd, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fmadd); - transmute(r) + simd_insert(a, 0, fmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst.\ @@ -35890,8 +35837,7 @@ let extractc: f64 = simd_extract(c, 0); fmadd = vfmadd132sd(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fmadd); - transmute(r) + simd_insert(a, 0, fmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper element from c to the upper element of dst.\ @@ -35921,8 +35867,7 @@ let extractb: f64 = simd_extract(b, 0); fmadd = vfmadd132sd(extracta, extractb, fmadd, ROUNDING); } - let r = simd_insert(c, 0, fmadd); - transmute(r) + simd_insert(c, 0, fmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst, and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -35946,8 +35891,7 @@ let extractc: f32 = simd_extract(c, 0); let extractc = -extractc; let fmsub = vfmadd132ss(extracta, extractb, extractc, ROUNDING); - let r = simd_insert(a, 0, fmsub); - transmute(r) + simd_insert(a, 0, fmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. 
Store the result in the lower element of dst using writemask k (the element is copied from a when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -35978,8 +35922,7 @@ let extractc = -extractc; fmsub = vfmadd132ss(fmsub, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fmsub); - transmute(r) + simd_insert(a, 0, fmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -36011,8 +35954,7 @@ let extractc = -extractc; fmsub = vfmadd132ss(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fmsub); - transmute(r) + simd_insert(a, 0, fmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper 3 packed elements from c to the upper elements of dst.\ @@ -36043,8 +35985,7 @@ let extractc = -fmsub; fmsub = vfmadd132ss(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(c, 0, fmsub); - transmute(r) + simd_insert(c, 0, fmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst, and copy the upper element from a to the upper element of dst.\ @@ -36072,8 +36013,7 @@ let extractc: f64 = simd_extract(c, 0); let extractc = -extractc; let fmsub = vfmadd132sd(extracta, extractb, extractc, ROUNDING); - let r = simd_insert(a, 0, fmsub); - transmute(r) + simd_insert(a, 0, fmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from a when mask bit 0 is not set), and copy the upper element from a to the upper element of dst.\ @@ -36104,8 +36044,7 @@ let extractc = -extractc; fmsub = vfmadd132sd(fmsub, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fmsub); - transmute(r) + simd_insert(a, 0, fmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst.\ @@ -36137,8 +36076,7 @@ let extractc = -extractc; fmsub = vfmadd132sd(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fmsub); - transmute(r) + simd_insert(a, 0, fmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the intermediate result. 
Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper element from c to the upper element of dst.\ @@ -36169,8 +36107,7 @@ let extractc = -fmsub; fmsub = vfmadd132sd(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(c, 0, fmsub); - transmute(r) + simd_insert(c, 0, fmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst, and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -36194,8 +36131,7 @@ let extractb: f32 = simd_extract(b, 0); let extractc: f32 = simd_extract(c, 0); let fnmadd = vfmadd132ss(extracta, extractb, extractc, ROUNDING); - let r = simd_insert(a, 0, fnmadd); - transmute(r) + simd_insert(a, 0, fnmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from a when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -36226,8 +36162,7 @@ let extractc: f32 = simd_extract(c, 0); fnmadd = vfmadd132ss(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fnmadd); - transmute(r) + simd_insert(a, 0, fnmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -36259,8 +36194,7 @@ let extractc: f32 = simd_extract(c, 0); fnmadd = vfmadd132ss(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fnmadd); - transmute(r) + simd_insert(a, 0, fnmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper 3 packed elements from c to the upper elements of dst.\ @@ -36291,8 +36225,7 @@ let extractb: f32 = simd_extract(b, 0); fnmadd = vfmadd132ss(extracta, extractb, fnmadd, ROUNDING); } - let r = simd_insert(c, 0, fnmadd); - transmute(r) + simd_insert(c, 0, fnmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst, and copy the upper element from a to the upper element of dst.\ @@ -36320,8 +36253,7 @@ let extractb: f64 = simd_extract(b, 0); let extractc: f64 = simd_extract(c, 0); let fnmadd = vfmadd132sd(extracta, extractb, extractc, ROUNDING); - let r = simd_insert(a, 0, fnmadd); - transmute(r) + simd_insert(a, 0, fnmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. 
Store the result in the lower element of dst using writemask k (the element is copied from a when mask bit 0 is not set), and copy the upper element from a to the upper element of dst.\ @@ -36352,8 +36284,7 @@ let extractc: f64 = simd_extract(c, 0); fnmadd = vfmadd132sd(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fnmadd); - transmute(r) + simd_insert(a, 0, fnmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst.\ @@ -36385,8 +36316,7 @@ let extractc: f64 = simd_extract(c, 0); fnmadd = vfmadd132sd(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fnmadd); - transmute(r) + simd_insert(a, 0, fnmadd) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and add the negated intermediate result to the lower element in c. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper element from c to the upper element of dst.\ @@ -36417,8 +36347,7 @@ let extractb: f64 = simd_extract(b, 0); fnmadd = vfmadd132sd(extracta, extractb, fnmadd, ROUNDING); } - let r = simd_insert(c, 0, fnmadd); - transmute(r) + simd_insert(c, 0, fnmadd) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, subtract the lower element in c from the negated intermediate result, store the result in the lower element of dst, and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -36443,8 +36372,7 @@ let extractc: f32 = simd_extract(c, 0); let extractc = -extractc; let fnmsub = vfmadd132ss(extracta, extractb, extractc, ROUNDING); - let r = simd_insert(a, 0, fnmsub); - transmute(r) + simd_insert(a, 0, fnmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -36476,8 +36404,7 @@ let extractc = -extractc; fnmsub = vfmadd132ss(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fnmsub); - transmute(r) + simd_insert(a, 0, fnmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in the lower element of dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -36510,8 +36437,7 @@ let extractc = -extractc; fnmsub = vfmadd132ss(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fnmsub); - transmute(r) + simd_insert(a, 0, fnmsub) } /// Multiply the lower single-precision (32-bit) floating-point elements in a and b, subtract the lower element in c from the negated intermediate result. 
Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper 3 packed elements from c to the upper elements of dst.\ @@ -36543,8 +36469,7 @@ let extractc = -fnmsub; fnmsub = vfmadd132ss(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(c, 0, fnmsub); - transmute(r) + simd_insert(c, 0, fnmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in the lower element of dst, and copy the upper element from a to the upper element of dst.\ @@ -36573,8 +36498,7 @@ let extractc: f64 = simd_extract(c, 0); let extractc = -extractc; let fnmsub = vfmadd132sd(extracta, extractb, extractc, ROUNDING); - let r = simd_insert(a, 0, fnmsub); - transmute(r) + simd_insert(a, 0, fnmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper element from a to the upper element of dst.\ @@ -36606,8 +36530,7 @@ let extractc = -extractc; fnmsub = vfmadd132sd(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fnmsub); - transmute(r) + simd_insert(a, 0, fnmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in dst using zeromask k (the element is zeroed out when mask bit 0 is not set), and copy the upper element from a to the upper element of dst.\ @@ -36640,8 +36563,7 @@ let extractc = -extractc; fnmsub = vfmadd132sd(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(a, 0, fnmsub); - transmute(r) + simd_insert(a, 0, fnmsub) } /// Multiply the lower double-precision (64-bit) floating-point elements in a and b, and subtract the lower element in c from the negated intermediate result. Store the result in the lower element of dst using writemask k (the element is copied from c when mask bit 0 is not set), and copy the upper element from c to the upper element of dst.\ @@ -36673,8 +36595,7 @@ let extractc = -fnmsub; fnmsub = vfmadd132sd(extracta, extractb, extractc, ROUNDING); } - let r = simd_insert(c, 0, fnmsub); - transmute(r) + simd_insert(c, 0, fnmsub) } /// Fix up the lower single-precision (32-bit) floating-point elements in a and b using the lower 32-bit integer in c, store the result in the lower element of dst, and copy the upper 3 packed elements from a to the upper elements of dst. imm8 is used to set the required flags reporting. 
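[Editorial note — not part of the patch.] The fmadd/fmsub/fnmadd/fnmsub hunks above all reduce to one fused multiply-add call (`vfmadd132ss`/`vfmadd132sd`) with selectively negated operands. A scalar sketch of the four variants using `f32::mul_add`, shown only to make the sign conventions explicit:

fn fmadd(a: f32, b: f32, c: f32) -> f32 { a.mul_add(b, c) }     //  (a * b) + c
fn fmsub(a: f32, b: f32, c: f32) -> f32 { a.mul_add(b, -c) }    //  (a * b) - c
fn fnmadd(a: f32, b: f32, c: f32) -> f32 { (-a).mul_add(b, c) } // -(a * b) + c
fn fnmsub(a: f32, b: f32, c: f32) -> f32 { (-a).mul_add(b, -c) }// -(a * b) - c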
@@ -37168,8 +37089,7 @@ pub unsafe fn _mm_cvt_roundss_si32(a: __m128) -> i32 { static_assert_rounding!(ROUNDING); let a = a.as_f32x4(); - let r = vcvtss2si(a, ROUNDING); - transmute(r) + vcvtss2si(a, ROUNDING) } /// Convert the lower single-precision (32-bit) floating-point element in a to a 32-bit integer, and store the result in dst.\ @@ -37188,8 +37108,7 @@ pub unsafe fn _mm_cvt_roundss_i32(a: __m128) -> i32 { static_assert_rounding!(ROUNDING); let a = a.as_f32x4(); - let r = vcvtss2si(a, ROUNDING); - transmute(r) + vcvtss2si(a, ROUNDING) } /// Convert the lower single-precision (32-bit) floating-point element in a to an unsigned 32-bit integer, and store the result in dst.\ @@ -37208,8 +37127,7 @@ pub unsafe fn _mm_cvt_roundss_u32(a: __m128) -> u32 { static_assert_rounding!(ROUNDING); let a = a.as_f32x4(); - let r = vcvtss2usi(a, ROUNDING); - transmute(r) + vcvtss2usi(a, ROUNDING) } /// Convert the lower single-precision (32-bit) floating-point element in a to a 32-bit integer, and store the result in dst. @@ -37219,7 +37137,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtss2si))] pub unsafe fn _mm_cvtss_i32(a: __m128) -> i32 { - transmute(vcvtss2si(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION)) + vcvtss2si(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION) } /// Convert the lower single-precision (32-bit) floating-point element in a to an unsigned 32-bit integer, and store the result in dst. @@ -37229,7 +37147,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtss2usi))] pub unsafe fn _mm_cvtss_u32(a: __m128) -> u32 { - transmute(vcvtss2usi(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION)) + vcvtss2usi(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION) } /// Convert the lower double-precision (64-bit) floating-point element in a to a 32-bit integer, and store the result in dst.\ @@ -37248,8 +37166,7 @@ pub unsafe fn _mm_cvt_roundsd_si32(a: __m128d) -> i32 { static_assert_rounding!(ROUNDING); let a = a.as_f64x2(); - let r = vcvtsd2si(a, ROUNDING); - transmute(r) + vcvtsd2si(a, ROUNDING) } /// Convert the lower single-precision (32-bit) floating-point element in a to a 32-bit integer, and store the result in dst.\ @@ -37268,8 +37185,7 @@ pub unsafe fn _mm_cvt_roundsd_i32(a: __m128d) -> i32 { static_assert_rounding!(ROUNDING); let a = a.as_f64x2(); - let r = vcvtsd2si(a, ROUNDING); - transmute(r) + vcvtsd2si(a, ROUNDING) } /// Convert the lower double-precision (64-bit) floating-point element in a to an unsigned 32-bit integer, and store the result in dst.\ @@ -37288,8 +37204,7 @@ pub unsafe fn _mm_cvt_roundsd_u32(a: __m128d) -> u32 { static_assert_rounding!(ROUNDING); let a = a.as_f64x2(); - let r = vcvtsd2usi(a, ROUNDING); - transmute(r) + vcvtsd2usi(a, ROUNDING) } /// Convert the lower double-precision (64-bit) floating-point element in a to a 32-bit integer, and store the result in dst. @@ -37299,7 +37214,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtsd2si))] pub unsafe fn _mm_cvtsd_i32(a: __m128d) -> i32 { - transmute(vcvtsd2si(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION)) + vcvtsd2si(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION) } /// Convert the lower double-precision (64-bit) floating-point element in a to an unsigned 32-bit integer, and store the result in dst. 
@@ -37309,7 +37224,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtsd2usi))] pub unsafe fn _mm_cvtsd_u32(a: __m128d) -> u32 { - transmute(vcvtsd2usi(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION)) + vcvtsd2usi(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION) } /// Convert the signed 32-bit integer b to a single-precision (32-bit) floating-point element, store the result in the lower element of dst, and copy the upper 3 packed elements from a to the upper elements of dst.\ @@ -37382,8 +37297,7 @@ #[cfg_attr(test, assert_instr(vcvtsi2ss))] pub unsafe fn _mm_cvti32_ss(a: __m128, b: i32) -> __m128 { let b = b as f32; - let r = simd_insert(a, 0, b); - transmute(r) + simd_insert(a, 0, b) } /// Convert the signed 32-bit integer b to a double-precision (64-bit) floating-point element, store the result in the lower element of dst, and copy the upper element from a to the upper element of dst. @@ -37394,8 +37308,7 @@ #[cfg_attr(test, assert_instr(vcvtsi2sd))] pub unsafe fn _mm_cvti32_sd(a: __m128d, b: i32) -> __m128d { let b = b as f64; - let r = simd_insert(a, 0, b); - transmute(r) + simd_insert(a, 0, b) } /// Convert the lower single-precision (32-bit) floating-point element in a to a 32-bit integer with truncation, and store the result in dst.\ @@ -37409,8 +37322,7 @@ pub unsafe fn _mm_cvtt_roundss_si32(a: __m128) -> i32 { static_assert_sae!(SAE); let a = a.as_f32x4(); - let r = vcvtss2si(a, SAE); - transmute(r) + vcvtss2si(a, SAE) } /// Convert the lower single-precision (32-bit) floating-point element in a to a 32-bit integer with truncation, and store the result in dst.\ @@ -37424,8 +37336,7 @@ pub unsafe fn _mm_cvtt_roundss_i32(a: __m128) -> i32 { static_assert_sae!(SAE); let a = a.as_f32x4(); - let r = vcvtss2si(a, SAE); - transmute(r) + vcvtss2si(a, SAE) } /// Convert the lower single-precision (32-bit) floating-point element in a to an unsigned 32-bit integer with truncation, and store the result in dst.\ @@ -37439,8 +37350,7 @@ pub unsafe fn _mm_cvtt_roundss_u32(a: __m128) -> u32 { static_assert_sae!(SAE); let a = a.as_f32x4(); - let r = vcvtss2usi(a, SAE); - transmute(r) + vcvtss2usi(a, SAE) } /// Convert the lower single-precision (32-bit) floating-point element in a to a 32-bit integer with truncation, and store the result in dst. @@ -37450,7 +37360,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtss2si))] pub unsafe fn _mm_cvttss_i32(a: __m128) -> i32 { - transmute(vcvtss2si(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION)) + vcvtss2si(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION) } /// Convert the lower single-precision (32-bit) floating-point element in a to an unsigned 32-bit integer with truncation, and store the result in dst. 
@@ -37460,7 +37370,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtss2usi))] pub unsafe fn _mm_cvttss_u32(a: __m128) -> u32 { - transmute(vcvtss2usi(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION)) + vcvtss2usi(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION) } /// Convert the lower double-precision (64-bit) floating-point element in a to a 32-bit integer with truncation, and store the result in dst.\ @@ -37474,8 +37384,7 @@ pub unsafe fn _mm_cvtt_roundsd_si32(a: __m128d) -> i32 { static_assert_sae!(SAE); let a = a.as_f64x2(); - let r = vcvtsd2si(a, SAE); - transmute(r) + vcvtsd2si(a, SAE) } /// Convert the lower double-precision (64-bit) floating-point element in a to a 32-bit integer with truncation, and store the result in dst.\ @@ -37489,8 +37398,7 @@ pub unsafe fn _mm_cvtt_roundsd_i32(a: __m128d) -> i32 { static_assert_sae!(SAE); let a = a.as_f64x2(); - let r = vcvtsd2si(a, SAE); - transmute(r) + vcvtsd2si(a, SAE) } /// Convert the lower double-precision (64-bit) floating-point element in a to an unsigned 32-bit integer with truncation, and store the result in dst.\ @@ -37504,8 +37412,7 @@ pub unsafe fn _mm_cvtt_roundsd_u32(a: __m128d) -> u32 { static_assert_sae!(SAE); let a = a.as_f64x2(); - let r = vcvtsd2usi(a, SAE); - transmute(r) + vcvtsd2usi(a, SAE) } /// Convert the lower double-precision (64-bit) floating-point element in a to a 32-bit integer with truncation, and store the result in dst. @@ -37515,7 +37422,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtsd2si))] pub unsafe fn _mm_cvttsd_i32(a: __m128d) -> i32 { - transmute(vcvtsd2si(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION)) + vcvtsd2si(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION) } /// Convert the lower double-precision (64-bit) floating-point element in a to an unsigned 32-bit integer with truncation, and store the result in dst. @@ -37525,7 +37432,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtsd2usi))] pub unsafe fn _mm_cvttsd_u32(a: __m128d) -> u32 { - transmute(vcvtsd2usi(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION)) + vcvtsd2usi(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION) } /// Convert the unsigned 32-bit integer b to a single-precision (32-bit) floating-point element, store the result in the lower element of dst, and copy the upper 3 packed elements from a to the upper elements of dst. @@ -37536,8 +37443,7 @@ #[cfg_attr(test, assert_instr(vcvtusi2ss))] pub unsafe fn _mm_cvtu32_ss(a: __m128, b: u32) -> __m128 { let b = b as f32; - let r = simd_insert(a, 0, b); - transmute(r) + simd_insert(a, 0, b) } /// Convert the unsigned 32-bit integer b to a double-precision (64-bit) floating-point element, store the result in the lower element of dst, and copy the upper element from a to the upper element of dst. 
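[Editorial note — not part of the patch.] The CVTSS2SI/CVTSD2SI-family conversions above share the out-of-range behaviour that the updated `sse.rs` documentation below spells out: a value that cannot be represented in the destination integer type becomes 0x8000_0000 (`i32::MIN`). A small illustration with the stable SSE intrinsics, assumed to run on x86_64 where SSE is always available:

#[cfg(target_arch = "x86_64")]
unsafe fn cvt_examples() {
    use std::arch::x86_64::{_mm_cvtss_si32, _mm_cvttss_si32, _mm_set_ss};
    // In range: rounded with the current rounding mode (nearest-even by default) ...
    assert_eq!(_mm_cvtss_si32(_mm_set_ss(2.5)), 2);
    // ... or truncated toward zero by the "tt" variant.
    assert_eq!(_mm_cvttss_si32(_mm_set_ss(2.9)), 2);
    // Out of range: the documented indefinite value, i32::MIN.
    assert_eq!(_mm_cvtss_si32(_mm_set_ss(1.0e20)), i32::MIN);
}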
@@ -37548,8 +37454,7 @@ #[cfg_attr(test, assert_instr(vcvtusi2sd))] pub unsafe fn _mm_cvtu32_sd(a: __m128d, b: u32) -> __m128d { let b = b as f64; - let r = simd_insert(a, 0, b); - transmute(r) + simd_insert(a, 0, b) } /// Compare the lower single-precision (32-bit) floating-point element in a and b based on the comparison operand specified by imm8, and return the boolean result (0 or 1).\ @@ -37565,8 +37470,7 @@ static_assert_mantissas_sae!(SAE); let a = a.as_f32x4(); let b = b.as_f32x4(); - let r = vcomiss(a, b, IMM5, SAE); - transmute(r) + vcomiss(a, b, IMM5, SAE) } /// Compare the lower double-precision (64-bit) floating-point element in a and b based on the comparison operand specified by imm8, and return the boolean result (0 or 1).\ @@ -37582,8 +37486,7 @@ static_assert_mantissas_sae!(SAE); let a = a.as_f64x2(); let b = b.as_f64x2(); - let r = vcomisd(a, b, IMM5, SAE); - transmute(r) + vcomisd(a, b, IMM5, SAE) } /// Equal diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse.rs 2023-12-21 16:55:32.000000000 +0000 @@ -790,8 +790,7 @@ /// /// The result is rounded according to the current rounding mode. If the result /// cannot be represented as a 32 bit integer the result will be `0x8000_0000` -/// (`i32::MIN`) or an invalid operation floating point exception if -/// unmasked (see [`_mm_setcsr`](fn._mm_setcsr.html)). +/// (`i32::MIN`). /// /// This corresponds to the `CVTSS2SI` instruction (with 32 bit output). /// @@ -821,8 +820,7 @@ /// /// The result is rounded always using truncation (round towards zero). If the /// result cannot be represented as a 32 bit integer the result will be -/// `0x8000_0000` (`i32::MIN`) or an invalid operation floating point -/// exception if unmasked (see [`_mm_setcsr`](fn._mm_setcsr.html)). +/// `0x8000_0000` (`i32::MIN`). /// /// This corresponds to the `CVTTSS2SI` instruction (with 32 bit output). /// @@ -1083,7 +1081,10 @@ #[cfg_attr(test, assert_instr(movmskps))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_movemask_ps(a: __m128) -> i32 { - movmskps(a) + // Propagate the highest bit to the rest, because simd_bitmask + // requires all-1 or all-0. + let mask: i32x4 = simd_lt(transmute(a), i32x4::splat(0)); + simd_bitmask::(mask).into() } /// Construct a `__m128` with the lowest element read from `p` and the other @@ -1365,6 +1366,15 @@ /// Gets the unsigned 32-bit value of the MXCSR control and status register. /// +/// Note that Rust makes no guarantees whatsoever about the contents of this register: Rust +/// floating-point operations may or may not result in this register getting updated with exception +/// state, and the register can change between two invocations of this function even when no +/// floating-point operations appear in the source code (since floating-point operations appearing +/// earlier or later can be reordered). +/// +/// If you need to perform some floating-point operations and check whether they raised an +/// exception, use an inline assembly block for the entire sequence of operations. 
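[Editorial note — not part of the patch.] The new `_mm_getcsr`/`_mm_setcsr` documentation introduced below deprecates the intrinsics and points users at inline assembly instead. A minimal sketch of reading MXCSR that way (hypothetical helper, x86_64 only):

#[cfg(target_arch = "x86_64")]
unsafe fn read_mxcsr() -> u32 {
    use std::arch::asm;
    let mut csr: u32 = 0;
    // STMXCSR stores the 32-bit MXCSR register to the given memory location.
    asm!("stmxcsr [{}]", in(reg) &mut csr, options(nostack, preserves_flags));
    // Any sequence that modifies MXCSR (via LDMXCSR) must restore the original
    // value before the asm block ends, as the documentation below requires.
    csr
}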
+/// /// For more info see [`_mm_setcsr`](fn._mm_setcsr.html) /// /// [Intel's documentation](https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_getcsr) @@ -1372,6 +1382,10 @@ #[target_feature(enable = "sse")] #[cfg_attr(test, assert_instr(stmxcsr))] #[stable(feature = "simd_x86", since = "1.27.0")] +#[deprecated( + since = "1.75.0", + note = "see `_mm_getcsr` documentation - use inline assembly instead" +)] pub unsafe fn _mm_getcsr() -> u32 { let mut result = 0_i32; stmxcsr(&mut result as *mut _ as *mut i8); @@ -1401,6 +1415,16 @@ /// * The *denormals-are-zero mode flag* turns all numbers which would be /// denormalized (exponent bits are all zeros) into zeros. /// +/// Note that modifying the masking flags, rounding mode, or denormals-are-zero mode flags leads to +/// **immediate Undefined Behavior**: Rust assumes that these are always in their default state and +/// will optimize accordingly. This even applies when the register is altered and later reset to its +/// original value without any floating-point operations appearing in the source code between those +/// operations (since floating-point operations appearing earlier or later can be reordered). +/// +/// If you need to perform some floating-point operations under a different masking flags, rounding +/// mode, or denormals-are-zero mode, use an inline assembly block and make sure to restore the +/// original MXCSR register state before the end of the block. +/// /// ## Exception Flags /// /// * `_MM_EXCEPT_INVALID`: An invalid operation was performed (e.g., dividing @@ -1509,6 +1533,10 @@ #[target_feature(enable = "sse")] #[cfg_attr(test, assert_instr(ldmxcsr))] #[stable(feature = "simd_x86", since = "1.27.0")] +#[deprecated( + since = "1.75.0", + note = "see `_mm_setcsr` documentation - use inline assembly instead" +)] pub unsafe fn _mm_setcsr(val: u32) { ldmxcsr(&val as *const _ as *const i8); } @@ -1588,9 +1616,14 @@ /// /// [Intel's documentation](https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_MM_GET_EXCEPTION_MASK) #[inline] +#[allow(deprecated)] // Deprecated function implemented on top of deprecated function #[allow(non_snake_case)] #[target_feature(enable = "sse")] #[stable(feature = "simd_x86", since = "1.27.0")] +#[deprecated( + since = "1.75.0", + note = "see `_mm_getcsr` documentation - use inline assembly instead" +)] pub unsafe fn _MM_GET_EXCEPTION_MASK() -> u32 { _mm_getcsr() & _MM_MASK_MASK } @@ -1599,9 +1632,14 @@ /// /// [Intel's documentation](https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_MM_GET_EXCEPTION_STATE) #[inline] +#[allow(deprecated)] // Deprecated function implemented on top of deprecated function #[allow(non_snake_case)] #[target_feature(enable = "sse")] #[stable(feature = "simd_x86", since = "1.27.0")] +#[deprecated( + since = "1.75.0", + note = "see `_mm_getcsr` documentation - use inline assembly instead" +)] pub unsafe fn _MM_GET_EXCEPTION_STATE() -> u32 { _mm_getcsr() & _MM_EXCEPT_MASK } @@ -1610,9 +1648,14 @@ /// /// [Intel's documentation](https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_MM_GET_FLUSH_ZERO_MODE) #[inline] +#[allow(deprecated)] // Deprecated function implemented on top of deprecated function #[allow(non_snake_case)] #[target_feature(enable = "sse")] #[stable(feature = "simd_x86", since = "1.27.0")] +#[deprecated( + since = "1.75.0", + note = "see `_mm_getcsr` documentation - use inline assembly instead" +)] pub unsafe fn _MM_GET_FLUSH_ZERO_MODE() -> u32 
{ _mm_getcsr() & _MM_FLUSH_ZERO_MASK } @@ -1621,9 +1664,14 @@ /// /// [Intel's documentation](https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_MM_GET_ROUNDING_MODE) #[inline] +#[allow(deprecated)] // Deprecated function implemented on top of deprecated function #[allow(non_snake_case)] #[target_feature(enable = "sse")] #[stable(feature = "simd_x86", since = "1.27.0")] +#[deprecated( + since = "1.75.0", + note = "see `_mm_getcsr` documentation - use inline assembly instead" +)] pub unsafe fn _MM_GET_ROUNDING_MODE() -> u32 { _mm_getcsr() & _MM_ROUND_MASK } @@ -1632,9 +1680,14 @@ /// /// [Intel's documentation](https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_MM_SET_EXCEPTION_MASK) #[inline] +#[allow(deprecated)] // Deprecated function implemented on top of deprecated function #[allow(non_snake_case)] #[target_feature(enable = "sse")] #[stable(feature = "simd_x86", since = "1.27.0")] +#[deprecated( + since = "1.75.0", + note = "see `_mm_setcsr` documentation - use inline assembly instead" +)] pub unsafe fn _MM_SET_EXCEPTION_MASK(x: u32) { _mm_setcsr((_mm_getcsr() & !_MM_MASK_MASK) | x) } @@ -1643,9 +1696,14 @@ /// /// [Intel's documentation](https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_MM_SET_EXCEPTION_STATE) #[inline] +#[allow(deprecated)] // Deprecated function implemented on top of deprecated function #[allow(non_snake_case)] #[target_feature(enable = "sse")] #[stable(feature = "simd_x86", since = "1.27.0")] +#[deprecated( + since = "1.75.0", + note = "see `_mm_setcsr` documentation - use inline assembly instead" +)] pub unsafe fn _MM_SET_EXCEPTION_STATE(x: u32) { _mm_setcsr((_mm_getcsr() & !_MM_EXCEPT_MASK) | x) } @@ -1654,9 +1712,14 @@ /// /// [Intel's documentation](https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_MM_SET_FLUSH_ZERO_MODE) #[inline] +#[allow(deprecated)] // Deprecated function implemented on top of deprecated function #[allow(non_snake_case)] #[target_feature(enable = "sse")] #[stable(feature = "simd_x86", since = "1.27.0")] +#[deprecated( + since = "1.75.0", + note = "see `_mm_setcsr` documentation - use inline assembly instead" +)] pub unsafe fn _MM_SET_FLUSH_ZERO_MODE(x: u32) { let val = (_mm_getcsr() & !_MM_FLUSH_ZERO_MASK) | x; // println!("setting csr={:x}", val); @@ -1667,9 +1730,14 @@ /// /// [Intel's documentation](https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_MM_SET_ROUNDING_MODE) #[inline] +#[allow(deprecated)] // Deprecated function implemented on top of deprecated function #[allow(non_snake_case)] #[target_feature(enable = "sse")] #[stable(feature = "simd_x86", since = "1.27.0")] +#[deprecated( + since = "1.75.0", + note = "see `_mm_setcsr` documentation - use inline assembly instead" +)] pub unsafe fn _MM_SET_ROUNDING_MODE(x: u32) { _mm_setcsr((_mm_getcsr() & !_MM_ROUND_MASK) | x) } @@ -1820,8 +1888,6 @@ fn maxss(a: __m128, b: __m128) -> __m128; #[link_name = "llvm.x86.sse.max.ps"] fn maxps(a: __m128, b: __m128) -> __m128; - #[link_name = "llvm.x86.sse.movmsk.ps"] - fn movmskps(a: __m128) -> i32; #[link_name = "llvm.x86.sse.cmp.ps"] fn cmpps(a: __m128, b: __m128, imm8: i8) -> __m128; #[link_name = "llvm.x86.sse.comieq.ss"] @@ -1974,7 +2040,11 @@ let a = _mm_setr_ps(4.0, 13.0, 16.0, 100.0); let r = _mm_rcp_ss(a); let e = _mm_setr_ps(0.24993896, 13.0, 16.0, 100.0); - assert_eq_m128(r, e); + let rel_err = 0.00048828125; + assert_approx_eq!(get_m128(r, 0), get_m128(e, 0), 2. 
* rel_err); + for i in 1..4 { + assert_eq!(get_m128(r, i), get_m128(e, i)); + } } #[simd_test(enable = "sse")] @@ -2055,6 +2125,17 @@ let b = _mm_setr_ps(-100.0, 20.0, 0.0, -5.0); let r = _mm_max_ps(a, b); assert_eq_m128(r, _mm_setr_ps(-1.0, 20.0, 0.0, -5.0)); + + // Check SSE-specific semantics for -0.0 handling. + let a = _mm_setr_ps(-0.0, 0.0, 0.0, 0.0); + let b = _mm_setr_ps(0.0, 0.0, 0.0, 0.0); + let r1: [u8; 16] = transmute(_mm_max_ps(a, b)); + let r2: [u8; 16] = transmute(_mm_max_ps(b, a)); + let a: [u8; 16] = transmute(a); + let b: [u8; 16] = transmute(b); + assert_eq!(r1, b); + assert_eq!(r2, a); + assert_ne!(a, b); // sanity check that -0.0 is actually present } #[simd_test(enable = "sse")] @@ -2098,12 +2179,12 @@ let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0); let b = _mm_setr_ps(-1.0, 5.0, 6.0, 7.0); let r: u32x4 = transmute(_mm_cmpeq_ss(a, b)); - let e: u32x4 = transmute(_mm_setr_ps(transmute(0u32), 2.0, 3.0, 4.0)); + let e: u32x4 = transmute(_mm_setr_ps(f32::from_bits(0), 2.0, 3.0, 4.0)); assert_eq!(r, e); let b2 = _mm_setr_ps(1.0, 5.0, 6.0, 7.0); let r2: u32x4 = transmute(_mm_cmpeq_ss(a, b2)); - let e2: u32x4 = transmute(_mm_setr_ps(transmute(0xffffffffu32), 2.0, 3.0, 4.0)); + let e2: u32x4 = transmute(_mm_setr_ps(f32::from_bits(0xffffffff), 2.0, 3.0, 4.0)); assert_eq!(r2, e2); } @@ -2119,15 +2200,15 @@ let d1 = !0u32; // a.extract(0) < d.extract(0) let rb: u32x4 = transmute(_mm_cmplt_ss(a, b)); - let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0)); + let eb: u32x4 = transmute(_mm_setr_ps(f32::from_bits(b1), 2.0, 3.0, 4.0)); assert_eq!(rb, eb); let rc: u32x4 = transmute(_mm_cmplt_ss(a, c)); - let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0)); + let ec: u32x4 = transmute(_mm_setr_ps(f32::from_bits(c1), 2.0, 3.0, 4.0)); assert_eq!(rc, ec); let rd: u32x4 = transmute(_mm_cmplt_ss(a, d)); - let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0)); + let ed: u32x4 = transmute(_mm_setr_ps(f32::from_bits(d1), 2.0, 3.0, 4.0)); assert_eq!(rd, ed); } @@ -2143,15 +2224,15 @@ let d1 = !0u32; // a.extract(0) <= d.extract(0) let rb: u32x4 = transmute(_mm_cmple_ss(a, b)); - let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0)); + let eb: u32x4 = transmute(_mm_setr_ps(f32::from_bits(b1), 2.0, 3.0, 4.0)); assert_eq!(rb, eb); let rc: u32x4 = transmute(_mm_cmple_ss(a, c)); - let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0)); + let ec: u32x4 = transmute(_mm_setr_ps(f32::from_bits(c1), 2.0, 3.0, 4.0)); assert_eq!(rc, ec); let rd: u32x4 = transmute(_mm_cmple_ss(a, d)); - let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0)); + let ed: u32x4 = transmute(_mm_setr_ps(f32::from_bits(d1), 2.0, 3.0, 4.0)); assert_eq!(rd, ed); } @@ -2167,15 +2248,15 @@ let d1 = 0u32; // a.extract(0) > d.extract(0) let rb: u32x4 = transmute(_mm_cmpgt_ss(a, b)); - let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0)); + let eb: u32x4 = transmute(_mm_setr_ps(f32::from_bits(b1), 2.0, 3.0, 4.0)); assert_eq!(rb, eb); let rc: u32x4 = transmute(_mm_cmpgt_ss(a, c)); - let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0)); + let ec: u32x4 = transmute(_mm_setr_ps(f32::from_bits(c1), 2.0, 3.0, 4.0)); assert_eq!(rc, ec); let rd: u32x4 = transmute(_mm_cmpgt_ss(a, d)); - let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0)); + let ed: u32x4 = transmute(_mm_setr_ps(f32::from_bits(d1), 2.0, 3.0, 4.0)); assert_eq!(rd, ed); } @@ -2191,15 +2272,15 @@ let d1 = 0u32; // a.extract(0) >= d.extract(0) let rb: 
u32x4 = transmute(_mm_cmpge_ss(a, b)); - let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0)); + let eb: u32x4 = transmute(_mm_setr_ps(f32::from_bits(b1), 2.0, 3.0, 4.0)); assert_eq!(rb, eb); let rc: u32x4 = transmute(_mm_cmpge_ss(a, c)); - let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0)); + let ec: u32x4 = transmute(_mm_setr_ps(f32::from_bits(c1), 2.0, 3.0, 4.0)); assert_eq!(rc, ec); let rd: u32x4 = transmute(_mm_cmpge_ss(a, d)); - let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0)); + let ed: u32x4 = transmute(_mm_setr_ps(f32::from_bits(d1), 2.0, 3.0, 4.0)); assert_eq!(rd, ed); } @@ -2215,15 +2296,15 @@ let d1 = !0u32; // a.extract(0) != d.extract(0) let rb: u32x4 = transmute(_mm_cmpneq_ss(a, b)); - let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0)); + let eb: u32x4 = transmute(_mm_setr_ps(f32::from_bits(b1), 2.0, 3.0, 4.0)); assert_eq!(rb, eb); let rc: u32x4 = transmute(_mm_cmpneq_ss(a, c)); - let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0)); + let ec: u32x4 = transmute(_mm_setr_ps(f32::from_bits(c1), 2.0, 3.0, 4.0)); assert_eq!(rc, ec); let rd: u32x4 = transmute(_mm_cmpneq_ss(a, d)); - let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0)); + let ed: u32x4 = transmute(_mm_setr_ps(f32::from_bits(d1), 2.0, 3.0, 4.0)); assert_eq!(rd, ed); } @@ -2244,15 +2325,15 @@ let d1 = 0u32; // a.extract(0) >= d.extract(0) let rb: u32x4 = transmute(_mm_cmpnlt_ss(a, b)); - let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0)); + let eb: u32x4 = transmute(_mm_setr_ps(f32::from_bits(b1), 2.0, 3.0, 4.0)); assert_eq!(rb, eb); let rc: u32x4 = transmute(_mm_cmpnlt_ss(a, c)); - let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0)); + let ec: u32x4 = transmute(_mm_setr_ps(f32::from_bits(c1), 2.0, 3.0, 4.0)); assert_eq!(rc, ec); let rd: u32x4 = transmute(_mm_cmpnlt_ss(a, d)); - let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0)); + let ed: u32x4 = transmute(_mm_setr_ps(f32::from_bits(d1), 2.0, 3.0, 4.0)); assert_eq!(rd, ed); } @@ -2273,15 +2354,15 @@ let d1 = 0u32; // a.extract(0) > d.extract(0) let rb: u32x4 = transmute(_mm_cmpnle_ss(a, b)); - let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0)); + let eb: u32x4 = transmute(_mm_setr_ps(f32::from_bits(b1), 2.0, 3.0, 4.0)); assert_eq!(rb, eb); let rc: u32x4 = transmute(_mm_cmpnle_ss(a, c)); - let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0)); + let ec: u32x4 = transmute(_mm_setr_ps(f32::from_bits(c1), 2.0, 3.0, 4.0)); assert_eq!(rc, ec); let rd: u32x4 = transmute(_mm_cmpnle_ss(a, d)); - let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0)); + let ed: u32x4 = transmute(_mm_setr_ps(f32::from_bits(d1), 2.0, 3.0, 4.0)); assert_eq!(rd, ed); } @@ -2302,15 +2383,15 @@ let d1 = !0u32; // a.extract(0) <= d.extract(0) let rb: u32x4 = transmute(_mm_cmpngt_ss(a, b)); - let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0)); + let eb: u32x4 = transmute(_mm_setr_ps(f32::from_bits(b1), 2.0, 3.0, 4.0)); assert_eq!(rb, eb); let rc: u32x4 = transmute(_mm_cmpngt_ss(a, c)); - let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0)); + let ec: u32x4 = transmute(_mm_setr_ps(f32::from_bits(c1), 2.0, 3.0, 4.0)); assert_eq!(rc, ec); let rd: u32x4 = transmute(_mm_cmpngt_ss(a, d)); - let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0)); + let ed: u32x4 = transmute(_mm_setr_ps(f32::from_bits(d1), 2.0, 3.0, 4.0)); assert_eq!(rd, ed); } @@ -2331,15 
+2412,15 @@ let d1 = !0u32; // a.extract(0) < d.extract(0) let rb: u32x4 = transmute(_mm_cmpnge_ss(a, b)); - let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0)); + let eb: u32x4 = transmute(_mm_setr_ps(f32::from_bits(b1), 2.0, 3.0, 4.0)); assert_eq!(rb, eb); let rc: u32x4 = transmute(_mm_cmpnge_ss(a, c)); - let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0)); + let ec: u32x4 = transmute(_mm_setr_ps(f32::from_bits(c1), 2.0, 3.0, 4.0)); assert_eq!(rc, ec); let rd: u32x4 = transmute(_mm_cmpnge_ss(a, d)); - let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0)); + let ed: u32x4 = transmute(_mm_setr_ps(f32::from_bits(d1), 2.0, 3.0, 4.0)); assert_eq!(rd, ed); } @@ -2355,15 +2436,15 @@ let d1 = !0u32; // a.extract(0) ord d.extract(0) let rb: u32x4 = transmute(_mm_cmpord_ss(a, b)); - let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0)); + let eb: u32x4 = transmute(_mm_setr_ps(f32::from_bits(b1), 2.0, 3.0, 4.0)); assert_eq!(rb, eb); let rc: u32x4 = transmute(_mm_cmpord_ss(a, c)); - let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0)); + let ec: u32x4 = transmute(_mm_setr_ps(f32::from_bits(c1), 2.0, 3.0, 4.0)); assert_eq!(rc, ec); let rd: u32x4 = transmute(_mm_cmpord_ss(a, d)); - let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0)); + let ed: u32x4 = transmute(_mm_setr_ps(f32::from_bits(d1), 2.0, 3.0, 4.0)); assert_eq!(rd, ed); } @@ -2379,15 +2460,15 @@ let d1 = 0u32; // a.extract(0) unord d.extract(0) let rb: u32x4 = transmute(_mm_cmpunord_ss(a, b)); - let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0)); + let eb: u32x4 = transmute(_mm_setr_ps(f32::from_bits(b1), 2.0, 3.0, 4.0)); assert_eq!(rb, eb); let rc: u32x4 = transmute(_mm_cmpunord_ss(a, c)); - let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0)); + let ec: u32x4 = transmute(_mm_setr_ps(f32::from_bits(c1), 2.0, 3.0, 4.0)); assert_eq!(rc, ec); let rd: u32x4 = transmute(_mm_cmpunord_ss(a, d)); - let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0)); + let ed: u32x4 = transmute(_mm_setr_ps(f32::from_bits(d1), 2.0, 3.0, 4.0)); assert_eq!(rd, ed); } @@ -2766,7 +2847,9 @@ } } + #[allow(deprecated)] // FIXME: This test uses deprecated CSR access functions #[simd_test(enable = "sse")] + #[cfg_attr(miri, ignore)] // Uses _mm_setcsr, which is not supported by Miri unsafe fn test_mm_comieq_ss_vs_ucomieq_ss() { // If one of the arguments is a quiet NaN `comieq_ss` should signal an // Invalid Operation Exception while `ucomieq_ss` should not. 
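// The _ss comparison tests above build their expected lanes with f32::from_bits
// and then compare whole vectors as u32 bit patterns. A standalone scalar sketch
// (plain Rust, independent of the stdarch test harness) of why the comparison has
// to happen at the bit level rather than as floats:
fn main() {
    let all_ones = f32::from_bits(0xffff_ffff);
    assert!(all_ones.is_nan()); // the all-ones mask is a NaN when viewed as f32
    assert_ne!(all_ones, all_ones); // so a float comparison could never match it
    assert_eq!(all_ones.to_bits(), 0xffff_ffff); // but the bit pattern round-trips exactly
}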
@@ -3072,7 +3155,7 @@ let mut p = vals.as_mut_ptr(); if (p as usize) & 0xf != 0 { - ofs = ((16 - (p as usize)) & 0xf) >> 2; + ofs = (16 - ((p as usize) & 0xf)) >> 2; p = p.add(ofs); } @@ -3098,7 +3181,7 @@ // Align p to 16-byte boundary if (p as usize) & 0xf != 0 { - ofs = ((16 - (p as usize)) & 0xf) >> 2; + ofs = (16 - ((p as usize) & 0xf)) >> 2; p = p.add(ofs); } @@ -3124,7 +3207,7 @@ // Align p to 16-byte boundary if (p as usize) & 0xf != 0 { - ofs = ((16 - (p as usize)) & 0xf) >> 2; + ofs = (16 - ((p as usize) & 0xf)) >> 2; p = p.add(ofs); } @@ -3186,11 +3269,15 @@ } #[simd_test(enable = "sse")] + // Miri cannot support this until it is clear how it fits in the Rust memory model + #[cfg_attr(miri, ignore)] unsafe fn test_mm_sfence() { _mm_sfence(); } + #[allow(deprecated)] // FIXME: This tests functions that are immediate UB #[simd_test(enable = "sse")] + #[cfg_attr(miri, ignore)] // Miri does not support accesing the CSR unsafe fn test_mm_getcsr_setcsr_1() { let saved_csr = _mm_getcsr(); @@ -3206,7 +3293,9 @@ assert_eq_m128(r, exp); // first component is a denormalized f32 } + #[allow(deprecated)] // FIXME: This tests functions that are immediate UB #[simd_test(enable = "sse")] + #[cfg_attr(miri, ignore)] // Miri does not support accesing the CSR unsafe fn test_mm_getcsr_setcsr_2() { // Same as _mm_setcsr_1 test, but with opposite flag value. @@ -3224,7 +3313,9 @@ assert_eq_m128(r, exp); // first component is a denormalized f32 } + #[allow(deprecated)] // FIXME: This tests functions that are immediate UB #[simd_test(enable = "sse")] + #[cfg_attr(miri, ignore)] // Miri does not support accesing the CSR unsafe fn test_mm_getcsr_setcsr_underflow() { _MM_SET_EXCEPTION_STATE(0); @@ -3263,6 +3354,9 @@ } #[simd_test(enable = "sse")] + // Miri cannot support this until it is clear how it fits in the Rust memory model + // (non-temporal store) + #[cfg_attr(miri, ignore)] unsafe fn test_mm_stream_ps() { let a = _mm_set1_ps(7.0); let mut mem = Memory { data: [-1.0; 4] }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse2.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse2.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse2.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse2.rs 2023-12-21 16:55:32.000000000 +0000 @@ -165,7 +165,10 @@ #[cfg_attr(test, assert_instr(pavgb))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_avg_epu8(a: __m128i, b: __m128i) -> __m128i { - transmute(pavgb(a.as_u8x16(), b.as_u8x16())) + let a = simd_cast::<_, u16x16>(a.as_u8x16()); + let b = simd_cast::<_, u16x16>(b.as_u8x16()); + let r = simd_shr(simd_add(simd_add(a, b), u16x16::splat(1)), u16x16::splat(1)); + transmute(simd_cast::<_, u8x16>(r)) } /// Averages packed unsigned 16-bit integers in `a` and `b`. @@ -176,7 +179,10 @@ #[cfg_attr(test, assert_instr(pavgw))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_avg_epu16(a: __m128i, b: __m128i) -> __m128i { - transmute(pavgw(a.as_u16x8(), b.as_u16x8())) + let a = simd_cast::<_, u32x8>(a.as_u16x8()); + let b = simd_cast::<_, u32x8>(b.as_u16x8()); + let r = simd_shr(simd_add(simd_add(a, b), u32x8::splat(1)), u32x8::splat(1)); + transmute(simd_cast::<_, u16x8>(r)) } /// Multiplies and then horizontally add signed 16 bit integers in `a` and `b`. 
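// The new _mm_avg_epu8/_mm_avg_epu16 bodies above compute the rounded average in a
// widened lane type so the +1 cannot overflow. A per-lane scalar sketch of the same
// (a + b + 1) >> 1 identity (plain Rust, for illustration only):
fn avg_u8(a: u8, b: u8) -> u8 {
    (((a as u16) + (b as u16) + 1) >> 1) as u8
}

fn main() {
    assert_eq!(avg_u8(0, 1), 1); // ties round up, matching PAVGB
    assert_eq!(avg_u8(255, 255), 255); // widening avoids wrapping at the u8 boundary
    assert_eq!(avg_u8(10, 20), 15);
}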
@@ -261,7 +267,10 @@ #[cfg_attr(test, assert_instr(pmulhw))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_mulhi_epi16(a: __m128i, b: __m128i) -> __m128i { - transmute(pmulhw(a.as_i16x8(), b.as_i16x8())) + let a = simd_cast::<_, i32x8>(a.as_i16x8()); + let b = simd_cast::<_, i32x8>(b.as_i16x8()); + let r = simd_shr(simd_mul(a, b), i32x8::splat(16)); + transmute(simd_cast::(r)) } /// Multiplies the packed unsigned 16-bit integers in `a` and `b`. @@ -275,7 +284,10 @@ #[cfg_attr(test, assert_instr(pmulhuw))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_mulhi_epu16(a: __m128i, b: __m128i) -> __m128i { - transmute(pmulhuw(a.as_u16x8(), b.as_u16x8())) + let a = simd_cast::<_, u32x8>(a.as_u16x8()); + let b = simd_cast::<_, u32x8>(b.as_u16x8()); + let r = simd_shr(simd_mul(a, b), u32x8::splat(16)); + transmute(simd_cast::(r)) } /// Multiplies the packed 16-bit integers in `a` and `b`. @@ -303,7 +315,10 @@ #[cfg_attr(test, assert_instr(pmuludq))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_mul_epu32(a: __m128i, b: __m128i) -> __m128i { - transmute(pmuludq(a.as_u32x4(), b.as_u32x4())) + let a = a.as_u64x2(); + let b = b.as_u64x2(); + let mask = u64x2::splat(u32::MAX.into()); + transmute(simd_mul(simd_and(a, mask), simd_and(b, mask))) } /// Sum the absolute differences of packed unsigned 8-bit integers. @@ -952,7 +967,7 @@ #[cfg_attr(test, assert_instr(cvtdq2ps))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_cvtepi32_ps(a: __m128i) -> __m128 { - cvtdq2ps(a.as_i32x4()) + transmute(simd_cast::<_, f32x4>(a.as_i32x4())) } /// Converts packed single-precision (32-bit) floating-point elements in `a` @@ -2240,7 +2255,9 @@ #[cfg_attr(test, assert_instr(cvtpd2ps))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_cvtpd_ps(a: __m128d) -> __m128 { - cvtpd2ps(a) + let r = simd_cast::<_, f32x2>(a.as_f64x2()); + let zero = f32x2::new(0.0, 0.0); + transmute::(simd_shuffle!(r, zero, [0, 1, 2, 3])) } /// Converts packed single-precision (32-bit) floating-point elements in `a` to @@ -2253,7 +2270,8 @@ #[cfg_attr(test, assert_instr(cvtps2pd))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_cvtps_pd(a: __m128) -> __m128d { - cvtps2pd(a) + let a = a.as_f32x4(); + transmute(simd_cast::(simd_shuffle!(a, a, [0, 1]))) } /// Converts packed double-precision (64-bit) floating-point elements in `a` to @@ -2432,7 +2450,10 @@ #[cfg_attr(test, assert_instr(movmskpd))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_movemask_pd(a: __m128d) -> i32 { - movmskpd(a) + // Propagate the highest bit to the rest, because simd_bitmask + // requires all-1 or all-0. 
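// The comment above is the heart of the new _mm_movemask_pd lowering: comparing
// against zero as signed integers turns each lane's sign bit into an all-ones or
// all-zeros lane, which simd_bitmask then packs into one bit per lane. A scalar
// sketch of the same sign-bit extraction (not the stdarch code itself):
fn movemask_pd(lanes: [f64; 2]) -> i32 {
    let mut mask = 0;
    for (i, x) in lanes.iter().enumerate() {
        // "sign bit set" includes -0.0 and negative NaNs, exactly like MOVMSKPD
        if (x.to_bits() as i64) < 0 {
            mask |= 1 << i;
        }
    }
    mask
}

fn main() {
    assert_eq!(movemask_pd([1.0, -1.0]), 0b10);
    assert_eq!(movemask_pd([-0.0, 2.0]), 0b01);
}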
+ let mask: i64x2 = simd_lt(transmute(a), i64x2::splat(0)); + simd_bitmask::(mask).into() } /// Loads 128-bits (composed of 2 packed double-precision (64-bit) @@ -2826,18 +2847,8 @@ fn lfence(); #[link_name = "llvm.x86.sse2.mfence"] fn mfence(); - #[link_name = "llvm.x86.sse2.pavg.b"] - fn pavgb(a: u8x16, b: u8x16) -> u8x16; - #[link_name = "llvm.x86.sse2.pavg.w"] - fn pavgw(a: u16x8, b: u16x8) -> u16x8; #[link_name = "llvm.x86.sse2.pmadd.wd"] fn pmaddwd(a: i16x8, b: i16x8) -> i32x4; - #[link_name = "llvm.x86.sse2.pmulh.w"] - fn pmulhw(a: i16x8, b: i16x8) -> i16x8; - #[link_name = "llvm.x86.sse2.pmulhu.w"] - fn pmulhuw(a: u16x8, b: u16x8) -> u16x8; - #[link_name = "llvm.x86.sse2.pmulu.dq"] - fn pmuludq(a: u32x4, b: u32x4) -> u64x2; #[link_name = "llvm.x86.sse2.psad.bw"] fn psadbw(a: u8x16, b: u8x16) -> u64x2; #[link_name = "llvm.x86.sse2.psll.w"] @@ -2856,8 +2867,6 @@ fn psrld(a: i32x4, count: i32x4) -> i32x4; #[link_name = "llvm.x86.sse2.psrl.q"] fn psrlq(a: i64x2, count: i64x2) -> i64x2; - #[link_name = "llvm.x86.sse2.cvtdq2ps"] - fn cvtdq2ps(a: i32x4) -> __m128; #[link_name = "llvm.x86.sse2.cvtps2dq"] fn cvtps2dq(a: __m128) -> i32x4; #[link_name = "llvm.x86.sse2.maskmov.dqu"] @@ -2908,12 +2917,6 @@ fn ucomigesd(a: __m128d, b: __m128d) -> i32; #[link_name = "llvm.x86.sse2.ucomineq.sd"] fn ucomineqsd(a: __m128d, b: __m128d) -> i32; - #[link_name = "llvm.x86.sse2.movmsk.pd"] - fn movmskpd(a: __m128d) -> i32; - #[link_name = "llvm.x86.sse2.cvtpd2ps"] - fn cvtpd2ps(a: __m128d) -> __m128; - #[link_name = "llvm.x86.sse2.cvtps2pd"] - fn cvtps2pd(a: __m128) -> __m128d; #[link_name = "llvm.x86.sse2.cvtpd2dq"] fn cvtpd2dq(a: __m128d) -> i32x4; #[link_name = "llvm.x86.sse2.cvtsd2si"] @@ -2956,11 +2959,15 @@ } #[simd_test(enable = "sse2")] + // Miri cannot support this until it is clear how it fits in the Rust memory model + #[cfg_attr(miri, ignore)] unsafe fn test_mm_lfence() { _mm_lfence(); } #[simd_test(enable = "sse2")] + // Miri cannot support this until it is clear how it fits in the Rust memory model + #[cfg_attr(miri, ignore)] unsafe fn test_mm_mfence() { _mm_mfence(); } @@ -3343,83 +3350,124 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_slli_epi16() { - #[rustfmt::skip] - let a = _mm_setr_epi16( - 0xFFFF as u16 as i16, 0x0FFF, 0x00FF, 0x000F, 0, 0, 0, 0, - ); + let a = _mm_setr_epi16(0xCC, -0xCC, 0xDD, -0xDD, 0xEE, -0xEE, 0xFF, -0xFF); let r = _mm_slli_epi16::<4>(a); - - #[rustfmt::skip] - let e = _mm_setr_epi16( - 0xFFF0 as u16 as i16, 0xFFF0 as u16 as i16, 0x0FF0, 0x00F0, - 0, 0, 0, 0, + assert_eq_m128i( + r, + _mm_setr_epi16(0xCC0, -0xCC0, 0xDD0, -0xDD0, 0xEE0, -0xEE0, 0xFF0, -0xFF0), ); - assert_eq_m128i(r, e); + let r = _mm_slli_epi16::<16>(a); + assert_eq_m128i(r, _mm_set1_epi16(0)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_sll_epi16() { - let a = _mm_setr_epi16(0xFF, 0, 0, 0, 0, 0, 0, 0); - let r = _mm_sll_epi16(a, _mm_setr_epi16(4, 0, 0, 0, 0, 0, 0, 0)); - assert_eq_m128i(r, _mm_setr_epi16(0xFF0, 0, 0, 0, 0, 0, 0, 0)); - let r = _mm_sll_epi16(a, _mm_setr_epi16(0, 0, 0, 0, 4, 0, 0, 0)); - assert_eq_m128i(r, _mm_setr_epi16(0xFF, 0, 0, 0, 0, 0, 0, 0)); + let a = _mm_setr_epi16(0xCC, -0xCC, 0xDD, -0xDD, 0xEE, -0xEE, 0xFF, -0xFF); + let r = _mm_sll_epi16(a, _mm_set_epi64x(0, 4)); + assert_eq_m128i( + r, + _mm_setr_epi16(0xCC0, -0xCC0, 0xDD0, -0xDD0, 0xEE0, -0xEE0, 0xFF0, -0xFF0), + ); + let r = _mm_sll_epi16(a, _mm_set_epi64x(4, 0)); + assert_eq_m128i(r, a); + let r = _mm_sll_epi16(a, _mm_set_epi64x(0, 16)); + assert_eq_m128i(r, _mm_set1_epi16(0)); + let r = 
_mm_sll_epi16(a, _mm_set_epi64x(0, i64::MAX)); + assert_eq_m128i(r, _mm_set1_epi16(0)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_slli_epi32() { - let r = _mm_slli_epi32::<4>(_mm_set1_epi32(0xFFFF)); - assert_eq_m128i(r, _mm_set1_epi32(0xFFFF0)); + let a = _mm_setr_epi32(0xEEEE, -0xEEEE, 0xFFFF, -0xFFFF); + let r = _mm_slli_epi32::<4>(a); + assert_eq_m128i(r, _mm_setr_epi32(0xEEEE0, -0xEEEE0, 0xFFFF0, -0xFFFF0)); + let r = _mm_slli_epi32::<32>(a); + assert_eq_m128i(r, _mm_set1_epi32(0)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_sll_epi32() { - let a = _mm_set1_epi32(0xFFFF); - let b = _mm_setr_epi32(4, 0, 0, 0); - let r = _mm_sll_epi32(a, b); - assert_eq_m128i(r, _mm_set1_epi32(0xFFFF0)); + let a = _mm_setr_epi32(0xEEEE, -0xEEEE, 0xFFFF, -0xFFFF); + let r = _mm_sll_epi32(a, _mm_set_epi64x(0, 4)); + assert_eq_m128i(r, _mm_setr_epi32(0xEEEE0, -0xEEEE0, 0xFFFF0, -0xFFFF0)); + let r = _mm_sll_epi32(a, _mm_set_epi64x(4, 0)); + assert_eq_m128i(r, a); + let r = _mm_sll_epi32(a, _mm_set_epi64x(0, 32)); + assert_eq_m128i(r, _mm_set1_epi32(0)); + let r = _mm_sll_epi32(a, _mm_set_epi64x(0, i64::MAX)); + assert_eq_m128i(r, _mm_set1_epi32(0)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_slli_epi64() { - let r = _mm_slli_epi64::<4>(_mm_set1_epi64x(0xFFFFFFFF)); - assert_eq_m128i(r, _mm_set1_epi64x(0xFFFFFFFF0)); + let a = _mm_set_epi64x(0xFFFFFFFF, -0xFFFFFFFF); + let r = _mm_slli_epi64::<4>(a); + assert_eq_m128i(r, _mm_set_epi64x(0xFFFFFFFF0, -0xFFFFFFFF0)); + let r = _mm_slli_epi64::<64>(a); + assert_eq_m128i(r, _mm_set1_epi64x(0)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_sll_epi64() { - let a = _mm_set1_epi64x(0xFFFFFFFF); - let b = _mm_setr_epi64x(4, 0); - let r = _mm_sll_epi64(a, b); - assert_eq_m128i(r, _mm_set1_epi64x(0xFFFFFFFF0)); + let a = _mm_set_epi64x(0xFFFFFFFF, -0xFFFFFFFF); + let r = _mm_sll_epi64(a, _mm_set_epi64x(0, 4)); + assert_eq_m128i(r, _mm_set_epi64x(0xFFFFFFFF0, -0xFFFFFFFF0)); + let r = _mm_sll_epi64(a, _mm_set_epi64x(4, 0)); + assert_eq_m128i(r, a); + let r = _mm_sll_epi64(a, _mm_set_epi64x(0, 64)); + assert_eq_m128i(r, _mm_set1_epi64x(0)); + let r = _mm_sll_epi64(a, _mm_set_epi64x(0, i64::MAX)); + assert_eq_m128i(r, _mm_set1_epi64x(0)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_srai_epi16() { - let r = _mm_srai_epi16::<1>(_mm_set1_epi16(-1)); - assert_eq_m128i(r, _mm_set1_epi16(-1)); + let a = _mm_setr_epi16(0xCC, -0xCC, 0xDD, -0xDD, 0xEE, -0xEE, 0xFF, -0xFF); + let r = _mm_srai_epi16::<4>(a); + assert_eq_m128i( + r, + _mm_setr_epi16(0xC, -0xD, 0xD, -0xE, 0xE, -0xF, 0xF, -0x10), + ); + let r = _mm_srai_epi16::<16>(a); + assert_eq_m128i(r, _mm_setr_epi16(0, -1, 0, -1, 0, -1, 0, -1)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_sra_epi16() { - let a = _mm_set1_epi16(-1); - let b = _mm_setr_epi16(1, 0, 0, 0, 0, 0, 0, 0); - let r = _mm_sra_epi16(a, b); - assert_eq_m128i(r, _mm_set1_epi16(-1)); + let a = _mm_setr_epi16(0xCC, -0xCC, 0xDD, -0xDD, 0xEE, -0xEE, 0xFF, -0xFF); + let r = _mm_sra_epi16(a, _mm_set_epi64x(0, 4)); + assert_eq_m128i( + r, + _mm_setr_epi16(0xC, -0xD, 0xD, -0xE, 0xE, -0xF, 0xF, -0x10), + ); + let r = _mm_sra_epi16(a, _mm_set_epi64x(4, 0)); + assert_eq_m128i(r, a); + let r = _mm_sra_epi16(a, _mm_set_epi64x(0, 16)); + assert_eq_m128i(r, _mm_setr_epi16(0, -1, 0, -1, 0, -1, 0, -1)); + let r = _mm_sra_epi16(a, _mm_set_epi64x(0, i64::MAX)); + assert_eq_m128i(r, _mm_setr_epi16(0, -1, 0, -1, 0, -1, 0, -1)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_srai_epi32() { - let r = _mm_srai_epi32::<1>(_mm_set1_epi32(-1)); - 
assert_eq_m128i(r, _mm_set1_epi32(-1)); + let a = _mm_setr_epi32(0xEEEE, -0xEEEE, 0xFFFF, -0xFFFF); + let r = _mm_srai_epi32::<4>(a); + assert_eq_m128i(r, _mm_setr_epi32(0xEEE, -0xEEF, 0xFFF, -0x1000)); + let r = _mm_srai_epi32::<32>(a); + assert_eq_m128i(r, _mm_setr_epi32(0, -1, 0, -1)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_sra_epi32() { - let a = _mm_set1_epi32(-1); - let b = _mm_setr_epi32(1, 0, 0, 0); - let r = _mm_sra_epi32(a, b); - assert_eq_m128i(r, _mm_set1_epi32(-1)); + let a = _mm_setr_epi32(0xEEEE, -0xEEEE, 0xFFFF, -0xFFFF); + let r = _mm_sra_epi32(a, _mm_set_epi64x(0, 4)); + assert_eq_m128i(r, _mm_setr_epi32(0xEEE, -0xEEF, 0xFFF, -0x1000)); + let r = _mm_sra_epi32(a, _mm_set_epi64x(4, 0)); + assert_eq_m128i(r, a); + let r = _mm_sra_epi32(a, _mm_set_epi64x(0, 32)); + assert_eq_m128i(r, _mm_setr_epi32(0, -1, 0, -1)); + let r = _mm_sra_epi32(a, _mm_set_epi64x(0, i64::MAX)); + assert_eq_m128i(r, _mm_setr_epi32(0, -1, 0, -1)); } #[simd_test(enable = "sse2")] @@ -3453,53 +3501,74 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_srli_epi16() { - #[rustfmt::skip] - let a = _mm_setr_epi16( - 0xFFFF as u16 as i16, 0x0FFF, 0x00FF, 0x000F, 0, 0, 0, 0, - ); + let a = _mm_setr_epi16(0xCC, -0xCC, 0xDD, -0xDD, 0xEE, -0xEE, 0xFF, -0xFF); let r = _mm_srli_epi16::<4>(a); - #[rustfmt::skip] - let e = _mm_setr_epi16( - 0xFFF as u16 as i16, 0xFF as u16 as i16, 0xF, 0, 0, 0, 0, 0, + assert_eq_m128i( + r, + _mm_setr_epi16(0xC, 0xFF3, 0xD, 0xFF2, 0xE, 0xFF1, 0xF, 0xFF0), ); - assert_eq_m128i(r, e); + let r = _mm_srli_epi16::<16>(a); + assert_eq_m128i(r, _mm_set1_epi16(0)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_srl_epi16() { - let a = _mm_setr_epi16(0xFF, 0, 0, 0, 0, 0, 0, 0); - let r = _mm_srl_epi16(a, _mm_setr_epi16(4, 0, 0, 0, 0, 0, 0, 0)); - assert_eq_m128i(r, _mm_setr_epi16(0xF, 0, 0, 0, 0, 0, 0, 0)); - let r = _mm_srl_epi16(a, _mm_setr_epi16(0, 0, 0, 0, 4, 0, 0, 0)); - assert_eq_m128i(r, _mm_setr_epi16(0xFF, 0, 0, 0, 0, 0, 0, 0)); + let a = _mm_setr_epi16(0xCC, -0xCC, 0xDD, -0xDD, 0xEE, -0xEE, 0xFF, -0xFF); + let r = _mm_srl_epi16(a, _mm_set_epi64x(0, 4)); + assert_eq_m128i( + r, + _mm_setr_epi16(0xC, 0xFF3, 0xD, 0xFF2, 0xE, 0xFF1, 0xF, 0xFF0), + ); + let r = _mm_srl_epi16(a, _mm_set_epi64x(4, 0)); + assert_eq_m128i(r, a); + let r = _mm_srl_epi16(a, _mm_set_epi64x(0, 16)); + assert_eq_m128i(r, _mm_set1_epi16(0)); + let r = _mm_srl_epi16(a, _mm_set_epi64x(0, i64::MAX)); + assert_eq_m128i(r, _mm_set1_epi16(0)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_srli_epi32() { - let r = _mm_srli_epi32::<4>(_mm_set1_epi32(0xFFFF)); - assert_eq_m128i(r, _mm_set1_epi32(0xFFF)); + let a = _mm_setr_epi32(0xEEEE, -0xEEEE, 0xFFFF, -0xFFFF); + let r = _mm_srli_epi32::<4>(a); + assert_eq_m128i(r, _mm_setr_epi32(0xEEE, 0xFFFF111, 0xFFF, 0xFFFF000)); + let r = _mm_srli_epi32::<32>(a); + assert_eq_m128i(r, _mm_set1_epi32(0)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_srl_epi32() { - let a = _mm_set1_epi32(0xFFFF); - let b = _mm_setr_epi32(4, 0, 0, 0); - let r = _mm_srl_epi32(a, b); - assert_eq_m128i(r, _mm_set1_epi32(0xFFF)); + let a = _mm_setr_epi32(0xEEEE, -0xEEEE, 0xFFFF, -0xFFFF); + let r = _mm_srl_epi32(a, _mm_set_epi64x(0, 4)); + assert_eq_m128i(r, _mm_setr_epi32(0xEEE, 0xFFFF111, 0xFFF, 0xFFFF000)); + let r = _mm_srl_epi32(a, _mm_set_epi64x(4, 0)); + assert_eq_m128i(r, a); + let r = _mm_srl_epi32(a, _mm_set_epi64x(0, 32)); + assert_eq_m128i(r, _mm_set1_epi32(0)); + let r = _mm_srl_epi32(a, _mm_set_epi64x(0, i64::MAX)); + assert_eq_m128i(r, _mm_set1_epi32(0)); } 
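// The rewritten shift tests around this point all probe the same SSE2 rule: a shift
// count of the lane width or more zeroes logical shifts and sign-fills arithmetic
// shifts, where a native Rust shift by that amount would be rejected. A scalar sketch
// of that behaviour for 16-bit lanes (helper names here are illustrative):
fn srl16(x: u16, count: u32) -> u16 {
    if count >= 16 { 0 } else { x >> count }
}

fn sra16(x: i16, count: u32) -> i16 {
    if count >= 16 {
        if x < 0 { -1 } else { 0 }
    } else {
        x >> count
    }
}

fn main() {
    assert_eq!(srl16(0x00FF, 4), 0x000F);
    assert_eq!(srl16(0x00FF, 16), 0); // oversized count clears the lane
    assert_eq!(sra16(-0xDD, 4), -0xE); // arithmetic shift rounds toward -infinity
    assert_eq!(sra16(-0xFF, 16), -1); // oversized count fills with the sign bit
}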
#[simd_test(enable = "sse2")] unsafe fn test_mm_srli_epi64() { - let r = _mm_srli_epi64::<4>(_mm_set1_epi64x(0xFFFFFFFF)); - assert_eq_m128i(r, _mm_set1_epi64x(0xFFFFFFF)); + let a = _mm_set_epi64x(0xFFFFFFFF, -0xFFFFFFFF); + let r = _mm_srli_epi64::<4>(a); + assert_eq_m128i(r, _mm_set_epi64x(0xFFFFFFF, 0xFFFFFFFF0000000)); + let r = _mm_srli_epi64::<64>(a); + assert_eq_m128i(r, _mm_set1_epi64x(0)); } #[simd_test(enable = "sse2")] unsafe fn test_mm_srl_epi64() { - let a = _mm_set1_epi64x(0xFFFFFFFF); - let b = _mm_setr_epi64x(4, 0); - let r = _mm_srl_epi64(a, b); - assert_eq_m128i(r, _mm_set1_epi64x(0xFFFFFFF)); + let a = _mm_set_epi64x(0xFFFFFFFF, -0xFFFFFFFF); + let r = _mm_srl_epi64(a, _mm_set_epi64x(0, 4)); + assert_eq_m128i(r, _mm_set_epi64x(0xFFFFFFF, 0xFFFFFFFF0000000)); + let r = _mm_srl_epi64(a, _mm_set_epi64x(4, 0)); + assert_eq_m128i(r, a); + let r = _mm_srl_epi64(a, _mm_set_epi64x(0, 64)); + assert_eq_m128i(r, _mm_set1_epi64x(0)); + let r = _mm_srl_epi64(a, _mm_set_epi64x(0, i64::MAX)); + assert_eq_m128i(r, _mm_set1_epi64x(0)); } #[simd_test(enable = "sse2")] @@ -3766,6 +3835,9 @@ } #[simd_test(enable = "sse2")] + // Miri cannot support this until it is clear how it fits in the Rust memory model + // (non-temporal store) + #[cfg_attr(miri, ignore)] unsafe fn test_mm_maskmoveu_si128() { let a = _mm_set1_epi8(9); #[rustfmt::skip] @@ -3804,6 +3876,9 @@ } #[simd_test(enable = "sse2")] + // Miri cannot support this until it is clear how it fits in the Rust memory model + // (non-temporal store) + #[cfg_attr(miri, ignore)] unsafe fn test_mm_stream_si128() { let a = _mm_setr_epi32(1, 2, 3, 4); let mut r = _mm_undefined_si128(); @@ -3812,6 +3887,9 @@ } #[simd_test(enable = "sse2")] + // Miri cannot support this until it is clear how it fits in the Rust memory model + // (non-temporal store) + #[cfg_attr(miri, ignore)] unsafe fn test_mm_stream_si32() { let a: i32 = 7; let mut mem = boxed::Box::::new(-1); @@ -4055,6 +4133,17 @@ let b = _mm_setr_pd(5.0, 10.0); let r = _mm_max_pd(a, b); assert_eq_m128d(r, _mm_setr_pd(5.0, 10.0)); + + // Check SSE(2)-specific semantics for -0.0 handling. + let a = _mm_setr_pd(-0.0, 0.0); + let b = _mm_setr_pd(0.0, 0.0); + let r1: [u8; 16] = transmute(_mm_max_pd(a, b)); + let r2: [u8; 16] = transmute(_mm_max_pd(b, a)); + let a: [u8; 16] = transmute(a); + let b: [u8; 16] = transmute(b); + assert_eq!(r1, b); + assert_eq!(r2, a); + assert_ne!(a, b); // sanity check that -0.0 is actually present } #[simd_test(enable = "sse2")] @@ -4071,6 +4160,17 @@ let b = _mm_setr_pd(5.0, 10.0); let r = _mm_min_pd(a, b); assert_eq_m128d(r, _mm_setr_pd(1.0, 2.0)); + + // Check SSE(2)-specific semantics for -0.0 handling. 
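// The -0.0 checks being added here pin down the SSE MINPD/MAXPD rule that the second
// operand is returned whenever the comparison is false, which includes the case where
// the operands compare equal (as -0.0 and 0.0 do). A scalar sketch of that asymmetry
// (plain Rust, not the intrinsic itself):
fn sse_min(a: f64, b: f64) -> f64 {
    if a < b { a } else { b }
}

fn main() {
    // -0.0 < 0.0 is false, so min(-0.0, 0.0) is the second operand, +0.0 ...
    assert_eq!(sse_min(-0.0, 0.0).to_bits(), 0.0f64.to_bits());
    // ... and swapping the arguments swaps the answer, bit for bit.
    assert_eq!(sse_min(0.0, -0.0).to_bits(), (-0.0f64).to_bits());
}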
+ let a = _mm_setr_pd(-0.0, 0.0); + let b = _mm_setr_pd(0.0, 0.0); + let r1: [u8; 16] = transmute(_mm_min_pd(a, b)); + let r2: [u8; 16] = transmute(_mm_min_pd(b, a)); + let a: [u8; 16] = transmute(a); + let b: [u8; 16] = transmute(b); + assert_eq!(r1, b); + assert_eq!(r2, a); + assert_ne!(a, b); // sanity check that -0.0 is actually present } #[simd_test(enable = "sse2")] @@ -4158,7 +4258,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmpeq_sd() { let (a, b) = (_mm_setr_pd(1.0, 2.0), _mm_setr_pd(1.0, 3.0)); - let e = _mm_setr_epi64x(!0, transmute(2.0f64)); + let e = _mm_setr_epi64x(!0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmpeq_sd(a, b)); assert_eq_m128i(r, e); } @@ -4166,7 +4266,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmplt_sd() { let (a, b) = (_mm_setr_pd(1.0, 2.0), _mm_setr_pd(5.0, 3.0)); - let e = _mm_setr_epi64x(!0, transmute(2.0f64)); + let e = _mm_setr_epi64x(!0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmplt_sd(a, b)); assert_eq_m128i(r, e); } @@ -4174,7 +4274,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmple_sd() { let (a, b) = (_mm_setr_pd(1.0, 2.0), _mm_setr_pd(1.0, 3.0)); - let e = _mm_setr_epi64x(!0, transmute(2.0f64)); + let e = _mm_setr_epi64x(!0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmple_sd(a, b)); assert_eq_m128i(r, e); } @@ -4182,7 +4282,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmpgt_sd() { let (a, b) = (_mm_setr_pd(5.0, 2.0), _mm_setr_pd(1.0, 3.0)); - let e = _mm_setr_epi64x(!0, transmute(2.0f64)); + let e = _mm_setr_epi64x(!0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmpgt_sd(a, b)); assert_eq_m128i(r, e); } @@ -4190,7 +4290,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmpge_sd() { let (a, b) = (_mm_setr_pd(1.0, 2.0), _mm_setr_pd(1.0, 3.0)); - let e = _mm_setr_epi64x(!0, transmute(2.0f64)); + let e = _mm_setr_epi64x(!0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmpge_sd(a, b)); assert_eq_m128i(r, e); } @@ -4198,7 +4298,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmpord_sd() { let (a, b) = (_mm_setr_pd(NAN, 2.0), _mm_setr_pd(5.0, 3.0)); - let e = _mm_setr_epi64x(0, transmute(2.0f64)); + let e = _mm_setr_epi64x(0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmpord_sd(a, b)); assert_eq_m128i(r, e); } @@ -4206,7 +4306,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmpunord_sd() { let (a, b) = (_mm_setr_pd(NAN, 2.0), _mm_setr_pd(5.0, 3.0)); - let e = _mm_setr_epi64x(!0, transmute(2.0f64)); + let e = _mm_setr_epi64x(!0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmpunord_sd(a, b)); assert_eq_m128i(r, e); } @@ -4214,7 +4314,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmpneq_sd() { let (a, b) = (_mm_setr_pd(1.0, 2.0), _mm_setr_pd(5.0, 3.0)); - let e = _mm_setr_epi64x(!0, transmute(2.0f64)); + let e = _mm_setr_epi64x(!0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmpneq_sd(a, b)); assert_eq_m128i(r, e); } @@ -4222,7 +4322,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmpnlt_sd() { let (a, b) = (_mm_setr_pd(1.0, 2.0), _mm_setr_pd(5.0, 3.0)); - let e = _mm_setr_epi64x(0, transmute(2.0f64)); + let e = _mm_setr_epi64x(0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmpnlt_sd(a, b)); assert_eq_m128i(r, e); } @@ -4230,7 +4330,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmpnle_sd() { let (a, b) = (_mm_setr_pd(1.0, 2.0), _mm_setr_pd(1.0, 3.0)); - let e = _mm_setr_epi64x(0, transmute(2.0f64)); + let e = 
_mm_setr_epi64x(0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmpnle_sd(a, b)); assert_eq_m128i(r, e); } @@ -4238,7 +4338,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmpngt_sd() { let (a, b) = (_mm_setr_pd(5.0, 2.0), _mm_setr_pd(1.0, 3.0)); - let e = _mm_setr_epi64x(0, transmute(2.0f64)); + let e = _mm_setr_epi64x(0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmpngt_sd(a, b)); assert_eq_m128i(r, e); } @@ -4246,7 +4346,7 @@ #[simd_test(enable = "sse2")] unsafe fn test_mm_cmpnge_sd() { let (a, b) = (_mm_setr_pd(1.0, 2.0), _mm_setr_pd(1.0, 3.0)); - let e = _mm_setr_epi64x(0, transmute(2.0f64)); + let e = _mm_setr_epi64x(0, 2.0f64.to_bits() as i64); let r = transmute::<_, __m128i>(_mm_cmpnge_sd(a, b)); assert_eq_m128i(r, e); } @@ -4478,6 +4578,9 @@ } #[simd_test(enable = "sse2")] + // Miri cannot support this until it is clear how it fits in the Rust memory model + // (non-temporal store) + #[cfg_attr(miri, ignore)] unsafe fn test_mm_stream_pd() { #[repr(align(128))] struct Memory { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse3.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse3.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse3.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse3.rs 2023-12-21 16:55:32.000000000 +0000 @@ -1,7 +1,7 @@ //! Streaming SIMD Extensions 3 (SSE3) use crate::{ - core_arch::{simd::*, simd_llvm::simd_shuffle, x86::*}, + core_arch::{simd::*, simd_llvm::*, x86::*}, mem::transmute, }; @@ -17,7 +17,11 @@ #[cfg_attr(test, assert_instr(addsubps))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_addsub_ps(a: __m128, b: __m128) -> __m128 { - addsubps(a, b) + let a = a.as_f32x4(); + let b = b.as_f32x4(); + let add = simd_add(a, b); + let sub = simd_sub(a, b); + simd_shuffle!(add, sub, [4, 1, 6, 3]) } /// Alternatively add and subtract packed double-precision (64-bit) @@ -29,7 +33,11 @@ #[cfg_attr(test, assert_instr(addsubpd))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_addsub_pd(a: __m128d, b: __m128d) -> __m128d { - addsubpd(a, b) + let a = a.as_f64x2(); + let b = b.as_f64x2(); + let add = simd_add(a, b); + let sub = simd_sub(a, b); + simd_shuffle!(add, sub, [2, 1]) } /// Horizontally adds adjacent pairs of double-precision (64-bit) @@ -143,10 +151,6 @@ #[allow(improper_ctypes)] extern "C" { - #[link_name = "llvm.x86.sse3.addsub.ps"] - fn addsubps(a: __m128, b: __m128) -> __m128; - #[link_name = "llvm.x86.sse3.addsub.pd"] - fn addsubpd(a: __m128d, b: __m128d) -> __m128d; #[link_name = "llvm.x86.sse3.hadd.pd"] fn haddpd(a: __m128d, b: __m128d) -> __m128d; #[link_name = "llvm.x86.sse3.hadd.ps"] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse41.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse41.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse41.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/sse41.rs 2023-12-21 16:55:32.000000000 +0000 @@ -62,7 +62,8 @@ #[cfg_attr(test, assert_instr(pblendvb))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_blendv_epi8(a: __m128i, b: __m128i, mask: __m128i) -> __m128i { - transmute(pblendvb(a.as_i8x16(), b.as_i8x16(), mask.as_i8x16())) + let mask: i8x16 = simd_lt(mask.as_i8x16(), i8x16::splat(0)); 
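// _mm_blendv_epi8 above now goes through simd_lt + simd_select: a byte of the result
// comes from `b` exactly when the matching mask byte has its sign bit set. A one-lane
// scalar sketch of that selection rule (illustrative only):
fn blendv_byte(a: i8, b: i8, mask: i8) -> i8 {
    if mask < 0 { b } else { a }
}

fn main() {
    assert_eq!(blendv_byte(1, 2, -1), 2); // sign bit set: take b
    assert_eq!(blendv_byte(1, 2, 0x7f), 1); // high bit clear: keep a
    assert_eq!(blendv_byte(1, 2, i8::MIN), 2); // only the sign bit is consulted
}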
+ transmute(simd_select(mask, b.as_i8x16(), a.as_i8x16())) } /// Blend packed 16-bit integers from `a` and `b` using the mask `IMM8`. @@ -74,15 +75,25 @@ /// [Intel's documentation](https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_blend_epi16) #[inline] #[target_feature(enable = "sse4.1")] -// Note: LLVM7 prefers the single-precision floating-point domain when possible -// see https://bugs.llvm.org/show_bug.cgi?id=38195 -// #[cfg_attr(test, assert_instr(pblendw, IMM8 = 0xF0))] -#[cfg_attr(test, assert_instr(blendps, IMM8 = 0xF0))] +#[cfg_attr(test, assert_instr(pblendw, IMM8 = 0xB1))] #[rustc_legacy_const_generics(2)] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_blend_epi16(a: __m128i, b: __m128i) -> __m128i { static_assert_uimm_bits!(IMM8, 8); - transmute(pblendw(a.as_i16x8(), b.as_i16x8(), IMM8 as u8)) + transmute::(simd_shuffle!( + a.as_i16x8(), + b.as_i16x8(), + [ + [0, 8][IMM8 as usize & 1], + [1, 9][(IMM8 >> 1) as usize & 1], + [2, 10][(IMM8 >> 2) as usize & 1], + [3, 11][(IMM8 >> 3) as usize & 1], + [4, 12][(IMM8 >> 4) as usize & 1], + [5, 13][(IMM8 >> 5) as usize & 1], + [6, 14][(IMM8 >> 6) as usize & 1], + [7, 15][(IMM8 >> 7) as usize & 1], + ] + )) } /// Blend packed double-precision (64-bit) floating-point elements from `a` @@ -94,7 +105,8 @@ #[cfg_attr(test, assert_instr(blendvpd))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_blendv_pd(a: __m128d, b: __m128d, mask: __m128d) -> __m128d { - blendvpd(a, b, mask) + let mask: i64x2 = simd_lt(transmute::<_, i64x2>(mask), i64x2::splat(0)); + transmute(simd_select(mask, b.as_f64x2(), a.as_f64x2())) } /// Blend packed single-precision (32-bit) floating-point elements from `a` @@ -106,7 +118,8 @@ #[cfg_attr(test, assert_instr(blendvps))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_blendv_ps(a: __m128, b: __m128, mask: __m128) -> __m128 { - blendvps(a, b, mask) + let mask: i32x4 = simd_lt(transmute::<_, i32x4>(mask), i32x4::splat(0)); + transmute(simd_select(mask, b.as_f32x4(), a.as_f32x4())) } /// Blend packed double-precision (64-bit) floating-point elements from `a` @@ -123,7 +136,11 @@ #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_blend_pd(a: __m128d, b: __m128d) -> __m128d { static_assert_uimm_bits!(IMM2, 2); - blendpd(a, b, IMM2 as u8) + transmute::(simd_shuffle!( + a.as_f64x2(), + b.as_f64x2(), + [[0, 2][IMM2 as usize & 1], [1, 3][(IMM2 >> 1) as usize & 1]] + )) } /// Blend packed single-precision (32-bit) floating-point elements from `a` @@ -137,7 +154,16 @@ #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_blend_ps(a: __m128, b: __m128) -> __m128 { static_assert_uimm_bits!(IMM4, 4); - blendps(a, b, IMM4 as u8) + transmute::(simd_shuffle!( + a.as_f32x4(), + b.as_f32x4(), + [ + [0, 4][IMM4 as usize & 1], + [1, 5][(IMM4 >> 1) as usize & 1], + [2, 6][(IMM4 >> 2) as usize & 1], + [3, 7][(IMM4 >> 3) as usize & 1], + ] + )) } /// Extracts a single-precision (32-bit) floating-point element from `a`, @@ -175,7 +201,7 @@ #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_extract_ps(a: __m128) -> i32 { static_assert_uimm_bits!(IMM8, 2); - transmute(simd_extract::<_, f32>(a, IMM8 as u32)) + simd_extract::<_, f32>(a, IMM8 as u32).to_bits() as i32 } /// Extracts an 8-bit integer from `a`, selected with `IMM8`. 
Returns a 32-bit @@ -923,7 +949,9 @@ #[cfg_attr(test, assert_instr(pmuldq))] #[stable(feature = "simd_x86", since = "1.27.0")] pub unsafe fn _mm_mul_epi32(a: __m128i, b: __m128i) -> __m128i { - transmute(pmuldq(a.as_i32x4(), b.as_i32x4())) + let a = simd_cast::<_, i64x2>(simd_cast::<_, i32x2>(a.as_i64x2())); + let b = simd_cast::<_, i64x2>(simd_cast::<_, i32x2>(b.as_i64x2())); + transmute(simd_mul(a, b)) } /// Multiplies the packed 32-bit integers in `a` and `b`, producing intermediate @@ -1124,18 +1152,6 @@ #[allow(improper_ctypes)] extern "C" { - #[link_name = "llvm.x86.sse41.pblendvb"] - fn pblendvb(a: i8x16, b: i8x16, mask: i8x16) -> i8x16; - #[link_name = "llvm.x86.sse41.blendvpd"] - fn blendvpd(a: __m128d, b: __m128d, mask: __m128d) -> __m128d; - #[link_name = "llvm.x86.sse41.blendvps"] - fn blendvps(a: __m128, b: __m128, mask: __m128) -> __m128; - #[link_name = "llvm.x86.sse41.blendpd"] - fn blendpd(a: __m128d, b: __m128d, imm2: u8) -> __m128d; - #[link_name = "llvm.x86.sse41.blendps"] - fn blendps(a: __m128, b: __m128, imm4: u8) -> __m128; - #[link_name = "llvm.x86.sse41.pblendw"] - fn pblendw(a: i16x8, b: i16x8, imm8: u8) -> i16x8; #[link_name = "llvm.x86.sse41.insertps"] fn insertps(a: __m128, b: __m128, imm8: u8) -> __m128; #[link_name = "llvm.x86.sse41.packusdw"] @@ -1154,8 +1170,6 @@ fn roundss(a: __m128, b: __m128, rounding: i32) -> __m128; #[link_name = "llvm.x86.sse41.phminposuw"] fn phminposuw(a: u16x8) -> u16x8; - #[link_name = "llvm.x86.sse41.pmuldq"] - fn pmuldq(a: i32x4, b: i32x4) -> i64x2; #[link_name = "llvm.x86.sse41.mpsadbw"] fn mpsadbw(a: u8x16, b: u8x16, imm8: u8) -> u16x8; #[link_name = "llvm.x86.sse41.ptestz"] @@ -1245,9 +1259,9 @@ #[simd_test(enable = "sse4.1")] unsafe fn test_mm_extract_ps() { let a = _mm_setr_ps(0.0, 1.0, 2.0, 3.0); - let r: f32 = transmute(_mm_extract_ps::<1>(a)); + let r: f32 = f32::from_bits(_mm_extract_ps::<1>(a) as u32); assert_eq!(r, 1.0); - let r: f32 = transmute(_mm_extract_ps::<3>(a)); + let r: f32 = f32::from_bits(_mm_extract_ps::<3>(a) as u32); assert_eq!(r, 3.0); } @@ -1668,6 +1682,7 @@ assert_eq_m128(r, e); } + #[allow(deprecated)] // FIXME: This test uses deprecated CSR access functions #[simd_test(enable = "sse4.1")] unsafe fn test_mm_round_sd() { let a = _mm_setr_pd(1.5, 3.5); @@ -1680,6 +1695,7 @@ assert_eq_m128d(r, e); } + #[allow(deprecated)] // FIXME: This test uses deprecated CSR access functions #[simd_test(enable = "sse4.1")] unsafe fn test_mm_round_ss() { let a = _mm_setr_ps(1.5, 3.5, 7.5, 15.5); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/test.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/test.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/test.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86/test.rs 2023-12-21 16:55:32.000000000 +0000 @@ -3,11 +3,13 @@ use crate::core_arch::x86::*; use std::mem::transmute; +#[track_caller] #[target_feature(enable = "sse2")] pub unsafe fn assert_eq_m128i(a: __m128i, b: __m128i) { assert_eq!(transmute::<_, [u64; 2]>(a), transmute::<_, [u64; 2]>(b)) } +#[track_caller] #[target_feature(enable = "sse2")] pub unsafe fn assert_eq_m128d(a: __m128d, b: __m128d) { if _mm_movemask_pd(_mm_cmpeq_pd(a, b)) != 0b11 { @@ -20,6 +22,7 @@ transmute::<_, [f64; 2]>(a)[idx] } +#[track_caller] #[target_feature(enable = "sse")] pub unsafe fn assert_eq_m128(a: __m128, b: __m128) { let r = _mm_cmpeq_ps(a, b); @@ -40,11 +43,13 @@ 
_mm_set_epi64x(b, a) } +#[track_caller] #[target_feature(enable = "avx")] pub unsafe fn assert_eq_m256i(a: __m256i, b: __m256i) { assert_eq!(transmute::<_, [u64; 4]>(a), transmute::<_, [u64; 4]>(b)) } +#[track_caller] #[target_feature(enable = "avx")] pub unsafe fn assert_eq_m256d(a: __m256d, b: __m256d) { let cmp = _mm256_cmp_pd::<_CMP_EQ_OQ>(a, b); @@ -58,6 +63,7 @@ transmute::<_, [f64; 4]>(a)[idx] } +#[track_caller] #[target_feature(enable = "avx")] pub unsafe fn assert_eq_m256(a: __m256, b: __m256) { let cmp = _mm256_cmp_ps::<_CMP_EQ_OQ>(a, b); @@ -125,10 +131,12 @@ } pub use self::x86_polyfill::*; +#[track_caller] pub unsafe fn assert_eq_m512i(a: __m512i, b: __m512i) { assert_eq!(transmute::<_, [i32; 16]>(a), transmute::<_, [i32; 16]>(b)) } +#[track_caller] pub unsafe fn assert_eq_m512(a: __m512, b: __m512) { let cmp = _mm512_cmp_ps_mask::<_CMP_EQ_OQ>(a, b); if cmp != 0b11111111_11111111 { @@ -136,6 +144,7 @@ } } +#[track_caller] pub unsafe fn assert_eq_m512d(a: __m512d, b: __m512d) { let cmp = _mm512_cmp_pd_mask::<_CMP_EQ_OQ>(a, b); if cmp != 0b11111111 { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86_64/avx512f.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86_64/avx512f.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86_64/avx512f.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86_64/avx512f.rs 2023-12-21 16:55:32.000000000 +0000 @@ -33,7 +33,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtss2usi))] pub unsafe fn _mm_cvtss_u64(a: __m128) -> u64 { - transmute(vcvtss2usi64(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION)) + vcvtss2usi64(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION) } /// Convert the lower double-precision (64-bit) floating-point element in a to an unsigned 64-bit integer, and store the result in dst. @@ -43,7 +43,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtsd2usi))] pub unsafe fn _mm_cvtsd_u64(a: __m128d) -> u64 { - transmute(vcvtsd2usi64(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION)) + vcvtsd2usi64(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION) } /// Convert the signed 64-bit integer b to a single-precision (32-bit) floating-point element, store the result in the lower element of dst, and copy the upper 3 packed elements from a to the upper elements of dst. @@ -54,8 +54,7 @@ #[cfg_attr(test, assert_instr(vcvtsi2ss))] pub unsafe fn _mm_cvti64_ss(a: __m128, b: i64) -> __m128 { let b = b as f32; - let r = simd_insert(a, 0, b); - transmute(r) + simd_insert(a, 0, b) } /// Convert the signed 64-bit integer b to a double-precision (64-bit) floating-point element, store the result in the lower element of dst, and copy the upper element from a to the upper element of dst. @@ -66,8 +65,7 @@ #[cfg_attr(test, assert_instr(vcvtsi2sd))] pub unsafe fn _mm_cvti64_sd(a: __m128d, b: i64) -> __m128d { let b = b as f64; - let r = simd_insert(a, 0, b); - transmute(r) + simd_insert(a, 0, b) } /// Convert the unsigned 64-bit integer b to a single-precision (32-bit) floating-point element, store the result in the lower element of dst, and copy the upper 3 packed elements from a to the upper elements of dst. 
@@ -78,8 +76,7 @@ #[cfg_attr(test, assert_instr(vcvtusi2ss))] pub unsafe fn _mm_cvtu64_ss(a: __m128, b: u64) -> __m128 { let b = b as f32; - let r = simd_insert(a, 0, b); - transmute(r) + simd_insert(a, 0, b) } /// Convert the unsigned 64-bit integer b to a double-precision (64-bit) floating-point element, store the result in the lower element of dst, and copy the upper element from a to the upper element of dst. @@ -90,8 +87,7 @@ #[cfg_attr(test, assert_instr(vcvtusi2sd))] pub unsafe fn _mm_cvtu64_sd(a: __m128d, b: u64) -> __m128d { let b = b as f64; - let r = simd_insert(a, 0, b); - transmute(r) + simd_insert(a, 0, b) } /// Convert the lower double-precision (64-bit) floating-point element in a to a 64-bit integer with truncation, and store the result in dst. @@ -101,7 +97,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtsd2si))] pub unsafe fn _mm_cvttsd_i64(a: __m128d) -> i64 { - transmute(vcvtsd2si64(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION)) + vcvtsd2si64(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION) } /// Convert the lower double-precision (64-bit) floating-point element in a to an unsigned 64-bit integer with truncation, and store the result in dst. @@ -111,7 +107,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtsd2usi))] pub unsafe fn _mm_cvttsd_u64(a: __m128d) -> u64 { - transmute(vcvtsd2usi64(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION)) + vcvtsd2usi64(a.as_f64x2(), _MM_FROUND_CUR_DIRECTION) } /// Convert the lower single-precision (32-bit) floating-point element in a to a 64-bit integer with truncation, and store the result in dst. @@ -121,7 +117,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtss2si))] pub unsafe fn _mm_cvttss_i64(a: __m128) -> i64 { - transmute(vcvtss2si64(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION)) + vcvtss2si64(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION) } /// Convert the lower single-precision (32-bit) floating-point element in a to an unsigned 64-bit integer with truncation, and store the result in dst. @@ -131,7 +127,7 @@ #[target_feature(enable = "avx512f")] #[cfg_attr(test, assert_instr(vcvtss2usi))] pub unsafe fn _mm_cvttss_u64(a: __m128) -> u64 { - transmute(vcvtss2usi64(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION)) + vcvtss2usi64(a.as_f32x4(), _MM_FROUND_CUR_DIRECTION) } /// Convert the signed 64-bit integer b to a double-precision (64-bit) floating-point element, store the result in the lower element of dst, and copy the upper element from a to the upper element of dst. 
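// A few hunks back, the assert_eq_m128*/m256*/m512* helpers in test.rs gain
// #[track_caller], so a failing assertion is reported at the test's call site instead
// of inside the helper. A small standalone sketch of that mechanism (the
// assert_all_ones helper is hypothetical, not part of stdarch):
#[track_caller]
fn assert_all_ones(x: u64) {
    // Location::caller() resolves to the caller's line because of #[track_caller].
    assert_eq!(x, u64::MAX, "mismatch at {}", std::panic::Location::caller());
}

fn main() {
    assert_all_ones(u64::MAX); // passes; a failing call would name this line
}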
@@ -270,8 +266,7 @@ pub unsafe fn _mm_cvt_roundsd_si64(a: __m128d) -> i64 { static_assert_rounding!(ROUNDING); let a = a.as_f64x2(); - let r = vcvtsd2si64(a, ROUNDING); - transmute(r) + vcvtsd2si64(a, ROUNDING) } /// Convert the lower double-precision (64-bit) floating-point element in a to a 64-bit integer, and store the result in dst.\ @@ -290,8 +285,7 @@ pub unsafe fn _mm_cvt_roundsd_i64(a: __m128d) -> i64 { static_assert_rounding!(ROUNDING); let a = a.as_f64x2(); - let r = vcvtsd2si64(a, ROUNDING); - transmute(r) + vcvtsd2si64(a, ROUNDING) } /// Convert the lower double-precision (64-bit) floating-point element in a to an unsigned 64-bit integer, and store the result in dst.\ @@ -310,8 +304,7 @@ pub unsafe fn _mm_cvt_roundsd_u64(a: __m128d) -> u64 { static_assert_rounding!(ROUNDING); let a = a.as_f64x2(); - let r = vcvtsd2usi64(a, ROUNDING); - transmute(r) + vcvtsd2usi64(a, ROUNDING) } /// Convert the lower single-precision (32-bit) floating-point element in a to a 64-bit integer, and store the result in dst.\ @@ -330,8 +323,7 @@ pub unsafe fn _mm_cvt_roundss_si64(a: __m128) -> i64 { static_assert_rounding!(ROUNDING); let a = a.as_f32x4(); - let r = vcvtss2si64(a, ROUNDING); - transmute(r) + vcvtss2si64(a, ROUNDING) } /// Convert the lower single-precision (32-bit) floating-point element in a to a 64-bit integer, and store the result in dst.\ @@ -350,8 +342,7 @@ pub unsafe fn _mm_cvt_roundss_i64(a: __m128) -> i64 { static_assert_rounding!(ROUNDING); let a = a.as_f32x4(); - let r = vcvtss2si64(a, ROUNDING); - transmute(r) + vcvtss2si64(a, ROUNDING) } /// Convert the lower single-precision (32-bit) floating-point element in a to an unsigned 64-bit integer, and store the result in dst.\ @@ -370,8 +361,7 @@ pub unsafe fn _mm_cvt_roundss_u64(a: __m128) -> u64 { static_assert_rounding!(ROUNDING); let a = a.as_f32x4(); - let r = vcvtss2usi64(a, ROUNDING); - transmute(r) + vcvtss2usi64(a, ROUNDING) } /// Convert the lower double-precision (64-bit) floating-point element in a to a 64-bit integer with truncation, and store the result in dst.\ @@ -385,8 +375,7 @@ pub unsafe fn _mm_cvtt_roundsd_si64(a: __m128d) -> i64 { static_assert_sae!(SAE); let a = a.as_f64x2(); - let r = vcvtsd2si64(a, SAE); - transmute(r) + vcvtsd2si64(a, SAE) } /// Convert the lower double-precision (64-bit) floating-point element in a to a 64-bit integer with truncation, and store the result in dst.\ @@ -400,8 +389,7 @@ pub unsafe fn _mm_cvtt_roundsd_i64(a: __m128d) -> i64 { static_assert_sae!(SAE); let a = a.as_f64x2(); - let r = vcvtsd2si64(a, SAE); - transmute(r) + vcvtsd2si64(a, SAE) } /// Convert the lower double-precision (64-bit) floating-point element in a to an unsigned 64-bit integer with truncation, and store the result in dst.\ @@ -415,8 +403,7 @@ pub unsafe fn _mm_cvtt_roundsd_u64(a: __m128d) -> u64 { static_assert_sae!(SAE); let a = a.as_f64x2(); - let r = vcvtsd2usi64(a, SAE); - transmute(r) + vcvtsd2usi64(a, SAE) } /// Convert the lower single-precision (32-bit) floating-point element in a to a 64-bit integer with truncation, and store the result in dst.\ @@ -430,8 +417,7 @@ pub unsafe fn _mm_cvtt_roundss_i64(a: __m128) -> i64 { static_assert_sae!(SAE); let a = a.as_f32x4(); - let r = vcvtss2si64(a, SAE); - transmute(r) + vcvtss2si64(a, SAE) } /// Convert the lower single-precision (32-bit) floating-point element in a to a 64-bit integer with truncation, and store the result in dst.\ @@ -445,8 +431,7 @@ pub unsafe fn _mm_cvtt_roundss_si64(a: __m128) -> i64 { static_assert_sae!(SAE); let a = a.as_f32x4(); - 
let r = vcvtss2si64(a, SAE); - transmute(r) + vcvtss2si64(a, SAE) } /// Convert the lower single-precision (32-bit) floating-point element in a to an unsigned 64-bit integer with truncation, and store the result in dst.\ @@ -460,8 +445,7 @@ pub unsafe fn _mm_cvtt_roundss_u64(a: __m128) -> u64 { static_assert_sae!(SAE); let a = a.as_f32x4(); - let r = vcvtss2usi64(a, SAE); - transmute(r) + vcvtss2usi64(a, SAE) } #[allow(improper_ctypes)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86_64/sse2.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86_64/sse2.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86_64/sse2.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/core_arch/src/x86_64/sse2.rs 2023-12-21 16:55:32.000000000 +0000 @@ -181,6 +181,9 @@ } #[simd_test(enable = "sse2")] + // Miri cannot support this until it is clear how it fits in the Rust memory model + // (non-temporal store) + #[cfg_attr(miri, ignore)] unsafe fn test_mm_stream_si64() { let a: i64 = 7; let mut mem = boxed::Box::::new(-1); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/Cargo.toml 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/Cargo.toml 2023-12-21 16:55:32.000000000 +0000 @@ -12,10 +12,10 @@ serde = { version = "1", features = ["derive"] } serde_json = "1.0" csv = "1.1" -clap = "2.33.3" +clap = { version = "4.4", features = ["derive"] } regex = "1.4.2" log = "0.4.11" -pretty_env_logger = "0.4.0" +pretty_env_logger = "0.5.0" rayon = "1.5.0" diff = "0.1.12" -itertools = "0.10.1" +itertools = "0.11.0" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/README.md rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/README.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/README.md 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/README.md 2023-12-21 16:55:32.000000000 +0000 @@ -4,15 +4,17 @@ # Usage ``` USAGE: - intrinsic-test [OPTIONS] + intrinsic-test [FLAGS] [OPTIONS] FLAGS: + --a32 Run tests for A32 instrinsics instead of A64 -h, --help Prints help information -V, --version Prints version information OPTIONS: --cppcompiler The C++ compiler to use for compiling the c++ code [default: clang++] --runner Run the C programs under emulation with this command + --skip Filename for a list of intrinsics to skip (one per line) --toolchain The rust toolchain to use for building the rust code ARGS: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/src/json_parser.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/src/json_parser.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/src/json_parser.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/src/json_parser.rs 2023-12-21 16:55:32.000000000 +0000 @@ -1,4 +1,5 @@ use std::collections::HashMap; +use std::path::Path; use serde::Deserialize; @@ -41,7 +42,7 @@ architectures: Vec, } -pub fn get_neon_intrinsics(filename: &str) -> Result, Box> { +pub fn get_neon_intrinsics(filename: &Path) -> Result, Box> { let file = 
std::fs::File::open(filename)?; let reader = std::io::BufReader::new(file); let json: Vec = serde_json::from_reader(reader).expect("Couldn't parse JSON"); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/src/main.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/src/main.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/src/main.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/intrinsic-test/src/main.rs 2023-12-21 16:55:32.000000000 +0000 @@ -4,9 +4,9 @@ use std::fs::File; use std::io::Write; +use std::path::PathBuf; use std::process::Command; -use clap::{App, Arg}; use intrinsic::Intrinsic; use itertools::Itertools; use rayon::prelude::*; @@ -320,58 +320,47 @@ } } +/// Intrinsic test tool +#[derive(clap::Parser)] +#[command( + name = "Intrinsic test tool", + about = "Generates Rust and C programs for intrinsics and compares the output" +)] +struct Cli { + /// The input file containing the intrinsics + input: PathBuf, + + /// The rust toolchain to use for building the rust code + #[arg(long)] + toolchain: Option, + + /// The C++ compiler to use for compiling the c++ code + #[arg(long, default_value_t = String::from("clang++"))] + cppcompiler: String, + + /// Run the C programs under emulation with this command + #[arg(long)] + runner: Option, + + /// Filename for a list of intrinsics to skip (one per line) + #[arg(long)] + skip: Option, + + /// Run tests for A32 instrinsics instead of A64 + #[arg(long)] + a32: bool, +} + fn main() { pretty_env_logger::init(); - let matches = App::new("Intrinsic test tool") - .about("Generates Rust and C programs for intrinsics and compares the output") - .arg( - Arg::with_name("INPUT") - .help("The input file containing the intrinsics") - .required(true) - .index(1), - ) - .arg( - Arg::with_name("TOOLCHAIN") - .takes_value(true) - .long("toolchain") - .help("The rust toolchain to use for building the rust code"), - ) - .arg( - Arg::with_name("CPPCOMPILER") - .takes_value(true) - .default_value("clang++") - .long("cppcompiler") - .help("The C++ compiler to use for compiling the c++ code"), - ) - .arg( - Arg::with_name("RUNNER") - .takes_value(true) - .long("runner") - .help("Run the C programs under emulation with this command"), - ) - .arg( - Arg::with_name("SKIP") - .takes_value(true) - .long("skip") - .help("Filename for a list of intrinsics to skip (one per line)"), - ) - .arg( - Arg::with_name("A32") - .takes_value(false) - .long("a32") - .help("Run tests for A32 instrinsics instead of A64"), - ) - .get_matches(); + let args: Cli = clap::Parser::parse(); - let filename = matches.value_of("INPUT").unwrap(); - let toolchain = matches - .value_of("TOOLCHAIN") - .map_or("".into(), |t| format!("+{t}")); - - let cpp_compiler = matches.value_of("CPPCOMPILER").unwrap(); - let c_runner = matches.value_of("RUNNER").unwrap_or(""); - let skip = if let Some(filename) = matches.value_of("SKIP") { + let filename = args.input; + let toolchain = args.toolchain.map_or_else(String::new, |t| format!("+{t}")); + let cpp_compiler = args.cppcompiler; + let c_runner = args.runner.unwrap_or_else(String::new); + let skip = if let Some(filename) = args.skip { let data = std::fs::read_to_string(&filename).expect("Failed to open file"); data.lines() .map(str::trim) @@ -381,8 +370,8 @@ } else { Default::default() }; - let a32 = matches.is_present("A32"); - let mut intrinsics = get_neon_intrinsics(filename).expect("Error parsing input file"); + let a32 = 
args.a32; + let mut intrinsics = get_neon_intrinsics(&filename).expect("Error parsing input file"); intrinsics.sort_by(|a, b| a.name.cmp(&b.name)); @@ -409,7 +398,7 @@ let notices = build_notices("// "); - if !build_c(¬ices, &intrinsics, cpp_compiler, a32) { + if !build_c(¬ices, &intrinsics, &cpp_compiler, a32) { std::process::exit(2); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/simd-test-macro/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/simd-test-macro/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/simd-test-macro/Cargo.toml 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/simd-test-macro/Cargo.toml 2023-12-21 16:55:32.000000000 +0000 @@ -11,3 +11,4 @@ [dependencies] proc-macro2 = "1.0" quote = "1.0" +syn = { version = "2.0", features = ["full"] } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/simd-test-macro/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/simd-test-macro/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/simd-test-macro/src/lib.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/simd-test-macro/src/lib.rs 2023-12-21 16:55:32.000000000 +0000 @@ -7,7 +7,7 @@ #[macro_use] extern crate quote; -use proc_macro2::{Delimiter, Ident, Literal, Span, TokenStream, TokenTree}; +use proc_macro2::{Ident, Literal, Span, TokenStream, TokenTree}; use quote::ToTokens; use std::env; @@ -44,13 +44,9 @@ .collect(); let enable_feature = string(enable_feature); - let item = TokenStream::from(item); - let name = find_name(item.clone()); - - let name: TokenStream = name - .to_string() - .parse() - .unwrap_or_else(|_| panic!("failed to parse name: {}", name.to_string())); + let mut item = syn::parse_macro_input!(item as syn::ItemFn); + let item_attrs = std::mem::take(&mut item.attrs); + let name = &item.sig.ident; let target = env::var("TARGET").expect( "TARGET environment variable should be set for rustc (e.g. TARGET=x86_64-apple-darwin cargo test)" @@ -109,6 +105,7 @@ #[allow(non_snake_case)] #[test] #maybe_ignore + #(#item_attrs)* fn #name() { if #force_test | (#cfg_target_features) { let v = unsafe { #name() }; @@ -123,29 +120,3 @@ }; ret.into() } - -fn find_name(item: TokenStream) -> Ident { - let mut tokens = item.into_iter(); - while let Some(tok) = tokens.next() { - if let TokenTree::Ident(word) = tok { - if word == "fn" { - break; - } - } - } - - fn get_ident(tt: TokenTree) -> Option { - match tt { - TokenTree::Ident(i) => Some(i), - TokenTree::Group(g) if g.delimiter() == Delimiter::None => { - get_ident(g.stream().into_iter().next()?) 
- } - _ => None, - } - } - - tokens - .next() - .and_then(get_ident) - .expect("failed to find function name") -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/Cargo.toml 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/Cargo.toml 2023-12-21 16:55:32.000000000 +0000 @@ -30,7 +30,6 @@ alloc = { version = "1.0.0", optional = true, package = "rustc-std-workspace-alloc" } [dev-dependencies] -auxv = "0.3.3" cupid = "0.6.0" [features] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/src/detect/os/linux/auxvec.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/src/detect/os/linux/auxvec.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/src/detect/os/linux/auxvec.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/src/detect/os/linux/auxvec.rs 2023-12-21 16:55:32.000000000 +0000 @@ -19,6 +19,7 @@ /// If an entry cannot be read all the bits in the bitfield are set to zero. /// This should be interpreted as all the features being disabled. #[derive(Debug, Copy, Clone)] +#[cfg_attr(test, derive(PartialEq))] pub(crate) struct AuxVec { pub hwcap: usize, #[cfg(any( @@ -174,9 +175,12 @@ /// Tries to read the `key` from the auxiliary vector by calling the /// dynamically-linked `getauxval` function. If the function is not linked, /// this function return `Err`. -#[cfg(all( - feature = "std_detect_dlsym_getauxval", - not(all(target_os = "linux", target_env = "gnu")) +#[cfg(any( + test, + all( + feature = "std_detect_dlsym_getauxval", + not(all(target_os = "linux", target_env = "gnu")) + ) ))] fn getauxval(key: usize) -> Result { use libc; @@ -262,35 +266,8 @@ #[cfg(test)] mod tests { - extern crate auxv as auxv_crate; use super::*; - // Reads the Auxiliary Vector key from /proc/self/auxv - // using the auxv crate. - #[cfg(feature = "std_detect_file_io")] - fn auxv_crate_getprocfs(key: usize) -> Option { - use self::auxv_crate::procfs::search_procfs_auxv; - use self::auxv_crate::AuxvType; - let k = key as AuxvType; - match search_procfs_auxv(&[k]) { - Ok(v) => Some(v[&k] as usize), - Err(_) => None, - } - } - - // Reads the Auxiliary Vector key from getauxval() - // using the auxv crate. - #[cfg(not(any(target_arch = "mips", target_arch = "mips64")))] - fn auxv_crate_getauxval(key: usize) -> Option { - use self::auxv_crate::getauxval::Getauxval; - use self::auxv_crate::AuxvType; - let q = auxv_crate::getauxval::NativeGetauxval {}; - match q.getauxval(key as AuxvType) { - Ok(v) => Some(v as usize), - Err(_) => None, - } - } - // FIXME: on mips/mips64 getauxval returns 0, and /proc/self/auxv // does not always contain the AT_HWCAP key under qemu. 
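// The reworked auxvec tests below call the crate's own getauxval wrapper directly
// instead of going through the external auxv crate. A minimal sketch of the underlying
// libc call (assumes a Linux target where the libc crate exposes getauxval; the hwcap
// helper name is illustrative):
fn hwcap() -> Option<u64> {
    // 16 is the Linux AT_HWCAP key; libc also exports it as libc::AT_HWCAP.
    const AT_HWCAP: libc::c_ulong = 16;
    // SAFETY: getauxval has no preconditions; 0 means the key was not found
    // (or the value genuinely is zero).
    let v = unsafe { libc::getauxval(AT_HWCAP) };
    if v == 0 { None } else { Some(v as u64) }
}

fn main() {
    println!("AT_HWCAP = {:?}", hwcap());
}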
#[cfg(any( @@ -301,7 +278,7 @@ #[test] fn auxv_crate() { let v = auxv(); - if let Some(hwcap) = auxv_crate_getauxval(AT_HWCAP) { + if let Ok(hwcap) = getauxval(AT_HWCAP) { let rt_hwcap = v.expect("failed to find hwcap key").hwcap; assert_eq!(rt_hwcap, hwcap); } @@ -314,7 +291,7 @@ target_arch = "powerpc64" ))] { - if let Some(hwcap2) = auxv_crate_getauxval(AT_HWCAP2) { + if let Ok(hwcap2) = getauxval(AT_HWCAP2) { let rt_hwcap2 = v.expect("failed to find hwcap2 key").hwcap2; assert_eq!(rt_hwcap2, hwcap2); } @@ -391,22 +368,8 @@ #[test] #[cfg(feature = "std_detect_file_io")] fn auxv_crate_procfs() { - let v = auxv(); - if let Some(hwcap) = auxv_crate_getprocfs(AT_HWCAP) { - assert_eq!(v.unwrap().hwcap, hwcap); - } - - // Targets with AT_HWCAP and AT_HWCAP2: - #[cfg(any( - target_arch = "aarch64", - target_arch = "arm", - target_arch = "powerpc", - target_arch = "powerpc64" - ))] - { - if let Some(hwcap2) = auxv_crate_getprocfs(AT_HWCAP2) { - assert_eq!(v.unwrap().hwcap2, hwcap2); - } + if let Ok(procfs_auxv) = auxv_from_file("/proc/self/auxv") { + assert_eq!(auxv().unwrap(), procfs_auxv); } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/src/detect/os/x86.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/src/detect/os/x86.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/src/detect/os/x86.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/std_detect/src/detect/os/x86.rs 2023-12-21 16:55:32.000000000 +0000 @@ -49,11 +49,7 @@ ecx, edx, } = __cpuid(0); - let vendor_id: [[u8; 4]; 3] = [ - mem::transmute(ebx), - mem::transmute(edx), - mem::transmute(ecx), - ]; + let vendor_id: [[u8; 4]; 3] = [ebx.to_ne_bytes(), edx.to_ne_bytes(), ecx.to_ne_bytes()]; let vendor_id: [u8; 12] = mem::transmute(vendor_id); (max_basic_leaf, vendor_id) }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-test/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-test/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-test/Cargo.toml 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-test/Cargo.toml 2023-12-21 16:55:32.000000000 +0000 @@ -20,7 +20,7 @@ # time, and we want to make updates to this explicit rather than automatically # picking up updates which might break CI with new instruction names. 
[target.'cfg(target_arch = "wasm32")'.dependencies] -wasmprinter = "=0.2.53" +wasmprinter = "=0.2.67" [features] default = [] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-test/src/disassembly.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-test/src/disassembly.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-test/src/disassembly.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-test/src/disassembly.rs 2023-12-21 16:55:32.000000000 +0000 @@ -81,6 +81,8 @@ let add_args = if cfg!(target_os = "macos") && cfg!(target_arch = "aarch64") { // Target features need to be enabled for LLVM objdump on Macos ARM64 vec!["--mattr=+v8.6a,+crypto,+tme"] + } else if cfg!(target_arch = "riscv64") { + vec!["--mattr=+zk,+zks,+zbc,+zbb"] } else { vec![] }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-verify/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-verify/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-verify/Cargo.toml 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-verify/Cargo.toml 2023-12-21 16:55:32.000000000 +0000 @@ -7,7 +7,7 @@ [dependencies] proc-macro2 = "1.0" quote = "1.0" -syn = { version = "1.0", features = ["full"] } +syn = { version = "2.0", features = ["full"] } [lib] proc-macro = true @@ -15,5 +15,5 @@ [dev-dependencies] serde = { version = "1.0", features = ['derive'] } -serde-xml-rs = "0.3" +serde-xml-rs = "0.6" serde_json = "1.0.96" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-verify/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-verify/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-verify/src/lib.rs 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/crates/stdarch-verify/src/lib.rs 2023-12-21 16:55:32.000000000 +0000 @@ -7,6 +7,7 @@ use proc_macro::TokenStream; use std::{fs::File, io::Read, path::Path}; use syn::ext::IdentExt; +use syn::parse::Parser as _; #[proc_macro] pub fn x86_functions(input: TokenStream) -> TokenStream { @@ -416,7 +417,7 @@ fn find_instrs(attrs: &[syn::Attribute]) -> Vec { struct AssertInstr { - instr: String, + instr: Option, } // A small custom parser to parse out the instruction in `assert_instr`. @@ -424,15 +425,21 @@ // TODO: should probably just reuse `Invoc` from the `assert-instr-macro` // crate. 
impl syn::parse::Parse for AssertInstr { - fn parse(content: syn::parse::ParseStream<'_>) -> syn::Result { - let input; - parenthesized!(input in content); - let _ = input.parse::()?; - let _ = input.parse::()?; - let ident = input.parse::()?; - if ident != "assert_instr" { - return Err(input.error("expected `assert_instr`")); + fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result { + let _ = input.parse::().unwrap(); + let _ = input.parse::().unwrap(); + + match input.parse::() { + Ok(ident) if ident == "assert_instr" => {} + _ => { + while !input.is_empty() { + // consume everything + drop(input.parse::()); + } + return Ok(Self { instr: None }); + } } + let instrs; parenthesized!(instrs in input); @@ -452,18 +459,24 @@ return Err(input.error("failed to parse instruction")); } } - Ok(Self { instr }) + Ok(Self { instr: Some(instr) }) } } attrs .iter() - .filter(|a| a.path.is_ident("cfg_attr")) .filter_map(|a| { - syn::parse2::(a.tokens.clone()) - .ok() - .map(|a| a.instr) + if let syn::Meta::List(ref l) = a.meta { + if l.path.is_ident("cfg_attr") { + Some(l) + } else { + None + } + } else { + None + } }) + .filter_map(|l| syn::parse2::(l.tokens.clone()).unwrap().instr) .collect() } @@ -471,19 +484,26 @@ attrs .iter() .flat_map(|a| { - if let Ok(syn::Meta::List(i)) = a.parse_meta() { - if i.path.is_ident("target_feature") { - return i.nested; + if let syn::Meta::List(ref l) = a.meta { + if l.path.is_ident("target_feature") { + if let Ok(l) = + syn::punctuated::Punctuated::::parse_terminated + .parse2(l.tokens.clone()) + { + return l; + } } } syn::punctuated::Punctuated::new() }) - .filter_map(|nested| match nested { - syn::NestedMeta::Meta(m) => Some(m), - syn::NestedMeta::Lit(_) => None, - }) .find_map(|m| match m { - syn::Meta::NameValue(ref i) if i.path.is_ident("enable") => Some(i.clone().lit), + syn::Meta::NameValue(i) if i.path.is_ident("enable") => { + if let syn::Expr::Lit(lit) = i.value { + Some(lit.lit) + } else { + None + } + } _ => None, }) } @@ -491,9 +511,16 @@ fn find_required_const(name: &str, attrs: &[syn::Attribute]) -> Vec { attrs .iter() - .flat_map(|a| { - if a.path.segments[0].ident == name { - syn::parse::(a.tokens.clone().into()) + .filter_map(|a| { + if let syn::Meta::List(ref l) = a.meta { + Some(l) + } else { + None + } + }) + .flat_map(|l| { + if l.path.segments[0].ident == name { + syn::parse2::(l.tokens.clone()) .unwrap() .args } else { @@ -509,10 +536,7 @@ impl syn::parse::Parse for RustcArgsRequiredConst { fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result { - let content; - parenthesized!(content in input); - let list = - syn::punctuated::Punctuated::::parse_terminated(&content)?; + let list = syn::punctuated::Punctuated::::parse_terminated(&input)?; Ok(Self { args: list .into_iter() diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/examples/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/examples/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/stdarch/examples/Cargo.toml 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/stdarch/examples/Cargo.toml 2023-12-21 16:55:32.000000000 +0000 @@ -13,8 +13,8 @@ [dependencies] core_arch = { path = "../crates/core_arch" } std_detect = { path = "../crates/std_detect" } -quickcheck = "0.9" -rand = "0.7" +quickcheck = "1.0" +rand = "0.8" [[bin]] name = "hex" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/test/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/test/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/test/src/lib.rs 2023-12-04 
19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/test/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -16,6 +16,8 @@ #![unstable(feature = "test", issue = "50297")] #![doc(test(attr(deny(warnings))))] +#![cfg_attr(not(bootstrap), doc(rust_logo))] +#![cfg_attr(not(bootstrap), feature(rustdoc_internals))] #![feature(internal_output_capture)] #![feature(staged_api)] #![feature(process_exitcode_internals)] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/unwind/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/library/unwind/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/unwind/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/unwind/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -19,9 +19,6 @@ compiler_builtins = "0.1.0" cfg-if = "1.0" -[build-dependencies] -cc = "1.0.76" - [features] # Only applies for Linux and Fuchsia targets diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/unwind/build.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/unwind/build.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/unwind/build.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/unwind/build.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -use std::env; - -fn main() { - println!("cargo:rerun-if-changed=build.rs"); - println!("cargo:rerun-if-env-changed=CARGO_CFG_MIRI"); - - if env::var_os("CARGO_CFG_MIRI").is_some() { - // Miri doesn't need the linker flags or a libunwind build. - return; - } - - let target = env::var("TARGET").expect("TARGET was not set"); - if target.contains("android") { - let build = cc::Build::new(); - - // Since ndk r23 beta 3 `libgcc` was replaced with `libunwind` thus - // check if we have `libunwind` available and if so use it. Otherwise - // fall back to `libgcc` to support older ndk versions. - let has_unwind = build.is_flag_supported("-lunwind").expect("Unable to invoke compiler"); - - if has_unwind { - println!("cargo:rustc-cfg=feature=\"system-llvm-libunwind\""); - } - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/unwind/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/unwind/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/unwind/src/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/unwind/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -4,6 +4,7 @@ #![feature(staged_api)] #![feature(c_unwind)] #![feature(cfg_target_abi)] +#![feature(strict_provenance)] #![cfg_attr(not(target_env = "msvc"), feature(libc))] #![allow(internal_features)] @@ -75,14 +76,10 @@ cfg_if::cfg_if! { if #[cfg(feature = "llvm-libunwind")] { compile_error!("`llvm-libunwind` is not supported for Android targets"); - } else if #[cfg(feature = "system-llvm-libunwind")] { + } else { #[link(name = "unwind", kind = "static", modifiers = "-bundle", cfg(target_feature = "crt-static"))] #[link(name = "unwind", cfg(not(target_feature = "crt-static")))] extern "C" {} - } else { - #[link(name = "gcc", kind = "static", modifiers = "-bundle", cfg(target_feature = "crt-static"))] - #[link(name = "gcc", cfg(not(target_feature = "crt-static")))] - extern "C" {} } } // Android's unwinding library depends on dl_iterate_phdr in `libdl`. 
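Illustrative note, separate from the patch text: the `#![feature(strict_provenance)]` line added above supports the libunwind binding changes that follow, where `_Unwind_Word` becomes a pointer type and the ARM Thumb-bit handling is rewritten with the `addr()`/`map_addr()` pointer methods instead of integer casts. Below is a minimal standalone sketch of that pattern; the function name and values are made up for illustration, and only `addr`/`map_addr` are standard-library methods (unstable behind `strict_provenance` at the time of this patch, stabilized in later releases).

fn clear_thumb_bit(ip: *const u8) -> *const u8 {
    // Edit only the address bits; unlike a round-trip through `as usize`,
    // the pointer keeps its provenance.
    ip.map_addr(|a| a & !1)
}

fn main() {
    let word: u32 = 0;
    let p = &word as *const u32 as *const u8; // 4-aligned, so the low bit starts clear
    let tagged = p.map_addr(|a| a | 1);       // pretend the low bit is a Thumb tag
    assert_eq!(clear_thumb_bit(tagged).addr(), p.addr());
}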
@@ -145,6 +142,10 @@ #[link(name = "gcc_s")] extern "C" {} +#[cfg(target_os = "aix")] +#[link(name = "unwind")] +extern "C" {} + #[cfg(target_os = "nto")] #[link(name = "gcc_s")] extern "C" {} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/library/unwind/src/libunwind.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/library/unwind/src/libunwind.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/library/unwind/src/libunwind.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/library/unwind/src/libunwind.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,6 +1,6 @@ #![allow(nonstandard_style)] -use libc::{c_int, c_void, uintptr_t}; +use libc::{c_int, c_void}; #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq)] @@ -19,8 +19,8 @@ pub use _Unwind_Reason_Code::*; pub type _Unwind_Exception_Class = u64; -pub type _Unwind_Word = uintptr_t; -pub type _Unwind_Ptr = uintptr_t; +pub type _Unwind_Word = *const u8; +pub type _Unwind_Ptr = *const u8; pub type _Unwind_Trace_Fn = extern "C" fn(ctx: *mut _Unwind_Context, arg: *mut c_void) -> _Unwind_Reason_Code; @@ -214,7 +214,7 @@ // On Android or ARM/Linux, these are implemented as macros: pub unsafe fn _Unwind_GetGR(ctx: *mut _Unwind_Context, reg_index: c_int) -> _Unwind_Word { - let mut val: _Unwind_Word = 0; + let mut val: _Unwind_Word = core::ptr::null(); _Unwind_VRS_Get(ctx, _UVRSC_CORE, reg_index as _Unwind_Word, _UVRSD_UINT32, &mut val as *mut _ as *mut c_void); val @@ -229,14 +229,14 @@ pub unsafe fn _Unwind_GetIP(ctx: *mut _Unwind_Context) -> _Unwind_Word { let val = _Unwind_GetGR(ctx, UNWIND_IP_REG); - (val & !1) as _Unwind_Word + val.map_addr(|v| v & !1) } pub unsafe fn _Unwind_SetIP(ctx: *mut _Unwind_Context, value: _Unwind_Word) { // Propagate thumb bit to instruction pointer - let thumb_state = _Unwind_GetGR(ctx, UNWIND_IP_REG) & 1; - let value = value | thumb_state; + let thumb_state = _Unwind_GetGR(ctx, UNWIND_IP_REG).addr() & 1; + let value = value.map_addr(|v| v | thumb_state); _Unwind_SetGR(ctx, UNWIND_IP_REG, value); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/CHANGELOG.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/CHANGELOG.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/CHANGELOG.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/CHANGELOG.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,71 +0,0 @@ -# Changelog - -All notable changes to bootstrap will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - - -## [Changes since the last major version] - -- Vendoring is no longer done automatically when building from git sources. To use vendoring, run `cargo vendor` manually, or use the pre-vendored `rustc-src` tarball. -- `llvm-libunwind` now accepts `in-tree` (formerly true), `system` or `no` (formerly false) [#77703](https://github.com/rust-lang/rust/pull/77703) -- The options `infodir`, `localstatedir`, and `gpg-password-file` are no longer allowed in config.toml. Previously, they were ignored without warning. Note that `infodir` and `localstatedir` are still accepted by `./configure`, with a warning. [#82451](https://github.com/rust-lang/rust/pull/82451) -- Change the names for `dist` commands to match the component they generate. [#90684](https://github.com/rust-lang/rust/pull/90684) -- The `build.fast-submodules` option has been removed. Fast submodule checkouts are enabled unconditionally. Automatic submodule handling can still be disabled with `build.submodules = false`. 
-- Several unsupported `./configure` options have been removed: `optimize`, `parallel-compiler`. These can still be enabled with `--set`, although it isn't recommended. -- `remote-test-server`'s `verbose` argument has been removed in favor of the `--verbose` flag -- `remote-test-server`'s `remote` argument has been removed in favor of the `--bind` flag. Use `--bind 0.0.0.0:12345` to replicate the behavior of the `remote` argument. -- `x.py fmt` now formats only files modified between the merge-base of HEAD and the last commit in the master branch of the rust-lang repository and the current working directory. To restore old behaviour, use `x.py fmt .`. The check mode is not affected by this change. [#105702](https://github.com/rust-lang/rust/pull/105702) -- The `llvm.version-check` config option has been removed. Older versions were never supported. If you still need to support older versions (e.g. you are applying custom patches), patch `check_llvm_version` in bootstrap to change the minimum version. [#108619](https://github.com/rust-lang/rust/pull/108619) -- The `rust.ignore-git` option has been renamed to `rust.omit-git-hash`. [#110059](https://github.com/rust-lang/rust/pull/110059) -- `--exclude` no longer accepts a `Kind` as part of a Step; instead it uses the top-level Kind of the subcommand. If this matches how you were already using --exclude (e.g. `x test --exclude test::std`), simply remove the kind: `--exclude std`. If you were using a kind that did not match the top-level subcommand, please open an issue explaining why you wanted this feature. - -### Non-breaking changes - -- `x.py check` needs opt-in to check tests (--all-targets) [#77473](https://github.com/rust-lang/rust/pull/77473) -- The default bootstrap profiles are now located at `bootstrap/defaults/config.$PROFILE.toml` (previously they were located at `bootstrap/defaults/config.toml.$PROFILE`) [#77558](https://github.com/rust-lang/rust/pull/77558) -- If you have Rust already installed, `x.py` will now infer the host target - from the default rust toolchain. [#78513](https://github.com/rust-lang/rust/pull/78513) -- Add options for enabling overflow checks, one for std (`overflow-checks-std`) and one for everything else (`overflow-checks`). Both default to false. -- Add llvm option `enable-warnings` to have control on llvm compilation warnings. Default to false. -- Add `rpath` option in `target` section to support set rpath option for each target independently. [#111242](https://github.com/rust-lang/rust/pull/111242) - - -## [Version 2] - 2020-09-25 - -- `host` now defaults to the value of `build` in all cases - + Previously `host` defaulted to an empty list when `target` was overridden, and to `build` otherwise - -### Non-breaking changes - -- Add `x.py setup` [#76631](https://github.com/rust-lang/rust/pull/76631) -- Add a changelog for x.py [#76626](https://github.com/rust-lang/rust/pull/76626) -- Optionally, download LLVM from CI on Linux and NixOS. This can be enabled with `download-ci-llvm = true` under `[llvm]`. - + [#76439](https://github.com/rust-lang/rust/pull/76349) - + [#76667](https://github.com/rust-lang/rust/pull/76667) - + [#76708](https://github.com/rust-lang/rust/pull/76708) -- Distribute rustc sources as part of `rustc-dev` [#76856](https://github.com/rust-lang/rust/pull/76856) -- Make the default stage for x.py configurable [#76625](https://github.com/rust-lang/rust/pull/76625). This can be enabled with `build-stage = N`, `doc-stage = N`, etc. 
-- Add a dedicated debug-logging option [#76588](https://github.com/rust-lang/rust/pull/76588). Previously, `debug-logging` could only be set with `debug-assertions`, slowing down the compiler more than necessary. -- Add sample defaults for x.py [#76628](https://github.com/rust-lang/rust/pull/76628) -- Add `--keep-stage-std`, which behaves like `keep-stage` but allows the stage - 0 compiler artifacts (i.e., stage1/bin/rustc) to be rebuilt if changed - [#77120](https://github.com/rust-lang/rust/pull/77120). -- File locking is now used to avoid collisions between multiple running instances of `x.py` (e.g. when using `rust-analyzer` and `x.py` at the same time). Note that Solaris and possibly other non Unix and non Windows systems don't support it [#108607](https://github.com/rust-lang/rust/pull/108607). This might possibly lead to build data corruption. - - -## [Version 1] - 2020-09-11 - -This is the first changelog entry, and it does not attempt to be an exhaustive list of features in x.py. -Instead, this documents the changes to bootstrap in the past 2 months. - -- Improve defaults in `x.py` [#73964](https://github.com/rust-lang/rust/pull/73964) - (see [blog post] for details) -- Set `ninja = true` by default [#74922](https://github.com/rust-lang/rust/pull/74922) -- Avoid trying to inversely cross-compile for build triple from host triples [#76415](https://github.com/rust-lang/rust/pull/76415) -- Allow blessing expect-tests in tools [#75975](https://github.com/rust-lang/rust/pull/75975) -- `x.py check` checks tests/examples/benches [#76258](https://github.com/rust-lang/rust/pull/76258) -- Fix `rust.use-lld` when linker is not set [#76326](https://github.com/rust-lang/rust/pull/76326) -- Build tests with LLD if `use-lld = true` was passed [#76378](https://github.com/rust-lang/rust/pull/76378) - -[blog post]: https://blog.rust-lang.org/inside-rust/2020/08/30/changes-to-x-py-defaults.html diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/Cargo.lock rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/Cargo.lock --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/Cargo.lock 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/Cargo.lock 2023-12-21 16:55:28.000000000 +0000 @@ -30,6 +30,12 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] +name = "bitflags" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" + +[[package]] name = "block-buffer" version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -50,6 +56,7 @@ "fd-lock", "filetime", "hex", + "home", "ignore", "junction", "libc", @@ -104,40 +111,38 @@ [[package]] name = "clap" -version = "4.2.4" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "956ac1f6381d8d82ab4684768f89c0ea3afe66925ceadb4eeb3fc452ffc55d62" +checksum = "ac495e00dcec98c83465d5ad66c5c4fabd652fd6686e7c6269b117e729a6f17b" dependencies = [ "clap_builder", "clap_derive", - "once_cell", ] [[package]] name = "clap_builder" -version = "4.2.4" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84080e799e54cff944f4b4a4b0e71630b0e0443b25b985175c7dddc1a859b749" +checksum = "c77ed9a32a62e6ca27175d00d29d05ca32e396ea1eb5fb01d8256b669cec7663" dependencies = [ "anstyle", - "bitflags", "clap_lex", ] [[package]] name = "clap_complete" -version = "4.2.2" +version = "4.4.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "36774babb166352bb4f7b9cb16f781ffa3439d2a8f12cd31bea85a38c888fea3" +checksum = "e3ae8ba90b9d8b007efe66e55e48fb936272f5ca00349b5b0e89877520d35ea7" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.2.0" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9644cd56d6b87dbe899ef8b053e331c0637664e9e21a33dfcdc36093f5c5c4" +checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" dependencies = [ "heck", "proc-macro2", @@ -147,9 +152,9 @@ [[package]] name = "clap_lex" -version = "0.4.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1" +checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "cmake" @@ -176,16 +181,6 @@ ] [[package]] -name = "crossbeam-channel" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" -dependencies = [ - "cfg-if", - "crossbeam-utils", -] - -[[package]] name = "crossbeam-deque" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -252,30 +247,19 @@ [[package]] name = "errno" -version = "0.3.0" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d6a0976c999d473fe89ad888d5a284e55366d9dc9038b1ba2aa15128c4afa0" +checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" dependencies = [ - "errno-dragonfly", "libc", "windows-sys", ] [[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - -[[package]] name = "fd-lock" -version = "3.0.11" +version = "3.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9799aefb4a2e4a01cc47610b1dd47c18ab13d991f27bbcaed9296f5a53d5cbad" +checksum = "ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" dependencies = [ "cfg-if", "rustix", @@ -330,27 +314,21 @@ checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "hermit-abi" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" - -[[package]] name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] +name = "home" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408" +dependencies = [ + "winapi", +] + +[[package]] name = "ignore" version = "0.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -369,17 +347,6 @@ ] [[package]] -name = "io-lifetimes" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09270fd4fa1111bc614ed2246c7ef56239a3063d5be0d1ec3b589c505d400aeb" 
-dependencies = [ - "hermit-abi 0.3.2", - "libc", - "windows-sys", -] - -[[package]] name = "itoa" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -403,15 +370,15 @@ [[package]] name = "libc" -version = "0.2.140" +version = "0.2.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c" +checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "linux-raw-sys" -version = "0.3.2" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f508063cc7bb32987c71511216bd5a32be15bccb6a80b52df8b9d7f01fc3aa2" +checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" [[package]] name = "log" @@ -458,16 +425,6 @@ ] [[package]] -name = "num_cpus" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" -dependencies = [ - "hermit-abi 0.1.19", - "libc", -] - -[[package]] name = "object" version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -528,25 +485,22 @@ [[package]] name = "rayon" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e060280438193c554f654141c9ea9417886713b7acd75974c85b18a69a88e0b" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" dependencies = [ - "crossbeam-deque", "either", "rayon-core", ] [[package]] name = "rayon-core" -version = "1.10.1" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" dependencies = [ - "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus", ] [[package]] @@ -555,7 +509,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -583,13 +537,12 @@ [[package]] name = "rustix" -version = "0.37.6" +version = "0.38.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d097081ed288dfe45699b72f5b5d648e5f15d64d900c7080273baa20c16a6849" +checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed" dependencies = [ - "bitflags", + "bitflags 2.4.1", "errno", - "io-lifetimes", "libc", "linux-raw-sys", "windows-sys", @@ -787,27 +740,37 @@ [[package]] name = "windows" -version = "0.46.0" +version = "0.51.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca229916c5ee38c2f2bc1e9d8f04df975b4bd93f9955dc69fabb5d91270045c9" +dependencies = [ + "windows-core", + "windows-targets", +] + +[[package]] +name = "windows-core" +version = "0.51.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdacb41e6a96a052c6cb63a144f24900236121c6f63f4f8219fef5977ecb0c25" +checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" dependencies = [ "windows-targets", ] [[package]] name = "windows-sys" -version = "0.45.0" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" 
dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", @@ -820,45 +783,45 @@ [[package]] name = "windows_aarch64_gnullvm" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_msvc" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_i686_gnu" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_msvc" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_x86_64_gnu" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_msvc" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "xattr" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -5,38 +5,48 @@ build = "build.rs" default-run = "bootstrap" +[features] +build-metrics = ["sysinfo"] + [lib] -path = "lib.rs" +path = "src/lib.rs" doctest = false [[bin]] name = "bootstrap" -path = "bin/main.rs" +path = "src/bin/main.rs" test = false [[bin]] name = "rustc" -path = "bin/rustc.rs" +path = "src/bin/rustc.rs" test = false [[bin]] name = "rustdoc" -path = "bin/rustdoc.rs" +path = "src/bin/rustdoc.rs" test = false [[bin]] name = "sccache-plus-cl" -path = "bin/sccache-plus-cl.rs" +path = "src/bin/sccache-plus-cl.rs" test = false [dependencies] build_helper = { path = "../tools/build_helper" } +cc = "1.0.69" +clap = { version = 
"4.4.7", default-features = false, features = ["std", "usage", "help", "derive", "error-context"] } +clap_complete = "4.4.3" cmake = "0.1.38" filetime = "0.2" -cc = "1.0.69" -libc = "0.2" hex = "0.4" +home = "0.5.4" +ignore = "0.4.10" +libc = "0.2" object = { version = "0.32.0", default-features = false, features = ["archive", "coff", "read_core", "unaligned"] } +once_cell = "1.7.2" +opener = "0.5" +semver = "1.0.17" serde = "1.0.137" # Directly use serde_derive rather than through the derive feature of serde to allow building both # in parallel and to allow serde_json and toml to start building as soon as serde has been built. @@ -46,27 +56,21 @@ tar = "0.4" termcolor = "1.2.0" toml = "0.5" -ignore = "0.4.10" -opener = "0.5" -once_cell = "1.7.2" -xz2 = "0.1" walkdir = "2" +xz2 = "0.1" # Dependencies needed by the build-metrics feature sysinfo = { version = "0.26.0", optional = true } -clap = { version = "4.2.4", default-features = false, features = ["std", "usage", "help", "derive", "error-context"] } -clap_complete = "4.2.2" -semver = "1.0.17" # Solaris doesn't support flock() and thus fd-lock is not option now [target.'cfg(not(target_os = "solaris"))'.dependencies] -fd-lock = "3.0.8" +fd-lock = "3.0.13" [target.'cfg(windows)'.dependencies.junction] version = "1.0.0" [target.'cfg(windows)'.dependencies.windows] -version = "0.46.0" +version = "0.51.1" features = [ "Win32_Foundation", "Win32_Security", @@ -80,9 +84,6 @@ [dev-dependencies] pretty_assertions = "1.4" -[features] -build-metrics = ["sysinfo"] - # We care a lot about bootstrap's compile times, so don't include debuginfo for # dependencies, only bootstrap itself. [profile.dev] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/README.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/README.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/README.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/README.md 2023-12-21 16:55:28.000000000 +0000 @@ -181,11 +181,10 @@ `Config` struct. * Adding a sanity check? Take a look at `bootstrap/sanity.rs`. -If you make a major change, please remember to: +If you make a major change on bootstrap configuration, please remember to: -+ Update `VERSION` in `src/bootstrap/main.rs`. -* Update `changelog-seen = N` in `config.example.toml`. -* Add an entry in `src/bootstrap/CHANGELOG.md`. ++ Update `CONFIG_CHANGE_HISTORY` in `src/bootstrap/lib.rs`. +* Update `change-id = {pull-request-id}` in `config.example.toml`. A 'major change' includes @@ -193,7 +192,7 @@ * A change in the default options. Changes that do not affect contributors to the compiler or users -building rustc from source don't need an update to `VERSION`. +building rustc from source don't need an update to `CONFIG_CHANGE_HISTORY`. If you have any questions, feel free to reach out on the `#t-infra/bootstrap` channel at [Rust Bootstrap Zulip server][rust-bootstrap-zulip]. When you encounter bugs, @@ -201,3 +200,8 @@ [rust-bootstrap-zulip]: https://rust-lang.zulipchat.com/#narrow/stream/t-infra.2Fbootstrap [rust-issue-tracker]: https://github.com/rust-lang/rust/issues + +## Changelog + +Because we do not release bootstrap with versions, we also do not maintain CHANGELOG files. To +review the changes made to bootstrap, simply run `git log --no-merges --oneline -- src/bootstrap`. 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bin/_helper.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bin/_helper.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bin/_helper.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bin/_helper.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -/// Parses the value of the "RUSTC_VERBOSE" environment variable and returns it as a `usize`. -/// If it was not defined, returns 0 by default. -/// -/// Panics if "RUSTC_VERBOSE" is defined with the value that is not an unsigned integer. -fn parse_rustc_verbose() -> usize { - use std::str::FromStr; - - match std::env::var("RUSTC_VERBOSE") { - Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"), - Err(_) => 0, - } -} - -/// Parses the value of the "RUSTC_STAGE" environment variable and returns it as a `String`. -/// -/// If "RUSTC_STAGE" was not set, the program will be terminated with 101. -fn parse_rustc_stage() -> String { - std::env::var("RUSTC_STAGE").unwrap_or_else(|_| { - // Don't panic here; it's reasonable to try and run these shims directly. Give a helpful error instead. - eprintln!("rustc shim: fatal: RUSTC_STAGE was not set"); - eprintln!("rustc shim: note: use `x.py build -vvv` to see all environment variables set by bootstrap"); - exit(101); - }) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bin/main.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bin/main.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bin/main.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bin/main.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,124 +0,0 @@ -//! rustbuild, the Rust build system -//! -//! This is the entry point for the build system used to compile the `rustc` -//! compiler. Lots of documentation can be found in the `README.md` file in the -//! parent directory, and otherwise documentation can be found throughout the `build` -//! directory in each respective module. 
- -#[cfg(all(any(unix, windows), not(target_os = "solaris")))] -use std::io::Write; -#[cfg(all(any(unix, windows), not(target_os = "solaris")))] -use std::process; -use std::{env, fs}; - -#[cfg(all(any(unix, windows), not(target_os = "solaris")))] -use bootstrap::t; -use bootstrap::{Build, Config, Subcommand, VERSION}; - -fn main() { - let args = env::args().skip(1).collect::>(); - let config = Config::parse(&args); - - #[cfg(all(any(unix, windows), not(target_os = "solaris")))] - let mut build_lock; - #[cfg(all(any(unix, windows), not(target_os = "solaris")))] - let _build_lock_guard; - #[cfg(all(any(unix, windows), not(target_os = "solaris")))] - // Display PID of process holding the lock - // PID will be stored in a lock file - { - let path = config.out.join("lock"); - let pid = match fs::read_to_string(&path) { - Ok(contents) => contents, - Err(_) => String::new(), - }; - - build_lock = - fd_lock::RwLock::new(t!(fs::OpenOptions::new().write(true).create(true).open(&path))); - _build_lock_guard = match build_lock.try_write() { - Ok(mut lock) => { - t!(lock.write(&process::id().to_string().as_ref())); - lock - } - err => { - drop(err); - println!("warning: build directory locked by process {pid}, waiting for lock"); - let mut lock = t!(build_lock.write()); - t!(lock.write(&process::id().to_string().as_ref())); - lock - } - }; - } - - #[cfg(any(not(any(unix, windows)), target_os = "solaris"))] - println!("warning: file locking not supported for target, not locking build directory"); - - // check_version warnings are not printed during setup - let changelog_suggestion = - if matches!(config.cmd, Subcommand::Setup { .. }) { None } else { check_version(&config) }; - - // NOTE: Since `./configure` generates a `config.toml`, distro maintainers will see the - // changelog warning, not the `x.py setup` message. - let suggest_setup = config.config.is_none() && !matches!(config.cmd, Subcommand::Setup { .. }); - if suggest_setup { - println!("warning: you have not made a `config.toml`"); - println!( - "help: consider running `./x.py setup` or copying `config.example.toml` by running \ - `cp config.example.toml config.toml`" - ); - } else if let Some(suggestion) = &changelog_suggestion { - println!("{suggestion}"); - } - - let pre_commit = config.src.join(".git").join("hooks").join("pre-commit"); - Build::new(config).build(); - - if suggest_setup { - println!("warning: you have not made a `config.toml`"); - println!( - "help: consider running `./x.py setup` or copying `config.example.toml` by running \ - `cp config.example.toml config.toml`" - ); - } else if let Some(suggestion) = &changelog_suggestion { - println!("{suggestion}"); - } - - // Give a warning if the pre-commit script is in pre-commit and not pre-push. - // HACK: Since the commit script uses hard links, we can't actually tell if it was installed by x.py setup or not. - // We could see if it's identical to src/etc/pre-push.sh, but pre-push may have been modified in the meantime. - // Instead, look for this comment, which is almost certainly not in any custom hook. - if fs::read_to_string(pre_commit).map_or(false, |contents| { - contents.contains("https://github.com/rust-lang/rust/issues/77620#issuecomment-705144570") - }) { - println!( - "warning: You have the pre-push script installed to .git/hooks/pre-commit. \ - Consider moving it to .git/hooks/pre-push instead, which runs less often." 
- ); - } - - if suggest_setup || changelog_suggestion.is_some() { - println!("note: this message was printed twice to make it more likely to be seen"); - } -} - -fn check_version(config: &Config) -> Option { - let mut msg = String::new(); - - let suggestion = if let Some(seen) = config.changelog_seen { - if seen != VERSION { - msg.push_str("warning: there have been changes to x.py since you last updated.\n"); - format!("update `config.toml` to use `changelog-seen = {VERSION}` instead") - } else { - return None; - } - } else { - msg.push_str("warning: x.py has made several changes recently you may want to look at\n"); - format!("add `changelog-seen = {VERSION}` at the top of `config.toml`") - }; - - msg.push_str("help: consider looking at the changes in `src/bootstrap/CHANGELOG.md`\n"); - msg.push_str("note: to silence this warning, "); - msg.push_str(&suggestion); - - Some(msg) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bin/rustc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bin/rustc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bin/rustc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bin/rustc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,410 +0,0 @@ -//! Shim which is passed to Cargo as "rustc" when running the bootstrap. -//! -//! This shim will take care of some various tasks that our build process -//! requires that Cargo can't quite do through normal configuration: -//! -//! 1. When compiling build scripts and build dependencies, we need a guaranteed -//! full standard library available. The only compiler which actually has -//! this is the snapshot, so we detect this situation and always compile with -//! the snapshot compiler. -//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling -//! (and this slightly differs based on a whether we're using a snapshot or -//! not), so we do that all here. -//! -//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of -//! switching compilers for the bootstrap and for build scripts will probably -//! never get replaced. - -include!("../dylib_util.rs"); -include!("./_helper.rs"); - -use std::env; -use std::path::PathBuf; -use std::process::{exit, Child, Command}; -use std::time::Instant; - -fn main() { - let args = env::args_os().skip(1).collect::>(); - let arg = |name| args.windows(2).find(|args| args[0] == name).and_then(|args| args[1].to_str()); - - let stage = parse_rustc_stage(); - let verbose = parse_rustc_verbose(); - - // Detect whether or not we're a build script depending on whether --target - // is passed (a bit janky...) - let target = arg("--target"); - let version = args.iter().find(|w| &**w == "-vV"); - - // Use a different compiler for build scripts, since there may not yet be a - // libstd for the real compiler to use. However, if Cargo is attempting to - // determine the version of the compiler, the real compiler needs to be - // used. Currently, these two states are differentiated based on whether - // --target and -vV is/isn't passed. 
- let (rustc, libdir) = if target.is_none() && version.is_none() { - ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR") - } else { - ("RUSTC_REAL", "RUSTC_LIBDIR") - }; - - let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set"); - let on_fail = env::var_os("RUSTC_ON_FAIL").map(Command::new); - - let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc)); - let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir)); - let mut dylib_path = dylib_path(); - dylib_path.insert(0, PathBuf::from(&libdir)); - - let mut cmd = Command::new(rustc); - cmd.args(&args).env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); - - // Get the name of the crate we're compiling, if any. - let crate_name = arg("--crate-name"); - - if let Some(crate_name) = crate_name { - if let Some(target) = env::var_os("RUSTC_TIME") { - if target == "all" - || target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name) - { - cmd.arg("-Ztime-passes"); - } - } - } - - // Print backtrace in case of ICE - if env::var("RUSTC_BACKTRACE_ON_ICE").is_ok() && env::var("RUST_BACKTRACE").is_err() { - cmd.env("RUST_BACKTRACE", "1"); - } - - if let Ok(lint_flags) = env::var("RUSTC_LINT_FLAGS") { - cmd.args(lint_flags.split_whitespace()); - } - - if target.is_some() { - // The stage0 compiler has a special sysroot distinct from what we - // actually downloaded, so we just always pass the `--sysroot` option, - // unless one is already set. - if !args.iter().any(|arg| arg == "--sysroot") { - cmd.arg("--sysroot").arg(&sysroot); - } - - // If we're compiling specifically the `panic_abort` crate then we pass - // the `-C panic=abort` option. Note that we do not do this for any - // other crate intentionally as this is the only crate for now that we - // ship with panic=abort. - // - // This... is a bit of a hack how we detect this. Ideally this - // information should be encoded in the crate I guess? Would likely - // require an RFC amendment to RFC 1513, however. - if crate_name == Some("panic_abort") { - cmd.arg("-C").arg("panic=abort"); - } - - // `-Ztls-model=initial-exec` must not be applied to proc-macros, see - // issue https://github.com/rust-lang/rust/issues/100530 - if env::var("RUSTC_TLS_MODEL_INITIAL_EXEC").is_ok() - && arg("--crate-type") != Some("proc-macro") - && !matches!(crate_name, Some("proc_macro2" | "quote" | "syn" | "synstructure")) - { - cmd.arg("-Ztls-model=initial-exec"); - } - } else { - // FIXME(rust-lang/cargo#5754) we shouldn't be using special env vars - // here, but rather Cargo should know what flags to pass rustc itself. - - // Override linker if necessary. - if let Ok(host_linker) = env::var("RUSTC_HOST_LINKER") { - cmd.arg(format!("-Clinker={host_linker}")); - } - if env::var_os("RUSTC_HOST_FUSE_LD_LLD").is_some() { - cmd.arg("-Clink-args=-fuse-ld=lld"); - } - - if let Ok(s) = env::var("RUSTC_HOST_CRT_STATIC") { - if s == "true" { - cmd.arg("-C").arg("target-feature=+crt-static"); - } - if s == "false" { - cmd.arg("-C").arg("target-feature=-crt-static"); - } - } - - // Cargo doesn't pass RUSTFLAGS to proc_macros: - // https://github.com/rust-lang/cargo/issues/4423 - // Thus, if we are on stage 0, we explicitly set `--cfg=bootstrap`. - // We also declare that the flag is expected, which we need to do to not - // get warnings about it being unexpected. 
- if stage == "0" { - cmd.arg("--cfg=bootstrap"); - } - cmd.arg("-Zunstable-options"); - cmd.arg("--check-cfg=values(bootstrap)"); - } - - if let Ok(map) = env::var("RUSTC_DEBUGINFO_MAP") { - cmd.arg("--remap-path-prefix").arg(&map); - } - - // Force all crates compiled by this compiler to (a) be unstable and (b) - // allow the `rustc_private` feature to link to other unstable crates - // also in the sysroot. We also do this for host crates, since those - // may be proc macros, in which case we might ship them. - if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() { - cmd.arg("-Z").arg("force-unstable-if-unmarked"); - } - - // allow-features is handled from within this rustc wrapper because of - // issues with build scripts. Some packages use build scripts to - // dynamically detect if certain nightly features are available. - // There are different ways this causes problems: - // - // * rustix runs `rustc` on a small test program to see if the feature is - // available (and sets a `cfg` if it is). It does not honor - // CARGO_ENCODED_RUSTFLAGS. - // * proc-macro2 detects if `rustc -vV` says "nighty" or "dev" and enables - // nightly features. It will scan CARGO_ENCODED_RUSTFLAGS for - // -Zallow-features. Unfortunately CARGO_ENCODED_RUSTFLAGS is not set - // for build-dependencies when --target is used. - // - // The issues above means we can't just use RUSTFLAGS, and we can't use - // `cargo -Zallow-features=…`. Passing it through here ensures that it - // always gets set. Unfortunately that also means we need to enable more - // features than we really want (like those for proc-macro2), but there - // isn't much of a way around it. - // - // I think it is unfortunate that build scripts are doing this at all, - // since changes to nightly features can cause crates to break even if the - // user didn't want or care about the use of the nightly features. I think - // nightly features should be opt-in only. Unfortunately the dynamic - // checks are now too wide spread that we just need to deal with it. - // - // If you want to try to remove this, I suggest working with the crate - // authors to remove the dynamic checking. Another option is to pursue - // https://github.com/rust-lang/cargo/issues/11244 and - // https://github.com/rust-lang/cargo/issues/4423, which will likely be - // very difficult, but could help expose -Zallow-features into build - // scripts so they could try to honor them. 
- if let Ok(allow_features) = env::var("RUSTC_ALLOW_FEATURES") { - cmd.arg(format!("-Zallow-features={allow_features}")); - } - - if let Ok(flags) = env::var("MAGIC_EXTRA_RUSTFLAGS") { - for flag in flags.split(' ') { - cmd.arg(flag); - } - } - - let is_test = args.iter().any(|a| a == "--test"); - if verbose > 2 { - let rust_env_vars = - env::vars().filter(|(k, _)| k.starts_with("RUST") || k.starts_with("CARGO")); - let prefix = if is_test { "[RUSTC-SHIM] rustc --test" } else { "[RUSTC-SHIM] rustc" }; - let prefix = match crate_name { - Some(crate_name) => format!("{prefix} {crate_name}"), - None => prefix.to_string(), - }; - for (i, (k, v)) in rust_env_vars.enumerate() { - eprintln!("{prefix} env[{i}]: {k:?}={v:?}"); - } - eprintln!("{} working directory: {}", prefix, env::current_dir().unwrap().display()); - eprintln!( - "{} command: {:?}={:?} {:?}", - prefix, - dylib_path_var(), - env::join_paths(&dylib_path).unwrap(), - cmd, - ); - eprintln!("{prefix} sysroot: {sysroot:?}"); - eprintln!("{prefix} libdir: {libdir:?}"); - } - - let start = Instant::now(); - let (child, status) = { - let errmsg = format!("\nFailed to run:\n{cmd:?}\n-------------"); - let mut child = cmd.spawn().expect(&errmsg); - let status = child.wait().expect(&errmsg); - (child, status) - }; - - if env::var_os("RUSTC_PRINT_STEP_TIMINGS").is_some() - || env::var_os("RUSTC_PRINT_STEP_RUSAGE").is_some() - { - if let Some(crate_name) = crate_name { - let dur = start.elapsed(); - // If the user requested resource usage data, then - // include that in addition to the timing output. - let rusage_data = - env::var_os("RUSTC_PRINT_STEP_RUSAGE").and_then(|_| format_rusage_data(child)); - eprintln!( - "[RUSTC-TIMING] {} test:{} {}.{:03}{}{}", - crate_name, - is_test, - dur.as_secs(), - dur.subsec_millis(), - if rusage_data.is_some() { " " } else { "" }, - rusage_data.unwrap_or(String::new()), - ); - } - } - - if status.success() { - std::process::exit(0); - // note: everything below here is unreachable. do not put code that - // should run on success, after this block. - } - if verbose > 0 { - println!("\nDid not run successfully: {status}\n{cmd:?}\n-------------"); - } - - if let Some(mut on_fail) = on_fail { - on_fail.status().expect("Could not run the on_fail command"); - } - - // Preserve the exit code. In case of signal, exit with 0xfe since it's - // awkward to preserve this status in a cross-platform way. 
- match status.code() { - Some(i) => std::process::exit(i), - None => { - eprintln!("rustc exited with {status}"); - std::process::exit(0xfe); - } - } -} - -#[cfg(all(not(unix), not(windows)))] -// In the future we can add this for more platforms -fn format_rusage_data(_child: Child) -> Option { - None -} - -#[cfg(windows)] -fn format_rusage_data(child: Child) -> Option { - use std::os::windows::io::AsRawHandle; - - use windows::{ - Win32::Foundation::HANDLE, - Win32::System::ProcessStatus::{ - K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS, PROCESS_MEMORY_COUNTERS_EX, - }, - Win32::System::Threading::GetProcessTimes, - Win32::System::Time::FileTimeToSystemTime, - }; - - let handle = HANDLE(child.as_raw_handle() as isize); - - let mut user_filetime = Default::default(); - let mut user_time = Default::default(); - let mut kernel_filetime = Default::default(); - let mut kernel_time = Default::default(); - let mut memory_counters = PROCESS_MEMORY_COUNTERS::default(); - - unsafe { - GetProcessTimes( - handle, - &mut Default::default(), - &mut Default::default(), - &mut kernel_filetime, - &mut user_filetime, - ) - } - .ok() - .ok()?; - unsafe { FileTimeToSystemTime(&user_filetime, &mut user_time) }.ok().ok()?; - unsafe { FileTimeToSystemTime(&kernel_filetime, &mut kernel_time) }.ok().ok()?; - - // Unlike on Linux with RUSAGE_CHILDREN, this will only return memory information for the process - // with the given handle and none of that process's children. - unsafe { - K32GetProcessMemoryInfo( - handle, - &mut memory_counters, - std::mem::size_of::() as u32, - ) - } - .ok() - .ok()?; - - // Guide on interpreting these numbers: - // https://docs.microsoft.com/en-us/windows/win32/psapi/process-memory-usage-information - let peak_working_set = memory_counters.PeakWorkingSetSize / 1024; - let peak_page_file = memory_counters.PeakPagefileUsage / 1024; - let peak_paged_pool = memory_counters.QuotaPeakPagedPoolUsage / 1024; - let peak_nonpaged_pool = memory_counters.QuotaPeakNonPagedPoolUsage / 1024; - Some(format!( - "user: {USER_SEC}.{USER_USEC:03} \ - sys: {SYS_SEC}.{SYS_USEC:03} \ - peak working set (kb): {PEAK_WORKING_SET} \ - peak page file usage (kb): {PEAK_PAGE_FILE} \ - peak paged pool usage (kb): {PEAK_PAGED_POOL} \ - peak non-paged pool usage (kb): {PEAK_NONPAGED_POOL} \ - page faults: {PAGE_FAULTS}", - USER_SEC = user_time.wSecond + (user_time.wMinute * 60), - USER_USEC = user_time.wMilliseconds, - SYS_SEC = kernel_time.wSecond + (kernel_time.wMinute * 60), - SYS_USEC = kernel_time.wMilliseconds, - PEAK_WORKING_SET = peak_working_set, - PEAK_PAGE_FILE = peak_page_file, - PEAK_PAGED_POOL = peak_paged_pool, - PEAK_NONPAGED_POOL = peak_nonpaged_pool, - PAGE_FAULTS = memory_counters.PageFaultCount, - )) -} - -#[cfg(unix)] -/// Tries to build a string with human readable data for several of the rusage -/// fields. Note that we are focusing mainly on data that we believe to be -/// supplied on Linux (the `rusage` struct has other fields in it but they are -/// currently unsupported by Linux). -fn format_rusage_data(_child: Child) -> Option { - let rusage: libc::rusage = unsafe { - let mut recv = std::mem::zeroed(); - // -1 is RUSAGE_CHILDREN, which means to get the rusage for all children - // (and grandchildren, etc) processes that have respectively terminated - // and been waited for. - let retval = libc::getrusage(-1, &mut recv); - if retval != 0 { - return None; - } - recv - }; - // Mac OS X reports the maxrss in bytes, not kb. 
- let divisor = if env::consts::OS == "macos" { 1024 } else { 1 }; - let maxrss = (rusage.ru_maxrss + (divisor - 1)) / divisor; - - let mut init_str = format!( - "user: {USER_SEC}.{USER_USEC:03} \ - sys: {SYS_SEC}.{SYS_USEC:03} \ - max rss (kb): {MAXRSS}", - USER_SEC = rusage.ru_utime.tv_sec, - USER_USEC = rusage.ru_utime.tv_usec, - SYS_SEC = rusage.ru_stime.tv_sec, - SYS_USEC = rusage.ru_stime.tv_usec, - MAXRSS = maxrss - ); - - // The remaining rusage stats vary in platform support. So we treat - // uniformly zero values in each category as "not worth printing", since it - // either means no events of that type occurred, or that the platform - // does not support it. - - let minflt = rusage.ru_minflt; - let majflt = rusage.ru_majflt; - if minflt != 0 || majflt != 0 { - init_str.push_str(&format!(" page reclaims: {minflt} page faults: {majflt}")); - } - - let inblock = rusage.ru_inblock; - let oublock = rusage.ru_oublock; - if inblock != 0 || oublock != 0 { - init_str.push_str(&format!(" fs block inputs: {inblock} fs block outputs: {oublock}")); - } - - let nvcsw = rusage.ru_nvcsw; - let nivcsw = rusage.ru_nivcsw; - if nvcsw != 0 || nivcsw != 0 { - init_str.push_str(&format!( - " voluntary ctxt switches: {nvcsw} involuntary ctxt switches: {nivcsw}" - )); - } - - return Some(init_str); -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bin/rustdoc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bin/rustdoc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bin/rustdoc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bin/rustdoc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,86 +0,0 @@ -//! Shim which is passed to Cargo as "rustdoc" when running the bootstrap. -//! -//! See comments in `src/bootstrap/rustc.rs` for more information. - -use std::env; -use std::ffi::OsString; -use std::path::PathBuf; -use std::process::{exit, Command}; - -include!("../dylib_util.rs"); - -include!("./_helper.rs"); - -fn main() { - let args = env::args_os().skip(1).collect::>(); - - let stage = parse_rustc_stage(); - let verbose = parse_rustc_verbose(); - - let rustdoc = env::var_os("RUSTDOC_REAL").expect("RUSTDOC_REAL was not set"); - let libdir = env::var_os("RUSTDOC_LIBDIR").expect("RUSTDOC_LIBDIR was not set"); - let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set"); - - // Detect whether or not we're a build script depending on whether --target - // is passed (a bit janky...) - let target = args.windows(2).find(|w| &*w[0] == "--target").and_then(|w| w[1].to_str()); - - let mut dylib_path = dylib_path(); - dylib_path.insert(0, PathBuf::from(libdir.clone())); - - let mut cmd = Command::new(rustdoc); - - if target.is_some() { - // The stage0 compiler has a special sysroot distinct from what we - // actually downloaded, so we just always pass the `--sysroot` option, - // unless one is already set. - if !args.iter().any(|arg| arg == "--sysroot") { - cmd.arg("--sysroot").arg(&sysroot); - } - } - - cmd.args(&args); - cmd.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); - - // Force all crates compiled by this compiler to (a) be unstable and (b) - // allow the `rustc_private` feature to link to other unstable crates - // also in the sysroot. 
- if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() { - cmd.arg("-Z").arg("force-unstable-if-unmarked"); - } - if let Some(linker) = env::var_os("RUSTDOC_LINKER") { - let mut arg = OsString::from("-Clinker="); - arg.push(&linker); - cmd.arg(arg); - } - if let Ok(no_threads) = env::var("RUSTDOC_LLD_NO_THREADS") { - cmd.arg("-Clink-arg=-fuse-ld=lld"); - cmd.arg(format!("-Clink-arg=-Wl,{no_threads}")); - } - // Cargo doesn't pass RUSTDOCFLAGS to proc_macros: - // https://github.com/rust-lang/cargo/issues/4423 - // Thus, if we are on stage 0, we explicitly set `--cfg=bootstrap`. - // We also declare that the flag is expected, which we need to do to not - // get warnings about it being unexpected. - if stage == "0" { - cmd.arg("--cfg=bootstrap"); - } - cmd.arg("-Zunstable-options"); - cmd.arg("--check-cfg=values(bootstrap)"); - - if verbose > 1 { - eprintln!( - "rustdoc command: {:?}={:?} {:?}", - dylib_path_var(), - env::join_paths(&dylib_path).unwrap(), - cmd, - ); - eprintln!("sysroot: {sysroot:?}"); - eprintln!("libdir: {libdir:?}"); - } - - std::process::exit(match cmd.status() { - Ok(s) => s.code().unwrap_or(1), - Err(e) => panic!("\n\nfailed to run {cmd:?}: {e}\n\n"), - }) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bin/sccache-plus-cl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bin/sccache-plus-cl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bin/sccache-plus-cl.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bin/sccache-plus-cl.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,38 +0,0 @@ -use std::env; -use std::process::{self, Command}; - -fn main() { - let target = env::var("SCCACHE_TARGET").unwrap(); - // Locate the actual compiler that we're invoking - env::set_var("CC", env::var_os("SCCACHE_CC").unwrap()); - env::set_var("CXX", env::var_os("SCCACHE_CXX").unwrap()); - let mut cfg = cc::Build::new(); - cfg.cargo_metadata(false) - .out_dir("/") - .target(&target) - .host(&target) - .opt_level(0) - .warnings(false) - .debug(false); - let compiler = cfg.get_compiler(); - - // Invoke sccache with said compiler - let sccache_path = env::var_os("SCCACHE_PATH").unwrap(); - let mut cmd = Command::new(&sccache_path); - cmd.arg(compiler.path()); - for &(ref k, ref v) in compiler.env() { - cmd.env(k, v); - } - for arg in env::args().skip(1) { - cmd.arg(arg); - } - - if let Ok(s) = env::var("SCCACHE_EXTRA_ARGS") { - for s in s.split_whitespace() { - cmd.arg(s); - } - } - - let status = cmd.status().expect("failed to spawn"); - process::exit(status.code().unwrap_or(2)) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bootstrap.py rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bootstrap.py --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bootstrap.py 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bootstrap.py 2023-12-21 16:55:28.000000000 +0000 @@ -211,7 +211,7 @@ if exception: raise elif exit: - eprint("error: unable to run `{}`: {}".format(' '.join(cmd), exc)) + eprint("ERROR: unable to run `{}`: {}".format(' '.join(cmd), exc)) eprint("Please make sure it's installed and in the path.") sys.exit(1) return None @@ -681,7 +681,7 @@ answer = self._should_fix_bins_and_dylibs = get_answer() if answer: - eprint("info: You seem to be using Nix.") + eprint("INFO: You seem to be using Nix.") return answer def fix_bin_or_dylib(self, fname): @@ -727,7 +727,7 @@ "nix-build", "-E", nix_expr, "-o", nix_deps_dir, ]) except subprocess.CalledProcessError as reason: - eprint("warning: failed to call 
nix-build:", reason) + eprint("WARNING: failed to call nix-build:", reason) return self.nix_deps_dir = nix_deps_dir @@ -747,7 +747,7 @@ try: subprocess.check_output([patchelf] + patchelf_args + [fname]) except subprocess.CalledProcessError as reason: - eprint("warning: failed to call patchelf:", reason) + eprint("WARNING: failed to call patchelf:", reason) return def rustc_stamp(self): @@ -954,6 +954,13 @@ if deny_warnings: env["RUSTFLAGS"] += " -Dwarnings" + # Add RUSTFLAGS_BOOTSTRAP to RUSTFLAGS for bootstrap compilation. + # Note that RUSTFLAGS_BOOTSTRAP should always be added to the end of + # RUSTFLAGS to be actually effective (e.g., if we have `-Dwarnings` in + # RUSTFLAGS, passing `-Awarnings` from RUSTFLAGS_BOOTSTRAP should override it). + if "RUSTFLAGS_BOOTSTRAP" in env: + env["RUSTFLAGS"] += " " + env["RUSTFLAGS_BOOTSTRAP"] + env["PATH"] = os.path.join(self.bin_root(), "bin") + \ os.pathsep + env["PATH"] if not os.path.isfile(self.cargo()): @@ -998,7 +1005,7 @@ if 'SUDO_USER' in os.environ and not self.use_vendored_sources: if os.getuid() == 0: self.use_vendored_sources = True - eprint('info: looks like you\'re trying to run this command as root') + eprint('INFO: looks like you\'re trying to run this command as root') eprint(' and so in order to preserve your $HOME this will now') eprint(' use vendored sources by default.') @@ -1010,14 +1017,14 @@ "--sync ./src/tools/rust-analyzer/Cargo.toml " \ "--sync ./compiler/rustc_codegen_cranelift/Cargo.toml " \ "--sync ./src/bootstrap/Cargo.toml " - eprint('error: vendoring required, but vendor directory does not exist.') + eprint('ERROR: vendoring required, but vendor directory does not exist.') eprint(' Run `cargo vendor {}` to initialize the ' 'vendor directory.'.format(sync_dirs)) eprint('Alternatively, use the pre-vendored `rustc-src` dist component.') raise Exception("{} not found".format(vendor_dir)) if not os.path.exists(cargo_dir): - eprint('error: vendoring required, but .cargo/config does not exist.') + eprint('ERROR: vendoring required, but .cargo/config does not exist.') raise Exception("{} not found".format(cargo_dir)) else: if os.path.exists(cargo_dir): @@ -1042,6 +1049,12 @@ """Configure, fetch, build and run the initial bootstrap""" rust_root = os.path.abspath(os.path.join(__file__, '../../..')) + if not os.path.exists(os.path.join(rust_root, '.git')) and \ + os.path.exists(os.path.join(rust_root, '.github')): + eprint("warn: Looks like you are trying to bootstrap Rust from a source that is neither a " + "git clone nor distributed tarball.\nThis build may fail due to missing submodules " + "unless you put them in place manually.") + # Read from `--config`, then `RUST_BOOTSTRAP_CONFIG`, then `./config.toml`, # then `config.toml` in the root directory. toml_path = args.config or os.getenv('RUST_BOOTSTRAP_CONFIG') @@ -1112,7 +1125,7 @@ # process has to happen before anything is printed out. 
if help_triggered: eprint( - "info: Downloading and building bootstrap before processing --help command.\n" + "INFO: Downloading and building bootstrap before processing --help command.\n" " See src/bootstrap/README.md for help with common commands.") exit_code = 0 diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bootstrap_test.py rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bootstrap_test.py --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/bootstrap_test.py 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/bootstrap_test.py 2023-12-21 16:55:28.000000000 +0000 @@ -34,7 +34,7 @@ # Verify this is actually valid TOML. tomllib.loads(build.config_toml) except ImportError: - print("warning: skipping TOML validation, need at least python 3.11", file=sys.stderr) + print("WARNING: skipping TOML validation, need at least python 3.11", file=sys.stderr) return build @@ -103,7 +103,6 @@ """Test that we can serialize and deserialize a config.toml file""" def test_no_args(self): build = serialize_and_parse([]) - self.assertEqual(build.get_toml("changelog-seen"), '2') self.assertEqual(build.get_toml("profile"), 'dist') self.assertIsNone(build.get_toml("llvm.download-ci-llvm")) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/builder/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/builder/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/builder/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/builder/tests.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,702 +0,0 @@ -use super::*; -use crate::config::{Config, DryRun, TargetSelection}; -use crate::doc::DocumentationFormat; -use std::thread; - -fn configure(cmd: &str, host: &[&str], target: &[&str]) -> Config { - configure_with_args(&[cmd.to_owned()], host, target) -} - -fn configure_with_args(cmd: &[String], host: &[&str], target: &[&str]) -> Config { - let mut config = Config::parse(cmd); - // don't save toolstates - config.save_toolstates = None; - config.dry_run = DryRun::SelfCheck; - - // Ignore most submodules, since we don't need them for a dry run. - // But make sure to check out the `doc` and `rust-analyzer` submodules, since some steps need them - // just to know which commands to run. 
- let submodule_build = Build::new(Config { - // don't include LLVM, so CI doesn't require ninja/cmake to be installed - rust_codegen_backends: vec![], - ..Config::parse(&["check".to_owned()]) - }); - submodule_build.update_submodule(Path::new("src/doc/book")); - submodule_build.update_submodule(Path::new("src/tools/rust-analyzer")); - config.submodules = Some(false); - - config.ninja_in_file = false; - // try to avoid spurious failures in dist where we create/delete each others file - // HACK: rather than pull in `tempdir`, use the one that cargo has conveniently created for us - let dir = Path::new(env!("OUT_DIR")) - .join("tmp-rustbuild-tests") - .join(&thread::current().name().unwrap_or("unknown").replace(":", "-")); - t!(fs::create_dir_all(&dir)); - config.out = dir; - config.build = TargetSelection::from_user("A"); - config.hosts = host.iter().map(|s| TargetSelection::from_user(s)).collect(); - config.targets = target.iter().map(|s| TargetSelection::from_user(s)).collect(); - config -} - -fn first(v: Vec<(A, B)>) -> Vec { - v.into_iter().map(|(a, _)| a).collect::>() -} - -fn run_build(paths: &[PathBuf], config: Config) -> Cache { - let kind = config.cmd.kind(); - let build = Build::new(config); - let builder = Builder::new(&build); - builder.run_step_descriptions(&Builder::get_step_descriptions(kind), paths); - builder.cache -} - -fn check_cli(paths: [&str; N]) { - run_build( - &paths.map(PathBuf::from), - configure_with_args(&paths.map(String::from), &["A"], &["A"]), - ); -} - -macro_rules! std { - ($host:ident => $target:ident, stage = $stage:literal) => { - compile::Std::new( - Compiler { host: TargetSelection::from_user(stringify!($host)), stage: $stage }, - TargetSelection::from_user(stringify!($target)), - ) - }; -} - -macro_rules! doc_std { - ($host:ident => $target:ident, stage = $stage:literal) => {{ - let config = configure("doc", &["A"], &["A"]); - let build = Build::new(config); - let builder = Builder::new(&build); - doc::Std::new( - $stage, - TargetSelection::from_user(stringify!($target)), - &builder, - DocumentationFormat::HTML, - ) - }}; -} - -macro_rules! 
rustc { - ($host:ident => $target:ident, stage = $stage:literal) => { - compile::Rustc::new( - Compiler { host: TargetSelection::from_user(stringify!($host)), stage: $stage }, - TargetSelection::from_user(stringify!($target)), - ) - }; -} - -#[test] -fn test_valid() { - // make sure multi suite paths are accepted - check_cli(["test", "tests/ui/attr-start.rs", "tests/ui/attr-shebang.rs"]); -} - -#[test] -#[should_panic] -fn test_invalid() { - // make sure that invalid paths are caught, even when combined with valid paths - check_cli(["test", "library/std", "x"]); -} - -#[test] -fn test_intersection() { - let set = |paths: &[&str]| { - PathSet::Set(paths.into_iter().map(|p| TaskPath { path: p.into(), kind: None }).collect()) - }; - let library_set = set(&["library/core", "library/alloc", "library/std"]); - let mut command_paths = - vec![Path::new("library/core"), Path::new("library/alloc"), Path::new("library/stdarch")]; - let subset = library_set.intersection_removing_matches(&mut command_paths, Kind::Build); - assert_eq!(subset, set(&["library/core", "library/alloc"]),); - assert_eq!(command_paths, vec![Path::new("library/stdarch")]); -} - -#[test] -fn test_exclude() { - let mut config = configure("test", &["A"], &["A"]); - config.skip = vec!["src/tools/tidy".into()]; - let cache = run_build(&[], config); - - // Ensure we have really excluded tidy - assert!(!cache.contains::()); - - // Ensure other tests are not affected. - assert!(cache.contains::()); -} - -#[test] -fn test_exclude_kind() { - let path = PathBuf::from("compiler/rustc_data_structures"); - - let mut config = configure("test", &["A"], &["A"]); - // Ensure our test is valid, and `test::Rustc` would be run without the exclude. - assert!(run_build(&[], config.clone()).contains::()); - // Ensure tests for rustc are not skipped. - config.skip = vec![path.clone()]; - assert!(run_build(&[], config.clone()).contains::()); - // Ensure builds for rustc are not skipped. - assert!(run_build(&[], config).contains::()); -} - -/// Ensure that if someone passes both a single crate and `library`, all library crates get built. 
-#[test] -fn alias_and_path_for_library() { - let mut cache = - run_build(&["library".into(), "core".into()], configure("build", &["A"], &["A"])); - assert_eq!( - first(cache.all::()), - &[std!(A => A, stage = 0), std!(A => A, stage = 1)] - ); - - let mut cache = run_build(&["library".into(), "core".into()], configure("doc", &["A"], &["A"])); - assert_eq!(first(cache.all::()), &[doc_std!(A => A, stage = 0)]); -} - -#[test] -fn test_beta_rev_parsing() { - use crate::extract_beta_rev; - - // single digit revision - assert_eq!(extract_beta_rev("1.99.9-beta.7 (xxxxxx)"), Some("7".to_string())); - // multiple digits - assert_eq!(extract_beta_rev("1.99.9-beta.777 (xxxxxx)"), Some("777".to_string())); - // nightly channel (no beta revision) - assert_eq!(extract_beta_rev("1.99.9-nightly (xxxxxx)"), None); - // stable channel (no beta revision) - assert_eq!(extract_beta_rev("1.99.9 (xxxxxxx)"), None); - // invalid string - assert_eq!(extract_beta_rev("invalid"), None); -} - -mod defaults { - use super::{configure, first, run_build}; - use crate::builder::*; - use crate::Config; - use pretty_assertions::assert_eq; - - #[test] - fn build_default() { - let mut cache = run_build(&[], configure("build", &["A"], &["A"])); - - let a = TargetSelection::from_user("A"); - assert_eq!( - first(cache.all::()), - &[std!(A => A, stage = 0), std!(A => A, stage = 1),] - ); - assert!(!cache.all::().is_empty()); - // Make sure rustdoc is only built once. - assert_eq!( - first(cache.all::()), - // Recall that rustdoc stages are off-by-one - // - this is the compiler it's _linked_ to, not built with. - &[tool::Rustdoc { compiler: Compiler { host: a, stage: 1 } }], - ); - assert_eq!(first(cache.all::()), &[rustc!(A => A, stage = 0)],); - } - - #[test] - fn build_stage_0() { - let config = Config { stage: 0, ..configure("build", &["A"], &["A"]) }; - let mut cache = run_build(&[], config); - - let a = TargetSelection::from_user("A"); - assert_eq!(first(cache.all::()), &[std!(A => A, stage = 0)]); - assert!(!cache.all::().is_empty()); - assert_eq!( - first(cache.all::()), - // This is the beta rustdoc. - // Add an assert here to make sure this is the only rustdoc built. - &[tool::Rustdoc { compiler: Compiler { host: a, stage: 0 } }], - ); - assert!(cache.all::().is_empty()); - } - - #[test] - fn build_cross_compile() { - let config = Config { stage: 1, ..configure("build", &["A", "B"], &["A", "B"]) }; - let mut cache = run_build(&[], config); - - let a = TargetSelection::from_user("A"); - let b = TargetSelection::from_user("B"); - - // Ideally, this build wouldn't actually have `target: a` - // rustdoc/rustcc/std here (the user only requested a host=B build, so - // there's not really a need for us to build for target A in this case - // (since we're producing stage 1 libraries/binaries). But currently - // rustbuild is just a bit buggy here; this should be fixed though. 
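The `test_beta_rev_parsing` cases above pin down the contract of `extract_beta_rev`: the digits that follow `-beta.` are returned, while nightly, stable, and malformed version strings yield `None`. A stand-alone sketch with the same observable behaviour (not the actual bootstrap implementation):

fn extract_beta_rev(version: &str) -> Option<String> {
    // Take the digits that immediately follow "-beta." in e.g. "1.99.9-beta.7 (xxxxxx)".
    let (_, rest) = version.split_once("-beta.")?;
    let digits: String = rest.chars().take_while(|c| c.is_ascii_digit()).collect();
    if digits.is_empty() { None } else { Some(digits) }
}

fn main() {
    assert_eq!(extract_beta_rev("1.99.9-beta.777 (xxxxxx)"), Some("777".to_string()));
    assert_eq!(extract_beta_rev("1.99.9-nightly (xxxxxx)"), None);
}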
- assert_eq!( - first(cache.all::()), - &[ - std!(A => A, stage = 0), - std!(A => A, stage = 1), - std!(A => B, stage = 0), - std!(A => B, stage = 1), - ] - ); - assert_eq!( - first(cache.all::()), - &[ - compile::Assemble { target_compiler: Compiler { host: a, stage: 0 } }, - compile::Assemble { target_compiler: Compiler { host: a, stage: 1 } }, - compile::Assemble { target_compiler: Compiler { host: b, stage: 1 } }, - ] - ); - assert_eq!( - first(cache.all::()), - &[ - tool::Rustdoc { compiler: Compiler { host: a, stage: 1 } }, - tool::Rustdoc { compiler: Compiler { host: b, stage: 1 } }, - ], - ); - assert_eq!( - first(cache.all::()), - &[rustc!(A => A, stage = 0), rustc!(A => B, stage = 0),] - ); - } - - #[test] - fn doc_default() { - let mut config = configure("doc", &["A"], &["A"]); - config.compiler_docs = true; - config.cmd = Subcommand::Doc { open: false, json: false }; - let mut cache = run_build(&[], config); - let a = TargetSelection::from_user("A"); - - // error_index_generator uses stage 0 to share rustdoc artifacts with the - // rustdoc tool. - assert_eq!(first(cache.all::()), &[doc::ErrorIndex { target: a },]); - assert_eq!( - first(cache.all::()), - &[tool::ErrorIndex { compiler: Compiler { host: a, stage: 0 } }] - ); - // docs should be built with the beta compiler, not with the stage0 artifacts. - // recall that rustdoc is off-by-one: `stage` is the compiler rustdoc is _linked_ to, - // not the one it was built by. - assert_eq!( - first(cache.all::()), - &[tool::Rustdoc { compiler: Compiler { host: a, stage: 0 } },] - ); - } -} - -mod dist { - use super::{first, run_build, Config}; - use crate::builder::*; - use pretty_assertions::assert_eq; - - fn configure(host: &[&str], target: &[&str]) -> Config { - Config { stage: 2, ..super::configure("dist", host, target) } - } - - #[test] - fn dist_baseline() { - let mut cache = run_build(&[], configure(&["A"], &["A"])); - - let a = TargetSelection::from_user("A"); - - assert_eq!(first(cache.all::()), &[dist::Docs { host: a },]); - assert_eq!(first(cache.all::()), &[dist::Mingw { host: a },]); - assert_eq!( - first(cache.all::()), - &[dist::Rustc { compiler: Compiler { host: a, stage: 2 } },] - ); - assert_eq!( - first(cache.all::()), - &[dist::Std { compiler: Compiler { host: a, stage: 1 }, target: a },] - ); - assert_eq!(first(cache.all::()), &[dist::Src]); - // Make sure rustdoc is only built once. 
- assert_eq!( - first(cache.all::()), - &[tool::Rustdoc { compiler: Compiler { host: a, stage: 2 } },] - ); - } - - #[test] - fn dist_with_targets() { - let mut cache = run_build(&[], configure(&["A"], &["A", "B"])); - - let a = TargetSelection::from_user("A"); - let b = TargetSelection::from_user("B"); - - assert_eq!( - first(cache.all::()), - &[dist::Docs { host: a }, dist::Docs { host: b },] - ); - assert_eq!( - first(cache.all::()), - &[dist::Mingw { host: a }, dist::Mingw { host: b },] - ); - assert_eq!( - first(cache.all::()), - &[dist::Rustc { compiler: Compiler { host: a, stage: 2 } },] - ); - assert_eq!( - first(cache.all::()), - &[ - dist::Std { compiler: Compiler { host: a, stage: 1 }, target: a }, - dist::Std { compiler: Compiler { host: a, stage: 2 }, target: b }, - ] - ); - assert_eq!(first(cache.all::()), &[dist::Src]); - } - - #[test] - fn dist_with_hosts() { - let mut cache = run_build(&[], configure(&["A", "B"], &["A", "B"])); - - let a = TargetSelection::from_user("A"); - let b = TargetSelection::from_user("B"); - - assert_eq!( - first(cache.all::()), - &[dist::Docs { host: a }, dist::Docs { host: b },] - ); - assert_eq!( - first(cache.all::()), - &[dist::Mingw { host: a }, dist::Mingw { host: b },] - ); - assert_eq!( - first(cache.all::()), - &[ - dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, - dist::Rustc { compiler: Compiler { host: b, stage: 2 } }, - ] - ); - assert_eq!( - first(cache.all::()), - &[ - dist::Std { compiler: Compiler { host: a, stage: 1 }, target: a }, - dist::Std { compiler: Compiler { host: a, stage: 1 }, target: b }, - ] - ); - assert_eq!( - first(cache.all::()), - &[ - std!(A => A, stage = 0), - std!(A => A, stage = 1), - std!(A => A, stage = 2), - std!(A => B, stage = 1), - std!(A => B, stage = 2), - ], - ); - assert_eq!(first(cache.all::()), &[dist::Src]); - } - - #[test] - fn dist_only_cross_host() { - let b = TargetSelection::from_user("B"); - let mut config = configure(&["A", "B"], &["A", "B"]); - config.docs = false; - config.extended = true; - config.hosts = vec![b]; - let mut cache = run_build(&[], config); - - assert_eq!( - first(cache.all::()), - &[dist::Rustc { compiler: Compiler { host: b, stage: 2 } },] - ); - assert_eq!( - first(cache.all::()), - &[rustc!(A => A, stage = 0), rustc!(A => B, stage = 1),] - ); - } - - #[test] - fn dist_with_targets_and_hosts() { - let mut cache = run_build(&[], configure(&["A", "B"], &["A", "B", "C"])); - - let a = TargetSelection::from_user("A"); - let b = TargetSelection::from_user("B"); - let c = TargetSelection::from_user("C"); - - assert_eq!( - first(cache.all::()), - &[dist::Docs { host: a }, dist::Docs { host: b }, dist::Docs { host: c },] - ); - assert_eq!( - first(cache.all::()), - &[dist::Mingw { host: a }, dist::Mingw { host: b }, dist::Mingw { host: c },] - ); - assert_eq!( - first(cache.all::()), - &[ - dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, - dist::Rustc { compiler: Compiler { host: b, stage: 2 } }, - ] - ); - assert_eq!( - first(cache.all::()), - &[ - dist::Std { compiler: Compiler { host: a, stage: 1 }, target: a }, - dist::Std { compiler: Compiler { host: a, stage: 1 }, target: b }, - dist::Std { compiler: Compiler { host: a, stage: 2 }, target: c }, - ] - ); - assert_eq!(first(cache.all::()), &[dist::Src]); - } - - #[test] - fn dist_with_empty_host() { - let config = configure(&[], &["C"]); - let mut cache = run_build(&[], config); - - let a = TargetSelection::from_user("A"); - let c = TargetSelection::from_user("C"); - - assert_eq!(first(cache.all::()), 
&[dist::Docs { host: c },]); - assert_eq!(first(cache.all::()), &[dist::Mingw { host: c },]); - assert_eq!( - first(cache.all::()), - &[dist::Std { compiler: Compiler { host: a, stage: 2 }, target: c },] - ); - } - - #[test] - fn dist_with_same_targets_and_hosts() { - let mut cache = run_build(&[], configure(&["A", "B"], &["A", "B"])); - - let a = TargetSelection::from_user("A"); - let b = TargetSelection::from_user("B"); - - assert_eq!( - first(cache.all::()), - &[dist::Docs { host: a }, dist::Docs { host: b },] - ); - assert_eq!( - first(cache.all::()), - &[dist::Mingw { host: a }, dist::Mingw { host: b },] - ); - assert_eq!( - first(cache.all::()), - &[ - dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, - dist::Rustc { compiler: Compiler { host: b, stage: 2 } }, - ] - ); - assert_eq!( - first(cache.all::()), - &[ - dist::Std { compiler: Compiler { host: a, stage: 1 }, target: a }, - dist::Std { compiler: Compiler { host: a, stage: 1 }, target: b }, - ] - ); - assert_eq!(first(cache.all::()), &[dist::Src]); - assert_eq!( - first(cache.all::()), - &[ - std!(A => A, stage = 0), - std!(A => A, stage = 1), - std!(A => A, stage = 2), - std!(A => B, stage = 1), - std!(A => B, stage = 2), - ] - ); - assert_eq!( - first(cache.all::()), - &[ - compile::Assemble { target_compiler: Compiler { host: a, stage: 0 } }, - compile::Assemble { target_compiler: Compiler { host: a, stage: 1 } }, - compile::Assemble { target_compiler: Compiler { host: a, stage: 2 } }, - compile::Assemble { target_compiler: Compiler { host: b, stage: 2 } }, - ] - ); - } - - #[test] - fn build_all() { - let build = Build::new(configure(&["A", "B"], &["A", "B", "C"])); - let mut builder = Builder::new(&build); - builder.run_step_descriptions( - &Builder::get_step_descriptions(Kind::Build), - &["compiler/rustc".into(), "library".into()], - ); - - assert_eq!( - first(builder.cache.all::()), - &[ - std!(A => A, stage = 0), - std!(A => A, stage = 1), - std!(A => A, stage = 2), - std!(A => B, stage = 1), - std!(A => B, stage = 2), - std!(A => C, stage = 2), - ] - ); - assert_eq!(builder.cache.all::().len(), 5); - assert_eq!( - first(builder.cache.all::()), - &[ - rustc!(A => A, stage = 0), - rustc!(A => A, stage = 1), - rustc!(A => A, stage = 2), - rustc!(A => B, stage = 1), - rustc!(A => B, stage = 2), - ] - ); - } - - #[test] - fn build_with_empty_host() { - let config = configure(&[], &["C"]); - let build = Build::new(config); - let mut builder = Builder::new(&build); - builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]); - - let a = TargetSelection::from_user("A"); - - assert_eq!( - first(builder.cache.all::()), - &[std!(A => A, stage = 0), std!(A => A, stage = 1), std!(A => C, stage = 2),] - ); - assert_eq!( - first(builder.cache.all::()), - &[ - compile::Assemble { target_compiler: Compiler { host: a, stage: 0 } }, - compile::Assemble { target_compiler: Compiler { host: a, stage: 1 } }, - compile::Assemble { target_compiler: Compiler { host: a, stage: 2 } }, - ] - ); - assert_eq!( - first(builder.cache.all::()), - &[rustc!(A => A, stage = 0), rustc!(A => A, stage = 1),] - ); - } - - #[test] - fn test_with_no_doc_stage0() { - let mut config = configure(&["A"], &["A"]); - config.stage = 0; - config.paths = vec!["library/std".into()]; - config.cmd = Subcommand::Test { - test_args: vec![], - rustc_args: vec![], - no_fail_fast: false, - no_doc: true, - doc: false, - bless: false, - force_rerun: false, - compare_mode: None, - rustfix_coverage: false, - pass: None, - run: None, - only_modified: 
false, - skip: vec![], - extra_checks: None, - }; - - let build = Build::new(config); - let mut builder = Builder::new(&build); - - let host = TargetSelection::from_user("A"); - - builder.run_step_descriptions( - &[StepDescription::from::(Kind::Test)], - &["library/std".into()], - ); - - // Ensure we don't build any compiler artifacts. - assert!(!builder.cache.contains::()); - assert_eq!( - first(builder.cache.all::()), - &[test::Crate { - compiler: Compiler { host, stage: 0 }, - target: host, - mode: Mode::Std, - crates: vec![INTERNER.intern_str("std")], - },] - ); - } - - #[test] - fn doc_ci() { - let mut config = configure(&["A"], &["A"]); - config.compiler_docs = true; - config.cmd = Subcommand::Doc { open: false, json: false }; - let build = Build::new(config); - let mut builder = Builder::new(&build); - builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), &[]); - let a = TargetSelection::from_user("A"); - - // error_index_generator uses stage 1 to share rustdoc artifacts with the - // rustdoc tool. - assert_eq!( - first(builder.cache.all::()), - &[doc::ErrorIndex { target: a },] - ); - assert_eq!( - first(builder.cache.all::()), - &[tool::ErrorIndex { compiler: Compiler { host: a, stage: 1 } }] - ); - // This is actually stage 1, but Rustdoc::run swaps out the compiler with - // stage minus 1 if --stage is not 0. Very confusing! - assert_eq!( - first(builder.cache.all::()), - &[tool::Rustdoc { compiler: Compiler { host: a, stage: 2 } },] - ); - } - - #[test] - fn test_docs() { - // Behavior of `x.py test` doing various documentation tests. - let mut config = configure(&["A"], &["A"]); - config.cmd = Subcommand::Test { - test_args: vec![], - rustc_args: vec![], - no_fail_fast: false, - doc: true, - no_doc: false, - skip: vec![], - bless: false, - force_rerun: false, - compare_mode: None, - rustfix_coverage: false, - pass: None, - run: None, - only_modified: false, - extra_checks: None, - }; - // Make sure rustfmt binary not being found isn't an error. - config.channel = "beta".to_string(); - let build = Build::new(config); - let mut builder = Builder::new(&build); - - builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Test), &[]); - let a = TargetSelection::from_user("A"); - - // error_index_generator uses stage 1 to share rustdoc artifacts with the - // rustdoc tool. - assert_eq!( - first(builder.cache.all::()), - &[doc::ErrorIndex { target: a },] - ); - assert_eq!( - first(builder.cache.all::()), - &[tool::ErrorIndex { compiler: Compiler { host: a, stage: 1 } }] - ); - // Unfortunately rustdoc is built twice. Once from stage1 for compiletest - // (and other things), and once from stage0 for std crates. Ideally it - // would only be built once. If someone wants to fix this, it might be - // worth investigating if it would be possible to test std from stage1. - // Note that the stages here are +1 than what they actually are because - // Rustdoc::run swaps out the compiler with stage minus 1 if --stage is - // not 0. - // - // The stage 0 copy is the one downloaded for bootstrapping. It is - // (currently) needed to run "cargo test" on the linkchecker, and - // should be relatively "free". 
- assert_eq!( - first(builder.cache.all::()), - &[ - tool::Rustdoc { compiler: Compiler { host: a, stage: 0 } }, - tool::Rustdoc { compiler: Compiler { host: a, stage: 1 } }, - tool::Rustdoc { compiler: Compiler { host: a, stage: 2 } }, - ] - ); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/builder.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/builder.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/builder.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/builder.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2318 +0,0 @@ -use std::any::{type_name, Any}; -use std::cell::{Cell, RefCell}; -use std::collections::BTreeSet; -use std::env; -use std::ffi::OsStr; -use std::fmt::{Debug, Write}; -use std::fs::{self, File}; -use std::hash::Hash; -use std::io::{BufRead, BufReader}; -use std::ops::Deref; -use std::path::{Path, PathBuf}; -use std::process::Command; -use std::time::{Duration, Instant}; - -use crate::cache::{Cache, Interned, INTERNER}; -use crate::config::{DryRun, SplitDebuginfo, TargetSelection}; -use crate::doc; -use crate::flags::{Color, Subcommand}; -use crate::install; -use crate::llvm; -use crate::run; -use crate::setup; -use crate::test; -use crate::tool::{self, SourceType}; -use crate::util::{self, add_dylib_path, add_link_lib_path, exe, libdir, output, t}; -use crate::EXTRA_CHECK_CFGS; -use crate::{check, compile, Crate}; -use crate::{clean, dist}; -use crate::{Build, CLang, DocTests, GitRepo, Mode}; - -pub use crate::Compiler; -// FIXME: -// - use std::lazy for `Lazy` -// - use std::cell for `OnceCell` -// Once they get stabilized and reach beta. -use clap::ValueEnum; -use once_cell::sync::{Lazy, OnceCell}; - -pub struct Builder<'a> { - pub build: &'a Build, - pub top_stage: u32, - pub kind: Kind, - cache: Cache, - stack: RefCell>>, - time_spent_on_dependencies: Cell, - pub paths: Vec, -} - -impl<'a> Deref for Builder<'a> { - type Target = Build; - - fn deref(&self) -> &Self::Target { - self.build - } -} - -pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash { - /// `PathBuf` when directories are created or to return a `Compiler` once - /// it's been assembled. - type Output: Clone; - - /// Whether this step is run by default as part of its respective phase. - /// `true` here can still be overwritten by `should_run` calling `default_condition`. - const DEFAULT: bool = false; - - /// If true, then this rule should be skipped if --target was specified, but --host was not - const ONLY_HOSTS: bool = false; - - /// Primary function to execute this rule. Can call `builder.ensure()` - /// with other steps to run those. - fn run(self, builder: &Builder<'_>) -> Self::Output; - - /// When bootstrap is passed a set of paths, this controls whether this rule - /// will execute. However, it does not get called in a "default" context - /// when we are not passed any paths; in that case, `make_run` is called - /// directly. - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_>; - - /// Builds up a "root" rule, either as a default rule or from a path passed - /// to us. - /// - /// When path is `None`, we are executing in a context where no paths were - /// passed. When `./x.py build` is run, for example, this rule could get - /// called if it is in the correct list below with a path of `None`. - fn make_run(_run: RunConfig<'_>) { - // It is reasonable to not have an implementation of make_run for rules - // who do not want to get called from the root context. 
This means that - // they are likely dependencies (e.g., sysroot creation) or similar, and - // as such calling them from ./x.py isn't logical. - unimplemented!() - } -} - -pub struct RunConfig<'a> { - pub builder: &'a Builder<'a>, - pub target: TargetSelection, - pub paths: Vec, -} - -impl RunConfig<'_> { - pub fn build_triple(&self) -> TargetSelection { - self.builder.build.build - } - - /// Return a list of crate names selected by `run.paths`. - #[track_caller] - pub fn cargo_crates_in_set(&self) -> Interned> { - let mut crates = Vec::new(); - for krate in &self.paths { - let path = krate.assert_single_path(); - let Some(crate_name) = self.builder.crate_paths.get(&path.path) else { - panic!("missing crate for path {}", path.path.display()) - }; - crates.push(crate_name.to_string()); - } - INTERNER.intern_list(crates) - } - - /// Given an `alias` selected by the `Step` and the paths passed on the command line, - /// return a list of the crates that should be built. - /// - /// Normally, people will pass *just* `library` if they pass it. - /// But it's possible (although strange) to pass something like `library std core`. - /// Build all crates anyway, as if they hadn't passed the other args. - pub fn make_run_crates(&self, alias: Alias) -> Interned> { - let has_alias = - self.paths.iter().any(|set| set.assert_single_path().path.ends_with(alias.as_str())); - if !has_alias { - return self.cargo_crates_in_set(); - } - - let crates = match alias { - Alias::Library => self.builder.in_tree_crates("sysroot", Some(self.target)), - Alias::Compiler => self.builder.in_tree_crates("rustc-main", Some(self.target)), - }; - - let crate_names = crates.into_iter().map(|krate| krate.name.to_string()).collect(); - INTERNER.intern_list(crate_names) - } -} - -#[derive(Debug, Copy, Clone)] -pub enum Alias { - Library, - Compiler, -} - -impl Alias { - fn as_str(self) -> &'static str { - match self { - Alias::Library => "library", - Alias::Compiler => "compiler", - } - } -} - -/// A description of the crates in this set, suitable for passing to `builder.info`. -/// -/// `crates` should be generated by [`RunConfig::cargo_crates_in_set`]. -pub fn crate_description(crates: &[impl AsRef]) -> String { - if crates.is_empty() { - return "".into(); - } - - let mut descr = String::from(" {"); - descr.push_str(crates[0].as_ref()); - for krate in &crates[1..] { - descr.push_str(", "); - descr.push_str(krate.as_ref()); - } - descr.push('}'); - descr -} - -struct StepDescription { - default: bool, - only_hosts: bool, - should_run: fn(ShouldRun<'_>) -> ShouldRun<'_>, - make_run: fn(RunConfig<'_>), - name: &'static str, - kind: Kind, -} - -#[derive(Clone, PartialOrd, Ord, PartialEq, Eq)] -pub struct TaskPath { - pub path: PathBuf, - pub kind: Option, -} - -impl Debug for TaskPath { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if let Some(kind) = &self.kind { - write!(f, "{}::", kind.as_str())?; - } - write!(f, "{}", self.path.display()) - } -} - -/// Collection of paths used to match a task rule. -#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)] -pub enum PathSet { - /// A collection of individual paths or aliases. - /// - /// These are generally matched as a path suffix. For example, a - /// command-line value of `std` will match if `library/std` is in the - /// set. - /// - /// NOTE: the paths within a set should always be aliases of one another. 
- /// For example, `src/librustdoc` and `src/tools/rustdoc` should be in the same set, - /// but `library/core` and `library/std` generally should not, unless there's no way (for that Step) - /// to build them separately. - Set(BTreeSet), - /// A "suite" of paths. - /// - /// These can match as a path suffix (like `Set`), or as a prefix. For - /// example, a command-line value of `tests/ui/abi/variadic-ffi.rs` - /// will match `tests/ui`. A command-line value of `ui` would also - /// match `tests/ui`. - Suite(TaskPath), -} - -impl PathSet { - fn empty() -> PathSet { - PathSet::Set(BTreeSet::new()) - } - - fn one>(path: P, kind: Kind) -> PathSet { - let mut set = BTreeSet::new(); - set.insert(TaskPath { path: path.into(), kind: Some(kind) }); - PathSet::Set(set) - } - - fn has(&self, needle: &Path, module: Kind) -> bool { - match self { - PathSet::Set(set) => set.iter().any(|p| Self::check(p, needle, module)), - PathSet::Suite(suite) => Self::check(suite, needle, module), - } - } - - // internal use only - fn check(p: &TaskPath, needle: &Path, module: Kind) -> bool { - if let Some(p_kind) = &p.kind { - p.path.ends_with(needle) && *p_kind == module - } else { - p.path.ends_with(needle) - } - } - - /// Return all `TaskPath`s in `Self` that contain any of the `needles`, removing the - /// matched needles. - /// - /// This is used for `StepDescription::krate`, which passes all matching crates at once to - /// `Step::make_run`, rather than calling it many times with a single crate. - /// See `tests.rs` for examples. - fn intersection_removing_matches(&self, needles: &mut Vec<&Path>, module: Kind) -> PathSet { - let mut check = |p| { - for (i, n) in needles.iter().enumerate() { - let matched = Self::check(p, n, module); - if matched { - needles.remove(i); - return true; - } - } - false - }; - match self { - PathSet::Set(set) => PathSet::Set(set.iter().filter(|&p| check(p)).cloned().collect()), - PathSet::Suite(suite) => { - if check(suite) { - self.clone() - } else { - PathSet::empty() - } - } - } - } - - /// A convenience wrapper for Steps which know they have no aliases and all their sets contain only a single path. - /// - /// This can be used with [`ShouldRun::crate_or_deps`], [`ShouldRun::path`], or [`ShouldRun::alias`]. - #[track_caller] - pub fn assert_single_path(&self) -> &TaskPath { - match self { - PathSet::Set(set) => { - assert_eq!(set.len(), 1, "called assert_single_path on multiple paths"); - set.iter().next().unwrap() - } - PathSet::Suite(_) => unreachable!("called assert_single_path on a Suite path"), - } - } -} - -impl StepDescription { - fn from(kind: Kind) -> StepDescription { - StepDescription { - default: S::DEFAULT, - only_hosts: S::ONLY_HOSTS, - should_run: S::should_run, - make_run: S::make_run, - name: std::any::type_name::(), - kind, - } - } - - fn maybe_run(&self, builder: &Builder<'_>, mut pathsets: Vec) { - pathsets.retain(|set| !self.is_excluded(builder, set)); - - if pathsets.is_empty() { - return; - } - - // Determine the targets participating in this rule. 
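The `PathSet::check` logic above matches command-line values as path suffixes via `Path::ends_with`, which compares whole components from the end. A small sketch of why a command-line value of `std` selects `library/std` while a partial component never matches:

use std::path::Path;

fn main() {
    assert!(Path::new("library/std").ends_with("std"));         // component suffix matches
    assert!(Path::new("library/std").ends_with("library/std")); // the full path also matches
    assert!(!Path::new("library/std").ends_with("td"));         // partial components never match
}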
- let targets = if self.only_hosts { &builder.hosts } else { &builder.targets }; - - for target in targets { - let run = RunConfig { builder, paths: pathsets.clone(), target: *target }; - (self.make_run)(run); - } - } - - fn is_excluded(&self, builder: &Builder<'_>, pathset: &PathSet) -> bool { - if builder.config.skip.iter().any(|e| pathset.has(&e, builder.kind)) { - if !matches!(builder.config.dry_run, DryRun::SelfCheck) { - println!("Skipping {pathset:?} because it is excluded"); - } - return true; - } - - if !builder.config.skip.is_empty() && !matches!(builder.config.dry_run, DryRun::SelfCheck) { - builder.verbose(&format!( - "{:?} not skipped for {:?} -- not in {:?}", - pathset, self.name, builder.config.skip - )); - } - false - } - - fn run(v: &[StepDescription], builder: &Builder<'_>, paths: &[PathBuf]) { - let should_runs = v - .iter() - .map(|desc| (desc.should_run)(ShouldRun::new(builder, desc.kind))) - .collect::>(); - - // sanity checks on rules - for (desc, should_run) in v.iter().zip(&should_runs) { - assert!( - !should_run.paths.is_empty(), - "{:?} should have at least one pathset", - desc.name - ); - } - - if paths.is_empty() || builder.config.include_default_paths { - for (desc, should_run) in v.iter().zip(&should_runs) { - if desc.default && should_run.is_really_default() { - desc.maybe_run(builder, should_run.paths.iter().cloned().collect()); - } - } - } - - // strip CurDir prefix if present - let mut paths: Vec<_> = - paths.into_iter().map(|p| p.strip_prefix(".").unwrap_or(p)).collect(); - - // Handle all test suite paths. - // (This is separate from the loop below to avoid having to handle multiple paths in `is_suite_path` somehow.) - paths.retain(|path| { - for (desc, should_run) in v.iter().zip(&should_runs) { - if let Some(suite) = should_run.is_suite_path(&path) { - desc.maybe_run(builder, vec![suite.clone()]); - return false; - } - } - true - }); - - if paths.is_empty() { - return; - } - - // Handle all PathSets. - for (desc, should_run) in v.iter().zip(&should_runs) { - let pathsets = should_run.pathset_for_paths_removing_matches(&mut paths, desc.kind); - if !pathsets.is_empty() { - desc.maybe_run(builder, pathsets); - } - } - - if !paths.is_empty() { - eprintln!("error: no `{}` rules matched {:?}", builder.kind.as_str(), paths,); - eprintln!( - "help: run `x.py {} --help --verbose` to show a list of available paths", - builder.kind.as_str() - ); - eprintln!( - "note: if you are adding a new Step to bootstrap itself, make sure you register it with `describe!`" - ); - crate::exit!(1); - } - } -} - -enum ReallyDefault<'a> { - Bool(bool), - Lazy(Lazy bool + 'a>>), -} - -pub struct ShouldRun<'a> { - pub builder: &'a Builder<'a>, - kind: Kind, - - // use a BTreeSet to maintain sort order - paths: BTreeSet, - - // If this is a default rule, this is an additional constraint placed on - // its run. Generally something like compiler docs being enabled. 
- is_really_default: ReallyDefault<'a>, -} - -impl<'a> ShouldRun<'a> { - fn new(builder: &'a Builder<'_>, kind: Kind) -> ShouldRun<'a> { - ShouldRun { - builder, - kind, - paths: BTreeSet::new(), - is_really_default: ReallyDefault::Bool(true), // by default no additional conditions - } - } - - pub fn default_condition(mut self, cond: bool) -> Self { - self.is_really_default = ReallyDefault::Bool(cond); - self - } - - pub fn lazy_default_condition(mut self, lazy_cond: Box bool + 'a>) -> Self { - self.is_really_default = ReallyDefault::Lazy(Lazy::new(lazy_cond)); - self - } - - pub fn is_really_default(&self) -> bool { - match &self.is_really_default { - ReallyDefault::Bool(val) => *val, - ReallyDefault::Lazy(lazy) => *lazy.deref(), - } - } - - /// Indicates it should run if the command-line selects the given crate or - /// any of its (local) dependencies. - /// - /// `make_run` will be called a single time with all matching command-line paths. - pub fn crate_or_deps(self, name: &str) -> Self { - let crates = self.builder.in_tree_crates(name, None); - self.crates(crates) - } - - /// Indicates it should run if the command-line selects any of the given crates. - /// - /// `make_run` will be called a single time with all matching command-line paths. - /// - /// Prefer [`ShouldRun::crate_or_deps`] to this function where possible. - pub(crate) fn crates(mut self, crates: Vec<&Crate>) -> Self { - for krate in crates { - let path = krate.local_path(self.builder); - self.paths.insert(PathSet::one(path, self.kind)); - } - self - } - - // single alias, which does not correspond to any on-disk path - pub fn alias(mut self, alias: &str) -> Self { - // exceptional case for `Kind::Setup` because its `library` - // and `compiler` options would otherwise naively match with - // `compiler` and `library` folders respectively. - assert!( - self.kind == Kind::Setup || !self.builder.src.join(alias).exists(), - "use `builder.path()` for real paths: {alias}" - ); - self.paths.insert(PathSet::Set( - std::iter::once(TaskPath { path: alias.into(), kind: Some(self.kind) }).collect(), - )); - self - } - - // single, non-aliased path - pub fn path(self, path: &str) -> Self { - self.paths(&[path]) - } - - /// Multiple aliases for the same job. - /// - /// This differs from [`path`] in that multiple calls to path will end up calling `make_run` - /// multiple times, whereas a single call to `paths` will only ever generate a single call to - /// `paths`. - /// - /// This is analogous to `all_krates`, although `all_krates` is gone now. Prefer [`path`] where possible. 
- /// - /// [`path`]: ShouldRun::path - pub fn paths(mut self, paths: &[&str]) -> Self { - static SUBMODULES_PATHS: OnceCell> = OnceCell::new(); - - let init_submodules_paths = |src: &PathBuf| { - let file = File::open(src.join(".gitmodules")).unwrap(); - - let mut submodules_paths = vec![]; - for line in BufReader::new(file).lines() { - if let Ok(line) = line { - let line = line.trim(); - - if line.starts_with("path") { - let actual_path = - line.split(' ').last().expect("Couldn't get value of path"); - submodules_paths.push(actual_path.to_owned()); - } - } - } - - submodules_paths - }; - - let submodules_paths = - SUBMODULES_PATHS.get_or_init(|| init_submodules_paths(&self.builder.src)); - - self.paths.insert(PathSet::Set( - paths - .iter() - .map(|p| { - // assert only if `p` isn't submodule - if submodules_paths.iter().find(|sm_p| p.contains(*sm_p)).is_none() { - assert!( - self.builder.src.join(p).exists(), - "`should_run.paths` should correspond to real on-disk paths - use `alias` if there is no relevant path: {}", - p - ); - } - - TaskPath { path: p.into(), kind: Some(self.kind) } - }) - .collect(), - )); - self - } - - /// Handles individual files (not directories) within a test suite. - fn is_suite_path(&self, requested_path: &Path) -> Option<&PathSet> { - self.paths.iter().find(|pathset| match pathset { - PathSet::Suite(suite) => requested_path.starts_with(&suite.path), - PathSet::Set(_) => false, - }) - } - - pub fn suite_path(mut self, suite: &str) -> Self { - self.paths.insert(PathSet::Suite(TaskPath { path: suite.into(), kind: Some(self.kind) })); - self - } - - // allows being more explicit about why should_run in Step returns the value passed to it - pub fn never(mut self) -> ShouldRun<'a> { - self.paths.insert(PathSet::empty()); - self - } - - /// Given a set of requested paths, return the subset which match the Step for this `ShouldRun`, - /// removing the matches from `paths`. - /// - /// NOTE: this returns multiple PathSets to allow for the possibility of multiple units of work - /// within the same step. For example, `test::Crate` allows testing multiple crates in the same - /// cargo invocation, which are put into separate sets because they aren't aliases. - /// - /// The reason we return PathSet instead of PathBuf is to allow for aliases that mean the same thing - /// (for now, just `all_krates` and `paths`, but we may want to add an `aliases` function in the future?) 
- fn pathset_for_paths_removing_matches( - &self, - paths: &mut Vec<&Path>, - kind: Kind, - ) -> Vec { - let mut sets = vec![]; - for pathset in &self.paths { - let subset = pathset.intersection_removing_matches(paths, kind); - if subset != PathSet::empty() { - sets.push(subset); - } - } - sets - } -} - -#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)] -pub enum Kind { - #[clap(alias = "b")] - Build, - #[clap(alias = "c")] - Check, - Clippy, - Fix, - Format, - #[clap(alias = "t")] - Test, - Bench, - #[clap(alias = "d")] - Doc, - Clean, - Dist, - Install, - #[clap(alias = "r")] - Run, - Setup, - Suggest, -} - -impl Kind { - pub fn parse(string: &str) -> Option { - // these strings, including the one-letter aliases, must match the x.py help text - Some(match string { - "build" | "b" => Kind::Build, - "check" | "c" => Kind::Check, - "clippy" => Kind::Clippy, - "fix" => Kind::Fix, - "fmt" => Kind::Format, - "test" | "t" => Kind::Test, - "bench" => Kind::Bench, - "doc" | "d" => Kind::Doc, - "clean" => Kind::Clean, - "dist" => Kind::Dist, - "install" => Kind::Install, - "run" | "r" => Kind::Run, - "setup" => Kind::Setup, - "suggest" => Kind::Suggest, - _ => return None, - }) - } - - pub fn as_str(&self) -> &'static str { - match self { - Kind::Build => "build", - Kind::Check => "check", - Kind::Clippy => "clippy", - Kind::Fix => "fix", - Kind::Format => "fmt", - Kind::Test => "test", - Kind::Bench => "bench", - Kind::Doc => "doc", - Kind::Clean => "clean", - Kind::Dist => "dist", - Kind::Install => "install", - Kind::Run => "run", - Kind::Setup => "setup", - Kind::Suggest => "suggest", - } - } - - pub fn description(&self) -> String { - match self { - Kind::Test => "Testing", - Kind::Bench => "Benchmarking", - Kind::Doc => "Documenting", - Kind::Run => "Running", - Kind::Suggest => "Suggesting", - _ => { - let title_letter = self.as_str()[0..1].to_ascii_uppercase(); - return format!("{title_letter}{}ing", &self.as_str()[1..]); - } - } - .to_owned() - } -} - -impl<'a> Builder<'a> { - fn get_step_descriptions(kind: Kind) -> Vec { - macro_rules! describe { - ($($rule:ty),+ $(,)?) 
=> {{ - vec![$(StepDescription::from::<$rule>(kind)),+] - }}; - } - match kind { - Kind::Build => describe!( - compile::Std, - compile::Rustc, - compile::Assemble, - compile::CodegenBackend, - compile::StartupObjects, - tool::BuildManifest, - tool::Rustbook, - tool::ErrorIndex, - tool::UnstableBookGen, - tool::Tidy, - tool::Linkchecker, - tool::CargoTest, - tool::Compiletest, - tool::RemoteTestServer, - tool::RemoteTestClient, - tool::RustInstaller, - tool::Cargo, - tool::Rls, - tool::RustAnalyzer, - tool::RustAnalyzerProcMacroSrv, - tool::RustDemangler, - tool::Rustdoc, - tool::Clippy, - tool::CargoClippy, - llvm::Llvm, - llvm::Sanitizers, - tool::Rustfmt, - tool::Miri, - tool::CargoMiri, - llvm::Lld, - llvm::CrtBeginEnd, - tool::RustdocGUITest, - tool::OptimizedDist, - tool::CoverageDump, - ), - Kind::Check | Kind::Clippy | Kind::Fix => describe!( - check::Std, - check::Rustc, - check::Rustdoc, - check::CodegenBackend, - check::Clippy, - check::Miri, - check::CargoMiri, - check::MiroptTestTools, - check::Rls, - check::Rustfmt, - check::RustAnalyzer, - check::Bootstrap - ), - Kind::Test => describe!( - crate::toolstate::ToolStateCheck, - test::ExpandYamlAnchors, - test::Tidy, - test::Ui, - test::RunPassValgrind, - test::CoverageMap, - test::RunCoverage, - test::MirOpt, - test::Codegen, - test::CodegenUnits, - test::Assembly, - test::Incremental, - test::Debuginfo, - test::UiFullDeps, - test::CodegenCranelift, - test::Rustdoc, - test::RunCoverageRustdoc, - test::Pretty, - test::Crate, - test::CrateLibrustc, - test::CrateRustdoc, - test::CrateRustdocJsonTypes, - test::CrateBootstrap, - test::Linkcheck, - test::TierCheck, - test::Cargotest, - test::Cargo, - test::RustAnalyzer, - test::ErrorIndex, - test::Distcheck, - test::RunMakeFullDeps, - test::Nomicon, - test::Reference, - test::RustdocBook, - test::RustByExample, - test::TheBook, - test::UnstableBook, - test::RustcBook, - test::LintDocs, - test::RustcGuide, - test::EmbeddedBook, - test::EditionGuide, - test::Rustfmt, - test::Miri, - test::Clippy, - test::RustDemangler, - test::CompiletestTest, - test::RustdocJSStd, - test::RustdocJSNotStd, - test::RustdocGUI, - test::RustdocTheme, - test::RustdocUi, - test::RustdocJson, - test::HtmlCheck, - test::RustInstaller, - // Run bootstrap close to the end as it's unlikely to fail - test::Bootstrap, - // Run run-make last, since these won't pass without make on Windows - test::RunMake, - ), - Kind::Bench => describe!(test::Crate, test::CrateLibrustc), - Kind::Doc => describe!( - doc::UnstableBook, - doc::UnstableBookGen, - doc::TheBook, - doc::Standalone, - doc::Std, - doc::Rustc, - doc::Rustdoc, - doc::Rustfmt, - doc::ErrorIndex, - doc::Nomicon, - doc::Reference, - doc::RustdocBook, - doc::RustByExample, - doc::RustcBook, - doc::Cargo, - doc::CargoBook, - doc::Clippy, - doc::ClippyBook, - doc::Miri, - doc::EmbeddedBook, - doc::EditionGuide, - doc::StyleGuide, - doc::Tidy, - doc::Bootstrap, - ), - Kind::Dist => describe!( - dist::Docs, - dist::RustcDocs, - dist::JsonDocs, - dist::Mingw, - dist::Rustc, - dist::Std, - dist::RustcDev, - dist::Analysis, - dist::Src, - dist::Cargo, - dist::Rls, - dist::RustAnalyzer, - dist::Rustfmt, - dist::RustDemangler, - dist::Clippy, - dist::Miri, - dist::LlvmTools, - dist::RustDev, - dist::Bootstrap, - dist::Extended, - // It seems that PlainSourceTarball somehow changes how some of the tools - // perceive their dependencies (see #93033) which would invalidate fingerprints - // and force us to rebuild tools after vendoring dependencies. 
- // To work around this, create the Tarball after building all the tools. - dist::PlainSourceTarball, - dist::BuildManifest, - dist::ReproducibleArtifacts, - ), - Kind::Install => describe!( - install::Docs, - install::Std, - install::Cargo, - install::RustAnalyzer, - install::Rustfmt, - install::RustDemangler, - install::Clippy, - install::Miri, - install::LlvmTools, - install::Src, - install::Rustc - ), - Kind::Run => describe!( - run::ExpandYamlAnchors, - run::BuildManifest, - run::BumpStage0, - run::ReplaceVersionPlaceholder, - run::Miri, - run::CollectLicenseMetadata, - run::GenerateCopyright, - run::GenerateWindowsSys, - run::GenerateCompletions, - ), - Kind::Setup => describe!(setup::Profile, setup::Hook, setup::Link, setup::Vscode), - Kind::Clean => describe!(clean::CleanAll, clean::Rustc, clean::Std), - // special-cased in Build::build() - Kind::Format | Kind::Suggest => vec![], - } - } - - pub fn get_help(build: &Build, kind: Kind) -> Option { - let step_descriptions = Builder::get_step_descriptions(kind); - if step_descriptions.is_empty() { - return None; - } - - let builder = Self::new_internal(build, kind, vec![]); - let builder = &builder; - // The "build" kind here is just a placeholder, it will be replaced with something else in - // the following statement. - let mut should_run = ShouldRun::new(builder, Kind::Build); - for desc in step_descriptions { - should_run.kind = desc.kind; - should_run = (desc.should_run)(should_run); - } - let mut help = String::from("Available paths:\n"); - let mut add_path = |path: &Path| { - t!(write!(help, " ./x.py {} {}\n", kind.as_str(), path.display())); - }; - for pathset in should_run.paths { - match pathset { - PathSet::Set(set) => { - for path in set { - add_path(&path.path); - } - } - PathSet::Suite(path) => { - add_path(&path.path.join("...")); - } - } - } - Some(help) - } - - fn new_internal(build: &Build, kind: Kind, paths: Vec) -> Builder<'_> { - Builder { - build, - top_stage: build.config.stage, - kind, - cache: Cache::new(), - stack: RefCell::new(Vec::new()), - time_spent_on_dependencies: Cell::new(Duration::new(0, 0)), - paths, - } - } - - pub fn new(build: &Build) -> Builder<'_> { - let paths = &build.config.paths; - let (kind, paths) = match build.config.cmd { - Subcommand::Build => (Kind::Build, &paths[..]), - Subcommand::Check { .. } => (Kind::Check, &paths[..]), - Subcommand::Clippy { .. } => (Kind::Clippy, &paths[..]), - Subcommand::Fix => (Kind::Fix, &paths[..]), - Subcommand::Doc { .. } => (Kind::Doc, &paths[..]), - Subcommand::Test { .. } => (Kind::Test, &paths[..]), - Subcommand::Bench { .. } => (Kind::Bench, &paths[..]), - Subcommand::Dist => (Kind::Dist, &paths[..]), - Subcommand::Install => (Kind::Install, &paths[..]), - Subcommand::Run { .. } => (Kind::Run, &paths[..]), - Subcommand::Clean { .. } => (Kind::Clean, &paths[..]), - Subcommand::Format { .. } => (Kind::Format, &[][..]), - Subcommand::Suggest { .. 
} => (Kind::Suggest, &[][..]), - Subcommand::Setup { profile: ref path } => ( - Kind::Setup, - path.as_ref().map_or([].as_slice(), |path| std::slice::from_ref(path)), - ), - }; - - Self::new_internal(build, kind, paths.to_owned()) - } - - pub fn execute_cli(&self) { - self.run_step_descriptions(&Builder::get_step_descriptions(self.kind), &self.paths); - } - - pub fn default_doc(&self, paths: &[PathBuf]) { - self.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), paths); - } - - pub fn doc_rust_lang_org_channel(&self) -> String { - let channel = match &*self.config.channel { - "stable" => &self.version, - "beta" => "beta", - "nightly" | "dev" => "nightly", - // custom build of rustdoc maybe? link to the latest stable docs just in case - _ => "stable", - }; - "https://doc.rust-lang.org/".to_owned() + channel - } - - fn run_step_descriptions(&self, v: &[StepDescription], paths: &[PathBuf]) { - StepDescription::run(v, self, paths); - } - - /// Obtain a compiler at a given stage and for a given host. Explicitly does - /// not take `Compiler` since all `Compiler` instances are meant to be - /// obtained through this function, since it ensures that they are valid - /// (i.e., built and assembled). - pub fn compiler(&self, stage: u32, host: TargetSelection) -> Compiler { - self.ensure(compile::Assemble { target_compiler: Compiler { stage, host } }) - } - - /// Similar to `compiler`, except handles the full-bootstrap option to - /// silently use the stage1 compiler instead of a stage2 compiler if one is - /// requested. - /// - /// Note that this does *not* have the side effect of creating - /// `compiler(stage, host)`, unlike `compiler` above which does have such - /// a side effect. The returned compiler here can only be used to compile - /// new artifacts, it can't be used to rely on the presence of a particular - /// sysroot. - /// - /// See `force_use_stage1` and `force_use_stage2` for documentation on what each argument is. - pub fn compiler_for( - &self, - stage: u32, - host: TargetSelection, - target: TargetSelection, - ) -> Compiler { - if self.build.force_use_stage2(stage) { - self.compiler(2, self.config.build) - } else if self.build.force_use_stage1(stage, target) { - self.compiler(1, self.config.build) - } else { - self.compiler(stage, host) - } - } - - pub fn sysroot(&self, compiler: Compiler) -> Interned { - self.ensure(compile::Sysroot::new(compiler)) - } - - /// Returns the libdir where the standard library and other artifacts are - /// found for a compiler's sysroot. - pub fn sysroot_libdir(&self, compiler: Compiler, target: TargetSelection) -> Interned { - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] - struct Libdir { - compiler: Compiler, - target: TargetSelection, - } - impl Step for Libdir { - type Output = Interned; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() - } - - fn run(self, builder: &Builder<'_>) -> Interned { - let lib = builder.sysroot_libdir_relative(self.compiler); - let sysroot = builder - .sysroot(self.compiler) - .join(lib) - .join("rustlib") - .join(self.target.triple) - .join("lib"); - // Avoid deleting the rustlib/ directory we just copied - // (in `impl Step for Sysroot`). - if !builder.download_rustc() { - builder.verbose(&format!( - "Removing sysroot {} to avoid caching bugs", - sysroot.display() - )); - let _ = fs::remove_dir_all(&sysroot); - t!(fs::create_dir_all(&sysroot)); - } - - if self.compiler.stage == 0 { - // The stage 0 compiler for the build triple is always pre-built. 
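The `Libdir` step above assembles the per-target library directory as `<sysroot>/<libdir>/rustlib/<triple>/lib`. Distilled into a stand-alone path computation (hypothetical helper and example values, purely illustrative):

use std::path::{Path, PathBuf};

fn sysroot_libdir(sysroot: &Path, libdir: &str, triple: &str) -> PathBuf {
    sysroot.join(libdir).join("rustlib").join(triple).join("lib")
}

fn main() {
    let p = sysroot_libdir(Path::new("/opt/rust-sysroot"), "lib", "x86_64-unknown-linux-gnu");
    assert_eq!(p, PathBuf::from("/opt/rust-sysroot/lib/rustlib/x86_64-unknown-linux-gnu/lib"));
}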
- // Ensure that `libLLVM.so` ends up in the target libdir, so that ui-fulldeps tests can use it when run. - dist::maybe_install_llvm_target( - builder, - self.compiler.host, - &builder.sysroot(self.compiler), - ); - } - - INTERNER.intern_path(sysroot) - } - } - self.ensure(Libdir { compiler, target }) - } - - pub fn sysroot_codegen_backends(&self, compiler: Compiler) -> PathBuf { - self.sysroot_libdir(compiler, compiler.host).with_file_name("codegen-backends") - } - - /// Returns the compiler's libdir where it stores the dynamic libraries that - /// it itself links against. - /// - /// For example this returns `/lib` on Unix and `/bin` on - /// Windows. - pub fn rustc_libdir(&self, compiler: Compiler) -> PathBuf { - if compiler.is_snapshot(self) { - self.rustc_snapshot_libdir() - } else { - match self.config.libdir_relative() { - Some(relative_libdir) if compiler.stage >= 1 => { - self.sysroot(compiler).join(relative_libdir) - } - _ => self.sysroot(compiler).join(libdir(compiler.host)), - } - } - } - - /// Returns the compiler's relative libdir where it stores the dynamic libraries that - /// it itself links against. - /// - /// For example this returns `lib` on Unix and `bin` on - /// Windows. - pub fn libdir_relative(&self, compiler: Compiler) -> &Path { - if compiler.is_snapshot(self) { - libdir(self.config.build).as_ref() - } else { - match self.config.libdir_relative() { - Some(relative_libdir) if compiler.stage >= 1 => relative_libdir, - _ => libdir(compiler.host).as_ref(), - } - } - } - - /// Returns the compiler's relative libdir where the standard library and other artifacts are - /// found for a compiler's sysroot. - /// - /// For example this returns `lib` on Unix and Windows. - pub fn sysroot_libdir_relative(&self, compiler: Compiler) -> &Path { - match self.config.libdir_relative() { - Some(relative_libdir) if compiler.stage >= 1 => relative_libdir, - _ if compiler.stage == 0 => &self.build.initial_libdir, - _ => Path::new("lib"), - } - } - - pub fn rustc_lib_paths(&self, compiler: Compiler) -> Vec { - let mut dylib_dirs = vec![self.rustc_libdir(compiler)]; - - // Ensure that the downloaded LLVM libraries can be found. - if self.config.llvm_from_ci { - let ci_llvm_lib = self.out.join(&*compiler.host.triple).join("ci-llvm").join("lib"); - dylib_dirs.push(ci_llvm_lib); - } - - dylib_dirs - } - - /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic - /// library lookup path. - pub fn add_rustc_lib_path(&self, compiler: Compiler, cmd: &mut Command) { - // Windows doesn't need dylib path munging because the dlls for the - // compiler live next to the compiler and the system will find them - // automatically. - if cfg!(windows) { - return; - } - - add_dylib_path(self.rustc_lib_paths(compiler), cmd); - } - - /// Gets a path to the compiler specified. - pub fn rustc(&self, compiler: Compiler) -> PathBuf { - if compiler.is_snapshot(self) { - self.initial_rustc.clone() - } else { - self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host)) - } - } - - /// Gets the paths to all of the compiler's codegen backends. 
- fn codegen_backends(&self, compiler: Compiler) -> impl Iterator { - fs::read_dir(self.sysroot_codegen_backends(compiler)) - .into_iter() - .flatten() - .filter_map(Result::ok) - .map(|entry| entry.path()) - } - - pub fn rustdoc(&self, compiler: Compiler) -> PathBuf { - self.ensure(tool::Rustdoc { compiler }) - } - - pub fn rustdoc_cmd(&self, compiler: Compiler) -> Command { - let mut cmd = Command::new(&self.bootstrap_out.join("rustdoc")); - cmd.env("RUSTC_STAGE", compiler.stage.to_string()) - .env("RUSTC_SYSROOT", self.sysroot(compiler)) - // Note that this is *not* the sysroot_libdir because rustdoc must be linked - // equivalently to rustc. - .env("RUSTDOC_LIBDIR", self.rustc_libdir(compiler)) - .env("CFG_RELEASE_CHANNEL", &self.config.channel) - .env("RUSTDOC_REAL", self.rustdoc(compiler)) - .env("RUSTC_BOOTSTRAP", "1"); - - cmd.arg("-Wrustdoc::invalid_codeblock_attributes"); - - if self.config.deny_warnings { - cmd.arg("-Dwarnings"); - } - cmd.arg("-Znormalize-docs"); - - // Remove make-related flags that can cause jobserver problems. - cmd.env_remove("MAKEFLAGS"); - cmd.env_remove("MFLAGS"); - - if let Some(linker) = self.linker(compiler.host) { - cmd.env("RUSTDOC_LINKER", linker); - } - if self.is_fuse_ld_lld(compiler.host) { - cmd.env("RUSTDOC_FUSE_LD_LLD", "1"); - } - cmd - } - - /// Return the path to `llvm-config` for the target, if it exists. - /// - /// Note that this returns `None` if LLVM is disabled, or if we're in a - /// check build or dry-run, where there's no need to build all of LLVM. - fn llvm_config(&self, target: TargetSelection) -> Option { - if self.config.llvm_enabled() && self.kind != Kind::Check && !self.config.dry_run() { - let llvm::LlvmResult { llvm_config, .. } = self.ensure(llvm::Llvm { target }); - if llvm_config.is_file() { - return Some(llvm_config); - } - } - None - } - - /// Like `cargo`, but only passes flags that are valid for all commands. - pub fn bare_cargo( - &self, - compiler: Compiler, - mode: Mode, - target: TargetSelection, - cmd: &str, - ) -> Command { - let mut cargo = Command::new(&self.initial_cargo); - // Run cargo from the source root so it can find .cargo/config. - // This matters when using vendoring and the working directory is outside the repository. - cargo.current_dir(&self.src); - - let out_dir = self.stage_out(compiler, mode); - cargo.env("CARGO_TARGET_DIR", &out_dir).arg(cmd); - - // Found with `rg "init_env_logger\("`. If anyone uses `init_env_logger` - // from out of tree it shouldn't matter, since x.py is only used for - // building in-tree. - let color_logs = ["RUSTDOC_LOG_COLOR", "RUSTC_LOG_COLOR", "RUST_LOG_COLOR"]; - match self.build.config.color { - Color::Always => { - cargo.arg("--color=always"); - for log in &color_logs { - cargo.env(log, "always"); - } - } - Color::Never => { - cargo.arg("--color=never"); - for log in &color_logs { - cargo.env(log, "never"); - } - } - Color::Auto => {} // nothing to do - } - - if cmd != "install" { - cargo.arg("--target").arg(target.rustc_target_arg()); - } else { - assert_eq!(target, compiler.host); - } - - if self.config.rust_optimize.is_release() { - // FIXME: cargo bench/install do not accept `--release` - if cmd != "bench" && cmd != "install" { - cargo.arg("--release"); - } - } - - // Remove make-related flags to ensure Cargo can correctly set things up - cargo.env_remove("MAKEFLAGS"); - cargo.env_remove("MFLAGS"); - - cargo - } - - /// Prepares an invocation of `cargo` to be run. - /// - /// This will create a `Command` that represents a pending execution of - /// Cargo. 
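`bare_cargo` above mostly shapes a `std::process::Command`: pin the target directory, pick a color mode, and scrub `make`'s jobserver variables so nested invocations do not inherit them. A minimal sketch of that shaping, with placeholder values rather than bootstrap's real configuration:

use std::process::Command;

fn plain_cargo(cmd: &str, target_dir: &str, color: Option<&str>) -> Command {
    let mut cargo = Command::new("cargo");
    // Keep all build output under one directory, like CARGO_TARGET_DIR above.
    cargo.env("CARGO_TARGET_DIR", target_dir).arg(cmd);
    if let Some(color) = color {
        cargo.arg(format!("--color={color}"));
    }
    // An outer `make` exports jobserver state through these; drop it.
    cargo.env_remove("MAKEFLAGS");
    cargo.env_remove("MFLAGS");
    cargo
}

fn main() {
    println!("{:?}", plain_cargo("build", "/tmp/target", Some("never")));
}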
This cargo will be configured to use `compiler` as the actual - /// rustc compiler, its output will be scoped by `mode`'s output directory, - /// it will pass the `--target` flag for the specified `target`, and will be - /// executing the Cargo command `cmd`. - pub fn cargo( - &self, - compiler: Compiler, - mode: Mode, - source_type: SourceType, - target: TargetSelection, - cmd: &str, - ) -> Cargo { - let mut cargo = self.bare_cargo(compiler, mode, target, cmd); - let out_dir = self.stage_out(compiler, mode); - - // Codegen backends are not yet tracked by -Zbinary-dep-depinfo, - // so we need to explicitly clear out if they've been updated. - for backend in self.codegen_backends(compiler) { - self.clear_if_dirty(&out_dir, &backend); - } - - if cmd == "doc" || cmd == "rustdoc" { - let my_out = match mode { - // This is the intended out directory for compiler documentation. - Mode::Rustc | Mode::ToolRustc => self.compiler_doc_out(target), - Mode::Std => { - if self.config.cmd.json() { - out_dir.join(target.triple).join("json-doc") - } else { - out_dir.join(target.triple).join("doc") - } - } - _ => panic!("doc mode {mode:?} not expected"), - }; - let rustdoc = self.rustdoc(compiler); - self.clear_if_dirty(&my_out, &rustdoc); - } - - let profile_var = |name: &str| { - let profile = if self.config.rust_optimize.is_release() { "RELEASE" } else { "DEV" }; - format!("CARGO_PROFILE_{}_{}", profile, name) - }; - - // See comment in rustc_llvm/build.rs for why this is necessary, largely llvm-config - // needs to not accidentally link to libLLVM in stage0/lib. - cargo.env("REAL_LIBRARY_PATH_VAR", &util::dylib_path_var()); - if let Some(e) = env::var_os(util::dylib_path_var()) { - cargo.env("REAL_LIBRARY_PATH", e); - } - - // Set a flag for `check`/`clippy`/`fix`, so that certain build - // scripts can do less work (i.e. not building/requiring LLVM). - if cmd == "check" || cmd == "clippy" || cmd == "fix" { - // If we've not yet built LLVM, or it's stale, then bust - // the rustc_llvm cache. That will always work, even though it - // may mean that on the next non-check build we'll need to rebuild - // rustc_llvm. But if LLVM is stale, that'll be a tiny amount - // of work comparatively, and we'd likely need to rebuild it anyway, - // so that's okay. - if crate::llvm::prebuilt_llvm_config(self, target).is_err() { - cargo.env("RUST_CHECK", "1"); - } - } - - let stage = if compiler.stage == 0 && self.local_rebuild { - // Assume the local-rebuild rustc already has stage1 features. - 1 - } else { - compiler.stage - }; - - let mut rustflags = Rustflags::new(target); - if stage != 0 { - if let Ok(s) = env::var("CARGOFLAGS_NOT_BOOTSTRAP") { - cargo.args(s.split_whitespace()); - } - rustflags.env("RUSTFLAGS_NOT_BOOTSTRAP"); - } else { - if let Ok(s) = env::var("CARGOFLAGS_BOOTSTRAP") { - cargo.args(s.split_whitespace()); - } - rustflags.env("RUSTFLAGS_BOOTSTRAP"); - if cmd == "clippy" { - // clippy overwrites sysroot if we pass it to cargo. - // Pass it directly to clippy instead. - // NOTE: this can't be fixed in clippy because we explicitly don't set `RUSTC`, - // so it has no way of knowing the sysroot. - rustflags.arg("--sysroot"); - rustflags.arg( - self.sysroot(compiler) - .as_os_str() - .to_str() - .expect("sysroot must be valid UTF-8"), - ); - // Only run clippy on a very limited subset of crates (in particular, not build scripts). - cargo.arg("-Zunstable-options"); - // Explicitly does *not* set `--cfg=bootstrap`, since we're using a nightly clippy. 
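The `profile_var` closure above relies on Cargo reading per-profile overrides from `CARGO_PROFILE_<PROFILE>_<KEY>` environment variables. A tiny standalone version of the same naming scheme:

fn profile_var(release: bool, name: &str) -> String {
    // Cargo reads e.g. CARGO_PROFILE_RELEASE_DEBUG as a profile override.
    let profile = if release { "RELEASE" } else { "DEV" };
    format!("CARGO_PROFILE_{profile}_{name}")
}

fn main() {
    assert_eq!(profile_var(true, "DEBUG"), "CARGO_PROFILE_RELEASE_DEBUG");
    assert_eq!(profile_var(false, "OPT_LEVEL"), "CARGO_PROFILE_DEV_OPT_LEVEL");
}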
- let host_version = Command::new("rustc").arg("--version").output().map_err(|_| ()); - let output = host_version.and_then(|output| { - if output.status.success() { - Ok(output) - } else { - Err(()) - } - }).unwrap_or_else(|_| { - eprintln!( - "error: `x.py clippy` requires a host `rustc` toolchain with the `clippy` component" - ); - eprintln!("help: try `rustup component add clippy`"); - crate::exit!(1); - }); - if !t!(std::str::from_utf8(&output.stdout)).contains("nightly") { - rustflags.arg("--cfg=bootstrap"); - } - } else { - rustflags.arg("--cfg=bootstrap"); - } - } - - let use_new_symbol_mangling = match self.config.rust_new_symbol_mangling { - Some(setting) => { - // If an explicit setting is given, use that - setting - } - None => { - if mode == Mode::Std { - // The standard library defaults to the legacy scheme - false - } else { - // The compiler and tools default to the new scheme - true - } - } - }; - - // By default, windows-rs depends on a native library that doesn't get copied into the - // sysroot. Passing this cfg enables raw-dylib support instead, which makes the native - // library unnecessary. This can be removed when windows-rs enables raw-dylib - // unconditionally. - if let Mode::Rustc | Mode::ToolRustc = mode { - rustflags.arg("--cfg=windows_raw_dylib"); - } - - if use_new_symbol_mangling { - rustflags.arg("-Csymbol-mangling-version=v0"); - } else { - rustflags.arg("-Csymbol-mangling-version=legacy"); - rustflags.arg("-Zunstable-options"); - } - - // Enable cfg checking of cargo features for everything but std and also enable cfg - // checking of names and values. - // - // Note: `std`, `alloc` and `core` imports some dependencies by #[path] (like - // backtrace, core_simd, std_float, ...), those dependencies have their own - // features but cargo isn't involved in the #[path] process and so cannot pass the - // complete list of features, so for that reason we don't enable checking of - // features for std crates. - cargo.arg(if mode != Mode::Std { - "-Zcheck-cfg=names,values,output,features" - } else { - "-Zcheck-cfg=names,values,output" - }); - - // Add extra cfg not defined in/by rustc - // - // Note: Although it would seems that "-Zunstable-options" to `rustflags` is useless as - // cargo would implicitly add it, it was discover that sometimes bootstrap only use - // `rustflags` without `cargo` making it required. - rustflags.arg("-Zunstable-options"); - for (restricted_mode, name, values) in EXTRA_CHECK_CFGS { - if *restricted_mode == None || *restricted_mode == Some(mode) { - // Creating a string of the values by concatenating each value: - // ',"tvos","watchos"' or '' (nothing) when there are no values - let values = match values { - Some(values) => values - .iter() - .map(|val| [",", "\"", val, "\""]) - .flatten() - .collect::(), - None => String::new(), - }; - rustflags.arg(&format!("--check-cfg=values({name}{values})")); - } - } - - // FIXME: It might be better to use the same value for both `RUSTFLAGS` and `RUSTDOCFLAGS`, - // but this breaks CI. At the very least, stage0 `rustdoc` needs `--cfg bootstrap`. See - // #71458. 
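The loop over `EXTRA_CHECK_CFGS` above assembles one `--check-cfg=values(...)` argument per cfg name, appending each expected value as `,"value"`. A self-contained sketch of that string building (the cfg names used here are only examples):

fn check_cfg_arg(name: &str, values: Option<&[&str]>) -> String {
    // Each expected value becomes `,"value"`; no suffix when the cfg is valueless.
    let values: String = match values {
        Some(values) => values.iter().map(|v| format!(",\"{v}\"")).collect(),
        None => String::new(),
    };
    format!("--check-cfg=values({name}{values})")
}

fn main() {
    assert_eq!(check_cfg_arg("bootstrap", None), "--check-cfg=values(bootstrap)");
    assert_eq!(
        check_cfg_arg("target_os", Some(&["tvos", "watchos"])),
        r#"--check-cfg=values(target_os,"tvos","watchos")"#
    );
}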
- let mut rustdocflags = rustflags.clone(); - rustdocflags.propagate_cargo_env("RUSTDOCFLAGS"); - if stage == 0 { - rustdocflags.env("RUSTDOCFLAGS_BOOTSTRAP"); - } else { - rustdocflags.env("RUSTDOCFLAGS_NOT_BOOTSTRAP"); - } - - if let Ok(s) = env::var("CARGOFLAGS") { - cargo.args(s.split_whitespace()); - } - - match mode { - Mode::Std | Mode::ToolBootstrap | Mode::ToolStd => {} - Mode::Rustc | Mode::Codegen | Mode::ToolRustc => { - // Build proc macros both for the host and the target - if target != compiler.host && cmd != "check" { - cargo.arg("-Zdual-proc-macros"); - rustflags.arg("-Zdual-proc-macros"); - } - } - } - - // This tells Cargo (and in turn, rustc) to output more complete - // dependency information. Most importantly for rustbuild, this - // includes sysroot artifacts, like libstd, which means that we don't - // need to track those in rustbuild (an error prone process!). This - // feature is currently unstable as there may be some bugs and such, but - // it represents a big improvement in rustbuild's reliability on - // rebuilds, so we're using it here. - // - // For some additional context, see #63470 (the PR originally adding - // this), as well as #63012 which is the tracking issue for this - // feature on the rustc side. - cargo.arg("-Zbinary-dep-depinfo"); - let allow_features = match mode { - Mode::ToolBootstrap | Mode::ToolStd => { - // Restrict the allowed features so we don't depend on nightly - // accidentally. - // - // binary-dep-depinfo is used by rustbuild itself for all - // compilations. - // - // Lots of tools depend on proc_macro2 and proc-macro-error. - // Those have build scripts which assume nightly features are - // available if the `rustc` version is "nighty" or "dev". See - // bin/rustc.rs for why that is a problem. Instead of labeling - // those features for each individual tool that needs them, - // just blanket allow them here. - // - // If this is ever removed, be sure to add something else in - // its place to keep the restrictions in place (or make a way - // to unset RUSTC_BOOTSTRAP). - "binary-dep-depinfo,proc_macro_span,proc_macro_span_shrink,proc_macro_diagnostic" - .to_string() - } - Mode::Std | Mode::Rustc | Mode::Codegen | Mode::ToolRustc => String::new(), - }; - - cargo.arg("-j").arg(self.jobs().to_string()); - - // FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005 - // Force cargo to output binaries with disambiguating hashes in the name - let mut metadata = if compiler.stage == 0 { - // Treat stage0 like a special channel, whether it's a normal prior- - // release rustc or a local rebuild with the same version, so we - // never mix these libraries by accident. - "bootstrap".to_string() - } else { - self.config.channel.to_string() - }; - // We want to make sure that none of the dependencies between - // std/test/rustc unify with one another. This is done for weird linkage - // reasons but the gist of the problem is that if librustc, libtest, and - // libstd all depend on libc from crates.io (which they actually do) we - // want to make sure they all get distinct versions. Things get really - // weird if we try to unify all these dependencies right now, namely - // around how many times the library is linked in dynamic libraries and - // such. If rustc were a static executable or if we didn't ship dylibs - // this wouldn't be a problem, but we do, so it is. This is in general - // just here to make sure things build right. If you can remove this and - // things still build right, please do! 
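The `__CARGO_DEFAULT_LIB_METADATA` value above acts as a salt so stage0 artifacts can never unify with artifacts built for the real release channel. A minimal sketch of that choice (channel names are illustrative):

fn default_lib_metadata(stage: u32, channel: &str) -> String {
    // Stage 0 gets its own pseudo-channel so its artifacts never mix with real ones.
    if stage == 0 { "bootstrap".to_string() } else { channel.to_string() }
}

fn main() {
    assert_eq!(default_lib_metadata(0, "nightly"), "bootstrap");
    assert_eq!(default_lib_metadata(1, "nightly"), "nightly");
}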
- match mode { - Mode::Std => metadata.push_str("std"), - // When we're building rustc tools, they're built with a search path - // that contains things built during the rustc build. For example, - // bitflags is built during the rustc build, and is a dependency of - // rustdoc as well. We're building rustdoc in a different target - // directory, though, which means that Cargo will rebuild the - // dependency. When we go on to build rustdoc, we'll look for - // bitflags, and find two different copies: one built during the - // rustc step and one that we just built. This isn't always a - // problem, somehow -- not really clear why -- but we know that this - // fixes things. - Mode::ToolRustc => metadata.push_str("tool-rustc"), - // Same for codegen backends. - Mode::Codegen => metadata.push_str("codegen"), - _ => {} - } - cargo.env("__CARGO_DEFAULT_LIB_METADATA", &metadata); - - if cmd == "clippy" { - rustflags.arg("-Zforce-unstable-if-unmarked"); - } - - rustflags.arg("-Zmacro-backtrace"); - - let want_rustdoc = self.doc_tests != DocTests::No; - - // We synthetically interpret a stage0 compiler used to build tools as a - // "raw" compiler in that it's the exact snapshot we download. Normally - // the stage0 build means it uses libraries build by the stage0 - // compiler, but for tools we just use the precompiled libraries that - // we've downloaded - let use_snapshot = mode == Mode::ToolBootstrap; - assert!(!use_snapshot || stage == 0 || self.local_rebuild); - - let maybe_sysroot = self.sysroot(compiler); - let sysroot = if use_snapshot { self.rustc_snapshot_sysroot() } else { &maybe_sysroot }; - let libdir = self.rustc_libdir(compiler); - - // Clear the output directory if the real rustc we're using has changed; - // Cargo cannot detect this as it thinks rustc is bootstrap/debug/rustc. - // - // Avoid doing this during dry run as that usually means the relevant - // compiler is not yet linked/copied properly. - // - // Only clear out the directory if we're compiling std; otherwise, we - // should let Cargo take care of things for us (via depdep info) - if !self.config.dry_run() && mode == Mode::Std && cmd == "build" { - self.clear_if_dirty(&out_dir, &self.rustc(compiler)); - } - - // Customize the compiler we're running. Specify the compiler to cargo - // as our shim and then pass it some various options used to configure - // how the actual compiler itself is called. - // - // These variables are primarily all read by - // src/bootstrap/bin/{rustc.rs,rustdoc.rs} - cargo - .env("RUSTBUILD_NATIVE_DIR", self.native_dir(target)) - .env("RUSTC_REAL", self.rustc(compiler)) - .env("RUSTC_STAGE", stage.to_string()) - .env("RUSTC_SYSROOT", &sysroot) - .env("RUSTC_LIBDIR", &libdir) - .env("RUSTDOC", self.bootstrap_out.join("rustdoc")) - .env( - "RUSTDOC_REAL", - if cmd == "doc" || cmd == "rustdoc" || (cmd == "test" && want_rustdoc) { - self.rustdoc(compiler) - } else { - PathBuf::from("/path/to/nowhere/rustdoc/not/required") - }, - ) - .env("RUSTC_ERROR_METADATA_DST", self.extended_error_dir()) - .env("RUSTC_BREAK_ON_ICE", "1"); - // Clippy support is a hack and uses the default `cargo-clippy` in path. - // Don't override RUSTC so that the `cargo-clippy` in path will be run. - if cmd != "clippy" { - cargo.env("RUSTC", self.bootstrap_out.join("rustc")); - } - - // Dealing with rpath here is a little special, so let's go into some - // detail. First off, `-rpath` is a linker option on Unix platforms - // which adds to the runtime dynamic loader path when looking for - // dynamic libraries. 
We use this by default on Unix platforms to ensure - // that our nightlies behave the same on Windows, that is they work out - // of the box. This can be disabled by setting `rpath = false` in `[rust]` - // table of `config.toml` - // - // Ok, so the astute might be wondering "why isn't `-C rpath` used - // here?" and that is indeed a good question to ask. This codegen - // option is the compiler's current interface to generating an rpath. - // Unfortunately it doesn't quite suffice for us. The flag currently - // takes no value as an argument, so the compiler calculates what it - // should pass to the linker as `-rpath`. This unfortunately is based on - // the **compile time** directory structure which when building with - // Cargo will be very different than the runtime directory structure. - // - // All that's a really long winded way of saying that if we use - // `-Crpath` then the executables generated have the wrong rpath of - // something like `$ORIGIN/deps` when in fact the way we distribute - // rustc requires the rpath to be `$ORIGIN/../lib`. - // - // So, all in all, to set up the correct rpath we pass the linker - // argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it - // fun to pass a flag to a tool to pass a flag to pass a flag to a tool - // to change a flag in a binary? - if self.config.rpath_enabled(target) && util::use_host_linker(target) { - let libdir = self.sysroot_libdir_relative(compiler).to_str().unwrap(); - let rpath = if target.contains("apple") { - // Note that we need to take one extra step on macOS to also pass - // `-Wl,-instal_name,@rpath/...` to get things to work right. To - // do that we pass a weird flag to the compiler to get it to do - // so. Note that this is definitely a hack, and we should likely - // flesh out rpath support more fully in the future. 
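The rpath handling described above boils down to choosing a loader-relative run path per target family: `@loader_path` on Apple platforms, `$ORIGIN` on most other Unix-like targets, and nothing where rpaths do not apply. A standalone sketch of that decision, with `libdir` standing for the sysroot-relative library directory:

fn rpath_link_arg(target: &str, libdir: &str) -> Option<String> {
    if target.contains("apple") {
        // Mach-O binaries resolve @loader_path relative to the loading binary.
        Some(format!("-Wl,-rpath,@loader_path/../{libdir}"))
    } else if !target.contains("windows") && !target.contains("aix") {
        // ELF binaries resolve $ORIGIN relative to the executable itself.
        Some(format!("-Wl,-rpath,$ORIGIN/../{libdir}"))
    } else {
        None
    }
}

fn main() {
    assert_eq!(
        rpath_link_arg("x86_64-unknown-linux-gnu", "lib").as_deref(),
        Some("-Wl,-rpath,$ORIGIN/../lib")
    );
    assert_eq!(rpath_link_arg("x86_64-pc-windows-msvc", "lib"), None);
}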
- rustflags.arg("-Zosx-rpath-install-name"); - Some(format!("-Wl,-rpath,@loader_path/../{libdir}")) - } else if !target.contains("windows") - && !target.contains("aix") - && !target.contains("xous") - { - rustflags.arg("-Clink-args=-Wl,-z,origin"); - Some(format!("-Wl,-rpath,$ORIGIN/../{libdir}")) - } else { - None - }; - if let Some(rpath) = rpath { - rustflags.arg(&format!("-Clink-args={rpath}")); - } - } - - if let Some(host_linker) = self.linker(compiler.host) { - cargo.env("RUSTC_HOST_LINKER", host_linker); - } - if self.is_fuse_ld_lld(compiler.host) { - cargo.env("RUSTC_HOST_FUSE_LD_LLD", "1"); - cargo.env("RUSTDOC_FUSE_LD_LLD", "1"); - } - - if let Some(target_linker) = self.linker(target) { - let target = crate::envify(&target.triple); - cargo.env(&format!("CARGO_TARGET_{target}_LINKER"), target_linker); - } - if self.is_fuse_ld_lld(target) { - rustflags.arg("-Clink-args=-fuse-ld=lld"); - } - self.lld_flags(target).for_each(|flag| { - rustdocflags.arg(&flag); - }); - - if !(["build", "check", "clippy", "fix", "rustc"].contains(&cmd)) && want_rustdoc { - cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(compiler)); - } - - let debuginfo_level = match mode { - Mode::Rustc | Mode::Codegen => self.config.rust_debuginfo_level_rustc, - Mode::Std => self.config.rust_debuginfo_level_std, - Mode::ToolBootstrap | Mode::ToolStd | Mode::ToolRustc => { - self.config.rust_debuginfo_level_tools - } - }; - cargo.env(profile_var("DEBUG"), debuginfo_level.to_string()); - if let Some(opt_level) = &self.config.rust_optimize.get_opt_level() { - cargo.env(profile_var("OPT_LEVEL"), opt_level); - } - if !self.config.dry_run() && self.cc.borrow()[&target].args().iter().any(|arg| arg == "-gz") - { - rustflags.arg("-Clink-arg=-gz"); - } - cargo.env( - profile_var("DEBUG_ASSERTIONS"), - if mode == Mode::Std { - self.config.rust_debug_assertions_std.to_string() - } else { - self.config.rust_debug_assertions.to_string() - }, - ); - cargo.env( - profile_var("OVERFLOW_CHECKS"), - if mode == Mode::Std { - self.config.rust_overflow_checks_std.to_string() - } else { - self.config.rust_overflow_checks.to_string() - }, - ); - - let split_debuginfo_is_stable = target.contains("linux") - || target.contains("apple") - || (target.contains("msvc") - && self.config.rust_split_debuginfo == SplitDebuginfo::Packed) - || (target.contains("windows") - && self.config.rust_split_debuginfo == SplitDebuginfo::Off); - - if !split_debuginfo_is_stable { - rustflags.arg("-Zunstable-options"); - } - match self.config.rust_split_debuginfo { - SplitDebuginfo::Packed => rustflags.arg("-Csplit-debuginfo=packed"), - SplitDebuginfo::Unpacked => rustflags.arg("-Csplit-debuginfo=unpacked"), - SplitDebuginfo::Off => rustflags.arg("-Csplit-debuginfo=off"), - }; - - if self.config.cmd.bless() { - // Bless `expect!` tests. - cargo.env("UPDATE_EXPECT", "1"); - } - - if !mode.is_tool() { - cargo.env("RUSTC_FORCE_UNSTABLE", "1"); - } - - if let Some(x) = self.crt_static(target) { - if x { - rustflags.arg("-Ctarget-feature=+crt-static"); - } else { - rustflags.arg("-Ctarget-feature=-crt-static"); - } - } - - if let Some(x) = self.crt_static(compiler.host) { - cargo.env("RUSTC_HOST_CRT_STATIC", x.to_string()); - } - - if let Some(map_to) = self.build.debuginfo_map_to(GitRepo::Rustc) { - let map = format!("{}={}", self.build.src.display(), map_to); - cargo.env("RUSTC_DEBUGINFO_MAP", map); - - // `rustc` needs to know the virtual `/rustc/$hash` we're mapping to, - // in order to opportunistically reverse it later. 
- cargo.env("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR", map_to); - } - - // Enable usage of unstable features - cargo.env("RUSTC_BOOTSTRAP", "1"); - self.add_rust_test_threads(&mut cargo); - - // Almost all of the crates that we compile as part of the bootstrap may - // have a build script, including the standard library. To compile a - // build script, however, it itself needs a standard library! This - // introduces a bit of a pickle when we're compiling the standard - // library itself. - // - // To work around this we actually end up using the snapshot compiler - // (stage0) for compiling build scripts of the standard library itself. - // The stage0 compiler is guaranteed to have a libstd available for use. - // - // For other crates, however, we know that we've already got a standard - // library up and running, so we can use the normal compiler to compile - // build scripts in that situation. - if mode == Mode::Std { - cargo - .env("RUSTC_SNAPSHOT", &self.initial_rustc) - .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir()); - } else { - cargo - .env("RUSTC_SNAPSHOT", self.rustc(compiler)) - .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler)); - } - - // Tools that use compiler libraries may inherit the `-lLLVM` link - // requirement, but the `-L` library path is not propagated across - // separate Cargo projects. We can add LLVM's library path to the - // platform-specific environment variable as a workaround. - if mode == Mode::ToolRustc || mode == Mode::Codegen { - if let Some(llvm_config) = self.llvm_config(target) { - let llvm_libdir = output(Command::new(&llvm_config).arg("--libdir")); - add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cargo); - } - } - - // Compile everything except libraries and proc macros with the more - // efficient initial-exec TLS model. This doesn't work with `dlopen`, - // so we can't use it by default in general, but we can use it for tools - // and our own internal libraries. - if !mode.must_support_dlopen() && !target.triple.starts_with("powerpc-") { - cargo.env("RUSTC_TLS_MODEL_INITIAL_EXEC", "1"); - } - - // Ignore incremental modes except for stage0, since we're - // not guaranteeing correctness across builds if the compiler - // is changing under your feet. - if self.config.incremental && compiler.stage == 0 { - cargo.env("CARGO_INCREMENTAL", "1"); - } else { - // Don't rely on any default setting for incr. comp. in Cargo - cargo.env("CARGO_INCREMENTAL", "0"); - } - - if let Some(ref on_fail) = self.config.on_fail { - cargo.env("RUSTC_ON_FAIL", on_fail); - } - - if self.config.print_step_timings { - cargo.env("RUSTC_PRINT_STEP_TIMINGS", "1"); - } - - if self.config.print_step_rusage { - cargo.env("RUSTC_PRINT_STEP_RUSAGE", "1"); - } - - if self.config.backtrace_on_ice { - cargo.env("RUSTC_BACKTRACE_ON_ICE", "1"); - } - - cargo.env("RUSTC_VERBOSE", self.verbosity.to_string()); - - // Downstream forks of the Rust compiler might want to use a custom libc to add support for - // targets that are not yet available upstream. Adding a patch to replace libc with a - // custom one would cause compilation errors though, because Cargo would interpret the - // custom libc as part of the workspace, and apply the check-cfg lints on it. - // - // The libc build script emits check-cfg flags only when this environment variable is set, - // so this line allows the use of custom libcs. 
- cargo.env("LIBC_CHECK_CFG", "1"); - - if source_type == SourceType::InTree { - let mut lint_flags = Vec::new(); - // When extending this list, add the new lints to the RUSTFLAGS of the - // build_bootstrap function of src/bootstrap/bootstrap.py as well as - // some code doesn't go through this `rustc` wrapper. - lint_flags.push("-Wrust_2018_idioms"); - lint_flags.push("-Wunused_lifetimes"); - lint_flags.push("-Wsemicolon_in_expressions_from_macros"); - - if self.config.deny_warnings { - lint_flags.push("-Dwarnings"); - rustdocflags.arg("-Dwarnings"); - } - - // This does not use RUSTFLAGS due to caching issues with Cargo. - // Clippy is treated as an "in tree" tool, but shares the same - // cache as other "submodule" tools. With these options set in - // RUSTFLAGS, that causes *every* shared dependency to be rebuilt. - // By injecting this into the rustc wrapper, this circumvents - // Cargo's fingerprint detection. This is fine because lint flags - // are always ignored in dependencies. Eventually this should be - // fixed via better support from Cargo. - cargo.env("RUSTC_LINT_FLAGS", lint_flags.join(" ")); - - rustdocflags.arg("-Wrustdoc::invalid_codeblock_attributes"); - } - - if mode == Mode::Rustc { - rustflags.arg("-Zunstable-options"); - rustflags.arg("-Wrustc::internal"); - } - - // Throughout the build Cargo can execute a number of build scripts - // compiling C/C++ code and we need to pass compilers, archivers, flags, etc - // obtained previously to those build scripts. - // Build scripts use either the `cc` crate or `configure/make` so we pass - // the options through environment variables that are fetched and understood by both. - // - // FIXME: the guard against msvc shouldn't need to be here - if target.contains("msvc") { - if let Some(ref cl) = self.config.llvm_clang_cl { - cargo.env("CC", cl).env("CXX", cl); - } - } else { - let ccache = self.config.ccache.as_ref(); - let ccacheify = |s: &Path| { - let ccache = match ccache { - Some(ref s) => s, - None => return s.display().to_string(), - }; - // FIXME: the cc-rs crate only recognizes the literal strings - // `ccache` and `sccache` when doing caching compilations, so we - // mirror that here. It should probably be fixed upstream to - // accept a new env var or otherwise work with custom ccache - // vars. - match &ccache[..] { - "ccache" | "sccache" => format!("{} {}", ccache, s.display()), - _ => s.display().to_string(), - } - }; - let triple_underscored = target.triple.replace("-", "_"); - let cc = ccacheify(&self.cc(target)); - cargo.env(format!("CC_{triple_underscored}"), &cc); - - let cflags = self.cflags(target, GitRepo::Rustc, CLang::C).join(" "); - cargo.env(format!("CFLAGS_{triple_underscored}"), &cflags); - - if let Some(ar) = self.ar(target) { - let ranlib = format!("{} s", ar.display()); - cargo - .env(format!("AR_{triple_underscored}"), ar) - .env(format!("RANLIB_{triple_underscored}"), ranlib); - } - - if let Ok(cxx) = self.cxx(target) { - let cxx = ccacheify(&cxx); - let cxxflags = self.cflags(target, GitRepo::Rustc, CLang::Cxx).join(" "); - cargo - .env(format!("CXX_{triple_underscored}"), &cxx) - .env(format!("CXXFLAGS_{triple_underscored}"), cxxflags); - } - } - - // If Control Flow Guard is enabled, pass the `control-flow-guard` flag to rustc - // when compiling the standard library, since this might be linked into the final outputs - // produced by rustc. Since this mitigation is only available on Windows, only enable it - // for the standard library in case the compiler is run on a non-Windows platform. 
- // This is not needed for stage 0 artifacts because these will only be used for building - // the stage 1 compiler. - if cfg!(windows) - && mode == Mode::Std - && self.config.control_flow_guard - && compiler.stage >= 1 - { - rustflags.arg("-Ccontrol-flow-guard"); - } - - // For `cargo doc` invocations, make rustdoc print the Rust version into the docs - // This replaces spaces with tabs because RUSTDOCFLAGS does not - // support arguments with regular spaces. Hopefully someday Cargo will - // have space support. - let rust_version = self.rust_version().replace(' ', "\t"); - rustdocflags.arg("--crate-version").arg(&rust_version); - - // Environment variables *required* throughout the build - // - // FIXME: should update code to not require this env var - cargo.env("CFG_COMPILER_HOST_TRIPLE", target.triple); - - // Set this for all builds to make sure doc builds also get it. - cargo.env("CFG_RELEASE_CHANNEL", &self.config.channel); - - // This one's a bit tricky. As of the time of this writing the compiler - // links to the `winapi` crate on crates.io. This crate provides raw - // bindings to Windows system functions, sort of like libc does for - // Unix. This crate also, however, provides "import libraries" for the - // MinGW targets. There's an import library per dll in the windows - // distribution which is what's linked to. These custom import libraries - // are used because the winapi crate can reference Windows functions not - // present in the MinGW import libraries. - // - // For example MinGW may ship libdbghelp.a, but it may not have - // references to all the functions in the dbghelp dll. Instead the - // custom import library for dbghelp in the winapi crates has all this - // information. - // - // Unfortunately for us though the import libraries are linked by - // default via `-ldylib=winapi_foo`. That is, they're linked with the - // `dylib` type with a `winapi_` prefix (so the winapi ones don't - // conflict with the system MinGW ones). This consequently means that - // the binaries we ship of things like rustc_codegen_llvm (aka the rustc_codegen_llvm - // DLL) when linked against *again*, for example with procedural macros - // or plugins, will trigger the propagation logic of `-ldylib`, passing - // `-lwinapi_foo` to the linker again. This isn't actually available in - // our distribution, however, so the link fails. - // - // To solve this problem we tell winapi to not use its bundled import - // libraries. This means that it will link to the system MinGW import - // libraries by default, and the `-ldylib=foo` directives will still get - // passed to the final linker, but they'll look like `-lfoo` which can - // be resolved because MinGW has the import library. The downside is we - // don't get newer functions from Windows, but we don't use any of them - // anyway. - if !mode.is_tool() { - cargo.env("WINAPI_NO_BUNDLED_LIBRARIES", "1"); - } - - for _ in 0..self.verbosity { - cargo.arg("-v"); - } - - match (mode, self.config.rust_codegen_units_std, self.config.rust_codegen_units) { - (Mode::Std, Some(n), _) | (_, _, Some(n)) => { - cargo.env(profile_var("CODEGEN_UNITS"), n.to_string()); - } - _ => { - // Don't set anything - } - } - - if self.config.locked_deps { - cargo.arg("--locked"); - } - if self.config.vendor || self.is_sudo { - cargo.arg("--frozen"); - } - - // Try to use a sysroot-relative bindir, in case it was configured absolutely. 
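The tail end of `cargo()` above is mostly argument plumbing: verbosity turns into repeated `-v` flags, and locked or vendored builds get `--locked` and `--frozen`. A compact sketch of that plumbing with made-up inputs:

use std::process::Command;

fn finish_cargo(mut cargo: Command, verbosity: usize, locked: bool, vendored: bool) -> Command {
    for _ in 0..verbosity {
        cargo.arg("-v"); // each level adds one more -v, as above
    }
    if locked {
        cargo.arg("--locked"); // refuse to update Cargo.lock
    }
    if vendored {
        cargo.arg("--frozen"); // additionally refuse to touch the network
    }
    cargo
}

fn main() {
    println!("{:?}", finish_cargo(Command::new("cargo"), 2, true, false));
}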
- cargo.env("RUSTC_INSTALL_BINDIR", self.config.bindir_relative()); - - self.ci_env.force_coloring_in_ci(&mut cargo); - - // When we build Rust dylibs they're all intended for intermediate - // usage, so make sure we pass the -Cprefer-dynamic flag instead of - // linking all deps statically into the dylib. - if matches!(mode, Mode::Std | Mode::Rustc) { - rustflags.arg("-Cprefer-dynamic"); - } - - // When building incrementally we default to a lower ThinLTO import limit - // (unless explicitly specified otherwise). This will produce a somewhat - // slower code but give way better compile times. - { - let limit = match self.config.rust_thin_lto_import_instr_limit { - Some(limit) => Some(limit), - None if self.config.incremental => Some(10), - _ => None, - }; - - if let Some(limit) = limit { - if stage == 0 || self.config.default_codegen_backend().unwrap_or_default() == "llvm" - { - rustflags.arg(&format!("-Cllvm-args=-import-instr-limit={limit}")); - } - } - } - - if matches!(mode, Mode::Std) { - if let Some(mir_opt_level) = self.config.rust_validate_mir_opts { - rustflags.arg("-Zvalidate-mir"); - rustflags.arg(&format!("-Zmir-opt-level={mir_opt_level}")); - } - // Always enable inlining MIR when building the standard library. - // Without this flag, MIR inlining is disabled when incremental compilation is enabled. - // That causes some mir-opt tests which inline functions from the standard library to - // break when incremental compilation is enabled. So this overrides the "no inlining - // during incremental builds" heuristic for the standard library. - rustflags.arg("-Zinline-mir"); - } - - // set rustc args passed from command line - let rustc_args = - self.config.cmd.rustc_args().iter().map(|s| s.to_string()).collect::>(); - if !rustc_args.is_empty() { - cargo.env("RUSTFLAGS", &rustc_args.join(" ")); - } - - Cargo { command: cargo, rustflags, rustdocflags, allow_features } - } - - /// Ensure that a given step is built, returning its output. This will - /// cache the step, so it is safe (and good!) to call this as often as - /// needed to ensure that all dependencies are built. 
- pub fn ensure(&'a self, step: S) -> S::Output { - { - let mut stack = self.stack.borrow_mut(); - for stack_step in stack.iter() { - // should skip - if stack_step.downcast_ref::().map_or(true, |stack_step| *stack_step != step) { - continue; - } - let mut out = String::new(); - out += &format!("\n\nCycle in build detected when adding {step:?}\n"); - for el in stack.iter().rev() { - out += &format!("\t{el:?}\n"); - } - panic!("{}", out); - } - if let Some(out) = self.cache.get(&step) { - self.verbose_than(1, &format!("{}c {:?}", " ".repeat(stack.len()), step)); - - return out; - } - self.verbose_than(1, &format!("{}> {:?}", " ".repeat(stack.len()), step)); - stack.push(Box::new(step.clone())); - } - - #[cfg(feature = "build-metrics")] - self.metrics.enter_step(&step, self); - - let (out, dur) = { - let start = Instant::now(); - let zero = Duration::new(0, 0); - let parent = self.time_spent_on_dependencies.replace(zero); - let out = step.clone().run(self); - let dur = start.elapsed(); - let deps = self.time_spent_on_dependencies.replace(parent + dur); - (out, dur - deps) - }; - - if self.config.print_step_timings && !self.config.dry_run() { - let step_string = format!("{step:?}"); - let brace_index = step_string.find("{").unwrap_or(0); - let type_string = type_name::(); - println!( - "[TIMING] {} {} -- {}.{:03}", - &type_string.strip_prefix("bootstrap::").unwrap_or(type_string), - &step_string[brace_index..], - dur.as_secs(), - dur.subsec_millis() - ); - } - - #[cfg(feature = "build-metrics")] - self.metrics.exit_step(self); - - { - let mut stack = self.stack.borrow_mut(); - let cur_step = stack.pop().expect("step stack empty"); - assert_eq!(cur_step.downcast_ref(), Some(&step)); - } - self.verbose_than(1, &format!("{}< {:?}", " ".repeat(self.stack.borrow().len()), step)); - self.cache.put(step, out.clone()); - out - } - - /// Ensure that a given step is built *only if it's supposed to be built by default*, returning - /// its output. This will cache the step, so it's safe (and good!) to call this as often as - /// needed to ensure that all dependencies are build. - pub(crate) fn ensure_if_default>>( - &'a self, - step: S, - kind: Kind, - ) -> S::Output { - let desc = StepDescription::from::(kind); - let should_run = (desc.should_run)(ShouldRun::new(self, desc.kind)); - - // Avoid running steps contained in --skip - for pathset in &should_run.paths { - if desc.is_excluded(self, pathset) { - return None; - } - } - - // Only execute if it's supposed to run as default - if desc.default && should_run.is_really_default() { self.ensure(step) } else { None } - } - - /// Checks if any of the "should_run" paths is in the `Builder` paths. 
- pub(crate) fn was_invoked_explicitly(&'a self, kind: Kind) -> bool { - let desc = StepDescription::from::(kind); - let should_run = (desc.should_run)(ShouldRun::new(self, desc.kind)); - - for path in &self.paths { - if should_run.paths.iter().any(|s| s.has(path, desc.kind)) - && !desc.is_excluded( - self, - &PathSet::Suite(TaskPath { path: path.clone(), kind: Some(desc.kind) }), - ) - { - return true; - } - } - - false - } - - pub(crate) fn maybe_open_in_browser(&self, path: impl AsRef) { - if self.was_invoked_explicitly::(Kind::Doc) { - self.open_in_browser(path); - } - } - - pub(crate) fn open_in_browser(&self, path: impl AsRef) { - if self.config.dry_run() || !self.config.cmd.open() { - return; - } - - let path = path.as_ref(); - self.info(&format!("Opening doc {}", path.display())); - if let Err(err) = opener::open(path) { - self.info(&format!("{err}\n")); - } - } -} - -#[cfg(test)] -mod tests; - -/// Represents flag values in `String` form with whitespace delimiter to pass it to the compiler later. -/// -/// `-Z crate-attr` flags will be applied recursively on the target code using the `rustc_parse::parser::Parser`. -/// See `rustc_builtin_macros::cmdline_attrs::inject` for more information. -#[derive(Debug, Clone)] -struct Rustflags(String, TargetSelection); - -impl Rustflags { - fn new(target: TargetSelection) -> Rustflags { - let mut ret = Rustflags(String::new(), target); - ret.propagate_cargo_env("RUSTFLAGS"); - ret - } - - /// By default, cargo will pick up on various variables in the environment. However, bootstrap - /// reuses those variables to pass additional flags to rustdoc, so by default they get overridden. - /// Explicitly add back any previous value in the environment. - /// - /// `prefix` is usually `RUSTFLAGS` or `RUSTDOCFLAGS`. - fn propagate_cargo_env(&mut self, prefix: &str) { - // Inherit `RUSTFLAGS` by default ... - self.env(prefix); - - // ... and also handle target-specific env RUSTFLAGS if they're configured. - let target_specific = format!("CARGO_TARGET_{}_{}", crate::envify(&self.1.triple), prefix); - self.env(&target_specific); - } - - fn env(&mut self, env: &str) { - if let Ok(s) = env::var(env) { - for part in s.split(' ') { - self.arg(part); - } - } - } - - fn arg(&mut self, arg: &str) -> &mut Self { - assert_eq!(arg.split(' ').count(), 1); - if !self.0.is_empty() { - self.0.push(' '); - } - self.0.push_str(arg); - self - } -} - -#[derive(Debug)] -pub struct Cargo { - command: Command, - rustflags: Rustflags, - rustdocflags: Rustflags, - allow_features: String, -} - -impl Cargo { - pub fn rustdocflag(&mut self, arg: &str) -> &mut Cargo { - self.rustdocflags.arg(arg); - self - } - pub fn rustflag(&mut self, arg: &str) -> &mut Cargo { - self.rustflags.arg(arg); - self - } - - pub fn arg(&mut self, arg: impl AsRef) -> &mut Cargo { - self.command.arg(arg.as_ref()); - self - } - - pub fn args(&mut self, args: I) -> &mut Cargo - where - I: IntoIterator, - S: AsRef, - { - for arg in args { - self.arg(arg.as_ref()); - } - self - } - - pub fn env(&mut self, key: impl AsRef, value: impl AsRef) -> &mut Cargo { - // These are managed through rustflag/rustdocflag interfaces. 
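The `Rustflags` type above keeps flags as one space-separated string, which is exactly the shape `RUSTFLAGS` expects, and asserts that each pushed flag is a single token. A reduced sketch of the same idea (named `Flags` here to keep it distinct from the real type):

#[derive(Debug, Default)]
struct Flags(String);

impl Flags {
    fn arg(&mut self, arg: &str) -> &mut Self {
        // RUSTFLAGS is split on spaces, so each flag must be a single token.
        assert_eq!(arg.split(' ').count(), 1);
        if !self.0.is_empty() {
            self.0.push(' ');
        }
        self.0.push_str(arg);
        self
    }
}

fn main() {
    let mut flags = Flags::default();
    flags.arg("-Cdebuginfo=2").arg("--cfg=bootstrap");
    assert_eq!(flags.0, "-Cdebuginfo=2 --cfg=bootstrap");
}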
- assert_ne!(key.as_ref(), "RUSTFLAGS"); - assert_ne!(key.as_ref(), "RUSTDOCFLAGS"); - self.command.env(key.as_ref(), value.as_ref()); - self - } - - pub fn add_rustc_lib_path(&mut self, builder: &Builder<'_>, compiler: Compiler) { - builder.add_rustc_lib_path(compiler, &mut self.command); - } - - pub fn current_dir(&mut self, dir: &Path) -> &mut Cargo { - self.command.current_dir(dir); - self - } - - /// Adds nightly-only features that this invocation is allowed to use. - /// - /// By default, all nightly features are allowed. Once this is called, it - /// will be restricted to the given set. - pub fn allow_features(&mut self, features: &str) -> &mut Cargo { - if !self.allow_features.is_empty() { - self.allow_features.push(','); - } - self.allow_features.push_str(features); - self - } -} - -impl From for Command { - fn from(mut cargo: Cargo) -> Command { - let rustflags = &cargo.rustflags.0; - if !rustflags.is_empty() { - cargo.command.env("RUSTFLAGS", rustflags); - } - - let rustdocflags = &cargo.rustdocflags.0; - if !rustdocflags.is_empty() { - cargo.command.env("RUSTDOCFLAGS", rustdocflags); - } - - if !cargo.allow_features.is_empty() { - cargo.command.env("RUSTC_ALLOW_FEATURES", cargo.allow_features); - } - - cargo.command - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/cache.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/cache.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/cache.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/cache.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,272 +0,0 @@ -use std::any::{Any, TypeId}; -use std::borrow::Borrow; -use std::cell::RefCell; -use std::cmp::Ordering; -use std::collections::HashMap; -use std::fmt; -use std::hash::{Hash, Hasher}; -use std::marker::PhantomData; -use std::mem; -use std::ops::Deref; -use std::path::PathBuf; -use std::sync::Mutex; - -// FIXME: replace with std::lazy after it gets stabilized and reaches beta -use once_cell::sync::Lazy; - -use crate::builder::Step; - -pub struct Interned(usize, PhantomData<*const T>); - -impl Default for Interned { - fn default() -> Self { - T::default().intern() - } -} - -impl Copy for Interned {} -impl Clone for Interned { - fn clone(&self) -> Interned { - *self - } -} - -impl PartialEq for Interned { - fn eq(&self, other: &Self) -> bool { - self.0 == other.0 - } -} -impl Eq for Interned {} - -impl PartialEq for Interned { - fn eq(&self, other: &str) -> bool { - *self == other - } -} -impl<'a> PartialEq<&'a str> for Interned { - fn eq(&self, other: &&str) -> bool { - **self == **other - } -} -impl<'a, T> PartialEq<&'a Interned> for Interned { - fn eq(&self, other: &&Self) -> bool { - self.0 == other.0 - } -} -impl<'a, T> PartialEq> for &'a Interned { - fn eq(&self, other: &Interned) -> bool { - self.0 == other.0 - } -} - -unsafe impl Send for Interned {} -unsafe impl Sync for Interned {} - -impl fmt::Display for Interned { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let s: &str = &*self; - f.write_str(s) - } -} - -impl fmt::Debug for Interned -where - Self: Deref, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let s: &U = &*self; - f.write_fmt(format_args!("{s:?}")) - } -} - -impl Hash for Interned { - fn hash(&self, state: &mut H) { - let l = T::intern_cache().lock().unwrap(); - l.get(*self).hash(state) - } -} - -impl Deref for Interned { - type Target = T::Target; - fn deref(&self) -> &Self::Target { - let l = T::intern_cache().lock().unwrap(); - unsafe { mem::transmute::<&Self::Target, 
&Self::Target>(l.get(*self)) } - } -} - -impl, U: ?Sized> AsRef for Interned { - fn as_ref(&self) -> &U { - let l = T::intern_cache().lock().unwrap(); - unsafe { mem::transmute::<&U, &U>(l.get(*self).as_ref()) } - } -} - -impl PartialOrd for Interned { - fn partial_cmp(&self, other: &Self) -> Option { - let l = T::intern_cache().lock().unwrap(); - l.get(*self).partial_cmp(l.get(*other)) - } -} - -impl Ord for Interned { - fn cmp(&self, other: &Self) -> Ordering { - let l = T::intern_cache().lock().unwrap(); - l.get(*self).cmp(l.get(*other)) - } -} - -struct TyIntern { - items: Vec, - set: HashMap>, -} - -impl Default for TyIntern { - fn default() -> Self { - TyIntern { items: Vec::new(), set: Default::default() } - } -} - -impl TyIntern { - fn intern_borrow(&mut self, item: &B) -> Interned - where - B: Eq + Hash + ToOwned + ?Sized, - T: Borrow, - { - if let Some(i) = self.set.get(&item) { - return *i; - } - let item = item.to_owned(); - let interned = Interned(self.items.len(), PhantomData::<*const T>); - self.set.insert(item.clone(), interned); - self.items.push(item); - interned - } - - fn intern(&mut self, item: T) -> Interned { - if let Some(i) = self.set.get(&item) { - return *i; - } - let interned = Interned(self.items.len(), PhantomData::<*const T>); - self.set.insert(item.clone(), interned); - self.items.push(item); - interned - } - - fn get(&self, i: Interned) -> &T { - &self.items[i.0] - } -} - -#[derive(Default)] -pub struct Interner { - strs: Mutex>, - paths: Mutex>, - lists: Mutex>>, -} - -trait Internable: Clone + Eq + Hash + 'static { - fn intern_cache() -> &'static Mutex>; - - fn intern(self) -> Interned { - Self::intern_cache().lock().unwrap().intern(self) - } -} - -impl Internable for String { - fn intern_cache() -> &'static Mutex> { - &INTERNER.strs - } -} - -impl Internable for PathBuf { - fn intern_cache() -> &'static Mutex> { - &INTERNER.paths - } -} - -impl Internable for Vec { - fn intern_cache() -> &'static Mutex> { - &INTERNER.lists - } -} - -impl Interner { - pub fn intern_str(&self, s: &str) -> Interned { - self.strs.lock().unwrap().intern_borrow(s) - } - pub fn intern_string(&self, s: String) -> Interned { - self.strs.lock().unwrap().intern(s) - } - - pub fn intern_path(&self, s: PathBuf) -> Interned { - self.paths.lock().unwrap().intern(s) - } - - pub fn intern_list(&self, v: Vec) -> Interned> { - self.lists.lock().unwrap().intern(v) - } -} - -pub static INTERNER: Lazy = Lazy::new(Interner::default); - -/// This is essentially a `HashMap` which allows storing any type in its input and -/// any type in its output. It is a write-once cache; values are never evicted, -/// which means that references to the value can safely be returned from the -/// `get()` method. 
-#[derive(Debug)] -pub struct Cache( - RefCell< - HashMap< - TypeId, - Box, // actually a HashMap> - >, - >, -); - -impl Cache { - pub fn new() -> Cache { - Cache(RefCell::new(HashMap::new())) - } - - pub fn put(&self, step: S, value: S::Output) { - let mut cache = self.0.borrow_mut(); - let type_id = TypeId::of::(); - let stepcache = cache - .entry(type_id) - .or_insert_with(|| Box::new(HashMap::::new())) - .downcast_mut::>() - .expect("invalid type mapped"); - assert!(!stepcache.contains_key(&step), "processing {step:?} a second time"); - stepcache.insert(step, value); - } - - pub fn get(&self, step: &S) -> Option { - let mut cache = self.0.borrow_mut(); - let type_id = TypeId::of::(); - let stepcache = cache - .entry(type_id) - .or_insert_with(|| Box::new(HashMap::::new())) - .downcast_mut::>() - .expect("invalid type mapped"); - stepcache.get(step).cloned() - } -} - -#[cfg(test)] -impl Cache { - pub fn all(&mut self) -> Vec<(S, S::Output)> { - let cache = self.0.get_mut(); - let type_id = TypeId::of::(); - let mut v = cache - .remove(&type_id) - .map(|b| b.downcast::>().expect("correct type")) - .map(|m| m.into_iter().collect::>()) - .unwrap_or_default(); - v.sort_by_key(|(s, _)| s.clone()); - v - } - - pub fn contains(&self) -> bool { - self.0.borrow().contains_key(&TypeId::of::()) - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/cc_detect.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/cc_detect.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/cc_detect.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/cc_detect.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,273 +0,0 @@ -//! C-compiler probing and detection. -//! -//! This module will fill out the `cc` and `cxx` maps of `Build` by looking for -//! C and C++ compilers for each target configured. A compiler is found through -//! a number of vectors (in order of precedence) -//! -//! 1. Configuration via `target.$target.cc` in `config.toml`. -//! 2. Configuration via `target.$target.android-ndk` in `config.toml`, if -//! applicable -//! 3. Special logic to probe on OpenBSD -//! 4. The `CC_$target` environment variable. -//! 5. The `CC` environment variable. -//! 6. "cc" -//! -//! Some of this logic is implemented here, but much of it is farmed out to the -//! `cc` crate itself, so we end up having the same fallbacks as there. -//! Similar logic is then used to find a C++ compiler, just some s/cc/c++/ is -//! used. -//! -//! It is intended that after this module has run no C/C++ compiler will -//! ever be probed for. Instead the compilers found here will be used for -//! everything. - -use std::collections::HashSet; -use std::path::{Path, PathBuf}; -use std::process::Command; -use std::{env, iter}; - -use crate::config::{Target, TargetSelection}; -use crate::util::output; -use crate::{Build, CLang, GitRepo}; - -// The `cc` crate doesn't provide a way to obtain a path to the detected archiver, -// so use some simplified logic here. First we respect the environment variable `AR`, then -// try to infer the archiver path from the C compiler path. -// In the future this logic should be replaced by calling into the `cc` crate. 
-fn cc2ar(cc: &Path, target: TargetSelection) -> Option { - if let Some(ar) = env::var_os(format!("AR_{}", target.triple.replace("-", "_"))) { - Some(PathBuf::from(ar)) - } else if let Some(ar) = env::var_os("AR") { - Some(PathBuf::from(ar)) - } else if target.contains("msvc") { - None - } else if target.contains("musl") { - Some(PathBuf::from("ar")) - } else if target.contains("openbsd") { - Some(PathBuf::from("ar")) - } else if target.contains("vxworks") { - Some(PathBuf::from("wr-ar")) - } else if target.contains("android") { - Some(cc.parent().unwrap().join(PathBuf::from("llvm-ar"))) - } else { - let parent = cc.parent().unwrap(); - let file = cc.file_name().unwrap().to_str().unwrap(); - for suffix in &["gcc", "cc", "clang"] { - if let Some(idx) = file.rfind(suffix) { - let mut file = file[..idx].to_owned(); - file.push_str("ar"); - return Some(parent.join(&file)); - } - } - Some(parent.join(file)) - } -} - -fn new_cc_build(build: &Build, target: TargetSelection) -> cc::Build { - let mut cfg = cc::Build::new(); - cfg.cargo_metadata(false) - .opt_level(2) - .warnings(false) - .debug(false) - // Compress debuginfo - .flag_if_supported("-gz") - .target(&target.triple) - .host(&build.build.triple); - match build.crt_static(target) { - Some(a) => { - cfg.static_crt(a); - } - None => { - if target.contains("msvc") { - cfg.static_crt(true); - } - if target.contains("musl") { - cfg.static_flag(true); - } - } - } - cfg -} - -pub fn find(build: &Build) { - // For all targets we're going to need a C compiler for building some shims - // and such as well as for being a linker for Rust code. - let targets = build - .targets - .iter() - .chain(&build.hosts) - .cloned() - .chain(iter::once(build.build)) - .collect::>(); - for target in targets.into_iter() { - find_target(build, target); - } -} - -pub fn find_target(build: &Build, target: TargetSelection) { - let mut cfg = new_cc_build(build, target); - let config = build.config.target_config.get(&target); - if let Some(cc) = config.and_then(|c| c.cc.as_ref()) { - cfg.compiler(cc); - } else { - set_compiler(&mut cfg, Language::C, target, config, build); - } - - let compiler = cfg.get_compiler(); - let ar = if let ar @ Some(..) = config.and_then(|c| c.ar.clone()) { - ar - } else { - cc2ar(compiler.path(), target) - }; - - build.cc.borrow_mut().insert(target, compiler.clone()); - let cflags = build.cflags(target, GitRepo::Rustc, CLang::C); - - // If we use llvm-libunwind, we will need a C++ compiler as well for all targets - // We'll need one anyways if the target triple is also a host triple - let mut cfg = new_cc_build(build, target); - cfg.cpp(true); - let cxx_configured = if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) { - cfg.compiler(cxx); - true - } else if build.hosts.contains(&target) || build.build == target { - set_compiler(&mut cfg, Language::CPlusPlus, target, config, build); - true - } else { - // Use an auto-detected compiler (or one configured via `CXX_target_triple` env vars). 
- cfg.try_get_compiler().is_ok() - }; - - // for VxWorks, record CXX compiler which will be used in lib.rs:linker() - if cxx_configured || target.contains("vxworks") { - let compiler = cfg.get_compiler(); - build.cxx.borrow_mut().insert(target, compiler); - } - - build.verbose(&format!("CC_{} = {:?}", &target.triple, build.cc(target))); - build.verbose(&format!("CFLAGS_{} = {:?}", &target.triple, cflags)); - if let Ok(cxx) = build.cxx(target) { - let cxxflags = build.cflags(target, GitRepo::Rustc, CLang::Cxx); - build.verbose(&format!("CXX_{} = {:?}", &target.triple, cxx)); - build.verbose(&format!("CXXFLAGS_{} = {:?}", &target.triple, cxxflags)); - } - if let Some(ar) = ar { - build.verbose(&format!("AR_{} = {:?}", &target.triple, ar)); - build.ar.borrow_mut().insert(target, ar); - } - - if let Some(ranlib) = config.and_then(|c| c.ranlib.clone()) { - build.ranlib.borrow_mut().insert(target, ranlib); - } -} - -fn set_compiler( - cfg: &mut cc::Build, - compiler: Language, - target: TargetSelection, - config: Option<&Target>, - build: &Build, -) { - match &*target.triple { - // When compiling for android we may have the NDK configured in the - // config.toml in which case we look there. Otherwise the default - // compiler already takes into account the triple in question. - t if t.contains("android") => { - if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) { - cfg.compiler(ndk_compiler(compiler, &*target.triple, ndk)); - } - } - - // The default gcc version from OpenBSD may be too old, try using egcc, - // which is a gcc version from ports, if this is the case. - t if t.contains("openbsd") => { - let c = cfg.get_compiler(); - let gnu_compiler = compiler.gcc(); - if !c.path().ends_with(gnu_compiler) { - return; - } - - let output = output(c.to_command().arg("--version")); - let i = match output.find(" 4.") { - Some(i) => i, - None => return, - }; - match output[i + 3..].chars().next().unwrap() { - '0'..='6' => {} - _ => return, - } - let alternative = format!("e{gnu_compiler}"); - if Command::new(&alternative).output().is_ok() { - cfg.compiler(alternative); - } - } - - "mips-unknown-linux-musl" => { - if cfg.get_compiler().path().to_str() == Some("gcc") { - cfg.compiler("mips-linux-musl-gcc"); - } - } - "mipsel-unknown-linux-musl" => { - if cfg.get_compiler().path().to_str() == Some("gcc") { - cfg.compiler("mipsel-linux-musl-gcc"); - } - } - - t if t.contains("musl") => { - if let Some(root) = build.musl_root(target) { - let guess = root.join("bin/musl-gcc"); - if guess.exists() { - cfg.compiler(guess); - } - } - } - - _ => {} - } -} - -pub(crate) fn ndk_compiler(compiler: Language, triple: &str, ndk: &Path) -> PathBuf { - let mut triple_iter = triple.split("-"); - let triple_translated = if let Some(arch) = triple_iter.next() { - let arch_new = match arch { - "arm" | "armv7" | "armv7neon" | "thumbv7" | "thumbv7neon" => "armv7a", - other => other, - }; - std::iter::once(arch_new).chain(triple_iter).collect::>().join("-") - } else { - triple.to_string() - }; - - // API 19 is the earliest API level supported by NDK r25b but AArch64 and x86_64 support - // begins at API level 21. - let api_level = - if triple.contains("aarch64") || triple.contains("x86_64") { "21" } else { "19" }; - let compiler = format!("{}{}-{}", triple_translated, api_level, compiler.clang()); - ndk.join("bin").join(compiler) -} - -/// The target programming language for a native compiler. -pub(crate) enum Language { - /// The compiler is targeting C. - C, - /// The compiler is targeting C++. 
- CPlusPlus, -} - -impl Language { - /// Obtains the name of a compiler in the GCC collection. - fn gcc(self) -> &'static str { - match self { - Language::C => "gcc", - Language::CPlusPlus => "g++", - } - } - - /// Obtains the name of a compiler in the clang suite. - fn clang(self) -> &'static str { - match self { - Language::C => "clang", - Language::CPlusPlus => "clang++", - } - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/channel.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/channel.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/channel.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/channel.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,160 +0,0 @@ -//! Build configuration for Rust's release channels. -//! -//! Implements the stable/beta/nightly channel distinctions by setting various -//! flags like the `unstable_features`, calculating variables like `release` and -//! `package_vers`, and otherwise indicating to the compiler what it should -//! print out as part of its version information. - -use std::fs; -use std::path::Path; -use std::process::Command; - -use crate::util::output; -use crate::util::t; -use crate::Build; - -#[derive(Clone, Default)] -pub enum GitInfo { - /// This is not a git repository. - #[default] - Absent, - /// This is a git repository. - /// If the info should be used (`omit_git_hash` is false), this will be - /// `Some`, otherwise it will be `None`. - Present(Option), - /// This is not a git repository, but the info can be fetched from the - /// `git-commit-info` file. - RecordedForTarball(Info), -} - -#[derive(Clone)] -pub struct Info { - pub commit_date: String, - pub sha: String, - pub short_sha: String, -} - -impl GitInfo { - pub fn new(omit_git_hash: bool, dir: &Path) -> GitInfo { - // See if this even begins to look like a git dir - if !dir.join(".git").exists() { - match read_commit_info_file(dir) { - Some(info) => return GitInfo::RecordedForTarball(info), - None => return GitInfo::Absent, - } - } - - // Make sure git commands work - match Command::new("git").arg("rev-parse").current_dir(dir).output() { - Ok(ref out) if out.status.success() => {} - _ => return GitInfo::Absent, - } - - // If we're ignoring the git info, we don't actually need to collect it, just make sure this - // was a git repo in the first place. 
- if omit_git_hash { - return GitInfo::Present(None); - } - - // Ok, let's scrape some info - let ver_date = output( - Command::new("git") - .current_dir(dir) - .arg("log") - .arg("-1") - .arg("--date=short") - .arg("--pretty=format:%cd"), - ); - let ver_hash = output(Command::new("git").current_dir(dir).arg("rev-parse").arg("HEAD")); - let short_ver_hash = output( - Command::new("git").current_dir(dir).arg("rev-parse").arg("--short=9").arg("HEAD"), - ); - GitInfo::Present(Some(Info { - commit_date: ver_date.trim().to_string(), - sha: ver_hash.trim().to_string(), - short_sha: short_ver_hash.trim().to_string(), - })) - } - - pub fn info(&self) -> Option<&Info> { - match self { - GitInfo::Absent => None, - GitInfo::Present(info) => info.as_ref(), - GitInfo::RecordedForTarball(info) => Some(info), - } - } - - pub fn sha(&self) -> Option<&str> { - self.info().map(|s| &s.sha[..]) - } - - pub fn sha_short(&self) -> Option<&str> { - self.info().map(|s| &s.short_sha[..]) - } - - pub fn commit_date(&self) -> Option<&str> { - self.info().map(|s| &s.commit_date[..]) - } - - pub fn version(&self, build: &Build, num: &str) -> String { - let mut version = build.release(num); - if let Some(ref inner) = self.info() { - version.push_str(" ("); - version.push_str(&inner.short_sha); - version.push(' '); - version.push_str(&inner.commit_date); - version.push(')'); - } - version - } - - /// Returns whether this directory has a `.git` directory which should be managed by bootstrap. - pub fn is_managed_git_subrepository(&self) -> bool { - match self { - GitInfo::Absent | GitInfo::RecordedForTarball(_) => false, - GitInfo::Present(_) => true, - } - } - - /// Returns whether this is being built from a tarball. - pub fn is_from_tarball(&self) -> bool { - match self { - GitInfo::Absent | GitInfo::Present(_) => false, - GitInfo::RecordedForTarball(_) => true, - } - } -} - -/// Read the commit information from the `git-commit-info` file given the -/// project root. -pub fn read_commit_info_file(root: &Path) -> Option { - if let Ok(contents) = fs::read_to_string(root.join("git-commit-info")) { - let mut lines = contents.lines(); - let sha = lines.next(); - let short_sha = lines.next(); - let commit_date = lines.next(); - let info = match (commit_date, sha, short_sha) { - (Some(commit_date), Some(sha), Some(short_sha)) => Info { - commit_date: commit_date.to_owned(), - sha: sha.to_owned(), - short_sha: short_sha.to_owned(), - }, - _ => panic!("the `git-commit-info` file is malformed"), - }; - Some(info) - } else { - None - } -} - -/// Write the commit information to the `git-commit-info` file given the project -/// root. -pub fn write_commit_info_file(root: &Path, info: &Info) { - let commit_info = format!("{}\n{}\n{}\n", info.sha, info.short_sha, info.commit_date); - t!(fs::write(root.join("git-commit-info"), &commit_info)); -} - -/// Write the commit hash to the `git-commit-hash` file given the project root. -pub fn write_commit_hash_file(root: &Path, sha: &str) { - t!(fs::write(root.join("git-commit-hash"), sha)); -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/check.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/check.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/check.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/check.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,542 +0,0 @@ -//! Implementation of compiling the compiler and standard library, in "check"-based modes. 
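The `git-commit-info` helpers in channel.rs above use a simple three-line format: full sha, short sha, commit date, one per line. A minimal round-trip sketch of that format, with a placeholder directory and placeholder values:

use std::fs;
use std::io;
use std::path::Path;

// Write and re-read the three-line `git-commit-info` format described above.
fn commit_info_roundtrip(dir: &Path) -> io::Result<(String, String, String)> {
    let file = dir.join("git-commit-info");
    fs::write(&file, "0123456789abcdef\n012345678\n2023-12-21\n")?;
    let contents = fs::read_to_string(&file)?;
    let mut lines = contents.lines();
    let sha = lines.next().unwrap_or("").to_owned();
    let short_sha = lines.next().unwrap_or("").to_owned();
    let commit_date = lines.next().unwrap_or("").to_owned();
    Ok((sha, short_sha, commit_date))
}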
- -use crate::builder::{crate_description, Alias, Builder, Kind, RunConfig, ShouldRun, Step}; -use crate::cache::Interned; -use crate::compile::{add_to_sysroot, run_cargo, rustc_cargo, rustc_cargo_env, std_cargo}; -use crate::config::TargetSelection; -use crate::tool::{prepare_tool_cargo, SourceType}; -use crate::INTERNER; -use crate::{Compiler, Mode, Subcommand}; -use std::path::{Path, PathBuf}; - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Std { - pub target: TargetSelection, - /// Whether to build only a subset of crates. - /// - /// This shouldn't be used from other steps; see the comment on [`compile::Rustc`]. - /// - /// [`compile::Rustc`]: crate::compile::Rustc - crates: Interned>, -} - -/// Returns args for the subcommand itself (not for cargo) -fn args(builder: &Builder<'_>) -> Vec { - fn strings<'a>(arr: &'a [&str]) -> impl Iterator + 'a { - arr.iter().copied().map(String::from) - } - - if let Subcommand::Clippy { fix, allow, deny, warn, forbid, .. } = &builder.config.cmd { - // disable the most spammy clippy lints - let ignored_lints = vec![ - "many_single_char_names", // there are a lot in stdarch - "collapsible_if", - "type_complexity", - "missing_safety_doc", // almost 3K warnings - "too_many_arguments", - "needless_lifetimes", // people want to keep the lifetimes - "wrong_self_convention", - ]; - let mut args = vec![]; - if *fix { - #[rustfmt::skip] - args.extend(strings(&[ - "--fix", "-Zunstable-options", - // FIXME: currently, `--fix` gives an error while checking tests for libtest, - // possibly because libtest is not yet built in the sysroot. - // As a workaround, avoid checking tests and benches when passed --fix. - "--lib", "--bins", "--examples", - ])); - } - args.extend(strings(&["--", "--cap-lints", "warn"])); - args.extend(ignored_lints.iter().map(|lint| format!("-Aclippy::{}", lint))); - let mut clippy_lint_levels: Vec = Vec::new(); - allow.iter().for_each(|v| clippy_lint_levels.push(format!("-A{}", v))); - deny.iter().for_each(|v| clippy_lint_levels.push(format!("-D{}", v))); - warn.iter().for_each(|v| clippy_lint_levels.push(format!("-W{}", v))); - forbid.iter().for_each(|v| clippy_lint_levels.push(format!("-F{}", v))); - args.extend(clippy_lint_levels); - args.extend(builder.config.free_args.clone()); - args - } else { - builder.config.free_args.clone() - } -} - -fn cargo_subcommand(kind: Kind) -> &'static str { - match kind { - Kind::Check => "check", - Kind::Clippy => "clippy", - Kind::Fix => "fix", - _ => unreachable!(), - } -} - -impl Std { - pub fn new(target: TargetSelection) -> Self { - Self { target, crates: INTERNER.intern_list(vec![]) } - } -} - -impl Step for Std { - type Output = (); - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.crate_or_deps("sysroot").path("library") - } - - fn make_run(run: RunConfig<'_>) { - let crates = run.make_run_crates(Alias::Library); - run.builder.ensure(Std { target: run.target, crates }); - } - - fn run(self, builder: &Builder<'_>) { - builder.update_submodule(&Path::new("library").join("stdarch")); - - let target = self.target; - let compiler = builder.compiler(builder.top_stage, builder.config.build); - - let mut cargo = builder.cargo( - compiler, - Mode::Std, - SourceType::InTree, - target, - cargo_subcommand(builder.kind), - ); - std_cargo(builder, target, compiler.stage, &mut cargo); - if matches!(builder.config.cmd, Subcommand::Fix { .. }) { - // By default, cargo tries to fix all targets. 
Tell it not to fix tests until we've added `test` to the sysroot. - cargo.arg("--lib"); - } - - for krate in &*self.crates { - cargo.arg("-p").arg(krate); - } - - let _guard = builder.msg_check( - format_args!("library artifacts{}", crate_description(&self.crates)), - target, - ); - run_cargo( - builder, - cargo, - args(builder), - &libstd_stamp(builder, compiler, target), - vec![], - true, - false, - ); - - // We skip populating the sysroot in non-zero stage because that'll lead - // to rlib/rmeta conflicts if std gets built during this session. - if compiler.stage == 0 { - let libdir = builder.sysroot_libdir(compiler, target); - let hostdir = builder.sysroot_libdir(compiler, compiler.host); - add_to_sysroot(&builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target)); - } - drop(_guard); - - // don't run on std twice with x.py clippy - // don't check test dependencies if we haven't built libtest - if builder.kind == Kind::Clippy || !self.crates.iter().any(|krate| krate == "test") { - return; - } - - // Then run cargo again, once we've put the rmeta files for the library - // crates into the sysroot. This is needed because e.g., core's tests - // depend on `libtest` -- Cargo presumes it will exist, but it doesn't - // since we initialize with an empty sysroot. - // - // Currently only the "libtest" tree of crates does this. - let mut cargo = builder.cargo( - compiler, - Mode::Std, - SourceType::InTree, - target, - cargo_subcommand(builder.kind), - ); - - // If we're not in stage 0, tests and examples will fail to compile - // from `core` definitions being loaded from two different `libcore` - // .rmeta and .rlib files. - if compiler.stage == 0 { - cargo.arg("--all-targets"); - } - - std_cargo(builder, target, compiler.stage, &mut cargo); - - // Explicitly pass -p for all dependencies krates -- this will force cargo - // to also check the tests/benches/examples for these crates, rather - // than just the leaf crate. - for krate in &*self.crates { - cargo.arg("-p").arg(krate); - } - - let _guard = builder.msg_check("library test/bench/example targets", target); - run_cargo( - builder, - cargo, - args(builder), - &libstd_test_stamp(builder, compiler, target), - vec![], - true, - false, - ); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Rustc { - pub target: TargetSelection, - /// Whether to build only a subset of crates. - /// - /// This shouldn't be used from other steps; see the comment on [`compile::Rustc`]. - /// - /// [`compile::Rustc`]: crate::compile::Rustc - crates: Interned>, -} - -impl Rustc { - pub fn new(target: TargetSelection, builder: &Builder<'_>) -> Self { - let crates = builder - .in_tree_crates("rustc-main", Some(target)) - .into_iter() - .map(|krate| krate.name.to_string()) - .collect(); - Self { target, crates: INTERNER.intern_list(crates) } - } -} - -impl Step for Rustc { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.crate_or_deps("rustc-main").path("compiler") - } - - fn make_run(run: RunConfig<'_>) { - let crates = run.make_run_crates(Alias::Compiler); - run.builder.ensure(Rustc { target: run.target, crates }); - } - - /// Builds the compiler. - /// - /// This will build the compiler for a particular stage of the build using - /// the `compiler` targeting the `target` architecture. The artifacts - /// created will also be linked into the sysroot directory. 
- fn run(self, builder: &Builder<'_>) { - let compiler = builder.compiler(builder.top_stage, builder.config.build); - let target = self.target; - - if compiler.stage != 0 { - // If we're not in stage 0, then we won't have a std from the beta - // compiler around. That means we need to make sure there's one in - // the sysroot for the compiler to find. Otherwise, we're going to - // fail when building crates that need to generate code (e.g., build - // scripts and their dependencies). - builder.ensure(crate::compile::Std::new(compiler, compiler.host)); - builder.ensure(crate::compile::Std::new(compiler, target)); - } else { - builder.ensure(Std::new(target)); - } - - let mut cargo = builder.cargo( - compiler, - Mode::Rustc, - SourceType::InTree, - target, - cargo_subcommand(builder.kind), - ); - rustc_cargo(builder, &mut cargo, target, compiler.stage); - - // For ./x.py clippy, don't run with --all-targets because - // linting tests and benchmarks can produce very noisy results - if builder.kind != Kind::Clippy { - cargo.arg("--all-targets"); - } - - // Explicitly pass -p for all compiler crates -- this will force cargo - // to also check the tests/benches/examples for these crates, rather - // than just the leaf crate. - for krate in &*self.crates { - cargo.arg("-p").arg(krate); - } - - let _guard = builder.msg_check( - format_args!("compiler artifacts{}", crate_description(&self.crates)), - target, - ); - run_cargo( - builder, - cargo, - args(builder), - &librustc_stamp(builder, compiler, target), - vec![], - true, - false, - ); - - let libdir = builder.sysroot_libdir(compiler, target); - let hostdir = builder.sysroot_libdir(compiler, compiler.host); - add_to_sysroot(&builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target)); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct CodegenBackend { - pub target: TargetSelection, - pub backend: Interned, -} - -impl Step for CodegenBackend { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.paths(&["compiler/rustc_codegen_cranelift", "compiler/rustc_codegen_gcc"]) - } - - fn make_run(run: RunConfig<'_>) { - for &backend in &[INTERNER.intern_str("cranelift"), INTERNER.intern_str("gcc")] { - run.builder.ensure(CodegenBackend { target: run.target, backend }); - } - } - - fn run(self, builder: &Builder<'_>) { - // FIXME: remove once https://github.com/rust-lang/rust/issues/112393 is resolved - if builder.build.config.vendor && &self.backend == "gcc" { - println!("Skipping checking of `rustc_codegen_gcc` with vendoring enabled."); - return; - } - - let compiler = builder.compiler(builder.top_stage, builder.config.build); - let target = self.target; - let backend = self.backend; - - builder.ensure(Rustc::new(target, builder)); - - let mut cargo = builder.cargo( - compiler, - Mode::Codegen, - SourceType::InTree, - target, - cargo_subcommand(builder.kind), - ); - cargo - .arg("--manifest-path") - .arg(builder.src.join(format!("compiler/rustc_codegen_{backend}/Cargo.toml"))); - rustc_cargo_env(builder, &mut cargo, target, compiler.stage); - - let _guard = builder.msg_check(&backend, target); - - run_cargo( - builder, - cargo, - args(builder), - &codegen_backend_stamp(builder, compiler, target, backend), - vec![], - true, - false, - ); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct RustAnalyzer { - pub target: TargetSelection, -} - -impl Step for RustAnalyzer { - type Output = (); - const ONLY_HOSTS: 
bool = true; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path("src/tools/rust-analyzer").default_condition( - builder - .config - .tools - .as_ref() - .map_or(true, |tools| tools.iter().any(|tool| tool == "rust-analyzer")), - ) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustAnalyzer { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - let compiler = builder.compiler(builder.top_stage, builder.config.build); - let target = self.target; - - builder.ensure(Std::new(target)); - - let mut cargo = prepare_tool_cargo( - builder, - compiler, - Mode::ToolStd, - target, - cargo_subcommand(builder.kind), - "src/tools/rust-analyzer", - SourceType::InTree, - &["rust-analyzer/in-rust-tree".to_owned()], - ); - - cargo.allow_features(crate::tool::RustAnalyzer::ALLOW_FEATURES); - - // For ./x.py clippy, don't check those targets because - // linting tests and benchmarks can produce very noisy results - if builder.kind != Kind::Clippy { - // can't use `--all-targets` because `--examples` doesn't work well - cargo.arg("--bins"); - cargo.arg("--tests"); - cargo.arg("--benches"); - } - - let _guard = builder.msg_check("rust-analyzer artifacts", target); - run_cargo( - builder, - cargo, - args(builder), - &stamp(builder, compiler, target), - vec![], - true, - false, - ); - - /// Cargo's output path in a given stage, compiled by a particular - /// compiler for the specified target. - fn stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf { - builder.cargo_out(compiler, Mode::ToolStd, target).join(".rust-analyzer-check.stamp") - } - } -} - -macro_rules! tool_check_step { - ($name:ident, $path:literal, $($alias:literal, )* $source_type:path $(, $default:literal )?) => { - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] - pub struct $name { - pub target: TargetSelection, - } - - impl Step for $name { - type Output = (); - const ONLY_HOSTS: bool = true; - // don't ever check out-of-tree tools by default, they'll fail when toolstate is broken - const DEFAULT: bool = matches!($source_type, SourceType::InTree) $( && $default )?; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.paths(&[ $path, $($alias),* ]) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure($name { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - let compiler = builder.compiler(builder.top_stage, builder.config.build); - let target = self.target; - - builder.ensure(Rustc::new(target, builder)); - - let mut cargo = prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - target, - cargo_subcommand(builder.kind), - $path, - $source_type, - &[], - ); - - // For ./x.py clippy, don't run with --all-targets because - // linting tests and benchmarks can produce very noisy results - if builder.kind != Kind::Clippy { - cargo.arg("--all-targets"); - } - - // Enable internal lints for clippy and rustdoc - // NOTE: this doesn't enable lints for any other tools unless they explicitly add `#![warn(rustc::internal)]` - // See https://github.com/rust-lang/rust/pull/80573#issuecomment-754010776 - cargo.rustflag("-Zunstable-options"); - let _guard = builder.msg_check(&concat!(stringify!($name), " artifacts").to_lowercase(), target); - run_cargo( - builder, - cargo, - args(builder), - &stamp(builder, compiler, target), - vec![], - true, - false, - ); - - /// Cargo's output path in a given stage, compiled by a particular - /// compiler for the specified target. 
- fn stamp( - builder: &Builder<'_>, - compiler: Compiler, - target: TargetSelection, - ) -> PathBuf { - builder - .cargo_out(compiler, Mode::ToolRustc, target) - .join(format!(".{}-check.stamp", stringify!($name).to_lowercase())) - } - } - } - }; -} - -tool_check_step!(Rustdoc, "src/tools/rustdoc", "src/librustdoc", SourceType::InTree); -// Clippy, miri and Rustfmt are hybrids. They are external tools, but use a git subtree instead -// of a submodule. Since the SourceType only drives the deny-warnings -// behavior, treat it as in-tree so that any new warnings in clippy will be -// rejected. -tool_check_step!(Clippy, "src/tools/clippy", SourceType::InTree); -tool_check_step!(Miri, "src/tools/miri", SourceType::InTree); -tool_check_step!(CargoMiri, "src/tools/miri/cargo-miri", SourceType::InTree); -tool_check_step!(Rls, "src/tools/rls", SourceType::InTree); -tool_check_step!(Rustfmt, "src/tools/rustfmt", SourceType::InTree); -tool_check_step!(MiroptTestTools, "src/tools/miropt-test-tools", SourceType::InTree); - -tool_check_step!(Bootstrap, "src/bootstrap", SourceType::InTree, false); - -/// Cargo's output path for the standard library in a given stage, compiled -/// by a particular compiler for the specified target. -fn libstd_stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf { - builder.cargo_out(compiler, Mode::Std, target).join(".libstd-check.stamp") -} - -/// Cargo's output path for the standard library in a given stage, compiled -/// by a particular compiler for the specified target. -fn libstd_test_stamp( - builder: &Builder<'_>, - compiler: Compiler, - target: TargetSelection, -) -> PathBuf { - builder.cargo_out(compiler, Mode::Std, target).join(".libstd-check-test.stamp") -} - -/// Cargo's output path for librustc in a given stage, compiled by a particular -/// compiler for the specified target. -fn librustc_stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf { - builder.cargo_out(compiler, Mode::Rustc, target).join(".librustc-check.stamp") -} - -/// Cargo's output path for librustc_codegen_llvm in a given stage, compiled by a particular -/// compiler for the specified target and backend. -fn codegen_backend_stamp( - builder: &Builder<'_>, - compiler: Compiler, - target: TargetSelection, - backend: Interned, -) -> PathBuf { - builder - .cargo_out(compiler, Mode::Codegen, target) - .join(format!(".librustc_codegen_{backend}-check.stamp")) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/clean.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/clean.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/clean.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/clean.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,237 +0,0 @@ -//! Implementation of `make clean` in rustbuild. -//! -//! Responsible for cleaning out a build directory of all old and stale -//! artifacts to prepare for a fresh build. Currently doesn't remove the -//! `build/cache` directory (download cache) or the `build/$target/llvm` -//! directory unless the `--all` flag is present. 
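The stamp helpers in check.rs above all share one naming scheme inside Cargo's output directory; a simplified model of just the file-name part (not bootstrap API):

// Check-build stamp file names, as produced by the helpers above.
fn check_stamp_file_name(component: &str) -> String {
    format!(".{component}-check.stamp")
}
// check_stamp_file_name("libstd")   -> ".libstd-check.stamp"
// check_stamp_file_name("librustc") -> ".librustc-check.stamp"
// The std test/bench/example pass uses a separate ".libstd-check-test.stamp".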
- -use std::fs; -use std::io::{self, ErrorKind}; -use std::path::Path; - -use crate::builder::{crate_description, Builder, RunConfig, ShouldRun, Step}; -use crate::cache::Interned; -use crate::util::t; -use crate::{Build, Compiler, Mode, Subcommand}; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct CleanAll {} - -impl Step for CleanAll { - const DEFAULT: bool = true; - type Output = (); - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(CleanAll {}) - } - - fn run(self, builder: &Builder<'_>) -> Self::Output { - let Subcommand::Clean { all, stage } = builder.config.cmd else { - unreachable!("wrong subcommand?") - }; - - if all && stage.is_some() { - panic!("--all and --stage can't be used at the same time for `x clean`"); - } - - clean(builder.build, all, stage) - } - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() // handled by DEFAULT - } -} - -macro_rules! clean_crate_tree { - ( $( $name:ident, $mode:path, $root_crate:literal);+ $(;)? ) => { $( - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] - pub struct $name { - compiler: Compiler, - crates: Interned>, - } - - impl Step for $name { - type Output = (); - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let crates = run.builder.in_tree_crates($root_crate, None); - run.crates(crates) - } - - fn make_run(run: RunConfig<'_>) { - let builder = run.builder; - let compiler = builder.compiler(builder.top_stage, run.target); - builder.ensure(Self { crates: run.cargo_crates_in_set(), compiler }); - } - - fn run(self, builder: &Builder<'_>) -> Self::Output { - let compiler = self.compiler; - let target = compiler.host; - let mut cargo = builder.bare_cargo(compiler, $mode, target, "clean"); - for krate in &*self.crates { - cargo.arg("-p"); - cargo.arg(krate); - } - - builder.info(&format!( - "Cleaning{} stage{} {} artifacts ({} -> {})", - crate_description(&self.crates), compiler.stage, stringify!($name).to_lowercase(), &compiler.host, target, - )); - - // NOTE: doesn't use `run_cargo` because we don't want to save a stamp file, - // and doesn't use `stream_cargo` to avoid passing `--message-format` which `clean` doesn't accept. - builder.run(&mut cargo); - } - } - )+ } -} - -clean_crate_tree! 
{ - Rustc, Mode::Rustc, "rustc-main"; - Std, Mode::Std, "sysroot"; -} - -fn clean(build: &Build, all: bool, stage: Option) { - if build.config.dry_run() { - return; - } - - rm_rf("tmp".as_ref()); - - // Clean the entire build directory - if all { - rm_rf(&build.out); - return; - } - - // Clean the target stage artifacts - if let Some(stage) = stage { - clean_specific_stage(build, stage); - return; - } - - // Follow the default behaviour - clean_default(build); -} - -fn clean_specific_stage(build: &Build, stage: u32) { - for host in &build.hosts { - let entries = match build.out.join(host.triple).read_dir() { - Ok(iter) => iter, - Err(_) => continue, - }; - - for entry in entries { - let entry = t!(entry); - let stage_prefix = format!("stage{}", stage); - - // if current entry is not related with the target stage, continue - if !entry.file_name().to_str().unwrap_or("").contains(&stage_prefix) { - continue; - } - - let path = t!(entry.path().canonicalize()); - rm_rf(&path); - } - } -} - -fn clean_default(build: &Build) { - rm_rf(&build.out.join("tmp")); - rm_rf(&build.out.join("dist")); - rm_rf(&build.out.join("bootstrap")); - rm_rf(&build.out.join("rustfmt.stamp")); - - for host in &build.hosts { - let entries = match build.out.join(host.triple).read_dir() { - Ok(iter) => iter, - Err(_) => continue, - }; - - for entry in entries { - let entry = t!(entry); - if entry.file_name().to_str() == Some("llvm") { - continue; - } - let path = t!(entry.path().canonicalize()); - rm_rf(&path); - } - } -} - -fn rm_rf(path: &Path) { - match path.symlink_metadata() { - Err(e) => { - if e.kind() == ErrorKind::NotFound { - return; - } - panic!("failed to get metadata for file {}: {}", path.display(), e); - } - Ok(metadata) => { - if metadata.file_type().is_file() || metadata.file_type().is_symlink() { - do_op(path, "remove file", |p| { - fs::remove_file(p).or_else(|e| { - // Work around the fact that we cannot - // delete an executable while it runs on Windows. - #[cfg(windows)] - if e.kind() == std::io::ErrorKind::PermissionDenied - && p.file_name().and_then(std::ffi::OsStr::to_str) - == Some("bootstrap.exe") - { - eprintln!("warning: failed to delete '{}'.", p.display()); - return Ok(()); - } - Err(e) - }) - }); - return; - } - - for file in t!(fs::read_dir(path)) { - rm_rf(&t!(file).path()); - } - do_op(path, "remove dir", |p| { - fs::remove_dir(p).or_else(|e| { - // Check for dir not empty on Windows - // FIXME: Once `ErrorKind::DirectoryNotEmpty` is stabilized, - // match on `e.kind()` instead. - #[cfg(windows)] - if e.raw_os_error() == Some(145) { - return Ok(()); - } - - Err(e) - }) - }); - } - }; -} - -fn do_op(path: &Path, desc: &str, mut f: F) -where - F: FnMut(&Path) -> io::Result<()>, -{ - match f(path) { - Ok(()) => {} - // On windows we can't remove a readonly file, and git will often clone files as readonly. - // As a result, we have some special logic to remove readonly files on windows. - // This is also the reason that we can't use things like fs::remove_dir_all(). 
- Err(ref e) if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied => { - let m = t!(path.symlink_metadata()); - let mut p = m.permissions(); - p.set_readonly(false); - t!(fs::set_permissions(path, p)); - f(path).unwrap_or_else(|e| { - // Delete symlinked directories on Windows - #[cfg(windows)] - if m.file_type().is_symlink() && path.is_dir() && fs::remove_dir(path).is_ok() { - return; - } - panic!("failed to {} {}: {}", desc, path.display(), e); - }); - } - Err(e) => { - panic!("failed to {} {}: {}", desc, path.display(), e); - } - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/compile.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/compile.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/compile.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/compile.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2015 +0,0 @@ -//! Implementation of compiling various phases of the compiler and standard -//! library. -//! -//! This module contains some of the real meat in the rustbuild build system -//! which is where Cargo is used to compile the standard library, libtest, and -//! the compiler. This module is also responsible for assembling the sysroot as it -//! goes along from the output of the previous stage. - -use std::borrow::Cow; -use std::collections::HashSet; -use std::env; -use std::ffi::OsStr; -use std::fs; -use std::io::prelude::*; -use std::io::BufReader; -use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio}; -use std::str; - -use serde_derive::Deserialize; - -use crate::builder::crate_description; -use crate::builder::Cargo; -use crate::builder::{Builder, Kind, PathSet, RunConfig, ShouldRun, Step, TaskPath}; -use crate::cache::{Interned, INTERNER}; -use crate::config::{DebuginfoLevel, LlvmLibunwind, RustcLto, TargetSelection}; -use crate::dist; -use crate::llvm; -use crate::tool::SourceType; -use crate::util::get_clang_cl_resource_dir; -use crate::util::{exe, is_debug_info, is_dylib, output, symlink_dir, t, up_to_date}; -use crate::LLVM_TOOLS; -use crate::{CLang, Compiler, DependencyType, GitRepo, Mode}; -use filetime::FileTime; - -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Std { - pub target: TargetSelection, - pub compiler: Compiler, - /// Whether to build only a subset of crates in the standard library. - /// - /// This shouldn't be used from other steps; see the comment on [`Rustc`]. - crates: Interned>, - /// When using download-rustc, we need to use a new build of `std` for running unit tests of Std itself, - /// but we need to use the downloaded copy of std for linking to rustdoc. Allow this to be overriden by `builder.ensure` from other steps. - force_recompile: bool, -} - -impl Std { - pub fn new(compiler: Compiler, target: TargetSelection) -> Self { - Self { target, compiler, crates: Default::default(), force_recompile: false } - } - - pub fn force_recompile(compiler: Compiler, target: TargetSelection) -> Self { - Self { target, compiler, crates: Default::default(), force_recompile: true } - } -} - -impl Step for Std { - type Output = (); - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - // When downloading stage1, the standard library has already been copied to the sysroot, so - // there's no need to rebuild it. 
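The `do_op` helper in clean.rs above retries a failed removal on Windows after clearing the readonly bit; a standalone sketch of that pattern for the single-file case (illustrative only):

use std::fs;
use std::io;
use std::path::Path;

// Retry a file removal after clearing the readonly bit, as clean.rs does on Windows.
fn remove_file_clearing_readonly(path: &Path) -> io::Result<()> {
    match fs::remove_file(path) {
        Err(e) if cfg!(windows) && e.kind() == io::ErrorKind::PermissionDenied => {
            let mut perms = fs::metadata(path)?.permissions();
            perms.set_readonly(false);
            fs::set_permissions(path, perms)?;
            fs::remove_file(path)
        }
        other => other,
    }
}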
- let builder = run.builder; - run.crate_or_deps("sysroot") - .path("library") - .lazy_default_condition(Box::new(|| !builder.download_rustc())) - } - - fn make_run(run: RunConfig<'_>) { - // If the paths include "library", build the entire standard library. - let has_alias = - run.paths.iter().any(|set| set.assert_single_path().path.ends_with("library")); - let crates = if has_alias { Default::default() } else { run.cargo_crates_in_set() }; - - run.builder.ensure(Std { - compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()), - target: run.target, - crates, - force_recompile: false, - }); - } - - /// Builds the standard library. - /// - /// This will build the standard library for a particular stage of the build - /// using the `compiler` targeting the `target` architecture. The artifacts - /// created will also be linked into the sysroot directory. - fn run(self, builder: &Builder<'_>) { - let target = self.target; - let compiler = self.compiler; - - // When using `download-rustc`, we already have artifacts for the host available. Don't - // recompile them. - if builder.download_rustc() && target == builder.build.build - // NOTE: the beta compiler may generate different artifacts than the downloaded compiler, so - // its artifacts can't be reused. - && compiler.stage != 0 - // This check is specific to testing std itself; see `test::Std` for more details. - && !self.force_recompile - { - cp_rustc_component_to_ci_sysroot( - builder, - compiler, - builder.config.ci_rust_std_contents(), - ); - return; - } - - if builder.config.keep_stage.contains(&compiler.stage) - || builder.config.keep_stage_std.contains(&compiler.stage) - { - builder.info("Warning: Using a potentially old libstd. This may not behave well."); - - copy_third_party_objects(builder, &compiler, target); - copy_self_contained_objects(builder, &compiler, target); - - builder.ensure(StdLink::from_std(self, compiler)); - return; - } - - builder.update_submodule(&Path::new("library").join("stdarch")); - - // Profiler information requires LLVM's compiler-rt - if builder.config.profiler { - builder.update_submodule(&Path::new("src/llvm-project")); - } - - let mut target_deps = builder.ensure(StartupObjects { compiler, target }); - - let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); - if compiler_to_use != compiler { - builder.ensure(Std::new(compiler_to_use, target)); - let msg = if compiler_to_use.host == target { - format!( - "Uplifting library (stage{} -> stage{})", - compiler_to_use.stage, compiler.stage - ) - } else { - format!( - "Uplifting library (stage{}:{} -> stage{}:{})", - compiler_to_use.stage, compiler_to_use.host, compiler.stage, target - ) - }; - builder.info(&msg); - - // Even if we're not building std this stage, the new sysroot must - // still contain the third party objects needed by various targets. 
- copy_third_party_objects(builder, &compiler, target); - copy_self_contained_objects(builder, &compiler, target); - - builder.ensure(StdLink::from_std(self, compiler_to_use)); - return; - } - - target_deps.extend(copy_third_party_objects(builder, &compiler, target)); - target_deps.extend(copy_self_contained_objects(builder, &compiler, target)); - - let mut cargo = builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "build"); - std_cargo(builder, target, compiler.stage, &mut cargo); - for krate in &*self.crates { - cargo.arg("-p").arg(krate); - } - - // See src/bootstrap/synthetic_targets.rs - if target.is_synthetic() { - cargo.env("RUSTC_BOOTSTRAP_SYNTHETIC_TARGET", "1"); - } - - let _guard = builder.msg( - Kind::Build, - compiler.stage, - format_args!("library artifacts{}", crate_description(&self.crates)), - compiler.host, - target, - ); - run_cargo( - builder, - cargo, - vec![], - &libstd_stamp(builder, compiler, target), - target_deps, - false, - false, - ); - - builder.ensure(StdLink::from_std( - self, - builder.compiler(compiler.stage, builder.config.build), - )); - } -} - -fn copy_and_stamp( - builder: &Builder<'_>, - libdir: &Path, - sourcedir: &Path, - name: &str, - target_deps: &mut Vec<(PathBuf, DependencyType)>, - dependency_type: DependencyType, -) { - let target = libdir.join(name); - builder.copy(&sourcedir.join(name), &target); - - target_deps.push((target, dependency_type)); -} - -fn copy_llvm_libunwind(builder: &Builder<'_>, target: TargetSelection, libdir: &Path) -> PathBuf { - let libunwind_path = builder.ensure(llvm::Libunwind { target }); - let libunwind_source = libunwind_path.join("libunwind.a"); - let libunwind_target = libdir.join("libunwind.a"); - builder.copy(&libunwind_source, &libunwind_target); - libunwind_target -} - -/// Copies third party objects needed by various targets. -fn copy_third_party_objects( - builder: &Builder<'_>, - compiler: &Compiler, - target: TargetSelection, -) -> Vec<(PathBuf, DependencyType)> { - let mut target_deps = vec![]; - - if builder.config.sanitizers_enabled(target) && compiler.stage != 0 { - // The sanitizers are only copied in stage1 or above, - // to avoid creating dependency on LLVM. - target_deps.extend( - copy_sanitizers(builder, &compiler, target) - .into_iter() - .map(|d| (d, DependencyType::Target)), - ); - } - - if target == "x86_64-fortanix-unknown-sgx" - || builder.config.llvm_libunwind(target) == LlvmLibunwind::InTree - && (target.contains("linux") || target.contains("fuchsia")) - { - let libunwind_path = - copy_llvm_libunwind(builder, target, &builder.sysroot_libdir(*compiler, target)); - target_deps.push((libunwind_path, DependencyType::Target)); - } - - target_deps -} - -/// Copies third party objects needed by various targets for self-contained linkage. -fn copy_self_contained_objects( - builder: &Builder<'_>, - compiler: &Compiler, - target: TargetSelection, -) -> Vec<(PathBuf, DependencyType)> { - let libdir_self_contained = builder.sysroot_libdir(*compiler, target).join("self-contained"); - t!(fs::create_dir_all(&libdir_self_contained)); - let mut target_deps = vec![]; - - // Copies the libc and CRT objects. - // - // rustc historically provides a more self-contained installation for musl targets - // not requiring the presence of a native musl toolchain. For example, it can fall back - // to using gcc from a glibc-targeting toolchain for linking. - // To do that we have to distribute musl startup objects as a part of Rust toolchain - // and link with them manually in the self-contained mode. 
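The `copy_and_stamp` helper above copies one object file into the target libdir and records the destination so it later ends up in the stamp file; a reduced standalone version using plain paths (illustrative only, dependency-type bookkeeping omitted):

use std::fs;
use std::io;
use std::path::{Path, PathBuf};

// Copy `sourcedir/name` to `libdir/name` and record the destination path.
fn copy_and_record(
    libdir: &Path,
    sourcedir: &Path,
    name: &str,
    recorded: &mut Vec<PathBuf>,
) -> io::Result<()> {
    let dst = libdir.join(name);
    fs::copy(sourcedir.join(name), &dst)?;
    recorded.push(dst);
    Ok(())
}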
- if target.contains("musl") && !target.contains("unikraft") { - let srcdir = builder.musl_libdir(target).unwrap_or_else(|| { - panic!("Target {:?} does not have a \"musl-libdir\" key", target.triple) - }); - for &obj in &["libc.a", "crt1.o", "Scrt1.o", "rcrt1.o", "crti.o", "crtn.o"] { - copy_and_stamp( - builder, - &libdir_self_contained, - &srcdir, - obj, - &mut target_deps, - DependencyType::TargetSelfContained, - ); - } - let crt_path = builder.ensure(llvm::CrtBeginEnd { target }); - for &obj in &["crtbegin.o", "crtbeginS.o", "crtend.o", "crtendS.o"] { - let src = crt_path.join(obj); - let target = libdir_self_contained.join(obj); - builder.copy(&src, &target); - target_deps.push((target, DependencyType::TargetSelfContained)); - } - - if !target.starts_with("s390x") { - let libunwind_path = copy_llvm_libunwind(builder, target, &libdir_self_contained); - target_deps.push((libunwind_path, DependencyType::TargetSelfContained)); - } - } else if target.contains("-wasi") { - let srcdir = builder - .wasi_root(target) - .unwrap_or_else(|| { - panic!("Target {:?} does not have a \"wasi-root\" key", target.triple) - }) - .join("lib") - .join(target.to_string().replace("-preview1", "")); - for &obj in &["libc.a", "crt1-command.o", "crt1-reactor.o"] { - copy_and_stamp( - builder, - &libdir_self_contained, - &srcdir, - obj, - &mut target_deps, - DependencyType::TargetSelfContained, - ); - } - } else if target.ends_with("windows-gnu") { - for obj in ["crt2.o", "dllcrt2.o"].iter() { - let src = compiler_file(builder, &builder.cc(target), target, CLang::C, obj); - let target = libdir_self_contained.join(obj); - builder.copy(&src, &target); - target_deps.push((target, DependencyType::TargetSelfContained)); - } - } - - target_deps -} - -/// Configure cargo to compile the standard library, adding appropriate env vars -/// and such. -pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, cargo: &mut Cargo) { - if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") { - cargo.env("MACOSX_DEPLOYMENT_TARGET", target); - } - - if let Some(path) = builder.config.profiler_path(target) { - cargo.env("LLVM_PROFILER_RT_LIB", path); - } - - // Determine if we're going to compile in optimized C intrinsics to - // the `compiler-builtins` crate. These intrinsics live in LLVM's - // `compiler-rt` repository, but our `src/llvm-project` submodule isn't - // always checked out, so we need to conditionally look for this. (e.g. if - // an external LLVM is used we skip the LLVM submodule checkout). - // - // Note that this shouldn't affect the correctness of `compiler-builtins`, - // but only its speed. Some intrinsics in C haven't been translated to Rust - // yet but that's pretty rare. Other intrinsics have optimized - // implementations in C which have only had slower versions ported to Rust, - // so we favor the C version where we can, but it's not critical. - // - // If `compiler-rt` is available ensure that the `c` feature of the - // `compiler-builtins` crate is enabled and it's configured to learn where - // `compiler-rt` is located. - let compiler_builtins_root = builder.src.join("src/llvm-project/compiler-rt"); - let compiler_builtins_c_feature = if compiler_builtins_root.exists() { - // Note that `libprofiler_builtins/build.rs` also computes this so if - // you're changing something here please also change that. 
- cargo.env("RUST_COMPILER_RT_ROOT", &compiler_builtins_root); - " compiler-builtins-c" - } else { - "" - }; - - // `libtest` uses this to know whether or not to support - // `-Zunstable-options`. - if !builder.unstable_features() { - cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1"); - } - - let mut features = String::new(); - - // Cranelift doesn't support `asm`. - if stage != 0 && builder.config.default_codegen_backend().unwrap_or_default() == "cranelift" { - features += " compiler-builtins-no-asm"; - } - - if builder.no_std(target) == Some(true) { - features += " compiler-builtins-mem"; - if !target.starts_with("bpf") { - features.push_str(compiler_builtins_c_feature); - } - - // for no-std targets we only compile a few no_std crates - cargo - .args(&["-p", "alloc"]) - .arg("--manifest-path") - .arg(builder.src.join("library/alloc/Cargo.toml")) - .arg("--features") - .arg(features); - } else { - features += &builder.std_features(target); - features.push_str(compiler_builtins_c_feature); - - cargo - .arg("--features") - .arg(features) - .arg("--manifest-path") - .arg(builder.src.join("library/sysroot/Cargo.toml")); - - // Help the libc crate compile by assisting it in finding various - // sysroot native libraries. - if target.contains("musl") { - if let Some(p) = builder.musl_libdir(target) { - let root = format!("native={}", p.to_str().unwrap()); - cargo.rustflag("-L").rustflag(&root); - } - } - - if target.contains("-wasi") { - if let Some(p) = builder.wasi_root(target) { - let root = format!( - "native={}/lib/{}", - p.to_str().unwrap(), - target.to_string().replace("-preview1", "") - ); - cargo.rustflag("-L").rustflag(&root); - } - } - } - - // By default, rustc uses `-Cembed-bitcode=yes`, and Cargo overrides that - // with `-Cembed-bitcode=no` for non-LTO builds. However, libstd must be - // built with bitcode so that the produced rlibs can be used for both LTO - // builds (which use bitcode) and non-LTO builds (which use object code). - // So we override the override here! - // - // But we don't bother for the stage 0 compiler because it's never used - // with LTO. - if stage >= 1 { - cargo.rustflag("-Cembed-bitcode=yes"); - } - if builder.config.rust_lto == RustcLto::Off { - cargo.rustflag("-Clto=off"); - } - - // By default, rustc does not include unwind tables unless they are required - // for a particular target. They are not required by RISC-V targets, but - // compiling the standard library with them means that users can get - // backtraces without having to recompile the standard library themselves. - // - // This choice was discussed in https://github.com/rust-lang/rust/pull/69890 - if target.contains("riscv") { - cargo.rustflag("-Cforce-unwind-tables=yes"); - } - - let html_root = - format!("-Zcrate-attr=doc(html_root_url=\"{}/\")", builder.doc_rust_lang_org_channel(),); - cargo.rustflag(&html_root); - cargo.rustdocflag(&html_root); - - cargo.rustdocflag("-Zcrate-attr=warn(rust_2018_idioms)"); -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -struct StdLink { - pub compiler: Compiler, - pub target_compiler: Compiler, - pub target: TargetSelection, - /// Not actually used; only present to make sure the cache invalidation is correct. - crates: Interned>, - /// See [`Std::force_recompile`]. 
- force_recompile: bool, -} - -impl StdLink { - fn from_std(std: Std, host_compiler: Compiler) -> Self { - Self { - compiler: host_compiler, - target_compiler: std.compiler, - target: std.target, - crates: std.crates, - force_recompile: std.force_recompile, - } - } -} - -impl Step for StdLink { - type Output = (); - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() - } - - /// Link all libstd rlibs/dylibs into the sysroot location. - /// - /// Links those artifacts generated by `compiler` to the `stage` compiler's - /// sysroot for the specified `host` and `target`. - /// - /// Note that this assumes that `compiler` has already generated the libstd - /// libraries for `target`, and this method will find them in the relevant - /// output directory. - fn run(self, builder: &Builder<'_>) { - let compiler = self.compiler; - let target_compiler = self.target_compiler; - let target = self.target; - - // NOTE: intentionally does *not* check `target == builder.build` to avoid having to add the same check in `test::Crate`. - let (libdir, hostdir) = if self.force_recompile && builder.download_rustc() { - // NOTE: copies part of `sysroot_libdir` to avoid having to add a new `force_recompile` argument there too - let lib = builder.sysroot_libdir_relative(self.compiler); - let sysroot = builder.ensure(crate::compile::Sysroot { - compiler: self.compiler, - force_recompile: self.force_recompile, - }); - let libdir = sysroot.join(lib).join("rustlib").join(target.triple).join("lib"); - let hostdir = sysroot.join(lib).join("rustlib").join(compiler.host.triple).join("lib"); - (INTERNER.intern_path(libdir), INTERNER.intern_path(hostdir)) - } else { - let libdir = builder.sysroot_libdir(target_compiler, target); - let hostdir = builder.sysroot_libdir(target_compiler, compiler.host); - (libdir, hostdir) - }; - - add_to_sysroot(builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target)); - - // Special case for stage0, to make `rustup toolchain link` and `x dist --stage 0` - // work for stage0-sysroot. We only do this if the stage0 compiler comes from beta, - // and is not set to a custom path. - if compiler.stage == 0 - && builder - .build - .config - .initial_rustc - .starts_with(builder.out.join(&compiler.host.triple).join("stage0/bin")) - { - // Copy bin files from stage0/bin to stage0-sysroot/bin - let sysroot = builder.out.join(&compiler.host.triple).join("stage0-sysroot"); - - let host = compiler.host.triple; - let stage0_bin_dir = builder.out.join(&host).join("stage0/bin"); - let sysroot_bin_dir = sysroot.join("bin"); - t!(fs::create_dir_all(&sysroot_bin_dir)); - builder.cp_r(&stage0_bin_dir, &sysroot_bin_dir); - - // Copy all *.so files from stage0/lib to stage0-sysroot/lib - let stage0_lib_dir = builder.out.join(&host).join("stage0/lib"); - if let Ok(files) = fs::read_dir(&stage0_lib_dir) { - for file in files { - let file = t!(file); - let path = file.path(); - if path.is_file() && is_dylib(&file.file_name().into_string().unwrap()) { - builder.copy(&path, &sysroot.join("lib").join(path.file_name().unwrap())); - } - } - } - - // Copy codegen-backends from stage0 - let sysroot_codegen_backends = builder.sysroot_codegen_backends(compiler); - t!(fs::create_dir_all(&sysroot_codegen_backends)); - let stage0_codegen_backends = builder - .out - .join(&host) - .join("stage0/lib/rustlib") - .join(&host) - .join("codegen-backends"); - builder.cp_r(&stage0_codegen_backends, &sysroot_codegen_backends); - } - } -} - -/// Copies sanitizer runtime libraries into target libdir. 
-fn copy_sanitizers( - builder: &Builder<'_>, - compiler: &Compiler, - target: TargetSelection, -) -> Vec { - let runtimes: Vec = builder.ensure(llvm::Sanitizers { target }); - - if builder.config.dry_run() { - return Vec::new(); - } - - let mut target_deps = Vec::new(); - let libdir = builder.sysroot_libdir(*compiler, target); - - for runtime in &runtimes { - let dst = libdir.join(&runtime.name); - builder.copy(&runtime.path, &dst); - - // The `aarch64-apple-ios-macabi` and `x86_64-apple-ios-macabi` are also supported for - // sanitizers, but they share a sanitizer runtime with `${arch}-apple-darwin`, so we do - // not list them here to rename and sign the runtime library. - if target == "x86_64-apple-darwin" - || target == "aarch64-apple-darwin" - || target == "aarch64-apple-ios" - || target == "aarch64-apple-ios-sim" - || target == "x86_64-apple-ios" - { - // Update the library’s install name to reflect that it has been renamed. - apple_darwin_update_library_name(&dst, &format!("@rpath/{}", &runtime.name)); - // Upon renaming the install name, the code signature of the file will invalidate, - // so we will sign it again. - apple_darwin_sign_file(&dst); - } - - target_deps.push(dst); - } - - target_deps -} - -fn apple_darwin_update_library_name(library_path: &Path, new_name: &str) { - let status = Command::new("install_name_tool") - .arg("-id") - .arg(new_name) - .arg(library_path) - .status() - .expect("failed to execute `install_name_tool`"); - assert!(status.success()); -} - -fn apple_darwin_sign_file(file_path: &Path) { - let status = Command::new("codesign") - .arg("-f") // Force to rewrite the existing signature - .arg("-s") - .arg("-") - .arg(file_path) - .status() - .expect("failed to execute `codesign`"); - assert!(status.success()); -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct StartupObjects { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for StartupObjects { - type Output = Vec<(PathBuf, DependencyType)>; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("library/rtstartup") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(StartupObjects { - compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()), - target: run.target, - }); - } - - /// Builds and prepare startup objects like rsbegin.o and rsend.o - /// - /// These are primarily used on Windows right now for linking executables/dlls. - /// They don't require any library support as they're just plain old object - /// files, so we just use the nightly snapshot compiler to always build them (as - /// no other compilers are guaranteed to be available). 
- fn run(self, builder: &Builder<'_>) -> Vec<(PathBuf, DependencyType)> { - let for_compiler = self.compiler; - let target = self.target; - if !target.ends_with("windows-gnu") { - return vec![]; - } - - let mut target_deps = vec![]; - - let src_dir = &builder.src.join("library").join("rtstartup"); - let dst_dir = &builder.native_dir(target).join("rtstartup"); - let sysroot_dir = &builder.sysroot_libdir(for_compiler, target); - t!(fs::create_dir_all(dst_dir)); - - for file in &["rsbegin", "rsend"] { - let src_file = &src_dir.join(file.to_string() + ".rs"); - let dst_file = &dst_dir.join(file.to_string() + ".o"); - if !up_to_date(src_file, dst_file) { - let mut cmd = Command::new(&builder.initial_rustc); - cmd.env("RUSTC_BOOTSTRAP", "1"); - if !builder.local_rebuild { - // a local_rebuild compiler already has stage1 features - cmd.arg("--cfg").arg("bootstrap"); - } - builder.run( - cmd.arg("--target") - .arg(target.rustc_target_arg()) - .arg("--emit=obj") - .arg("-o") - .arg(dst_file) - .arg(src_file), - ); - } - - let target = sysroot_dir.join((*file).to_string() + ".o"); - builder.copy(dst_file, &target); - target_deps.push((target, DependencyType::Target)); - } - - target_deps - } -} - -fn cp_rustc_component_to_ci_sysroot( - builder: &Builder<'_>, - compiler: Compiler, - contents: Vec, -) { - let sysroot = builder.ensure(Sysroot { compiler, force_recompile: false }); - let ci_rustc_dir = builder.config.ci_rustc_dir(); - - for file in contents { - let src = ci_rustc_dir.join(&file); - let dst = sysroot.join(file); - if src.is_dir() { - t!(fs::create_dir_all(dst)); - } else { - builder.copy(&src, &dst); - } - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Rustc { - pub target: TargetSelection, - pub compiler: Compiler, - /// Whether to build a subset of crates, rather than the whole compiler. - /// - /// This should only be requested by the user, not used within rustbuild itself. - /// Using it within rustbuild can lead to confusing situation where lints are replayed - /// in two different steps. - crates: Interned>, -} - -impl Rustc { - pub fn new(compiler: Compiler, target: TargetSelection) -> Self { - Self { target, compiler, crates: Default::default() } - } -} - -impl Step for Rustc { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = false; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let mut crates = run.builder.in_tree_crates("rustc-main", None); - for (i, krate) in crates.iter().enumerate() { - // We can't allow `build rustc` as an alias for this Step, because that's reserved by `Assemble`. - // Ideally Assemble would use `build compiler` instead, but that seems too confusing to be worth the breaking change. - if krate.name == "rustc-main" { - crates.swap_remove(i); - break; - } - } - run.crates(crates) - } - - fn make_run(run: RunConfig<'_>) { - let crates = run.cargo_crates_in_set(); - run.builder.ensure(Rustc { - compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()), - target: run.target, - crates, - }); - } - - /// Builds the compiler. - /// - /// This will build the compiler for a particular stage of the build using - /// the `compiler` targeting the `target` architecture. The artifacts - /// created will also be linked into the sysroot directory. - fn run(self, builder: &Builder<'_>) { - let compiler = self.compiler; - let target = self.target; - - // NOTE: the ABI of the beta compiler is different from the ABI of the downloaded compiler, - // so its artifacts can't be reused. 
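The StartupObjects step above shells out to the initial rustc to build rsbegin.o and rsend.o for *-windows-gnu targets; a reduced sketch of that invocation, with the target triple and paths as placeholders:

use std::io;
use std::process::{Command, ExitStatus};

// Build one rtstartup object roughly the way the step above does.
fn build_rsbegin_object() -> io::Result<ExitStatus> {
    Command::new("rustc")
        .env("RUSTC_BOOTSTRAP", "1")
        .args(["--cfg", "bootstrap"])
        .args(["--target", "x86_64-pc-windows-gnu"])
        .arg("--emit=obj")
        .args(["-o", "rsbegin.o"])
        .arg("library/rtstartup/rsbegin.rs")
        .status()
}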
- if builder.download_rustc() && compiler.stage != 0 { - // Copy the existing artifacts instead of rebuilding them. - // NOTE: this path is only taken for tools linking to rustc-dev (including ui-fulldeps tests). - cp_rustc_component_to_ci_sysroot( - builder, - compiler, - builder.config.ci_rustc_dev_contents(), - ); - return; - } - - builder.ensure(Std::new(compiler, target)); - - if builder.config.keep_stage.contains(&compiler.stage) { - builder.info("Warning: Using a potentially old librustc. This may not behave well."); - builder.info("Warning: Use `--keep-stage-std` if you want to rebuild the compiler when it changes"); - builder.ensure(RustcLink::from_rustc(self, compiler)); - return; - } - - let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); - if compiler_to_use != compiler { - builder.ensure(Rustc::new(compiler_to_use, target)); - let msg = if compiler_to_use.host == target { - format!( - "Uplifting rustc (stage{} -> stage{})", - compiler_to_use.stage, - compiler.stage + 1 - ) - } else { - format!( - "Uplifting rustc (stage{}:{} -> stage{}:{})", - compiler_to_use.stage, - compiler_to_use.host, - compiler.stage + 1, - target - ) - }; - builder.info(&msg); - builder.ensure(RustcLink::from_rustc(self, compiler_to_use)); - return; - } - - // Ensure that build scripts and proc macros have a std / libproc_macro to link against. - builder.ensure(Std::new( - builder.compiler(self.compiler.stage, builder.config.build), - builder.config.build, - )); - - let mut cargo = builder.cargo(compiler, Mode::Rustc, SourceType::InTree, target, "build"); - rustc_cargo(builder, &mut cargo, target, compiler.stage); - - if builder.config.rust_profile_use.is_some() - && builder.config.rust_profile_generate.is_some() - { - panic!("Cannot use and generate PGO profiles at the same time"); - } - - // With LLD, we can use ICF (identical code folding) to reduce the executable size - // of librustc_driver/rustc and to improve i-cache utilization. - // - // -Wl,[link options] doesn't work on MSVC. However, /OPT:ICF (technically /OPT:REF,ICF) - // is already on by default in MSVC optimized builds, which is interpreted as --icf=all: - // https://github.com/llvm/llvm-project/blob/3329cec2f79185bafd678f310fafadba2a8c76d2/lld/COFF/Driver.cpp#L1746 - // https://github.com/rust-lang/rust/blob/f22819bcce4abaff7d1246a56eec493418f9f4ee/compiler/rustc_codegen_ssa/src/back/linker.rs#L827 - if builder.config.use_lld && !compiler.host.contains("msvc") { - cargo.rustflag("-Clink-args=-Wl,--icf=all"); - } - - let is_collecting = if let Some(path) = &builder.config.rust_profile_generate { - if compiler.stage == 1 { - cargo.rustflag(&format!("-Cprofile-generate={path}")); - // Apparently necessary to avoid overflowing the counters during - // a Cargo build profile - cargo.rustflag("-Cllvm-args=-vp-counters-per-site=4"); - true - } else { - false - } - } else if let Some(path) = &builder.config.rust_profile_use { - if compiler.stage == 1 { - cargo.rustflag(&format!("-Cprofile-use={path}")); - cargo.rustflag("-Cllvm-args=-pgo-warn-missing-function"); - true - } else { - false - } - } else { - false - }; - if is_collecting { - // Ensure paths to Rust sources are relative, not absolute. - cargo.rustflag(&format!( - "-Cllvm-args=-static-func-strip-dirname-prefix={}", - builder.config.src.components().count() - )); - } - - // We currently don't support cross-crate LTO in stage0. This also isn't hugely necessary - // and may just be a time sink. 
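The PGO handling above only instruments or applies profiles when building the stage 1 compiler; a simplified view of the extra rustflags it adds (flag strings taken from the logic above, the function itself is illustrative only):

// Extra rustflags for profile collection vs. profile use, as chosen above.
fn pgo_rustflags(profile_generate: Option<&str>, profile_use: Option<&str>) -> Vec<String> {
    if let Some(path) = profile_generate {
        vec![
            format!("-Cprofile-generate={path}"),
            // keeps the counters from overflowing during a Cargo build profile
            "-Cllvm-args=-vp-counters-per-site=4".to_string(),
        ]
    } else if let Some(path) = profile_use {
        vec![
            format!("-Cprofile-use={path}"),
            "-Cllvm-args=-pgo-warn-missing-function".to_string(),
        ]
    } else {
        Vec::new()
    }
}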
- if compiler.stage != 0 { - match builder.config.rust_lto { - RustcLto::Thin | RustcLto::Fat => { - // Since using LTO for optimizing dylibs is currently experimental, - // we need to pass -Zdylib-lto. - cargo.rustflag("-Zdylib-lto"); - // Cargo by default passes `-Cembed-bitcode=no` and doesn't pass `-Clto` when - // compiling dylibs (and their dependencies), even when LTO is enabled for the - // crate. Therefore, we need to override `-Clto` and `-Cembed-bitcode` here. - let lto_type = match builder.config.rust_lto { - RustcLto::Thin => "thin", - RustcLto::Fat => "fat", - _ => unreachable!(), - }; - cargo.rustflag(&format!("-Clto={lto_type}")); - cargo.rustflag("-Cembed-bitcode=yes"); - } - RustcLto::ThinLocal => { /* Do nothing, this is the default */ } - RustcLto::Off => { - cargo.rustflag("-Clto=off"); - } - } - } else if builder.config.rust_lto == RustcLto::Off { - cargo.rustflag("-Clto=off"); - } - - for krate in &*self.crates { - cargo.arg("-p").arg(krate); - } - - let _guard = builder.msg_sysroot_tool( - Kind::Build, - compiler.stage, - format_args!("compiler artifacts{}", crate_description(&self.crates)), - compiler.host, - target, - ); - let stamp = librustc_stamp(builder, compiler, target); - run_cargo( - builder, - cargo, - vec![], - &stamp, - vec![], - false, - true, // Only ship rustc_driver.so and .rmeta files, not all intermediate .rlib files. - ); - - // When building `librustc_driver.so` (like `libLLVM.so`) on linux, it can contain - // unexpected debuginfo from dependencies, for example from the C++ standard library used in - // our LLVM wrapper. Unless we're explicitly requesting `librustc_driver` to be built with - // debuginfo (via the debuginfo level of the executables using it): strip this debuginfo - // away after the fact. 
- if builder.config.rust_debuginfo_level_rustc == DebuginfoLevel::None - && builder.config.rust_debuginfo_level_tools == DebuginfoLevel::None - { - let target_root_dir = stamp.parent().unwrap(); - let rustc_driver = target_root_dir.join("librustc_driver.so"); - strip_debug(builder, target, &rustc_driver); - } - - builder.ensure(RustcLink::from_rustc( - self, - builder.compiler(compiler.stage, builder.config.build), - )); - } -} - -pub fn rustc_cargo(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection, stage: u32) { - cargo - .arg("--features") - .arg(builder.rustc_features(builder.kind)) - .arg("--manifest-path") - .arg(builder.src.join("compiler/rustc/Cargo.toml")); - - cargo.rustdocflag("-Zcrate-attr=warn(rust_2018_idioms)"); - - rustc_cargo_env(builder, cargo, target, stage); -} - -pub fn rustc_cargo_env( - builder: &Builder<'_>, - cargo: &mut Cargo, - target: TargetSelection, - stage: u32, -) { - // Set some configuration variables picked up by build scripts and - // the compiler alike - cargo - .env("CFG_RELEASE", builder.rust_release()) - .env("CFG_RELEASE_CHANNEL", &builder.config.channel) - .env("CFG_VERSION", builder.rust_version()); - - if let Some(backend) = builder.config.default_codegen_backend() { - cargo.env("CFG_DEFAULT_CODEGEN_BACKEND", backend); - } - - let libdir_relative = builder.config.libdir_relative().unwrap_or_else(|| Path::new("lib")); - let target_config = builder.config.target_config.get(&target); - - cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative); - - if let Some(ref ver_date) = builder.rust_info().commit_date() { - cargo.env("CFG_VER_DATE", ver_date); - } - if let Some(ref ver_hash) = builder.rust_info().sha() { - cargo.env("CFG_VER_HASH", ver_hash); - } - if !builder.unstable_features() { - cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1"); - } - - // Prefer the current target's own default_linker, else a globally - // specified one. - if let Some(s) = target_config.and_then(|c| c.default_linker.as_ref()) { - cargo.env("CFG_DEFAULT_LINKER", s); - } else if let Some(ref s) = builder.config.rustc_default_linker { - cargo.env("CFG_DEFAULT_LINKER", s); - } - - if builder.config.rustc_parallel { - // keep in sync with `bootstrap/lib.rs:Build::rustc_features` - // `cfg` option for rustc, `features` option for cargo, for conditional compilation - cargo.rustflag("--cfg=parallel_compiler"); - cargo.rustdocflag("--cfg=parallel_compiler"); - } - if builder.config.rust_verify_llvm_ir { - cargo.env("RUSTC_VERIFY_LLVM_IR", "1"); - } - - // Note that this is disabled if LLVM itself is disabled or we're in a check - // build. If we are in a check build we still go ahead here presuming we've - // detected that LLVM is already built and good to go which helps prevent - // busting caches (e.g. like #71152). - if builder.config.llvm_enabled() { - let building_is_expensive = crate::llvm::prebuilt_llvm_config(builder, target).is_err(); - // `top_stage == stage` might be false for `check --stage 1`, if we are building the stage 1 compiler - let can_skip_build = builder.kind == Kind::Check && builder.top_stage == stage; - let should_skip_build = building_is_expensive && can_skip_build; - if !should_skip_build { - rustc_llvm_env(builder, cargo, target) - } - } -} - -/// Pass down configuration from the LLVM build into the build of -/// rustc_llvm and rustc_codegen_llvm. 
-fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection) { - let target_config = builder.config.target_config.get(&target); - - if builder.is_rust_llvm(target) { - cargo.env("LLVM_RUSTLLVM", "1"); - } - let llvm::LlvmResult { llvm_config, .. } = builder.ensure(llvm::Llvm { target }); - cargo.env("LLVM_CONFIG", &llvm_config); - if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { - cargo.env("CFG_LLVM_ROOT", s); - } - - // Some LLVM linker flags (-L and -l) may be needed to link `rustc_llvm`. Its build script - // expects these to be passed via the `LLVM_LINKER_FLAGS` env variable, separated by - // whitespace. - // - // For example: - // - on windows, when `clang-cl` is used with instrumentation, we need to manually add - // clang's runtime library resource directory so that the profiler runtime library can be - // found. This is to avoid the linker errors about undefined references to - // `__llvm_profile_instrument_memop` when linking `rustc_driver`. - let mut llvm_linker_flags = String::new(); - if builder.config.llvm_profile_generate && target.contains("msvc") { - if let Some(ref clang_cl_path) = builder.config.llvm_clang_cl { - // Add clang's runtime library directory to the search path - let clang_rt_dir = get_clang_cl_resource_dir(clang_cl_path); - llvm_linker_flags.push_str(&format!("-L{}", clang_rt_dir.display())); - } - } - - // The config can also specify its own llvm linker flags. - if let Some(ref s) = builder.config.llvm_ldflags { - if !llvm_linker_flags.is_empty() { - llvm_linker_flags.push_str(" "); - } - llvm_linker_flags.push_str(s); - } - - // Set the linker flags via the env var that `rustc_llvm`'s build script will read. - if !llvm_linker_flags.is_empty() { - cargo.env("LLVM_LINKER_FLAGS", llvm_linker_flags); - } - - // Building with a static libstdc++ is only supported on linux right now, - // not for MSVC or macOS - if builder.config.llvm_static_stdcpp - && !target.contains("freebsd") - && !target.contains("msvc") - && !target.contains("apple") - && !target.contains("solaris") - { - let file = compiler_file( - builder, - &builder.cxx(target).unwrap(), - target, - CLang::Cxx, - "libstdc++.a", - ); - cargo.env("LLVM_STATIC_STDCPP", file); - } - if builder.llvm_link_shared() { - cargo.env("LLVM_LINK_SHARED", "1"); - } - if builder.config.llvm_use_libcxx { - cargo.env("LLVM_USE_LIBCXX", "1"); - } - if builder.config.llvm_optimize && !builder.config.llvm_release_debuginfo { - cargo.env("LLVM_NDEBUG", "1"); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -struct RustcLink { - pub compiler: Compiler, - pub target_compiler: Compiler, - pub target: TargetSelection, - /// Not actually used; only present to make sure the cache invalidation is correct. 
- crates: Interned>, -} - -impl RustcLink { - fn from_rustc(rustc: Rustc, host_compiler: Compiler) -> Self { - Self { - compiler: host_compiler, - target_compiler: rustc.compiler, - target: rustc.target, - crates: rustc.crates, - } - } -} - -impl Step for RustcLink { - type Output = (); - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() - } - - /// Same as `std_link`, only for librustc - fn run(self, builder: &Builder<'_>) { - let compiler = self.compiler; - let target_compiler = self.target_compiler; - let target = self.target; - add_to_sysroot( - builder, - &builder.sysroot_libdir(target_compiler, target), - &builder.sysroot_libdir(target_compiler, compiler.host), - &librustc_stamp(builder, compiler, target), - ); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct CodegenBackend { - pub target: TargetSelection, - pub compiler: Compiler, - pub backend: Interned, -} - -fn needs_codegen_config(run: &RunConfig<'_>) -> bool { - let mut needs_codegen_cfg = false; - for path_set in &run.paths { - needs_codegen_cfg = match path_set { - PathSet::Set(set) => set.iter().any(|p| is_codegen_cfg_needed(p, run)), - PathSet::Suite(suite) => is_codegen_cfg_needed(&suite, run), - } - } - needs_codegen_cfg -} - -pub(crate) const CODEGEN_BACKEND_PREFIX: &str = "rustc_codegen_"; - -fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool { - if path.path.to_str().unwrap().contains(&CODEGEN_BACKEND_PREFIX) { - let mut needs_codegen_backend_config = true; - for &backend in &run.builder.config.rust_codegen_backends { - if path - .path - .to_str() - .unwrap() - .ends_with(&(CODEGEN_BACKEND_PREFIX.to_owned() + &backend)) - { - needs_codegen_backend_config = false; - } - } - if needs_codegen_backend_config { - run.builder.info( - "Warning: no codegen-backends config matched the requested path to build a codegen backend. \ - Help: add backend to codegen-backends in config.toml.", - ); - return true; - } - } - - return false; -} - -impl Step for CodegenBackend { - type Output = (); - const ONLY_HOSTS: bool = true; - // Only the backends specified in the `codegen-backends` entry of `config.toml` are built. - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.paths(&["compiler/rustc_codegen_cranelift", "compiler/rustc_codegen_gcc"]) - } - - fn make_run(run: RunConfig<'_>) { - if needs_codegen_config(&run) { - return; - } - - for &backend in &run.builder.config.rust_codegen_backends { - if backend == "llvm" { - continue; // Already built as part of rustc - } - - run.builder.ensure(CodegenBackend { - target: run.target, - compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()), - backend, - }); - } - } - - fn run(self, builder: &Builder<'_>) { - let compiler = self.compiler; - let target = self.target; - let backend = self.backend; - - builder.ensure(Rustc::new(compiler, target)); - - if builder.config.keep_stage.contains(&compiler.stage) { - builder.info( - "Warning: Using a potentially old codegen backend. \ - This may not behave well.", - ); - // Codegen backends are linked separately from this step today, so we don't do - // anything here. 
- return; - } - - let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); - if compiler_to_use != compiler { - builder.ensure(CodegenBackend { compiler: compiler_to_use, target, backend }); - return; - } - - let out_dir = builder.cargo_out(compiler, Mode::Codegen, target); - - let mut cargo = builder.cargo(compiler, Mode::Codegen, SourceType::InTree, target, "build"); - cargo - .arg("--manifest-path") - .arg(builder.src.join(format!("compiler/rustc_codegen_{backend}/Cargo.toml"))); - rustc_cargo_env(builder, &mut cargo, target, compiler.stage); - - let tmp_stamp = out_dir.join(".tmp.stamp"); - - let _guard = builder.msg_build(compiler, format_args!("codegen backend {backend}"), target); - let files = run_cargo(builder, cargo, vec![], &tmp_stamp, vec![], false, false); - if builder.config.dry_run() { - return; - } - let mut files = files.into_iter().filter(|f| { - let filename = f.file_name().unwrap().to_str().unwrap(); - is_dylib(filename) && filename.contains("rustc_codegen_") - }); - let codegen_backend = match files.next() { - Some(f) => f, - None => panic!("no dylibs built for codegen backend?"), - }; - if let Some(f) = files.next() { - panic!( - "codegen backend built two dylibs:\n{}\n{}", - codegen_backend.display(), - f.display() - ); - } - let stamp = codegen_backend_stamp(builder, compiler, target, backend); - let codegen_backend = codegen_backend.to_str().unwrap(); - t!(fs::write(&stamp, &codegen_backend)); - } -} - -/// Creates the `codegen-backends` folder for a compiler that's about to be -/// assembled as a complete compiler. -/// -/// This will take the codegen artifacts produced by `compiler` and link them -/// into an appropriate location for `target_compiler` to be a functional -/// compiler. -fn copy_codegen_backends_to_sysroot( - builder: &Builder<'_>, - compiler: Compiler, - target_compiler: Compiler, -) { - let target = target_compiler.host; - - // Note that this step is different than all the other `*Link` steps in - // that it's not assembling a bunch of libraries but rather is primarily - // moving the codegen backend into place. The codegen backend of rustc is - // not linked into the main compiler by default but is rather dynamically - // selected at runtime for inclusion. - // - // Here we're looking for the output dylib of the `CodegenBackend` step and - // we're copying that into the `codegen-backends` folder. - let dst = builder.sysroot_codegen_backends(target_compiler); - t!(fs::create_dir_all(&dst), dst); - - if builder.config.dry_run() { - return; - } - - for backend in builder.config.rust_codegen_backends.iter() { - if backend == "llvm" { - continue; // Already built as part of rustc - } - - let stamp = codegen_backend_stamp(builder, compiler, target, *backend); - let dylib = t!(fs::read_to_string(&stamp)); - let file = Path::new(&dylib); - let filename = file.file_name().unwrap().to_str().unwrap(); - // change `librustc_codegen_cranelift-xxxxxx.so` to - // `librustc_codegen_cranelift-release.so` - let target_filename = { - let dash = filename.find('-').unwrap(); - let dot = filename.find('.').unwrap(); - format!("{}-{}{}", &filename[..dash], builder.rust_release(), &filename[dot..]) - }; - builder.copy(&file, &dst.join(target_filename)); - } -} - -/// Cargo's output path for the standard library in a given stage, compiled -/// by a particular compiler for the specified target. 
-pub fn libstd_stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf { - builder.cargo_out(compiler, Mode::Std, target).join(".libstd.stamp") -} - -/// Cargo's output path for librustc in a given stage, compiled by a particular -/// compiler for the specified target. -pub fn librustc_stamp( - builder: &Builder<'_>, - compiler: Compiler, - target: TargetSelection, -) -> PathBuf { - builder.cargo_out(compiler, Mode::Rustc, target).join(".librustc.stamp") -} - -/// Cargo's output path for librustc_codegen_llvm in a given stage, compiled by a particular -/// compiler for the specified target and backend. -fn codegen_backend_stamp( - builder: &Builder<'_>, - compiler: Compiler, - target: TargetSelection, - backend: Interned, -) -> PathBuf { - builder - .cargo_out(compiler, Mode::Codegen, target) - .join(format!(".librustc_codegen_{backend}.stamp")) -} - -pub fn compiler_file( - builder: &Builder<'_>, - compiler: &Path, - target: TargetSelection, - c: CLang, - file: &str, -) -> PathBuf { - if builder.config.dry_run() { - return PathBuf::new(); - } - let mut cmd = Command::new(compiler); - cmd.args(builder.cflags(target, GitRepo::Rustc, c)); - cmd.arg(format!("-print-file-name={file}")); - let out = output(&mut cmd); - PathBuf::from(out.trim()) -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Sysroot { - pub compiler: Compiler, - /// See [`Std::force_recompile`]. - force_recompile: bool, -} - -impl Sysroot { - pub(crate) fn new(compiler: Compiler) -> Self { - Sysroot { compiler, force_recompile: false } - } -} - -impl Step for Sysroot { - type Output = Interned; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() - } - - /// Returns the sysroot for the `compiler` specified that *this build system - /// generates*. - /// - /// That is, the sysroot for the stage0 compiler is not what the compiler - /// thinks it is by default, but it's the same as the default for stages - /// 1-3. - fn run(self, builder: &Builder<'_>) -> Interned { - let compiler = self.compiler; - let host_dir = builder.out.join(&compiler.host.triple); - - let sysroot_dir = |stage| { - if stage == 0 { - host_dir.join("stage0-sysroot") - } else if self.force_recompile && stage == compiler.stage { - host_dir.join(format!("stage{stage}-test-sysroot")) - } else if builder.download_rustc() && compiler.stage != builder.top_stage { - host_dir.join("ci-rustc-sysroot") - } else { - host_dir.join(format!("stage{}", stage)) - } - }; - let sysroot = sysroot_dir(compiler.stage); - - builder.verbose(&format!("Removing sysroot {} to avoid caching bugs", sysroot.display())); - let _ = fs::remove_dir_all(&sysroot); - t!(fs::create_dir_all(&sysroot)); - - // In some cases(see https://github.com/rust-lang/rust/issues/109314), when the stage0 - // compiler relies on more recent version of LLVM than the beta compiler, it may not - // be able to locate the correct LLVM in the sysroot. This situation typically occurs - // when we upgrade LLVM version while the beta compiler continues to use an older version. - // - // Make sure to add the correct version of LLVM into the stage0 sysroot. - if compiler.stage == 0 { - dist::maybe_install_llvm_target(builder, compiler.host, &sysroot); - } - - // If we're downloading a compiler from CI, we can use the same compiler for all stages other than 0. 
- if builder.download_rustc() && compiler.stage != 0 { - assert_eq!( - builder.config.build, compiler.host, - "Cross-compiling is not yet supported with `download-rustc`", - ); - - // #102002, cleanup old toolchain folders when using download-rustc so people don't use them by accident. - for stage in 0..=2 { - if stage != compiler.stage { - let dir = sysroot_dir(stage); - if !dir.ends_with("ci-rustc-sysroot") { - let _ = fs::remove_dir_all(dir); - } - } - } - - // Copy the compiler into the correct sysroot. - // NOTE(#108767): We intentionally don't copy `rustc-dev` artifacts until they're requested with `builder.ensure(Rustc)`. - // This fixes an issue where we'd have multiple copies of libc in the sysroot with no way to tell which to load. - // There are a few quirks of bootstrap that interact to make this reliable: - // 1. The order `Step`s are run is hard-coded in `builder.rs` and not configurable. This - // avoids e.g. reordering `test::UiFulldeps` before `test::Ui` and causing the latter to - // fail because of duplicate metadata. - // 2. The sysroot is deleted and recreated between each invocation, so running `x test - // ui-fulldeps && x test ui` can't cause failures. - let mut filtered_files = Vec::new(); - let mut add_filtered_files = |suffix, contents| { - for path in contents { - let path = Path::new(&path); - if path.parent().map_or(false, |parent| parent.ends_with(&suffix)) { - filtered_files.push(path.file_name().unwrap().to_owned()); - } - } - }; - let suffix = format!("lib/rustlib/{}/lib", compiler.host); - add_filtered_files(suffix.as_str(), builder.config.ci_rustc_dev_contents()); - // NOTE: we can't copy std eagerly because `stage2-test-sysroot` needs to have only the - // newly compiled std, not the downloaded std. - add_filtered_files("lib", builder.config.ci_rust_std_contents()); - - let filtered_extensions = [ - OsStr::new("rmeta"), - OsStr::new("rlib"), - // FIXME: this is wrong when compiler.host != build, but we don't support that today - OsStr::new(std::env::consts::DLL_EXTENSION), - ]; - let ci_rustc_dir = builder.config.ci_rustc_dir(); - builder.cp_filtered(&ci_rustc_dir, &sysroot, &|path| { - if path.extension().map_or(true, |ext| !filtered_extensions.contains(&ext)) { - return true; - } - if !path.parent().map_or(true, |p| p.ends_with(&suffix)) { - return true; - } - if !filtered_files.iter().all(|f| f != path.file_name().unwrap()) { - builder.verbose_than(1, &format!("ignoring {}", path.display())); - false - } else { - true - } - }); - } - - // Symlink the source root into the same location inside the sysroot, - // where `rust-src` component would go (`$sysroot/lib/rustlib/src/rust`), - // so that any tools relying on `rust-src` also work for local builds, - // and also for translating the virtual `/rustc/$hash` back to the real - // directory (for running tests with `rust.remap-debuginfo = true`). - let sysroot_lib_rustlib_src = sysroot.join("lib/rustlib/src"); - t!(fs::create_dir_all(&sysroot_lib_rustlib_src)); - let sysroot_lib_rustlib_src_rust = sysroot_lib_rustlib_src.join("rust"); - if let Err(e) = symlink_dir(&builder.config, &builder.src, &sysroot_lib_rustlib_src_rust) { - eprintln!( - "warning: creating symbolic link `{}` to `{}` failed with {}", - sysroot_lib_rustlib_src_rust.display(), - builder.src.display(), - e, - ); - if builder.config.rust_remap_debuginfo { - eprintln!( - "warning: some `tests/ui` tests will fail when lacking `{}`", - sysroot_lib_rustlib_src_rust.display(), - ); - } - } - // Same for the rustc-src component. 
- let sysroot_lib_rustlib_rustcsrc = sysroot.join("lib/rustlib/rustc-src"); - t!(fs::create_dir_all(&sysroot_lib_rustlib_rustcsrc)); - let sysroot_lib_rustlib_rustcsrc_rust = sysroot_lib_rustlib_rustcsrc.join("rust"); - if let Err(e) = - symlink_dir(&builder.config, &builder.src, &sysroot_lib_rustlib_rustcsrc_rust) - { - eprintln!( - "warning: creating symbolic link `{}` to `{}` failed with {}", - sysroot_lib_rustlib_rustcsrc_rust.display(), - builder.src.display(), - e, - ); - } - - INTERNER.intern_path(sysroot) - } -} - -#[derive(Debug, Copy, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)] -pub struct Assemble { - /// The compiler which we will produce in this step. Assemble itself will - /// take care of ensuring that the necessary prerequisites to do so exist, - /// that is, this target can be a stage2 compiler and Assemble will build - /// previous stages for you. - pub target_compiler: Compiler, -} - -impl Step for Assemble { - type Output = Compiler; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("compiler/rustc").path("compiler") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Assemble { - target_compiler: run.builder.compiler(run.builder.top_stage + 1, run.target), - }); - } - - /// Prepare a new compiler from the artifacts in `stage` - /// - /// This will assemble a compiler in `build/$host/stage$stage`. The compiler - /// must have been previously produced by the `stage - 1` builder.build - /// compiler. - fn run(self, builder: &Builder<'_>) -> Compiler { - let target_compiler = self.target_compiler; - - if target_compiler.stage == 0 { - assert_eq!( - builder.config.build, target_compiler.host, - "Cannot obtain compiler for non-native build triple at stage 0" - ); - // The stage 0 compiler for the build triple is always pre-built. - return target_compiler; - } - - // Get the compiler that we'll use to bootstrap ourselves. - // - // Note that this is where the recursive nature of the bootstrap - // happens, as this will request the previous stage's compiler on - // downwards to stage 0. - // - // Also note that we're building a compiler for the host platform. We - // only assume that we can run `build` artifacts, which means that to - // produce some other architecture compiler we need to start from - // `build` to get there. - // - // FIXME: It may be faster if we build just a stage 1 compiler and then - // use that to bootstrap this compiler forward. - let build_compiler = builder.compiler(target_compiler.stage - 1, builder.config.build); - - // If we're downloading a compiler from CI, we can use the same compiler for all stages other than 0. - if builder.download_rustc() { - let sysroot = - builder.ensure(Sysroot { compiler: target_compiler, force_recompile: false }); - // Ensure that `libLLVM.so` ends up in the newly created target directory, - // so that tools using `rustc_private` can use it. - dist::maybe_install_llvm_target(builder, target_compiler.host, &sysroot); - // Lower stages use `ci-rustc-sysroot`, not stageN - if target_compiler.stage == builder.top_stage { - builder.info(&format!("Creating a sysroot for stage{stage} compiler (use `rustup toolchain link 'name' build/host/stage{stage}`)", stage=target_compiler.stage)); - } - return target_compiler; - } - - // Build the libraries for this compiler to link to (i.e., the libraries - // it uses at runtime). NOTE: Crates the target compiler compiles don't - // link to these. (FIXME: Is that correct? 
It seems to be correct most - // of the time but I think we do link to these for stage2/bin compilers - // when not performing a full bootstrap). - builder.ensure(Rustc::new(build_compiler, target_compiler.host)); - - // FIXME: For now patch over problems noted in #90244 by early returning here, even though - // we've not properly assembled the target sysroot. A full fix is pending further investigation, - // for now full bootstrap usage is rare enough that this is OK. - if target_compiler.stage >= 3 && !builder.config.full_bootstrap { - return target_compiler; - } - - for &backend in builder.config.rust_codegen_backends.iter() { - if backend == "llvm" { - continue; // Already built as part of rustc - } - - builder.ensure(CodegenBackend { - compiler: build_compiler, - target: target_compiler.host, - backend, - }); - } - - let lld_install = if builder.config.lld_enabled { - Some(builder.ensure(llvm::Lld { target: target_compiler.host })) - } else { - None - }; - - let stage = target_compiler.stage; - let host = target_compiler.host; - let (host_info, dir_name) = if build_compiler.host == host { - ("".into(), "host".into()) - } else { - (format!(" ({host})"), host.to_string()) - }; - // NOTE: "Creating a sysroot" is somewhat inconsistent with our internal terminology, since - // sysroots can temporarily be empty until we put the compiler inside. However, - // `ensure(Sysroot)` isn't really something that's user facing, so there shouldn't be any - // ambiguity. - let msg = format!( - "Creating a sysroot for stage{stage} compiler{host_info} (use `rustup toolchain link 'name' build/{dir_name}/stage{stage}`)" - ); - builder.info(&msg); - - // Link in all dylibs to the libdir - let stamp = librustc_stamp(builder, build_compiler, target_compiler.host); - let proc_macros = builder - .read_stamp_file(&stamp) - .into_iter() - .filter_map(|(path, dependency_type)| { - if dependency_type == DependencyType::Host { - Some(path.file_name().unwrap().to_owned().into_string().unwrap()) - } else { - None - } - }) - .collect::>(); - - let sysroot = builder.sysroot(target_compiler); - let rustc_libdir = builder.rustc_libdir(target_compiler); - t!(fs::create_dir_all(&rustc_libdir)); - let src_libdir = builder.sysroot_libdir(build_compiler, host); - for f in builder.read_dir(&src_libdir) { - let filename = f.file_name().into_string().unwrap(); - if (is_dylib(&filename) || is_debug_info(&filename)) && !proc_macros.contains(&filename) - { - builder.copy(&f.path(), &rustc_libdir.join(&filename)); - } - } - - copy_codegen_backends_to_sysroot(builder, build_compiler, target_compiler); - - // We prepend this bin directory to the user PATH when linking Rust binaries. To - // avoid shadowing the system LLD we rename the LLD we provide to `rust-lld`. 
- let libdir = builder.sysroot_libdir(target_compiler, target_compiler.host); - let libdir_bin = libdir.parent().unwrap().join("bin"); - t!(fs::create_dir_all(&libdir_bin)); - if let Some(lld_install) = lld_install { - let src_exe = exe("lld", target_compiler.host); - let dst_exe = exe("rust-lld", target_compiler.host); - builder.copy(&lld_install.join("bin").join(&src_exe), &libdir_bin.join(&dst_exe)); - // for `-Z gcc-ld=lld` - let gcc_ld_dir = libdir_bin.join("gcc-ld"); - t!(fs::create_dir(&gcc_ld_dir)); - let lld_wrapper_exe = builder.ensure(crate::tool::LldWrapper { - compiler: build_compiler, - target: target_compiler.host, - }); - for name in crate::LLD_FILE_NAMES { - builder.copy(&lld_wrapper_exe, &gcc_ld_dir.join(exe(name, target_compiler.host))); - } - } - - if builder.config.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) { - let llvm::LlvmResult { llvm_config, .. } = - builder.ensure(llvm::Llvm { target: target_compiler.host }); - if !builder.config.dry_run() { - let llvm_bin_dir = output(Command::new(llvm_config).arg("--bindir")); - let llvm_bin_dir = Path::new(llvm_bin_dir.trim()); - - // Since we've already built the LLVM tools, install them to the sysroot. - // This is the equivalent of installing the `llvm-tools-preview` component via - // rustup, and lets developers use a locally built toolchain to - // build projects that expect llvm tools to be present in the sysroot - // (e.g. the `bootimage` crate). - for tool in LLVM_TOOLS { - let tool_exe = exe(tool, target_compiler.host); - let src_path = llvm_bin_dir.join(&tool_exe); - // When using `download-ci-llvm`, some of the tools - // may not exist, so skip trying to copy them. - if src_path.exists() { - builder.copy(&src_path, &libdir_bin.join(&tool_exe)); - } - } - } - } - - // Ensure that `libLLVM.so` ends up in the newly build compiler directory, - // so that it can be found when the newly built `rustc` is run. - dist::maybe_install_llvm_runtime(builder, target_compiler.host, &sysroot); - dist::maybe_install_llvm_target(builder, target_compiler.host, &sysroot); - - // Link the compiler binary itself into place - let out_dir = builder.cargo_out(build_compiler, Mode::Rustc, host); - let rustc = out_dir.join(exe("rustc-main", host)); - let bindir = sysroot.join("bin"); - t!(fs::create_dir_all(&bindir)); - let compiler = builder.rustc(target_compiler); - builder.copy(&rustc, &compiler); - - target_compiler - } -} - -/// Link some files into a rustc sysroot. -/// -/// For a particular stage this will link the file listed in `stamp` into the -/// `sysroot_dst` provided. 
-pub fn add_to_sysroot( - builder: &Builder<'_>, - sysroot_dst: &Path, - sysroot_host_dst: &Path, - stamp: &Path, -) { - let self_contained_dst = &sysroot_dst.join("self-contained"); - t!(fs::create_dir_all(&sysroot_dst)); - t!(fs::create_dir_all(&sysroot_host_dst)); - t!(fs::create_dir_all(&self_contained_dst)); - for (path, dependency_type) in builder.read_stamp_file(stamp) { - let dst = match dependency_type { - DependencyType::Host => sysroot_host_dst, - DependencyType::Target => sysroot_dst, - DependencyType::TargetSelfContained => self_contained_dst, - }; - builder.copy(&path, &dst.join(path.file_name().unwrap())); - } -} - -pub fn run_cargo( - builder: &Builder<'_>, - cargo: Cargo, - tail_args: Vec, - stamp: &Path, - additional_target_deps: Vec<(PathBuf, DependencyType)>, - is_check: bool, - rlib_only_metadata: bool, -) -> Vec { - if builder.config.dry_run() { - return Vec::new(); - } - - // `target_root_dir` looks like $dir/$target/release - let target_root_dir = stamp.parent().unwrap(); - // `target_deps_dir` looks like $dir/$target/release/deps - let target_deps_dir = target_root_dir.join("deps"); - // `host_root_dir` looks like $dir/release - let host_root_dir = target_root_dir - .parent() - .unwrap() // chop off `release` - .parent() - .unwrap() // chop off `$target` - .join(target_root_dir.file_name().unwrap()); - - // Spawn Cargo slurping up its JSON output. We'll start building up the - // `deps` array of all files it generated along with a `toplevel` array of - // files we need to probe for later. - let mut deps = Vec::new(); - let mut toplevel = Vec::new(); - let ok = stream_cargo(builder, cargo, tail_args, &mut |msg| { - let (filenames, crate_types) = match msg { - CargoMessage::CompilerArtifact { - filenames, - target: CargoTarget { crate_types }, - .. - } => (filenames, crate_types), - _ => return, - }; - for filename in filenames { - // Skip files like executables - let mut keep = false; - if filename.ends_with(".lib") - || filename.ends_with(".a") - || is_debug_info(&filename) - || is_dylib(&filename) - { - // Always keep native libraries, rust dylibs and debuginfo - keep = true; - } - if is_check && filename.ends_with(".rmeta") { - // During check builds we need to keep crate metadata - keep = true; - } else if rlib_only_metadata { - if filename.contains("jemalloc_sys") - || filename.contains("rustc_smir") - || filename.contains("stable_mir") - { - // jemalloc_sys and rustc_smir are not linked into librustc_driver.so, - // so we need to distribute them as rlib to be able to use them. - keep |= filename.ends_with(".rlib"); - } else { - // Distribute the rest of the rustc crates as rmeta files only to reduce - // the tarball sizes by about 50%. The object files are linked into - // librustc_driver.so, so it is still possible to link against them. - keep |= filename.ends_with(".rmeta"); - } - } else { - // In all other cases keep all rlibs - keep |= filename.ends_with(".rlib"); - } - - if !keep { - continue; - } - - let filename = Path::new(&*filename); - - // If this was an output file in the "host dir" we don't actually - // worry about it, it's not relevant for us - if filename.starts_with(&host_root_dir) { - // Unless it's a proc macro used in the compiler - if crate_types.iter().any(|t| t == "proc-macro") { - deps.push((filename.to_path_buf(), DependencyType::Host)); - } - continue; - } - - // If this was output in the `deps` dir then this is a precise file - // name (hash included) so we start tracking it. 
- if filename.starts_with(&target_deps_dir) { - deps.push((filename.to_path_buf(), DependencyType::Target)); - continue; - } - - // Otherwise this was a "top level artifact" which right now doesn't - // have a hash in the name, but there's a version of this file in - // the `deps` folder which *does* have a hash in the name. That's - // the one we'll want to we'll probe for it later. - // - // We do not use `Path::file_stem` or `Path::extension` here, - // because some generated files may have multiple extensions e.g. - // `std-.dll.lib` on Windows. The aforementioned methods only - // split the file name by the last extension (`.lib`) while we need - // to split by all extensions (`.dll.lib`). - let expected_len = t!(filename.metadata()).len(); - let filename = filename.file_name().unwrap().to_str().unwrap(); - let mut parts = filename.splitn(2, '.'); - let file_stem = parts.next().unwrap().to_owned(); - let extension = parts.next().unwrap().to_owned(); - - toplevel.push((file_stem, extension, expected_len)); - } - }); - - if !ok { - crate::exit!(1); - } - - // Ok now we need to actually find all the files listed in `toplevel`. We've - // got a list of prefix/extensions and we basically just need to find the - // most recent file in the `deps` folder corresponding to each one. - let contents = t!(target_deps_dir.read_dir()) - .map(|e| t!(e)) - .map(|e| (e.path(), e.file_name().into_string().unwrap(), t!(e.metadata()))) - .collect::>(); - for (prefix, extension, expected_len) in toplevel { - let candidates = contents.iter().filter(|&&(_, ref filename, ref meta)| { - meta.len() == expected_len - && filename - .strip_prefix(&prefix[..]) - .map(|s| s.starts_with('-') && s.ends_with(&extension[..])) - .unwrap_or(false) - }); - let max = candidates.max_by_key(|&&(_, _, ref metadata)| { - metadata.modified().expect("mtime should be available on all relevant OSes") - }); - let path_to_add = match max { - Some(triple) => triple.0.to_str().unwrap(), - None => panic!("no output generated for {prefix:?} {extension:?}"), - }; - if is_dylib(path_to_add) { - let candidate = format!("{path_to_add}.lib"); - let candidate = PathBuf::from(candidate); - if candidate.exists() { - deps.push((candidate, DependencyType::Target)); - } - } - deps.push((path_to_add.into(), DependencyType::Target)); - } - - deps.extend(additional_target_deps); - deps.sort(); - let mut new_contents = Vec::new(); - for (dep, dependency_type) in deps.iter() { - new_contents.extend(match *dependency_type { - DependencyType::Host => b"h", - DependencyType::Target => b"t", - DependencyType::TargetSelfContained => b"s", - }); - new_contents.extend(dep.to_str().unwrap().as_bytes()); - new_contents.extend(b"\0"); - } - t!(fs::write(&stamp, &new_contents)); - deps.into_iter().map(|(d, _)| d).collect() -} - -pub fn stream_cargo( - builder: &Builder<'_>, - cargo: Cargo, - tail_args: Vec, - cb: &mut dyn FnMut(CargoMessage<'_>), -) -> bool { - let mut cargo = Command::from(cargo); - if builder.config.dry_run() { - return true; - } - // Instruct Cargo to give us json messages on stdout, critically leaving - // stderr as piped so we can get those pretty colors. 
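// Illustrative sketch (not part of the patch): one line of the JSON stream that
// `stream_cargo` reads and feeds into the `CargoMessage` enum defined later in this
// file. The field names come from Cargo's `--message-format=json` output; the path and
// package values here are invented for the example.
fn parse_artifact_example() {
    let line = r#"{"reason":"compiler-artifact","package_id":"rustc_driver 0.0.0","features":[],"filenames":["/tmp/librustc_driver-0123.so"],"target":{"crate_types":["dylib"]}}"#;
    let msg: serde_json::Value = serde_json::from_str(line).unwrap();
    assert_eq!(msg["reason"], "compiler-artifact");
    // run_cargo() keeps an artifact like this one because the filename is a dylib.
    assert!(msg["filenames"][0].as_str().unwrap().ends_with(".so"));
}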
- let mut message_format = if builder.config.json_output { - String::from("json") - } else { - String::from("json-render-diagnostics") - }; - if let Some(s) = &builder.config.rustc_error_format { - message_format.push_str(",json-diagnostic-"); - message_format.push_str(s); - } - cargo.arg("--message-format").arg(message_format).stdout(Stdio::piped()); - - for arg in tail_args { - cargo.arg(arg); - } - - builder.verbose(&format!("running: {cargo:?}")); - let mut child = match cargo.spawn() { - Ok(child) => child, - Err(e) => panic!("failed to execute command: {cargo:?}\nerror: {e}"), - }; - - // Spawn Cargo slurping up its JSON output. We'll start building up the - // `deps` array of all files it generated along with a `toplevel` array of - // files we need to probe for later. - let stdout = BufReader::new(child.stdout.take().unwrap()); - for line in stdout.lines() { - let line = t!(line); - match serde_json::from_str::>(&line) { - Ok(msg) => { - if builder.config.json_output { - // Forward JSON to stdout. - println!("{line}"); - } - cb(msg) - } - // If this was informational, just print it out and continue - Err(_) => println!("{line}"), - } - } - - // Make sure Cargo actually succeeded after we read all of its stdout. - let status = t!(child.wait()); - if builder.is_verbose() && !status.success() { - eprintln!( - "command did not execute successfully: {cargo:?}\n\ - expected success, got: {status}" - ); - } - status.success() -} - -#[derive(Deserialize)] -pub struct CargoTarget<'a> { - crate_types: Vec>, -} - -#[derive(Deserialize)] -#[serde(tag = "reason", rename_all = "kebab-case")] -pub enum CargoMessage<'a> { - CompilerArtifact { - package_id: Cow<'a, str>, - features: Vec>, - filenames: Vec>, - target: CargoTarget<'a>, - }, - BuildScriptExecuted { - package_id: Cow<'a, str>, - }, - BuildFinished { - success: bool, - }, -} - -pub fn strip_debug(builder: &Builder<'_>, target: TargetSelection, path: &Path) { - // FIXME: to make things simpler for now, limit this to the host and target where we know - // `strip -g` is both available and will fix the issue, i.e. on a x64 linux host that is not - // cross-compiling. Expand this to other appropriate targets in the future. - if target != "x86_64-unknown-linux-gnu" || target != builder.config.build || !path.exists() { - return; - } - - let previous_mtime = FileTime::from_last_modification_time(&path.metadata().unwrap()); - // Note: `output` will propagate any errors here. - output(Command::new("strip").arg("--strip-debug").arg(path)); - - // After running `strip`, we have to set the file modification time to what it was before, - // otherwise we risk Cargo invalidating its fingerprint and rebuilding the world next time - // bootstrap is invoked. - // - // An example of this is if we run this on librustc_driver.so. In the first invocation: - // - Cargo will build librustc_driver.so (mtime of 1) - // - Cargo will build rustc-main (mtime of 2) - // - Bootstrap will strip librustc_driver.so (changing the mtime to 3). - // - // In the second invocation of bootstrap, Cargo will see that the mtime of librustc_driver.so - // is greater than the mtime of rustc-main, and will rebuild rustc-main. That will then cause - // everything else (standard library, future stages...) to be rebuilt. 
- t!(filetime::set_file_mtime(path, previous_mtime)); -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/config/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/config/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/config/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/config/tests.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,198 +0,0 @@ -use crate::config::TomlConfig; - -use super::{Config, Flags}; -use clap::CommandFactory; -use serde::Deserialize; -use std::{env, path::Path}; - -fn parse(config: &str) -> Config { - Config::parse_inner(&["check".to_owned(), "--config=/does/not/exist".to_owned()], |&_| { - toml::from_str(config).unwrap() - }) -} - -#[test] -fn download_ci_llvm() { - if crate::llvm::is_ci_llvm_modified(&parse("")) { - eprintln!("Detected LLVM as non-available: running in CI and modified LLVM in this change"); - return; - } - - let parse_llvm = |s| parse(s).llvm_from_ci; - let if_available = parse_llvm("llvm.download-ci-llvm = \"if-available\""); - - assert!(parse_llvm("llvm.download-ci-llvm = true")); - assert!(!parse_llvm("llvm.download-ci-llvm = false")); - assert_eq!(parse_llvm(""), if_available); - assert_eq!(parse_llvm("rust.channel = \"dev\""), if_available); - assert!(!parse_llvm("rust.channel = \"stable\"")); - assert!(parse_llvm("build.build = \"x86_64-unknown-linux-gnu\"")); - assert!(parse_llvm( - "llvm.assertions = true \r\n build.build = \"x86_64-unknown-linux-gnu\" \r\n llvm.download-ci-llvm = \"if-available\"" - )); - assert!(!parse_llvm( - "llvm.assertions = true \r\n build.build = \"aarch64-apple-darwin\" \r\n llvm.download-ci-llvm = \"if-available\"" - )); -} - -// FIXME(onur-ozkan): extend scope of the test -// refs: -// - https://github.com/rust-lang/rust/issues/109120 -// - https://github.com/rust-lang/rust/pull/109162#issuecomment-1496782487 -#[test] -fn detect_src_and_out() { - fn test(cfg: Config, build_dir: Option<&str>) { - // This will bring absolute form of `src/bootstrap` path - let current_dir = std::env::current_dir().unwrap(); - - // get `src` by moving into project root path - let expected_src = current_dir.ancestors().nth(2).unwrap(); - assert_eq!(&cfg.src, expected_src); - - // Sanity check for `src` - let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); - let expected_src = manifest_dir.ancestors().nth(2).unwrap(); - assert_eq!(&cfg.src, expected_src); - - // test if build-dir was manually given in config.toml - if let Some(custom_build_dir) = build_dir { - assert_eq!(&cfg.out, Path::new(custom_build_dir)); - } - // test the native bootstrap way - else { - // This should bring output path of bootstrap in absolute form - let cargo_target_dir = env::var_os("CARGO_TARGET_DIR").expect( - "CARGO_TARGET_DIR must been provided for the test environment from bootstrap", - ); - - // Move to `build` from `build/bootstrap` - let expected_out = Path::new(&cargo_target_dir).parent().unwrap(); - assert_eq!(&cfg.out, expected_out); - - let args: Vec = env::args().collect(); - - // Another test for `out` as a sanity check - // - // This will bring something similar to: - // `{build-dir}/bootstrap/debug/deps/bootstrap-c7ee91d5661e2804` - // `{build-dir}` can be anywhere, not just in the rust project directory. 
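// Worked example (illustrative, not part of the patch) of the `ancestors().nth(4)` step
// used just below: `Path::ancestors()` yields the path itself first and then each parent
// in turn, so four steps up from the test binary lands on the build directory. The
// concrete prefix `/tmp/build` is made up for the example.
fn ancestors_example() {
    use std::path::Path;
    let dep = Path::new("/tmp/build/bootstrap/debug/deps/bootstrap-c7ee91d5661e2804");
    assert_eq!(dep.ancestors().nth(4).unwrap(), Path::new("/tmp/build"));
}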
- let dep = Path::new(args.first().unwrap()); - let expected_out = dep.ancestors().nth(4).unwrap(); - - assert_eq!(&cfg.out, expected_out); - } - } - - test(parse(""), None); - - { - let build_dir = if cfg!(windows) { Some("C:\\tmp") } else { Some("/tmp") }; - test(parse("build.build-dir = \"/tmp\""), build_dir); - } -} - -#[test] -fn clap_verify() { - Flags::command().debug_assert(); -} - -#[test] -fn override_toml() { - let config = Config::parse_inner( - &[ - "check".to_owned(), - "--config=/does/not/exist".to_owned(), - "--set=changelog-seen=1".to_owned(), - "--set=rust.lto=fat".to_owned(), - "--set=rust.deny-warnings=false".to_owned(), - "--set=build.gdb=\"bar\"".to_owned(), - "--set=build.tools=[\"cargo\"]".to_owned(), - "--set=llvm.build-config={\"foo\" = \"bar\"}".to_owned(), - ], - |&_| { - toml::from_str( - r#" -changelog-seen = 0 -[rust] -lto = "off" -deny-warnings = true - -[build] -gdb = "foo" -tools = [] - -[llvm] -download-ci-llvm = false -build-config = {} - "#, - ) - .unwrap() - }, - ); - assert_eq!(config.changelog_seen, Some(1), "setting top-level value"); - assert_eq!( - config.rust_lto, - crate::config::RustcLto::Fat, - "setting string value without quotes" - ); - assert_eq!(config.gdb, Some("bar".into()), "setting string value with quotes"); - assert!(!config.deny_warnings, "setting boolean value"); - assert_eq!( - config.tools, - Some(["cargo".to_string()].into_iter().collect()), - "setting list value" - ); - assert_eq!( - config.llvm_build_config, - [("foo".to_string(), "bar".to_string())].into_iter().collect(), - "setting dictionary value" - ); -} - -#[test] -#[should_panic] -fn override_toml_duplicate() { - Config::parse_inner( - &[ - "check".to_owned(), - "--config=/does/not/exist".to_owned(), - "--set=changelog-seen=1".to_owned(), - "--set=changelog-seen=2".to_owned(), - ], - |&_| toml::from_str("changelog-seen = 0").unwrap(), - ); -} - -#[test] -fn profile_user_dist() { - fn get_toml(file: &Path) -> TomlConfig { - let contents = if file.ends_with("config.toml") { - "profile = \"user\"".to_owned() - } else { - assert!(file.ends_with("config.dist.toml")); - std::fs::read_to_string(dbg!(file)).unwrap() - }; - toml::from_str(&contents) - .and_then(|table: toml::Value| TomlConfig::deserialize(table)) - .unwrap() - } - Config::parse_inner(&["check".to_owned()], get_toml); -} - -#[test] -fn rust_optimize() { - assert!(parse("").rust_optimize.is_release()); - assert!(!parse("rust.optimize = false").rust_optimize.is_release()); - assert!(parse("rust.optimize = true").rust_optimize.is_release()); - assert!(!parse("rust.optimize = 0").rust_optimize.is_release()); - assert!(parse("rust.optimize = 1").rust_optimize.is_release()); - assert!(parse("rust.optimize = \"s\"").rust_optimize.is_release()); - assert_eq!(parse("rust.optimize = 1").rust_optimize.get_opt_level(), Some("1".to_string())); - assert_eq!(parse("rust.optimize = \"s\"").rust_optimize.get_opt_level(), Some("s".to_string())); -} - -#[test] -#[should_panic] -fn invalid_rust_optimize() { - parse("rust.optimize = \"a\""); -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/config.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/config.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/config.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/config.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2104 +0,0 @@ -//! Serialized configuration of a build. -//! -//! This module implements parsing `config.toml` configuration files to tweak -//! how the build runs. 
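// Illustrative sketch (not part of the patch) of the parsing approach this module uses:
// TOML text is deserialized with serde into nested, option-heavy structs and then merged
// into the flat `Config`. The struct below is a toy stand-in, not the real `TomlConfig`
// defined further down; it only mirrors the kebab-case / deny_unknown_fields conventions.
fn toml_parse_sketch() {
    use serde_derive::Deserialize;

    #[derive(Deserialize, Default, Debug)]
    #[serde(deny_unknown_fields, rename_all = "kebab-case")]
    struct Toy {
        changelog_seen: Option<usize>,
    }

    let toy: Toy = toml::from_str("changelog-seen = 2").unwrap();
    assert_eq!(toy.changelog_seen, Some(2));
}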
- -#[cfg(test)] -mod tests; - -use std::cell::{Cell, RefCell}; -use std::cmp; -use std::collections::{HashMap, HashSet}; -use std::env; -use std::fmt::{self, Display}; -use std::fs; -use std::io::IsTerminal; -use std::path::{Path, PathBuf}; -use std::process::Command; -use std::str::FromStr; - -use crate::cache::{Interned, INTERNER}; -use crate::cc_detect::{ndk_compiler, Language}; -use crate::channel::{self, GitInfo}; -use crate::compile::CODEGEN_BACKEND_PREFIX; -pub use crate::flags::Subcommand; -use crate::flags::{Color, Flags, Warnings}; -use crate::util::{exe, output, t}; -use build_helper::exit; -use once_cell::sync::OnceCell; -use semver::Version; -use serde::{Deserialize, Deserializer}; -use serde_derive::Deserialize; - -macro_rules! check_ci_llvm { - ($name:expr) => { - assert!( - $name.is_none(), - "setting {} is incompatible with download-ci-llvm.", - stringify!($name) - ); - }; -} - -#[derive(Clone, Default)] -pub enum DryRun { - /// This isn't a dry run. - #[default] - Disabled, - /// This is a dry run enabled by bootstrap itself, so it can verify that no work is done. - SelfCheck, - /// This is a dry run enabled by the `--dry-run` flag. - UserSelected, -} - -#[derive(Copy, Clone, Default, PartialEq, Eq)] -pub enum DebuginfoLevel { - #[default] - None, - LineTablesOnly, - Limited, - Full, -} - -// NOTE: can't derive(Deserialize) because the intermediate trip through toml::Value only -// deserializes i64, and derive() only generates visit_u64 -impl<'de> Deserialize<'de> for DebuginfoLevel { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - use serde::de::Error; - - Ok(match Deserialize::deserialize(deserializer)? { - StringOrInt::String("none") | StringOrInt::Int(0) => DebuginfoLevel::None, - StringOrInt::String("line-tables-only") => DebuginfoLevel::LineTablesOnly, - StringOrInt::String("limited") | StringOrInt::Int(1) => DebuginfoLevel::Limited, - StringOrInt::String("full") | StringOrInt::Int(2) => DebuginfoLevel::Full, - StringOrInt::Int(n) => { - let other = serde::de::Unexpected::Signed(n); - return Err(D::Error::invalid_value(other, &"expected 0, 1, or 2")); - } - StringOrInt::String(s) => { - let other = serde::de::Unexpected::Str(s); - return Err(D::Error::invalid_value( - other, - &"expected none, line-tables-only, limited, or full", - )); - } - }) - } -} - -/// Suitable for passing to `-C debuginfo` -impl Display for DebuginfoLevel { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - use DebuginfoLevel::*; - f.write_str(match self { - None => "0", - LineTablesOnly => "line-tables-only", - Limited => "1", - Full => "2", - }) - } -} - -/// Global configuration for the entire build and/or bootstrap. -/// -/// This structure is parsed from `config.toml`, and some of the fields are inferred from `git` or build-time parameters. -/// -/// Note that this structure is not decoded directly into, but rather it is -/// filled out from the decoded forms of the structs below. For documentation -/// each field, see the corresponding fields in -/// `config.example.toml`. -#[derive(Default, Clone)] -pub struct Config { - pub changelog_seen: Option, - pub ccache: Option, - /// Call Build::ninja() instead of this. 
- pub ninja_in_file: bool, - pub verbose: usize, - pub submodules: Option, - pub compiler_docs: bool, - pub library_docs_private_items: bool, - pub docs_minification: bool, - pub docs: bool, - pub locked_deps: bool, - pub vendor: bool, - pub target_config: HashMap, - pub full_bootstrap: bool, - pub extended: bool, - pub tools: Option>, - pub sanitizers: bool, - pub profiler: bool, - pub omit_git_hash: bool, - pub skip: Vec, - pub include_default_paths: bool, - pub rustc_error_format: Option, - pub json_output: bool, - pub test_compare_mode: bool, - pub color: Color, - pub patch_binaries_for_nix: Option, - pub stage0_metadata: Stage0Metadata, - - pub stdout_is_tty: bool, - pub stderr_is_tty: bool, - - pub on_fail: Option, - pub stage: u32, - pub keep_stage: Vec, - pub keep_stage_std: Vec, - pub src: PathBuf, - /// defaults to `config.toml` - pub config: Option, - pub jobs: Option, - pub cmd: Subcommand, - pub incremental: bool, - pub dry_run: DryRun, - /// Arguments appearing after `--` to be forwarded to tools, - /// e.g. `--fix-broken` or test arguments. - pub free_args: Vec, - - /// `None` if we shouldn't download CI compiler artifacts, or the commit to download if we should. - #[cfg(not(test))] - download_rustc_commit: Option, - #[cfg(test)] - pub download_rustc_commit: Option, - - pub deny_warnings: bool, - pub backtrace_on_ice: bool, - - // llvm codegen options - pub llvm_assertions: bool, - pub llvm_tests: bool, - pub llvm_plugins: bool, - pub llvm_optimize: bool, - pub llvm_thin_lto: bool, - pub llvm_release_debuginfo: bool, - pub llvm_static_stdcpp: bool, - /// `None` if `llvm_from_ci` is true and we haven't yet downloaded llvm. - #[cfg(not(test))] - llvm_link_shared: Cell>, - #[cfg(test)] - pub llvm_link_shared: Cell>, - pub llvm_clang_cl: Option, - pub llvm_targets: Option, - pub llvm_experimental_targets: Option, - pub llvm_link_jobs: Option, - pub llvm_version_suffix: Option, - pub llvm_use_linker: Option, - pub llvm_allow_old_toolchain: bool, - pub llvm_polly: bool, - pub llvm_clang: bool, - pub llvm_enable_warnings: bool, - pub llvm_from_ci: bool, - pub llvm_build_config: HashMap, - - pub use_lld: bool, - pub lld_enabled: bool, - pub llvm_tools_enabled: bool, - - pub llvm_cflags: Option, - pub llvm_cxxflags: Option, - pub llvm_ldflags: Option, - pub llvm_use_libcxx: bool, - - // rust codegen options - pub rust_optimize: RustOptimize, - pub rust_codegen_units: Option, - pub rust_codegen_units_std: Option, - pub rust_debug_assertions: bool, - pub rust_debug_assertions_std: bool, - pub rust_overflow_checks: bool, - pub rust_overflow_checks_std: bool, - pub rust_debug_logging: bool, - pub rust_debuginfo_level_rustc: DebuginfoLevel, - pub rust_debuginfo_level_std: DebuginfoLevel, - pub rust_debuginfo_level_tools: DebuginfoLevel, - pub rust_debuginfo_level_tests: DebuginfoLevel, - pub rust_split_debuginfo: SplitDebuginfo, - pub rust_rpath: bool, - pub rustc_parallel: bool, - pub rustc_default_linker: Option, - pub rust_optimize_tests: bool, - pub rust_dist_src: bool, - pub rust_codegen_backends: Vec>, - pub rust_verify_llvm_ir: bool, - pub rust_thin_lto_import_instr_limit: Option, - pub rust_remap_debuginfo: bool, - pub rust_new_symbol_mangling: Option, - pub rust_profile_use: Option, - pub rust_profile_generate: Option, - pub rust_lto: RustcLto, - pub rust_validate_mir_opts: Option, - pub llvm_profile_use: Option, - pub llvm_profile_generate: bool, - pub llvm_libunwind_default: Option, - - pub reproducible_artifacts: Vec, - - pub build: TargetSelection, - pub hosts: Vec, - pub 
targets: Vec, - pub local_rebuild: bool, - pub jemalloc: bool, - pub control_flow_guard: bool, - - // dist misc - pub dist_sign_folder: Option, - pub dist_upload_addr: Option, - pub dist_compression_formats: Option>, - pub dist_compression_profile: String, - pub dist_include_mingw_linker: bool, - - // libstd features - pub backtrace: bool, // support for RUST_BACKTRACE - - // misc - pub low_priority: bool, - pub channel: String, - pub description: Option, - pub verbose_tests: bool, - pub save_toolstates: Option, - pub print_step_timings: bool, - pub print_step_rusage: bool, - pub missing_tools: bool, - - // Fallback musl-root for all targets - pub musl_root: Option, - pub prefix: Option, - pub sysconfdir: Option, - pub datadir: Option, - pub docdir: Option, - pub bindir: PathBuf, - pub libdir: Option, - pub mandir: Option, - pub codegen_tests: bool, - pub nodejs: Option, - pub npm: Option, - pub gdb: Option, - pub python: Option, - pub reuse: Option, - pub cargo_native_static: bool, - pub configure_args: Vec, - pub out: PathBuf, - pub rust_info: channel::GitInfo, - - // These are either the stage0 downloaded binaries or the locally installed ones. - pub initial_cargo: PathBuf, - pub initial_rustc: PathBuf, - - #[cfg(not(test))] - initial_rustfmt: RefCell, - #[cfg(test)] - pub initial_rustfmt: RefCell, - - pub paths: Vec, -} - -#[derive(Default, Deserialize, Clone)] -pub struct Stage0Metadata { - pub compiler: CompilerMetadata, - pub config: Stage0Config, - pub checksums_sha256: HashMap, - pub rustfmt: Option, -} -#[derive(Default, Deserialize, Clone)] -pub struct CompilerMetadata { - pub date: String, - pub version: String, -} - -#[derive(Default, Deserialize, Clone)] -pub struct Stage0Config { - pub dist_server: String, - pub artifacts_server: String, - pub artifacts_with_llvm_assertions_server: String, - pub git_merge_commit_email: String, - pub nightly_branch: String, -} -#[derive(Default, Deserialize, Clone)] -pub struct RustfmtMetadata { - pub date: String, - pub version: String, -} - -#[derive(Clone, Debug, Default)] -pub enum RustfmtState { - SystemToolchain(PathBuf), - Downloaded(PathBuf), - Unavailable, - #[default] - LazyEvaluated, -} - -#[derive(Debug, Default, Clone, Copy, PartialEq)] -pub enum LlvmLibunwind { - #[default] - No, - InTree, - System, -} - -impl FromStr for LlvmLibunwind { - type Err = String; - - fn from_str(value: &str) -> Result { - match value { - "no" => Ok(Self::No), - "in-tree" => Ok(Self::InTree), - "system" => Ok(Self::System), - invalid => Err(format!("Invalid value '{invalid}' for rust.llvm-libunwind config.")), - } - } -} - -#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum SplitDebuginfo { - Packed, - Unpacked, - #[default] - Off, -} - -impl std::str::FromStr for SplitDebuginfo { - type Err = (); - - fn from_str(s: &str) -> Result { - match s { - "packed" => Ok(SplitDebuginfo::Packed), - "unpacked" => Ok(SplitDebuginfo::Unpacked), - "off" => Ok(SplitDebuginfo::Off), - _ => Err(()), - } - } -} - -impl SplitDebuginfo { - /// Returns the default `-Csplit-debuginfo` value for the current target. See the comment for - /// `rust.split-debuginfo` in `config.example.toml`. - fn default_for_platform(target: &str) -> Self { - if target.contains("apple") { - SplitDebuginfo::Unpacked - } else if target.contains("windows") { - SplitDebuginfo::Packed - } else { - SplitDebuginfo::Off - } - } -} - -/// LTO mode used for compiling rustc itself. 
-#[derive(Default, Clone, PartialEq, Debug)] -pub enum RustcLto { - Off, - #[default] - ThinLocal, - Thin, - Fat, -} - -impl std::str::FromStr for RustcLto { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "thin-local" => Ok(RustcLto::ThinLocal), - "thin" => Ok(RustcLto::Thin), - "fat" => Ok(RustcLto::Fat), - "off" => Ok(RustcLto::Off), - _ => Err(format!("Invalid value for rustc LTO: {s}")), - } - } -} - -#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct TargetSelection { - pub triple: Interned, - file: Option>, - synthetic: bool, -} - -/// Newtype over `Vec` so we can implement custom parsing logic -#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] -pub struct TargetSelectionList(Vec); - -pub fn target_selection_list(s: &str) -> Result { - Ok(TargetSelectionList( - s.split(",").filter(|s| !s.is_empty()).map(TargetSelection::from_user).collect(), - )) -} - -impl TargetSelection { - pub fn from_user(selection: &str) -> Self { - let path = Path::new(selection); - - let (triple, file) = if path.exists() { - let triple = path - .file_stem() - .expect("Target specification file has no file stem") - .to_str() - .expect("Target specification file stem is not UTF-8"); - - (triple, Some(selection)) - } else { - (selection, None) - }; - - let triple = INTERNER.intern_str(triple); - let file = file.map(|f| INTERNER.intern_str(f)); - - Self { triple, file, synthetic: false } - } - - pub fn create_synthetic(triple: &str, file: &str) -> Self { - Self { - triple: INTERNER.intern_str(triple), - file: Some(INTERNER.intern_str(file)), - synthetic: true, - } - } - - pub fn rustc_target_arg(&self) -> &str { - self.file.as_ref().unwrap_or(&self.triple) - } - - pub fn contains(&self, needle: &str) -> bool { - self.triple.contains(needle) - } - - pub fn starts_with(&self, needle: &str) -> bool { - self.triple.starts_with(needle) - } - - pub fn ends_with(&self, needle: &str) -> bool { - self.triple.ends_with(needle) - } - - // See src/bootstrap/synthetic_targets.rs - pub fn is_synthetic(&self) -> bool { - self.synthetic - } -} - -impl fmt::Display for TargetSelection { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.triple)?; - if let Some(file) = self.file { - write!(f, "({file})")?; - } - Ok(()) - } -} - -impl fmt::Debug for TargetSelection { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{self}") - } -} - -impl PartialEq<&str> for TargetSelection { - fn eq(&self, other: &&str) -> bool { - self.triple == *other - } -} - -/// Per-target configuration stored in the global configuration structure. -#[derive(Default, Clone)] -pub struct Target { - /// Some(path to llvm-config) if using an external LLVM. - pub llvm_config: Option, - pub llvm_has_rust_patches: Option, - /// Some(path to FileCheck) if one was specified. 
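`TargetSelection::from_user` above accepts either a plain target triple or a path to a custom target-spec JSON file. The following is a minimal sketch of that distinction; the function name and example path are illustrative, not part of bootstrap:

use std::path::Path;

fn triple_for(selection: &str) -> String {
    let path = Path::new(selection);
    if path.exists() {
        // e.g. "specs/my-target.json" yields the triple "my-target",
        // while the full path is what gets passed to rustc via --target.
        path.file_stem()
            .expect("target specification file has no file stem")
            .to_string_lossy()
            .into_owned()
    } else {
        // Not a file on disk: treat the string as the triple itself.
        selection.to_owned()
    }
}

fn main() {
    // Assumes no file named after the triple exists in the current directory.
    assert_eq!(triple_for("x86_64-unknown-linux-gnu"), "x86_64-unknown-linux-gnu");
}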
- pub llvm_filecheck: Option, - pub llvm_libunwind: Option, - pub cc: Option, - pub cxx: Option, - pub ar: Option, - pub ranlib: Option, - pub default_linker: Option, - pub linker: Option, - pub ndk: Option, - pub sanitizers: Option, - pub profiler: Option, - pub rpath: Option, - pub crt_static: Option, - pub musl_root: Option, - pub musl_libdir: Option, - pub wasi_root: Option, - pub qemu_rootfs: Option, - pub no_std: bool, -} - -impl Target { - pub fn from_triple(triple: &str) -> Self { - let mut target: Self = Default::default(); - if triple.contains("-none") || triple.contains("nvptx") || triple.contains("switch") { - target.no_std = true; - } - target - } -} -/// Structure of the `config.toml` file that configuration is read from. -/// -/// This structure uses `Decodable` to automatically decode a TOML configuration -/// file into this format, and then this is traversed and written into the above -/// `Config` structure. -#[derive(Deserialize, Default)] -#[serde(deny_unknown_fields, rename_all = "kebab-case")] -struct TomlConfig { - changelog_seen: Option, - build: Option, - install: Option, - llvm: Option, - rust: Option, - target: Option>, - dist: Option, - profile: Option, -} - -/// Describes how to handle conflicts in merging two [`TomlConfig`] -#[derive(Copy, Clone, Debug)] -enum ReplaceOpt { - /// Silently ignore a duplicated value - IgnoreDuplicate, - /// Override the current value, even if it's `Some` - Override, - /// Exit with an error on duplicate values - ErrorOnDuplicate, -} - -trait Merge { - fn merge(&mut self, other: Self, replace: ReplaceOpt); -} - -impl Merge for TomlConfig { - fn merge( - &mut self, - TomlConfig { build, install, llvm, rust, dist, target, profile: _, changelog_seen }: Self, - replace: ReplaceOpt, - ) { - fn do_merge(x: &mut Option, y: Option, replace: ReplaceOpt) { - if let Some(new) = y { - if let Some(original) = x { - original.merge(new, replace); - } else { - *x = Some(new); - } - } - } - self.changelog_seen.merge(changelog_seen, replace); - do_merge(&mut self.build, build, replace); - do_merge(&mut self.install, install, replace); - do_merge(&mut self.llvm, llvm, replace); - do_merge(&mut self.rust, rust, replace); - do_merge(&mut self.dist, dist, replace); - assert!(target.is_none(), "merging target-specific config is not currently supported"); - } -} - -// We are using a decl macro instead of a derive proc macro here to reduce the compile time of -// rustbuild. -macro_rules! define_config { - ($(#[$attr:meta])* struct $name:ident { - $($field:ident: Option<$field_ty:ty> = $field_key:literal,)* - }) => { - $(#[$attr])* - struct $name { - $($field: Option<$field_ty>,)* - } - - impl Merge for $name { - fn merge(&mut self, other: Self, replace: ReplaceOpt) { - $( - match replace { - ReplaceOpt::IgnoreDuplicate => { - if self.$field.is_none() { - self.$field = other.$field; - } - }, - ReplaceOpt::Override => { - if other.$field.is_some() { - self.$field = other.$field; - } - } - ReplaceOpt::ErrorOnDuplicate => { - if other.$field.is_some() { - if self.$field.is_some() { - if cfg!(test) { - panic!("overriding existing option") - } else { - eprintln!("overriding existing option: `{}`", stringify!($field)); - exit!(2); - } - } else { - self.$field = other.$field; - } - } - } - } - )* - } - } - - // The following is a trimmed version of what serde_derive generates. All parts not relevant - // for toml deserialization have been removed. This reduces the binary size and improves - // compile time of rustbuild. 
- impl<'de> Deserialize<'de> for $name { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct Field; - impl<'de> serde::de::Visitor<'de> for Field { - type Value = $name; - fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(concat!("struct ", stringify!($name))) - } - - #[inline] - fn visit_map(self, mut map: A) -> Result - where - A: serde::de::MapAccess<'de>, - { - $(let mut $field: Option<$field_ty> = None;)* - while let Some(key) = - match serde::de::MapAccess::next_key::(&mut map) { - Ok(val) => val, - Err(err) => { - return Err(err); - } - } - { - match &*key { - $($field_key => { - if $field.is_some() { - return Err(::duplicate_field( - $field_key, - )); - } - $field = match serde::de::MapAccess::next_value::<$field_ty>( - &mut map, - ) { - Ok(val) => Some(val), - Err(err) => { - return Err(err); - } - }; - })* - key => { - return Err(serde::de::Error::unknown_field(key, FIELDS)); - } - } - } - Ok($name { $($field),* }) - } - } - const FIELDS: &'static [&'static str] = &[ - $($field_key,)* - ]; - Deserializer::deserialize_struct( - deserializer, - stringify!($name), - FIELDS, - Field, - ) - } - } - } -} - -impl Merge for Option { - fn merge(&mut self, other: Self, replace: ReplaceOpt) { - match replace { - ReplaceOpt::IgnoreDuplicate => { - if self.is_none() { - *self = other; - } - } - ReplaceOpt::Override => { - if other.is_some() { - *self = other; - } - } - ReplaceOpt::ErrorOnDuplicate => { - if other.is_some() { - if self.is_some() { - if cfg!(test) { - panic!("overriding existing option") - } else { - eprintln!("overriding existing option"); - exit!(2); - } - } else { - *self = other; - } - } - } - } - } -} - -define_config! { - /// TOML representation of various global build decisions. - #[derive(Default)] - struct Build { - build: Option = "build", - host: Option> = "host", - target: Option> = "target", - build_dir: Option = "build-dir", - cargo: Option = "cargo", - rustc: Option = "rustc", - rustfmt: Option = "rustfmt", - docs: Option = "docs", - compiler_docs: Option = "compiler-docs", - library_docs_private_items: Option = "library-docs-private-items", - docs_minification: Option = "docs-minification", - submodules: Option = "submodules", - gdb: Option = "gdb", - nodejs: Option = "nodejs", - npm: Option = "npm", - python: Option = "python", - reuse: Option = "reuse", - locked_deps: Option = "locked-deps", - vendor: Option = "vendor", - full_bootstrap: Option = "full-bootstrap", - extended: Option = "extended", - tools: Option> = "tools", - verbose: Option = "verbose", - sanitizers: Option = "sanitizers", - profiler: Option = "profiler", - cargo_native_static: Option = "cargo-native-static", - low_priority: Option = "low-priority", - configure_args: Option> = "configure-args", - local_rebuild: Option = "local-rebuild", - print_step_timings: Option = "print-step-timings", - print_step_rusage: Option = "print-step-rusage", - check_stage: Option = "check-stage", - doc_stage: Option = "doc-stage", - build_stage: Option = "build-stage", - test_stage: Option = "test-stage", - install_stage: Option = "install-stage", - dist_stage: Option = "dist-stage", - bench_stage: Option = "bench-stage", - patch_binaries_for_nix: Option = "patch-binaries-for-nix", - // NOTE: only parsed by bootstrap.py, `--feature build-metrics` enables metrics unconditionally - metrics: Option = "metrics", - } -} - -define_config! { - /// TOML representation of various global install decisions. 
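The three `ReplaceOpt` modes drive how the layered configuration is combined (profile includes, the main config.toml, and `--set` overrides). Below is a small self-contained sketch of the `Option` merge shown above, with the type parameters written out; the example values are illustrative only:

#[derive(Copy, Clone, Debug)]
enum ReplaceOpt {
    IgnoreDuplicate,
    Override,
    ErrorOnDuplicate,
}

fn merge_option<T>(slot: &mut Option<T>, other: Option<T>, replace: ReplaceOpt) {
    match replace {
        // Keep whatever was set first; a later layer cannot change it.
        ReplaceOpt::IgnoreDuplicate => {
            if slot.is_none() {
                *slot = other;
            }
        }
        // A later layer wins whenever it provides a value.
        ReplaceOpt::Override => {
            if other.is_some() {
                *slot = other;
            }
        }
        // Providing the same key twice is treated as an error.
        ReplaceOpt::ErrorOnDuplicate => {
            if other.is_some() {
                assert!(slot.is_none(), "overriding existing option");
                *slot = other;
            }
        }
    }
}

fn main() {
    let mut docs: Option<bool> = Some(true); // set in config.toml
    // A profile include is merged with IgnoreDuplicate, so it cannot clobber it.
    merge_option(&mut docs, Some(false), ReplaceOpt::IgnoreDuplicate);
    assert_eq!(docs, Some(true));
    // Command-line overrides are merged with Override and take precedence.
    merge_option(&mut docs, Some(false), ReplaceOpt::Override);
    assert_eq!(docs, Some(false));
}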
- struct Install { - prefix: Option = "prefix", - sysconfdir: Option = "sysconfdir", - docdir: Option = "docdir", - bindir: Option = "bindir", - libdir: Option = "libdir", - mandir: Option = "mandir", - datadir: Option = "datadir", - } -} - -define_config! { - /// TOML representation of how the LLVM build is configured. - struct Llvm { - optimize: Option = "optimize", - thin_lto: Option = "thin-lto", - release_debuginfo: Option = "release-debuginfo", - assertions: Option = "assertions", - tests: Option = "tests", - plugins: Option = "plugins", - ccache: Option = "ccache", - static_libstdcpp: Option = "static-libstdcpp", - ninja: Option = "ninja", - targets: Option = "targets", - experimental_targets: Option = "experimental-targets", - link_jobs: Option = "link-jobs", - link_shared: Option = "link-shared", - version_suffix: Option = "version-suffix", - clang_cl: Option = "clang-cl", - cflags: Option = "cflags", - cxxflags: Option = "cxxflags", - ldflags: Option = "ldflags", - use_libcxx: Option = "use-libcxx", - use_linker: Option = "use-linker", - allow_old_toolchain: Option = "allow-old-toolchain", - polly: Option = "polly", - clang: Option = "clang", - enable_warnings: Option = "enable-warnings", - download_ci_llvm: Option = "download-ci-llvm", - build_config: Option> = "build-config", - } -} - -define_config! { - struct Dist { - sign_folder: Option = "sign-folder", - gpg_password_file: Option = "gpg-password-file", - upload_addr: Option = "upload-addr", - src_tarball: Option = "src-tarball", - missing_tools: Option = "missing-tools", - compression_formats: Option> = "compression-formats", - compression_profile: Option = "compression-profile", - include_mingw_linker: Option = "include-mingw-linker", - } -} - -#[derive(Clone, Debug, Deserialize)] -#[serde(untagged)] -pub enum StringOrBool { - String(String), - Bool(bool), -} - -impl Default for StringOrBool { - fn default() -> StringOrBool { - StringOrBool::Bool(false) - } -} - -impl StringOrBool { - fn is_string_or_true(&self) -> bool { - matches!(self, Self::String(_) | Self::Bool(true)) - } -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum RustOptimize { - String(String), - Int(u8), - Bool(bool), -} - -impl Default for RustOptimize { - fn default() -> RustOptimize { - RustOptimize::Bool(false) - } -} - -impl<'de> Deserialize<'de> for RustOptimize { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_any(OptimizeVisitor) - } -} - -struct OptimizeVisitor; - -impl<'de> serde::de::Visitor<'de> for OptimizeVisitor { - type Value = RustOptimize; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str(r#"one of: 0, 1, 2, 3, "s", "z", true, false"#) - } - - fn visit_str(self, value: &str) -> Result - where - E: serde::de::Error, - { - if ["s", "z"].iter().find(|x| **x == value).is_some() { - Ok(RustOptimize::String(value.to_string())) - } else { - Err(format_optimize_error_msg(value)).map_err(serde::de::Error::custom) - } - } - - fn visit_i64(self, value: i64) -> Result - where - E: serde::de::Error, - { - if matches!(value, 0..=3) { - Ok(RustOptimize::Int(value as u8)) - } else { - Err(format_optimize_error_msg(value)).map_err(serde::de::Error::custom) - } - } - - fn visit_bool(self, value: bool) -> Result - where - E: serde::de::Error, - { - Ok(RustOptimize::Bool(value)) - } -} - -fn format_optimize_error_msg(v: impl std::fmt::Display) -> String { - format!( - r#"unrecognized option for rust optimize: "{v}", expected one of 0, 1, 
2, 3, "s", "z", true, false"# - ) -} - -impl RustOptimize { - pub(crate) fn is_release(&self) -> bool { - match &self { - RustOptimize::Bool(true) | RustOptimize::String(_) => true, - RustOptimize::Int(i) => *i > 0, - RustOptimize::Bool(false) => false, - } - } - - pub(crate) fn get_opt_level(&self) -> Option { - match &self { - RustOptimize::String(s) => Some(s.clone()), - RustOptimize::Int(i) => Some(i.to_string()), - RustOptimize::Bool(_) => None, - } - } -} - -#[derive(Deserialize)] -#[serde(untagged)] -enum StringOrInt<'a> { - String(&'a str), - Int(i64), -} -define_config! { - /// TOML representation of how the Rust build is configured. - struct Rust { - optimize: Option = "optimize", - debug: Option = "debug", - codegen_units: Option = "codegen-units", - codegen_units_std: Option = "codegen-units-std", - debug_assertions: Option = "debug-assertions", - debug_assertions_std: Option = "debug-assertions-std", - overflow_checks: Option = "overflow-checks", - overflow_checks_std: Option = "overflow-checks-std", - debug_logging: Option = "debug-logging", - debuginfo_level: Option = "debuginfo-level", - debuginfo_level_rustc: Option = "debuginfo-level-rustc", - debuginfo_level_std: Option = "debuginfo-level-std", - debuginfo_level_tools: Option = "debuginfo-level-tools", - debuginfo_level_tests: Option = "debuginfo-level-tests", - split_debuginfo: Option = "split-debuginfo", - run_dsymutil: Option = "run-dsymutil", - backtrace: Option = "backtrace", - incremental: Option = "incremental", - parallel_compiler: Option = "parallel-compiler", - default_linker: Option = "default-linker", - channel: Option = "channel", - description: Option = "description", - musl_root: Option = "musl-root", - rpath: Option = "rpath", - verbose_tests: Option = "verbose-tests", - optimize_tests: Option = "optimize-tests", - codegen_tests: Option = "codegen-tests", - omit_git_hash: Option = "omit-git-hash", - dist_src: Option = "dist-src", - save_toolstates: Option = "save-toolstates", - codegen_backends: Option> = "codegen-backends", - lld: Option = "lld", - use_lld: Option = "use-lld", - llvm_tools: Option = "llvm-tools", - deny_warnings: Option = "deny-warnings", - backtrace_on_ice: Option = "backtrace-on-ice", - verify_llvm_ir: Option = "verify-llvm-ir", - thin_lto_import_instr_limit: Option = "thin-lto-import-instr-limit", - remap_debuginfo: Option = "remap-debuginfo", - jemalloc: Option = "jemalloc", - test_compare_mode: Option = "test-compare-mode", - llvm_libunwind: Option = "llvm-libunwind", - control_flow_guard: Option = "control-flow-guard", - new_symbol_mangling: Option = "new-symbol-mangling", - profile_generate: Option = "profile-generate", - profile_use: Option = "profile-use", - // ignored; this is set from an env var set by bootstrap.py - download_rustc: Option = "download-rustc", - lto: Option = "lto", - validate_mir_opts: Option = "validate-mir-opts", - } -} - -define_config! { - /// TOML representation of how each build target is configured. 
- struct TomlTarget { - cc: Option = "cc", - cxx: Option = "cxx", - ar: Option = "ar", - ranlib: Option = "ranlib", - default_linker: Option = "default-linker", - linker: Option = "linker", - llvm_config: Option = "llvm-config", - llvm_has_rust_patches: Option = "llvm-has-rust-patches", - llvm_filecheck: Option = "llvm-filecheck", - llvm_libunwind: Option = "llvm-libunwind", - android_ndk: Option = "android-ndk", - sanitizers: Option = "sanitizers", - profiler: Option = "profiler", - rpath: Option = "rpath", - crt_static: Option = "crt-static", - musl_root: Option = "musl-root", - musl_libdir: Option = "musl-libdir", - wasi_root: Option = "wasi-root", - qemu_rootfs: Option = "qemu-rootfs", - no_std: Option = "no-std", - } -} - -impl Config { - pub fn default_opts() -> Config { - let mut config = Config::default(); - config.llvm_optimize = true; - config.ninja_in_file = true; - config.llvm_static_stdcpp = false; - config.backtrace = true; - config.rust_optimize = RustOptimize::Bool(true); - config.rust_optimize_tests = true; - config.submodules = None; - config.docs = true; - config.docs_minification = true; - config.rust_rpath = true; - config.channel = "dev".to_string(); - config.codegen_tests = true; - config.rust_dist_src = true; - config.rust_codegen_backends = vec![INTERNER.intern_str("llvm")]; - config.deny_warnings = true; - config.bindir = "bin".into(); - config.dist_include_mingw_linker = true; - config.dist_compression_profile = "fast".into(); - - config.stdout_is_tty = std::io::stdout().is_terminal(); - config.stderr_is_tty = std::io::stderr().is_terminal(); - - // set by build.rs - config.build = TargetSelection::from_user(&env!("BUILD_TRIPLE")); - - let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - // Undo `src/bootstrap` - config.src = manifest_dir.parent().unwrap().parent().unwrap().to_owned(); - config.out = PathBuf::from("build"); - - config - } - - pub fn parse(args: &[String]) -> Config { - #[cfg(test)] - fn get_toml(_: &Path) -> TomlConfig { - TomlConfig::default() - } - - #[cfg(not(test))] - fn get_toml(file: &Path) -> TomlConfig { - let contents = - t!(fs::read_to_string(file), format!("config file {} not found", file.display())); - // Deserialize to Value and then TomlConfig to prevent the Deserialize impl of - // TomlConfig and sub types to be monomorphized 5x by toml. - toml::from_str(&contents) - .and_then(|table: toml::Value| TomlConfig::deserialize(table)) - .unwrap_or_else(|err| { - eprintln!("failed to parse TOML configuration '{}': {err}", file.display()); - exit!(2); - }) - } - Self::parse_inner(args, get_toml) - } - - fn parse_inner(args: &[String], get_toml: impl Fn(&Path) -> TomlConfig) -> Config { - let mut flags = Flags::parse(&args); - let mut config = Config::default_opts(); - - // Set flags. 
- config.paths = std::mem::take(&mut flags.paths); - config.skip = flags.skip.into_iter().chain(flags.exclude).collect(); - config.include_default_paths = flags.include_default_paths; - config.rustc_error_format = flags.rustc_error_format; - config.json_output = flags.json_output; - config.on_fail = flags.on_fail; - config.jobs = Some(threads_from_config(flags.jobs as u32)); - config.cmd = flags.cmd; - config.incremental = flags.incremental; - config.dry_run = if flags.dry_run { DryRun::UserSelected } else { DryRun::Disabled }; - config.keep_stage = flags.keep_stage; - config.keep_stage_std = flags.keep_stage_std; - config.color = flags.color; - config.free_args = std::mem::take(&mut flags.free_args); - config.llvm_profile_use = flags.llvm_profile_use; - config.llvm_profile_generate = flags.llvm_profile_generate; - - // Infer the rest of the configuration. - - // Infer the source directory. This is non-trivial because we want to support a downloaded bootstrap binary, - // running on a completely machine from where it was compiled. - let mut cmd = Command::new("git"); - // NOTE: we cannot support running from outside the repository because the only path we have available - // is set at compile time, which can be wrong if bootstrap was downloaded from source. - // We still support running outside the repository if we find we aren't in a git directory. - cmd.arg("rev-parse").arg("--show-toplevel"); - // Discard stderr because we expect this to fail when building from a tarball. - let output = cmd - .stderr(std::process::Stdio::null()) - .output() - .ok() - .and_then(|output| if output.status.success() { Some(output) } else { None }); - if let Some(output) = output { - let git_root = String::from_utf8(output.stdout).unwrap(); - // We need to canonicalize this path to make sure it uses backslashes instead of forward slashes. - let git_root = PathBuf::from(git_root.trim()).canonicalize().unwrap(); - let s = git_root.to_str().unwrap(); - - // Bootstrap is quite bad at handling /? in front of paths - let src = match s.strip_prefix("\\\\?\\") { - Some(p) => PathBuf::from(p), - None => PathBuf::from(git_root), - }; - // If this doesn't have at least `stage0.json`, we guessed wrong. This can happen when, - // for example, the build directory is inside of another unrelated git directory. - // In that case keep the original `CARGO_MANIFEST_DIR` handling. - // - // NOTE: this implies that downloadable bootstrap isn't supported when the build directory is outside - // the source directory. We could fix that by setting a variable from all three of python, ./x, and x.ps1. - if src.join("src").join("stage0.json").exists() { - config.src = src; - } - } else { - // We're building from a tarball, not git sources. - // We don't support pre-downloaded bootstrap in this case. - } - - if cfg!(test) { - // Use the build directory of the original x.py invocation, so that we can set `initial_rustc` properly. - config.out = Path::new( - &env::var_os("CARGO_TARGET_DIR").expect("cargo test directly is not supported"), - ) - .parent() - .unwrap() - .to_path_buf(); - } - - let stage0_json = t!(std::fs::read(&config.src.join("src").join("stage0.json"))); - - config.stage0_metadata = t!(serde_json::from_slice::(&stage0_json)); - - // Read from `--config`, then `RUST_BOOTSTRAP_CONFIG`, then `./config.toml`, then `config.toml` in the root directory. 
- let toml_path = flags - .config - .clone() - .or_else(|| env::var_os("RUST_BOOTSTRAP_CONFIG").map(PathBuf::from)); - let using_default_path = toml_path.is_none(); - let mut toml_path = toml_path.unwrap_or_else(|| PathBuf::from("config.toml")); - if using_default_path && !toml_path.exists() { - toml_path = config.src.join(toml_path); - } - - // Give a hard error if `--config` or `RUST_BOOTSTRAP_CONFIG` are set to a missing path, - // but not if `config.toml` hasn't been created. - let mut toml = if !using_default_path || toml_path.exists() { - config.config = Some(toml_path.clone()); - get_toml(&toml_path) - } else { - config.config = None; - TomlConfig::default() - }; - - if let Some(include) = &toml.profile { - // Allows creating alias for profile names, allowing - // profiles to be renamed while maintaining back compatibility - // Keep in sync with `profile_aliases` in bootstrap.py - let profile_aliases = HashMap::from([("user", "dist")]); - let include = match profile_aliases.get(include.as_str()) { - Some(alias) => alias, - None => include.as_str(), - }; - let mut include_path = config.src.clone(); - include_path.push("src"); - include_path.push("bootstrap"); - include_path.push("defaults"); - include_path.push(format!("config.{include}.toml")); - let included_toml = get_toml(&include_path); - toml.merge(included_toml, ReplaceOpt::IgnoreDuplicate); - } - - let mut override_toml = TomlConfig::default(); - for option in flags.set.iter() { - fn get_table(option: &str) -> Result { - toml::from_str(&option) - .and_then(|table: toml::Value| TomlConfig::deserialize(table)) - } - - let mut err = match get_table(option) { - Ok(v) => { - override_toml.merge(v, ReplaceOpt::ErrorOnDuplicate); - continue; - } - Err(e) => e, - }; - // We want to be able to set string values without quotes, - // like in `configure.py`. Try adding quotes around the right hand side - if let Some((key, value)) = option.split_once("=") { - if !value.contains('"') { - match get_table(&format!(r#"{key}="{value}""#)) { - Ok(v) => { - override_toml.merge(v, ReplaceOpt::ErrorOnDuplicate); - continue; - } - Err(e) => err = e, - } - } - } - eprintln!("failed to parse override `{option}`: `{err}"); - exit!(2) - } - toml.merge(override_toml, ReplaceOpt::Override); - - config.changelog_seen = toml.changelog_seen; - - let build = toml.build.unwrap_or_default(); - if let Some(file_build) = build.build { - config.build = TargetSelection::from_user(&file_build); - }; - - set(&mut config.out, flags.build_dir.or_else(|| build.build_dir.map(PathBuf::from))); - // NOTE: Bootstrap spawns various commands with different working directories. - // To avoid writing to random places on the file system, `config.out` needs to be an absolute path. - if !config.out.is_absolute() { - // `canonicalize` requires the path to already exist. Use our vendored copy of `absolute` instead. - config.out = crate::util::absolute(&config.out); - } - - config.initial_rustc = if let Some(rustc) = build.rustc { - // FIXME(#115065): re-enable this check - // config.check_build_rustc_version(&rustc); - PathBuf::from(rustc) - } else { - config.download_beta_toolchain(); - config.out.join(config.build.triple).join("stage0/bin/rustc") - }; - - config.initial_cargo = build - .cargo - .map(|cargo| { - t!(PathBuf::from(cargo).canonicalize(), "`initial_cargo` not found on disk") - }) - .unwrap_or_else(|| config.out.join(config.build.triple).join("stage0/bin/cargo")); - - // NOTE: it's important this comes *after* we set `initial_rustc` just above. 
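One convenience worth noting from the override handling earlier in this hunk: a `--set key=value` flag whose value is not valid TOML is retried with quotes added around the right-hand side, so `--set rust.channel=nightly` works without shell quoting. A minimal sketch of that fallback (the function name is illustrative):

fn quote_rhs(option: &str) -> Option<String> {
    // Only attempted after parsing the raw `key=value` as TOML has failed,
    // and only when the value contains no quotes of its own.
    let (key, value) = option.split_once('=')?;
    if value.contains('"') {
        None
    } else {
        Some(format!(r#"{key}="{value}""#))
    }
}

fn main() {
    assert_eq!(
        quote_rhs("rust.channel=nightly").as_deref(),
        Some(r#"rust.channel="nightly""#)
    );
}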
- if config.dry_run() { - let dir = config.out.join("tmp-dry-run"); - t!(fs::create_dir_all(&dir)); - config.out = dir; - } - - config.hosts = if let Some(TargetSelectionList(arg_host)) = flags.host { - arg_host - } else if let Some(file_host) = build.host { - file_host.iter().map(|h| TargetSelection::from_user(h)).collect() - } else { - vec![config.build] - }; - config.targets = if let Some(TargetSelectionList(arg_target)) = flags.target { - arg_target - } else if let Some(file_target) = build.target { - file_target.iter().map(|h| TargetSelection::from_user(h)).collect() - } else { - // If target is *not* configured, then default to the host - // toolchains. - config.hosts.clone() - }; - - config.nodejs = build.nodejs.map(PathBuf::from); - config.npm = build.npm.map(PathBuf::from); - config.gdb = build.gdb.map(PathBuf::from); - config.python = build.python.map(PathBuf::from); - config.reuse = build.reuse.map(PathBuf::from); - config.submodules = build.submodules; - set(&mut config.low_priority, build.low_priority); - set(&mut config.compiler_docs, build.compiler_docs); - set(&mut config.library_docs_private_items, build.library_docs_private_items); - set(&mut config.docs_minification, build.docs_minification); - set(&mut config.docs, build.docs); - set(&mut config.locked_deps, build.locked_deps); - set(&mut config.vendor, build.vendor); - set(&mut config.full_bootstrap, build.full_bootstrap); - set(&mut config.extended, build.extended); - config.tools = build.tools; - set(&mut config.verbose, build.verbose); - set(&mut config.sanitizers, build.sanitizers); - set(&mut config.profiler, build.profiler); - set(&mut config.cargo_native_static, build.cargo_native_static); - set(&mut config.configure_args, build.configure_args); - set(&mut config.local_rebuild, build.local_rebuild); - set(&mut config.print_step_timings, build.print_step_timings); - set(&mut config.print_step_rusage, build.print_step_rusage); - config.patch_binaries_for_nix = build.patch_binaries_for_nix; - - config.verbose = cmp::max(config.verbose, flags.verbose as usize); - - if let Some(install) = toml.install { - config.prefix = install.prefix.map(PathBuf::from); - config.sysconfdir = install.sysconfdir.map(PathBuf::from); - config.datadir = install.datadir.map(PathBuf::from); - config.docdir = install.docdir.map(PathBuf::from); - set(&mut config.bindir, install.bindir.map(PathBuf::from)); - config.libdir = install.libdir.map(PathBuf::from); - config.mandir = install.mandir.map(PathBuf::from); - } - - // Store off these values as options because if they're not provided - // we'll infer default values for them later - let mut llvm_assertions = None; - let mut llvm_tests = None; - let mut llvm_plugins = None; - let mut debug = None; - let mut debug_assertions = None; - let mut debug_assertions_std = None; - let mut overflow_checks = None; - let mut overflow_checks_std = None; - let mut debug_logging = None; - let mut debuginfo_level = None; - let mut debuginfo_level_rustc = None; - let mut debuginfo_level_std = None; - let mut debuginfo_level_tools = None; - let mut debuginfo_level_tests = None; - let mut optimize = None; - let mut omit_git_hash = None; - - if let Some(rust) = toml.rust { - set(&mut config.channel, rust.channel); - - config.download_rustc_commit = config.download_ci_rustc_commit(rust.download_rustc); - // This list is incomplete, please help by expanding it! 
- if config.download_rustc_commit.is_some() { - // We need the channel used by the downloaded compiler to match the one we set for rustdoc; - // otherwise rustdoc-ui tests break. - let ci_channel = t!(fs::read_to_string(config.src.join("src/ci/channel"))); - let ci_channel = ci_channel.trim_end(); - if config.channel != ci_channel - && !(config.channel == "dev" && ci_channel == "nightly") - { - panic!( - "setting rust.channel={} is incompatible with download-rustc", - config.channel - ); - } - } - - debug = rust.debug; - debug_assertions = rust.debug_assertions; - debug_assertions_std = rust.debug_assertions_std; - overflow_checks = rust.overflow_checks; - overflow_checks_std = rust.overflow_checks_std; - debug_logging = rust.debug_logging; - debuginfo_level = rust.debuginfo_level; - debuginfo_level_rustc = rust.debuginfo_level_rustc; - debuginfo_level_std = rust.debuginfo_level_std; - debuginfo_level_tools = rust.debuginfo_level_tools; - debuginfo_level_tests = rust.debuginfo_level_tests; - - config.rust_split_debuginfo = rust - .split_debuginfo - .as_deref() - .map(SplitDebuginfo::from_str) - .map(|v| v.expect("invalid value for rust.split_debuginfo")) - .unwrap_or(SplitDebuginfo::default_for_platform(&config.build.triple)); - optimize = rust.optimize; - omit_git_hash = rust.omit_git_hash; - config.rust_new_symbol_mangling = rust.new_symbol_mangling; - set(&mut config.rust_optimize_tests, rust.optimize_tests); - set(&mut config.codegen_tests, rust.codegen_tests); - set(&mut config.rust_rpath, rust.rpath); - set(&mut config.jemalloc, rust.jemalloc); - set(&mut config.test_compare_mode, rust.test_compare_mode); - set(&mut config.backtrace, rust.backtrace); - config.description = rust.description; - set(&mut config.rust_dist_src, rust.dist_src); - set(&mut config.verbose_tests, rust.verbose_tests); - // in the case "false" is set explicitly, do not overwrite the command line args - if let Some(true) = rust.incremental { - config.incremental = true; - } - set(&mut config.use_lld, rust.use_lld); - set(&mut config.lld_enabled, rust.lld); - set(&mut config.llvm_tools_enabled, rust.llvm_tools); - config.rustc_parallel = rust.parallel_compiler.unwrap_or(false); - config.rustc_default_linker = rust.default_linker; - config.musl_root = rust.musl_root.map(PathBuf::from); - config.save_toolstates = rust.save_toolstates.map(PathBuf::from); - set( - &mut config.deny_warnings, - match flags.warnings { - Warnings::Deny => Some(true), - Warnings::Warn => Some(false), - Warnings::Default => rust.deny_warnings, - }, - ); - set(&mut config.backtrace_on_ice, rust.backtrace_on_ice); - set(&mut config.rust_verify_llvm_ir, rust.verify_llvm_ir); - config.rust_thin_lto_import_instr_limit = rust.thin_lto_import_instr_limit; - set(&mut config.rust_remap_debuginfo, rust.remap_debuginfo); - set(&mut config.control_flow_guard, rust.control_flow_guard); - config.llvm_libunwind_default = rust - .llvm_libunwind - .map(|v| v.parse().expect("failed to parse rust.llvm-libunwind")); - - if let Some(ref backends) = rust.codegen_backends { - let available_backends = vec!["llvm", "cranelift", "gcc"]; - - config.rust_codegen_backends = backends.iter().map(|s| { - if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) { - if available_backends.contains(&backend) { - panic!("Invalid value '{s}' for 'rust.codegen-backends'. Instead, please use '{backend}'."); - } else { - println!("help: '{s}' for 'rust.codegen-backends' might fail. \ - Codegen backends are mostly defined without the '{CODEGEN_BACKEND_PREFIX}' prefix. 
\ - In this case, it would be referred to as '{backend}'."); - } - } - - INTERNER.intern_str(s) - }).collect(); - } - - config.rust_codegen_units = rust.codegen_units.map(threads_from_config); - config.rust_codegen_units_std = rust.codegen_units_std.map(threads_from_config); - config.rust_profile_use = flags.rust_profile_use.or(rust.profile_use); - config.rust_profile_generate = flags.rust_profile_generate.or(rust.profile_generate); - config.rust_lto = rust - .lto - .as_deref() - .map(|value| RustcLto::from_str(value).unwrap()) - .unwrap_or_default(); - config.rust_validate_mir_opts = rust.validate_mir_opts; - } else { - config.rust_profile_use = flags.rust_profile_use; - config.rust_profile_generate = flags.rust_profile_generate; - } - - config.reproducible_artifacts = flags.reproducible_artifact; - - // rust_info must be set before is_ci_llvm_available() is called. - let default = config.channel == "dev"; - config.omit_git_hash = omit_git_hash.unwrap_or(default); - config.rust_info = GitInfo::new(config.omit_git_hash, &config.src); - - if let Some(llvm) = toml.llvm { - match llvm.ccache { - Some(StringOrBool::String(ref s)) => config.ccache = Some(s.to_string()), - Some(StringOrBool::Bool(true)) => { - config.ccache = Some("ccache".to_string()); - } - Some(StringOrBool::Bool(false)) | None => {} - } - set(&mut config.ninja_in_file, llvm.ninja); - llvm_assertions = llvm.assertions; - llvm_tests = llvm.tests; - llvm_plugins = llvm.plugins; - set(&mut config.llvm_optimize, llvm.optimize); - set(&mut config.llvm_thin_lto, llvm.thin_lto); - set(&mut config.llvm_release_debuginfo, llvm.release_debuginfo); - set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp); - if let Some(v) = llvm.link_shared { - config.llvm_link_shared.set(Some(v)); - } - config.llvm_targets = llvm.targets.clone(); - config.llvm_experimental_targets = llvm.experimental_targets.clone(); - config.llvm_link_jobs = llvm.link_jobs; - config.llvm_version_suffix = llvm.version_suffix.clone(); - config.llvm_clang_cl = llvm.clang_cl.clone(); - - config.llvm_cflags = llvm.cflags.clone(); - config.llvm_cxxflags = llvm.cxxflags.clone(); - config.llvm_ldflags = llvm.ldflags.clone(); - set(&mut config.llvm_use_libcxx, llvm.use_libcxx); - config.llvm_use_linker = llvm.use_linker.clone(); - config.llvm_allow_old_toolchain = llvm.allow_old_toolchain.unwrap_or(false); - config.llvm_polly = llvm.polly.unwrap_or(false); - config.llvm_clang = llvm.clang.unwrap_or(false); - config.llvm_enable_warnings = llvm.enable_warnings.unwrap_or(false); - config.llvm_build_config = llvm.build_config.clone().unwrap_or(Default::default()); - - let asserts = llvm_assertions.unwrap_or(false); - config.llvm_from_ci = match llvm.download_ci_llvm { - Some(StringOrBool::String(s)) => { - assert_eq!(s, "if-available", "unknown option `{s}` for download-ci-llvm"); - crate::llvm::is_ci_llvm_available(&config, asserts) - } - Some(StringOrBool::Bool(b)) => b, - None => { - config.channel == "dev" && crate::llvm::is_ci_llvm_available(&config, asserts) - } - }; - - if config.llvm_from_ci { - // None of the LLVM options, except assertions, are supported - // when using downloaded LLVM. We could just ignore these but - // that's potentially confusing, so force them to not be - // explicitly set. The defaults and CI defaults don't - // necessarily match but forcing people to match (somewhat - // arbitrary) CI configuration locally seems bad/hard. 
- check_ci_llvm!(llvm.optimize); - check_ci_llvm!(llvm.thin_lto); - check_ci_llvm!(llvm.release_debuginfo); - // CI-built LLVM can be either dynamic or static. We won't know until we download it. - check_ci_llvm!(llvm.link_shared); - check_ci_llvm!(llvm.static_libstdcpp); - check_ci_llvm!(llvm.targets); - check_ci_llvm!(llvm.experimental_targets); - check_ci_llvm!(llvm.link_jobs); - check_ci_llvm!(llvm.clang_cl); - check_ci_llvm!(llvm.version_suffix); - check_ci_llvm!(llvm.cflags); - check_ci_llvm!(llvm.cxxflags); - check_ci_llvm!(llvm.ldflags); - check_ci_llvm!(llvm.use_libcxx); - check_ci_llvm!(llvm.use_linker); - check_ci_llvm!(llvm.allow_old_toolchain); - check_ci_llvm!(llvm.polly); - check_ci_llvm!(llvm.clang); - check_ci_llvm!(llvm.build_config); - check_ci_llvm!(llvm.plugins); - } - - // NOTE: can never be hit when downloading from CI, since we call `check_ci_llvm!(thin_lto)` above. - if config.llvm_thin_lto && llvm.link_shared.is_none() { - // If we're building with ThinLTO on, by default we want to link - // to LLVM shared, to avoid re-doing ThinLTO (which happens in - // the link step) with each stage. - config.llvm_link_shared.set(Some(true)); - } - } else { - config.llvm_from_ci = - config.channel == "dev" && crate::llvm::is_ci_llvm_available(&config, false); - } - - if let Some(t) = toml.target { - for (triple, cfg) in t { - let mut target = Target::from_triple(&triple); - - if let Some(ref s) = cfg.llvm_config { - if config.download_rustc_commit.is_some() && triple == &*config.build.triple { - panic!( - "setting llvm_config for the host is incompatible with download-rustc" - ); - } - target.llvm_config = Some(config.src.join(s)); - } - target.llvm_has_rust_patches = cfg.llvm_has_rust_patches; - if let Some(ref s) = cfg.llvm_filecheck { - target.llvm_filecheck = Some(config.src.join(s)); - } - target.llvm_libunwind = cfg - .llvm_libunwind - .as_ref() - .map(|v| v.parse().expect("failed to parse rust.llvm-libunwind")); - if let Some(ref s) = cfg.android_ndk { - target.ndk = Some(config.src.join(s)); - } - if let Some(s) = cfg.no_std { - target.no_std = s; - } - target.cc = cfg.cc.map(PathBuf::from).or_else(|| { - target.ndk.as_ref().map(|ndk| ndk_compiler(Language::C, &triple, ndk)) - }); - target.cxx = cfg.cxx.map(PathBuf::from).or_else(|| { - target.ndk.as_ref().map(|ndk| ndk_compiler(Language::CPlusPlus, &triple, ndk)) - }); - target.ar = cfg.ar.map(PathBuf::from); - target.ranlib = cfg.ranlib.map(PathBuf::from); - target.linker = cfg.linker.map(PathBuf::from); - target.crt_static = cfg.crt_static; - target.musl_root = cfg.musl_root.map(PathBuf::from); - target.musl_libdir = cfg.musl_libdir.map(PathBuf::from); - target.wasi_root = cfg.wasi_root.map(PathBuf::from); - target.qemu_rootfs = cfg.qemu_rootfs.map(PathBuf::from); - target.sanitizers = cfg.sanitizers; - target.profiler = cfg.profiler; - target.rpath = cfg.rpath; - - config.target_config.insert(TargetSelection::from_user(&triple), target); - } - } - - if config.llvm_from_ci { - let triple = &config.build.triple; - let ci_llvm_bin = config.ci_llvm_root().join("bin"); - let build_target = config - .target_config - .entry(config.build) - .or_insert_with(|| Target::from_triple(&triple)); - - check_ci_llvm!(build_target.llvm_config); - check_ci_llvm!(build_target.llvm_filecheck); - build_target.llvm_config = Some(ci_llvm_bin.join(exe("llvm-config", config.build))); - build_target.llvm_filecheck = Some(ci_llvm_bin.join(exe("FileCheck", config.build))); - } - - if let Some(t) = toml.dist { - config.dist_sign_folder = 
t.sign_folder.map(PathBuf::from); - config.dist_upload_addr = t.upload_addr; - config.dist_compression_formats = t.compression_formats; - set(&mut config.dist_compression_profile, t.compression_profile); - set(&mut config.rust_dist_src, t.src_tarball); - set(&mut config.missing_tools, t.missing_tools); - set(&mut config.dist_include_mingw_linker, t.include_mingw_linker) - } - - if let Some(r) = build.rustfmt { - *config.initial_rustfmt.borrow_mut() = if r.exists() { - RustfmtState::SystemToolchain(r) - } else { - RustfmtState::Unavailable - }; - } - - // Now that we've reached the end of our configuration, infer the - // default values for all options that we haven't otherwise stored yet. - - config.llvm_assertions = llvm_assertions.unwrap_or(false); - config.llvm_tests = llvm_tests.unwrap_or(false); - config.llvm_plugins = llvm_plugins.unwrap_or(false); - config.rust_optimize = optimize.unwrap_or(RustOptimize::Bool(true)); - - let default = debug == Some(true); - config.rust_debug_assertions = debug_assertions.unwrap_or(default); - config.rust_debug_assertions_std = - debug_assertions_std.unwrap_or(config.rust_debug_assertions); - config.rust_overflow_checks = overflow_checks.unwrap_or(default); - config.rust_overflow_checks_std = - overflow_checks_std.unwrap_or(config.rust_overflow_checks); - - config.rust_debug_logging = debug_logging.unwrap_or(config.rust_debug_assertions); - - let with_defaults = |debuginfo_level_specific: Option<_>| { - debuginfo_level_specific.or(debuginfo_level).unwrap_or(if debug == Some(true) { - DebuginfoLevel::Limited - } else { - DebuginfoLevel::None - }) - }; - config.rust_debuginfo_level_rustc = with_defaults(debuginfo_level_rustc); - config.rust_debuginfo_level_std = with_defaults(debuginfo_level_std); - config.rust_debuginfo_level_tools = with_defaults(debuginfo_level_tools); - config.rust_debuginfo_level_tests = debuginfo_level_tests.unwrap_or(DebuginfoLevel::None); - - let download_rustc = config.download_rustc_commit.is_some(); - // See https://github.com/rust-lang/compiler-team/issues/326 - config.stage = match config.cmd { - Subcommand::Check { .. } => flags.stage.or(build.check_stage).unwrap_or(0), - // `download-rustc` only has a speed-up for stage2 builds. Default to stage2 unless explicitly overridden. - Subcommand::Doc { .. } => { - flags.stage.or(build.doc_stage).unwrap_or(if download_rustc { 2 } else { 0 }) - } - Subcommand::Build { .. } => { - flags.stage.or(build.build_stage).unwrap_or(if download_rustc { 2 } else { 1 }) - } - Subcommand::Test { .. } => { - flags.stage.or(build.test_stage).unwrap_or(if download_rustc { 2 } else { 1 }) - } - Subcommand::Bench { .. } => flags.stage.or(build.bench_stage).unwrap_or(2), - Subcommand::Dist { .. } => flags.stage.or(build.dist_stage).unwrap_or(2), - Subcommand::Install { .. } => flags.stage.or(build.install_stage).unwrap_or(2), - // These are all bootstrap tools, which don't depend on the compiler. - // The stage we pass shouldn't matter, but use 0 just in case. - Subcommand::Clean { .. } - | Subcommand::Clippy { .. } - | Subcommand::Fix { .. } - | Subcommand::Run { .. } - | Subcommand::Setup { .. } - | Subcommand::Format { .. } - | Subcommand::Suggest { .. } => flags.stage.unwrap_or(0), - }; - - // CI should always run stage 2 builds, unless it specifically states otherwise - #[cfg(not(test))] - if flags.stage.is_none() && crate::CiEnv::current() != crate::CiEnv::None { - match config.cmd { - Subcommand::Test { .. } - | Subcommand::Doc { .. } - | Subcommand::Build { .. } - | Subcommand::Bench { .. 
} - | Subcommand::Dist { .. } - | Subcommand::Install { .. } => { - assert_eq!( - config.stage, 2, - "x.py should be run with `--stage 2` on CI, but was run with `--stage {}`", - config.stage, - ); - } - Subcommand::Clean { .. } - | Subcommand::Check { .. } - | Subcommand::Clippy { .. } - | Subcommand::Fix { .. } - | Subcommand::Run { .. } - | Subcommand::Setup { .. } - | Subcommand::Format { .. } - | Subcommand::Suggest { .. } => {} - } - } - - config - } - - pub(crate) fn dry_run(&self) -> bool { - match self.dry_run { - DryRun::Disabled => false, - DryRun::SelfCheck | DryRun::UserSelected => true, - } - } - - /// Runs a command, printing out nice contextual information if it fails. - /// Exits if the command failed to execute at all, otherwise returns its - /// `status.success()`. - #[deprecated = "use `Builder::try_run` instead where possible"] - pub(crate) fn try_run(&self, cmd: &mut Command) -> Result<(), ()> { - if self.dry_run() { - return Ok(()); - } - self.verbose(&format!("running: {cmd:?}")); - build_helper::util::try_run(cmd, self.is_verbose()) - } - - /// A git invocation which runs inside the source directory. - /// - /// Use this rather than `Command::new("git")` in order to support out-of-tree builds. - pub(crate) fn git(&self) -> Command { - let mut git = Command::new("git"); - git.current_dir(&self.src); - git - } - - pub(crate) fn test_args(&self) -> Vec<&str> { - let mut test_args = match self.cmd { - Subcommand::Test { ref test_args, .. } | Subcommand::Bench { ref test_args, .. } => { - test_args.iter().flat_map(|s| s.split_whitespace()).collect() - } - _ => vec![], - }; - test_args.extend(self.free_args.iter().map(|s| s.as_str())); - test_args - } - - pub(crate) fn args(&self) -> Vec<&str> { - let mut args = match self.cmd { - Subcommand::Run { ref args, .. } => { - args.iter().flat_map(|s| s.split_whitespace()).collect() - } - _ => vec![], - }; - args.extend(self.free_args.iter().map(|s| s.as_str())); - args - } - - /// Bootstrap embeds a version number into the name of shared libraries it uploads in CI. - /// Return the version it would have used for the given commit. 
- pub(crate) fn artifact_version_part(&self, commit: &str) -> String { - let (channel, version) = if self.rust_info.is_managed_git_subrepository() { - let mut channel = self.git(); - channel.arg("show").arg(format!("{commit}:src/ci/channel")); - let channel = output(&mut channel); - let mut version = self.git(); - version.arg("show").arg(format!("{commit}:src/version")); - let version = output(&mut version); - (channel.trim().to_owned(), version.trim().to_owned()) - } else { - let channel = fs::read_to_string(self.src.join("src/ci/channel")); - let version = fs::read_to_string(self.src.join("src/version")); - match (channel, version) { - (Ok(channel), Ok(version)) => { - (channel.trim().to_owned(), version.trim().to_owned()) - } - (channel, version) => { - let src = self.src.display(); - eprintln!("error: failed to determine artifact channel and/or version"); - eprintln!( - "help: consider using a git checkout or ensure these files are readable" - ); - if let Err(channel) = channel { - eprintln!("reading {src}/src/ci/channel failed: {channel:?}"); - } - if let Err(version) = version { - eprintln!("reading {src}/src/version failed: {version:?}"); - } - panic!(); - } - } - }; - - match channel.as_str() { - "stable" => version, - "beta" => channel, - "nightly" => channel, - other => unreachable!("{:?} is not recognized as a valid channel", other), - } - } - - /// Try to find the relative path of `bindir`, otherwise return it in full. - pub fn bindir_relative(&self) -> &Path { - let bindir = &self.bindir; - if bindir.is_absolute() { - // Try to make it relative to the prefix. - if let Some(prefix) = &self.prefix { - if let Ok(stripped) = bindir.strip_prefix(prefix) { - return stripped; - } - } - } - bindir - } - - /// Try to find the relative path of `libdir`. - pub fn libdir_relative(&self) -> Option<&Path> { - let libdir = self.libdir.as_ref()?; - if libdir.is_relative() { - Some(libdir) - } else { - // Try to make it relative to the prefix. - libdir.strip_prefix(self.prefix.as_ref()?).ok() - } - } - - /// The absolute path to the downloaded LLVM artifacts. - pub(crate) fn ci_llvm_root(&self) -> PathBuf { - assert!(self.llvm_from_ci); - self.out.join(&*self.build.triple).join("ci-llvm") - } - - /// Directory where the extracted `rustc-dev` component is stored. - pub(crate) fn ci_rustc_dir(&self) -> PathBuf { - assert!(self.download_rustc()); - self.out.join(self.build.triple).join("ci-rustc") - } - - /// Determine whether llvm should be linked dynamically. - /// - /// If `false`, llvm should be linked statically. - /// This is computed on demand since LLVM might have to first be downloaded from CI. - pub(crate) fn llvm_link_shared(&self) -> bool { - let mut opt = self.llvm_link_shared.get(); - if opt.is_none() && self.dry_run() { - // just assume static for now - dynamic linking isn't supported on all platforms - return false; - } - - let llvm_link_shared = *opt.get_or_insert_with(|| { - if self.llvm_from_ci { - self.maybe_download_ci_llvm(); - let ci_llvm = self.ci_llvm_root(); - let link_type = t!( - std::fs::read_to_string(ci_llvm.join("link-type.txt")), - format!("CI llvm missing: {}", ci_llvm.display()) - ); - link_type == "dynamic" - } else { - // unclear how thought-through this default is, but it maintains compatibility with - // previous behavior - false - } - }); - self.llvm_link_shared.set(opt); - llvm_link_shared - } - - /// Return whether we will use a downloaded, pre-compiled version of rustc, or just build from source. 
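`artifact_version_part` above resolves to either the version number or the channel name, depending on the channel read from `src/ci/channel`. In short, as an illustrative sketch with made-up example values:

fn artifact_version_part(channel: &str, version: &str) -> String {
    match channel {
        // Stable artifacts embed the version number, e.g. "1.75.0".
        "stable" => version.to_owned(),
        // Beta and nightly artifacts embed the channel name instead.
        "beta" | "nightly" => channel.to_owned(),
        other => unreachable!("{other:?} is not recognized as a valid channel"),
    }
}

fn main() {
    assert_eq!(artifact_version_part("stable", "1.75.0"), "1.75.0");
    assert_eq!(artifact_version_part("nightly", "1.75.0"), "nightly");
}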
- pub(crate) fn download_rustc(&self) -> bool { - self.download_rustc_commit().is_some() - } - - pub(crate) fn download_rustc_commit(&self) -> Option<&str> { - static DOWNLOAD_RUSTC: OnceCell> = OnceCell::new(); - if self.dry_run() && DOWNLOAD_RUSTC.get().is_none() { - // avoid trying to actually download the commit - return self.download_rustc_commit.as_deref(); - } - - DOWNLOAD_RUSTC - .get_or_init(|| match &self.download_rustc_commit { - None => None, - Some(commit) => { - self.download_ci_rustc(commit); - Some(commit.clone()) - } - }) - .as_deref() - } - - pub(crate) fn initial_rustfmt(&self) -> Option { - match &mut *self.initial_rustfmt.borrow_mut() { - RustfmtState::SystemToolchain(p) | RustfmtState::Downloaded(p) => Some(p.clone()), - RustfmtState::Unavailable => None, - r @ RustfmtState::LazyEvaluated => { - if self.dry_run() { - return Some(PathBuf::new()); - } - let path = self.maybe_download_rustfmt(); - *r = if let Some(p) = &path { - RustfmtState::Downloaded(p.clone()) - } else { - RustfmtState::Unavailable - }; - path - } - } - } - - pub fn verbose(&self, msg: &str) { - if self.verbose > 0 { - println!("{msg}"); - } - } - - pub fn sanitizers_enabled(&self, target: TargetSelection) -> bool { - self.target_config.get(&target).map(|t| t.sanitizers).flatten().unwrap_or(self.sanitizers) - } - - pub fn any_sanitizers_enabled(&self) -> bool { - self.target_config.values().any(|t| t.sanitizers == Some(true)) || self.sanitizers - } - - pub fn profiler_path(&self, target: TargetSelection) -> Option<&str> { - match self.target_config.get(&target)?.profiler.as_ref()? { - StringOrBool::String(s) => Some(s), - StringOrBool::Bool(_) => None, - } - } - - pub fn profiler_enabled(&self, target: TargetSelection) -> bool { - self.target_config - .get(&target) - .and_then(|t| t.profiler.as_ref()) - .map(StringOrBool::is_string_or_true) - .unwrap_or(self.profiler) - } - - pub fn any_profiler_enabled(&self) -> bool { - self.target_config.values().any(|t| matches!(&t.profiler, Some(p) if p.is_string_or_true())) - || self.profiler - } - - pub fn rpath_enabled(&self, target: TargetSelection) -> bool { - self.target_config.get(&target).map(|t| t.rpath).flatten().unwrap_or(self.rust_rpath) - } - - pub fn llvm_enabled(&self) -> bool { - self.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) - } - - pub fn llvm_libunwind(&self, target: TargetSelection) -> LlvmLibunwind { - self.target_config - .get(&target) - .and_then(|t| t.llvm_libunwind) - .or(self.llvm_libunwind_default) - .unwrap_or(if target.contains("fuchsia") { - LlvmLibunwind::InTree - } else { - LlvmLibunwind::No - }) - } - - pub fn submodules(&self, rust_info: &GitInfo) -> bool { - self.submodules.unwrap_or(rust_info.is_managed_git_subrepository()) - } - - pub fn default_codegen_backend(&self) -> Option> { - self.rust_codegen_backends.get(0).cloned() - } - - pub fn check_build_rustc_version(&self, rustc_path: &str) { - if self.dry_run() { - return; - } - - // check rustc version is same or lower with 1 apart from the building one - let mut cmd = Command::new(rustc_path); - cmd.arg("--version"); - let rustc_output = output(&mut cmd) - .lines() - .next() - .unwrap() - .split(' ') - .nth(1) - .unwrap() - .split('-') - .next() - .unwrap() - .to_owned(); - let rustc_version = Version::parse(&rustc_output.trim()).unwrap(); - let source_version = - Version::parse(&fs::read_to_string(self.src.join("src/version")).unwrap().trim()) - .unwrap(); - if !(source_version == rustc_version - || (source_version.major == rustc_version.major - && 
(source_version.minor == rustc_version.minor - || source_version.minor == rustc_version.minor + 1))) - { - let prev_version = format!("{}.{}.x", source_version.major, source_version.minor - 1); - eprintln!( - "Unexpected rustc version: {rustc_version}, we should use {prev_version}/{source_version} to build source with {source_version}" - ); - exit!(1); - } - } - - /// Returns the commit to download, or `None` if we shouldn't download CI artifacts. - fn download_ci_rustc_commit(&self, download_rustc: Option) -> Option { - // If `download-rustc` is not set, default to rebuilding. - let if_unchanged = match download_rustc { - None | Some(StringOrBool::Bool(false)) => return None, - Some(StringOrBool::Bool(true)) => false, - Some(StringOrBool::String(s)) if s == "if-unchanged" => true, - Some(StringOrBool::String(other)) => { - panic!("unrecognized option for download-rustc: {other}") - } - }; - - // Handle running from a directory other than the top level - let top_level = output(self.git().args(&["rev-parse", "--show-toplevel"])); - let top_level = top_level.trim_end(); - let compiler = format!("{top_level}/compiler/"); - let library = format!("{top_level}/library/"); - - // Look for a version to compare to based on the current commit. - // Only commits merged by bors will have CI artifacts. - let merge_base = output( - self.git() - .arg("rev-list") - .arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email)) - .args(&["-n1", "--first-parent", "HEAD"]), - ); - let commit = merge_base.trim_end(); - if commit.is_empty() { - println!("error: could not find commit hash for downloading rustc"); - println!("help: maybe your repository history is too shallow?"); - println!("help: consider disabling `download-rustc`"); - println!("help: or fetch enough history to include one upstream commit"); - crate::exit!(1); - } - - // Warn if there were changes to the compiler or standard library since the ancestor commit. 
- let has_changes = !t!(self - .git() - .args(&["diff-index", "--quiet", &commit, "--", &compiler, &library]) - .status()) - .success(); - if has_changes { - if if_unchanged { - if self.verbose > 0 { - println!( - "warning: saw changes to compiler/ or library/ since {commit}; \ - ignoring `download-rustc`" - ); - } - return None; - } - println!( - "warning: `download-rustc` is enabled, but there are changes to \ - compiler/ or library/" - ); - } - - Some(commit.to_string()) - } -} - -fn set(field: &mut T, val: Option) { - if let Some(v) = val { - *field = v; - } -} - -fn threads_from_config(v: u32) -> u32 { - match v { - 0 => std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32, - n => n, - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/configure.py rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/configure.py --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/configure.py 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/configure.py 2023-12-21 16:55:28.000000000 +0000 @@ -59,6 +59,7 @@ o("use-libcxx", "llvm.use-libcxx", "build LLVM with libc++") o("control-flow-guard", "rust.control-flow-guard", "Enable Control Flow Guard") o("patch-binaries-for-nix", "build.patch-binaries-for-nix", "whether patch binaries for usage with Nix toolchains") +o("new-symbol-mangling", "rust.new-symbol-mangling", "use symbol-mangling-version v0") v("llvm-cflags", "llvm.cflags", "build LLVM with these extra compiler flags") v("llvm-cxxflags", "llvm.cxxflags", "build LLVM with these extra compiler flags") @@ -97,20 +98,7 @@ v("llvm-config", None, "set path to llvm-config") v("llvm-filecheck", None, "set path to LLVM's FileCheck utility") v("python", "build.python", "set path to python") -v("android-cross-path", "target.arm-linux-androideabi.android-ndk", - "Android NDK standalone path (deprecated)") -v("i686-linux-android-ndk", "target.i686-linux-android.android-ndk", - "i686-linux-android NDK standalone path") -v("arm-linux-androideabi-ndk", "target.arm-linux-androideabi.android-ndk", - "arm-linux-androideabi NDK standalone path") -v("armv7-linux-androideabi-ndk", "target.armv7-linux-androideabi.android-ndk", - "armv7-linux-androideabi NDK standalone path") -v("thumbv7neon-linux-androideabi-ndk", "target.thumbv7neon-linux-androideabi.android-ndk", - "thumbv7neon-linux-androideabi NDK standalone path") -v("aarch64-linux-android-ndk", "target.aarch64-linux-android.android-ndk", - "aarch64-linux-android NDK standalone path") -v("x86_64-linux-android-ndk", "target.x86_64-linux-android.android-ndk", - "x86_64-linux-android NDK standalone path") +v("android-ndk", "build.android-ndk", "set path to Android NDK") v("musl-root", "target.x86_64-unknown-linux-musl.musl-root", "MUSL root installation directory (deprecated)") v("musl-root-x86_64", "target.x86_64-unknown-linux-musl.musl-root", @@ -265,7 +253,7 @@ if not found: unknown_args.append(arg) - # Note: here and a few other places, we use [-1] to apply the *last* value + # NOTE: here and a few other places, we use [-1] to apply the *last* value # passed. But if option-checking is enabled, then the known_args loop will # also assert that options are only passed once. option_checking = ('option-checking' not in known_args @@ -489,7 +477,7 @@ # These are used by rpm, but aren't accepted by x.py. # Give a warning that they're ignored, but not a hard error. 
if key in ["infodir", "localstatedir"]: - print("warning: {} will be ignored".format(key)) + print("WARNING: {} will be ignored".format(key)) else: raise RuntimeError("failed to find config line for {}".format(key)) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/defaults/config.codegen.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/defaults/config.codegen.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/defaults/config.codegen.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/defaults/config.codegen.toml 2023-12-21 16:55:28.000000000 +0000 @@ -10,7 +10,7 @@ # enable warnings during the llvm compilation enable-warnings = true # build llvm from source -download-ci-llvm = false +download-ci-llvm = "if-unchanged" [rust] # This enables `RUSTC_LOG=debug`, avoiding confusing situations diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/dist.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/dist.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/dist.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/dist.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2274 +0,0 @@ -//! Implementation of the various distribution aspects of the compiler. -//! -//! This module is responsible for creating tarballs of the standard library, -//! compiler, and documentation. This ends up being what we distribute to -//! everyone as well. -//! -//! No tarball is actually created literally in this file, but rather we shell -//! out to `rust-installer` still. This may one day be replaced with bits and -//! pieces of `rustup.rs`! - -use std::collections::HashSet; -use std::env; -use std::ffi::OsStr; -use std::fs; -use std::io::Write; -use std::path::{Path, PathBuf}; -use std::process::Command; - -use object::read::archive::ArchiveFile; -use object::BinaryFormat; - -use crate::builder::{Builder, Kind, RunConfig, ShouldRun, Step}; -use crate::cache::{Interned, INTERNER}; -use crate::channel; -use crate::compile; -use crate::config::TargetSelection; -use crate::doc::DocumentationFormat; -use crate::llvm; -use crate::tarball::{GeneratedTarball, OverlayKind, Tarball}; -use crate::tool::{self, Tool}; -use crate::util::{exe, is_dylib, output, t, timeit}; -use crate::{Compiler, DependencyType, Mode, LLVM_TOOLS}; - -pub fn pkgname(builder: &Builder<'_>, component: &str) -> String { - format!("{}-{}", component, builder.rust_package_vers()) -} - -pub(crate) fn distdir(builder: &Builder<'_>) -> PathBuf { - builder.out.join("dist") -} - -pub fn tmpdir(builder: &Builder<'_>) -> PathBuf { - builder.out.join("tmp/dist") -} - -fn should_build_extended_tool(builder: &Builder<'_>, tool: &str) -> bool { - if !builder.config.extended { - return false; - } - builder.config.tools.as_ref().map_or(true, |tools| tools.contains(tool)) -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Docs { - pub host: TargetSelection, -} - -impl Step for Docs { - type Output = Option<GeneratedTarball>; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = run.builder.config.docs; - run.alias("rust-docs").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Docs { host: run.target }); - } - - /// Builds the `rust-docs` installer component. 
- fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { - let host = self.host; - builder.default_doc(&[]); - - let dest = "share/doc/rust/html"; - - let mut tarball = Tarball::new(builder, "rust-docs", &host.triple); - tarball.set_product_name("Rust Documentation"); - tarball.add_bulk_dir(&builder.doc_out(host), dest); - tarball.add_file(&builder.src.join("src/doc/robots.txt"), dest, 0o644); - Some(tarball.generate()) - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct JsonDocs { - pub host: TargetSelection, -} - -impl Step for JsonDocs { - type Output = Option<GeneratedTarball>; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = run.builder.config.docs; - run.alias("rust-docs-json").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(JsonDocs { host: run.target }); - } - - /// Builds the `rust-docs-json` installer component. - fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { - let host = self.host; - builder.ensure(crate::doc::Std::new( - builder.top_stage, - host, - builder, - DocumentationFormat::JSON, - )); - - let dest = "share/doc/rust/json"; - - let mut tarball = Tarball::new(builder, "rust-docs-json", &host.triple); - tarball.set_product_name("Rust Documentation In JSON Format"); - tarball.is_preview(true); - tarball.add_bulk_dir(&builder.json_doc_out(host), dest); - Some(tarball.generate()) - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustcDocs { - pub host: TargetSelection, -} - -impl Step for RustcDocs { - type Output = Option<GeneratedTarball>; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.alias("rustc-docs").default_condition(builder.config.compiler_docs) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustcDocs { host: run.target }); - } - - /// Builds the `rustc-docs` installer component. 
- fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { - let host = self.host; - builder.default_doc(&[]); - - let mut tarball = Tarball::new(builder, "rustc-docs", &host.triple); - tarball.set_product_name("Rustc Documentation"); - tarball.add_bulk_dir(&builder.compiler_doc_out(host), "share/doc/rust/html/rustc"); - Some(tarball.generate()) - } -} - -fn find_files(files: &[&str], path: &[PathBuf]) -> Vec<PathBuf> { - let mut found = Vec::with_capacity(files.len()); - - for file in files { - let file_path = path.iter().map(|dir| dir.join(file)).find(|p| p.exists()); - - if let Some(file_path) = file_path { - found.push(file_path); - } else { - panic!("Could not find '{file}' in {path:?}"); - } - } - - found -} - -fn make_win_dist( - rust_root: &Path, - plat_root: &Path, - target: TargetSelection, - builder: &Builder<'_>, -) { - if builder.config.dry_run() { - return; - } - - //Ask gcc where it keeps its stuff - let mut cmd = Command::new(builder.cc(target)); - cmd.arg("-print-search-dirs"); - let gcc_out = output(&mut cmd); - - let mut bin_path: Vec<_> = env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect(); - let mut lib_path = Vec::new(); - - for line in gcc_out.lines() { - let idx = line.find(':').unwrap(); - let key = &line[..idx]; - let trim_chars: &[_] = &[' ', '=']; - let value = env::split_paths(line[(idx + 1)..].trim_start_matches(trim_chars)); - - if key == "programs" { - bin_path.extend(value); - } else if key == "libraries" { - lib_path.extend(value); - } - } - - let compiler = if target == "i686-pc-windows-gnu" { - "i686-w64-mingw32-gcc.exe" - } else if target == "x86_64-pc-windows-gnu" { - "x86_64-w64-mingw32-gcc.exe" - } else { - "gcc.exe" - }; - let target_tools = [compiler, "ld.exe", "dlltool.exe", "libwinpthread-1.dll"]; - let mut rustc_dlls = vec!["libwinpthread-1.dll"]; - if target.starts_with("i686-") { - rustc_dlls.push("libgcc_s_dw2-1.dll"); - } else { - rustc_dlls.push("libgcc_s_seh-1.dll"); - } - - // Libraries necessary to link the windows-gnu toolchains. - // System libraries will be preferred if they are available (see #67429). - let target_libs = [ - //MinGW libs - "libgcc.a", - "libgcc_eh.a", - "libgcc_s.a", - "libm.a", - "libmingw32.a", - "libmingwex.a", - "libstdc++.a", - "libiconv.a", - "libmoldname.a", - "libpthread.a", - //Windows import libs - //This should contain only the set of libraries necessary to link the standard library. 
- "libadvapi32.a", - "libbcrypt.a", - "libcomctl32.a", - "libcomdlg32.a", - "libcredui.a", - "libcrypt32.a", - "libdbghelp.a", - "libgdi32.a", - "libimagehlp.a", - "libiphlpapi.a", - "libkernel32.a", - "libmsimg32.a", - "libmsvcrt.a", - "libntdll.a", - "libodbc32.a", - "libole32.a", - "liboleaut32.a", - "libopengl32.a", - "libpsapi.a", - "librpcrt4.a", - "libsecur32.a", - "libsetupapi.a", - "libshell32.a", - "libsynchronization.a", - "libuser32.a", - "libuserenv.a", - "libuuid.a", - "libwinhttp.a", - "libwinmm.a", - "libwinspool.a", - "libws2_32.a", - "libwsock32.a", - ]; - - //Find mingw artifacts we want to bundle - let target_tools = find_files(&target_tools, &bin_path); - let rustc_dlls = find_files(&rustc_dlls, &bin_path); - let target_libs = find_files(&target_libs, &lib_path); - - // Copy runtime dlls next to rustc.exe - let dist_bin_dir = rust_root.join("bin/"); - fs::create_dir_all(&dist_bin_dir).expect("creating dist_bin_dir failed"); - for src in rustc_dlls { - builder.copy_to_folder(&src, &dist_bin_dir); - } - - //Copy platform tools to platform-specific bin directory - let target_bin_dir = plat_root - .join("lib") - .join("rustlib") - .join(target.triple) - .join("bin") - .join("self-contained"); - fs::create_dir_all(&target_bin_dir).expect("creating target_bin_dir failed"); - for src in target_tools { - builder.copy_to_folder(&src, &target_bin_dir); - } - - // Warn windows-gnu users that the bundled GCC cannot compile C files - builder.create( - &target_bin_dir.join("GCC-WARNING.txt"), - "gcc.exe contained in this folder cannot be used for compiling C files - it is only \ - used as a linker. In order to be able to compile projects containing C code use \ - the GCC provided by MinGW or Cygwin.", - ); - - //Copy platform libs to platform-specific lib directory - let target_lib_dir = plat_root - .join("lib") - .join("rustlib") - .join(target.triple) - .join("lib") - .join("self-contained"); - fs::create_dir_all(&target_lib_dir).expect("creating target_lib_dir failed"); - for src in target_libs { - builder.copy_to_folder(&src, &target_lib_dir); - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Mingw { - pub host: TargetSelection, -} - -impl Step for Mingw { - type Output = Option; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("rust-mingw") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Mingw { host: run.target }); - } - - /// Builds the `rust-mingw` installer component. - /// - /// This contains all the bits and pieces to run the MinGW Windows targets - /// without any extra installed software (e.g., we bundle gcc, libraries, etc). - fn run(self, builder: &Builder<'_>) -> Option { - let host = self.host; - if !host.ends_with("pc-windows-gnu") || !builder.config.dist_include_mingw_linker { - return None; - } - - let mut tarball = Tarball::new(builder, "rust-mingw", &host.triple); - tarball.set_product_name("Rust MinGW"); - - // The first argument is a "temporary directory" which is just - // thrown away (this contains the runtime DLLs included in the rustc package - // above) and the second argument is where to place all the MinGW components - // (which is what we want). 
- make_win_dist(&tmpdir(builder), tarball.image_dir(), host, &builder); - - Some(tarball.generate()) - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Rustc { - pub compiler: Compiler, -} - -impl Step for Rustc { - type Output = GeneratedTarball; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("rustc") - } - - fn make_run(run: RunConfig<'_>) { - run.builder - .ensure(Rustc { compiler: run.builder.compiler(run.builder.top_stage, run.target) }); - } - - /// Creates the `rustc` installer component. - fn run(self, builder: &Builder<'_>) -> GeneratedTarball { - let compiler = self.compiler; - let host = self.compiler.host; - - let tarball = Tarball::new(builder, "rustc", &host.triple); - - // Prepare the rustc "image", what will actually end up getting installed - prepare_image(builder, compiler, tarball.image_dir()); - - // On MinGW we've got a few runtime DLL dependencies that we need to - // include. The first argument to this script is where to put these DLLs - // (the image we're creating), and the second argument is a junk directory - // to ignore all other MinGW stuff the script creates. - // - // On 32-bit MinGW we're always including a DLL which needs some extra - // licenses to distribute. On 64-bit MinGW we don't actually distribute - // anything requiring us to distribute a license, but it's likely the - // install will *also* include the rust-mingw package, which also needs - // licenses, so to be safe we just include it here in all MinGW packages. - if host.ends_with("pc-windows-gnu") && builder.config.dist_include_mingw_linker { - make_win_dist(tarball.image_dir(), &tmpdir(builder), host, builder); - tarball.add_dir(builder.src.join("src/etc/third-party"), "share/doc"); - } - - return tarball.generate(); - - fn prepare_image(builder: &Builder<'_>, compiler: Compiler, image: &Path) { - let host = compiler.host; - let src = builder.sysroot(compiler); - - // Copy rustc/rustdoc binaries - t!(fs::create_dir_all(image.join("bin"))); - builder.cp_r(&src.join("bin"), &image.join("bin")); - - if builder - .config - .tools - .as_ref() - .map_or(true, |tools| tools.iter().any(|tool| tool == "rustdoc")) - { - let rustdoc = builder.rustdoc(compiler); - builder.install(&rustdoc, &image.join("bin"), 0o755); - } - - if let Some(ra_proc_macro_srv) = builder.ensure_if_default( - tool::RustAnalyzerProcMacroSrv { - compiler: builder.compiler_for( - compiler.stage, - builder.config.build, - compiler.host, - ), - target: compiler.host, - }, - builder.kind, - ) { - builder.install(&ra_proc_macro_srv, &image.join("libexec"), 0o755); - } - - let libdir_relative = builder.libdir_relative(compiler); - - // Copy runtime DLLs needed by the compiler - if libdir_relative.to_str() != Some("bin") { - let libdir = builder.rustc_libdir(compiler); - for entry in builder.read_dir(&libdir) { - let name = entry.file_name(); - if let Some(s) = name.to_str() { - if is_dylib(s) { - // Don't use custom libdir here because ^lib/ will be resolved again - // with installer - builder.install(&entry.path(), &image.join("lib"), 0o644); - } - } - } - } - - // Copy over the codegen backends - let backends_src = builder.sysroot_codegen_backends(compiler); - let backends_rel = backends_src - .strip_prefix(&src) - .unwrap() - .strip_prefix(builder.sysroot_libdir_relative(compiler)) - .unwrap(); - // Don't use custom libdir here because ^lib/ will be resolved again with installer - let backends_dst = 
image.join("lib").join(&backends_rel); - - t!(fs::create_dir_all(&backends_dst)); - builder.cp_r(&backends_src, &backends_dst); - - // Copy libLLVM.so to the lib dir as well, if needed. While not - // technically needed by rustc itself it's needed by lots of other - // components like the llvm tools and LLD. LLD is included below and - // tools/LLDB come later, so let's just throw it in the rustc - // component for now. - maybe_install_llvm_runtime(builder, host, image); - - let dst_dir = image.join("lib/rustlib").join(&*host.triple).join("bin"); - t!(fs::create_dir_all(&dst_dir)); - - // Copy over lld if it's there - if builder.config.lld_enabled { - let src_dir = builder.sysroot_libdir(compiler, host).parent().unwrap().join("bin"); - let rust_lld = exe("rust-lld", compiler.host); - builder.copy(&src_dir.join(&rust_lld), &dst_dir.join(&rust_lld)); - // for `-Z gcc-ld=lld` - let gcc_lld_src_dir = src_dir.join("gcc-ld"); - let gcc_lld_dst_dir = dst_dir.join("gcc-ld"); - t!(fs::create_dir(&gcc_lld_dst_dir)); - for name in crate::LLD_FILE_NAMES { - let exe_name = exe(name, compiler.host); - builder - .copy(&gcc_lld_src_dir.join(&exe_name), &gcc_lld_dst_dir.join(&exe_name)); - } - } - - // Man pages - t!(fs::create_dir_all(image.join("share/man/man1"))); - let man_src = builder.src.join("src/doc/man"); - let man_dst = image.join("share/man/man1"); - - // don't use our `bootstrap::util::{copy, cp_r}`, because those try - // to hardlink, and we don't want to edit the source templates - for file_entry in builder.read_dir(&man_src) { - let page_src = file_entry.path(); - let page_dst = man_dst.join(file_entry.file_name()); - let src_text = t!(std::fs::read_to_string(&page_src)); - let new_text = src_text.replace("", &builder.version); - t!(std::fs::write(&page_dst, &new_text)); - t!(fs::copy(&page_src, &page_dst)); - } - - // Debugger scripts - builder - .ensure(DebuggerScripts { sysroot: INTERNER.intern_path(image.to_owned()), host }); - - // Misc license info - let cp = |file: &str| { - builder.install(&builder.src.join(file), &image.join("share/doc/rust"), 0o644); - }; - cp("COPYRIGHT"); - cp("LICENSE-APACHE"); - cp("LICENSE-MIT"); - cp("README.md"); - } - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct DebuggerScripts { - pub sysroot: Interned, - pub host: TargetSelection, -} - -impl Step for DebuggerScripts { - type Output = (); - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() - } - - /// Copies debugger scripts for `target` into the `sysroot` specified. 
- fn run(self, builder: &Builder<'_>) { - let host = self.host; - let sysroot = self.sysroot; - let dst = sysroot.join("lib/rustlib/etc"); - t!(fs::create_dir_all(&dst)); - let cp_debugger_script = |file: &str| { - builder.install(&builder.src.join("src/etc/").join(file), &dst, 0o644); - }; - if host.contains("windows-msvc") { - // windbg debugger scripts - builder.install( - &builder.src.join("src/etc/rust-windbg.cmd"), - &sysroot.join("bin"), - 0o755, - ); - - cp_debugger_script("natvis/intrinsic.natvis"); - cp_debugger_script("natvis/liballoc.natvis"); - cp_debugger_script("natvis/libcore.natvis"); - cp_debugger_script("natvis/libstd.natvis"); - } else { - cp_debugger_script("rust_types.py"); - - // gdb debugger scripts - builder.install(&builder.src.join("src/etc/rust-gdb"), &sysroot.join("bin"), 0o755); - builder.install(&builder.src.join("src/etc/rust-gdbgui"), &sysroot.join("bin"), 0o755); - - cp_debugger_script("gdb_load_rust_pretty_printers.py"); - cp_debugger_script("gdb_lookup.py"); - cp_debugger_script("gdb_providers.py"); - - // lldb debugger scripts - builder.install(&builder.src.join("src/etc/rust-lldb"), &sysroot.join("bin"), 0o755); - - cp_debugger_script("lldb_lookup.py"); - cp_debugger_script("lldb_providers.py"); - cp_debugger_script("lldb_commands") - } - } -} - -fn skip_host_target_lib(builder: &Builder<'_>, compiler: Compiler) -> bool { - // The only true set of target libraries came from the build triple, so - // let's reduce redundant work by only producing archives from that host. - if compiler.host != builder.config.build { - builder.info("\tskipping, not a build host"); - true - } else { - false - } -} - -/// Check that all objects in rlibs for UEFI targets are COFF. This -/// ensures that the C compiler isn't producing ELF objects, which would -/// not link correctly with the COFF objects. -fn verify_uefi_rlib_format(builder: &Builder<'_>, target: TargetSelection, stamp: &Path) { - if !target.ends_with("-uefi") { - return; - } - - for (path, _) in builder.read_stamp_file(stamp) { - if path.extension() != Some(OsStr::new("rlib")) { - continue; - } - - let data = t!(fs::read(&path)); - let data = data.as_slice(); - let archive = t!(ArchiveFile::parse(data)); - for member in archive.members() { - let member = t!(member); - let member_data = t!(member.data(data)); - - let is_coff = match object::File::parse(member_data) { - Ok(member_file) => member_file.format() == BinaryFormat::Coff, - Err(_) => false, - }; - - if !is_coff { - let member_name = String::from_utf8_lossy(member.name()); - panic!("member {} in {} is not COFF", member_name, path.display()); - } - } - } -} - -/// Copy stamped files into an image's `target/lib` directory. 
-fn copy_target_libs(builder: &Builder<'_>, target: TargetSelection, image: &Path, stamp: &Path) { - let dst = image.join("lib/rustlib").join(target.triple).join("lib"); - let self_contained_dst = dst.join("self-contained"); - t!(fs::create_dir_all(&dst)); - t!(fs::create_dir_all(&self_contained_dst)); - for (path, dependency_type) in builder.read_stamp_file(stamp) { - if dependency_type == DependencyType::TargetSelfContained { - builder.copy(&path, &self_contained_dst.join(path.file_name().unwrap())); - } else if dependency_type == DependencyType::Target || builder.config.build == target { - builder.copy(&path, &dst.join(path.file_name().unwrap())); - } - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Std { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for Std { - type Output = Option<GeneratedTarball>; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("rust-std") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Std { - compiler: run.builder.compiler_for( - run.builder.top_stage, - run.builder.config.build, - run.target, - ), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { - let compiler = self.compiler; - let target = self.target; - - if skip_host_target_lib(builder, compiler) { - return None; - } - - builder.ensure(compile::Std::new(compiler, target)); - - let mut tarball = Tarball::new(builder, "rust-std", &target.triple); - tarball.include_target_in_component_name(true); - - let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); - let stamp = compile::libstd_stamp(builder, compiler_to_use, target); - verify_uefi_rlib_format(builder, target, &stamp); - copy_target_libs(builder, target, &tarball.image_dir(), &stamp); - - Some(tarball.generate()) - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustcDev { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for RustcDev { - type Output = Option<GeneratedTarball>; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("rustc-dev") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustcDev { - compiler: run.builder.compiler_for( - run.builder.top_stage, - run.builder.config.build, - run.target, - ), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { - let compiler = self.compiler; - let target = self.target; - if skip_host_target_lib(builder, compiler) { - return None; - } - - builder.ensure(compile::Rustc::new(compiler, target)); - - let tarball = Tarball::new(builder, "rustc-dev", &target.triple); - - let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); - let stamp = compile::librustc_stamp(builder, compiler_to_use, target); - copy_target_libs(builder, target, tarball.image_dir(), &stamp); - - let src_files = &["Cargo.lock"]; - // This is the reduced set of paths which will become the rustc-dev component - // (essentially the compiler crates and all of their path dependencies). 
- copy_src_dirs( - builder, - &builder.src, - &["compiler"], - &[], - &tarball.image_dir().join("lib/rustlib/rustc-src/rust"), - ); - for file in src_files { - tarball.add_file(builder.src.join(file), "lib/rustlib/rustc-src/rust", 0o644); - } - - Some(tarball.generate()) - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Analysis { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for Analysis { - type Output = Option<GeneratedTarball>; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "analysis"); - run.alias("rust-analysis").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Analysis { - // Find the actual compiler (handling the full bootstrap option) which - // produced the save-analysis data because that data isn't copied - // through the sysroot uplifting. - compiler: run.builder.compiler_for( - run.builder.top_stage, - run.builder.config.build, - run.target, - ), - target: run.target, - }); - } - - /// Creates a tarball of (degenerate) save-analysis metadata, if available. - fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { - let compiler = self.compiler; - let target = self.target; - if compiler.host != builder.config.build { - return None; - } - - let src = builder - .stage_out(compiler, Mode::Std) - .join(target.triple) - .join(builder.cargo_dir()) - .join("deps") - .join("save-analysis"); - - // Write a file indicating that this component has been removed. - t!(std::fs::create_dir_all(&src)); - let mut removed = src.clone(); - removed.push("removed.json"); - let mut f = t!(std::fs::File::create(removed)); - t!(write!(f, r#"{{ "warning": "The `rust-analysis` component has been removed." }}"#)); - - let mut tarball = Tarball::new(builder, "rust-analysis", &target.triple); - tarball.include_target_in_component_name(true); - tarball.add_dir(src, format!("lib/rustlib/{}/analysis", target.triple)); - Some(tarball.generate()) - } -} - -/// Use the `builder` to make a filtered copy of `base`/X for X in (`src_dirs` - `exclude_dirs`) to -/// `dst_dir`. 
-fn copy_src_dirs( - builder: &Builder<'_>, - base: &Path, - src_dirs: &[&str], - exclude_dirs: &[&str], - dst_dir: &Path, -) { - fn filter_fn(exclude_dirs: &[&str], dir: &str, path: &Path) -> bool { - let spath = match path.to_str() { - Some(path) => path, - None => return false, - }; - if spath.ends_with('~') || spath.ends_with(".pyc") { - return false; - } - - const LLVM_PROJECTS: &[&str] = &[ - "llvm-project/clang", - "llvm-project\\clang", - "llvm-project/libunwind", - "llvm-project\\libunwind", - "llvm-project/lld", - "llvm-project\\lld", - "llvm-project/lldb", - "llvm-project\\lldb", - "llvm-project/llvm", - "llvm-project\\llvm", - "llvm-project/compiler-rt", - "llvm-project\\compiler-rt", - "llvm-project/cmake", - "llvm-project\\cmake", - "llvm-project/runtimes", - "llvm-project\\runtimes", - ]; - if spath.contains("llvm-project") - && !spath.ends_with("llvm-project") - && !LLVM_PROJECTS.iter().any(|path| spath.contains(path)) - { - return false; - } - - const LLVM_TEST: &[&str] = &["llvm-project/llvm/test", "llvm-project\\llvm\\test"]; - if LLVM_TEST.iter().any(|path| spath.contains(path)) - && (spath.ends_with(".ll") || spath.ends_with(".td") || spath.ends_with(".s")) - { - return false; - } - - let full_path = Path::new(dir).join(path); - if exclude_dirs.iter().any(|excl| full_path == Path::new(excl)) { - return false; - } - - let excludes = [ - "CVS", - "RCS", - "SCCS", - ".git", - ".gitignore", - ".gitmodules", - ".gitattributes", - ".cvsignore", - ".svn", - ".arch-ids", - "{arch}", - "=RELEASE-ID", - "=meta-update", - "=update", - ".bzr", - ".bzrignore", - ".bzrtags", - ".hg", - ".hgignore", - ".hgrags", - "_darcs", - ]; - !path.iter().map(|s| s.to_str().unwrap()).any(|s| excludes.contains(&s)) - } - - // Copy the directories using our filter - for item in src_dirs { - let dst = &dst_dir.join(item); - t!(fs::create_dir_all(dst)); - builder.cp_filtered(&base.join(item), dst, &|path| filter_fn(exclude_dirs, item, path)); - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Src; - -impl Step for Src { - /// The output path of the src installer tarball - type Output = GeneratedTarball; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("rust-src") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Src); - } - - /// Creates the `rust-src` installer component - fn run(self, builder: &Builder<'_>) -> GeneratedTarball { - if !builder.config.dry_run() { - builder.update_submodule(&Path::new("src/llvm-project")); - } - - let tarball = Tarball::new_targetless(builder, "rust-src"); - - // A lot of tools expect the rust-src component to be entirely in this directory, so if you - // change that (e.g. by adding another directory `lib/rustlib/src/foo` or - // `lib/rustlib/src/rust/foo`), you will need to go around hunting for implicit assumptions - // and fix them... - // - // NOTE: if you update the paths here, you also should update the "virtual" path - // translation code in `imported_source_files` in `src/librustc_metadata/rmeta/decoder.rs` - let dst_src = tarball.image_dir().join("lib/rustlib/src/rust"); - - let src_files = ["Cargo.lock"]; - // This is the reduced set of paths which will become the rust-src component - // (essentially libstd and all of its path dependencies). - copy_src_dirs( - builder, - &builder.src, - &["library", "src/llvm-project/libunwind"], - &[ - // not needed and contains symlinks which rustup currently - // chokes on when unpacking. 
- "library/backtrace/crates", - // these are 30MB combined and aren't necessary for building - // the standard library. - "library/stdarch/Cargo.toml", - "library/stdarch/crates/stdarch-verify", - "library/stdarch/crates/intrinsic-test", - ], - &dst_src, - ); - for file in src_files.iter() { - builder.copy(&builder.src.join(file), &dst_src.join(file)); - } - - tarball.generate() - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct PlainSourceTarball; - -impl Step for PlainSourceTarball { - /// Produces the location of the tarball generated - type Output = GeneratedTarball; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.alias("rustc-src").default_condition(builder.config.rust_dist_src) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(PlainSourceTarball); - } - - /// Creates the plain source tarball - fn run(self, builder: &Builder<'_>) -> GeneratedTarball { - // NOTE: This is a strange component in a lot of ways. It uses `src` as the target, which - // means neither rustup nor rustup-toolchain-install-master know how to download it. - // It also contains symbolic links, unlike other any other dist tarball. - // It's used for distros building rustc from source in a pre-vendored environment. - let mut tarball = Tarball::new(builder, "rustc", "src"); - tarball.permit_symlinks(true); - let plain_dst_src = tarball.image_dir(); - - // This is the set of root paths which will become part of the source package - let src_files = [ - "COPYRIGHT", - "LICENSE-APACHE", - "LICENSE-MIT", - "CONTRIBUTING.md", - "README.md", - "RELEASES.md", - "configure", - "x.py", - "config.example.toml", - "Cargo.toml", - "Cargo.lock", - ".gitmodules", - ]; - let src_dirs = ["src", "compiler", "library", "tests"]; - - copy_src_dirs(builder, &builder.src, &src_dirs, &[], &plain_dst_src); - - // Copy the files normally - for item in &src_files { - builder.copy(&builder.src.join(item), &plain_dst_src.join(item)); - } - - // Create the version file - builder.create(&plain_dst_src.join("version"), &builder.rust_version()); - if let Some(info) = builder.rust_info().info() { - channel::write_commit_hash_file(&plain_dst_src, &info.sha); - channel::write_commit_info_file(&plain_dst_src, info); - } - - // If we're building from git sources, we need to vendor a complete distribution. - if builder.rust_info().is_managed_git_subrepository() { - // Ensure we have the submodules checked out. - builder.update_submodule(Path::new("src/tools/cargo")); - builder.update_submodule(Path::new("src/tools/rust-analyzer")); - - // Vendor all Cargo dependencies - let mut cmd = Command::new(&builder.initial_cargo); - cmd.arg("vendor") - .arg("--sync") - .arg(builder.src.join("./src/tools/cargo/Cargo.toml")) - .arg("--sync") - .arg(builder.src.join("./src/tools/rust-analyzer/Cargo.toml")) - .arg("--sync") - .arg(builder.src.join("./compiler/rustc_codegen_cranelift/Cargo.toml")) - .arg("--sync") - .arg(builder.src.join("./src/bootstrap/Cargo.toml")) - // Will read the libstd Cargo.toml - // which uses the unstable `public-dependency` feature. 
- .env("RUSTC_BOOTSTRAP", "1") - .current_dir(&plain_dst_src); - - let config = if !builder.config.dry_run() { - t!(String::from_utf8(t!(cmd.output()).stdout)) - } else { - String::new() - }; - - let cargo_config_dir = plain_dst_src.join(".cargo"); - builder.create_dir(&cargo_config_dir); - builder.create(&cargo_config_dir.join("config.toml"), &config); - } - - tarball.bare() - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Cargo { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for Cargo { - type Output = Option; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "cargo"); - run.alias("cargo").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Cargo { - compiler: run.builder.compiler_for( - run.builder.top_stage, - run.builder.config.build, - run.target, - ), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> Option { - let compiler = self.compiler; - let target = self.target; - - let cargo = builder.ensure(tool::Cargo { compiler, target }); - let src = builder.src.join("src/tools/cargo"); - let etc = src.join("src/etc"); - - // Prepare the image directory - let mut tarball = Tarball::new(builder, "cargo", &target.triple); - tarball.set_overlay(OverlayKind::Cargo); - - tarball.add_file(&cargo, "bin", 0o755); - tarball.add_file(etc.join("_cargo"), "share/zsh/site-functions", 0o644); - tarball.add_renamed_file(etc.join("cargo.bashcomp.sh"), "etc/bash_completion.d", "cargo"); - tarball.add_dir(etc.join("man"), "share/man/man1"); - tarball.add_legal_and_readme_to("share/doc/cargo"); - - Some(tarball.generate()) - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Rls { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for Rls { - type Output = Option; - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "rls"); - run.alias("rls").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Rls { - compiler: run.builder.compiler_for( - run.builder.top_stage, - run.builder.config.build, - run.target, - ), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> Option { - let compiler = self.compiler; - let target = self.target; - - let rls = builder - .ensure(tool::Rls { compiler, target, extra_features: Vec::new() }) - .expect("rls expected to build"); - - let mut tarball = Tarball::new(builder, "rls", &target.triple); - tarball.set_overlay(OverlayKind::RLS); - tarball.is_preview(true); - tarball.add_file(rls, "bin", 0o755); - tarball.add_legal_and_readme_to("share/doc/rls"); - Some(tarball.generate()) - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustAnalyzer { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for RustAnalyzer { - type Output = Option; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "rust-analyzer"); - run.alias("rust-analyzer").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustAnalyzer { - compiler: run.builder.compiler_for( - run.builder.top_stage, - 
run.builder.config.build, - run.target, - ), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { - let compiler = self.compiler; - let target = self.target; - - let rust_analyzer = builder - .ensure(tool::RustAnalyzer { compiler, target }) - .expect("rust-analyzer always builds"); - - let mut tarball = Tarball::new(builder, "rust-analyzer", &target.triple); - tarball.set_overlay(OverlayKind::RustAnalyzer); - tarball.is_preview(true); - tarball.add_file(rust_analyzer, "bin", 0o755); - tarball.add_legal_and_readme_to("share/doc/rust-analyzer"); - Some(tarball.generate()) - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Clippy { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for Clippy { - type Output = Option<GeneratedTarball>; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "clippy"); - run.alias("clippy").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Clippy { - compiler: run.builder.compiler_for( - run.builder.top_stage, - run.builder.config.build, - run.target, - ), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { - let compiler = self.compiler; - let target = self.target; - - // Prepare the image directory - // We expect clippy to build, because we've exited this step above if tool - // state for clippy isn't testing. - let clippy = builder - .ensure(tool::Clippy { compiler, target, extra_features: Vec::new() }) - .expect("clippy expected to build - essential tool"); - let cargoclippy = builder - .ensure(tool::CargoClippy { compiler, target, extra_features: Vec::new() }) - .expect("clippy expected to build - essential tool"); - - let mut tarball = Tarball::new(builder, "clippy", &target.triple); - tarball.set_overlay(OverlayKind::Clippy); - tarball.is_preview(true); - tarball.add_file(clippy, "bin", 0o755); - tarball.add_file(cargoclippy, "bin", 0o755); - tarball.add_legal_and_readme_to("share/doc/clippy"); - Some(tarball.generate()) - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Miri { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for Miri { - type Output = Option<GeneratedTarball>; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "miri"); - run.alias("miri").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Miri { - compiler: run.builder.compiler_for( - run.builder.top_stage, - run.builder.config.build, - run.target, - ), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { - // This prevents miri from being built for "dist" or "install" - // on the stable/beta channels. It is a nightly-only tool and should - // not be included. 
- if !builder.build.unstable_features() { - return None; - } - let compiler = self.compiler; - let target = self.target; - - let miri = builder.ensure(tool::Miri { compiler, target, extra_features: Vec::new() })?; - let cargomiri = - builder.ensure(tool::CargoMiri { compiler, target, extra_features: Vec::new() })?; - - let mut tarball = Tarball::new(builder, "miri", &target.triple); - tarball.set_overlay(OverlayKind::Miri); - tarball.is_preview(true); - tarball.add_file(miri, "bin", 0o755); - tarball.add_file(cargomiri, "bin", 0o755); - tarball.add_legal_and_readme_to("share/doc/miri"); - Some(tarball.generate()) - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Rustfmt { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for Rustfmt { - type Output = Option<GeneratedTarball>; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "rustfmt"); - run.alias("rustfmt").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Rustfmt { - compiler: run.builder.compiler_for( - run.builder.top_stage, - run.builder.config.build, - run.target, - ), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { - let compiler = self.compiler; - let target = self.target; - - let rustfmt = builder - .ensure(tool::Rustfmt { compiler, target, extra_features: Vec::new() }) - .expect("rustfmt expected to build - essential tool"); - let cargofmt = builder - .ensure(tool::Cargofmt { compiler, target, extra_features: Vec::new() }) - .expect("cargo fmt expected to build - essential tool"); - let mut tarball = Tarball::new(builder, "rustfmt", &target.triple); - tarball.set_overlay(OverlayKind::Rustfmt); - tarball.is_preview(true); - tarball.add_file(rustfmt, "bin", 0o755); - tarball.add_file(cargofmt, "bin", 0o755); - tarball.add_legal_and_readme_to("share/doc/rustfmt"); - Some(tarball.generate()) - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustDemangler { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for RustDemangler { - type Output = Option<GeneratedTarball>; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - // While other tools use `should_build_extended_tool` to decide whether to be run by - // default or not, `rust-demangler` must be build when *either* it's enabled as a tool like - // the other ones or if `profiler = true`. Because we don't know the target at this stage - // we run the step by default when only `extended = true`, and decide whether to actually - // run it or not later. 
- let default = run.builder.config.extended; - run.alias("rust-demangler").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustDemangler { - compiler: run.builder.compiler_for( - run.builder.top_stage, - run.builder.config.build, - run.target, - ), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> Option { - let compiler = self.compiler; - let target = self.target; - - // Only build this extended tool if explicitly included in `tools`, or if `profiler = true` - let condition = should_build_extended_tool(builder, "rust-demangler") - || builder.config.profiler_enabled(target); - if builder.config.extended && !condition { - return None; - } - - let rust_demangler = builder - .ensure(tool::RustDemangler { compiler, target, extra_features: Vec::new() }) - .expect("rust-demangler expected to build - in-tree tool"); - - // Prepare the image directory - let mut tarball = Tarball::new(builder, "rust-demangler", &target.triple); - tarball.set_overlay(OverlayKind::RustDemangler); - tarball.is_preview(true); - tarball.add_file(&rust_demangler, "bin", 0o755); - tarball.add_legal_and_readme_to("share/doc/rust-demangler"); - Some(tarball.generate()) - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Extended { - stage: u32, - host: TargetSelection, - target: TargetSelection, -} - -impl Step for Extended { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.alias("extended").default_condition(builder.config.extended) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Extended { - stage: run.builder.top_stage, - host: run.builder.config.build, - target: run.target, - }); - } - - /// Creates a combined installer for the specified target in the provided stage. - fn run(self, builder: &Builder<'_>) { - let target = self.target; - let stage = self.stage; - let compiler = builder.compiler_for(self.stage, self.host, self.target); - - builder.info(&format!("Dist extended stage{} ({})", compiler.stage, target)); - - let mut tarballs = Vec::new(); - let mut built_tools = HashSet::new(); - macro_rules! add_component { - ($name:expr => $step:expr) => { - if let Some(tarball) = builder.ensure_if_default($step, Kind::Dist) { - tarballs.push(tarball); - built_tools.insert($name); - } - }; - } - - // When rust-std package split from rustc, we needed to ensure that during - // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering - // the std files during uninstall. To do this ensure that rustc comes - // before rust-std in the list below. 
- tarballs.push(builder.ensure(Rustc { compiler: builder.compiler(stage, target) })); - tarballs.push(builder.ensure(Std { compiler, target }).expect("missing std")); - - if target.ends_with("windows-gnu") { - tarballs.push(builder.ensure(Mingw { host: target }).expect("missing mingw")); - } - - add_component!("rust-docs" => Docs { host: target }); - add_component!("rust-json-docs" => JsonDocs { host: target }); - add_component!("rust-demangler"=> RustDemangler { compiler, target }); - add_component!("cargo" => Cargo { compiler, target }); - add_component!("rustfmt" => Rustfmt { compiler, target }); - add_component!("rls" => Rls { compiler, target }); - add_component!("rust-analyzer" => RustAnalyzer { compiler, target }); - add_component!("llvm-components" => LlvmTools { target }); - add_component!("clippy" => Clippy { compiler, target }); - add_component!("miri" => Miri { compiler, target }); - add_component!("analysis" => Analysis { compiler, target }); - - let etc = builder.src.join("src/etc/installer"); - - // Avoid producing tarballs during a dry run. - if builder.config.dry_run() { - return; - } - - let tarball = Tarball::new(builder, "rust", &target.triple); - let generated = tarball.combine(&tarballs); - - let tmp = tmpdir(builder).join("combined-tarball"); - let work = generated.work_dir(); - - let mut license = String::new(); - license += &builder.read(&builder.src.join("COPYRIGHT")); - license += &builder.read(&builder.src.join("LICENSE-APACHE")); - license += &builder.read(&builder.src.join("LICENSE-MIT")); - license.push('\n'); - license.push('\n'); - - let rtf = r"{\rtf1\ansi\deff0{\fonttbl{\f0\fnil\fcharset0 Arial;}}\nowwrap\fs18"; - let mut rtf = rtf.to_string(); - rtf.push('\n'); - for line in license.lines() { - rtf.push_str(line); - rtf.push_str("\\line "); - } - rtf.push('}'); - - fn filter(contents: &str, marker: &str) -> String { - let start = format!("tool-{marker}-start"); - let end = format!("tool-{marker}-end"); - let mut lines = Vec::new(); - let mut omitted = false; - for line in contents.lines() { - if line.contains(&start) { - omitted = true; - } else if line.contains(&end) { - omitted = false; - } else if !omitted { - lines.push(line); - } - } - - lines.join("\n") - } - - let xform = |p: &Path| { - let mut contents = t!(fs::read_to_string(p)); - for tool in &["rust-demangler", "miri", "rust-docs"] { - if !built_tools.contains(tool) { - contents = filter(&contents, tool); - } - } - let ret = tmp.join(p.file_name().unwrap()); - t!(fs::write(&ret, &contents)); - ret - }; - - if target.contains("apple-darwin") { - builder.info("building pkg installer"); - let pkg = tmp.join("pkg"); - let _ = fs::remove_dir_all(&pkg); - - let pkgbuild = |component: &str| { - let mut cmd = Command::new("pkgbuild"); - cmd.arg("--identifier") - .arg(format!("org.rust-lang.{}", component)) - .arg("--scripts") - .arg(pkg.join(component)) - .arg("--nopayload") - .arg(pkg.join(component).with_extension("pkg")); - builder.run(&mut cmd); - }; - - let prepare = |name: &str| { - builder.create_dir(&pkg.join(name)); - builder.cp_r( - &work.join(&format!("{}-{}", pkgname(builder, name), target.triple)), - &pkg.join(name), - ); - builder.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755); - pkgbuild(name); - }; - prepare("rustc"); - prepare("cargo"); - prepare("rust-std"); - prepare("rust-analysis"); - prepare("clippy"); - prepare("rust-analyzer"); - for tool in &["rust-docs", "rust-demangler", "miri"] { - if built_tools.contains(tool) { - prepare(tool); - } - } - // create an 
'uninstall' package - builder.install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755); - pkgbuild("uninstall"); - - builder.create_dir(&pkg.join("res")); - builder.create(&pkg.join("res/LICENSE.txt"), &license); - builder.install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644); - let mut cmd = Command::new("productbuild"); - cmd.arg("--distribution") - .arg(xform(&etc.join("pkg/Distribution.xml"))) - .arg("--resources") - .arg(pkg.join("res")) - .arg(distdir(builder).join(format!( - "{}-{}.pkg", - pkgname(builder, "rust"), - target.triple - ))) - .arg("--package-path") - .arg(&pkg); - let _time = timeit(builder); - builder.run(&mut cmd); - } - - if target.contains("windows") { - let exe = tmp.join("exe"); - let _ = fs::remove_dir_all(&exe); - - let prepare = |name: &str| { - builder.create_dir(&exe.join(name)); - let dir = if name == "rust-std" || name == "rust-analysis" { - format!("{}-{}", name, target.triple) - } else if name == "rust-analyzer" { - "rust-analyzer-preview".to_string() - } else if name == "clippy" { - "clippy-preview".to_string() - } else if name == "rust-demangler" { - "rust-demangler-preview".to_string() - } else if name == "miri" { - "miri-preview".to_string() - } else { - name.to_string() - }; - builder.cp_r( - &work.join(&format!("{}-{}", pkgname(builder, name), target.triple)).join(dir), - &exe.join(name), - ); - builder.remove(&exe.join(name).join("manifest.in")); - }; - prepare("rustc"); - prepare("cargo"); - prepare("rust-analysis"); - prepare("rust-std"); - for tool in &["clippy", "rust-analyzer", "rust-docs", "rust-demangler", "miri"] { - if built_tools.contains(tool) { - prepare(tool); - } - } - if target.ends_with("windows-gnu") { - prepare("rust-mingw"); - } - - builder.install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644); - - // Generate msi installer - let wix_path = env::var_os("WIX") - .expect("`WIX` environment variable must be set for generating MSI installer(s)."); - let wix = PathBuf::from(wix_path); - let heat = wix.join("bin/heat.exe"); - let candle = wix.join("bin/candle.exe"); - let light = wix.join("bin/light.exe"); - - let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"]; - builder.run( - Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rustc") - .args(&heat_flags) - .arg("-cg") - .arg("RustcGroup") - .arg("-dr") - .arg("Rustc") - .arg("-var") - .arg("var.RustcDir") - .arg("-out") - .arg(exe.join("RustcGroup.wxs")), - ); - if built_tools.contains("rust-docs") { - builder.run( - Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-docs") - .args(&heat_flags) - .arg("-cg") - .arg("DocsGroup") - .arg("-dr") - .arg("Docs") - .arg("-var") - .arg("var.DocsDir") - .arg("-out") - .arg(exe.join("DocsGroup.wxs")) - .arg("-t") - .arg(etc.join("msi/squash-components.xsl")), - ); - } - builder.run( - Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("cargo") - .args(&heat_flags) - .arg("-cg") - .arg("CargoGroup") - .arg("-dr") - .arg("Cargo") - .arg("-var") - .arg("var.CargoDir") - .arg("-out") - .arg(exe.join("CargoGroup.wxs")) - .arg("-t") - .arg(etc.join("msi/remove-duplicates.xsl")), - ); - builder.run( - Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-std") - .args(&heat_flags) - .arg("-cg") - .arg("StdGroup") - .arg("-dr") - .arg("Std") - .arg("-var") - .arg("var.StdDir") - .arg("-out") - .arg(exe.join("StdGroup.wxs")), - ); - if built_tools.contains("rust-analyzer") { - builder.run( - Command::new(&heat) - .current_dir(&exe) - .arg("dir") - 
.arg("rust-analyzer") - .args(&heat_flags) - .arg("-cg") - .arg("RustAnalyzerGroup") - .arg("-dr") - .arg("RustAnalyzer") - .arg("-var") - .arg("var.RustAnalyzerDir") - .arg("-out") - .arg(exe.join("RustAnalyzerGroup.wxs")) - .arg("-t") - .arg(etc.join("msi/remove-duplicates.xsl")), - ); - } - if built_tools.contains("clippy") { - builder.run( - Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("clippy") - .args(&heat_flags) - .arg("-cg") - .arg("ClippyGroup") - .arg("-dr") - .arg("Clippy") - .arg("-var") - .arg("var.ClippyDir") - .arg("-out") - .arg(exe.join("ClippyGroup.wxs")) - .arg("-t") - .arg(etc.join("msi/remove-duplicates.xsl")), - ); - } - if built_tools.contains("rust-demangler") { - builder.run( - Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-demangler") - .args(&heat_flags) - .arg("-cg") - .arg("RustDemanglerGroup") - .arg("-dr") - .arg("RustDemangler") - .arg("-var") - .arg("var.RustDemanglerDir") - .arg("-out") - .arg(exe.join("RustDemanglerGroup.wxs")) - .arg("-t") - .arg(etc.join("msi/remove-duplicates.xsl")), - ); - } - if built_tools.contains("miri") { - builder.run( - Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("miri") - .args(&heat_flags) - .arg("-cg") - .arg("MiriGroup") - .arg("-dr") - .arg("Miri") - .arg("-var") - .arg("var.MiriDir") - .arg("-out") - .arg(exe.join("MiriGroup.wxs")) - .arg("-t") - .arg(etc.join("msi/remove-duplicates.xsl")), - ); - } - builder.run( - Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-analysis") - .args(&heat_flags) - .arg("-cg") - .arg("AnalysisGroup") - .arg("-dr") - .arg("Analysis") - .arg("-var") - .arg("var.AnalysisDir") - .arg("-out") - .arg(exe.join("AnalysisGroup.wxs")) - .arg("-t") - .arg(etc.join("msi/remove-duplicates.xsl")), - ); - if target.ends_with("windows-gnu") { - builder.run( - Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-mingw") - .args(&heat_flags) - .arg("-cg") - .arg("GccGroup") - .arg("-dr") - .arg("Gcc") - .arg("-var") - .arg("var.GccDir") - .arg("-out") - .arg(exe.join("GccGroup.wxs")), - ); - } - - let candle = |input: &Path| { - let output = exe.join(input.file_stem().unwrap()).with_extension("wixobj"); - let arch = if target.contains("x86_64") { "x64" } else { "x86" }; - let mut cmd = Command::new(&candle); - cmd.current_dir(&exe) - .arg("-nologo") - .arg("-dRustcDir=rustc") - .arg("-dCargoDir=cargo") - .arg("-dStdDir=rust-std") - .arg("-dAnalysisDir=rust-analysis") - .arg("-arch") - .arg(&arch) - .arg("-out") - .arg(&output) - .arg(&input); - add_env(builder, &mut cmd, target); - - if built_tools.contains("clippy") { - cmd.arg("-dClippyDir=clippy"); - } - if built_tools.contains("rust-docs") { - cmd.arg("-dDocsDir=rust-docs"); - } - if built_tools.contains("rust-demangler") { - cmd.arg("-dRustDemanglerDir=rust-demangler"); - } - if built_tools.contains("rust-analyzer") { - cmd.arg("-dRustAnalyzerDir=rust-analyzer"); - } - if built_tools.contains("miri") { - cmd.arg("-dMiriDir=miri"); - } - if target.ends_with("windows-gnu") { - cmd.arg("-dGccDir=rust-mingw"); - } - builder.run(&mut cmd); - }; - candle(&xform(&etc.join("msi/rust.wxs"))); - candle(&etc.join("msi/ui.wxs")); - candle(&etc.join("msi/rustwelcomedlg.wxs")); - candle("RustcGroup.wxs".as_ref()); - if built_tools.contains("rust-docs") { - candle("DocsGroup.wxs".as_ref()); - } - candle("CargoGroup.wxs".as_ref()); - candle("StdGroup.wxs".as_ref()); - if built_tools.contains("clippy") { - candle("ClippyGroup.wxs".as_ref()); - } - if built_tools.contains("miri") 
{ - candle("MiriGroup.wxs".as_ref()); - } - if built_tools.contains("rust-demangler") { - candle("RustDemanglerGroup.wxs".as_ref()); - } - if built_tools.contains("rust-analyzer") { - candle("RustAnalyzerGroup.wxs".as_ref()); - } - candle("AnalysisGroup.wxs".as_ref()); - - if target.ends_with("windows-gnu") { - candle("GccGroup.wxs".as_ref()); - } - - builder.create(&exe.join("LICENSE.rtf"), &rtf); - builder.install(&etc.join("gfx/banner.bmp"), &exe, 0o644); - builder.install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644); - - builder.info(&format!("building `msi` installer with {light:?}")); - let filename = format!("{}-{}.msi", pkgname(builder, "rust"), target.triple); - let mut cmd = Command::new(&light); - cmd.arg("-nologo") - .arg("-ext") - .arg("WixUIExtension") - .arg("-ext") - .arg("WixUtilExtension") - .arg("-out") - .arg(exe.join(&filename)) - .arg("rust.wixobj") - .arg("ui.wixobj") - .arg("rustwelcomedlg.wixobj") - .arg("RustcGroup.wixobj") - .arg("CargoGroup.wixobj") - .arg("StdGroup.wixobj") - .arg("AnalysisGroup.wixobj") - .current_dir(&exe); - - if built_tools.contains("clippy") { - cmd.arg("ClippyGroup.wixobj"); - } - if built_tools.contains("miri") { - cmd.arg("MiriGroup.wixobj"); - } - if built_tools.contains("rust-analyzer") { - cmd.arg("RustAnalyzerGroup.wixobj"); - } - if built_tools.contains("rust-demangler") { - cmd.arg("RustDemanglerGroup.wixobj"); - } - if built_tools.contains("rust-docs") { - cmd.arg("DocsGroup.wixobj"); - } - - if target.ends_with("windows-gnu") { - cmd.arg("GccGroup.wixobj"); - } - // ICE57 wrongly complains about the shortcuts - cmd.arg("-sice:ICE57"); - - let _time = timeit(builder); - builder.run(&mut cmd); - - if !builder.config.dry_run() { - t!(fs::rename(exe.join(&filename), distdir(builder).join(&filename))); - } - } - } -} - -fn add_env(builder: &Builder<'_>, cmd: &mut Command, target: TargetSelection) { - let mut parts = builder.version.split('.'); - cmd.env("CFG_RELEASE_INFO", builder.rust_version()) - .env("CFG_RELEASE_NUM", &builder.version) - .env("CFG_RELEASE", builder.rust_release()) - .env("CFG_VER_MAJOR", parts.next().unwrap()) - .env("CFG_VER_MINOR", parts.next().unwrap()) - .env("CFG_VER_PATCH", parts.next().unwrap()) - .env("CFG_VER_BUILD", "0") // just needed to build - .env("CFG_PACKAGE_VERS", builder.rust_package_vers()) - .env("CFG_PACKAGE_NAME", pkgname(builder, "rust")) - .env("CFG_BUILD", target.triple) - .env("CFG_CHANNEL", &builder.config.channel); - - if target.contains("windows-gnullvm") { - cmd.env("CFG_MINGW", "1").env("CFG_ABI", "LLVM"); - } else if target.contains("windows-gnu") { - cmd.env("CFG_MINGW", "1").env("CFG_ABI", "GNU"); - } else { - cmd.env("CFG_MINGW", "0").env("CFG_ABI", "MSVC"); - } -} - -fn install_llvm_file(builder: &Builder<'_>, source: &Path, destination: &Path) { - if builder.config.dry_run() { - return; - } - - builder.install(&source, destination, 0o644); -} - -/// Maybe add LLVM object files to the given destination lib-dir. Allows either static or dynamic linking. -/// -/// Returns whether the files were actually copied. -fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir: &Path) -> bool { - if let Some(config) = builder.config.target_config.get(&target) { - if config.llvm_config.is_some() && !builder.config.llvm_from_ci { - // If the LLVM was externally provided, then we don't currently copy - // artifacts into the sysroot. 
This is not necessarily the right - // choice (in particular, it will require the LLVM dylib to be in - // the linker's load path at runtime), but the common use case for - // external LLVMs is distribution provided LLVMs, and in that case - // they're usually in the standard search path (e.g., /usr/lib) and - // copying them here is going to cause problems as we may end up - // with the wrong files and isn't what distributions want. - // - // This behavior may be revisited in the future though. - // - // If the LLVM is coming from ourselves (just from CI) though, we - // still want to install it, as it otherwise won't be available. - return false; - } - } - - // On macOS, rustc (and LLVM tools) link to an unversioned libLLVM.dylib - // instead of libLLVM-11-rust-....dylib, as on linux. It's not entirely - // clear why this is the case, though. llvm-config will emit the versioned - // paths and we don't want those in the sysroot (as we're expecting - // unversioned paths). - if target.contains("apple-darwin") && builder.llvm_link_shared() { - let src_libdir = builder.llvm_out(target).join("lib"); - let llvm_dylib_path = src_libdir.join("libLLVM.dylib"); - if llvm_dylib_path.exists() { - builder.install(&llvm_dylib_path, dst_libdir, 0o644); - } - !builder.config.dry_run() - } else if let Ok(llvm::LlvmResult { llvm_config, .. }) = - llvm::prebuilt_llvm_config(builder, target) - { - let mut cmd = Command::new(llvm_config); - cmd.arg("--libfiles"); - builder.verbose(&format!("running {cmd:?}")); - let files = if builder.config.dry_run() { "".into() } else { output(&mut cmd) }; - let build_llvm_out = &builder.llvm_out(builder.config.build); - let target_llvm_out = &builder.llvm_out(target); - for file in files.trim_end().split(' ') { - // If we're not using a custom LLVM, make sure we package for the target. - let file = if let Ok(relative_path) = Path::new(file).strip_prefix(build_llvm_out) { - target_llvm_out.join(relative_path) - } else { - PathBuf::from(file) - }; - install_llvm_file(builder, &file, dst_libdir); - } - !builder.config.dry_run() - } else { - false - } -} - -/// Maybe add libLLVM.so to the target lib-dir for linking. -pub fn maybe_install_llvm_target(builder: &Builder<'_>, target: TargetSelection, sysroot: &Path) { - let dst_libdir = sysroot.join("lib/rustlib").join(&*target.triple).join("lib"); - // We do not need to copy LLVM files into the sysroot if it is not - // dynamically linked; it is already included into librustc_llvm - // statically. - if builder.llvm_link_shared() { - maybe_install_llvm(builder, target, &dst_libdir); - } -} - -/// Maybe add libLLVM.so to the runtime lib-dir for rustc itself. -pub fn maybe_install_llvm_runtime(builder: &Builder<'_>, target: TargetSelection, sysroot: &Path) { - let dst_libdir = - sysroot.join(builder.sysroot_libdir_relative(Compiler { stage: 1, host: target })); - // We do not need to copy LLVM files into the sysroot if it is not - // dynamically linked; it is already included into librustc_llvm - // statically. - if builder.llvm_link_shared() { - maybe_install_llvm(builder, target, &dst_libdir); - } -} - -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub struct LlvmTools { - pub target: TargetSelection, -} - -impl Step for LlvmTools { - type Output = Option; - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = should_build_extended_tool(&run.builder, "llvm-tools"); - // FIXME: allow using the names of the tools themselves? 
- run.alias("llvm-tools").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(LlvmTools { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) -> Option { - let target = self.target; - - /* run only if llvm-config isn't used */ - if let Some(config) = builder.config.target_config.get(&target) { - if let Some(ref _s) = config.llvm_config { - builder.info(&format!("Skipping LlvmTools ({target}): external LLVM")); - return None; - } - } - - builder.ensure(crate::llvm::Llvm { target }); - - let mut tarball = Tarball::new(builder, "llvm-tools", &target.triple); - tarball.set_overlay(OverlayKind::LLVM); - tarball.is_preview(true); - - // Prepare the image directory - let src_bindir = builder.llvm_out(target).join("bin"); - let dst_bindir = format!("lib/rustlib/{}/bin", target.triple); - for tool in LLVM_TOOLS { - let exe = src_bindir.join(exe(tool, target)); - tarball.add_file(&exe, &dst_bindir, 0o755); - } - - // Copy libLLVM.so to the target lib dir as well, so the RPATH like - // `$ORIGIN/../lib` can find it. It may also be used as a dependency - // of `rustc-dev` to support the inherited `-lLLVM` when using the - // compiler libraries. - maybe_install_llvm_target(builder, target, tarball.image_dir()); - - Some(tarball.generate()) - } -} - -// Tarball intended for internal consumption to ease rustc/std development. -// -// Should not be considered stable by end users. -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub struct RustDev { - pub target: TargetSelection, -} - -impl Step for RustDev { - type Output = Option; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("rust-dev") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustDev { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) -> Option { - let target = self.target; - - /* run only if llvm-config isn't used */ - if let Some(config) = builder.config.target_config.get(&target) { - if let Some(ref _s) = config.llvm_config { - builder.info(&format!("Skipping RustDev ({target}): external LLVM")); - return None; - } - } - - let mut tarball = Tarball::new(builder, "rust-dev", &target.triple); - tarball.set_overlay(OverlayKind::LLVM); - - builder.ensure(crate::llvm::Llvm { target }); - - // We want to package `lld` to use it with `download-ci-llvm`. - builder.ensure(crate::llvm::Lld { target }); - - let src_bindir = builder.llvm_out(target).join("bin"); - // If updating this list, you likely want to change - // src/bootstrap/download-ci-llvm-stamp as well, otherwise local users - // will not pick up the extra file until LLVM gets bumped. - for bin in &[ - "llvm-config", - "llvm-ar", - "llvm-objdump", - "llvm-profdata", - "llvm-bcanalyzer", - "llvm-cov", - "llvm-dwp", - "llvm-nm", - "llvm-dwarfdump", - "llvm-dis", - "llvm-tblgen", - ] { - tarball.add_file(src_bindir.join(exe(bin, target)), "bin", 0o755); - } - - // We don't build LLD on some platforms, so only add it if it exists - let lld_path = builder.lld_out(target).join("bin").join(exe("lld", target)); - if lld_path.exists() { - tarball.add_file(lld_path, "bin", 0o755); - } - - tarball.add_file(&builder.llvm_filecheck(target), "bin", 0o755); - - // Copy the include directory as well; needed mostly to build - // librustc_llvm properly (e.g., llvm-config.h is in here). But also - // just broadly useful to be able to link against the bundled LLVM. 
- tarball.add_dir(&builder.llvm_out(target).join("include"), "include"); - - // Copy libLLVM.so to the target lib dir as well, so the RPATH like - // `$ORIGIN/../lib` can find it. It may also be used as a dependency - // of `rustc-dev` to support the inherited `-lLLVM` when using the - // compiler libraries. - let dst_libdir = tarball.image_dir().join("lib"); - maybe_install_llvm(builder, target, &dst_libdir); - let link_type = if builder.llvm_link_shared() { "dynamic" } else { "static" }; - t!(std::fs::write(tarball.image_dir().join("link-type.txt"), link_type), dst_libdir); - - Some(tarball.generate()) - } -} - -// Tarball intended for internal consumption to ease rustc/std development. -// -// Should not be considered stable by end users. -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub struct Bootstrap { - pub target: TargetSelection, -} - -impl Step for Bootstrap { - type Output = Option; - const DEFAULT: bool = false; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("bootstrap") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Bootstrap { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) -> Option { - let target = self.target; - - let tarball = Tarball::new(builder, "bootstrap", &target.triple); - - let bootstrap_outdir = &builder.bootstrap_out; - for file in &["bootstrap", "rustc", "rustdoc", "sccache-plus-cl"] { - tarball.add_file(bootstrap_outdir.join(exe(file, target)), "bootstrap/bin", 0o755); - } - - Some(tarball.generate()) - } -} - -/// Tarball containing a prebuilt version of the build-manifest tool, intended to be used by the -/// release process to avoid cloning the monorepo and building stuff. -/// -/// Should not be considered stable by end users. -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub struct BuildManifest { - pub target: TargetSelection, -} - -impl Step for BuildManifest { - type Output = GeneratedTarball; - const DEFAULT: bool = false; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("build-manifest") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(BuildManifest { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) -> GeneratedTarball { - let build_manifest = builder.tool_exe(Tool::BuildManifest); - - let tarball = Tarball::new(builder, "build-manifest", &self.target.triple); - tarball.add_file(&build_manifest, "bin", 0o755); - tarball.generate() - } -} - -/// Tarball containing artifacts necessary to reproduce the build of rustc. -/// -/// Currently this is the PGO profile data. -/// -/// Should not be considered stable by end users. 
-#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub struct ReproducibleArtifacts { - pub target: TargetSelection, -} - -impl Step for ReproducibleArtifacts { - type Output = Option; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("reproducible-artifacts") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(ReproducibleArtifacts { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) -> Self::Output { - let mut added_anything = false; - let tarball = Tarball::new(builder, "reproducible-artifacts", &self.target.triple); - if let Some(path) = builder.config.rust_profile_use.as_ref() { - tarball.add_file(path, ".", 0o644); - added_anything = true; - } - if let Some(path) = builder.config.llvm_profile_use.as_ref() { - tarball.add_file(path, ".", 0o644); - added_anything = true; - } - for profile in &builder.config.reproducible_artifacts { - tarball.add_file(profile, ".", 0o644); - added_anything = true; - } - if added_anything { Some(tarball.generate()) } else { None } - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/doc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/doc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/doc.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/doc.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1085 +0,0 @@ -//! Documentation generation for rustbuilder. -//! -//! This module implements generation for all bits and pieces of documentation -//! for the Rust project. This notably includes suites like the rust book, the -//! nomicon, rust by example, standalone documentation, etc. -//! -//! Everything here is basically just a shim around calling either `rustbook` or -//! `rustdoc`. - -use std::fs; -use std::path::{Path, PathBuf}; - -use crate::builder::crate_description; -use crate::builder::{Alias, Builder, Compiler, Kind, RunConfig, ShouldRun, Step}; -use crate::cache::{Interned, INTERNER}; -use crate::compile; -use crate::config::{Config, TargetSelection}; -use crate::tool::{self, prepare_tool_cargo, SourceType, Tool}; -use crate::util::{dir_is_empty, symlink_dir, t, up_to_date}; -use crate::Mode; - -macro_rules! submodule_helper { - ($path:expr, submodule) => { - $path - }; - ($path:expr, submodule = $submodule:literal) => { - $submodule - }; -} - -macro_rules! book { - ($($name:ident, $path:expr, $book_name:expr $(, submodule $(= $submodule:literal)? )? ;)+) => { - $( - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] - pub struct $name { - target: TargetSelection, - } - - impl Step for $name { - type Output = (); - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path($path).default_condition(builder.config.docs) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure($name { - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) { - $( - let path = Path::new(submodule_helper!( $path, submodule $( = $submodule )? )); - builder.update_submodule(&path); - )? - builder.ensure(RustbookSrc { - target: self.target, - name: INTERNER.intern_str($book_name), - src: INTERNER.intern_path(builder.src.join($path)), - parent: Some(self), - }) - } - } - )+ - } -} - -// NOTE: When adding a book here, make sure to ALSO build the book by -// adding a build step in `src/bootstrap/builder.rs`! -// NOTE: Make sure to add the corresponding submodule when adding a new book. 
-// FIXME: Make checking for a submodule automatic somehow (maybe by having a list of all submodules -// and checking against it?). -book!( - CargoBook, "src/tools/cargo/src/doc", "cargo", submodule = "src/tools/cargo"; - ClippyBook, "src/tools/clippy/book", "clippy"; - EditionGuide, "src/doc/edition-guide", "edition-guide", submodule; - EmbeddedBook, "src/doc/embedded-book", "embedded-book", submodule; - Nomicon, "src/doc/nomicon", "nomicon", submodule; - Reference, "src/doc/reference", "reference", submodule; - RustByExample, "src/doc/rust-by-example", "rust-by-example", submodule; - RustdocBook, "src/doc/rustdoc", "rustdoc"; - StyleGuide, "src/doc/style-guide", "style-guide"; -); - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct UnstableBook { - target: TargetSelection, -} - -impl Step for UnstableBook { - type Output = (); - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path("src/doc/unstable-book").default_condition(builder.config.docs) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(UnstableBook { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - builder.ensure(UnstableBookGen { target: self.target }); - builder.ensure(RustbookSrc { - target: self.target, - name: INTERNER.intern_str("unstable-book"), - src: INTERNER.intern_path(builder.md_doc_out(self.target).join("unstable-book")), - parent: Some(self), - }) - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -struct RustbookSrc { - target: TargetSelection, - name: Interned, - src: Interned, - parent: Option

<P>, -} - -impl<P: Step> Step for RustbookSrc<P>
{ - type Output = (); - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() - } - - /// Invoke `rustbook` for `target` for the doc book `name` from the `src` path. - /// - /// This will not actually generate any documentation if the documentation has - /// already been generated. - fn run(self, builder: &Builder<'_>) { - let target = self.target; - let name = self.name; - let src = self.src; - let out = builder.doc_out(target); - t!(fs::create_dir_all(&out)); - - let out = out.join(name); - let index = out.join("index.html"); - let rustbook = builder.tool_exe(Tool::Rustbook); - let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook); - - if !builder.config.dry_run() && !(up_to_date(&src, &index) || up_to_date(&rustbook, &index)) - { - builder.info(&format!("Rustbook ({target}) - {name}")); - let _ = fs::remove_dir_all(&out); - - builder.run(rustbook_cmd.arg("build").arg(&src).arg("-d").arg(out)); - } - - if self.parent.is_some() { - builder.maybe_open_in_browser::
<P>
(index) - } - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct TheBook { - compiler: Compiler, - target: TargetSelection, -} - -impl Step for TheBook { - type Output = (); - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path("src/doc/book").default_condition(builder.config.docs) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(TheBook { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), - target: run.target, - }); - } - - /// Builds the book and associated stuff. - /// - /// We need to build: - /// - /// * Book - /// * Older edition redirects - /// * Version info and CSS - /// * Index page - /// * Redirect pages - fn run(self, builder: &Builder<'_>) { - let relative_path = Path::new("src").join("doc").join("book"); - builder.update_submodule(&relative_path); - - let compiler = self.compiler; - let target = self.target; - - let absolute_path = builder.src.join(&relative_path); - let redirect_path = absolute_path.join("redirects"); - if !absolute_path.exists() - || !redirect_path.exists() - || dir_is_empty(&absolute_path) - || dir_is_empty(&redirect_path) - { - eprintln!("Please checkout submodule: {}", relative_path.display()); - crate::exit!(1); - } - // build book - builder.ensure(RustbookSrc { - target, - name: INTERNER.intern_str("book"), - src: INTERNER.intern_path(absolute_path.clone()), - parent: Some(self), - }); - - // building older edition redirects - for edition in &["first-edition", "second-edition", "2018-edition"] { - builder.ensure(RustbookSrc { - target, - name: INTERNER.intern_string(format!("book/{edition}")), - src: INTERNER.intern_path(absolute_path.join(edition)), - // There should only be one book that is marked as the parent for each target, so - // treat the other editions as not having a parent. 
- parent: Option::::None, - }); - } - - // build the version info page and CSS - let shared_assets = builder.ensure(SharedAssets { target }); - - // build the command first so we don't nest GHA groups - builder.rustdoc_cmd(compiler); - - // build the redirect pages - let _guard = builder.msg_doc(compiler, "book redirect pages", target); - for file in t!(fs::read_dir(redirect_path)) { - let file = t!(file); - let path = file.path(); - let path = path.to_str().unwrap(); - - invoke_rustdoc(builder, compiler, &shared_assets, target, path); - } - } -} - -fn invoke_rustdoc( - builder: &Builder<'_>, - compiler: Compiler, - shared_assets: &SharedAssetsPaths, - target: TargetSelection, - markdown: &str, -) { - let out = builder.doc_out(target); - - let path = builder.src.join("src/doc").join(markdown); - - let header = builder.src.join("src/doc/redirect.inc"); - let footer = builder.src.join("src/doc/footer.inc"); - - let mut cmd = builder.rustdoc_cmd(compiler); - - let out = out.join("book"); - - cmd.arg("--html-after-content") - .arg(&footer) - .arg("--html-before-content") - .arg(&shared_assets.version_info) - .arg("--html-in-header") - .arg(&header) - .arg("--markdown-no-toc") - .arg("--markdown-playground-url") - .arg("https://play.rust-lang.org/") - .arg("-o") - .arg(&out) - .arg(&path) - .arg("--markdown-css") - .arg("../rust.css"); - - if !builder.config.docs_minification { - cmd.arg("-Z").arg("unstable-options").arg("--disable-minification"); - } - - builder.run(&mut cmd); -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Standalone { - compiler: Compiler, - target: TargetSelection, -} - -impl Step for Standalone { - type Output = (); - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path("src/doc").alias("standalone").default_condition(builder.config.docs) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Standalone { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), - target: run.target, - }); - } - - /// Generates all standalone documentation as compiled by the rustdoc in `stage` - /// for the `target` into `out`. - /// - /// This will list all of `src/doc` looking for markdown files and appropriately - /// perform transformations like substituting `VERSION`, `SHORT_HASH`, and - /// `STAMP` along with providing the various header/footer HTML we've customized. - /// - /// In the end, this is just a glorified wrapper around rustdoc! 
- fn run(self, builder: &Builder<'_>) { - let target = self.target; - let compiler = self.compiler; - let _guard = builder.msg_doc(compiler, "standalone", target); - let out = builder.doc_out(target); - t!(fs::create_dir_all(&out)); - - let version_info = builder.ensure(SharedAssets { target: self.target }).version_info; - - let favicon = builder.src.join("src/doc/favicon.inc"); - let footer = builder.src.join("src/doc/footer.inc"); - let full_toc = builder.src.join("src/doc/full-toc.inc"); - - for file in t!(fs::read_dir(builder.src.join("src/doc"))) { - let file = t!(file); - let path = file.path(); - let filename = path.file_name().unwrap().to_str().unwrap(); - if !filename.ends_with(".md") || filename == "README.md" { - continue; - } - - let html = out.join(filename).with_extension("html"); - let rustdoc = builder.rustdoc(compiler); - if up_to_date(&path, &html) - && up_to_date(&footer, &html) - && up_to_date(&favicon, &html) - && up_to_date(&full_toc, &html) - && (builder.config.dry_run() || up_to_date(&version_info, &html)) - && (builder.config.dry_run() || up_to_date(&rustdoc, &html)) - { - continue; - } - - let mut cmd = builder.rustdoc_cmd(compiler); - // Needed for --index-page flag - cmd.arg("-Z").arg("unstable-options"); - - cmd.arg("--html-after-content") - .arg(&footer) - .arg("--html-before-content") - .arg(&version_info) - .arg("--html-in-header") - .arg(&favicon) - .arg("--markdown-no-toc") - .arg("--index-page") - .arg(&builder.src.join("src/doc/index.md")) - .arg("--markdown-playground-url") - .arg("https://play.rust-lang.org/") - .arg("-o") - .arg(&out) - .arg(&path); - - if !builder.config.docs_minification { - cmd.arg("--disable-minification"); - } - - if filename == "not_found.md" { - cmd.arg("--markdown-css").arg("https://doc.rust-lang.org/rust.css"); - } else { - cmd.arg("--markdown-css").arg("rust.css"); - } - builder.run(&mut cmd); - } - - // We open doc/index.html as the default if invoked as `x.py doc --open` - // with no particular explicit doc requested (e.g. library/core). - if builder.paths.is_empty() || builder.was_invoked_explicitly::(Kind::Doc) { - let index = out.join("index.html"); - builder.open_in_browser(&index); - } - } -} - -#[derive(Debug, Clone)] -pub struct SharedAssetsPaths { - pub version_info: PathBuf, -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct SharedAssets { - target: TargetSelection, -} - -impl Step for SharedAssets { - type Output = SharedAssetsPaths; - const DEFAULT: bool = false; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - // Other tasks depend on this, no need to execute it on its own - run.never() - } - - // Generate shared resources used by other pieces of documentation. 
- fn run(self, builder: &Builder<'_>) -> Self::Output { - let out = builder.doc_out(self.target); - - let version_input = builder.src.join("src").join("doc").join("version_info.html.template"); - let version_info = out.join("version_info.html"); - if !builder.config.dry_run() && !up_to_date(&version_input, &version_info) { - let info = t!(fs::read_to_string(&version_input)) - .replace("VERSION", &builder.rust_release()) - .replace("SHORT_HASH", builder.rust_info().sha_short().unwrap_or("")) - .replace("STAMP", builder.rust_info().sha().unwrap_or("")); - t!(fs::write(&version_info, &info)); - } - - builder.copy(&builder.src.join("src").join("doc").join("rust.css"), &out.join("rust.css")); - - SharedAssetsPaths { version_info } - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct Std { - pub stage: u32, - pub target: TargetSelection, - pub format: DocumentationFormat, - crates: Interned>, -} - -impl Std { - pub(crate) fn new( - stage: u32, - target: TargetSelection, - builder: &Builder<'_>, - format: DocumentationFormat, - ) -> Self { - let crates = builder - .in_tree_crates("sysroot", Some(target)) - .into_iter() - .map(|krate| krate.name.to_string()) - .collect(); - Std { stage, target, format, crates: INTERNER.intern_list(crates) } - } -} - -impl Step for Std { - type Output = (); - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.crate_or_deps("sysroot").path("library").default_condition(builder.config.docs) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Std { - stage: run.builder.top_stage, - target: run.target, - format: if run.builder.config.cmd.json() { - DocumentationFormat::JSON - } else { - DocumentationFormat::HTML - }, - crates: run.make_run_crates(Alias::Library), - }); - } - - /// Compile all standard library documentation. - /// - /// This will generate all documentation for the standard library and its - /// dependencies. This is largely just a wrapper around `cargo doc`. - fn run(self, builder: &Builder<'_>) { - let stage = self.stage; - let target = self.target; - let out = match self.format { - DocumentationFormat::HTML => builder.doc_out(target), - DocumentationFormat::JSON => builder.json_doc_out(target), - }; - - t!(fs::create_dir_all(&out)); - - if self.format == DocumentationFormat::HTML { - builder.ensure(SharedAssets { target: self.target }); - } - - let index_page = builder - .src - .join("src/doc/index.md") - .into_os_string() - .into_string() - .expect("non-utf8 paths are unsupported"); - let mut extra_args = match self.format { - DocumentationFormat::HTML => { - vec!["--markdown-css", "rust.css", "--markdown-no-toc", "--index-page", &index_page] - } - DocumentationFormat::JSON => vec!["--output-format", "json"], - }; - - if !builder.config.docs_minification { - extra_args.push("--disable-minification"); - } - - doc_std(builder, self.format, stage, target, &out, &extra_args, &self.crates); - - // Don't open if the format is json - if let DocumentationFormat::JSON = self.format { - return; - } - - if builder.paths.iter().any(|path| path.ends_with("library")) { - // For `x.py doc library --open`, open `std` by default. 
- let index = out.join("std").join("index.html"); - builder.open_in_browser(index); - } else { - for requested_crate in &*self.crates { - if STD_PUBLIC_CRATES.iter().any(|&k| k == requested_crate) { - let index = out.join(requested_crate).join("index.html"); - builder.open_in_browser(index); - break; - } - } - } - } -} - -/// Name of the crates that are visible to consumers of the standard library. -/// Documentation for internal crates is handled by the rustc step, so internal crates will show -/// up there. -/// -/// Order here is important! -/// Crates need to be processed starting from the leaves, otherwise rustdoc will not -/// create correct links between crates because rustdoc depends on the -/// existence of the output directories to know if it should be a local -/// or remote link. -const STD_PUBLIC_CRATES: [&str; 5] = ["core", "alloc", "std", "proc_macro", "test"]; - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum DocumentationFormat { - HTML, - JSON, -} - -impl DocumentationFormat { - fn as_str(&self) -> &str { - match self { - DocumentationFormat::HTML => "HTML", - DocumentationFormat::JSON => "JSON", - } - } -} - -/// Build the documentation for public standard library crates. -fn doc_std( - builder: &Builder<'_>, - format: DocumentationFormat, - stage: u32, - target: TargetSelection, - out: &Path, - extra_args: &[&str], - requested_crates: &[String], -) { - if builder.no_std(target) == Some(true) { - panic!( - "building std documentation for no_std target {target} is not supported\n\ - Set `docs = false` in the config to disable documentation, or pass `--skip library`." - ); - } - - let compiler = builder.compiler(stage, builder.config.build); - - let target_doc_dir_name = if format == DocumentationFormat::JSON { "json-doc" } else { "doc" }; - let target_dir = - builder.stage_out(compiler, Mode::Std).join(target.triple).join(target_doc_dir_name); - - // This is directory where the compiler will place the output of the command. - // We will then copy the files from this directory into the final `out` directory, the specified - // as a function parameter. - let out_dir = target_dir.join(target.triple).join("doc"); - - let mut cargo = builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "doc"); - compile::std_cargo(builder, target, compiler.stage, &mut cargo); - cargo - .arg("--no-deps") - .arg("--target-dir") - .arg(&*target_dir.to_string_lossy()) - .arg("-Zskip-rustdoc-fingerprint") - .rustdocflag("-Z") - .rustdocflag("unstable-options") - .rustdocflag("--resource-suffix") - .rustdocflag(&builder.version); - for arg in extra_args { - cargo.rustdocflag(arg); - } - - if builder.config.library_docs_private_items { - cargo.rustdocflag("--document-private-items").rustdocflag("--document-hidden-items"); - } - - for krate in requested_crates { - if krate == "sysroot" { - // The sysroot crate is an implementation detail, don't include it in public docs. 
- continue; - } - cargo.arg("-p").arg(krate); - } - - let description = - format!("library{} in {} format", crate_description(&requested_crates), format.as_str()); - let _guard = builder.msg_doc(compiler, &description, target); - - builder.run(&mut cargo.into()); - builder.cp_r(&out_dir, &out); -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Rustc { - pub stage: u32, - pub target: TargetSelection, - crates: Interned>, -} - -impl Rustc { - pub(crate) fn new(stage: u32, target: TargetSelection, builder: &Builder<'_>) -> Self { - let crates = builder - .in_tree_crates("rustc-main", Some(target)) - .into_iter() - .map(|krate| krate.name.to_string()) - .collect(); - Self { stage, target, crates: INTERNER.intern_list(crates) } - } -} - -impl Step for Rustc { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.crate_or_deps("rustc-main") - .path("compiler") - .default_condition(builder.config.compiler_docs) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Rustc { - stage: run.builder.top_stage, - target: run.target, - crates: run.make_run_crates(Alias::Compiler), - }); - } - - /// Generates compiler documentation. - /// - /// This will generate all documentation for compiler and dependencies. - /// Compiler documentation is distributed separately, so we make sure - /// we do not merge it with the other documentation from std, test and - /// proc_macros. This is largely just a wrapper around `cargo doc`. - fn run(self, builder: &Builder<'_>) { - let stage = self.stage; - let target = self.target; - - // This is the intended out directory for compiler documentation. - let out = builder.compiler_doc_out(target); - t!(fs::create_dir_all(&out)); - - // Build the standard library, so that proc-macros can use it. - // (Normally, only the metadata would be necessary, but proc-macros are special since they run at compile-time.) - let compiler = builder.compiler(stage, builder.config.build); - builder.ensure(compile::Std::new(compiler, builder.config.build)); - - let _guard = builder.msg_sysroot_tool( - Kind::Doc, - stage, - &format!("compiler{}", crate_description(&self.crates)), - compiler.host, - target, - ); - - // This uses a shared directory so that librustdoc documentation gets - // correctly built and merged with the rustc documentation. This is - // needed because rustdoc is built in a different directory from - // rustc. rustdoc needs to be able to see everything, for example when - // merging the search index, or generating local (relative) links. - let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target.triple).join("doc"); - t!(fs::create_dir_all(out_dir.parent().unwrap())); - symlink_dir_force(&builder.config, &out, &out_dir); - // Cargo puts proc macros in `target/doc` even if you pass `--target` - // explicitly (https://github.com/rust-lang/cargo/issues/7677). - let proc_macro_out_dir = builder.stage_out(compiler, Mode::Rustc).join("doc"); - symlink_dir_force(&builder.config, &out, &proc_macro_out_dir); - - // Build cargo command. - let mut cargo = builder.cargo(compiler, Mode::Rustc, SourceType::InTree, target, "doc"); - cargo.rustdocflag("--document-private-items"); - // Since we always pass --document-private-items, there's no need to warn about linking to private items. 
- cargo.rustdocflag("-Arustdoc::private-intra-doc-links"); - cargo.rustdocflag("--enable-index-page"); - cargo.rustdocflag("-Zunstable-options"); - cargo.rustdocflag("-Znormalize-docs"); - cargo.rustdocflag("--show-type-layout"); - cargo.rustdocflag("--generate-link-to-definition"); - compile::rustc_cargo(builder, &mut cargo, target, compiler.stage); - cargo.arg("-Zunstable-options"); - cargo.arg("-Zskip-rustdoc-fingerprint"); - - // Only include compiler crates, no dependencies of those, such as `libc`. - // Do link to dependencies on `docs.rs` however using `rustdoc-map`. - cargo.arg("--no-deps"); - cargo.arg("-Zrustdoc-map"); - - // FIXME: `-Zrustdoc-map` does not yet correctly work for transitive dependencies, - // once this is no longer an issue the special case for `ena` can be removed. - cargo.rustdocflag("--extern-html-root-url"); - cargo.rustdocflag("ena=https://docs.rs/ena/latest/"); - - let mut to_open = None; - - for krate in &*self.crates { - // Create all crate output directories first to make sure rustdoc uses - // relative links. - // FIXME: Cargo should probably do this itself. - let dir_name = krate.replace("-", "_"); - t!(fs::create_dir_all(out_dir.join(&*dir_name))); - cargo.arg("-p").arg(krate); - if to_open.is_none() { - to_open = Some(dir_name); - } - } - - builder.run(&mut cargo.into()); - - if builder.paths.iter().any(|path| path.ends_with("compiler")) { - // For `x.py doc compiler --open`, open `rustc_middle` by default. - let index = out.join("rustc_middle").join("index.html"); - builder.open_in_browser(index); - } else if let Some(krate) = to_open { - // Let's open the first crate documentation page: - let index = out.join(krate).join("index.html"); - builder.open_in_browser(index); - } - } -} - -macro_rules! tool_doc { - ( - $tool: ident, - $should_run: literal, - $path: literal, - $(rustc_tool = $rustc_tool:literal, )? - $(in_tree = $in_tree:literal, )? - [$($extra_arg: literal),+ $(,)?] - $(,)? - ) => { - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] - pub struct $tool { - target: TargetSelection, - } - - impl Step for $tool { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.crate_or_deps($should_run).default_condition(builder.config.compiler_docs) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure($tool { target: run.target }); - } - - /// Generates compiler documentation. - /// - /// This will generate all documentation for compiler and dependencies. - /// Compiler documentation is distributed separately, so we make sure - /// we do not merge it with the other documentation from std, test and - /// proc_macros. This is largely just a wrapper around `cargo doc`. - fn run(self, builder: &Builder<'_>) { - let stage = builder.top_stage; - let target = self.target; - - // This is the intended out directory for compiler documentation. - let out = builder.compiler_doc_out(target); - t!(fs::create_dir_all(&out)); - - let compiler = builder.compiler(stage, builder.config.build); - builder.ensure(compile::Std::new(compiler, target)); - - if true $(&& $rustc_tool)? { - // Build rustc docs so that we generate relative links. - builder.ensure(Rustc::new(stage, target, builder)); - - // Rustdoc needs the rustc sysroot available to build. - // FIXME: is there a way to only ensure `check::Rustc` here? Last time I tried it failed - // with strange errors, but only on a full bors test ... 
- builder.ensure(compile::Rustc::new(compiler, target)); - } - - let source_type = if true $(&& $in_tree)? { - SourceType::InTree - } else { - SourceType::Submodule - }; - - // Symlink compiler docs to the output directory of rustdoc documentation. - let out_dirs = [ - builder.stage_out(compiler, Mode::ToolRustc).join(target.triple).join("doc"), - // Cargo uses a different directory for proc macros. - builder.stage_out(compiler, Mode::ToolRustc).join("doc"), - ]; - for out_dir in out_dirs { - t!(fs::create_dir_all(&out_dir)); - symlink_dir_force(&builder.config, &out, &out_dir); - } - - // Build cargo command. - let mut cargo = prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - target, - "doc", - $path, - source_type, - &[], - ); - - cargo.arg("-Zskip-rustdoc-fingerprint"); - // Only include compiler crates, no dependencies of those, such as `libc`. - cargo.arg("--no-deps"); - - $( - cargo.arg($extra_arg); - )+ - - cargo.rustdocflag("--document-private-items"); - // Since we always pass --document-private-items, there's no need to warn about linking to private items. - cargo.rustdocflag("-Arustdoc::private-intra-doc-links"); - cargo.rustdocflag("--enable-index-page"); - cargo.rustdocflag("--show-type-layout"); - cargo.rustdocflag("--generate-link-to-definition"); - cargo.rustdocflag("-Zunstable-options"); - - let _guard = builder.msg_doc(compiler, stringify!($tool).to_lowercase(), target); - builder.run(&mut cargo.into()); - } - } - } -} - -tool_doc!( - Rustdoc, - "rustdoc-tool", - "src/tools/rustdoc", - ["-p", "rustdoc", "-p", "rustdoc-json-types"] -); -tool_doc!( - Rustfmt, - "rustfmt-nightly", - "src/tools/rustfmt", - ["-p", "rustfmt-nightly", "-p", "rustfmt-config_proc_macro"], -); -tool_doc!(Clippy, "clippy", "src/tools/clippy", ["-p", "clippy_utils"]); -tool_doc!(Miri, "miri", "src/tools/miri", ["-p", "miri"]); -tool_doc!( - Cargo, - "cargo", - "src/tools/cargo", - rustc_tool = false, - in_tree = false, - [ - "-p", - "cargo", - "-p", - "cargo-platform", - "-p", - "cargo-util", - "-p", - "crates-io", - "-p", - "cargo-test-macro", - "-p", - "cargo-test-support", - "-p", - "cargo-credential", - "-p", - "mdman", - // FIXME: this trips a license check in tidy. - // "-p", - // "resolver-tests", - ] -); -tool_doc!(Tidy, "tidy", "src/tools/tidy", rustc_tool = false, ["-p", "tidy"]); -tool_doc!( - Bootstrap, - "bootstrap", - "src/bootstrap", - rustc_tool = false, - ["--lib", "-p", "bootstrap"] -); - -#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct ErrorIndex { - pub target: TargetSelection, -} - -impl Step for ErrorIndex { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path("src/tools/error_index_generator").default_condition(builder.config.docs) - } - - fn make_run(run: RunConfig<'_>) { - let target = run.target; - run.builder.ensure(ErrorIndex { target }); - } - - /// Generates the HTML rendered error-index by running the - /// `error_index_generator` tool. 
- fn run(self, builder: &Builder<'_>) { - builder.info(&format!("Documenting error index ({})", self.target)); - let out = builder.doc_out(self.target); - t!(fs::create_dir_all(&out)); - let mut index = tool::ErrorIndex::command(builder); - index.arg("html"); - index.arg(out); - index.arg(&builder.version); - - builder.run(&mut index); - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct UnstableBookGen { - target: TargetSelection, -} - -impl Step for UnstableBookGen { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path("src/tools/unstable-book-gen").default_condition(builder.config.docs) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(UnstableBookGen { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - let target = self.target; - - builder.info(&format!("Generating unstable book md files ({target})")); - let out = builder.md_doc_out(target).join("unstable-book"); - builder.create_dir(&out); - builder.remove_dir(&out); - let mut cmd = builder.tool_cmd(Tool::UnstableBookGen); - cmd.arg(builder.src.join("library")); - cmd.arg(builder.src.join("compiler")); - cmd.arg(builder.src.join("src")); - cmd.arg(out); - - builder.run(&mut cmd); - } -} - -fn symlink_dir_force(config: &Config, original: &Path, link: &Path) { - if config.dry_run() { - return; - } - if let Ok(m) = fs::symlink_metadata(link) { - if m.file_type().is_dir() { - t!(fs::remove_dir_all(link)); - } else { - // handle directory junctions on windows by falling back to - // `remove_dir`. - t!(fs::remove_file(link).or_else(|_| fs::remove_dir(link))); - } - } - - t!( - symlink_dir(config, original, link), - format!("failed to create link from {} -> {}", link.display(), original.display()) - ); -} - -#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustcBook { - pub compiler: Compiler, - pub target: TargetSelection, - pub validate: bool, -} - -impl Step for RustcBook { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path("src/doc/rustc").default_condition(builder.config.docs) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustcBook { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), - target: run.target, - validate: false, - }); - } - - /// Builds the rustc book. - /// - /// The lints are auto-generated by a tool, and then merged into the book - /// in the "md-doc" directory in the build output directory. Then - /// "rustbook" is used to convert it to HTML. - fn run(self, builder: &Builder<'_>) { - let out_base = builder.md_doc_out(self.target).join("rustc"); - t!(fs::create_dir_all(&out_base)); - let out_listing = out_base.join("src/lints"); - builder.cp_r(&builder.src.join("src/doc/rustc"), &out_base); - builder.info(&format!("Generating lint docs ({})", self.target)); - - let rustc = builder.rustc(self.compiler); - // The tool runs `rustc` for extracting output examples, so it needs a - // functional sysroot. 
- builder.ensure(compile::Std::new(self.compiler, self.target)); - let mut cmd = builder.tool_cmd(Tool::LintDocs); - cmd.arg("--src"); - cmd.arg(builder.src.join("compiler")); - cmd.arg("--out"); - cmd.arg(&out_listing); - cmd.arg("--rustc"); - cmd.arg(&rustc); - cmd.arg("--rustc-target").arg(&self.target.rustc_target_arg()); - if builder.is_verbose() { - cmd.arg("--verbose"); - } - if self.validate { - cmd.arg("--validate"); - } - // We need to validate nightly features, even on the stable channel. - // Set this unconditionally as the stage0 compiler may be being used to - // document. - cmd.env("RUSTC_BOOTSTRAP", "1"); - - // If the lib directories are in an unusual location (changed in - // config.toml), then this needs to explicitly update the dylib search - // path. - builder.add_rustc_lib_path(self.compiler, &mut cmd); - let doc_generator_guard = builder.msg( - Kind::Run, - self.compiler.stage, - "lint-docs", - self.compiler.host, - self.target, - ); - builder.run(&mut cmd); - drop(doc_generator_guard); - - // Run rustbook/mdbook to generate the HTML pages. - builder.ensure(RustbookSrc { - target: self.target, - name: INTERNER.intern_str("rustc"), - src: INTERNER.intern_path(out_base), - parent: Some(self), - }); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/download-ci-llvm-stamp rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/download-ci-llvm-stamp --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/download-ci-llvm-stamp 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/download-ci-llvm-stamp 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,4 @@ Change this file to make users of the `download-ci-llvm` configuration download a new version of LLVM from CI, even if the LLVM submodule hasn’t changed. -Last change is for: https://github.com/rust-lang/rust/pull/113996 +Last change is for: https://github.com/rust-lang/rust/pull/116881 diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/download.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/download.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/download.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/download.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,686 +0,0 @@ -use std::{ - env, - ffi::{OsStr, OsString}, - fs::{self, File}, - io::{BufRead, BufReader, BufWriter, ErrorKind, Write}, - path::{Path, PathBuf}, - process::{Command, Stdio}, -}; - -use build_helper::ci::CiEnv; -use once_cell::sync::OnceCell; -use xz2::bufread::XzDecoder; - -use crate::{ - config::RustfmtMetadata, - llvm::detect_llvm_sha, - t, - util::{check_run, exe, program_out_of_date}, - Config, -}; - -static SHOULD_FIX_BINS_AND_DYLIBS: OnceCell = OnceCell::new(); - -/// `Config::try_run` wrapper for this module to avoid warnings on `try_run`, since we don't have access to a `builder` yet. -fn try_run(config: &Config, cmd: &mut Command) -> Result<(), ()> { - #[allow(deprecated)] - config.try_run(cmd) -} - -/// Generic helpers that are useful anywhere in bootstrap. -impl Config { - pub fn is_verbose(&self) -> bool { - self.verbose > 0 - } - - pub(crate) fn create(&self, path: &Path, s: &str) { - if self.dry_run() { - return; - } - t!(fs::write(path, s)); - } - - pub(crate) fn remove(&self, f: &Path) { - if self.dry_run() { - return; - } - fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {:?}", f)); - } - - /// Create a temporary directory in `out` and return its path. 
- /// - /// NOTE: this temporary directory is shared between all steps; - /// if you need an empty directory, create a new subdirectory inside it. - pub(crate) fn tempdir(&self) -> PathBuf { - let tmp = self.out.join("tmp"); - t!(fs::create_dir_all(&tmp)); - tmp - } - - /// Runs a command, printing out nice contextual information if it fails. - /// Returns false if do not execute at all, otherwise returns its - /// `status.success()`. - pub(crate) fn check_run(&self, cmd: &mut Command) -> bool { - if self.dry_run() { - return true; - } - self.verbose(&format!("running: {cmd:?}")); - check_run(cmd, self.is_verbose()) - } - - /// Whether or not `fix_bin_or_dylib` needs to be run; can only be true - /// on NixOS - fn should_fix_bins_and_dylibs(&self) -> bool { - let val = *SHOULD_FIX_BINS_AND_DYLIBS.get_or_init(|| { - match Command::new("uname").arg("-s").stderr(Stdio::inherit()).output() { - Err(_) => return false, - Ok(output) if !output.status.success() => return false, - Ok(output) => { - let mut os_name = output.stdout; - if os_name.last() == Some(&b'\n') { - os_name.pop(); - } - if os_name != b"Linux" { - return false; - } - } - } - - // If the user has asked binaries to be patched for Nix, then - // don't check for NixOS or `/lib`. - // NOTE: this intentionally comes after the Linux check: - // - patchelf only works with ELF files, so no need to run it on Mac or Windows - // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc. - if let Some(explicit_value) = self.patch_binaries_for_nix { - return explicit_value; - } - - // Use `/etc/os-release` instead of `/etc/NIXOS`. - // The latter one does not exist on NixOS when using tmpfs as root. - let is_nixos = match File::open("/etc/os-release") { - Err(e) if e.kind() == ErrorKind::NotFound => false, - Err(e) => panic!("failed to access /etc/os-release: {}", e), - Ok(os_release) => BufReader::new(os_release).lines().any(|l| { - let l = l.expect("reading /etc/os-release"); - matches!(l.trim(), "ID=nixos" | "ID='nixos'" | "ID=\"nixos\"") - }), - }; - if !is_nixos { - let in_nix_shell = env::var("IN_NIX_SHELL"); - if let Ok(in_nix_shell) = in_nix_shell { - eprintln!( - "The IN_NIX_SHELL environment variable is `{in_nix_shell}`; \ - you may need to set `patch-binaries-for-nix=true` in config.toml" - ); - } - } - is_nixos - }); - if val { - eprintln!("info: You seem to be using Nix."); - } - val - } - - /// Modifies the interpreter section of 'fname' to fix the dynamic linker, - /// or the RPATH section, to fix the dynamic library search path - /// - /// This is only required on NixOS and uses the PatchELF utility to - /// change the interpreter/RPATH of ELF executables. - /// - /// Please see for more information - fn fix_bin_or_dylib(&self, fname: &Path) { - assert_eq!(SHOULD_FIX_BINS_AND_DYLIBS.get(), Some(&true)); - println!("attempting to patch {}", fname.display()); - - // Only build `.nix-deps` once. - static NIX_DEPS_DIR: OnceCell = OnceCell::new(); - let mut nix_build_succeeded = true; - let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| { - // Run `nix-build` to "build" each dependency (which will likely reuse - // the existing `/nix/store` copy, or at most download a pre-built copy). - // - // Importantly, we create a gc-root called `.nix-deps` in the `build/` - // directory, but still reference the actual `/nix/store` path in the rpath - // as it makes it significantly more robust against changes to the location of - // the `.nix-deps` location. 
- // - // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`). - // zlib: Needed as a system dependency of `libLLVM-*.so`. - // patchelf: Needed for patching ELF binaries (see doc comment above). - let nix_deps_dir = self.out.join(".nix-deps"); - const NIX_EXPR: &str = " - with (import {}); - symlinkJoin { - name = \"rust-stage0-dependencies\"; - paths = [ - zlib - patchelf - stdenv.cc.bintools - ]; - } - "; - nix_build_succeeded = try_run( - self, - Command::new("nix-build").args(&[ - Path::new("-E"), - Path::new(NIX_EXPR), - Path::new("-o"), - &nix_deps_dir, - ]), - ) - .is_ok(); - nix_deps_dir - }); - if !nix_build_succeeded { - return; - } - - let mut patchelf = Command::new(nix_deps_dir.join("bin/patchelf")); - let rpath_entries = { - // ORIGIN is a relative default, all binary and dynamic libraries we ship - // appear to have this (even when `../lib` is redundant). - // NOTE: there are only two paths here, delimited by a `:` - let mut entries = OsString::from("$ORIGIN/../lib:"); - entries.push(t!(fs::canonicalize(nix_deps_dir)).join("lib")); - entries - }; - patchelf.args(&[OsString::from("--set-rpath"), rpath_entries]); - if !fname.extension().map_or(false, |ext| ext == "so") { - // Finally, set the correct .interp for binaries - let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker"); - // FIXME: can we support utf8 here? `args` doesn't accept Vec, only OsString ... - let dynamic_linker = t!(String::from_utf8(t!(fs::read(dynamic_linker_path)))); - patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]); - } - - let _ = try_run(self, patchelf.arg(fname)); - } - - fn download_file(&self, url: &str, dest_path: &Path, help_on_error: &str) { - self.verbose(&format!("download {url}")); - // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/. - let tempfile = self.tempdir().join(dest_path.file_name().unwrap()); - // While bootstrap itself only supports http and https downloads, downstream forks might - // need to download components from other protocols. The match allows them adding more - // protocols without worrying about merge conflicts if we change the HTTP implementation. - match url.split_once("://").map(|(proto, _)| proto) { - Some("http") | Some("https") => { - self.download_http_with_retries(&tempfile, url, help_on_error) - } - Some(other) => panic!("unsupported protocol {other} in {url}"), - None => panic!("no protocol in {url}"), - } - t!(std::fs::rename(&tempfile, dest_path)); - } - - fn download_http_with_retries(&self, tempfile: &Path, url: &str, help_on_error: &str) { - println!("downloading {url}"); - // Try curl. If that fails and we are on windows, fallback to PowerShell. - let mut curl = Command::new("curl"); - curl.args(&[ - "-y", - "30", - "-Y", - "10", // timeout if speed is < 10 bytes/sec for > 30 seconds - "--connect-timeout", - "30", // timeout if cannot connect within 30 seconds - "-o", - tempfile.to_str().unwrap(), - "--retry", - "3", - "-SRf", - ]); - // Don't print progress in CI; the \r wrapping looks bad and downloads don't take long enough for progress to be useful. 
- if CiEnv::is_ci() { - curl.arg("-s"); - } else { - curl.arg("--progress-bar"); - } - curl.arg(url); - if !self.check_run(&mut curl) { - if self.build.contains("windows-msvc") { - eprintln!("Fallback to PowerShell"); - for _ in 0..3 { - if try_run(self, Command::new("PowerShell.exe").args(&[ - "/nologo", - "-Command", - "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;", - &format!( - "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')", - url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"), - ), - ])).is_err() { - return; - } - eprintln!("\nspurious failure, trying again"); - } - } - if !help_on_error.is_empty() { - eprintln!("{help_on_error}"); - } - crate::exit!(1); - } - } - - fn unpack(&self, tarball: &Path, dst: &Path, pattern: &str) { - eprintln!("extracting {} to {}", tarball.display(), dst.display()); - if !dst.exists() { - t!(fs::create_dir_all(dst)); - } - - // `tarball` ends with `.tar.xz`; strip that suffix - // example: `rust-dev-nightly-x86_64-unknown-linux-gnu` - let uncompressed_filename = - Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap(); - let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap()); - - // decompress the file - let data = t!(File::open(tarball), format!("file {} not found", tarball.display())); - let decompressor = XzDecoder::new(BufReader::new(data)); - - let mut tar = tar::Archive::new(decompressor); - - // `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding - // it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow. - // Cache the entries when we extract it so we only have to read it once. - let mut recorded_entries = - if dst.ends_with("ci-rustc") { recorded_entries(dst, pattern) } else { None }; - - for member in t!(tar.entries()) { - let mut member = t!(member); - let original_path = t!(member.path()).into_owned(); - // skip the top-level directory - if original_path == directory_prefix { - continue; - } - let mut short_path = t!(original_path.strip_prefix(directory_prefix)); - if !short_path.starts_with(pattern) { - continue; - } - short_path = t!(short_path.strip_prefix(pattern)); - let dst_path = dst.join(short_path); - self.verbose(&format!("extracting {} to {}", original_path.display(), dst.display())); - if !t!(member.unpack_in(dst)) { - panic!("path traversal attack ??"); - } - if let Some(record) = &mut recorded_entries { - t!(writeln!(record, "{}", short_path.to_str().unwrap())); - } - let src_path = dst.join(original_path); - if src_path.is_dir() && dst_path.exists() { - continue; - } - t!(fs::rename(src_path, dst_path)); - } - let dst_dir = dst.join(directory_prefix); - if dst_dir.exists() { - t!(fs::remove_dir_all(&dst_dir), format!("failed to remove {}", dst_dir.display())); - } - } - - /// Returns whether the SHA256 checksum of `path` matches `expected`. - fn verify(&self, path: &Path, expected: &str) -> bool { - use sha2::Digest; - - self.verbose(&format!("verifying {}", path.display())); - let mut hasher = sha2::Sha256::new(); - // FIXME: this is ok for rustfmt (4.1 MB large at time of writing), but it seems memory-intensive for rustc and larger components. - // Consider using streaming IO instead? 
- let contents = if self.dry_run() { vec![] } else { t!(fs::read(path)) }; - hasher.update(&contents); - let found = hex::encode(hasher.finalize().as_slice()); - let verified = found == expected; - if !verified && !self.dry_run() { - println!( - "invalid checksum: \n\ - found: {found}\n\ - expected: {expected}", - ); - } - return verified; - } -} - -fn recorded_entries(dst: &Path, pattern: &str) -> Option> { - let name = if pattern == "rustc-dev" { - ".rustc-dev-contents" - } else if pattern.starts_with("rust-std") { - ".rust-std-contents" - } else { - return None; - }; - Some(BufWriter::new(t!(File::create(dst.join(name))))) -} - -enum DownloadSource { - CI, - Dist, -} - -/// Functions that are only ever called once, but named for clarify and to avoid thousand-line functions. -impl Config { - pub(crate) fn maybe_download_rustfmt(&self) -> Option { - let RustfmtMetadata { date, version } = self.stage0_metadata.rustfmt.as_ref()?; - let channel = format!("{version}-{date}"); - - let host = self.build; - let bin_root = self.out.join(host.triple).join("rustfmt"); - let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); - let rustfmt_stamp = bin_root.join(".rustfmt-stamp"); - if rustfmt_path.exists() && !program_out_of_date(&rustfmt_stamp, &channel) { - return Some(rustfmt_path); - } - - self.download_component( - DownloadSource::Dist, - format!("rustfmt-{version}-{build}.tar.xz", build = host.triple), - "rustfmt-preview", - &date, - "rustfmt", - ); - self.download_component( - DownloadSource::Dist, - format!("rustc-{version}-{build}.tar.xz", build = host.triple), - "rustc", - &date, - "rustfmt", - ); - - if self.should_fix_bins_and_dylibs() { - self.fix_bin_or_dylib(&bin_root.join("bin").join("rustfmt")); - self.fix_bin_or_dylib(&bin_root.join("bin").join("cargo-fmt")); - let lib_dir = bin_root.join("lib"); - for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { - let lib = t!(lib); - if lib.path().extension() == Some(OsStr::new("so")) { - self.fix_bin_or_dylib(&lib.path()); - } - } - } - - self.create(&rustfmt_stamp, &channel); - Some(rustfmt_path) - } - - pub(crate) fn ci_rust_std_contents(&self) -> Vec { - self.ci_component_contents(".rust-std-contents") - } - - pub(crate) fn ci_rustc_dev_contents(&self) -> Vec { - self.ci_component_contents(".rustc-dev-contents") - } - - fn ci_component_contents(&self, stamp_file: &str) -> Vec { - assert!(self.download_rustc()); - if self.dry_run() { - return vec![]; - } - - let ci_rustc_dir = self.ci_rustc_dir(); - let stamp_file = ci_rustc_dir.join(stamp_file); - let contents_file = t!(File::open(&stamp_file), stamp_file.display().to_string()); - t!(BufReader::new(contents_file).lines().collect()) - } - - pub(crate) fn download_ci_rustc(&self, commit: &str) { - self.verbose(&format!("using downloaded stage2 artifacts from CI (commit {commit})")); - - let version = self.artifact_version_part(commit); - // download-rustc doesn't need its own cargo, it can just use beta's. But it does need the - // `rustc_private` crates for tools. 
- let extra_components = ["rustc-dev"]; - - self.download_toolchain( - &version, - "ci-rustc", - &format!("{commit}-{}", self.llvm_assertions), - &extra_components, - Self::download_ci_component, - ); - } - - pub(crate) fn download_beta_toolchain(&self) { - self.verbose("downloading stage0 beta artifacts"); - - let date = &self.stage0_metadata.compiler.date; - let version = &self.stage0_metadata.compiler.version; - let extra_components = ["cargo"]; - - let download_beta_component = |config: &Config, filename, prefix: &_, date: &_| { - config.download_component(DownloadSource::Dist, filename, prefix, date, "stage0") - }; - - self.download_toolchain( - version, - "stage0", - date, - &extra_components, - download_beta_component, - ); - } - - fn download_toolchain( - &self, - version: &str, - sysroot: &str, - stamp_key: &str, - extra_components: &[&str], - download_component: fn(&Config, String, &str, &str), - ) { - let host = self.build.triple; - let bin_root = self.out.join(host).join(sysroot); - let rustc_stamp = bin_root.join(".rustc-stamp"); - - if !bin_root.join("bin").join(exe("rustc", self.build)).exists() - || program_out_of_date(&rustc_stamp, stamp_key) - { - if bin_root.exists() { - t!(fs::remove_dir_all(&bin_root)); - } - let filename = format!("rust-std-{version}-{host}.tar.xz"); - let pattern = format!("rust-std-{host}"); - download_component(self, filename, &pattern, stamp_key); - let filename = format!("rustc-{version}-{host}.tar.xz"); - download_component(self, filename, "rustc", stamp_key); - - for component in extra_components { - let filename = format!("{component}-{version}-{host}.tar.xz"); - download_component(self, filename, component, stamp_key); - } - - if self.should_fix_bins_and_dylibs() { - self.fix_bin_or_dylib(&bin_root.join("bin").join("rustc")); - self.fix_bin_or_dylib(&bin_root.join("bin").join("rustdoc")); - self.fix_bin_or_dylib( - &bin_root.join("libexec").join("rust-analyzer-proc-macro-srv"), - ); - let lib_dir = bin_root.join("lib"); - for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { - let lib = t!(lib); - if lib.path().extension() == Some(OsStr::new("so")) { - self.fix_bin_or_dylib(&lib.path()); - } - } - } - - t!(fs::write(rustc_stamp, stamp_key)); - } - } - - /// Download a single component of a CI-built toolchain (not necessarily a published nightly). 
- // NOTE: intentionally takes an owned string to avoid downloading multiple times by accident - fn download_ci_component(&self, filename: String, prefix: &str, commit_with_assertions: &str) { - Self::download_component( - self, - DownloadSource::CI, - filename, - prefix, - commit_with_assertions, - "ci-rustc", - ) - } - - fn download_component( - &self, - mode: DownloadSource, - filename: String, - prefix: &str, - key: &str, - destination: &str, - ) { - let cache_dst = self.out.join("cache"); - let cache_dir = cache_dst.join(key); - if !cache_dir.exists() { - t!(fs::create_dir_all(&cache_dir)); - } - - let bin_root = self.out.join(self.build.triple).join(destination); - let tarball = cache_dir.join(&filename); - let (base_url, url, should_verify) = match mode { - DownloadSource::CI => { - let dist_server = if self.llvm_assertions { - self.stage0_metadata.config.artifacts_with_llvm_assertions_server.clone() - } else { - self.stage0_metadata.config.artifacts_server.clone() - }; - let url = format!( - "{}/{filename}", - key.strip_suffix(&format!("-{}", self.llvm_assertions)).unwrap() - ); - (dist_server, url, false) - } - DownloadSource::Dist => { - let dist_server = env::var("RUSTUP_DIST_SERVER") - .unwrap_or(self.stage0_metadata.config.dist_server.to_string()); - // NOTE: make `dist` part of the URL because that's how it's stored in src/stage0.json - (dist_server, format!("dist/{key}/{filename}"), true) - } - }; - - // For the beta compiler, put special effort into ensuring the checksums are valid. - // FIXME: maybe we should do this for download-rustc as well? but it would be a pain to update - // this on each and every nightly ... - let checksum = if should_verify { - let error = format!( - "src/stage0.json doesn't contain a checksum for {url}. \ - Pre-built artifacts might not be available for this \ - target at this time, see https://doc.rust-lang.org/nightly\ - /rustc/platform-support.html for more information." 
- ); - let sha256 = self.stage0_metadata.checksums_sha256.get(&url).expect(&error); - if tarball.exists() { - if self.verify(&tarball, sha256) { - self.unpack(&tarball, &bin_root, prefix); - return; - } else { - self.verbose(&format!( - "ignoring cached file {} due to failed verification", - tarball.display() - )); - self.remove(&tarball); - } - } - Some(sha256) - } else if tarball.exists() { - self.unpack(&tarball, &bin_root, prefix); - return; - } else { - None - }; - - let mut help_on_error = ""; - if destination == "ci-rustc" { - help_on_error = "error: failed to download pre-built rustc from CI - -note: old builds get deleted after a certain time -help: if trying to compile an old commit of rustc, disable `download-rustc` in config.toml: - -[rust] -download-rustc = false -"; - } - self.download_file(&format!("{base_url}/{url}"), &tarball, help_on_error); - if let Some(sha256) = checksum { - if !self.verify(&tarball, sha256) { - panic!("failed to verify {}", tarball.display()); - } - } - - self.unpack(&tarball, &bin_root, prefix); - } - - pub(crate) fn maybe_download_ci_llvm(&self) { - if !self.llvm_from_ci { - return; - } - let llvm_root = self.ci_llvm_root(); - let llvm_stamp = llvm_root.join(".llvm-stamp"); - let llvm_sha = detect_llvm_sha(&self, self.rust_info.is_managed_git_subrepository()); - let key = format!("{}{}", llvm_sha, self.llvm_assertions); - if program_out_of_date(&llvm_stamp, &key) && !self.dry_run() { - self.download_ci_llvm(&llvm_sha); - if self.should_fix_bins_and_dylibs() { - for entry in t!(fs::read_dir(llvm_root.join("bin"))) { - self.fix_bin_or_dylib(&t!(entry).path()); - } - } - - // Update the timestamp of llvm-config to force rustc_llvm to be - // rebuilt. This is a hacky workaround for a deficiency in Cargo where - // the rerun-if-changed directive doesn't handle changes very well. - // https://github.com/rust-lang/cargo/issues/10791 - // Cargo only compares the timestamp of the file relative to the last - // time `rustc_llvm` build script ran. However, the timestamps of the - // files in the tarball are in the past, so it doesn't trigger a - // rebuild. 
- let now = filetime::FileTime::from_system_time(std::time::SystemTime::now()); - let llvm_config = llvm_root.join("bin").join(exe("llvm-config", self.build)); - t!(filetime::set_file_times(&llvm_config, now, now)); - - if self.should_fix_bins_and_dylibs() { - let llvm_lib = llvm_root.join("lib"); - for entry in t!(fs::read_dir(&llvm_lib)) { - let lib = t!(entry).path(); - if lib.extension().map_or(false, |ext| ext == "so") { - self.fix_bin_or_dylib(&lib); - } - } - } - - t!(fs::write(llvm_stamp, key)); - } - } - - fn download_ci_llvm(&self, llvm_sha: &str) { - let llvm_assertions = self.llvm_assertions; - - let cache_prefix = format!("llvm-{llvm_sha}-{llvm_assertions}"); - let cache_dst = self.out.join("cache"); - let rustc_cache = cache_dst.join(cache_prefix); - if !rustc_cache.exists() { - t!(fs::create_dir_all(&rustc_cache)); - } - let base = if llvm_assertions { - &self.stage0_metadata.config.artifacts_with_llvm_assertions_server - } else { - &self.stage0_metadata.config.artifacts_server - }; - let version = self.artifact_version_part(llvm_sha); - let filename = format!("rust-dev-{}-{}.tar.xz", version, self.build.triple); - let tarball = rustc_cache.join(&filename); - if !tarball.exists() { - let help_on_error = "error: failed to download llvm from ci - - help: old builds get deleted after a certain time - help: if trying to compile an old commit of rustc, disable `download-ci-llvm` in config.toml: - - [llvm] - download-ci-llvm = false - "; - self.download_file(&format!("{base}/{llvm_sha}/{filename}"), &tarball, help_on_error); - } - let llvm_root = self.ci_llvm_root(); - self.unpack(&tarball, &llvm_root, "rust-dev"); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/dylib_util.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/dylib_util.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/dylib_util.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/dylib_util.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,30 +0,0 @@ -// Various utilities for working with dylib paths. -// -// This file is meant to be included directly to avoid a dependency on the bootstrap library from -// the rustc and rustdoc wrappers. This improves compilation time by reducing the linking time. - -/// Returns the environment variable which the dynamic library lookup path -/// resides in for this platform. -pub fn dylib_path_var() -> &'static str { - if cfg!(target_os = "windows") { - "PATH" - } else if cfg!(target_os = "macos") { - "DYLD_LIBRARY_PATH" - } else if cfg!(target_os = "haiku") { - "LIBRARY_PATH" - } else if cfg!(target_os = "aix") { - "LIBPATH" - } else { - "LD_LIBRARY_PATH" - } -} - -/// Parses the `dylib_path_var()` environment variable, returning a list of -/// paths that are members of this lookup path. -pub fn dylib_path() -> Vec { - let var = match env::var_os(dylib_path_var()) { - Some(v) => v, - None => return vec![], - }; - env::split_paths(&var).collect() -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/flags.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/flags.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/flags.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/flags.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,565 +0,0 @@ -//! Command-line interface of the rustbuild build system. -//! -//! This module implements the command-line parsing of the build system which -//! has various flags to configure how it's run. 
- -use std::path::{Path, PathBuf}; - -use clap::{CommandFactory, Parser, ValueEnum}; - -use crate::builder::{Builder, Kind}; -use crate::config::{target_selection_list, Config, TargetSelectionList}; -use crate::setup::Profile; -use crate::{Build, DocTests}; - -#[derive(Copy, Clone, Default, Debug, ValueEnum)] -pub enum Color { - Always, - Never, - #[default] - Auto, -} - -/// Whether to deny warnings, emit them as warnings, or use the default behavior -#[derive(Copy, Clone, Default, Debug, ValueEnum)] -pub enum Warnings { - Deny, - Warn, - #[default] - Default, -} - -/// Deserialized version of all flags for this compile. -#[derive(Debug, Parser)] -#[clap( - override_usage = "x.py [options] [...]", - disable_help_subcommand(true), - about = "", - next_line_help(false) -)] -pub struct Flags { - #[command(subcommand)] - pub cmd: Subcommand, - - #[arg(global(true), short, long, action = clap::ArgAction::Count)] - /// use verbose output (-vv for very verbose) - pub verbose: u8, // each extra -v after the first is passed to Cargo - #[arg(global(true), short, long)] - /// use incremental compilation - pub incremental: bool, - #[arg(global(true), long, value_hint = clap::ValueHint::FilePath, value_name = "FILE")] - /// TOML configuration file for build - pub config: Option, - #[arg(global(true), long, value_hint = clap::ValueHint::DirPath, value_name = "DIR")] - /// Build directory, overrides `build.build-dir` in `config.toml` - pub build_dir: Option, - - #[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "BUILD")] - /// build target of the stage0 compiler - pub build: Option, - - #[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "HOST", value_parser = target_selection_list)] - /// host targets to build - pub host: Option, - - #[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "TARGET", value_parser = target_selection_list)] - /// target targets to build - pub target: Option, - - #[arg(global(true), long, value_name = "PATH")] - /// build paths to exclude - pub exclude: Vec, // keeping for client backward compatibility - #[arg(global(true), long, value_name = "PATH")] - /// build paths to skip - pub skip: Vec, - #[arg(global(true), long)] - /// include default paths in addition to the provided ones - pub include_default_paths: bool, - - #[arg(global(true), value_hint = clap::ValueHint::Other, long)] - pub rustc_error_format: Option, - - #[arg(global(true), long, value_hint = clap::ValueHint::CommandString, value_name = "CMD")] - /// command to run on failure - pub on_fail: Option, - #[arg(global(true), long)] - /// dry run; don't build anything - pub dry_run: bool, - #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")] - /// stage to build (indicates compiler to use/test, e.g., stage 0 uses the - /// bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.) 
- pub stage: Option, - - #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")] - /// stage(s) to keep without recompiling - /// (pass multiple times to keep e.g., both stages 0 and 1) - pub keep_stage: Vec, - #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")] - /// stage(s) of the standard library to keep without recompiling - /// (pass multiple times to keep e.g., both stages 0 and 1) - pub keep_stage_std: Vec, - #[arg(global(true), long, value_hint = clap::ValueHint::DirPath, value_name = "DIR")] - /// path to the root of the rust checkout - pub src: Option, - - #[arg( - global(true), - short, - long, - value_hint = clap::ValueHint::Other, - default_value_t = std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get), - value_name = "JOBS" - )] - /// number of jobs to run in parallel - pub jobs: usize, - // This overrides the deny-warnings configuration option, - // which passes -Dwarnings to the compiler invocations. - #[arg(global(true), long)] - #[clap(value_enum, default_value_t=Warnings::Default, value_name = "deny|warn")] - /// if value is deny, will deny warnings - /// if value is warn, will emit warnings - /// otherwise, use the default configured behaviour - pub warnings: Warnings, - - #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "FORMAT")] - /// rustc error format - pub error_format: Option, - #[arg(global(true), long)] - /// use message-format=json - pub json_output: bool, - - #[arg(global(true), long, value_name = "STYLE")] - #[clap(value_enum, default_value_t = Color::Auto)] - /// whether to use color in cargo and rustc output - pub color: Color, - - /// whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml - #[arg(global(true), long, value_name = "VALUE")] - pub llvm_skip_rebuild: Option, - /// generate PGO profile with rustc build - #[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")] - pub rust_profile_generate: Option, - /// use PGO profile for rustc build - #[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")] - pub rust_profile_use: Option, - /// use PGO profile for LLVM build - #[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")] - pub llvm_profile_use: Option, - // LLVM doesn't support a custom location for generating profile - // information. - // - // llvm_out/build/profiles/ is the location this writes to. - /// generate PGO profile with llvm built for rustc - #[arg(global(true), long)] - pub llvm_profile_generate: bool, - /// Additional reproducible artifacts that should be added to the reproducible artifacts archive. 
- #[arg(global(true), long)] - pub reproducible_artifact: Vec, - #[arg(global(true))] - /// paths for the subcommand - pub paths: Vec, - /// override options in config.toml - #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "section.option=value")] - pub set: Vec, - /// arguments passed to subcommands - #[arg(global(true), last(true), value_name = "ARGS")] - pub free_args: Vec, -} - -impl Flags { - pub fn parse(args: &[String]) -> Self { - let first = String::from("x.py"); - let it = std::iter::once(&first).chain(args.iter()); - // We need to check for ` -h -v`, in which case we list the paths - #[derive(Parser)] - #[clap(disable_help_flag(true))] - struct HelpVerboseOnly { - #[arg(short, long)] - help: bool, - #[arg(global(true), short, long, action = clap::ArgAction::Count)] - pub verbose: u8, - #[arg(value_enum)] - cmd: Kind, - } - if let Ok(HelpVerboseOnly { help: true, verbose: 1.., cmd: subcommand }) = - HelpVerboseOnly::try_parse_from(it.clone()) - { - println!("note: updating submodules before printing available paths"); - let config = Config::parse(&[String::from("build")]); - let build = Build::new(config); - let paths = Builder::get_help(&build, subcommand); - if let Some(s) = paths { - println!("{s}"); - } else { - panic!("No paths available for subcommand `{}`", subcommand.as_str()); - } - crate::exit!(0); - } - - Flags::parse_from(it) - } -} - -#[derive(Debug, Clone, Default, clap::Subcommand)] -pub enum Subcommand { - #[clap(aliases = ["b"], long_about = "\n - Arguments: - This subcommand accepts a number of paths to directories to the crates - and/or artifacts to compile. For example, for a quick build of a usable - compiler: - ./x.py build --stage 1 library/std - This will build a compiler and standard library from the local source code. - Once this is done, build/$ARCH/stage1 contains a usable compiler. - If no arguments are passed then the default artifacts for that stage are - compiled. For example: - ./x.py build --stage 0 - ./x.py build ")] - /// Compile either the compiler or libraries - #[default] - Build, - #[clap(aliases = ["c"], long_about = "\n - Arguments: - This subcommand accepts a number of paths to directories to the crates - and/or artifacts to compile. For example: - ./x.py check library/std - If no arguments are passed then many artifacts are checked.")] - /// Compile either the compiler or libraries, using cargo check - Check { - #[arg(long)] - /// Check all targets - all_targets: bool, - }, - /// Run Clippy (uses rustup/cargo-installed clippy binary) - #[clap(long_about = "\n - Arguments: - This subcommand accepts a number of paths to directories to the crates - and/or artifacts to run clippy against. 
For example: - ./x.py clippy library/core - ./x.py clippy library/core library/proc_macro")] - Clippy { - #[arg(long)] - fix: bool, - /// clippy lints to allow - #[arg(global(true), short = 'A', action = clap::ArgAction::Append, value_name = "LINT")] - allow: Vec, - /// clippy lints to deny - #[arg(global(true), short = 'D', action = clap::ArgAction::Append, value_name = "LINT")] - deny: Vec, - /// clippy lints to warn on - #[arg(global(true), short = 'W', action = clap::ArgAction::Append, value_name = "LINT")] - warn: Vec, - /// clippy lints to forbid - #[arg(global(true), short = 'F', action = clap::ArgAction::Append, value_name = "LINT")] - forbid: Vec, - }, - /// Run cargo fix - #[clap(long_about = "\n - Arguments: - This subcommand accepts a number of paths to directories to the crates - and/or artifacts to run `cargo fix` against. For example: - ./x.py fix library/core - ./x.py fix library/core library/proc_macro")] - Fix, - #[clap( - name = "fmt", - long_about = "\n - Arguments: - This subcommand optionally accepts a `--check` flag which succeeds if formatting is correct and - fails if it is not. For example: - ./x.py fmt - ./x.py fmt --check" - )] - /// Run rustfmt - Format { - /// check formatting instead of applying - #[arg(long)] - check: bool, - }, - #[clap(aliases = ["d"], long_about = "\n - Arguments: - This subcommand accepts a number of paths to directories of documentation - to build. For example: - ./x.py doc src/doc/book - ./x.py doc src/doc/nomicon - ./x.py doc src/doc/book library/std - ./x.py doc library/std --json - ./x.py doc library/std --open - If no arguments are passed then everything is documented: - ./x.py doc - ./x.py doc --stage 1")] - /// Build documentation - Doc { - #[arg(long)] - /// open the docs in a browser - open: bool, - #[arg(long)] - /// render the documentation in JSON format in addition to the usual HTML format - json: bool, - }, - #[clap(aliases = ["t"], long_about = "\n - Arguments: - This subcommand accepts a number of paths to test directories that - should be compiled and run. For example: - ./x.py test tests/ui - ./x.py test library/std --test-args hash_map - ./x.py test library/std --stage 0 --no-doc - ./x.py test tests/ui --bless - ./x.py test tests/ui --compare-mode next-solver - Note that `test tests/* --stage N` does NOT depend on `build compiler/rustc --stage N`; - just like `build library/std --stage N` it tests the compiler produced by the previous - stage. - Execute tool tests with a tool name argument: - ./x.py test tidy - If no arguments are passed then the complete artifacts for that stage are - compiled and tested. - ./x.py test - ./x.py test --stage 1")] - /// Build and run some test suites - Test { - #[arg(long)] - /// run all tests regardless of failure - no_fail_fast: bool, - #[arg(long, value_name = "SUBSTRING")] - /// skips tests matching SUBSTRING, if supported by test tool. May be passed multiple times - skip: Vec, - #[arg(long, value_name = "ARGS", allow_hyphen_values(true))] - /// extra arguments to be passed for the test tool being used - /// (e.g. 
libtest, compiletest or rustdoc) - test_args: Vec, - /// extra options to pass the compiler when running tests - #[arg(long, value_name = "ARGS", allow_hyphen_values(true))] - rustc_args: Vec, - #[arg(long)] - /// do not run doc tests - no_doc: bool, - #[arg(long)] - /// only run doc tests - doc: bool, - #[arg(long)] - /// whether to automatically update stderr/stdout files - bless: bool, - #[arg(long)] - /// comma-separated list of other files types to check (accepts py, py:lint, - /// py:fmt, shell) - extra_checks: Option, - #[arg(long)] - /// rerun tests even if the inputs are unchanged - force_rerun: bool, - #[arg(long)] - /// only run tests that result has been changed - only_modified: bool, - #[arg(long, value_name = "COMPARE MODE")] - /// mode describing what file the actual ui output will be compared to - compare_mode: Option, - #[arg(long, value_name = "check | build | run")] - /// force {check,build,run}-pass tests to this mode. - pass: Option, - #[arg(long, value_name = "auto | always | never")] - /// whether to execute run-* tests - run: Option, - #[arg(long)] - /// enable this to generate a Rustfix coverage file, which is saved in - /// `//rustfix_missing_coverage.txt` - rustfix_coverage: bool, - }, - /// Build and run some benchmarks - Bench { - #[arg(long, allow_hyphen_values(true))] - test_args: Vec, - }, - /// Clean out build directories - Clean { - #[arg(long)] - /// Clean the entire build directory (not used by default) - all: bool, - #[arg(long, value_name = "N")] - /// Clean a specific stage without touching other artifacts. By default, every stage is cleaned if this option is not used. - stage: Option, - }, - /// Build distribution artifacts - Dist, - /// Install distribution artifacts - Install, - #[clap(aliases = ["r"], long_about = "\n - Arguments: - This subcommand accepts a number of paths to tools to build and run. For - example: - ./x.py run src/tools/expand-yaml-anchors - At least a tool needs to be called.")] - /// Run tools contained in this repository - Run { - /// arguments for the tool - #[arg(long, allow_hyphen_values(true))] - args: Vec, - }, - /// Set up the environment for development - #[clap(long_about = format!( - "\n -x.py setup creates a `config.toml` which changes the defaults for x.py itself, -as well as setting up a git pre-push hook, VS Code config and toolchain link. -Arguments: - This subcommand accepts a 'profile' to use for builds. For example: - ./x.py setup library - The profile is optional and you will be prompted interactively if it is not given. - The following profiles are available: -{} - To only set up the git hook, VS Code config or toolchain link, you may use - ./x.py setup hook - ./x.py setup vscode - ./x.py setup link", Profile::all_for_help(" ").trim_end()))] - Setup { - /// Either the profile for `config.toml` or another setup action. - /// May be omitted to set up interactively - #[arg(value_name = "|hook|vscode|link")] - profile: Option, - }, - /// Suggest a subset of tests to run, based on modified files - #[clap(long_about = "\n")] - Suggest { - /// run suggested tests - #[arg(long)] - run: bool, - }, -} - -impl Subcommand { - pub fn kind(&self) -> Kind { - match self { - Subcommand::Bench { .. } => Kind::Bench, - Subcommand::Build { .. } => Kind::Build, - Subcommand::Check { .. } => Kind::Check, - Subcommand::Clippy { .. } => Kind::Clippy, - Subcommand::Doc { .. } => Kind::Doc, - Subcommand::Fix { .. } => Kind::Fix, - Subcommand::Format { .. } => Kind::Format, - Subcommand::Test { .. 
} => Kind::Test, - Subcommand::Clean { .. } => Kind::Clean, - Subcommand::Dist { .. } => Kind::Dist, - Subcommand::Install { .. } => Kind::Install, - Subcommand::Run { .. } => Kind::Run, - Subcommand::Setup { .. } => Kind::Setup, - Subcommand::Suggest { .. } => Kind::Suggest, - } - } - - pub fn rustc_args(&self) -> Vec<&str> { - match *self { - Subcommand::Test { ref rustc_args, .. } => { - rustc_args.iter().flat_map(|s| s.split_whitespace()).collect() - } - _ => vec![], - } - } - - pub fn fail_fast(&self) -> bool { - match *self { - Subcommand::Test { no_fail_fast, .. } => !no_fail_fast, - _ => false, - } - } - - pub fn doc_tests(&self) -> DocTests { - match *self { - Subcommand::Test { doc, no_doc, .. } => { - if doc { - DocTests::Only - } else if no_doc { - DocTests::No - } else { - DocTests::Yes - } - } - _ => DocTests::Yes, - } - } - - pub fn bless(&self) -> bool { - match *self { - Subcommand::Test { bless, .. } => bless, - _ => false, - } - } - - pub fn extra_checks(&self) -> Option<&str> { - match *self { - Subcommand::Test { ref extra_checks, .. } => extra_checks.as_ref().map(String::as_str), - _ => None, - } - } - - pub fn only_modified(&self) -> bool { - match *self { - Subcommand::Test { only_modified, .. } => only_modified, - _ => false, - } - } - - pub fn force_rerun(&self) -> bool { - match *self { - Subcommand::Test { force_rerun, .. } => force_rerun, - _ => false, - } - } - - pub fn rustfix_coverage(&self) -> bool { - match *self { - Subcommand::Test { rustfix_coverage, .. } => rustfix_coverage, - _ => false, - } - } - - pub fn compare_mode(&self) -> Option<&str> { - match *self { - Subcommand::Test { ref compare_mode, .. } => compare_mode.as_ref().map(|s| &s[..]), - _ => None, - } - } - - pub fn pass(&self) -> Option<&str> { - match *self { - Subcommand::Test { ref pass, .. } => pass.as_ref().map(|s| &s[..]), - _ => None, - } - } - - pub fn run(&self) -> Option<&str> { - match *self { - Subcommand::Test { ref run, .. } => run.as_ref().map(|s| &s[..]), - _ => None, - } - } - - pub fn open(&self) -> bool { - match *self { - Subcommand::Doc { open, .. } => open, - _ => false, - } - } - - pub fn json(&self) -> bool { - match *self { - Subcommand::Doc { json, .. } => json, - _ => false, - } - } -} - -/// Returns the shell completion for a given shell, if the result differs from the current -/// content of `path`. If `path` does not exist, always returns `Some`. -pub fn get_completion(shell: G, path: &Path) -> Option { - let mut cmd = Flags::command(); - let current = if !path.exists() { - String::new() - } else { - std::fs::read_to_string(path).unwrap_or_else(|_| { - eprintln!("couldn't read {}", path.display()); - crate::exit!(1) - }) - }; - let mut buf = Vec::new(); - clap_complete::generate(shell, &mut cmd, "x.py", &mut buf); - if buf == current.as_bytes() { - return None; - } - Some(String::from_utf8(buf).expect("completion script should be UTF-8")) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/format.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/format.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/format.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/format.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,322 +0,0 @@ -//! Runs rustfmt on the repository. 
- -use crate::builder::Builder; -use crate::util::{output, program_out_of_date, t}; -use build_helper::ci::CiEnv; -use build_helper::git::get_git_modified_files; -use ignore::WalkBuilder; -use std::collections::VecDeque; -use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio}; -use std::sync::mpsc::SyncSender; - -fn rustfmt(src: &Path, rustfmt: &Path, paths: &[PathBuf], check: bool) -> impl FnMut(bool) -> bool { - let mut cmd = Command::new(&rustfmt); - // avoid the submodule config paths from coming into play, - // we only allow a single global config for the workspace for now - cmd.arg("--config-path").arg(&src.canonicalize().unwrap()); - cmd.arg("--edition").arg("2021"); - cmd.arg("--unstable-features"); - cmd.arg("--skip-children"); - if check { - cmd.arg("--check"); - } - cmd.args(paths); - let cmd_debug = format!("{cmd:?}"); - let mut cmd = cmd.spawn().expect("running rustfmt"); - // poor man's async: return a closure that'll wait for rustfmt's completion - move |block: bool| -> bool { - if !block { - match cmd.try_wait() { - Ok(Some(_)) => {} - _ => return false, - } - } - let status = cmd.wait().unwrap(); - if !status.success() { - eprintln!( - "Running `{}` failed.\nIf you're running `tidy`, \ - try again with `--bless`. Or, if you just want to format \ - code, run `./x.py fmt` instead.", - cmd_debug, - ); - crate::exit!(1); - } - true - } -} - -fn get_rustfmt_version(build: &Builder<'_>) -> Option<(String, PathBuf)> { - let stamp_file = build.out.join("rustfmt.stamp"); - - let mut cmd = Command::new(match build.initial_rustfmt() { - Some(p) => p, - None => return None, - }); - cmd.arg("--version"); - let output = match cmd.output() { - Ok(status) => status, - Err(_) => return None, - }; - if !output.status.success() { - return None; - } - Some((String::from_utf8(output.stdout).unwrap(), stamp_file)) -} - -/// Return whether the format cache can be reused. -fn verify_rustfmt_version(build: &Builder<'_>) -> bool { - let Some((version, stamp_file)) = get_rustfmt_version(build) else { - return false; - }; - !program_out_of_date(&stamp_file, &version) -} - -/// Updates the last rustfmt version used -fn update_rustfmt_version(build: &Builder<'_>) { - let Some((version, stamp_file)) = get_rustfmt_version(build) else { - return; - }; - t!(std::fs::write(stamp_file, version)) -} - -/// Returns the Rust files modified between the `merge-base` of HEAD and -/// rust-lang/master and what is now on the disk. -/// -/// Returns `None` if all files should be formatted. 
-fn get_modified_rs_files(build: &Builder<'_>) -> Result>, String> { - if !verify_rustfmt_version(build) { - return Ok(None); - } - - get_git_modified_files(Some(&build.config.src), &vec!["rs"]) -} - -#[derive(serde_derive::Deserialize)] -struct RustfmtConfig { - ignore: Vec, -} - -pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) { - if build.config.dry_run() { - return; - } - let mut builder = ignore::types::TypesBuilder::new(); - builder.add_defaults(); - builder.select("rust"); - let matcher = builder.build().unwrap(); - let rustfmt_config = build.src.join("rustfmt.toml"); - if !rustfmt_config.exists() { - eprintln!("Not running formatting checks; rustfmt.toml does not exist."); - eprintln!("This may happen in distributed tarballs."); - return; - } - let rustfmt_config = t!(std::fs::read_to_string(&rustfmt_config)); - let rustfmt_config: RustfmtConfig = t!(toml::from_str(&rustfmt_config)); - let mut fmt_override = ignore::overrides::OverrideBuilder::new(&build.src); - for ignore in rustfmt_config.ignore { - fmt_override.add(&format!("!{ignore}")).expect(&ignore); - } - let git_available = match Command::new("git") - .arg("--version") - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .status() - { - Ok(status) => status.success(), - Err(_) => false, - }; - - if git_available { - let in_working_tree = match build - .config - .git() - .arg("rev-parse") - .arg("--is-inside-work-tree") - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .status() - { - Ok(status) => status.success(), - Err(_) => false, - }; - if in_working_tree { - let untracked_paths_output = output( - build.config.git().arg("status").arg("--porcelain").arg("--untracked-files=normal"), - ); - let untracked_paths = untracked_paths_output - .lines() - .filter(|entry| entry.starts_with("??")) - .map(|entry| { - entry.split(' ').nth(1).expect("every git status entry should list a path") - }); - let mut untracked_count = 0; - for untracked_path in untracked_paths { - println!("skip untracked path {untracked_path} during rustfmt invocations"); - // The leading `/` makes it an exact match against the - // repository root, rather than a glob. Without that, if you - // have `foo.rs` in the repository root it will also match - // against anything like `compiler/rustc_foo/src/foo.rs`, - // preventing the latter from being formatted. - untracked_count += 1; - fmt_override.add(&format!("!/{untracked_path}")).expect(&untracked_path); - } - // Only check modified files locally to speed up runtime. - // We still check all files in CI to avoid bugs in `get_modified_rs_files` letting regressions slip through; - // we also care about CI time less since this is still very fast compared to building the compiler. - if !CiEnv::is_ci() && paths.is_empty() { - match get_modified_rs_files(build) { - Ok(Some(files)) => { - if files.len() <= 10 { - for file in &files { - println!("formatting modified file {file}"); - } - } else { - let pluralized = |count| if count > 1 { "files" } else { "file" }; - let untracked_msg = if untracked_count == 0 { - "".to_string() - } else { - format!( - ", skipped {} untracked {}", - untracked_count, - pluralized(untracked_count), - ) - }; - println!( - "formatting {} modified {}{}", - files.len(), - pluralized(files.len()), - untracked_msg - ); - } - for file in files { - fmt_override.add(&format!("/{file}")).expect(&file); - } - } - Ok(None) => {} - Err(err) => { - println!( - "WARN: Something went wrong when running git commands:\n{err}\n\ - Falling back to formatting all files." 
- ); - } - } - } - } else { - println!("Not in git tree. Skipping git-aware format checks"); - } - } else { - println!("Could not find usable git. Skipping git-aware format checks"); - } - - let fmt_override = fmt_override.build().unwrap(); - - let rustfmt_path = build.initial_rustfmt().unwrap_or_else(|| { - eprintln!("./x.py fmt is not supported on this channel"); - crate::exit!(1); - }); - assert!(rustfmt_path.exists(), "{}", rustfmt_path.display()); - let src = build.src.clone(); - let (tx, rx): (SyncSender, _) = std::sync::mpsc::sync_channel(128); - let walker = match paths.get(0) { - Some(first) => { - let find_shortcut_candidates = |p: &PathBuf| { - let mut candidates = Vec::new(); - for candidate in WalkBuilder::new(src.clone()).max_depth(Some(3)).build() { - if let Ok(entry) = candidate { - if let Some(dir_name) = p.file_name() { - if entry.path().is_dir() && entry.file_name() == dir_name { - candidates.push(entry.into_path()); - } - } - } - } - candidates - }; - - // Only try to look for shortcut candidates for single component paths like - // `std` and not for e.g. relative paths like `../library/std`. - let should_look_for_shortcut_dir = |p: &PathBuf| p.components().count() == 1; - - let mut walker = if should_look_for_shortcut_dir(first) { - if let [single_candidate] = &find_shortcut_candidates(first)[..] { - WalkBuilder::new(single_candidate) - } else { - WalkBuilder::new(first) - } - } else { - WalkBuilder::new(src.join(first)) - }; - - for path in &paths[1..] { - if should_look_for_shortcut_dir(path) { - if let [single_candidate] = &find_shortcut_candidates(path)[..] { - walker.add(single_candidate); - } else { - walker.add(path); - } - } else { - walker.add(src.join(path)); - } - } - - walker - } - None => WalkBuilder::new(src.clone()), - } - .types(matcher) - .overrides(fmt_override) - .build_parallel(); - - // there is a lot of blocking involved in spawning a child process and reading files to format. 
- // spawn more processes than available concurrency to keep the CPU busy - let max_processes = build.jobs() as usize * 2; - - // spawn child processes on a separate thread so we can batch entries we have received from ignore - let thread = std::thread::spawn(move || { - let mut children = VecDeque::new(); - while let Ok(path) = rx.recv() { - // try getting a few more paths from the channel to amortize the overhead of spawning processes - let paths: Vec<_> = rx.try_iter().take(7).chain(std::iter::once(path)).collect(); - - let child = rustfmt(&src, &rustfmt_path, paths.as_slice(), check); - children.push_back(child); - - // poll completion before waiting - for i in (0..children.len()).rev() { - if children[i](false) { - children.swap_remove_back(i); - break; - } - } - - if children.len() >= max_processes { - // await oldest child - children.pop_front().unwrap()(true); - } - } - - // await remaining children - for mut child in children { - child(true); - } - }); - - walker.run(|| { - let tx = tx.clone(); - Box::new(move |entry| { - let entry = t!(entry); - if entry.file_type().map_or(false, |t| t.is_file()) { - t!(tx.send(entry.into_path())); - } - ignore::WalkState::Continue - }) - }); - - drop(tx); - - thread.join().unwrap(); - if !check { - update_rustfmt_version(build); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/install.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/install.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/install.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/install.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,320 +0,0 @@ -//! Implementation of the install aspects of the compiler. -//! -//! This module is responsible for installing the standard library, -//! compiler, and documentation. - -use std::env; -use std::fs; -use std::path::{Component, Path, PathBuf}; -use std::process::Command; - -use crate::util::t; - -use crate::dist; -use crate::tarball::GeneratedTarball; -use crate::{Compiler, Kind}; - -use crate::builder::{Builder, RunConfig, ShouldRun, Step}; -use crate::config::{Config, TargetSelection}; - -#[cfg(target_os = "illumos")] -const SHELL: &str = "bash"; -#[cfg(not(target_os = "illumos"))] -const SHELL: &str = "sh"; - -// We have to run a few shell scripts, which choke quite a bit on both `\` -// characters and on `C:\` paths, so normalize both of them away. -fn sanitize_sh(path: &Path) -> String { - let path = path.to_str().unwrap().replace("\\", "/"); - return change_drive(unc_to_lfs(&path)).unwrap_or(path); - - fn unc_to_lfs(s: &str) -> &str { - s.strip_prefix("//?/").unwrap_or(s) - } - - fn change_drive(s: &str) -> Option { - let mut ch = s.chars(); - let drive = ch.next().unwrap_or('C'); - if ch.next() != Some(':') { - return None; - } - if ch.next() != Some('/') { - return None; - } - Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..])) - } -} - -fn is_dir_writable_for_user(dir: &PathBuf) -> bool { - let tmp = dir.join(".tmp"); - match fs::create_dir_all(&tmp) { - Ok(_) => { - fs::remove_dir_all(tmp).unwrap(); - true - } - Err(e) => { - if e.kind() == std::io::ErrorKind::PermissionDenied { - false - } else { - panic!("Failed the write access check for the current user. 
{}", e); - } - } - } -} - -fn install_sh( - builder: &Builder<'_>, - package: &str, - stage: u32, - host: Option, - tarball: &GeneratedTarball, -) { - let _guard = builder.msg(Kind::Install, stage, package, host, host); - - let prefix = default_path(&builder.config.prefix, "/usr/local"); - let sysconfdir = prefix.join(default_path(&builder.config.sysconfdir, "/etc")); - let destdir_env = env::var_os("DESTDIR").map(PathBuf::from); - - // Sanity checks on the write access of user. - // - // When the `DESTDIR` environment variable is present, there is no point to - // check write access for `prefix` and `sysconfdir` individually, as they - // are combined with the path from the `DESTDIR` environment variable. In - // this case, we only need to check the `DESTDIR` path, disregarding the - // `prefix` and `sysconfdir` paths. - if let Some(destdir) = &destdir_env { - assert!(is_dir_writable_for_user(destdir), "User doesn't have write access on DESTDIR."); - } else { - assert!( - is_dir_writable_for_user(&prefix), - "User doesn't have write access on `install.prefix` path in the `config.toml`.", - ); - assert!( - is_dir_writable_for_user(&sysconfdir), - "User doesn't have write access on `install.sysconfdir` path in `config.toml`." - ); - } - - let datadir = prefix.join(default_path(&builder.config.datadir, "share")); - let docdir = prefix.join(default_path(&builder.config.docdir, "share/doc/rust")); - let mandir = prefix.join(default_path(&builder.config.mandir, "share/man")); - let libdir = prefix.join(default_path(&builder.config.libdir, "lib")); - let bindir = prefix.join(&builder.config.bindir); // Default in config.rs - - let empty_dir = builder.out.join("tmp/empty_dir"); - t!(fs::create_dir_all(&empty_dir)); - - let mut cmd = Command::new(SHELL); - cmd.current_dir(&empty_dir) - .arg(sanitize_sh(&tarball.decompressed_output().join("install.sh"))) - .arg(format!("--prefix={}", prepare_dir(&destdir_env, prefix))) - .arg(format!("--sysconfdir={}", prepare_dir(&destdir_env, sysconfdir))) - .arg(format!("--datadir={}", prepare_dir(&destdir_env, datadir))) - .arg(format!("--docdir={}", prepare_dir(&destdir_env, docdir))) - .arg(format!("--bindir={}", prepare_dir(&destdir_env, bindir))) - .arg(format!("--libdir={}", prepare_dir(&destdir_env, libdir))) - .arg(format!("--mandir={}", prepare_dir(&destdir_env, mandir))) - .arg("--disable-ldconfig"); - builder.run(&mut cmd); - t!(fs::remove_dir_all(&empty_dir)); -} - -fn default_path(config: &Option, default: &str) -> PathBuf { - config.as_ref().cloned().unwrap_or_else(|| PathBuf::from(default)) -} - -fn prepare_dir(destdir_env: &Option, mut path: PathBuf) -> String { - // The DESTDIR environment variable is a standard way to install software in a subdirectory - // while keeping the original directory structure, even if the prefix or other directories - // contain absolute paths. - // - // More information on the environment variable is available here: - // https://www.gnu.org/prep/standards/html_node/DESTDIR.html - if let Some(destdir) = destdir_env { - let without_destdir = path.clone(); - path = destdir.clone(); - // Custom .join() which ignores disk roots. - for part in without_destdir.components() { - if let Component::Normal(s) = part { - path.push(s) - } - } - } - - // The installation command is not executed from the current directory, but from a temporary - // directory. To prevent relative paths from breaking this converts relative paths to absolute - // paths. 
std::fs::canonicalize is not used as that requires the path to actually be present. - if path.is_relative() { - path = std::env::current_dir().expect("failed to get the current directory").join(path); - assert!(path.is_absolute(), "could not make the path relative"); - } - - sanitize_sh(&path) -} - -macro_rules! install { - (($sel:ident, $builder:ident, $_config:ident), - $($name:ident, - $condition_name: ident = $path_or_alias: literal, - $default_cond:expr, - only_hosts: $only_hosts:expr, - $run_item:block $(, $c:ident)*;)+) => { - $( - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] - pub struct $name { - pub compiler: Compiler, - pub target: TargetSelection, - } - - impl $name { - #[allow(dead_code)] - fn should_build(config: &Config) -> bool { - config.extended && config.tools.as_ref() - .map_or(true, |t| t.contains($path_or_alias)) - } - } - - impl Step for $name { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = $only_hosts; - $(const $c: bool = true;)* - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let $_config = &run.builder.config; - run.$condition_name($path_or_alias).default_condition($default_cond) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure($name { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), - target: run.target, - }); - } - - fn run($sel, $builder: &Builder<'_>) { - $run_item - } - })+ - } -} - -install!((self, builder, _config), - Docs, path = "src/doc", _config.docs, only_hosts: false, { - let tarball = builder.ensure(dist::Docs { host: self.target }).expect("missing docs"); - install_sh(builder, "docs", self.compiler.stage, Some(self.target), &tarball); - }; - Std, path = "library/std", true, only_hosts: false, { - for target in &builder.targets { - // `expect` should be safe, only None when host != build, but this - // only runs when host == build - let tarball = builder.ensure(dist::Std { - compiler: self.compiler, - target: *target - }).expect("missing std"); - install_sh(builder, "std", self.compiler.stage, Some(*target), &tarball); - } - }; - Cargo, alias = "cargo", Self::should_build(_config), only_hosts: true, { - let tarball = builder - .ensure(dist::Cargo { compiler: self.compiler, target: self.target }) - .expect("missing cargo"); - install_sh(builder, "cargo", self.compiler.stage, Some(self.target), &tarball); - }; - RustAnalyzer, alias = "rust-analyzer", Self::should_build(_config), only_hosts: true, { - if let Some(tarball) = - builder.ensure(dist::RustAnalyzer { compiler: self.compiler, target: self.target }) - { - install_sh(builder, "rust-analyzer", self.compiler.stage, Some(self.target), &tarball); - } else { - builder.info( - &format!("skipping Install rust-analyzer stage{} ({})", self.compiler.stage, self.target), - ); - } - }; - Clippy, alias = "clippy", Self::should_build(_config), only_hosts: true, { - let tarball = builder - .ensure(dist::Clippy { compiler: self.compiler, target: self.target }) - .expect("missing clippy"); - install_sh(builder, "clippy", self.compiler.stage, Some(self.target), &tarball); - }; - Miri, alias = "miri", Self::should_build(_config), only_hosts: true, { - if let Some(tarball) = builder.ensure(dist::Miri { compiler: self.compiler, target: self.target }) { - install_sh(builder, "miri", self.compiler.stage, Some(self.target), &tarball); - } else { - // Miri is only available on nightly - builder.info( - &format!("skipping Install miri stage{} ({})", self.compiler.stage, self.target), - ); - } - }; - LlvmTools, alias = 
"llvm-tools", Self::should_build(_config), only_hosts: true, { - if let Some(tarball) = builder.ensure(dist::LlvmTools { target: self.target }) { - install_sh(builder, "llvm-tools", self.compiler.stage, Some(self.target), &tarball); - } else { - builder.info( - &format!("skipping llvm-tools stage{} ({}): external LLVM", self.compiler.stage, self.target), - ); - } - }; - Rustfmt, alias = "rustfmt", Self::should_build(_config), only_hosts: true, { - if let Some(tarball) = builder.ensure(dist::Rustfmt { - compiler: self.compiler, - target: self.target - }) { - install_sh(builder, "rustfmt", self.compiler.stage, Some(self.target), &tarball); - } else { - builder.info( - &format!("skipping Install Rustfmt stage{} ({})", self.compiler.stage, self.target), - ); - } - }; - RustDemangler, alias = "rust-demangler", Self::should_build(_config), only_hosts: true, { - // Note: Even though `should_build` may return true for `extended` default tools, - // dist::RustDemangler may still return None, unless the target-dependent `profiler` config - // is also true, or the `tools` array explicitly includes "rust-demangler". - if let Some(tarball) = builder.ensure(dist::RustDemangler { - compiler: self.compiler, - target: self.target - }) { - install_sh(builder, "rust-demangler", self.compiler.stage, Some(self.target), &tarball); - } else { - builder.info( - &format!("skipping Install RustDemangler stage{} ({})", - self.compiler.stage, self.target), - ); - } - }; - Rustc, path = "compiler/rustc", true, only_hosts: true, { - let tarball = builder.ensure(dist::Rustc { - compiler: builder.compiler(builder.top_stage, self.target), - }); - install_sh(builder, "rustc", self.compiler.stage, Some(self.target), &tarball); - }; -); - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Src { - pub stage: u32, -} - -impl Step for Src { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let config = &run.builder.config; - let cond = config.extended && config.tools.as_ref().map_or(true, |t| t.contains("src")); - run.path("src").default_condition(cond) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Src { stage: run.builder.top_stage }); - } - - fn run(self, builder: &Builder<'_>) { - let tarball = builder.ensure(dist::Src); - install_sh(builder, "src", self.stage, None, &tarball); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/job.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/job.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/job.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/job.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,143 +0,0 @@ -//! Job management on Windows for bootstrapping -//! -//! Most of the time when you're running a build system (e.g., make) you expect -//! Ctrl-C or abnormal termination to actually terminate the entire tree of -//! process in play, not just the one at the top. This currently works "by -//! default" on Unix platforms because Ctrl-C actually sends a signal to the -//! *process group* rather than the parent process, so everything will get torn -//! down. On Windows, however, this does not happen and Ctrl-C just kills the -//! parent process. -//! -//! To achieve the same semantics on Windows we use Job Objects to ensure that -//! all processes die at the same time. Job objects have a mode of operation -//! where when all handles to the object are closed it causes all child -//! 
processes associated with the object to be terminated immediately. -//! Conveniently whenever a process in the job object spawns a new process the -//! child will be associated with the job object as well. This means if we add -//! ourselves to the job object we create then everything will get torn down! -//! -//! Unfortunately most of the time the build system is actually called from a -//! python wrapper (which manages things like building the build system) so this -//! all doesn't quite cut it so far. To go the last mile we duplicate the job -//! object handle into our parent process (a python process probably) and then -//! close our own handle. This means that the only handle to the job object -//! resides in the parent python process, so when python dies the whole build -//! system dies (as one would probably expect!). -//! -//! Note that this module has a #[cfg(windows)] above it as none of this logic -//! is required on Unix. - -use crate::Build; -use std::env; -use std::ffi::c_void; -use std::io; -use std::mem; - -use windows::{ - core::PCWSTR, - Win32::Foundation::{CloseHandle, DuplicateHandle, DUPLICATE_SAME_ACCESS, HANDLE}, - Win32::System::Diagnostics::Debug::{SetErrorMode, SEM_NOGPFAULTERRORBOX, THREAD_ERROR_MODE}, - Win32::System::JobObjects::{ - AssignProcessToJobObject, CreateJobObjectW, JobObjectExtendedLimitInformation, - SetInformationJobObject, JOBOBJECT_EXTENDED_LIMIT_INFORMATION, - JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE, JOB_OBJECT_LIMIT_PRIORITY_CLASS, - }, - Win32::System::Threading::{ - GetCurrentProcess, OpenProcess, BELOW_NORMAL_PRIORITY_CLASS, PROCESS_DUP_HANDLE, - }, -}; - -pub unsafe fn setup(build: &mut Build) { - // Enable the Windows Error Reporting dialog which msys disables, - // so we can JIT debug rustc - let mode = SetErrorMode(THREAD_ERROR_MODE::default()); - let mode = THREAD_ERROR_MODE(mode); - SetErrorMode(mode & !SEM_NOGPFAULTERRORBOX); - - // Create a new job object for us to use - let job = CreateJobObjectW(None, PCWSTR::null()).unwrap(); - - // Indicate that when all handles to the job object are gone that all - // process in the object should be killed. Note that this includes our - // entire process tree by default because we've added ourselves and our - // children will reside in the job by default. - let mut info = JOBOBJECT_EXTENDED_LIMIT_INFORMATION::default(); - info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE; - if build.config.low_priority { - info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_PRIORITY_CLASS; - info.BasicLimitInformation.PriorityClass = BELOW_NORMAL_PRIORITY_CLASS.0; - } - let r = SetInformationJobObject( - job, - JobObjectExtendedLimitInformation, - &info as *const _ as *const c_void, - mem::size_of_val(&info) as u32, - ) - .ok(); - assert!(r.is_ok(), "{}", io::Error::last_os_error()); - - // Assign our process to this job object. Note that if this fails, one very - // likely reason is that we are ourselves already in a job object! This can - // happen on the build bots that we've got for Windows, or if just anyone - // else is instrumenting the build. In this case we just bail out - // immediately and assume that they take care of it. - // - // Also note that nested jobs (why this might fail) are supported in recent - // versions of Windows, but the version of Windows that our bots are running - // at least don't support nested job objects. 
- let r = AssignProcessToJobObject(job, GetCurrentProcess()).ok(); - if r.is_err() { - CloseHandle(job); - return; - } - - // If we've got a parent process (e.g., the python script that called us) - // then move ownership of this job object up to them. That way if the python - // script is killed (e.g., via ctrl-c) then we'll all be torn down. - // - // If we don't have a parent (e.g., this was run directly) then we - // intentionally leak the job object handle. When our process exits - // (normally or abnormally) it will close the handle implicitly, causing all - // processes in the job to be cleaned up. - let pid = match env::var("BOOTSTRAP_PARENT_ID") { - Ok(s) => s, - Err(..) => return, - }; - - let parent = match OpenProcess(PROCESS_DUP_HANDLE, false, pid.parse().unwrap()).ok() { - Some(parent) => parent, - _ => { - // If we get a null parent pointer here, it is possible that either - // we have an invalid pid or the parent process has been closed. - // Since the first case rarely happens - // (only when wrongly setting the environmental variable), - // it might be better to improve the experience of the second case - // when users have interrupted the parent process and we haven't finish - // duplicating the handle yet. We just need close the job object if that occurs. - CloseHandle(job); - return; - } - }; - - let mut parent_handle = HANDLE::default(); - let r = DuplicateHandle( - GetCurrentProcess(), - job, - parent, - &mut parent_handle, - 0, - false, - DUPLICATE_SAME_ACCESS, - ) - .ok(); - - // If this failed, well at least we tried! An example of DuplicateHandle - // failing in the past has been when the wrong python2 package spawned this - // build system (e.g., the `python2` package in MSYS instead of - // `mingw-w64-x86_64-python2`). Not sure why it failed, but the "failure - // mode" here is that we only clean everything up when the build system - // dies, not when the python parent does, so not too bad. - if r.is_err() { - CloseHandle(job); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/lib.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/lib.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1846 +0,0 @@ -//! Implementation of rustbuild, the Rust build system. -//! -//! This module, and its descendants, are the implementation of the Rust build -//! system. Most of this build system is backed by Cargo but the outer layer -//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo -//! builds, building artifacts like LLVM, etc. The goals of rustbuild are: -//! -//! * To be an easily understandable, easily extensible, and maintainable build -//! system. -//! * Leverage standard tools in the Rust ecosystem to build the compiler, aka -//! crates.io and Cargo. -//! * A standard interface to build across all platforms, including MSVC -//! -//! ## Further information -//! -//! More documentation can be found in each respective module below, and you can -//! also check out the `src/bootstrap/README.md` file for more information. 
- -use std::cell::{Cell, RefCell}; -use std::collections::{HashMap, HashSet}; -use std::env; -use std::fmt::Display; -use std::fs::{self, File}; -use std::io; -use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio}; -use std::str; - -use build_helper::ci::{gha, CiEnv}; -use build_helper::exit; -use channel::GitInfo; -use config::{DryRun, Target}; -use filetime::FileTime; -use once_cell::sync::OnceCell; - -use crate::builder::Kind; -use crate::config::{LlvmLibunwind, TargetSelection}; -use crate::util::{ - dir_is_empty, exe, libdir, mtime, output, run, run_suppressed, symlink_dir, try_run_suppressed, -}; - -mod builder; -mod cache; -mod cc_detect; -mod channel; -mod check; -mod clean; -mod compile; -mod config; -mod dist; -mod doc; -mod download; -mod flags; -mod format; -mod install; -mod llvm; -mod metadata; -mod render_tests; -mod run; -mod sanity; -mod setup; -mod suggest; -mod synthetic_targets; -mod tarball; -mod test; -mod tool; -mod toolstate; -pub mod util; - -#[cfg(feature = "build-metrics")] -mod metrics; - -#[cfg(windows)] -mod job; - -#[cfg(all(unix, not(target_os = "haiku")))] -mod job { - pub unsafe fn setup(build: &mut crate::Build) { - if build.config.low_priority { - libc::setpriority(libc::PRIO_PGRP as _, 0, 10); - } - } -} - -#[cfg(any(target_os = "haiku", target_os = "hermit", not(any(unix, windows))))] -mod job { - pub unsafe fn setup(_build: &mut crate::Build) {} -} - -pub use crate::builder::PathSet; -use crate::cache::{Interned, INTERNER}; -pub use crate::config::Config; -pub use crate::flags::Subcommand; -use termcolor::{ColorChoice, StandardStream, WriteColor}; - -const LLVM_TOOLS: &[&str] = &[ - "llvm-cov", // used to generate coverage report - "llvm-nm", // used to inspect binaries; it shows symbol names, their sizes and visibility - "llvm-objcopy", // used to transform ELFs into binary format which flashing tools consume - "llvm-objdump", // used to disassemble programs - "llvm-profdata", // used to inspect and merge files generated by profiles - "llvm-readobj", // used to get information from ELFs/objects that the other tools don't provide - "llvm-size", // used to prints the size of the linker sections of a program - "llvm-strip", // used to discard symbols from binary files to reduce their size - "llvm-ar", // used for creating and modifying archive files - "llvm-as", // used to convert LLVM assembly to LLVM bitcode - "llvm-dis", // used to disassemble LLVM bitcode - "llc", // used to compile LLVM bytecode - "opt", // used to optimize LLVM bytecode -]; - -/// LLD file names for all flavors. 
-const LLD_FILE_NAMES: &[&str] = &["ld.lld", "ld64.lld", "lld-link", "wasm-ld"]; - -pub const VERSION: usize = 2; - -/// Extra --check-cfg to add when building -/// (Mode restriction, config name, config values (if any)) -const EXTRA_CHECK_CFGS: &[(Option, &str, Option<&[&'static str]>)] = &[ - (None, "bootstrap", None), - (Some(Mode::Rustc), "parallel_compiler", None), - (Some(Mode::ToolRustc), "parallel_compiler", None), - (Some(Mode::Codegen), "parallel_compiler", None), - (Some(Mode::Std), "stdarch_intel_sde", None), - (Some(Mode::Std), "no_fp_fmt_parse", None), - (Some(Mode::Std), "no_global_oom_handling", None), - (Some(Mode::Std), "no_rc", None), - (Some(Mode::Std), "no_sync", None), - (Some(Mode::Std), "freebsd12", None), - (Some(Mode::Std), "freebsd13", None), - (Some(Mode::Std), "backtrace_in_libstd", None), - /* Extra values not defined in the built-in targets yet, but used in std */ - // #[cfg(bootstrap)] - (Some(Mode::Std), "target_vendor", Some(&["unikraft"])), - (Some(Mode::Std), "target_env", Some(&["libnx"])), - // #[cfg(bootstrap)] hurd - (Some(Mode::Std), "target_os", Some(&["teeos", "hurd"])), - (Some(Mode::Rustc), "target_os", Some(&["hurd"])), - // #[cfg(bootstrap)] mips32r6, mips64r6 - ( - Some(Mode::Std), - "target_arch", - Some(&["asmjs", "spirv", "nvptx", "xtensa", "mips32r6", "mips64r6", "csky"]), - ), - /* Extra names used by dependencies */ - // FIXME: Used by serde_json, but we should not be triggering on external dependencies. - (Some(Mode::Rustc), "no_btreemap_remove_entry", None), - (Some(Mode::ToolRustc), "no_btreemap_remove_entry", None), - // FIXME: Used by crossbeam-utils, but we should not be triggering on external dependencies. - (Some(Mode::Rustc), "crossbeam_loom", None), - (Some(Mode::ToolRustc), "crossbeam_loom", None), - // FIXME: Used by proc-macro2, but we should not be triggering on external dependencies. - (Some(Mode::Rustc), "span_locations", None), - (Some(Mode::ToolRustc), "span_locations", None), - // FIXME: Used by rustix, but we should not be triggering on external dependencies. - (Some(Mode::Rustc), "rustix_use_libc", None), - (Some(Mode::ToolRustc), "rustix_use_libc", None), - // FIXME: Used by filetime, but we should not be triggering on external dependencies. - (Some(Mode::Rustc), "emulate_second_only_system", None), - (Some(Mode::ToolRustc), "emulate_second_only_system", None), - // Needed to avoid the need to copy windows.lib into the sysroot. - (Some(Mode::Rustc), "windows_raw_dylib", None), - (Some(Mode::ToolRustc), "windows_raw_dylib", None), -]; - -/// A structure representing a Rust compiler. -/// -/// Each compiler has a `stage` that it is associated with and a `host` that -/// corresponds to the platform the compiler runs on. This structure is used as -/// a parameter to many methods below. -#[derive(Eq, PartialOrd, Ord, PartialEq, Clone, Copy, Hash, Debug)] -pub struct Compiler { - stage: u32, - host: TargetSelection, -} - -#[derive(PartialEq, Eq, Copy, Clone, Debug)] -pub enum DocTests { - /// Run normal tests and doc tests (default). - Yes, - /// Do not run any doc tests. - No, - /// Only run doc tests. - Only, -} - -pub enum GitRepo { - Rustc, - Llvm, -} - -/// Global configuration for the build system. -/// -/// This structure transitively contains all configuration for the build system. -/// All filesystem-encoded configuration is in `config`, all flags are in -/// `flags`, and then parsed or probed information is listed in the keys below. 
-/// - /// This structure is a parameter of almost all methods in the build system, - /// although most functions are implemented as free functions rather than - /// methods specifically on this structure itself (to make it easier to - /// organize). - #[derive(Clone)] - pub struct Build { - /// User-specified configuration from `config.toml`. - config: Config, - - // Version information - version: String, - - // Properties derived from the above configuration - src: PathBuf, - out: PathBuf, - bootstrap_out: PathBuf, - cargo_info: channel::GitInfo, - rust_analyzer_info: channel::GitInfo, - clippy_info: channel::GitInfo, - miri_info: channel::GitInfo, - rustfmt_info: channel::GitInfo, - in_tree_llvm_info: channel::GitInfo, - local_rebuild: bool, - fail_fast: bool, - doc_tests: DocTests, - verbosity: usize, - - // Targets for which to build - build: TargetSelection, - hosts: Vec<TargetSelection>, - targets: Vec<TargetSelection>, - - initial_rustc: PathBuf, - initial_cargo: PathBuf, - initial_lld: PathBuf, - initial_libdir: PathBuf, - initial_sysroot: PathBuf, - - // Runtime state filled in later on - // C/C++ compilers and archiver for all targets - cc: RefCell<HashMap<TargetSelection, cc::Tool>>, - cxx: RefCell<HashMap<TargetSelection, cc::Tool>>, - ar: RefCell<HashMap<TargetSelection, PathBuf>>, - ranlib: RefCell<HashMap<TargetSelection, PathBuf>>, - // Miscellaneous - // allow bidirectional lookups: both name -> path and path -> name - crates: HashMap<Interned<String>, Crate>, - crate_paths: HashMap<PathBuf, Interned<String>>, - is_sudo: bool, - ci_env: CiEnv, - delayed_failures: RefCell<Vec<String>>, - prerelease_version: Cell<Option<u32>>, - - #[cfg(feature = "build-metrics")] - metrics: metrics::BuildMetrics, -} - -#[derive(Debug, Clone)] -struct Crate { - name: Interned<String>, - deps: HashSet<Interned<String>>, - path: PathBuf, - has_lib: bool, -} - -impl Crate { - fn local_path(&self, build: &Build) -> PathBuf { - self.path.strip_prefix(&build.config.src).unwrap().into() - } -} - -/// When building Rust various objects are handled differently. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] -pub enum DependencyType { - /// Libraries originating from proc-macros. - Host, - /// Typical Rust libraries. - Target, - /// Non Rust libraries and objects shipped to ease usage of certain targets. - TargetSelfContained, -} - -/// The various "modes" of invoking Cargo. - /// - /// These entries currently correspond to the various output directories of the - /// build system, with each mod generating output in a different directory. - #[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] - pub enum Mode { - /// Build the standard library, placing output in the "stageN-std" directory. - Std, - - /// Build librustc, and compiler libraries, placing output in the "stageN-rustc" directory. - Rustc, - - /// Build a codegen backend for rustc, placing the output in the "stageN-codegen" directory. - Codegen, - - /// Build a tool, placing output in the "stage0-bootstrap-tools" - /// directory. This is for miscellaneous sets of tools that are built - /// using the bootstrap stage0 compiler in its entirety (target libraries - /// and all). Typically these tools compile with stable Rust. - ToolBootstrap, - - /// Build a tool which uses the locally built std, placing output in the - /// "stageN-tools" directory. Its usage is quite rare, mainly used by - /// compiletest which needs libtest. - ToolStd, - - /// Build a tool which uses the locally built rustc and the target std, - /// placing the output in the "stageN-tools" directory. This is used for - /// anything that needs a fully functional rustc, such as rustdoc, clippy, - /// cargo, rls, rustfmt, miri, etc.
- ToolRustc, -} - -impl Mode { - pub fn is_tool(&self) -> bool { - matches!(self, Mode::ToolBootstrap | Mode::ToolRustc | Mode::ToolStd) - } - - pub fn must_support_dlopen(&self) -> bool { - matches!(self, Mode::Std | Mode::Codegen) - } -} - -pub enum CLang { - C, - Cxx, -} - -macro_rules! forward { - ( $( $fn:ident( $($param:ident: $ty:ty),* ) $( -> $ret:ty)? ),+ $(,)? ) => { - impl Build { - $( fn $fn(&self, $($param: $ty),* ) $( -> $ret)? { - self.config.$fn( $($param),* ) - } )+ - } - } -} - -forward! { - verbose(msg: &str), - is_verbose() -> bool, - create(path: &Path, s: &str), - remove(f: &Path), - tempdir() -> PathBuf, - llvm_link_shared() -> bool, - download_rustc() -> bool, - initial_rustfmt() -> Option, -} - -impl Build { - /// Creates a new set of build configuration from the `flags` on the command - /// line and the filesystem `config`. - /// - /// By default all build output will be placed in the current directory. - pub fn new(mut config: Config) -> Build { - let src = config.src.clone(); - let out = config.out.clone(); - - #[cfg(unix)] - // keep this consistent with the equivalent check in x.py: - // https://github.com/rust-lang/rust/blob/a8a33cf27166d3eabaffc58ed3799e054af3b0c6/src/bootstrap/bootstrap.py#L796-L797 - let is_sudo = match env::var_os("SUDO_USER") { - Some(_sudo_user) => { - let uid = unsafe { libc::getuid() }; - uid == 0 - } - None => false, - }; - #[cfg(not(unix))] - let is_sudo = false; - - let omit_git_hash = config.omit_git_hash; - let rust_info = channel::GitInfo::new(omit_git_hash, &src); - let cargo_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/cargo")); - let rust_analyzer_info = - channel::GitInfo::new(omit_git_hash, &src.join("src/tools/rust-analyzer")); - let clippy_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/clippy")); - let miri_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/miri")); - let rustfmt_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/rustfmt")); - - // we always try to use git for LLVM builds - let in_tree_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-project")); - - let initial_target_libdir_str = if config.dry_run() { - "/dummy/lib/path/to/lib/".to_string() - } else { - output( - Command::new(&config.initial_rustc) - .arg("--target") - .arg(config.build.rustc_target_arg()) - .arg("--print") - .arg("target-libdir"), - ) - }; - let initial_target_dir = Path::new(&initial_target_libdir_str).parent().unwrap(); - let initial_lld = initial_target_dir.join("bin").join("rust-lld"); - - let initial_sysroot = if config.dry_run() { - "/dummy".to_string() - } else { - output(Command::new(&config.initial_rustc).arg("--print").arg("sysroot")) - } - .trim() - .to_string(); - - let initial_libdir = initial_target_dir - .parent() - .unwrap() - .parent() - .unwrap() - .strip_prefix(&initial_sysroot) - .unwrap() - .to_path_buf(); - - let version = std::fs::read_to_string(src.join("src").join("version")) - .expect("failed to read src/version"); - let version = version.trim(); - - let bootstrap_out = std::env::current_exe() - .expect("could not determine path to running process") - .parent() - .unwrap() - .to_path_buf(); - if !bootstrap_out.join(exe("rustc", config.build)).exists() && !cfg!(test) { - // this restriction can be lifted whenever https://github.com/rust-lang/rfcs/pull/3028 is implemented - panic!( - "`rustc` not found in {}, run `cargo build --bins` before `cargo run`", - bootstrap_out.display() - ) - } - - if rust_info.is_from_tarball() && 
config.description.is_none() { - config.description = Some("built from a source tarball".to_owned()); - } - - let mut build = Build { - initial_rustc: config.initial_rustc.clone(), - initial_cargo: config.initial_cargo.clone(), - initial_lld, - initial_libdir, - initial_sysroot: initial_sysroot.into(), - local_rebuild: config.local_rebuild, - fail_fast: config.cmd.fail_fast(), - doc_tests: config.cmd.doc_tests(), - verbosity: config.verbose, - - build: config.build, - hosts: config.hosts.clone(), - targets: config.targets.clone(), - - config, - version: version.to_string(), - src, - out, - bootstrap_out, - - cargo_info, - rust_analyzer_info, - clippy_info, - miri_info, - rustfmt_info, - in_tree_llvm_info, - cc: RefCell::new(HashMap::new()), - cxx: RefCell::new(HashMap::new()), - ar: RefCell::new(HashMap::new()), - ranlib: RefCell::new(HashMap::new()), - crates: HashMap::new(), - crate_paths: HashMap::new(), - is_sudo, - ci_env: CiEnv::current(), - delayed_failures: RefCell::new(Vec::new()), - prerelease_version: Cell::new(None), - - #[cfg(feature = "build-metrics")] - metrics: metrics::BuildMetrics::init(), - }; - - // If local-rust is the same major.minor as the current version, then force a - // local-rebuild - let local_version_verbose = - output(Command::new(&build.initial_rustc).arg("--version").arg("--verbose")); - let local_release = local_version_verbose - .lines() - .filter_map(|x| x.strip_prefix("release:")) - .next() - .unwrap() - .trim(); - if local_release.split('.').take(2).eq(version.split('.').take(2)) { - build.verbose(&format!("auto-detected local-rebuild {local_release}")); - build.local_rebuild = true; - } - - build.verbose("finding compilers"); - cc_detect::find(&build); - // When running `setup`, the profile is about to change, so any requirements we have now may - // be different on the next invocation. Don't check for them until the next time x.py is - // run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing. - // - // Similarly, for `setup` we don't actually need submodules or cargo metadata. - if !matches!(build.config.cmd, Subcommand::Setup { .. }) { - build.verbose("running sanity check"); - sanity::check(&mut build); - - // Make sure we update these before gathering metadata so we don't get an error about missing - // Cargo.toml files. - let rust_submodules = ["src/tools/cargo", "library/backtrace", "library/stdarch"]; - for s in rust_submodules { - build.update_submodule(Path::new(s)); - } - // Now, update all existing submodules. - build.update_existing_submodules(); - - build.verbose("learning about cargo"); - metadata::build(&mut build); - } - - // Make a symbolic link so we can use a consistent directory in the documentation. - let build_triple = build.out.join(&build.build.triple); - t!(fs::create_dir_all(&build_triple)); - let host = build.out.join("host"); - if host.is_symlink() { - // Left over from a previous build; overwrite it. - // This matters if `build.build` has changed between invocations. - #[cfg(windows)] - t!(fs::remove_dir(&host)); - #[cfg(not(windows))] - t!(fs::remove_file(&host)); - } - t!( - symlink_dir(&build.config, &build_triple, &host), - format!("symlink_dir({} => {}) failed", host.display(), build_triple.display()) - ); - - build - } - - // modified from `check_submodule` and `update_submodule` in bootstrap.py - /// Given a path to the directory of a submodule, update it. - /// - /// `relative_path` should be relative to the root of the git repository, not an absolute path. 
- pub(crate) fn update_submodule(&self, relative_path: &Path) { - if !self.config.submodules(&self.rust_info()) { - return; - } - - let absolute_path = self.config.src.join(relative_path); - - // NOTE: The check for the empty directory is here because when running x.py the first time, - // the submodule won't be checked out. Check it out now so we can build it. - if !channel::GitInfo::new(false, &absolute_path).is_managed_git_subrepository() - && !dir_is_empty(&absolute_path) - { - return; - } - - // check_submodule - let checked_out_hash = - output(Command::new("git").args(&["rev-parse", "HEAD"]).current_dir(&absolute_path)); - // update_submodules - let recorded = output( - Command::new("git") - .args(&["ls-tree", "HEAD"]) - .arg(relative_path) - .current_dir(&self.config.src), - ); - let actual_hash = recorded - .split_whitespace() - .nth(2) - .unwrap_or_else(|| panic!("unexpected output `{}`", recorded)); - - // update_submodule - if actual_hash == checked_out_hash.trim_end() { - // already checked out - return; - } - - println!("Updating submodule {}", relative_path.display()); - self.run( - Command::new("git") - .args(&["submodule", "-q", "sync"]) - .arg(relative_path) - .current_dir(&self.config.src), - ); - - // Try passing `--progress` to start, then run git again without if that fails. - let update = |progress: bool| { - // Git is buggy and will try to fetch submodules from the tracking branch for *this* repository, - // even though that has no relation to the upstream for the submodule. - let current_branch = { - let output = self - .config - .git() - .args(["symbolic-ref", "--short", "HEAD"]) - .stderr(Stdio::inherit()) - .output(); - let output = t!(output); - if output.status.success() { - Some(String::from_utf8(output.stdout).unwrap().trim().to_owned()) - } else { - None - } - }; - - let mut git = self.config.git(); - if let Some(branch) = current_branch { - // If there is a tag named after the current branch, git will try to disambiguate by prepending `heads/` to the branch name. - // This syntax isn't accepted by `branch.{branch}`. Strip it. - let branch = branch.strip_prefix("heads/").unwrap_or(&branch); - git.arg("-c").arg(format!("branch.{branch}.remote=origin")); - } - git.args(&["submodule", "update", "--init", "--recursive", "--depth=1"]); - if progress { - git.arg("--progress"); - } - git.arg(relative_path); - git - }; - // NOTE: doesn't use `try_run` because this shouldn't print an error if it fails. - if !update(true).status().map_or(false, |status| status.success()) { - self.run(&mut update(false)); - } - - // Save any local changes, but avoid running `git stash pop` if there are none (since it will exit with an error). - #[allow(deprecated)] // diff-index reports the modifications through the exit status - let has_local_modifications = self - .config - .try_run( - Command::new("git") - .args(&["diff-index", "--quiet", "HEAD"]) - .current_dir(&absolute_path), - ) - .is_err(); - if has_local_modifications { - self.run(Command::new("git").args(&["stash", "push"]).current_dir(&absolute_path)); - } - - self.run(Command::new("git").args(&["reset", "-q", "--hard"]).current_dir(&absolute_path)); - self.run(Command::new("git").args(&["clean", "-qdfx"]).current_dir(&absolute_path)); - - if has_local_modifications { - self.run(Command::new("git").args(&["stash", "pop"]).current_dir(absolute_path)); - } - } - - /// If any submodule has been initialized already, sync it unconditionally. - /// This avoids contributors checking in a submodule change by accident. 
- pub fn update_existing_submodules(&self) { - // Avoid running git when there isn't a git checkout. - if !self.config.submodules(&self.rust_info()) { - return; - } - let output = output( - self.config - .git() - .args(&["config", "--file"]) - .arg(&self.config.src.join(".gitmodules")) - .args(&["--get-regexp", "path"]), - ); - for line in output.lines() { - // Look for `submodule.$name.path = $path` - // Sample output: `submodule.src/rust-installer.path src/tools/rust-installer` - let submodule = Path::new(line.splitn(2, ' ').nth(1).unwrap()); - // Don't update the submodule unless it's already been cloned. - if channel::GitInfo::new(false, submodule).is_managed_git_subrepository() { - self.update_submodule(submodule); - } - } - } - - /// Executes the entire build, as configured by the flags and configuration. - pub fn build(&mut self) { - unsafe { - job::setup(self); - } - - // Download rustfmt early so that it can be used in rust-analyzer configs. - let _ = &builder::Builder::new(&self).initial_rustfmt(); - - // hardcoded subcommands - match &self.config.cmd { - Subcommand::Format { check } => { - return format::format(&builder::Builder::new(&self), *check, &self.config.paths); - } - Subcommand::Suggest { run } => { - return suggest::suggest(&builder::Builder::new(&self), *run); - } - _ => (), - } - - { - let builder = builder::Builder::new(&self); - if let Some(path) = builder.paths.get(0) { - if path == Path::new("nonexistent/path/to/trigger/cargo/metadata") { - return; - } - } - } - - if !self.config.dry_run() { - { - self.config.dry_run = DryRun::SelfCheck; - let builder = builder::Builder::new(&self); - builder.execute_cli(); - } - self.config.dry_run = DryRun::Disabled; - let builder = builder::Builder::new(&self); - builder.execute_cli(); - } else { - let builder = builder::Builder::new(&self); - builder.execute_cli(); - } - - // Check for postponed failures from `test --no-fail-fast`. - let failures = self.delayed_failures.borrow(); - if failures.len() > 0 { - eprintln!("\n{} command(s) did not execute successfully:\n", failures.len()); - for failure in failures.iter() { - eprintln!(" - {failure}\n"); - } - exit!(1); - } - - #[cfg(feature = "build-metrics")] - self.metrics.persist(self); - } - - /// Clear out `dir` if `input` is newer. - /// - /// After this executes, it will also ensure that `dir` exists. - fn clear_if_dirty(&self, dir: &Path, input: &Path) -> bool { - let stamp = dir.join(".stamp"); - let mut cleared = false; - if mtime(&stamp) < mtime(input) { - self.verbose(&format!("Dirty - {}", dir.display())); - let _ = fs::remove_dir_all(dir); - cleared = true; - } else if stamp.exists() { - return cleared; - } - t!(fs::create_dir_all(dir)); - t!(File::create(stamp)); - cleared - } - - fn rust_info(&self) -> &GitInfo { - &self.config.rust_info - } - - /// Gets the space-separated set of activated features for the standard - /// library. - fn std_features(&self, target: TargetSelection) -> String { - let mut features = " panic-unwind".to_string(); - - match self.config.llvm_libunwind(target) { - LlvmLibunwind::InTree => features.push_str(" llvm-libunwind"), - LlvmLibunwind::System => features.push_str(" system-llvm-libunwind"), - LlvmLibunwind::No => {} - } - if self.config.backtrace { - features.push_str(" backtrace"); - } - if self.config.profiler_enabled(target) { - features.push_str(" profiler"); - } - features - } - - /// Gets the space-separated set of activated features for the compiler. 
- fn rustc_features(&self, kind: Kind) -> String { - let mut features = vec![]; - if self.config.jemalloc { - features.push("jemalloc"); - } - if self.config.llvm_enabled() || kind == Kind::Check { - features.push("llvm"); - } - // keep in sync with `bootstrap/compile.rs:rustc_cargo_env` - if self.config.rustc_parallel { - features.push("rustc_use_parallel_compiler"); - } - - // If debug logging is on, then we want the default for tracing: - // https://github.com/tokio-rs/tracing/blob/3dd5c03d907afdf2c39444a29931833335171554/tracing/src/level_filters.rs#L26 - // which is everything (including debug/trace/etc.) - // if its unset, if debug_assertions is on, then debug_logging will also be on - // as well as tracing *ignoring* this feature when debug_assertions is on - if !self.config.rust_debug_logging { - features.push("max_level_info"); - } - - features.join(" ") - } - - /// Component directory that Cargo will produce output into (e.g. - /// release/debug) - fn cargo_dir(&self) -> &'static str { - if self.config.rust_optimize.is_release() { "release" } else { "debug" } - } - - fn tools_dir(&self, compiler: Compiler) -> PathBuf { - let out = self - .out - .join(&*compiler.host.triple) - .join(format!("stage{}-tools-bin", compiler.stage)); - t!(fs::create_dir_all(&out)); - out - } - - /// Returns the root directory for all output generated in a particular - /// stage when running with a particular host compiler. - /// - /// The mode indicates what the root directory is for. - fn stage_out(&self, compiler: Compiler, mode: Mode) -> PathBuf { - let suffix = match mode { - Mode::Std => "-std", - Mode::Rustc => "-rustc", - Mode::Codegen => "-codegen", - Mode::ToolBootstrap => "-bootstrap-tools", - Mode::ToolStd | Mode::ToolRustc => "-tools", - }; - self.out.join(&*compiler.host.triple).join(format!("stage{}{}", compiler.stage, suffix)) - } - - /// Returns the root output directory for all Cargo output in a given stage, - /// running a particular compiler, whether or not we're building the - /// standard library, and targeting the specified architecture. - fn cargo_out(&self, compiler: Compiler, mode: Mode, target: TargetSelection) -> PathBuf { - self.stage_out(compiler, mode).join(&*target.triple).join(self.cargo_dir()) - } - - /// Root output directory for LLVM compiled for `target` - /// - /// Note that if LLVM is configured externally then the directory returned - /// will likely be empty. 
- fn llvm_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("llvm") - } - - fn lld_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("lld") - } - - /// Output directory for all documentation for a target - fn doc_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("doc") - } - - /// Output directory for all JSON-formatted documentation for a target - fn json_doc_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("json-doc") - } - - fn test_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("test") - } - - /// Output directory for all documentation for a target - fn compiler_doc_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("compiler-doc") - } - - /// Output directory for some generated md crate documentation for a target (temporary) - fn md_doc_out(&self, target: TargetSelection) -> Interned { - INTERNER.intern_path(self.out.join(&*target.triple).join("md-doc")) - } - - /// Returns `true` if no custom `llvm-config` is set for the specified target. - /// - /// If no custom `llvm-config` was specified then Rust's llvm will be used. - fn is_rust_llvm(&self, target: TargetSelection) -> bool { - match self.config.target_config.get(&target) { - Some(Target { llvm_has_rust_patches: Some(patched), .. }) => *patched, - Some(Target { llvm_config, .. }) => { - // If the user set llvm-config we assume Rust is not patched, - // but first check to see if it was configured by llvm-from-ci. - (self.config.llvm_from_ci && target == self.config.build) || llvm_config.is_none() - } - None => true, - } - } - - /// Returns the path to `FileCheck` binary for the specified target - fn llvm_filecheck(&self, target: TargetSelection) -> PathBuf { - let target_config = self.config.target_config.get(&target); - if let Some(s) = target_config.and_then(|c| c.llvm_filecheck.as_ref()) { - s.to_path_buf() - } else if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { - let llvm_bindir = output(Command::new(s).arg("--bindir")); - let filecheck = Path::new(llvm_bindir.trim()).join(exe("FileCheck", target)); - if filecheck.exists() { - filecheck - } else { - // On Fedora the system LLVM installs FileCheck in the - // llvm subdirectory of the libdir. - let llvm_libdir = output(Command::new(s).arg("--libdir")); - let lib_filecheck = - Path::new(llvm_libdir.trim()).join("llvm").join(exe("FileCheck", target)); - if lib_filecheck.exists() { - lib_filecheck - } else { - // Return the most normal file name, even though - // it doesn't exist, so that any error message - // refers to that. - filecheck - } - } - } else { - let base = self.llvm_out(target).join("build"); - let base = if !self.ninja() && target.contains("msvc") { - if self.config.llvm_optimize { - if self.config.llvm_release_debuginfo { - base.join("RelWithDebInfo") - } else { - base.join("Release") - } - } else { - base.join("Debug") - } - } else { - base - }; - base.join("bin").join(exe("FileCheck", target)) - } - } - - /// Directory for libraries built from C/C++ code and shared between stages. 
- fn native_dir(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("native") - } - - /// Root output directory for rust_test_helpers library compiled for - /// `target` - fn test_helpers_out(&self, target: TargetSelection) -> PathBuf { - self.native_dir(target).join("rust-test-helpers") - } - - /// Adds the `RUST_TEST_THREADS` env var if necessary - fn add_rust_test_threads(&self, cmd: &mut Command) { - if env::var_os("RUST_TEST_THREADS").is_none() { - cmd.env("RUST_TEST_THREADS", self.jobs().to_string()); - } - } - - /// Returns the libdir of the snapshot compiler. - fn rustc_snapshot_libdir(&self) -> PathBuf { - self.rustc_snapshot_sysroot().join(libdir(self.config.build)) - } - - /// Returns the sysroot of the snapshot compiler. - fn rustc_snapshot_sysroot(&self) -> &Path { - static SYSROOT_CACHE: OnceCell = once_cell::sync::OnceCell::new(); - SYSROOT_CACHE.get_or_init(|| { - let mut rustc = Command::new(&self.initial_rustc); - rustc.args(&["--print", "sysroot"]); - output(&mut rustc).trim().into() - }) - } - - /// Runs a command, printing out nice contextual information if it fails. - fn run(&self, cmd: &mut Command) { - if self.config.dry_run() { - return; - } - self.verbose(&format!("running: {cmd:?}")); - run(cmd, self.is_verbose()) - } - - /// Runs a command, printing out nice contextual information if it fails. - fn run_quiet(&self, cmd: &mut Command) { - if self.config.dry_run() { - return; - } - self.verbose(&format!("running: {cmd:?}")); - run_suppressed(cmd) - } - - /// Runs a command, printing out nice contextual information if it fails. - /// Exits if the command failed to execute at all, otherwise returns its - /// `status.success()`. - fn run_quiet_delaying_failure(&self, cmd: &mut Command) -> bool { - if self.config.dry_run() { - return true; - } - if !self.fail_fast { - self.verbose(&format!("running: {cmd:?}")); - if !try_run_suppressed(cmd) { - let mut failures = self.delayed_failures.borrow_mut(); - failures.push(format!("{cmd:?}")); - return false; - } - } else { - self.run_quiet(cmd); - } - true - } - - /// Runs a command, printing out contextual info if it fails, and delaying errors until the build finishes. - pub(crate) fn run_delaying_failure(&self, cmd: &mut Command) -> bool { - if !self.fail_fast { - #[allow(deprecated)] // can't use Build::try_run, that's us - if self.config.try_run(cmd).is_err() { - let mut failures = self.delayed_failures.borrow_mut(); - failures.push(format!("{cmd:?}")); - return false; - } - } else { - self.run(cmd); - } - true - } - - pub fn is_verbose_than(&self, level: usize) -> bool { - self.verbosity > level - } - - /// Prints a message if this build is configured in more verbose mode than `level`. 
- fn verbose_than(&self, level: usize, msg: &str) { - if self.is_verbose_than(level) { - println!("{msg}"); - } - } - - fn info(&self, msg: &str) { - match self.config.dry_run { - DryRun::SelfCheck => (), - DryRun::Disabled | DryRun::UserSelected => { - println!("{msg}"); - } - } - } - - #[must_use = "Groups should not be dropped until the Step finishes running"] - #[track_caller] - fn msg_check( - &self, - what: impl Display, - target: impl Into<Option<TargetSelection>>, - ) -> Option<gha::Group> { - self.msg(Kind::Check, self.config.stage, what, self.config.build, target) - } - - #[must_use = "Groups should not be dropped until the Step finishes running"] - #[track_caller] - fn msg_doc( - &self, - compiler: Compiler, - what: impl Display, - target: impl Into<Option<TargetSelection>> + Copy, - ) -> Option<gha::Group> { - self.msg(Kind::Doc, compiler.stage, what, compiler.host, target.into()) - } - - #[must_use = "Groups should not be dropped until the Step finishes running"] - #[track_caller] - fn msg_build( - &self, - compiler: Compiler, - what: impl Display, - target: impl Into<Option<TargetSelection>>, - ) -> Option<gha::Group> { - self.msg(Kind::Build, compiler.stage, what, compiler.host, target) - } - - /// Return a `Group` guard for a [`Step`] that is built for each `--stage`. - /// - /// [`Step`]: crate::builder::Step - #[must_use = "Groups should not be dropped until the Step finishes running"] - #[track_caller] - fn msg( - &self, - action: impl Into<Kind>, - stage: u32, - what: impl Display, - host: impl Into<Option<TargetSelection>>, - target: impl Into<Option<TargetSelection>>, - ) -> Option<gha::Group> { - let action = action.into().description(); - let msg = |fmt| format!("{action} stage{stage} {what}{fmt}"); - let msg = if let Some(target) = target.into() { - let host = host.into().unwrap(); - if host == target { - msg(format_args!(" ({target})")) - } else { - msg(format_args!(" ({host} -> {target})")) - } - } else { - msg(format_args!("")) - }; - self.group(&msg) - } - - /// Return a `Group` guard for a [`Step`] that is only built once and isn't affected by `--stage`. - /// - /// [`Step`]: crate::builder::Step - #[must_use = "Groups should not be dropped until the Step finishes running"] - #[track_caller] - fn msg_unstaged( - &self, - action: impl Into<Kind>, - what: impl Display, - target: TargetSelection, - ) -> Option<gha::Group> { - let action = action.into().description(); - let msg = format!("{action} {what} for {target}"); - self.group(&msg) - } - - #[must_use = "Groups should not be dropped until the Step finishes running"] - #[track_caller] - fn msg_sysroot_tool( - &self, - action: impl Into<Kind>, - stage: u32, - what: impl Display, - host: TargetSelection, - target: TargetSelection, - ) -> Option<gha::Group> { - let action = action.into().description(); - let msg = |fmt| format!("{action} {what} {fmt}"); - let msg = if host == target { - msg(format_args!("(stage{stage} -> stage{}, {target})", stage + 1)) - } else { - msg(format_args!("(stage{stage}:{host} -> stage{}:{target})", stage + 1)) - }; - self.group(&msg) - } - - #[track_caller] - fn group(&self, msg: &str) -> Option<gha::Group> { - match self.config.dry_run { - DryRun::SelfCheck => None, - DryRun::Disabled | DryRun::UserSelected => Some(gha::group(&msg)), - } - } - - /// Returns the number of parallel jobs that have been configured for this - /// build.
- fn jobs(&self) -> u32 { - self.config.jobs.unwrap_or_else(|| { - std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32 - }) - } - - fn debuginfo_map_to(&self, which: GitRepo) -> Option<String> { - if !self.config.rust_remap_debuginfo { - return None; - } - - match which { - GitRepo::Rustc => { - let sha = self.rust_sha().unwrap_or(&self.version); - Some(format!("/rustc/{sha}")) - } - GitRepo::Llvm => Some(String::from("/rustc/llvm")), - } - } - - /// Returns the path to the C compiler for the target specified. - fn cc(&self, target: TargetSelection) -> PathBuf { - if self.config.dry_run() { - return PathBuf::new(); - } - self.cc.borrow()[&target].path().into() - } - - /// Returns a list of flags to pass to the C compiler for the target - /// specified. - fn cflags(&self, target: TargetSelection, which: GitRepo, c: CLang) -> Vec<String> { - if self.config.dry_run() { - return Vec::new(); - } - let base = match c { - CLang::C => self.cc.borrow()[&target].clone(), - CLang::Cxx => self.cxx.borrow()[&target].clone(), - }; - - // Filter out -O and /O (the optimization flags) that we picked up from - // cc-rs because the build scripts will determine that for themselves. - let mut base = base - .args() - .iter() - .map(|s| s.to_string_lossy().into_owned()) - .filter(|s| !s.starts_with("-O") && !s.starts_with("/O")) - .collect::<Vec<String>>(); - - // If we're compiling on macOS then we add a few unconditional flags - // indicating that we want libc++ (more filled out than libstdc++) and - // we want to compile for 10.7. This way we can ensure that - // LLVM/etc are all properly compiled. - if target.contains("apple-darwin") { - base.push("-stdlib=libc++".into()); - } - - // Work around an apparently bad MinGW / GCC optimization, - // See: https://lists.llvm.org/pipermail/cfe-dev/2016-December/051980.html - // See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78936 - if &*target.triple == "i686-pc-windows-gnu" { - base.push("-fno-omit-frame-pointer".into()); - } - - if let Some(map_to) = self.debuginfo_map_to(which) { - let map = format!("{}={}", self.src.display(), map_to); - let cc = self.cc(target); - if cc.ends_with("clang") || cc.ends_with("gcc") { - base.push(format!("-fdebug-prefix-map={map}")); - } else if cc.ends_with("clang-cl.exe") { - base.push("-Xclang".into()); - base.push(format!("-fdebug-prefix-map={map}")); - } - } - base - } - - /// Returns the path to the `ar` archive utility for the target specified. - fn ar(&self, target: TargetSelection) -> Option<PathBuf> { - if self.config.dry_run() { - return None; - } - self.ar.borrow().get(&target).cloned() - } - - /// Returns the path to the `ranlib` utility for the target specified. - fn ranlib(&self, target: TargetSelection) -> Option<PathBuf> { - if self.config.dry_run() { - return None; - } - self.ranlib.borrow().get(&target).cloned() - } - - /// Returns the path to the C++ compiler for the target specified. - fn cxx(&self, target: TargetSelection) -> Result<PathBuf, String> { - if self.config.dry_run() { - return Ok(PathBuf::new()); - } - match self.cxx.borrow().get(&target) { - Some(p) => Ok(p.path().into()), - None => Err(format!("target `{target}` is not configured as a host, only as a target")), - } - } - - /// Returns the path to the linker for the given target if it needs to be overridden.
- fn linker(&self, target: TargetSelection) -> Option<PathBuf> { - if self.config.dry_run() { - return Some(PathBuf::new()); - } - if let Some(linker) = self.config.target_config.get(&target).and_then(|c| c.linker.clone()) - { - Some(linker) - } else if target.contains("vxworks") { - // need to use CXX compiler as linker to resolve the exception functions - // that only exist in CXX libraries - Some(self.cxx.borrow()[&target].path().into()) - } else if target != self.config.build - && util::use_host_linker(target) - && !target.contains("msvc") - { - Some(self.cc(target)) - } else if self.config.use_lld && !self.is_fuse_ld_lld(target) && self.build == target { - Some(self.initial_lld.clone()) - } else { - None - } - } - - // LLD is used through `-fuse-ld=lld` rather than directly. - // Only MSVC targets use LLD directly at the moment. - fn is_fuse_ld_lld(&self, target: TargetSelection) -> bool { - self.config.use_lld && !target.contains("msvc") - } - - fn lld_flags(&self, target: TargetSelection) -> impl Iterator<Item = String> { - let mut options = [None, None]; - - if self.config.use_lld { - if self.is_fuse_ld_lld(target) { - options[0] = Some("-Clink-arg=-fuse-ld=lld".to_string()); - } - - let no_threads = util::lld_flag_no_threads(target.contains("windows")); - options[1] = Some(format!("-Clink-arg=-Wl,{no_threads}")); - } - - IntoIterator::into_iter(options).flatten() - } - - /// Returns if this target should statically link the C runtime, if specified - fn crt_static(&self, target: TargetSelection) -> Option<bool> { - if target.contains("pc-windows-msvc") { - Some(true) - } else { - self.config.target_config.get(&target).and_then(|t| t.crt_static) - } - } - - /// Returns the "musl root" for this `target`, if defined - fn musl_root(&self, target: TargetSelection) -> Option<&Path> { - self.config - .target_config - .get(&target) - .and_then(|t| t.musl_root.as_ref()) - .or_else(|| self.config.musl_root.as_ref()) - .map(|p| &**p) - } - - /// Returns the "musl libdir" for this `target`. - fn musl_libdir(&self, target: TargetSelection) -> Option<PathBuf> { - let t = self.config.target_config.get(&target)?; - if let libdir @ Some(_) = &t.musl_libdir { - return libdir.clone(); - } - self.musl_root(target).map(|root| root.join("lib")) - } - - /// Returns the sysroot for the wasi target, if defined - fn wasi_root(&self, target: TargetSelection) -> Option<&Path> { - self.config.target_config.get(&target).and_then(|t| t.wasi_root.as_ref()).map(|p| &**p) - } - - /// Returns `true` if this is a no-std `target`, if defined - fn no_std(&self, target: TargetSelection) -> Option<bool> { - self.config.target_config.get(&target).map(|t| t.no_std) - } - - /// Returns `true` if the target will be tested using the `remote-test-client` - /// and `remote-test-server` binaries. - fn remote_tested(&self, target: TargetSelection) -> bool { - self.qemu_rootfs(target).is_some() - || target.contains("android") - || env::var_os("TEST_DEVICE_ADDR").is_some() - } - - /// Returns the root of the "rootfs" image that this target will be using, - /// if one was configured. - /// - /// If `Some` is returned then that means that tests for this target are - /// emulated with QEMU and binaries will need to be shipped to the emulator.
- fn qemu_rootfs(&self, target: TargetSelection) -> Option<&Path> { - self.config.target_config.get(&target).and_then(|t| t.qemu_rootfs.as_ref()).map(|p| &**p) - } - - /// Path to the python interpreter to use - fn python(&self) -> &Path { - if self.config.build.ends_with("apple-darwin") { - // Force /usr/bin/python3 on macOS for LLDB tests because we're loading the - // LLDB plugin's compiled module which only works with the system python - // (namely not Homebrew-installed python) - Path::new("/usr/bin/python3") - } else { - self.config - .python - .as_ref() - .expect("python is required for running LLDB or rustdoc tests") - } - } - - /// Temporary directory that extended error information is emitted to. - fn extended_error_dir(&self) -> PathBuf { - self.out.join("tmp/extended-error-metadata") - } - - /// Tests whether the `compiler` compiling for `target` should be forced to - /// use a stage1 compiler instead. - /// - /// Currently, by default, the build system does not perform a "full - /// bootstrap" by default where we compile the compiler three times. - /// Instead, we compile the compiler two times. The final stage (stage2) - /// just copies the libraries from the previous stage, which is what this - /// method detects. - /// - /// Here we return `true` if: - /// - /// * The build isn't performing a full bootstrap - /// * The `compiler` is in the final stage, 2 - /// * We're not cross-compiling, so the artifacts are already available in - /// stage1 - /// - /// When all of these conditions are met the build will lift artifacts from - /// the previous stage forward. - fn force_use_stage1(&self, stage: u32, target: TargetSelection) -> bool { - !self.config.full_bootstrap - && !self.config.download_rustc() - && stage >= 2 - && (self.hosts.iter().any(|h| *h == target) || target == self.build) - } - - /// Checks whether the `compiler` compiling for `target` should be forced to - /// use a stage2 compiler instead. - /// - /// When we download the pre-compiled version of rustc and compiler stage is >= 2, - /// it should be forced to use a stage2 compiler. - fn force_use_stage2(&self, stage: u32) -> bool { - self.config.download_rustc() && stage >= 2 - } - - /// Given `num` in the form "a.b.c" return a "release string" which - /// describes the release version number. - /// - /// For example on nightly this returns "a.b.c-nightly", on beta it returns - /// "a.b.c-beta.1" and on stable it just returns "a.b.c". - fn release(&self, num: &str) -> String { - match &self.config.channel[..] { - "stable" => num.to_string(), - "beta" => { - if !self.config.omit_git_hash { - format!("{}-beta.{}", num, self.beta_prerelease_version()) - } else { - format!("{num}-beta") - } - } - "nightly" => format!("{num}-nightly"), - _ => format!("{num}-dev"), - } - } - - fn beta_prerelease_version(&self) -> u32 { - fn extract_beta_rev_from_file>(version_file: P) -> Option { - let version = fs::read_to_string(version_file).ok()?; - - extract_beta_rev(&version) - } - - if let Some(s) = self.prerelease_version.get() { - return s; - } - - // First check if there is a version file available. - // If available, we read the beta revision from that file. - // This only happens when building from a source tarball when Git should not be used. - let count = extract_beta_rev_from_file(self.src.join("version")).unwrap_or_else(|| { - // Figure out how many merge commits happened since we branched off master. - // That's our beta number! - // (Note that we use a `..` range, not the `...` symmetric difference.) 
- output(self.config.git().arg("rev-list").arg("--count").arg("--merges").arg(format!( - "refs/remotes/origin/{}..HEAD", - self.config.stage0_metadata.config.nightly_branch - ))) - }); - let n = count.trim().parse().unwrap(); - self.prerelease_version.set(Some(n)); - n - } - - /// Returns the value of `release` above for Rust itself. - fn rust_release(&self) -> String { - self.release(&self.version) - } - - /// Returns the "package version" for a component given the `num` release - /// number. - /// - /// The package version is typically what shows up in the names of tarballs. - /// For channels like beta/nightly it's just the channel name, otherwise - /// it's the `num` provided. - fn package_vers(&self, num: &str) -> String { - match &self.config.channel[..] { - "stable" => num.to_string(), - "beta" => "beta".to_string(), - "nightly" => "nightly".to_string(), - _ => format!("{num}-dev"), - } - } - - /// Returns the value of `package_vers` above for Rust itself. - fn rust_package_vers(&self) -> String { - self.package_vers(&self.version) - } - - /// Returns the `version` string associated with this compiler for Rust - /// itself. - /// - /// Note that this is a descriptive string which includes the commit date, - /// sha, version, etc. - fn rust_version(&self) -> String { - let mut version = self.rust_info().version(self, &self.version); - if let Some(ref s) = self.config.description { - version.push_str(" ("); - version.push_str(s); - version.push(')'); - } - version - } - - /// Returns the full commit hash. - fn rust_sha(&self) -> Option<&str> { - self.rust_info().sha() - } - - /// Returns the `a.b.c` version that the given package is at. - fn release_num(&self, package: &str) -> String { - let toml_file_name = self.src.join(&format!("src/tools/{package}/Cargo.toml")); - let toml = t!(fs::read_to_string(&toml_file_name)); - for line in toml.lines() { - if let Some(stripped) = - line.strip_prefix("version = \"").and_then(|s| s.strip_suffix("\"")) - { - return stripped.to_owned(); - } - } - - panic!("failed to find version in {package}'s Cargo.toml") - } - - /// Returns `true` if unstable features should be enabled for the compiler - /// we're building. - fn unstable_features(&self) -> bool { - match &self.config.channel[..] { - "stable" | "beta" => false, - "nightly" | _ => true, - } - } - - /// Returns a Vec of all the dependencies of the given root crate, - /// including transitive dependencies and the root itself. Only includes - /// "local" crates (those in the local source tree, not from a registry). - fn in_tree_crates(&self, root: &str, target: Option) -> Vec<&Crate> { - let mut ret = Vec::new(); - let mut list = vec![INTERNER.intern_str(root)]; - let mut visited = HashSet::new(); - while let Some(krate) = list.pop() { - let krate = self - .crates - .get(&krate) - .unwrap_or_else(|| panic!("metadata missing for {krate}: {:?}", self.crates)); - ret.push(krate); - for dep in &krate.deps { - if !self.crates.contains_key(dep) { - // Ignore non-workspace members. - continue; - } - // Don't include optional deps if their features are not - // enabled. Ideally this would be computed from `cargo - // metadata --features …`, but that is somewhat slow. In - // the future, we may want to consider just filtering all - // build and dev dependencies in metadata::build. 
- if visited.insert(dep) - && (dep != "profiler_builtins" - || target - .map(|t| self.config.profiler_enabled(t)) - .unwrap_or_else(|| self.config.any_profiler_enabled())) - && (dep != "rustc_codegen_llvm" || self.config.llvm_enabled()) - { - list.push(*dep); - } - } - } - ret.sort_unstable_by_key(|krate| krate.name); // reproducible order needed for tests - ret - } - - fn read_stamp_file(&self, stamp: &Path) -> Vec<(PathBuf, DependencyType)> { - if self.config.dry_run() { - return Vec::new(); - } - - if !stamp.exists() { - eprintln!( - "Error: Unable to find the stamp file {}, did you try to keep a nonexistent build stage?", - stamp.display() - ); - crate::exit!(1); - } - - let mut paths = Vec::new(); - let contents = t!(fs::read(stamp), &stamp); - // This is the method we use for extracting paths from the stamp file passed to us. See - // run_cargo for more information (in compile.rs). - for part in contents.split(|b| *b == 0) { - if part.is_empty() { - continue; - } - let dependency_type = match part[0] as char { - 'h' => DependencyType::Host, - 's' => DependencyType::TargetSelfContained, - 't' => DependencyType::Target, - _ => unreachable!(), - }; - let path = PathBuf::from(t!(str::from_utf8(&part[1..]))); - paths.push((path, dependency_type)); - } - paths - } - - /// Copies a file from `src` to `dst` - pub fn copy(&self, src: &Path, dst: &Path) { - self.copy_internal(src, dst, false); - } - - fn copy_internal(&self, src: &Path, dst: &Path, dereference_symlinks: bool) { - if self.config.dry_run() { - return; - } - self.verbose_than(1, &format!("Copy {src:?} to {dst:?}")); - if src == dst { - return; - } - let _ = fs::remove_file(&dst); - let metadata = t!(src.symlink_metadata()); - let mut src = src.to_path_buf(); - if metadata.file_type().is_symlink() { - if dereference_symlinks { - src = t!(fs::canonicalize(src)); - } else { - let link = t!(fs::read_link(src)); - t!(self.symlink_file(link, dst)); - return; - } - } - if let Ok(()) = fs::hard_link(&src, dst) { - // Attempt to "easy copy" by creating a hard link - // (symlinks don't work on windows), but if that fails - // just fall back to a slow `copy` operation. - } else { - if let Err(e) = fs::copy(&src, dst) { - panic!("failed to copy `{}` to `{}`: {}", src.display(), dst.display(), e) - } - t!(fs::set_permissions(dst, metadata.permissions())); - let atime = FileTime::from_last_access_time(&metadata); - let mtime = FileTime::from_last_modification_time(&metadata); - t!(filetime::set_file_times(dst, atime, mtime)); - } - } - - /// Copies the `src` directory recursively to `dst`. Both are assumed to exist - /// when this function is called. - pub fn cp_r(&self, src: &Path, dst: &Path) { - if self.config.dry_run() { - return; - } - for f in self.read_dir(src) { - let path = f.path(); - let name = path.file_name().unwrap(); - let dst = dst.join(name); - if t!(f.file_type()).is_dir() { - t!(fs::create_dir_all(&dst)); - self.cp_r(&path, &dst); - } else { - let _ = fs::remove_file(&dst); - self.copy(&path, &dst); - } - } - } - - /// Copies the `src` directory recursively to `dst`. Both are assumed to exist - /// when this function is called. Unwanted files or directories can be skipped - /// by returning `false` from the filter function. 
- pub fn cp_filtered(&self, src: &Path, dst: &Path, filter: &dyn Fn(&Path) -> bool) { - // Immediately recurse with an empty relative path - self.recurse_(src, dst, Path::new(""), filter) - } - - // Inner function does the actual work - fn recurse_(&self, src: &Path, dst: &Path, relative: &Path, filter: &dyn Fn(&Path) -> bool) { - for f in self.read_dir(src) { - let path = f.path(); - let name = path.file_name().unwrap(); - let dst = dst.join(name); - let relative = relative.join(name); - // Only copy file or directory if the filter function returns true - if filter(&relative) { - if t!(f.file_type()).is_dir() { - let _ = fs::remove_dir_all(&dst); - self.create_dir(&dst); - self.recurse_(&path, &dst, &relative, filter); - } else { - let _ = fs::remove_file(&dst); - self.copy(&path, &dst); - } - } - } - } - - fn copy_to_folder(&self, src: &Path, dest_folder: &Path) { - let file_name = src.file_name().unwrap(); - let dest = dest_folder.join(file_name); - self.copy(src, &dest); - } - - fn install(&self, src: &Path, dstdir: &Path, perms: u32) { - if self.config.dry_run() { - return; - } - let dst = dstdir.join(src.file_name().unwrap()); - self.verbose_than(1, &format!("Install {src:?} to {dst:?}")); - t!(fs::create_dir_all(dstdir)); - if !src.exists() { - panic!("Error: File \"{}\" not found!", src.display()); - } - self.copy_internal(src, &dst, true); - chmod(&dst, perms); - } - - fn read(&self, path: &Path) -> String { - if self.config.dry_run() { - return String::new(); - } - t!(fs::read_to_string(path)) - } - - fn create_dir(&self, dir: &Path) { - if self.config.dry_run() { - return; - } - t!(fs::create_dir_all(dir)) - } - - fn remove_dir(&self, dir: &Path) { - if self.config.dry_run() { - return; - } - t!(fs::remove_dir_all(dir)) - } - - fn read_dir(&self, dir: &Path) -> impl Iterator { - let iter = match fs::read_dir(dir) { - Ok(v) => v, - Err(_) if self.config.dry_run() => return vec![].into_iter(), - Err(err) => panic!("could not read dir {dir:?}: {err:?}"), - }; - iter.map(|e| t!(e)).collect::>().into_iter() - } - - fn symlink_file, Q: AsRef>(&self, src: P, link: Q) -> io::Result<()> { - #[cfg(unix)] - use std::os::unix::fs::symlink as symlink_file; - #[cfg(windows)] - use std::os::windows::fs::symlink_file; - if !self.config.dry_run() { symlink_file(src.as_ref(), link.as_ref()) } else { Ok(()) } - } - - /// Returns if config.ninja is enabled, and checks for ninja existence, - /// exiting with a nicer error message if not. - fn ninja(&self) -> bool { - let mut cmd_finder = crate::sanity::Finder::new(); - - if self.config.ninja_in_file { - // Some Linux distros rename `ninja` to `ninja-build`. - // CMake can work with either binary name. - if cmd_finder.maybe_have("ninja-build").is_none() - && cmd_finder.maybe_have("ninja").is_none() - { - eprintln!( - " -Couldn't find required command: ninja (or ninja-build) - -You should install ninja as described at -, -or set `ninja = false` in the `[llvm]` section of `config.toml`. -Alternatively, set `download-ci-llvm = true` in that `[llvm]` section -to download LLVM rather than building it. -" - ); - exit!(1); - } - } - - // If ninja isn't enabled but we're building for MSVC then we try - // doubly hard to enable it. It was realized in #43767 that the msbuild - // CMake generator for MSVC doesn't respect configuration options like - // disabling LLVM assertions, which can often be quite important! - // - // In these cases we automatically enable Ninja if we find it in the - // environment. 
- if !self.config.ninja_in_file - && self.config.build.contains("msvc") - && cmd_finder.maybe_have("ninja").is_some() - { - return true; - } - - self.config.ninja_in_file - } - - pub fn colored_stdout R>(&self, f: F) -> R { - self.colored_stream_inner(StandardStream::stdout, self.config.stdout_is_tty, f) - } - - pub fn colored_stderr R>(&self, f: F) -> R { - self.colored_stream_inner(StandardStream::stderr, self.config.stderr_is_tty, f) - } - - fn colored_stream_inner(&self, constructor: C, is_tty: bool, f: F) -> R - where - C: Fn(ColorChoice) -> StandardStream, - F: FnOnce(&mut dyn WriteColor) -> R, - { - let choice = match self.config.color { - flags::Color::Always => ColorChoice::Always, - flags::Color::Never => ColorChoice::Never, - flags::Color::Auto if !is_tty => ColorChoice::Never, - flags::Color::Auto => ColorChoice::Auto, - }; - let mut stream = constructor(choice); - let result = f(&mut stream); - stream.reset().unwrap(); - result - } -} - -/// Extract the beta revision from the full version string. -/// -/// The full version string looks like "a.b.c-beta.y". And we need to extract -/// the "y" part from the string. -pub fn extract_beta_rev(version: &str) -> Option { - let parts = version.splitn(2, "-beta.").collect::>(); - let count = parts.get(1).and_then(|s| s.find(' ').map(|p| (&s[..p]).to_string())); - - count -} - -#[cfg(unix)] -fn chmod(path: &Path, perms: u32) { - use std::os::unix::fs::*; - t!(fs::set_permissions(path, fs::Permissions::from_mode(perms))); -} -#[cfg(windows)] -fn chmod(_path: &Path, _perms: u32) {} - -impl Compiler { - pub fn with_stage(mut self, stage: u32) -> Compiler { - self.stage = stage; - self - } - - /// Returns `true` if this is a snapshot compiler for `build`'s configuration - pub fn is_snapshot(&self, build: &Build) -> bool { - self.stage == 0 && self.host == build.build - } - - /// Returns if this compiler should be treated as a final stage one in the - /// current build session. - /// This takes into account whether we're performing a full bootstrap or - /// not; don't directly compare the stage with `2`! - pub fn is_final_stage(&self, build: &Build) -> bool { - let final_stage = if build.config.full_bootstrap { 2 } else { 1 }; - self.stage >= final_stage - } -} - -fn envify(s: &str) -> String { - s.chars() - .map(|c| match c { - '-' => '_', - c => c, - }) - .flat_map(|c| c.to_uppercase()) - .collect() -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/llvm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/llvm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/llvm.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/llvm.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1357 +0,0 @@ -//! Compilation of native dependencies like LLVM. -//! -//! Native projects like LLVM unfortunately aren't suited just yet for -//! compilation in build scripts that Cargo has. This is because the -//! compilation takes a *very* long time but also because we don't want to -//! compile LLVM 3 times as part of a normal bootstrap (we want it cached). -//! -//! LLVM and compiler-rt are essentially just wired up to everything else to -//! ensure that they're always in place if needed. 
- -use std::env; -use std::env::consts::EXE_EXTENSION; -use std::ffi::{OsStr, OsString}; -use std::fs::{self, File}; -use std::io; -use std::path::{Path, PathBuf}; -use std::process::Command; - -use crate::builder::{Builder, RunConfig, ShouldRun, Step}; -use crate::channel; -use crate::config::{Config, TargetSelection}; -use crate::util::get_clang_cl_resource_dir; -use crate::util::{self, exe, output, t, up_to_date}; -use crate::{CLang, GitRepo, Kind}; - -use build_helper::ci::CiEnv; -use build_helper::git::get_git_merge_base; - -#[derive(Clone)] -pub struct LlvmResult { - /// Path to llvm-config binary. - /// NB: This is always the host llvm-config! - pub llvm_config: PathBuf, - /// Path to LLVM cmake directory for the target. - pub llvm_cmake_dir: PathBuf, -} - -pub struct Meta { - stamp: HashStamp, - res: LlvmResult, - out_dir: PathBuf, - root: String, -} - -// Linker flags to pass to LLVM's CMake invocation. -#[derive(Debug, Clone, Default)] -struct LdFlags { - // CMAKE_EXE_LINKER_FLAGS - exe: OsString, - // CMAKE_SHARED_LINKER_FLAGS - shared: OsString, - // CMAKE_MODULE_LINKER_FLAGS - module: OsString, -} - -impl LdFlags { - fn push_all(&mut self, s: impl AsRef) { - let s = s.as_ref(); - self.exe.push(" "); - self.exe.push(s); - self.shared.push(" "); - self.shared.push(s); - self.module.push(" "); - self.module.push(s); - } -} - -/// This returns whether we've already previously built LLVM. -/// -/// It's used to avoid busting caches during x.py check -- if we've already built -/// LLVM, it's fine for us to not try to avoid doing so. -/// -/// This will return the llvm-config if it can get it (but it will not build it -/// if not). -pub fn prebuilt_llvm_config( - builder: &Builder<'_>, - target: TargetSelection, -) -> Result { - builder.config.maybe_download_ci_llvm(); - - // If we're using a custom LLVM bail out here, but we can only use a - // custom LLVM for the build triple. - if let Some(config) = builder.config.target_config.get(&target) { - if let Some(ref s) = config.llvm_config { - check_llvm_version(builder, s); - let llvm_config = s.to_path_buf(); - let mut llvm_cmake_dir = llvm_config.clone(); - llvm_cmake_dir.pop(); - llvm_cmake_dir.pop(); - llvm_cmake_dir.push("lib"); - llvm_cmake_dir.push("cmake"); - llvm_cmake_dir.push("llvm"); - return Ok(LlvmResult { llvm_config, llvm_cmake_dir }); - } - } - - let root = "src/llvm-project/llvm"; - let out_dir = builder.llvm_out(target); - - let mut llvm_config_ret_dir = builder.llvm_out(builder.config.build); - if !builder.config.build.contains("msvc") || builder.ninja() { - llvm_config_ret_dir.push("build"); - } - llvm_config_ret_dir.push("bin"); - let build_llvm_config = llvm_config_ret_dir.join(exe("llvm-config", builder.config.build)); - let llvm_cmake_dir = out_dir.join("lib/cmake/llvm"); - let res = LlvmResult { llvm_config: build_llvm_config, llvm_cmake_dir }; - - let stamp = out_dir.join("llvm-finished-building"); - let stamp = HashStamp::new(stamp, builder.in_tree_llvm_info.sha()); - - if stamp.is_done() { - if stamp.hash.is_none() { - builder.info( - "Could not determine the LLVM submodule commit hash. \ - Assuming that an LLVM rebuild is not necessary.", - ); - builder.info(&format!( - "To force LLVM to rebuild, remove the file `{}`", - stamp.path.display() - )); - } - return Ok(res); - } - - Err(Meta { stamp, res, out_dir, root: root.into() }) -} - -/// This retrieves the LLVM sha we *want* to use, according to git history. 
-pub(crate) fn detect_llvm_sha(config: &Config, is_git: bool) -> String { - let llvm_sha = if is_git { - // We proceed in 2 steps. First we get the closest commit that is actually upstream. Then we - // walk back further to the last bors merge commit that actually changed LLVM. The first - // step will fail on CI because only the `auto` branch exists; we just fall back to `HEAD` - // in that case. - let closest_upstream = - get_git_merge_base(Some(&config.src)).unwrap_or_else(|_| "HEAD".into()); - let mut rev_list = config.git(); - rev_list.args(&[ - PathBuf::from("rev-list"), - format!("--author={}", config.stage0_metadata.config.git_merge_commit_email).into(), - "-n1".into(), - "--first-parent".into(), - closest_upstream.into(), - "--".into(), - config.src.join("src/llvm-project"), - config.src.join("src/bootstrap/download-ci-llvm-stamp"), - // the LLVM shared object file is named `LLVM-12-rust-{version}-nightly` - config.src.join("src/version"), - ]); - output(&mut rev_list).trim().to_owned() - } else if let Some(info) = channel::read_commit_info_file(&config.src) { - info.sha.trim().to_owned() - } else { - "".to_owned() - }; - - if llvm_sha.is_empty() { - eprintln!("error: could not find commit hash for downloading LLVM"); - eprintln!("help: maybe your repository history is too shallow?"); - eprintln!("help: consider disabling `download-ci-llvm`"); - eprintln!("help: or fetch enough history to include one upstream commit"); - panic!(); - } - - llvm_sha -} - -/// Returns whether the CI-found LLVM is currently usable. -/// -/// This checks both the build triple platform to confirm we're usable at all, -/// and then verifies if the current HEAD matches the detected LLVM SHA head, -/// in which case LLVM is indicated as not available. -pub(crate) fn is_ci_llvm_available(config: &Config, asserts: bool) -> bool { - // This is currently all tier 1 targets and tier 2 targets with host tools - // (since others may not have CI artifacts) - // https://doc.rust-lang.org/rustc/platform-support.html#tier-1 - let supported_platforms = [ - // tier 1 - ("aarch64-unknown-linux-gnu", false), - ("i686-pc-windows-gnu", false), - ("i686-pc-windows-msvc", false), - ("i686-unknown-linux-gnu", false), - ("x86_64-unknown-linux-gnu", true), - ("x86_64-apple-darwin", true), - ("x86_64-pc-windows-gnu", true), - ("x86_64-pc-windows-msvc", true), - // tier 2 with host tools - ("aarch64-apple-darwin", false), - ("aarch64-pc-windows-msvc", false), - ("aarch64-unknown-linux-musl", false), - ("arm-unknown-linux-gnueabi", false), - ("arm-unknown-linux-gnueabihf", false), - ("armv7-unknown-linux-gnueabihf", false), - ("loongarch64-unknown-linux-gnu", false), - ("mips-unknown-linux-gnu", false), - ("mips64-unknown-linux-gnuabi64", false), - ("mips64el-unknown-linux-gnuabi64", false), - ("mipsel-unknown-linux-gnu", false), - ("powerpc-unknown-linux-gnu", false), - ("powerpc64-unknown-linux-gnu", false), - ("powerpc64le-unknown-linux-gnu", false), - ("riscv64gc-unknown-linux-gnu", false), - ("s390x-unknown-linux-gnu", false), - ("x86_64-unknown-freebsd", false), - ("x86_64-unknown-illumos", false), - ("x86_64-unknown-linux-musl", false), - ("x86_64-unknown-netbsd", false), - ]; - - if !supported_platforms.contains(&(&*config.build.triple, asserts)) - && (asserts || !supported_platforms.contains(&(&*config.build.triple, true))) - { - return false; - } - - if is_ci_llvm_modified(config) { - eprintln!("Detected LLVM as non-available: running in CI and modified LLVM in this change"); - return false; - } - - true -} - -/// 
Returns true if we're running in CI with modified LLVM (and thus can't download it) -pub(crate) fn is_ci_llvm_modified(config: &Config) -> bool { - CiEnv::is_ci() && config.rust_info.is_managed_git_subrepository() && { - // We assume we have access to git, so it's okay to unconditionally pass - // `true` here. - let llvm_sha = detect_llvm_sha(config, true); - let head_sha = output(config.git().arg("rev-parse").arg("HEAD")); - let head_sha = head_sha.trim(); - llvm_sha == head_sha - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Llvm { - pub target: TargetSelection, -} - -impl Step for Llvm { - type Output = LlvmResult; - - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/llvm-project").path("src/llvm-project/llvm") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Llvm { target: run.target }); - } - - /// Compile LLVM for `target`. - fn run(self, builder: &Builder<'_>) -> LlvmResult { - let target = self.target; - let target_native = if self.target.starts_with("riscv") { - // RISC-V target triples in Rust is not named the same as C compiler target triples. - // This converts Rust RISC-V target triples to C compiler triples. - let idx = target.triple.find('-').unwrap(); - - format!("riscv{}{}", &target.triple[5..7], &target.triple[idx..]) - } else if self.target.starts_with("powerpc") && self.target.ends_with("freebsd") { - // FreeBSD 13 had incompatible ABI changes on all PowerPC platforms. - // Set the version suffix to 13.0 so the correct target details are used. - format!("{}{}", self.target, "13.0") - } else { - target.to_string() - }; - - let Meta { stamp, res, out_dir, root } = match prebuilt_llvm_config(builder, target) { - Ok(p) => return p, - Err(m) => m, - }; - - builder.update_submodule(&Path::new("src").join("llvm-project")); - if builder.llvm_link_shared() && target.contains("windows") { - panic!("shared linking to LLVM is not currently supported on {}", target.triple); - } - - let _guard = builder.msg_unstaged(Kind::Build, "LLVM", target); - t!(stamp.remove()); - let _time = util::timeit(&builder); - t!(fs::create_dir_all(&out_dir)); - - // https://llvm.org/docs/CMake.html - let mut cfg = cmake::Config::new(builder.src.join(root)); - let mut ldflags = LdFlags::default(); - - let profile = match (builder.config.llvm_optimize, builder.config.llvm_release_debuginfo) { - (false, _) => "Debug", - (true, false) => "Release", - (true, true) => "RelWithDebInfo", - }; - - // NOTE: remember to also update `config.example.toml` when changing the - // defaults! 
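A rough standalone approximation of the two-step git lookup that `detect_llvm_sha` above performs. Assumptions made purely for illustration: the upstream branch is origin/master and merge commits are authored by bors@rust-lang.org (bootstrap actually reads this address from the stage0 metadata); the path list mirrors the one above.

use std::process::Command;

fn run_git(args: &[&str]) -> Option<String> {
    let out = Command::new("git").args(args).output().ok()?;
    if !out.status.success() {
        return None;
    }
    Some(String::from_utf8_lossy(&out.stdout).trim().to_string())
}

fn detect_llvm_sha() -> Option<String> {
    // Step 1: closest commit that is actually upstream; fall back to HEAD (e.g. on CI).
    let base = run_git(&["merge-base", "HEAD", "origin/master"]).unwrap_or_else(|| "HEAD".into());

    // Step 2: newest bors merge reachable from there that touched LLVM-relevant paths.
    let sha = run_git(&[
        "rev-list",
        "--author=bors@rust-lang.org",
        "-n1",
        "--first-parent",
        base.as_str(),
        "--",
        "src/llvm-project",
        "src/bootstrap/download-ci-llvm-stamp",
        "src/version",
    ])?;
    if sha.is_empty() { None } else { Some(sha) }
}

fn main() {
    match detect_llvm_sha() {
        Some(sha) => println!("CI LLVM would be downloaded for commit {sha}"),
        None => println!("could not determine an LLVM commit"),
    }
}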
- let llvm_targets = match &builder.config.llvm_targets { - Some(s) => s, - None => { - "AArch64;ARM;BPF;Hexagon;LoongArch;MSP430;Mips;NVPTX;PowerPC;RISCV;\ - Sparc;SystemZ;WebAssembly;X86" - } - }; - - let llvm_exp_targets = match builder.config.llvm_experimental_targets { - Some(ref s) => s, - None => "AVR;M68k;CSKY", - }; - - let assertions = if builder.config.llvm_assertions { "ON" } else { "OFF" }; - let plugins = if builder.config.llvm_plugins { "ON" } else { "OFF" }; - let enable_tests = if builder.config.llvm_tests { "ON" } else { "OFF" }; - let enable_warnings = if builder.config.llvm_enable_warnings { "ON" } else { "OFF" }; - - cfg.out_dir(&out_dir) - .profile(profile) - .define("LLVM_ENABLE_ASSERTIONS", assertions) - .define("LLVM_UNREACHABLE_OPTIMIZE", "OFF") - .define("LLVM_ENABLE_PLUGINS", plugins) - .define("LLVM_TARGETS_TO_BUILD", llvm_targets) - .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets) - .define("LLVM_INCLUDE_EXAMPLES", "OFF") - .define("LLVM_INCLUDE_DOCS", "OFF") - .define("LLVM_INCLUDE_BENCHMARKS", "OFF") - .define("LLVM_INCLUDE_TESTS", enable_tests) - .define("LLVM_ENABLE_TERMINFO", "OFF") - .define("LLVM_ENABLE_LIBEDIT", "OFF") - .define("LLVM_ENABLE_BINDINGS", "OFF") - .define("LLVM_ENABLE_Z3_SOLVER", "OFF") - .define("LLVM_PARALLEL_COMPILE_JOBS", builder.jobs().to_string()) - .define("LLVM_TARGET_ARCH", target_native.split('-').next().unwrap()) - .define("LLVM_DEFAULT_TARGET_TRIPLE", target_native) - .define("LLVM_ENABLE_WARNINGS", enable_warnings); - - // Parts of our test suite rely on the `FileCheck` tool, which is built by default in - // `build/$TARGET/llvm/build/bin` is but *not* then installed to `build/$TARGET/llvm/bin`. - // This flag makes sure `FileCheck` is copied in the final binaries directory. - cfg.define("LLVM_INSTALL_UTILS", "ON"); - - if builder.config.llvm_profile_generate { - cfg.define("LLVM_BUILD_INSTRUMENTED", "IR"); - if let Ok(llvm_profile_dir) = std::env::var("LLVM_PROFILE_DIR") { - cfg.define("LLVM_PROFILE_DATA_DIR", llvm_profile_dir); - } - cfg.define("LLVM_BUILD_RUNTIME", "No"); - } - if let Some(path) = builder.config.llvm_profile_use.as_ref() { - cfg.define("LLVM_PROFDATA_FILE", &path); - } - - // Disable zstd to avoid a dependency on libzstd.so. - cfg.define("LLVM_ENABLE_ZSTD", "OFF"); - - if !target.contains("windows") { - cfg.define("LLVM_ENABLE_ZLIB", "ON"); - } else { - cfg.define("LLVM_ENABLE_ZLIB", "OFF"); - } - - // Are we compiling for iOS/tvOS/watchOS? - if target.contains("apple-ios") - || target.contains("apple-tvos") - || target.contains("apple-watchos") - { - // These two defines prevent CMake from automatically trying to add a MacOSX sysroot, which leads to a compiler error. - cfg.define("CMAKE_OSX_SYSROOT", "/"); - cfg.define("CMAKE_OSX_DEPLOYMENT_TARGET", ""); - // Prevent cmake from adding -bundle to CFLAGS automatically, which leads to a compiler error because "-bitcode_bundle" also gets added. - cfg.define("LLVM_ENABLE_PLUGINS", "OFF"); - // Zlib fails to link properly, leading to a compiler error. - cfg.define("LLVM_ENABLE_ZLIB", "OFF"); - } - - // This setting makes the LLVM tools link to the dynamic LLVM library, - // which saves both memory during parallel links and overall disk space - // for the tools. We don't do this on every platform as it doesn't work - // equally well everywhere. 
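A minimal sketch of driving CMake the way the step above does, via the `cmake` crate (usable outside a build script because target, host, profile and out_dir are set explicitly, just as the code above does). The triples, paths and the handful of defines are placeholders; the real invocation configures many more LLVM_* options.

fn main() {
    let dst = cmake::Config::new("src/llvm-project/llvm")
        .target("x86_64-unknown-linux-gnu")
        .host("x86_64-unknown-linux-gnu")
        .profile("Release") // Debug / Release / RelWithDebInfo, as selected above
        .out_dir("build/x86_64-unknown-linux-gnu/llvm")
        .define("LLVM_ENABLE_ASSERTIONS", "OFF")
        .define("LLVM_TARGETS_TO_BUILD", "X86;AArch64")
        .define("LLVM_INCLUDE_TESTS", "OFF")
        .define("LLVM_INSTALL_UTILS", "ON")
        .build();
    println!("LLVM installed to {}", dst.display());
}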
- if builder.llvm_link_shared() { - cfg.define("LLVM_LINK_LLVM_DYLIB", "ON"); - } - - if (target.starts_with("riscv") || target.starts_with("csky")) - && !target.contains("freebsd") - && !target.contains("openbsd") - && !target.contains("netbsd") - { - // RISC-V and CSKY GCC erroneously requires linking against - // `libatomic` when using 1-byte and 2-byte C++ - // atomics but the LLVM build system check cannot - // detect this. Therefore it is set manually here. - // Some BSD uses Clang as its system compiler and - // provides no libatomic in its base system so does - // not want this. - ldflags.exe.push(" -latomic"); - ldflags.shared.push(" -latomic"); - } - - if target.contains("msvc") { - cfg.define("LLVM_USE_CRT_DEBUG", "MT"); - cfg.define("LLVM_USE_CRT_RELEASE", "MT"); - cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT"); - cfg.static_crt(true); - } - - if target.starts_with("i686") { - cfg.define("LLVM_BUILD_32_BITS", "ON"); - } - - let mut enabled_llvm_projects = Vec::new(); - - if util::forcing_clang_based_tests() { - enabled_llvm_projects.push("clang"); - enabled_llvm_projects.push("compiler-rt"); - } - - if builder.config.llvm_polly { - enabled_llvm_projects.push("polly"); - } - - if builder.config.llvm_clang { - enabled_llvm_projects.push("clang"); - } - - // We want libxml to be disabled. - // See https://github.com/rust-lang/rust/pull/50104 - cfg.define("LLVM_ENABLE_LIBXML2", "OFF"); - - if !enabled_llvm_projects.is_empty() { - enabled_llvm_projects.sort(); - enabled_llvm_projects.dedup(); - cfg.define("LLVM_ENABLE_PROJECTS", enabled_llvm_projects.join(";")); - } - - if let Some(num_linkers) = builder.config.llvm_link_jobs { - if num_linkers > 0 { - cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string()); - } - } - - // https://llvm.org/docs/HowToCrossCompileLLVM.html - if target != builder.config.build { - let LlvmResult { llvm_config, .. } = - builder.ensure(Llvm { target: builder.config.build }); - if !builder.config.dry_run() { - let llvm_bindir = output(Command::new(&llvm_config).arg("--bindir")); - let host_bin = Path::new(llvm_bindir.trim()); - cfg.define( - "LLVM_TABLEGEN", - host_bin.join("llvm-tblgen").with_extension(EXE_EXTENSION), - ); - // LLVM_NM is required for cross compiling using MSVC - cfg.define("LLVM_NM", host_bin.join("llvm-nm").with_extension(EXE_EXTENSION)); - } - cfg.define("LLVM_CONFIG_PATH", llvm_config); - if builder.config.llvm_clang { - let build_bin = builder.llvm_out(builder.config.build).join("build").join("bin"); - let clang_tblgen = build_bin.join("clang-tblgen").with_extension(EXE_EXTENSION); - if !builder.config.dry_run() && !clang_tblgen.exists() { - panic!("unable to find {}", clang_tblgen.display()); - } - cfg.define("CLANG_TABLEGEN", clang_tblgen); - } - } - - let llvm_version_suffix = if let Some(ref suffix) = builder.config.llvm_version_suffix { - // Allow version-suffix="" to not define a version suffix at all. - if !suffix.is_empty() { Some(suffix.to_string()) } else { None } - } else if builder.config.channel == "dev" { - // Changes to a version suffix require a complete rebuild of the LLVM. - // To avoid rebuilds during a time of version bump, don't include rustc - // release number on the dev channel. 
- Some("-rust-dev".to_string()) - } else { - Some(format!("-rust-{}-{}", builder.version, builder.config.channel)) - }; - if let Some(ref suffix) = llvm_version_suffix { - cfg.define("LLVM_VERSION_SUFFIX", suffix); - } - - configure_cmake(builder, target, &mut cfg, true, ldflags, &[]); - configure_llvm(builder, target, &mut cfg); - - for (key, val) in &builder.config.llvm_build_config { - cfg.define(key, val); - } - - if builder.config.dry_run() { - return res; - } - - cfg.build(); - - // Helper to find the name of LLVM's shared library on darwin and linux. - let find_llvm_lib_name = |extension| { - let mut cmd = Command::new(&res.llvm_config); - let version = output(cmd.arg("--version")); - let major = version.split('.').next().unwrap(); - - match &llvm_version_suffix { - Some(version_suffix) => format!("libLLVM-{major}{version_suffix}.{extension}"), - None => format!("libLLVM-{major}.{extension}"), - } - }; - - // When building LLVM with LLVM_LINK_LLVM_DYLIB for macOS, an unversioned - // libLLVM.dylib will be built. However, llvm-config will still look - // for a versioned path like libLLVM-14.dylib. Manually create a symbolic - // link to make llvm-config happy. - if builder.llvm_link_shared() && target.contains("apple-darwin") { - let lib_name = find_llvm_lib_name("dylib"); - let lib_llvm = out_dir.join("build").join("lib").join(lib_name); - if !lib_llvm.exists() { - t!(builder.symlink_file("libLLVM.dylib", &lib_llvm)); - } - } - - // When building LLVM as a shared library on linux, it can contain unexpected debuginfo: - // some can come from the C++ standard library. Unless we're explicitly requesting LLVM to - // be built with debuginfo, strip it away after the fact, to make dist artifacts smaller. - if builder.llvm_link_shared() - && builder.config.llvm_optimize - && !builder.config.llvm_release_debuginfo - { - // Find the name of the LLVM shared library that we just built. - let lib_name = find_llvm_lib_name("so"); - - // If the shared library exists in LLVM's `/build/lib/` or `/lib/` folders, strip its - // debuginfo. - crate::compile::strip_debug(builder, target, &out_dir.join("lib").join(&lib_name)); - crate::compile::strip_debug( - builder, - target, - &out_dir.join("build").join("lib").join(&lib_name), - ); - } - - t!(stamp.write()); - - res - } -} - -fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) { - if builder.config.dry_run() { - return; - } - - let mut cmd = Command::new(llvm_config); - let version = output(cmd.arg("--version")); - let mut parts = version.split('.').take(2).filter_map(|s| s.parse::().ok()); - if let (Some(major), Some(_minor)) = (parts.next(), parts.next()) { - if major >= 15 { - return; - } - } - panic!("\n\nbad LLVM version: {version}, need >=15.0\n\n") -} - -fn configure_cmake( - builder: &Builder<'_>, - target: TargetSelection, - cfg: &mut cmake::Config, - use_compiler_launcher: bool, - mut ldflags: LdFlags, - extra_compiler_flags: &[&str], -) { - // Do not print installation messages for up-to-date files. - // LLVM and LLD builds can produce a lot of those and hit CI limits on log size. - cfg.define("CMAKE_INSTALL_MESSAGE", "LAZY"); - - // Do not allow the user's value of DESTDIR to influence where - // LLVM will install itself. LLVM must always be installed in our - // own build directories. 
- cfg.env("DESTDIR", ""); - - if builder.ninja() { - cfg.generator("Ninja"); - } - cfg.target(&target.triple).host(&builder.config.build.triple); - - if target != builder.config.build { - cfg.define("CMAKE_CROSSCOMPILING", "True"); - - if target.contains("netbsd") { - cfg.define("CMAKE_SYSTEM_NAME", "NetBSD"); - } else if target.contains("dragonfly") { - cfg.define("CMAKE_SYSTEM_NAME", "DragonFly"); - } else if target.contains("freebsd") { - cfg.define("CMAKE_SYSTEM_NAME", "FreeBSD"); - } else if target.contains("windows") { - cfg.define("CMAKE_SYSTEM_NAME", "Windows"); - } else if target.contains("haiku") { - cfg.define("CMAKE_SYSTEM_NAME", "Haiku"); - } else if target.contains("solaris") || target.contains("illumos") { - cfg.define("CMAKE_SYSTEM_NAME", "SunOS"); - } else if target.contains("linux") { - cfg.define("CMAKE_SYSTEM_NAME", "Linux"); - } else { - builder.info(&format!( - "could not determine CMAKE_SYSTEM_NAME from the target `{target}`, build may fail", - )); - } - - // When cross-compiling we should also set CMAKE_SYSTEM_VERSION, but in - // that case like CMake we cannot easily determine system version either. - // - // Since, the LLVM itself makes rather limited use of version checks in - // CMakeFiles (and then only in tests), and so far no issues have been - // reported, the system version is currently left unset. - - if target.contains("darwin") { - // Make sure that CMake does not build universal binaries on macOS. - // Explicitly specify the one single target architecture. - if target.starts_with("aarch64") { - // macOS uses a different name for building arm64 - cfg.define("CMAKE_OSX_ARCHITECTURES", "arm64"); - } else if target.starts_with("i686") { - // macOS uses a different name for building i386 - cfg.define("CMAKE_OSX_ARCHITECTURES", "i386"); - } else { - cfg.define("CMAKE_OSX_ARCHITECTURES", target.triple.split('-').next().unwrap()); - } - } - } - - let sanitize_cc = |cc: &Path| { - if target.contains("msvc") { - OsString::from(cc.to_str().unwrap().replace("\\", "/")) - } else { - cc.as_os_str().to_owned() - } - }; - - // MSVC with CMake uses msbuild by default which doesn't respect these - // vars that we'd otherwise configure. In that case we just skip this - // entirely. - if target.contains("msvc") && !builder.ninja() { - return; - } - - let (cc, cxx) = match builder.config.llvm_clang_cl { - Some(ref cl) => (cl.into(), cl.into()), - None => (builder.cc(target), builder.cxx(target).unwrap()), - }; - - // Handle msvc + ninja + ccache specially (this is what the bots use) - if target.contains("msvc") && builder.ninja() && builder.config.ccache.is_some() { - let mut wrap_cc = env::current_exe().expect("failed to get cwd"); - wrap_cc.set_file_name("sccache-plus-cl.exe"); - - cfg.define("CMAKE_C_COMPILER", sanitize_cc(&wrap_cc)) - .define("CMAKE_CXX_COMPILER", sanitize_cc(&wrap_cc)); - cfg.env("SCCACHE_PATH", builder.config.ccache.as_ref().unwrap()) - .env("SCCACHE_TARGET", target.triple) - .env("SCCACHE_CC", &cc) - .env("SCCACHE_CXX", &cxx); - - // Building LLVM on MSVC can be a little ludicrous at times. We're so far - // off the beaten path here that I'm not really sure this is even half - // supported any more. 
Here we're trying to: - // - // * Build LLVM on MSVC - // * Build LLVM with `clang-cl` instead of `cl.exe` - // * Build a project with `sccache` - // * Build for 32-bit as well - // * Build with Ninja - // - // For `cl.exe` there are different binaries to compile 32/64 bit which - // we use but for `clang-cl` there's only one which internally - // multiplexes via flags. As a result it appears that CMake's detection - // of a compiler's architecture and such on MSVC **doesn't** pass any - // custom flags we pass in CMAKE_CXX_FLAGS below. This means that if we - // use `clang-cl.exe` it's always diagnosed as a 64-bit compiler which - // definitely causes problems since all the env vars are pointing to - // 32-bit libraries. - // - // To hack around this... again... we pass an argument that's - // unconditionally passed in the sccache shim. This'll get CMake to - // correctly diagnose it's doing a 32-bit compilation and LLVM will - // internally configure itself appropriately. - if builder.config.llvm_clang_cl.is_some() && target.contains("i686") { - cfg.env("SCCACHE_EXTRA_ARGS", "-m32"); - } - } else { - // If ccache is configured we inform the build a little differently how - // to invoke ccache while also invoking our compilers. - if use_compiler_launcher { - if let Some(ref ccache) = builder.config.ccache { - cfg.define("CMAKE_C_COMPILER_LAUNCHER", ccache) - .define("CMAKE_CXX_COMPILER_LAUNCHER", ccache); - } - } - cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc)) - .define("CMAKE_CXX_COMPILER", sanitize_cc(&cxx)) - .define("CMAKE_ASM_COMPILER", sanitize_cc(&cc)); - } - - cfg.build_arg("-j").build_arg(builder.jobs().to_string()); - let mut cflags: OsString = builder.cflags(target, GitRepo::Llvm, CLang::C).join(" ").into(); - if let Some(ref s) = builder.config.llvm_cflags { - cflags.push(" "); - cflags.push(s); - } - - if builder.config.llvm_clang_cl.is_some() { - cflags.push(&format!(" --target={target}")); - } - for flag in extra_compiler_flags { - cflags.push(&format!(" {flag}")); - } - cfg.define("CMAKE_C_FLAGS", cflags); - let mut cxxflags: OsString = builder.cflags(target, GitRepo::Llvm, CLang::Cxx).join(" ").into(); - if let Some(ref s) = builder.config.llvm_cxxflags { - cxxflags.push(" "); - cxxflags.push(s); - } - if builder.config.llvm_clang_cl.is_some() { - cxxflags.push(&format!(" --target={target}")); - } - for flag in extra_compiler_flags { - cxxflags.push(&format!(" {flag}")); - } - cfg.define("CMAKE_CXX_FLAGS", cxxflags); - if let Some(ar) = builder.ar(target) { - if ar.is_absolute() { - // LLVM build breaks if `CMAKE_AR` is a relative path, for some reason it - // tries to resolve this path in the LLVM build directory. - cfg.define("CMAKE_AR", sanitize_cc(&ar)); - } - } - - if let Some(ranlib) = builder.ranlib(target) { - if ranlib.is_absolute() { - // LLVM build breaks if `CMAKE_RANLIB` is a relative path, for some reason it - // tries to resolve this path in the LLVM build directory. - cfg.define("CMAKE_RANLIB", sanitize_cc(&ranlib)); - } - } - - if let Some(ref flags) = builder.config.llvm_ldflags { - ldflags.push_all(flags); - } - - if let Some(flags) = get_var("LDFLAGS", &builder.config.build.triple, &target.triple) { - ldflags.push_all(&flags); - } - - // For distribution we want the LLVM tools to be *statically* linked to libstdc++. - // We also do this if the user explicitly requested static libstdc++. 
- if builder.config.llvm_static_stdcpp - && !target.contains("msvc") - && !target.contains("netbsd") - && !target.contains("solaris") - { - if target.contains("apple") || target.contains("windows") { - ldflags.push_all("-static-libstdc++"); - } else { - ldflags.push_all("-Wl,-Bsymbolic -static-libstdc++"); - } - } - - cfg.define("CMAKE_SHARED_LINKER_FLAGS", &ldflags.shared); - cfg.define("CMAKE_MODULE_LINKER_FLAGS", &ldflags.module); - cfg.define("CMAKE_EXE_LINKER_FLAGS", &ldflags.exe); - - if env::var_os("SCCACHE_ERROR_LOG").is_some() { - cfg.env("RUSTC_LOG", "sccache=warn"); - } -} - -fn configure_llvm(builder: &Builder<'_>, target: TargetSelection, cfg: &mut cmake::Config) { - // ThinLTO is only available when building with LLVM, enabling LLD is required. - // Apple's linker ld64 supports ThinLTO out of the box though, so don't use LLD on Darwin. - if builder.config.llvm_thin_lto { - cfg.define("LLVM_ENABLE_LTO", "Thin"); - if !target.contains("apple") { - cfg.define("LLVM_ENABLE_LLD", "ON"); - } - } - - if let Some(ref linker) = builder.config.llvm_use_linker { - cfg.define("LLVM_USE_LINKER", linker); - } - - if builder.config.llvm_allow_old_toolchain { - cfg.define("LLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN", "YES"); - } -} - -// Adapted from https://github.com/alexcrichton/cc-rs/blob/fba7feded71ee4f63cfe885673ead6d7b4f2f454/src/lib.rs#L2347-L2365 -fn get_var(var_base: &str, host: &str, target: &str) -> Option { - let kind = if host == target { "HOST" } else { "TARGET" }; - let target_u = target.replace("-", "_"); - env::var_os(&format!("{var_base}_{target}")) - .or_else(|| env::var_os(&format!("{}_{}", var_base, target_u))) - .or_else(|| env::var_os(&format!("{}_{}", kind, var_base))) - .or_else(|| env::var_os(var_base)) -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Lld { - pub target: TargetSelection, -} - -impl Step for Lld { - type Output = PathBuf; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/llvm-project/lld") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Lld { target: run.target }); - } - - /// Compile LLD for `target`. - fn run(self, builder: &Builder<'_>) -> PathBuf { - if builder.config.dry_run() { - return PathBuf::from("lld-out-dir-test-gen"); - } - let target = self.target; - - let LlvmResult { llvm_config, llvm_cmake_dir } = builder.ensure(Llvm { target }); - - // The `dist` step packages LLD next to LLVM's binaries for download-ci-llvm. The root path - // we usually expect here is `./build/$triple/ci-llvm/`, with the binaries in its `bin` - // subfolder. We check if that's the case, and if LLD's binary already exists there next to - // `llvm-config`: if so, we can use it instead of building LLVM/LLD from source. - let ci_llvm_bin = llvm_config.parent().unwrap(); - if ci_llvm_bin.is_dir() && ci_llvm_bin.file_name().unwrap() == "bin" { - let lld_path = ci_llvm_bin.join(exe("lld", target)); - if lld_path.exists() { - // The following steps copying `lld` as `rust-lld` to the sysroot, expect it in the - // `bin` subfolder of this step's out dir. 
- return ci_llvm_bin.parent().unwrap().to_path_buf(); - } - } - - let out_dir = builder.lld_out(target); - let done_stamp = out_dir.join("lld-finished-building"); - if done_stamp.exists() { - return out_dir; - } - - let _guard = builder.msg_unstaged(Kind::Build, "LLD", target); - let _time = util::timeit(&builder); - t!(fs::create_dir_all(&out_dir)); - - let mut cfg = cmake::Config::new(builder.src.join("src/llvm-project/lld")); - let mut ldflags = LdFlags::default(); - - // When building LLD as part of a build with instrumentation on windows, for example - // when doing PGO on CI, cmake or clang-cl don't automatically link clang's - // profiler runtime in. In that case, we need to manually ask cmake to do it, to avoid - // linking errors, much like LLVM's cmake setup does in that situation. - if builder.config.llvm_profile_generate && target.contains("msvc") { - if let Some(clang_cl_path) = builder.config.llvm_clang_cl.as_ref() { - // Find clang's runtime library directory and push that as a search path to the - // cmake linker flags. - let clang_rt_dir = get_clang_cl_resource_dir(clang_cl_path); - ldflags.push_all(&format!("/libpath:{}", clang_rt_dir.display())); - } - } - - // LLD is built as an LLVM tool, but is distributed outside of the `llvm-tools` component, - // which impacts where it expects to find LLVM's shared library. This causes #80703. - // - // LLD is distributed at "$root/lib/rustlib/$host/bin/rust-lld", but the `libLLVM-*.so` it - // needs is distributed at "$root/lib". The default rpath of "$ORIGIN/../lib" points at the - // lib path for LLVM tools, not the one for rust binaries. - // - // (The `llvm-tools` component copies the .so there for the other tools, and with that - // component installed, one can successfully invoke `rust-lld` directly without rustup's - // `LD_LIBRARY_PATH` overrides) - // - if builder.config.rpath_enabled(target) - && util::use_host_linker(target) - && builder.config.llvm_link_shared() - && target.contains("linux") - { - // So we inform LLD where it can find LLVM's libraries by adding an rpath entry to the - // expected parent `lib` directory. - // - // Be careful when changing this path, we need to ensure it's quoted or escaped: - // `$ORIGIN` would otherwise be expanded when the `LdFlags` are passed verbatim to - // cmake. - ldflags.push_all("-Wl,-rpath,'$ORIGIN/../../../'"); - } - - configure_cmake(builder, target, &mut cfg, true, ldflags, &[]); - configure_llvm(builder, target, &mut cfg); - - // Re-use the same flags as llvm to control the level of debug information - // generated for lld. - let profile = match (builder.config.llvm_optimize, builder.config.llvm_release_debuginfo) { - (false, _) => "Debug", - (true, false) => "Release", - (true, true) => "RelWithDebInfo", - }; - - cfg.out_dir(&out_dir) - .profile(profile) - .define("LLVM_CMAKE_DIR", llvm_cmake_dir) - .define("LLVM_INCLUDE_TESTS", "OFF"); - - if target != builder.config.build { - // Use the host llvm-tblgen binary. 
- cfg.define( - "LLVM_TABLEGEN_EXE", - llvm_config.with_file_name("llvm-tblgen").with_extension(EXE_EXTENSION), - ); - } - - cfg.build(); - - t!(File::create(&done_stamp)); - out_dir - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Sanitizers { - pub target: TargetSelection, -} - -impl Step for Sanitizers { - type Output = Vec; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("sanitizers") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Sanitizers { target: run.target }); - } - - /// Builds sanitizer runtime libraries. - fn run(self, builder: &Builder<'_>) -> Self::Output { - let compiler_rt_dir = builder.src.join("src/llvm-project/compiler-rt"); - if !compiler_rt_dir.exists() { - return Vec::new(); - } - - let out_dir = builder.native_dir(self.target).join("sanitizers"); - let runtimes = supported_sanitizers(&out_dir, self.target, &builder.config.channel); - if runtimes.is_empty() { - return runtimes; - } - - let LlvmResult { llvm_config, .. } = builder.ensure(Llvm { target: builder.config.build }); - if builder.config.dry_run() { - return runtimes; - } - - let stamp = out_dir.join("sanitizers-finished-building"); - let stamp = HashStamp::new(stamp, builder.in_tree_llvm_info.sha()); - - if stamp.is_done() { - if stamp.hash.is_none() { - builder.info(&format!( - "Rebuild sanitizers by removing the file `{}`", - stamp.path.display() - )); - } - return runtimes; - } - - let _guard = builder.msg_unstaged(Kind::Build, "sanitizers", self.target); - t!(stamp.remove()); - let _time = util::timeit(&builder); - - let mut cfg = cmake::Config::new(&compiler_rt_dir); - cfg.profile("Release"); - cfg.define("CMAKE_C_COMPILER_TARGET", self.target.triple); - cfg.define("COMPILER_RT_BUILD_BUILTINS", "OFF"); - cfg.define("COMPILER_RT_BUILD_CRT", "OFF"); - cfg.define("COMPILER_RT_BUILD_LIBFUZZER", "OFF"); - cfg.define("COMPILER_RT_BUILD_PROFILE", "OFF"); - cfg.define("COMPILER_RT_BUILD_SANITIZERS", "ON"); - cfg.define("COMPILER_RT_BUILD_XRAY", "OFF"); - cfg.define("COMPILER_RT_DEFAULT_TARGET_ONLY", "ON"); - cfg.define("COMPILER_RT_USE_LIBCXX", "OFF"); - cfg.define("LLVM_CONFIG_PATH", &llvm_config); - - // On Darwin targets the sanitizer runtimes are build as universal binaries. - // Unfortunately sccache currently lacks support to build them successfully. - // Disable compiler launcher on Darwin targets to avoid potential issues. - let use_compiler_launcher = !self.target.contains("apple-darwin"); - let extra_compiler_flags: &[&str] = - if self.target.contains("apple") { &["-fembed-bitcode=off"] } else { &[] }; - configure_cmake( - builder, - self.target, - &mut cfg, - use_compiler_launcher, - LdFlags::default(), - extra_compiler_flags, - ); - - t!(fs::create_dir_all(&out_dir)); - cfg.out_dir(out_dir); - - for runtime in &runtimes { - cfg.build_target(&runtime.cmake_target); - cfg.build(); - } - t!(stamp.write()); - - runtimes - } -} - -#[derive(Clone, Debug)] -pub struct SanitizerRuntime { - /// CMake target used to build the runtime. - pub cmake_target: String, - /// Path to the built runtime library. - pub path: PathBuf, - /// Library filename that will be used rustc. - pub name: String, -} - -/// Returns sanitizers available on a given target. 
-fn supported_sanitizers( - out_dir: &Path, - target: TargetSelection, - channel: &str, -) -> Vec { - let darwin_libs = |os: &str, components: &[&str]| -> Vec { - components - .iter() - .map(move |c| SanitizerRuntime { - cmake_target: format!("clang_rt.{}_{}_dynamic", c, os), - path: out_dir - .join(&format!("build/lib/darwin/libclang_rt.{}_{}_dynamic.dylib", c, os)), - name: format!("librustc-{}_rt.{}.dylib", channel, c), - }) - .collect() - }; - - let common_libs = |os: &str, arch: &str, components: &[&str]| -> Vec { - components - .iter() - .map(move |c| SanitizerRuntime { - cmake_target: format!("clang_rt.{}-{}", c, arch), - path: out_dir.join(&format!("build/lib/{}/libclang_rt.{}-{}.a", os, c, arch)), - name: format!("librustc-{}_rt.{}.a", channel, c), - }) - .collect() - }; - - match &*target.triple { - "aarch64-apple-darwin" => darwin_libs("osx", &["asan", "lsan", "tsan"]), - "aarch64-apple-ios" => darwin_libs("ios", &["asan", "tsan"]), - "aarch64-apple-ios-sim" => darwin_libs("iossim", &["asan", "tsan"]), - "aarch64-apple-ios-macabi" => darwin_libs("osx", &["asan", "lsan", "tsan"]), - "aarch64-unknown-fuchsia" => common_libs("fuchsia", "aarch64", &["asan"]), - "aarch64-unknown-linux-gnu" => { - common_libs("linux", "aarch64", &["asan", "lsan", "msan", "tsan", "hwasan"]) - } - "aarch64-unknown-linux-ohos" => { - common_libs("linux", "aarch64", &["asan", "lsan", "msan", "tsan", "hwasan"]) - } - "x86_64-apple-darwin" => darwin_libs("osx", &["asan", "lsan", "tsan"]), - "x86_64-unknown-fuchsia" => common_libs("fuchsia", "x86_64", &["asan"]), - "x86_64-apple-ios" => darwin_libs("iossim", &["asan", "tsan"]), - "x86_64-apple-ios-macabi" => darwin_libs("osx", &["asan", "lsan", "tsan"]), - "x86_64-unknown-freebsd" => common_libs("freebsd", "x86_64", &["asan", "msan", "tsan"]), - "x86_64-unknown-netbsd" => { - common_libs("netbsd", "x86_64", &["asan", "lsan", "msan", "tsan"]) - } - "x86_64-unknown-illumos" => common_libs("illumos", "x86_64", &["asan"]), - "x86_64-pc-solaris" => common_libs("solaris", "x86_64", &["asan"]), - "x86_64-unknown-linux-gnu" => { - common_libs("linux", "x86_64", &["asan", "lsan", "msan", "safestack", "tsan"]) - } - "x86_64-unknown-linux-musl" => { - common_libs("linux", "x86_64", &["asan", "lsan", "msan", "tsan"]) - } - "s390x-unknown-linux-gnu" => { - common_libs("linux", "s390x", &["asan", "lsan", "msan", "tsan"]) - } - "s390x-unknown-linux-musl" => { - common_libs("linux", "s390x", &["asan", "lsan", "msan", "tsan"]) - } - "x86_64-unknown-linux-ohos" => { - common_libs("linux", "x86_64", &["asan", "lsan", "msan", "tsan"]) - } - _ => Vec::new(), - } -} - -struct HashStamp { - path: PathBuf, - hash: Option>, -} - -impl HashStamp { - fn new(path: PathBuf, hash: Option<&str>) -> Self { - HashStamp { path, hash: hash.map(|s| s.as_bytes().to_owned()) } - } - - fn is_done(&self) -> bool { - match fs::read(&self.path) { - Ok(h) => self.hash.as_deref().unwrap_or(b"") == h.as_slice(), - Err(e) if e.kind() == io::ErrorKind::NotFound => false, - Err(e) => { - panic!("failed to read stamp file `{}`: {}", self.path.display(), e); - } - } - } - - fn remove(&self) -> io::Result<()> { - match fs::remove_file(&self.path) { - Ok(()) => Ok(()), - Err(e) => { - if e.kind() == io::ErrorKind::NotFound { - Ok(()) - } else { - Err(e) - } - } - } - } - - fn write(&self) -> io::Result<()> { - fs::write(&self.path, self.hash.as_deref().unwrap_or(b"")) - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct CrtBeginEnd { - pub target: TargetSelection, -} - -impl Step for 
CrtBeginEnd { - type Output = PathBuf; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/llvm-project/compiler-rt/lib/crt") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(CrtBeginEnd { target: run.target }); - } - - /// Build crtbegin.o/crtend.o for musl target. - fn run(self, builder: &Builder<'_>) -> Self::Output { - builder.update_submodule(&Path::new("src/llvm-project")); - - let out_dir = builder.native_dir(self.target).join("crt"); - - if builder.config.dry_run() { - return out_dir; - } - - let crtbegin_src = builder.src.join("src/llvm-project/compiler-rt/lib/builtins/crtbegin.c"); - let crtend_src = builder.src.join("src/llvm-project/compiler-rt/lib/builtins/crtend.c"); - if up_to_date(&crtbegin_src, &out_dir.join("crtbegin.o")) - && up_to_date(&crtend_src, &out_dir.join("crtendS.o")) - { - return out_dir; - } - - let _guard = builder.msg_unstaged(Kind::Build, "crtbegin.o and crtend.o", self.target); - t!(fs::create_dir_all(&out_dir)); - - let mut cfg = cc::Build::new(); - - if let Some(ar) = builder.ar(self.target) { - cfg.archiver(ar); - } - cfg.compiler(builder.cc(self.target)); - cfg.cargo_metadata(false) - .out_dir(&out_dir) - .target(&self.target.triple) - .host(&builder.config.build.triple) - .warnings(false) - .debug(false) - .opt_level(3) - .file(crtbegin_src) - .file(crtend_src); - - // Those flags are defined in src/llvm-project/compiler-rt/lib/crt/CMakeLists.txt - // Currently only consumer of those objects is musl, which use .init_array/.fini_array - // instead of .ctors/.dtors - cfg.flag("-std=c11") - .define("CRT_HAS_INITFINI_ARRAY", None) - .define("EH_USE_FRAME_REGISTRY", None); - - cfg.compile("crt"); - - t!(fs::copy(out_dir.join("crtbegin.o"), out_dir.join("crtbeginS.o"))); - t!(fs::copy(out_dir.join("crtend.o"), out_dir.join("crtendS.o"))); - out_dir - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Libunwind { - pub target: TargetSelection, -} - -impl Step for Libunwind { - type Output = PathBuf; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/llvm-project/libunwind") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Libunwind { target: run.target }); - } - - /// Build libunwind.a - fn run(self, builder: &Builder<'_>) -> Self::Output { - builder.update_submodule(&Path::new("src/llvm-project")); - - if builder.config.dry_run() { - return PathBuf::new(); - } - - let out_dir = builder.native_dir(self.target).join("libunwind"); - let root = builder.src.join("src/llvm-project/libunwind"); - - if up_to_date(&root, &out_dir.join("libunwind.a")) { - return out_dir; - } - - let _guard = builder.msg_unstaged(Kind::Build, "libunwind.a", self.target); - t!(fs::create_dir_all(&out_dir)); - - let mut cc_cfg = cc::Build::new(); - let mut cpp_cfg = cc::Build::new(); - - cpp_cfg.cpp(true); - cpp_cfg.cpp_set_stdlib(None); - cpp_cfg.flag("-nostdinc++"); - cpp_cfg.flag("-fno-exceptions"); - cpp_cfg.flag("-fno-rtti"); - cpp_cfg.flag_if_supported("-fvisibility-global-new-delete-hidden"); - - for cfg in [&mut cc_cfg, &mut cpp_cfg].iter_mut() { - if let Some(ar) = builder.ar(self.target) { - cfg.archiver(ar); - } - cfg.target(&self.target.triple); - cfg.host(&builder.config.build.triple); - cfg.warnings(false); - cfg.debug(false); - // get_compiler() need set opt_level first. 
- cfg.opt_level(3); - cfg.flag("-fstrict-aliasing"); - cfg.flag("-funwind-tables"); - cfg.flag("-fvisibility=hidden"); - cfg.define("_LIBUNWIND_DISABLE_VISIBILITY_ANNOTATIONS", None); - cfg.include(root.join("include")); - cfg.cargo_metadata(false); - cfg.out_dir(&out_dir); - - if self.target.contains("x86_64-fortanix-unknown-sgx") { - cfg.static_flag(true); - cfg.flag("-fno-stack-protector"); - cfg.flag("-ffreestanding"); - cfg.flag("-fexceptions"); - - // easiest way to undefine since no API available in cc::Build to undefine - cfg.flag("-U_FORTIFY_SOURCE"); - cfg.define("_FORTIFY_SOURCE", "0"); - cfg.define("RUST_SGX", "1"); - cfg.define("__NO_STRING_INLINES", None); - cfg.define("__NO_MATH_INLINES", None); - cfg.define("_LIBUNWIND_IS_BAREMETAL", None); - cfg.define("__LIBUNWIND_IS_NATIVE_ONLY", None); - cfg.define("NDEBUG", None); - } - if self.target.contains("windows") { - cfg.define("_LIBUNWIND_HIDE_SYMBOLS", "1"); - cfg.define("_LIBUNWIND_IS_NATIVE_ONLY", "1"); - } - } - - cc_cfg.compiler(builder.cc(self.target)); - if let Ok(cxx) = builder.cxx(self.target) { - cpp_cfg.compiler(cxx); - } else { - cc_cfg.compiler(builder.cc(self.target)); - } - - // Don't set this for clang - // By default, Clang builds C code in GNU C17 mode. - // By default, Clang builds C++ code according to the C++98 standard, - // with many C++11 features accepted as extensions. - if cc_cfg.get_compiler().is_like_gnu() { - cc_cfg.flag("-std=c99"); - } - if cpp_cfg.get_compiler().is_like_gnu() { - cpp_cfg.flag("-std=c++11"); - } - - if self.target.contains("x86_64-fortanix-unknown-sgx") || self.target.contains("musl") { - // use the same GCC C compiler command to compile C++ code so we do not need to setup the - // C++ compiler env variables on the builders. - // Don't set this for clang++, as clang++ is able to compile this without libc++. 
- if cpp_cfg.get_compiler().is_like_gnu() { - cpp_cfg.cpp(false); - cpp_cfg.compiler(builder.cc(self.target)); - } - } - - let mut c_sources = vec![ - "Unwind-sjlj.c", - "UnwindLevel1-gcc-ext.c", - "UnwindLevel1.c", - "UnwindRegistersRestore.S", - "UnwindRegistersSave.S", - ]; - - let cpp_sources = vec!["Unwind-EHABI.cpp", "Unwind-seh.cpp", "libunwind.cpp"]; - let cpp_len = cpp_sources.len(); - - if self.target.contains("x86_64-fortanix-unknown-sgx") { - c_sources.push("UnwindRustSgx.c"); - } - - for src in c_sources { - cc_cfg.file(root.join("src").join(src).canonicalize().unwrap()); - } - - for src in &cpp_sources { - cpp_cfg.file(root.join("src").join(src).canonicalize().unwrap()); - } - - cpp_cfg.compile("unwind-cpp"); - - // FIXME: https://github.com/alexcrichton/cc-rs/issues/545#issuecomment-679242845 - let mut count = 0; - for entry in fs::read_dir(&out_dir).unwrap() { - let file = entry.unwrap().path().canonicalize().unwrap(); - if file.is_file() && file.extension() == Some(OsStr::new("o")) { - // file name starts with "Unwind-EHABI", "Unwind-seh" or "libunwind" - let file_name = file.file_name().unwrap().to_str().expect("UTF-8 file name"); - if cpp_sources.iter().any(|f| file_name.starts_with(&f[..f.len() - 4])) { - cc_cfg.object(&file); - count += 1; - } - } - } - assert_eq!(cpp_len, count, "Can't get object files from {:?}", &out_dir); - - cc_cfg.compile("unwind"); - out_dir - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/metadata.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/metadata.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/metadata.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/metadata.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,101 +0,0 @@ -use std::path::PathBuf; -use std::process::Command; - -use serde_derive::Deserialize; - -use crate::cache::INTERNER; -use crate::util::output; -use crate::{t, Build, Crate}; - -/// For more information, see the output of -/// -#[derive(Debug, Deserialize)] -struct Output { - packages: Vec, -} - -/// For more information, see the output of -/// -#[derive(Debug, Deserialize)] -struct Package { - name: String, - source: Option, - manifest_path: String, - dependencies: Vec, - targets: Vec, -} - -/// For more information, see the output of -/// -#[derive(Debug, Deserialize)] -struct Dependency { - name: String, - source: Option, -} - -#[derive(Debug, Deserialize)] -struct Target { - kind: Vec, -} - -/// Collects and stores package metadata of each workspace members into `build`, -/// by executing `cargo metadata` commands. -pub fn build(build: &mut Build) { - for package in workspace_members(build) { - if package.source.is_none() { - let name = INTERNER.intern_string(package.name); - let mut path = PathBuf::from(package.manifest_path); - path.pop(); - let deps = package - .dependencies - .into_iter() - .filter(|dep| dep.source.is_none()) - .map(|dep| INTERNER.intern_string(dep.name)) - .collect(); - let has_lib = package.targets.iter().any(|t| t.kind.iter().any(|k| k == "lib")); - let krate = Crate { name, deps, path, has_lib }; - let relative_path = krate.local_path(build); - build.crates.insert(name, krate); - let existing_path = build.crate_paths.insert(relative_path, name); - assert!( - existing_path.is_none(), - "multiple crates with the same path: {}", - existing_path.unwrap() - ); - } - } -} - -/// Invokes `cargo metadata` to get package metadata of each workspace member. 
-/// -/// Note that `src/tools/cargo` is no longer a workspace member but we still -/// treat it as one here, by invoking an additional `cargo metadata` command. -fn workspace_members(build: &Build) -> impl Iterator { - let collect_metadata = |manifest_path| { - let mut cargo = Command::new(&build.initial_cargo); - cargo - // Will read the libstd Cargo.toml - // which uses the unstable `public-dependency` feature. - .env("RUSTC_BOOTSTRAP", "1") - .arg("metadata") - .arg("--format-version") - .arg("1") - .arg("--no-deps") - .arg("--manifest-path") - .arg(build.src.join(manifest_path)); - let metadata_output = output(&mut cargo); - let Output { packages, .. } = t!(serde_json::from_str(&metadata_output)); - packages - }; - - // Collects `metadata.packages` from all workspaces. - let packages = collect_metadata("Cargo.toml"); - let cargo_packages = collect_metadata("src/tools/cargo/Cargo.toml"); - let ra_packages = collect_metadata("src/tools/rust-analyzer/Cargo.toml"); - let bootstrap_packages = collect_metadata("src/bootstrap/Cargo.toml"); - - // We only care about the root package from `src/tool/cargo` workspace. - let cargo_package = cargo_packages.into_iter().find(|pkg| pkg.name == "cargo").into_iter(); - - packages.into_iter().chain(cargo_package).chain(ra_packages).chain(bootstrap_packages) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/metrics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/metrics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/metrics.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/metrics.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,258 +0,0 @@ -//! This module is responsible for collecting metrics profiling information for the current build -//! and dumping it to disk as JSON, to aid investigations on build and CI performance. -//! -//! As this module requires additional dependencies not present during local builds, it's cfg'd -//! away whenever the `build.metrics` config option is not set to `true`. - -use crate::builder::{Builder, Step}; -use crate::util::t; -use crate::Build; -use build_helper::metrics::{ - JsonInvocation, JsonInvocationSystemStats, JsonNode, JsonRoot, JsonStepSystemStats, Test, - TestOutcome, TestSuite, TestSuiteMetadata, -}; -use std::cell::RefCell; -use std::fs::File; -use std::io::BufWriter; -use std::time::{Duration, Instant, SystemTime}; -use sysinfo::{CpuExt, System, SystemExt}; - -// Update this number whenever a breaking change is made to the build metrics. -// -// The output format is versioned for two reasons: -// -// - The metadata is intended to be consumed by external tooling, and exposing a format version -// helps the tools determine whether they're compatible with a metrics file. -// -// - If a developer enables build metrics in their local checkout, making a breaking change to the -// metrics format would result in a hard-to-diagnose error message when an existing metrics file -// is not compatible with the new changes. With a format version number, bootstrap can discard -// incompatible metrics files instead of appending metrics to them. -// -// Version changelog: -// -// - v0: initial version -// - v1: replaced JsonNode::Test with JsonNode::TestSuite -// -const CURRENT_FORMAT_VERSION: usize = 1; - -pub(crate) struct BuildMetrics { - state: RefCell, -} - -/// NOTE: this isn't really cloning anything, but `x suggest` doesn't need metrics so this is probably ok. 
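The format-version changelog above is what makes the version-gated read used later in this module possible: the metrics file is first deserialized into a struct that only knows about `format_version`, and the full structure is parsed only when that number is current. A minimal standalone sketch (requires serde and serde_json; the fields of `Metrics` are made up for the example):

use serde::Deserialize;

const CURRENT_FORMAT_VERSION: usize = 1;

#[derive(Deserialize)]
struct OnlyFormatVersion {
    #[serde(default)] // version 0 files did not carry the field
    format_version: usize,
}

#[derive(Deserialize, Debug)]
struct Metrics {
    format_version: usize,
    invocations: Vec<String>, // placeholder for the real per-invocation data
}

fn load(contents: &str) -> Option<Metrics> {
    let version: OnlyFormatVersion = serde_json::from_str(contents).ok()?;
    if version.format_version != CURRENT_FORMAT_VERSION {
        return None; // incompatible: the caller starts from an empty list instead
    }
    serde_json::from_str(contents).ok()
}

fn main() {
    let current = r#"{"format_version":1,"invocations":["x build"]}"#;
    let ancient = r#"{"some_old_field":true}"#;
    println!("{}", load(current).is_some()); // true
    println!("{}", load(ancient).is_some()); // false (treated as version 0)
}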
-impl Clone for BuildMetrics { - fn clone(&self) -> Self { - Self::init() - } -} - -impl BuildMetrics { - pub(crate) fn init() -> Self { - let state = RefCell::new(MetricsState { - finished_steps: Vec::new(), - running_steps: Vec::new(), - - system_info: System::new(), - timer_start: None, - invocation_timer_start: Instant::now(), - invocation_start: SystemTime::now(), - }); - - BuildMetrics { state } - } - - pub(crate) fn enter_step(&self, step: &S, builder: &Builder<'_>) { - // Do not record dry runs, as they'd be duplicates of the actual steps. - if builder.config.dry_run() { - return; - } - - let mut state = self.state.borrow_mut(); - - // Consider all the stats gathered so far as the parent's. - if !state.running_steps.is_empty() { - self.collect_stats(&mut *state); - } - - state.system_info.refresh_cpu(); - state.timer_start = Some(Instant::now()); - - state.running_steps.push(StepMetrics { - type_: std::any::type_name::().into(), - debug_repr: format!("{step:?}"), - - cpu_usage_time_sec: 0.0, - duration_excluding_children_sec: Duration::ZERO, - - children: Vec::new(), - test_suites: Vec::new(), - }); - } - - pub(crate) fn exit_step(&self, builder: &Builder<'_>) { - // Do not record dry runs, as they'd be duplicates of the actual steps. - if builder.config.dry_run() { - return; - } - - let mut state = self.state.borrow_mut(); - - self.collect_stats(&mut *state); - - let step = state.running_steps.pop().unwrap(); - if state.running_steps.is_empty() { - state.finished_steps.push(step); - state.timer_start = None; - } else { - state.running_steps.last_mut().unwrap().children.push(step); - - // Start collecting again for the parent step. - state.system_info.refresh_cpu(); - state.timer_start = Some(Instant::now()); - } - } - - pub(crate) fn begin_test_suite(&self, metadata: TestSuiteMetadata, builder: &Builder<'_>) { - // Do not record dry runs, as they'd be duplicates of the actual steps. - if builder.config.dry_run() { - return; - } - - let mut state = self.state.borrow_mut(); - let step = state.running_steps.last_mut().unwrap(); - step.test_suites.push(TestSuite { metadata, tests: Vec::new() }); - } - - pub(crate) fn record_test(&self, name: &str, outcome: TestOutcome, builder: &Builder<'_>) { - // Do not record dry runs, as they'd be duplicates of the actual steps. 
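A condensed standalone sketch of the bookkeeping that `enter_step`/`exit_step` above perform: a stack of running steps, with elapsed time always credited to the innermost step so each duration excludes its children. Names are illustrative, and the CPU-usage tracking done via sysinfo in the real code is omitted here.

use std::time::{Duration, Instant};

#[derive(Debug)]
struct StepMetrics {
    name: String,
    duration_excluding_children: Duration,
    children: Vec<StepMetrics>,
}

#[derive(Default)]
struct Metrics {
    running: Vec<StepMetrics>,
    finished: Vec<StepMetrics>,
    timer_start: Option<Instant>,
}

impl Metrics {
    // Credit the time elapsed since the last checkpoint to the step on top of the stack.
    fn collect(&mut self) {
        if let (Some(start), Some(top)) = (self.timer_start, self.running.last_mut()) {
            top.duration_excluding_children += start.elapsed();
        }
    }

    fn enter_step(&mut self, name: &str) {
        self.collect();
        self.timer_start = Some(Instant::now());
        self.running.push(StepMetrics {
            name: name.to_string(),
            duration_excluding_children: Duration::ZERO,
            children: Vec::new(),
        });
    }

    fn exit_step(&mut self) {
        self.collect();
        let step = self.running.pop().expect("exit_step without enter_step");
        match self.running.last_mut() {
            Some(parent) => parent.children.push(step),
            None => self.finished.push(step),
        }
        self.timer_start = if self.running.is_empty() { None } else { Some(Instant::now()) };
    }
}

fn main() {
    let mut m = Metrics::default();
    m.enter_step("build-llvm");
    m.enter_step("configure");
    m.exit_step();
    m.exit_step();
    println!("{:#?}", m.finished);
}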
- if builder.config.dry_run() { - return; - } - - let mut state = self.state.borrow_mut(); - let step = state.running_steps.last_mut().unwrap(); - - if let Some(test_suite) = step.test_suites.last_mut() { - test_suite.tests.push(Test { name: name.to_string(), outcome }); - } else { - panic!("metrics.record_test() called without calling metrics.begin_test_suite() first"); - } - } - - fn collect_stats(&self, state: &mut MetricsState) { - let step = state.running_steps.last_mut().unwrap(); - - let elapsed = state.timer_start.unwrap().elapsed(); - step.duration_excluding_children_sec += elapsed; - - state.system_info.refresh_cpu(); - let cpu = state.system_info.cpus().iter().map(|p| p.cpu_usage()).sum::(); - step.cpu_usage_time_sec += cpu as f64 / 100.0 * elapsed.as_secs_f64(); - } - - pub(crate) fn persist(&self, build: &Build) { - let mut state = self.state.borrow_mut(); - assert!(state.running_steps.is_empty(), "steps are still executing"); - - let dest = build.out.join("metrics.json"); - - let mut system = System::new(); - system.refresh_cpu(); - system.refresh_memory(); - - let system_stats = JsonInvocationSystemStats { - cpu_threads_count: system.cpus().len(), - cpu_model: system.cpus()[0].brand().into(), - - memory_total_bytes: system.total_memory(), - }; - let steps = std::mem::take(&mut state.finished_steps); - - // Some of our CI builds consist of multiple independent CI invocations. Ensure all the - // previous invocations are still present in the resulting file. - let mut invocations = match std::fs::read(&dest) { - Ok(contents) => { - // We first parse just the format_version field to have the check succeed even if - // the rest of the contents are not valid anymore. - let version: OnlyFormatVersion = t!(serde_json::from_slice(&contents)); - if version.format_version == CURRENT_FORMAT_VERSION { - t!(serde_json::from_slice::(&contents)).invocations - } else { - println!( - "warning: overriding existing build/metrics.json, as it's not \ - compatible with build metrics format version {CURRENT_FORMAT_VERSION}." 
- ); - Vec::new() - } - } - Err(err) => { - if err.kind() != std::io::ErrorKind::NotFound { - panic!("failed to open existing metrics file at {}: {err}", dest.display()); - } - Vec::new() - } - }; - invocations.push(JsonInvocation { - start_time: state - .invocation_start - .duration_since(SystemTime::UNIX_EPOCH) - .unwrap() - .as_secs(), - duration_including_children_sec: state.invocation_timer_start.elapsed().as_secs_f64(), - children: steps.into_iter().map(|step| self.prepare_json_step(step)).collect(), - }); - - let json = JsonRoot { format_version: CURRENT_FORMAT_VERSION, system_stats, invocations }; - - t!(std::fs::create_dir_all(dest.parent().unwrap())); - let mut file = BufWriter::new(t!(File::create(&dest))); - t!(serde_json::to_writer(&mut file, &json)); - } - - fn prepare_json_step(&self, step: StepMetrics) -> JsonNode { - let mut children = Vec::new(); - children.extend(step.children.into_iter().map(|child| self.prepare_json_step(child))); - children.extend(step.test_suites.into_iter().map(JsonNode::TestSuite)); - - JsonNode::RustbuildStep { - type_: step.type_, - debug_repr: step.debug_repr, - - duration_excluding_children_sec: step.duration_excluding_children_sec.as_secs_f64(), - system_stats: JsonStepSystemStats { - cpu_utilization_percent: step.cpu_usage_time_sec * 100.0 - / step.duration_excluding_children_sec.as_secs_f64(), - }, - - children, - } - } -} - -struct MetricsState { - finished_steps: Vec, - running_steps: Vec, - - system_info: System, - timer_start: Option, - invocation_timer_start: Instant, - invocation_start: SystemTime, -} - -struct StepMetrics { - type_: String, - debug_repr: String, - - cpu_usage_time_sec: f64, - duration_excluding_children_sec: Duration, - - children: Vec, - test_suites: Vec, -} - -#[derive(serde_derive::Deserialize)] -struct OnlyFormatVersion { - #[serde(default)] // For version 0 the field was not present. - format_version: usize, -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/render_tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/render_tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/render_tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/render_tests.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,400 +0,0 @@ -//! This module renders the JSON output of libtest into a human-readable form, trying to be as -//! similar to libtest's native output as possible. -//! -//! This is needed because we need to use libtest in JSON mode to extract granular information -//! about the executed tests. Doing so suppresses the human-readable output, and (compared to Cargo -//! and rustc) libtest doesn't include the rendered human-readable output as a JSON field. We had -//! to reimplement all the rendering logic in this module because of that. 
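The module documentation above states the constraint this file works around: libtest in JSON mode emits one machine-readable message per line and no human-readable output. A trimmed standalone sketch of that read loop follows; the binary path is a placeholder and the message struct is a stand-in for libtest's real suite/test/bench messages, while the extra `-Z unstable-options --format json` arguments match the ones added above.

use serde::Deserialize;
use std::io::{BufRead, BufReader, Write};
use std::process::{Command, Stdio};

#[derive(Deserialize, Debug)]
struct Message {
    #[serde(rename = "type")]
    kind: String,
    event: Option<String>,
    name: Option<String>,
}

fn main() -> std::io::Result<()> {
    let mut child = Command::new("./some-test-binary") // placeholder test executable
        .args(["-Z", "unstable-options", "--format", "json"])
        .stdout(Stdio::piped())
        .spawn()?;

    let stdout = BufReader::new(child.stdout.take().expect("piped stdout"));
    for line in stdout.split(b'\n') {
        let line = line?;
        match serde_json::from_slice::<Message>(&line) {
            Ok(msg) => println!("{} {:?} {:?}", msg.kind, msg.event, msg.name),
            Err(_) => {
                // Not JSON (e.g. --nocapture output): pass it through untouched.
                std::io::stdout().write_all(&line)?;
                println!();
            }
        }
    }
    child.wait()?;
    Ok(())
}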
- -use crate::builder::Builder; -use std::io::{BufRead, BufReader, Read, Write}; -use std::process::{ChildStdout, Command, Stdio}; -use std::time::Duration; -use termcolor::{Color, ColorSpec, WriteColor}; - -const TERSE_TESTS_PER_LINE: usize = 88; - -pub(crate) fn add_flags_and_try_run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool { - if cmd.get_args().position(|arg| arg == "--").is_none() { - cmd.arg("--"); - } - cmd.args(&["-Z", "unstable-options", "--format", "json"]); - - try_run_tests(builder, cmd, false) -} - -pub(crate) fn try_run_tests(builder: &Builder<'_>, cmd: &mut Command, stream: bool) -> bool { - if builder.config.dry_run() { - return true; - } - - if !run_tests(builder, cmd, stream) { - if builder.fail_fast { - crate::exit!(1); - } else { - let mut failures = builder.delayed_failures.borrow_mut(); - failures.push(format!("{cmd:?}")); - false - } - } else { - true - } -} - -fn run_tests(builder: &Builder<'_>, cmd: &mut Command, stream: bool) -> bool { - cmd.stdout(Stdio::piped()); - - builder.verbose(&format!("running: {cmd:?}")); - - let mut process = cmd.spawn().unwrap(); - - // This runs until the stdout of the child is closed, which means the child exited. We don't - // run this on another thread since the builder is not Sync. - let renderer = Renderer::new(process.stdout.take().unwrap(), builder); - if stream { - renderer.stream_all(); - } else { - renderer.render_all(); - } - - let result = process.wait_with_output().unwrap(); - if !result.status.success() && builder.is_verbose() { - println!( - "\n\ncommand did not execute successfully: {cmd:?}\n\ - expected success, got: {}", - result.status - ); - } - - result.status.success() -} - -struct Renderer<'a> { - stdout: BufReader, - failures: Vec, - benches: Vec, - builder: &'a Builder<'a>, - tests_count: Option, - executed_tests: usize, - terse_tests_in_line: usize, -} - -impl<'a> Renderer<'a> { - fn new(stdout: ChildStdout, builder: &'a Builder<'a>) -> Self { - Self { - stdout: BufReader::new(stdout), - benches: Vec::new(), - failures: Vec::new(), - builder, - tests_count: None, - executed_tests: 0, - terse_tests_in_line: 0, - } - } - - fn render_all(mut self) { - let mut line = Vec::new(); - loop { - line.clear(); - match self.stdout.read_until(b'\n', &mut line) { - Ok(_) => {} - Err(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => break, - Err(err) => panic!("failed to read output of test runner: {err}"), - } - if line.is_empty() { - break; - } - - match serde_json::from_slice(&line) { - Ok(parsed) => self.render_message(parsed), - Err(_err) => { - // Handle non-JSON output, for example when --nocapture is passed. 
- let mut stdout = std::io::stdout(); - stdout.write_all(&line).unwrap(); - let _ = stdout.flush(); - } - } - } - } - - /// Renders the stdout characters one by one - fn stream_all(mut self) { - let mut buffer = [0; 1]; - loop { - match self.stdout.read(&mut buffer) { - Ok(0) => break, - Ok(_) => { - let mut stdout = std::io::stdout(); - stdout.write_all(&buffer).unwrap(); - let _ = stdout.flush(); - } - Err(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => break, - Err(err) => panic!("failed to read output of test runner: {err}"), - } - } - } - - fn render_test_outcome(&mut self, outcome: Outcome<'_>, test: &TestOutcome) { - self.executed_tests += 1; - - #[cfg(feature = "build-metrics")] - self.builder.metrics.record_test( - &test.name, - match outcome { - Outcome::Ok | Outcome::BenchOk => build_helper::metrics::TestOutcome::Passed, - Outcome::Failed => build_helper::metrics::TestOutcome::Failed, - Outcome::Ignored { reason } => build_helper::metrics::TestOutcome::Ignored { - ignore_reason: reason.map(|s| s.to_string()), - }, - }, - self.builder, - ); - - if self.builder.config.verbose_tests { - self.render_test_outcome_verbose(outcome, test); - } else { - self.render_test_outcome_terse(outcome, test); - } - } - - fn render_test_outcome_verbose(&self, outcome: Outcome<'_>, test: &TestOutcome) { - print!("test {} ... ", test.name); - self.builder.colored_stdout(|stdout| outcome.write_long(stdout)).unwrap(); - if let Some(exec_time) = test.exec_time { - print!(" ({exec_time:.2?})"); - } - println!(); - } - - fn render_test_outcome_terse(&mut self, outcome: Outcome<'_>, _: &TestOutcome) { - if self.terse_tests_in_line != 0 && self.terse_tests_in_line % TERSE_TESTS_PER_LINE == 0 { - if let Some(total) = self.tests_count { - let total = total.to_string(); - let executed = format!("{:>width$}", self.executed_tests - 1, width = total.len()); - print!(" {executed}/{total}"); - } - println!(); - self.terse_tests_in_line = 0; - } - - self.terse_tests_in_line += 1; - self.builder.colored_stdout(|stdout| outcome.write_short(stdout)).unwrap(); - let _ = std::io::stdout().flush(); - } - - fn render_suite_outcome(&self, outcome: Outcome<'_>, suite: &SuiteOutcome) { - // The terse output doesn't end with a newline, so we need to add it ourselves. - if !self.builder.config.verbose_tests { - println!(); - } - - if !self.failures.is_empty() { - println!("\nfailures:\n"); - for failure in &self.failures { - if failure.stdout.is_some() || failure.message.is_some() { - println!("---- {} stdout ----", failure.name); - if let Some(stdout) = &failure.stdout { - println!("{stdout}"); - } - if let Some(message) = &failure.message { - println!("note: {message}"); - } - } - } - - println!("\nfailures:"); - for failure in &self.failures { - println!(" {}", failure.name); - } - } - - if !self.benches.is_empty() { - println!("\nbenchmarks:"); - - let mut rows = Vec::new(); - for bench in &self.benches { - rows.push(( - &bench.name, - format!("{:.2?}/iter", Duration::from_nanos(bench.median)), - format!("+/- {:.2?}", Duration::from_nanos(bench.deviation)), - )); - } - - let max_0 = rows.iter().map(|r| r.0.len()).max().unwrap_or(0); - let max_1 = rows.iter().map(|r| r.1.len()).max().unwrap_or(0); - let max_2 = rows.iter().map(|r| r.2.len()).max().unwrap_or(0); - for row in &rows { - println!(" {:max_1$} {:>max_2$}", row.0, row.1, row.2); - } - } - - print!("\ntest result: "); - self.builder.colored_stdout(|stdout| outcome.write_long(stdout)).unwrap(); - println!( - ". 
{} passed; {} failed; {} ignored; {} measured; {} filtered out; \ - finished in {:.2?}\n", - suite.passed, - suite.failed, - suite.ignored, - suite.measured, - suite.filtered_out, - Duration::from_secs_f64(suite.exec_time) - ); - } - - fn render_message(&mut self, message: Message) { - match message { - Message::Suite(SuiteMessage::Started { test_count }) => { - println!("\nrunning {test_count} tests"); - self.executed_tests = 0; - self.terse_tests_in_line = 0; - self.tests_count = Some(test_count); - } - Message::Suite(SuiteMessage::Ok(outcome)) => { - self.render_suite_outcome(Outcome::Ok, &outcome); - } - Message::Suite(SuiteMessage::Failed(outcome)) => { - self.render_suite_outcome(Outcome::Failed, &outcome); - } - Message::Bench(outcome) => { - // The formatting for benchmarks doesn't replicate 1:1 the formatting libtest - // outputs, mostly because libtest's formatting is broken in terse mode, which is - // the default used by our monorepo. We use a different formatting instead: - // successful benchmarks are just showed as "benchmarked"/"b", and the details are - // outputted at the bottom like failures. - let fake_test_outcome = TestOutcome { - name: outcome.name.clone(), - exec_time: None, - stdout: None, - message: None, - }; - self.render_test_outcome(Outcome::BenchOk, &fake_test_outcome); - self.benches.push(outcome); - } - Message::Test(TestMessage::Ok(outcome)) => { - self.render_test_outcome(Outcome::Ok, &outcome); - } - Message::Test(TestMessage::Ignored(outcome)) => { - self.render_test_outcome( - Outcome::Ignored { reason: outcome.message.as_deref() }, - &outcome, - ); - } - Message::Test(TestMessage::Failed(outcome)) => { - self.render_test_outcome(Outcome::Failed, &outcome); - self.failures.push(outcome); - } - Message::Test(TestMessage::Timeout { name }) => { - println!("test {name} has been running for a long time"); - } - Message::Test(TestMessage::Started) => {} // Not useful - } - } -} - -enum Outcome<'a> { - Ok, - BenchOk, - Failed, - Ignored { reason: Option<&'a str> }, -} - -impl Outcome<'_> { - fn write_short(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> { - match self { - Outcome::Ok => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?; - write!(writer, ".")?; - } - Outcome::BenchOk => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?; - write!(writer, "b")?; - } - Outcome::Failed => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?; - write!(writer, "F")?; - } - Outcome::Ignored { .. 
} => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?; - write!(writer, "i")?; - } - } - writer.reset() - } - - fn write_long(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> { - match self { - Outcome::Ok => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?; - write!(writer, "ok")?; - } - Outcome::BenchOk => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?; - write!(writer, "benchmarked")?; - } - Outcome::Failed => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?; - write!(writer, "FAILED")?; - } - Outcome::Ignored { reason } => { - writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?; - write!(writer, "ignored")?; - if let Some(reason) = reason { - write!(writer, ", {reason}")?; - } - } - } - writer.reset() - } -} - -#[derive(serde_derive::Deserialize)] -#[serde(tag = "type", rename_all = "snake_case")] -enum Message { - Suite(SuiteMessage), - Test(TestMessage), - Bench(BenchOutcome), -} - -#[derive(serde_derive::Deserialize)] -#[serde(tag = "event", rename_all = "snake_case")] -enum SuiteMessage { - Ok(SuiteOutcome), - Failed(SuiteOutcome), - Started { test_count: usize }, -} - -#[derive(serde_derive::Deserialize)] -struct SuiteOutcome { - passed: usize, - failed: usize, - ignored: usize, - measured: usize, - filtered_out: usize, - exec_time: f64, -} - -#[derive(serde_derive::Deserialize)] -#[serde(tag = "event", rename_all = "snake_case")] -enum TestMessage { - Ok(TestOutcome), - Failed(TestOutcome), - Ignored(TestOutcome), - Timeout { name: String }, - Started, -} - -#[derive(serde_derive::Deserialize)] -struct BenchOutcome { - name: String, - median: u64, - deviation: u64, -} - -#[derive(serde_derive::Deserialize)] -struct TestOutcome { - name: String, - exec_time: Option, - stdout: Option, - message: Option, -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/run.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/run.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/run.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/run.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,297 +0,0 @@ -use std::path::PathBuf; -use std::process::Command; - -use clap_complete::shells; - -use crate::builder::{Builder, RunConfig, ShouldRun, Step}; -use crate::config::TargetSelection; -use crate::dist::distdir; -use crate::flags::get_completion; -use crate::test; -use crate::tool::{self, SourceType, Tool}; -use crate::util::output; -use crate::Mode; - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ExpandYamlAnchors; - -impl Step for ExpandYamlAnchors { - type Output = (); - - /// Runs the `expand-yaml_anchors` tool. - /// - /// This tool in `src/tools` reads the CI configuration files written in YAML and expands the - /// anchors in them, since GitHub Actions doesn't support them. 
- fn run(self, builder: &Builder<'_>) { - builder.info("Expanding YAML anchors in the GitHub Actions configuration"); - builder.run_delaying_failure( - &mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src), - ); - } - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/expand-yaml-anchors") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(ExpandYamlAnchors); - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct BuildManifest; - -impl Step for BuildManifest { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/build-manifest") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(BuildManifest); - } - - fn run(self, builder: &Builder<'_>) { - // This gets called by `promote-release` - // (https://github.com/rust-lang/promote-release). - let mut cmd = builder.tool_cmd(Tool::BuildManifest); - let sign = builder.config.dist_sign_folder.as_ref().unwrap_or_else(|| { - panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n") - }); - let addr = builder.config.dist_upload_addr.as_ref().unwrap_or_else(|| { - panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n") - }); - - let today = output(Command::new("date").arg("+%Y-%m-%d")); - - cmd.arg(sign); - cmd.arg(distdir(builder)); - cmd.arg(today.trim()); - cmd.arg(addr); - cmd.arg(&builder.config.channel); - - builder.create_dir(&distdir(builder)); - builder.run(&mut cmd); - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct BumpStage0; - -impl Step for BumpStage0 { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/bump-stage0") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(BumpStage0); - } - - fn run(self, builder: &Builder<'_>) -> Self::Output { - let mut cmd = builder.tool_cmd(Tool::BumpStage0); - cmd.args(builder.config.args()); - builder.run(&mut cmd); - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct ReplaceVersionPlaceholder; - -impl Step for ReplaceVersionPlaceholder { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/replace-version-placeholder") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(ReplaceVersionPlaceholder); - } - - fn run(self, builder: &Builder<'_>) -> Self::Output { - let mut cmd = builder.tool_cmd(Tool::ReplaceVersionPlaceholder); - cmd.arg(&builder.src); - builder.run(&mut cmd); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Miri { - stage: u32, - host: TargetSelection, - target: TargetSelection, -} - -impl Step for Miri { - type Output = (); - const ONLY_HOSTS: bool = false; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/miri") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Miri { - stage: run.builder.top_stage, - host: run.build_triple(), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) { - let stage = self.stage; - let host = self.host; - let target = self.target; - let compiler = builder.compiler(stage, host); - - let miri = builder - .ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); - let miri_sysroot = test::Miri::build_miri_sysroot(builder, compiler, &miri, target); - - // # Run 
miri. - // Running it via `cargo run` as that figures out the right dylib path. - // add_rustc_lib_path does not add the path that contains librustc_driver-<...>.so. - let mut miri = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - host, - "run", - "src/tools/miri", - SourceType::InTree, - &[], - ); - miri.add_rustc_lib_path(builder, compiler); - // Forward arguments. - miri.arg("--").arg("--target").arg(target.rustc_target_arg()); - miri.args(builder.config.args()); - - // miri tests need to know about the stage sysroot - miri.env("MIRI_SYSROOT", &miri_sysroot); - - let mut miri = Command::from(miri); - builder.run(&mut miri); - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct CollectLicenseMetadata; - -impl Step for CollectLicenseMetadata { - type Output = PathBuf; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/collect-license-metadata") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(CollectLicenseMetadata); - } - - fn run(self, builder: &Builder<'_>) -> Self::Output { - let Some(reuse) = &builder.config.reuse else { - panic!("REUSE is required to collect the license metadata"); - }; - - // Temporary location, it will be moved to src/etc once it's accurate. - let dest = builder.out.join("license-metadata.json"); - - let mut cmd = builder.tool_cmd(Tool::CollectLicenseMetadata); - cmd.env("REUSE_EXE", reuse); - cmd.env("DEST", &dest); - builder.run(&mut cmd); - - dest - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct GenerateCopyright; - -impl Step for GenerateCopyright { - type Output = PathBuf; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/generate-copyright") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(GenerateCopyright); - } - - fn run(self, builder: &Builder<'_>) -> Self::Output { - let license_metadata = builder.ensure(CollectLicenseMetadata); - - // Temporary location, it will be moved to the proper one once it's accurate. - let dest = builder.out.join("COPYRIGHT.md"); - - let mut cmd = builder.tool_cmd(Tool::GenerateCopyright); - cmd.env("LICENSE_METADATA", &license_metadata); - cmd.env("DEST", &dest); - builder.run(&mut cmd); - - dest - } -} - -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] -pub struct GenerateWindowsSys; - -impl Step for GenerateWindowsSys { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/generate-windows-sys") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(GenerateWindowsSys); - } - - fn run(self, builder: &Builder<'_>) { - let mut cmd = builder.tool_cmd(Tool::GenerateWindowsSys); - cmd.arg(&builder.src); - builder.run(&mut cmd); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct GenerateCompletions; - -impl Step for GenerateCompletions { - type Output = (); - - /// Uses `clap_complete` to generate shell completions. 
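As a point of reference for the completions step below, clap_complete's generator API writes a completion script for a clap command into any Write sink. A minimal sketch — illustrative only, assuming the clap and clap_complete crates and a hypothetical one-flag command, not the real x.py definition — might be:

use clap::{Arg, Command};
use clap_complete::{generate, shells};

fn main() {
    // Build a toy command-line definition and emit its Bash completion script to a buffer.
    let mut cmd = Command::new("x.py").arg(Arg::new("stage").long("stage"));
    let mut buf: Vec<u8> = Vec::new();
    generate(shells::Bash, &mut cmd, "x.py", &mut buf);
    print!("{}", String::from_utf8(buf).unwrap());
}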
- fn run(self, builder: &Builder<'_>) { - // FIXME(clubby789): enable zsh when clap#4898 is fixed - let [bash, fish, powershell] = ["x.py.sh", "x.py.fish", "x.py.ps1"] - .map(|filename| builder.src.join("src/etc/completions").join(filename)); - if let Some(comp) = get_completion(shells::Bash, &bash) { - std::fs::write(&bash, comp).expect("writing bash completion"); - } - if let Some(comp) = get_completion(shells::Fish, &fish) { - std::fs::write(&fish, comp).expect("writing fish completion"); - } - if let Some(comp) = get_completion(shells::PowerShell, &powershell) { - std::fs::write(&powershell, comp).expect("writing powershell completion"); - } - } - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("generate-completions") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(GenerateCompletions); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/sanity.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/sanity.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/sanity.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/sanity.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,267 +0,0 @@ -//! Sanity checking performed by rustbuild before actually executing anything. -//! -//! This module contains the implementation of ensuring that the build -//! environment looks reasonable before progressing. This will verify that -//! various programs like git and python exist, along with ensuring that all C -//! compilers for cross-compiling are found. -//! -//! In theory if we get past this phase it's a bug if a build fails, but in -//! practice that's likely not true! - -use std::collections::HashMap; -use std::env; -use std::ffi::{OsStr, OsString}; -use std::fs; -use std::path::PathBuf; -use std::process::Command; - -use crate::cache::INTERNER; -use crate::config::Target; -use crate::util::output; -use crate::Build; - -pub struct Finder { - cache: HashMap>, - path: OsString, -} - -impl Finder { - pub fn new() -> Self { - Self { cache: HashMap::new(), path: env::var_os("PATH").unwrap_or_default() } - } - - pub fn maybe_have>(&mut self, cmd: S) -> Option { - let cmd: OsString = cmd.into(); - let path = &self.path; - self.cache - .entry(cmd.clone()) - .or_insert_with(|| { - for path in env::split_paths(path) { - let target = path.join(&cmd); - let mut cmd_exe = cmd.clone(); - cmd_exe.push(".exe"); - - if target.is_file() // some/path/git - || path.join(&cmd_exe).exists() // some/path/git.exe - || target.join(&cmd_exe).exists() - // some/path/git/git.exe - { - return Some(target); - } - } - None - }) - .clone() - } - - pub fn must_have>(&mut self, cmd: S) -> PathBuf { - self.maybe_have(&cmd).unwrap_or_else(|| { - panic!("\n\ncouldn't find required command: {:?}\n\n", cmd.as_ref()); - }) - } -} - -pub fn check(build: &mut Build) { - let skip_target_sanity = - env::var_os("BOOTSTRAP_SKIP_TARGET_SANITY").is_some_and(|s| s == "1" || s == "true"); - - let path = env::var_os("PATH").unwrap_or_default(); - // On Windows, quotes are invalid characters for filename paths, and if - // one is present as part of the PATH then that can lead to the system - // being unable to identify the files properly. See - // https://github.com/rust-lang/rust/issues/34959 for more details. - if cfg!(windows) && path.to_string_lossy().contains('\"') { - panic!("PATH contains invalid character '\"'"); - } - - let mut cmd_finder = Finder::new(); - // If we've got a git directory we're gonna need git to update - // submodules and learn about various other aspects. 
- if build.rust_info().is_managed_git_subrepository() { - cmd_finder.must_have("git"); - } - - // We need cmake, but only if we're actually building LLVM or sanitizers. - let building_llvm = build.config.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) - && build - .hosts - .iter() - .map(|host| { - build - .config - .target_config - .get(host) - .map(|config| config.llvm_config.is_none()) - .unwrap_or(true) - }) - .any(|build_llvm_ourselves| build_llvm_ourselves); - - let need_cmake = building_llvm || build.config.any_sanitizers_enabled(); - if need_cmake && cmd_finder.maybe_have("cmake").is_none() { - eprintln!( - " -Couldn't find required command: cmake - -You should install cmake, or set `download-ci-llvm = true` in the -`[llvm]` section of `config.toml` to download LLVM rather -than building it. -" - ); - crate::exit!(1); - } - - build.config.python = build - .config - .python - .take() - .map(|p| cmd_finder.must_have(p)) - .or_else(|| env::var_os("BOOTSTRAP_PYTHON").map(PathBuf::from)) // set by bootstrap.py - .or_else(|| cmd_finder.maybe_have("python")) - .or_else(|| cmd_finder.maybe_have("python3")) - .or_else(|| cmd_finder.maybe_have("python2")); - - build.config.nodejs = build - .config - .nodejs - .take() - .map(|p| cmd_finder.must_have(p)) - .or_else(|| cmd_finder.maybe_have("node")) - .or_else(|| cmd_finder.maybe_have("nodejs")); - - build.config.npm = build - .config - .npm - .take() - .map(|p| cmd_finder.must_have(p)) - .or_else(|| cmd_finder.maybe_have("npm")); - - build.config.gdb = build - .config - .gdb - .take() - .map(|p| cmd_finder.must_have(p)) - .or_else(|| cmd_finder.maybe_have("gdb")); - - build.config.reuse = build - .config - .reuse - .take() - .map(|p| cmd_finder.must_have(p)) - .or_else(|| cmd_finder.maybe_have("reuse")); - - // We're gonna build some custom C code here and there, host triples - // also build some C++ shims for LLVM so we need a C++ compiler. - for target in &build.targets { - // On emscripten we don't actually need the C compiler to just - // build the target artifacts, only for testing. For the sake - // of easier bot configuration, just skip detection. - if target.contains("emscripten") { - continue; - } - - // We don't use a C compiler on wasm32 - if target.contains("wasm32") { - continue; - } - - // Some environments don't want or need these tools, such as when testing Miri. - // FIXME: it would be better to refactor this code to split necessary setup from pure sanity - // checks, and have a regular flag for skipping the latter. Also see - // . 
- if skip_target_sanity { - continue; - } - - if !build.config.dry_run() { - cmd_finder.must_have(build.cc(*target)); - if let Some(ar) = build.ar(*target) { - cmd_finder.must_have(ar); - } - } - } - - for host in &build.hosts { - if !build.config.dry_run() { - cmd_finder.must_have(build.cxx(*host).unwrap()); - } - } - - if build.config.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) { - // Externally configured LLVM requires FileCheck to exist - let filecheck = build.llvm_filecheck(build.build); - if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests { - panic!("FileCheck executable {filecheck:?} does not exist"); - } - } - - for target in &build.targets { - build - .config - .target_config - .entry(*target) - .or_insert_with(|| Target::from_triple(&target.triple)); - - if (target.contains("-none-") || target.contains("nvptx")) - && build.no_std(*target) == Some(false) - { - panic!("All the *-none-* and nvptx* targets are no-std targets") - } - - // Some environments don't want or need these tools, such as when testing Miri. - // FIXME: it would be better to refactor this code to split necessary setup from pure sanity - // checks, and have a regular flag for skipping the latter. Also see - // . - if skip_target_sanity { - continue; - } - - // Make sure musl-root is valid. - if target.contains("musl") && !target.contains("unikraft") { - // If this is a native target (host is also musl) and no musl-root is given, - // fall back to the system toolchain in /usr before giving up - if build.musl_root(*target).is_none() && build.config.build == *target { - let target = build.config.target_config.entry(*target).or_default(); - target.musl_root = Some("/usr".into()); - } - match build.musl_libdir(*target) { - Some(libdir) => { - if fs::metadata(libdir.join("libc.a")).is_err() { - panic!("couldn't find libc.a in musl libdir: {}", libdir.display()); - } - } - None => panic!( - "when targeting MUSL either the rust.musl-root \ - option or the target.$TARGET.musl-root option must \ - be specified in config.toml" - ), - } - } - - if need_cmake && target.contains("msvc") { - // There are three builds of cmake on windows: MSVC, MinGW, and - // Cygwin. The Cygwin build does not have generators for Visual - // Studio, so detect that here and error. - let out = output(Command::new("cmake").arg("--help")); - if !out.contains("Visual Studio") { - panic!( - " -cmake does not support Visual Studio generators. - -This is likely due to it being an msys/cygwin build of cmake, -rather than the required windows version, built using MinGW -or Visual Studio. 
- -If you are building under msys2 try installing the mingw-w64-x86_64-cmake -package instead of cmake: - -$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake -" - ); - } - } - } - - if let Some(ref s) = build.config.ccache { - cmd_finder.must_have(s); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/setup/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/setup/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/setup/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/setup/tests.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -use super::{RUST_ANALYZER_SETTINGS, SETTINGS_HASHES}; -use sha2::Digest; - -#[test] -fn check_matching_settings_hash() { - let mut hasher = sha2::Sha256::new(); - hasher.update(&RUST_ANALYZER_SETTINGS); - let hash = hex::encode(hasher.finalize().as_slice()); - assert_eq!( - &hash, - SETTINGS_HASHES.last().unwrap(), - "Update `SETTINGS_HASHES` with the new hash of `src/etc/rust_analyzer_settings.json`" - ); -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/setup.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/setup.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/setup.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/setup.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,599 +0,0 @@ -use crate::builder::{Builder, RunConfig, ShouldRun, Step}; -use crate::Config; -use crate::{t, VERSION}; -use sha2::Digest; -use std::env::consts::EXE_SUFFIX; -use std::fmt::Write as _; -use std::fs::File; -use std::io::Write; -use std::path::{Path, PathBuf, MAIN_SEPARATOR}; -use std::process::Command; -use std::str::FromStr; -use std::{fmt, fs, io}; - -#[cfg(test)] -mod tests; - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub enum Profile { - Compiler, - Codegen, - Library, - Tools, - Dist, - None, -} - -/// A list of historical hashes of `src/etc/rust_analyzer_settings.json`. -/// New entries should be appended whenever this is updated so we can detect -/// outdated vs. user-modified settings files. -static SETTINGS_HASHES: &[&str] = &[ - "ea67e259dedf60d4429b6c349a564ffcd1563cf41c920a856d1f5b16b4701ac8", - "56e7bf011c71c5d81e0bf42e84938111847a810eee69d906bba494ea90b51922", - "af1b5efe196aed007577899db9dae15d6dbc923d6fa42fa0934e68617ba9bbe0", - "3468fea433c25fff60be6b71e8a215a732a7b1268b6a83bf10d024344e140541", - "47d227f424bf889b0d899b9cc992d5695e1b78c406e183cd78eafefbe5488923", - "b526bd58d0262dd4dda2bff5bc5515b705fb668a46235ace3e057f807963a11a", -]; -static RUST_ANALYZER_SETTINGS: &str = include_str!("../etc/rust_analyzer_settings.json"); - -impl Profile { - fn include_path(&self, src_path: &Path) -> PathBuf { - PathBuf::from(format!("{}/src/bootstrap/defaults/config.{}.toml", src_path.display(), self)) - } - - pub fn all() -> impl Iterator { - use Profile::*; - // N.B. these are ordered by how they are displayed, not alphabetically - [Library, Compiler, Codegen, Tools, Dist, None].iter().copied() - } - - pub fn purpose(&self) -> String { - use Profile::*; - match self { - Library => "Contribute to the standard library", - Compiler => "Contribute to the compiler itself", - Codegen => "Contribute to the compiler, and also modify LLVM or codegen", - Tools => "Contribute to tools which depend on the compiler, but do not modify it directly (e.g. 
rustdoc, clippy, miri)", - Dist => "Install Rust from source", - None => "Do not modify `config.toml`" - } - .to_string() - } - - pub fn all_for_help(indent: &str) -> String { - let mut out = String::new(); - for choice in Profile::all() { - writeln!(&mut out, "{}{}: {}", indent, choice, choice.purpose()).unwrap(); - } - out - } - - pub fn as_str(&self) -> &'static str { - match self { - Profile::Compiler => "compiler", - Profile::Codegen => "codegen", - Profile::Library => "library", - Profile::Tools => "tools", - Profile::Dist => "dist", - Profile::None => "none", - } - } -} - -impl FromStr for Profile { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "lib" | "library" => Ok(Profile::Library), - "compiler" => Ok(Profile::Compiler), - "llvm" | "codegen" => Ok(Profile::Codegen), - "maintainer" | "dist" | "user" => Ok(Profile::Dist), - "tools" | "tool" | "rustdoc" | "clippy" | "miri" | "rustfmt" | "rls" => { - Ok(Profile::Tools) - } - "none" => Ok(Profile::None), - _ => Err(format!("unknown profile: '{s}'")), - } - } -} - -impl fmt::Display for Profile { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -impl Step for Profile { - type Output = (); - const DEFAULT: bool = true; - - fn should_run(mut run: ShouldRun<'_>) -> ShouldRun<'_> { - for choice in Profile::all() { - run = run.alias(choice.as_str()); - } - run - } - - fn make_run(run: RunConfig<'_>) { - if run.builder.config.dry_run() { - return; - } - - // for Profile, `run.paths` will have 1 and only 1 element - // this is because we only accept at most 1 path from user input. - // If user calls `x.py setup` without arguments, the interactive TUI - // will guide user to provide one. - let profile = if run.paths.len() > 1 { - // HACK: `builder` runs this step with all paths if no path was passed. 
- t!(interactive_path()) - } else { - run.paths - .first() - .unwrap() - .assert_single_path() - .path - .as_path() - .as_os_str() - .to_str() - .unwrap() - .parse() - .unwrap() - }; - - run.builder.ensure(profile); - } - - fn run(self, builder: &Builder<'_>) { - setup(&builder.build.config, self) - } -} - -pub fn setup(config: &Config, profile: Profile) { - let suggestions: &[&str] = match profile { - Profile::Codegen | Profile::Compiler | Profile::None => &["check", "build", "test"], - Profile::Tools => &[ - "check", - "build", - "test tests/rustdoc*", - "test src/tools/clippy", - "test src/tools/miri", - "test src/tools/rustfmt", - ], - Profile::Library => &["check", "build", "test library/std", "doc"], - Profile::Dist => &["dist", "build"], - }; - - println!(); - - println!("To get started, try one of the following commands:"); - for cmd in suggestions { - println!("- `x.py {cmd}`"); - } - - if profile != Profile::Dist { - println!( - "For more suggestions, see https://rustc-dev-guide.rust-lang.org/building/suggested.html" - ); - } - - if profile == Profile::Tools { - eprintln!(); - eprintln!( - "note: the `tools` profile sets up the `stage2` toolchain (use \ - `rustup toolchain link 'name' host/build/stage2` to use rustc)" - ) - } - - let path = &config.config.clone().unwrap_or(PathBuf::from("config.toml")); - setup_config_toml(path, profile, config); -} - -fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) { - if profile == Profile::None { - return; - } - if path.exists() { - eprintln!(); - eprintln!( - "error: you asked `x.py` to setup a new config file, but one already exists at `{}`", - path.display() - ); - eprintln!("help: try adding `profile = \"{}\"` at the top of {}", profile, path.display()); - eprintln!( - "note: this will use the configuration in {}", - profile.include_path(&config.src).display() - ); - crate::exit!(1); - } - - let settings = format!( - "# Includes one of the default files in src/bootstrap/defaults\n\ - profile = \"{profile}\"\n\ - changelog-seen = {VERSION}\n" - ); - - t!(fs::write(path, settings)); - - let include_path = profile.include_path(&config.src); - println!("`x.py` will now use the configuration at {}", include_path.display()); -} - -/// Creates a toolchain link for stage1 using `rustup` -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Link; -impl Step for Link { - type Output = (); - const DEFAULT: bool = true; - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("link") - } - fn make_run(run: RunConfig<'_>) { - if run.builder.config.dry_run() { - return; - } - if let [cmd] = &run.paths[..] 
{ - if cmd.assert_single_path().path.as_path().as_os_str() == "link" { - run.builder.ensure(Link); - } - } - } - fn run(self, builder: &Builder<'_>) -> Self::Output { - let config = &builder.config; - if config.dry_run() { - return; - } - let stage_path = - ["build", config.build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string()); - - if !rustup_installed() { - eprintln!("`rustup` is not installed; cannot link `stage1` toolchain"); - } else if stage_dir_exists(&stage_path[..]) && !config.dry_run() { - attempt_toolchain_link(&stage_path[..]); - } - } -} - -fn rustup_installed() -> bool { - Command::new("rustup") - .arg("--version") - .stdout(std::process::Stdio::null()) - .output() - .map_or(false, |output| output.status.success()) -} - -fn stage_dir_exists(stage_path: &str) -> bool { - match fs::create_dir(&stage_path) { - Ok(_) => true, - Err(_) => Path::new(&stage_path).exists(), - } -} - -fn attempt_toolchain_link(stage_path: &str) { - if toolchain_is_linked() { - return; - } - - if !ensure_stage1_toolchain_placeholder_exists(stage_path) { - eprintln!( - "Failed to create a template for stage 1 toolchain or confirm that it already exists" - ); - return; - } - - if try_link_toolchain(&stage_path) { - println!( - "Added `stage1` rustup toolchain; try `cargo +stage1 build` on a separate rust project to run a newly-built toolchain" - ); - } else { - eprintln!("`rustup` failed to link stage 1 build to `stage1` toolchain"); - eprintln!( - "To manually link stage 1 build to `stage1` toolchain, run:\n - `rustup toolchain link stage1 {}`", - &stage_path - ); - } -} - -fn toolchain_is_linked() -> bool { - match Command::new("rustup") - .args(&["toolchain", "list"]) - .stdout(std::process::Stdio::piped()) - .output() - { - Ok(toolchain_list) => { - if !String::from_utf8_lossy(&toolchain_list.stdout).contains("stage1") { - return false; - } - // The toolchain has already been linked. - println!( - "`stage1` toolchain already linked; not attempting to link `stage1` toolchain" - ); - } - Err(_) => { - // In this case, we don't know if the `stage1` toolchain has been linked; - // but `rustup` failed, so let's not go any further. - println!( - "`rustup` failed to list current toolchains; not attempting to link `stage1` toolchain" - ); - } - } - true -} - -fn try_link_toolchain(stage_path: &str) -> bool { - Command::new("rustup") - .stdout(std::process::Stdio::null()) - .args(&["toolchain", "link", "stage1", &stage_path]) - .output() - .map_or(false, |output| output.status.success()) -} - -fn ensure_stage1_toolchain_placeholder_exists(stage_path: &str) -> bool { - let pathbuf = PathBuf::from(stage_path); - - if fs::create_dir_all(pathbuf.join("lib")).is_err() { - return false; - }; - - let pathbuf = pathbuf.join("bin"); - if fs::create_dir_all(&pathbuf).is_err() { - return false; - }; - - let pathbuf = pathbuf.join(format!("rustc{EXE_SUFFIX}")); - - if pathbuf.exists() { - return true; - } - - // Take care not to overwrite the file - let result = File::options().append(true).create(true).open(&pathbuf); - if result.is_err() { - return false; - } - - return true; -} - -// Used to get the path for `Subcommand::Setup` -pub fn interactive_path() -> io::Result { - fn abbrev_all() -> impl Iterator { - ('a'..) - .zip(1..) 
- .map(|(letter, number)| (letter.to_string(), number.to_string())) - .zip(Profile::all()) - } - - fn parse_with_abbrev(input: &str) -> Result { - let input = input.trim().to_lowercase(); - for ((letter, number), profile) in abbrev_all() { - if input == letter || input == number { - return Ok(profile); - } - } - input.parse() - } - - println!("Welcome to the Rust project! What do you want to do with x.py?"); - for ((letter, _), profile) in abbrev_all() { - println!("{}) {}: {}", letter, profile, profile.purpose()); - } - let template = loop { - print!( - "Please choose one ({}): ", - abbrev_all().map(|((l, _), _)| l).collect::>().join("/") - ); - io::stdout().flush()?; - let mut input = String::new(); - io::stdin().read_line(&mut input)?; - if input.is_empty() { - eprintln!("EOF on stdin, when expecting answer to question. Giving up."); - crate::exit!(1); - } - break match parse_with_abbrev(&input) { - Ok(profile) => profile, - Err(err) => { - eprintln!("error: {err}"); - eprintln!("note: press Ctrl+C to exit"); - continue; - } - }; - }; - Ok(template) -} - -#[derive(PartialEq)] -enum PromptResult { - Yes, // y/Y/yes - No, // n/N/no - Print, // p/P/print -} - -/// Prompt a user for a answer, looping until they enter an accepted input or nothing -fn prompt_user(prompt: &str) -> io::Result> { - let mut input = String::new(); - loop { - print!("{prompt} "); - io::stdout().flush()?; - input.clear(); - io::stdin().read_line(&mut input)?; - match input.trim().to_lowercase().as_str() { - "y" | "yes" => return Ok(Some(PromptResult::Yes)), - "n" | "no" => return Ok(Some(PromptResult::No)), - "p" | "print" => return Ok(Some(PromptResult::Print)), - "" => return Ok(None), - _ => { - eprintln!("error: unrecognized option '{}'", input.trim()); - eprintln!("note: press Ctrl+C to exit"); - } - }; - } -} - -/// Installs `src/etc/pre-push.sh` as a Git hook -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Hook; - -impl Step for Hook { - type Output = (); - const DEFAULT: bool = true; - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("hook") - } - fn make_run(run: RunConfig<'_>) { - if run.builder.config.dry_run() { - return; - } - if let [cmd] = &run.paths[..] { - if cmd.assert_single_path().path.as_path().as_os_str() == "hook" { - run.builder.ensure(Hook); - } - } - } - fn run(self, builder: &Builder<'_>) -> Self::Output { - let config = &builder.config; - if config.dry_run() { - return; - } - t!(install_git_hook_maybe(&config)); - } -} - -// install a git hook to automatically run tidy, if they want -fn install_git_hook_maybe(config: &Config) -> io::Result<()> { - let git = t!(config.git().args(&["rev-parse", "--git-common-dir"]).output().map(|output| { - assert!(output.status.success(), "failed to run `git`"); - PathBuf::from(t!(String::from_utf8(output.stdout)).trim()) - })); - let dst = git.join("hooks").join("pre-push"); - if dst.exists() { - // The git hook has already been set up, or the user already has a custom hook. - return Ok(()); - } - - println!( - "\nRust's CI will automatically fail if it doesn't pass `tidy`, the internal tool for ensuring code quality. -If you'd like, x.py can install a git hook for you that will automatically run `test tidy` before -pushing your code to ensure your code is up to par. If you decide later that this behavior is -undesirable, simply delete the `pre-push` file from .git/hooks." - ); - - if prompt_user("Would you like to install the git hook?: [y/N]")? 
!= Some(PromptResult::Yes) { - println!("Ok, skipping installation!"); - return Ok(()); - } - let src = config.src.join("src").join("etc").join("pre-push.sh"); - match fs::hard_link(src, &dst) { - Err(e) => { - eprintln!( - "error: could not create hook {}: do you already have the git hook installed?\n{}", - dst.display(), - e - ); - return Err(e); - } - Ok(_) => println!("Linked `src/etc/pre-push.sh` to `.git/hooks/pre-push`"), - }; - Ok(()) -} - -/// Sets up or displays `src/etc/rust_analyzer_settings.json` -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct Vscode; - -impl Step for Vscode { - type Output = (); - const DEFAULT: bool = true; - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("vscode") - } - fn make_run(run: RunConfig<'_>) { - if run.builder.config.dry_run() { - return; - } - if let [cmd] = &run.paths[..] { - if cmd.assert_single_path().path.as_path().as_os_str() == "vscode" { - run.builder.ensure(Vscode); - } - } - } - fn run(self, builder: &Builder<'_>) -> Self::Output { - let config = &builder.config; - if config.dry_run() { - return; - } - t!(create_vscode_settings_maybe(&config)); - } -} - -/// Create a `.vscode/settings.json` file for rustc development, or just print it -fn create_vscode_settings_maybe(config: &Config) -> io::Result<()> { - let (current_hash, historical_hashes) = SETTINGS_HASHES.split_last().unwrap(); - let vscode_settings = config.src.join(".vscode").join("settings.json"); - // If None, no settings.json exists - // If Some(true), is a previous version of settings.json - // If Some(false), is not a previous version (i.e. user modified) - // If it's up to date we can just skip this - let mut mismatched_settings = None; - if let Ok(current) = fs::read_to_string(&vscode_settings) { - let mut hasher = sha2::Sha256::new(); - hasher.update(¤t); - let hash = hex::encode(hasher.finalize().as_slice()); - if hash == *current_hash { - return Ok(()); - } else if historical_hashes.contains(&hash.as_str()) { - mismatched_settings = Some(true); - } else { - mismatched_settings = Some(false); - } - } - println!( - "\nx.py can automatically install the recommended `.vscode/settings.json` file for rustc development" - ); - match mismatched_settings { - Some(true) => eprintln!( - "warning: existing `.vscode/settings.json` is out of date, x.py will update it" - ), - Some(false) => eprintln!( - "warning: existing `.vscode/settings.json` has been modified by user, x.py will back it up and replace it" - ), - _ => (), - } - let should_create = match prompt_user( - "Would you like to create/update `settings.json`, or only print suggested settings?: [y/p/N]", - )? 
{ - Some(PromptResult::Yes) => true, - Some(PromptResult::Print) => false, - _ => { - println!("Ok, skipping settings!"); - return Ok(()); - } - }; - if should_create { - let path = config.src.join(".vscode"); - if !path.exists() { - fs::create_dir(&path)?; - } - let verb = match mismatched_settings { - // exists but outdated, we can replace this - Some(true) => "Updated", - // exists but user modified, back it up - Some(false) => { - // exists and is not current version or outdated, so back it up - let mut backup = vscode_settings.clone(); - backup.set_extension("json.bak"); - eprintln!("warning: copying `settings.json` to `settings.json.bak`"); - fs::copy(&vscode_settings, &backup)?; - "Updated" - } - _ => "Created", - }; - fs::write(&vscode_settings, &RUST_ANALYZER_SETTINGS)?; - println!("{verb} `.vscode/settings.json`"); - } else { - println!("\n{RUST_ANALYZER_SETTINGS}"); - } - Ok(()) -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/main.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/main.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/main.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/main.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,143 @@ +//! rustbuild, the Rust build system +//! +//! This is the entry point for the build system used to compile the `rustc` +//! compiler. Lots of documentation can be found in the `README.md` file in the +//! parent directory, and otherwise documentation can be found throughout the `build` +//! directory in each respective module. + +#[cfg(all(any(unix, windows), not(target_os = "solaris")))] +use std::io::Write; +#[cfg(all(any(unix, windows), not(target_os = "solaris")))] +use std::process; +use std::{env, fs}; + +#[cfg(all(any(unix, windows), not(target_os = "solaris")))] +use bootstrap::t; +use bootstrap::{find_recent_config_change_ids, Build, Config, Subcommand, CONFIG_CHANGE_HISTORY}; + +fn main() { + let args = env::args().skip(1).collect::>(); + let config = Config::parse(&args); + + #[cfg(all(any(unix, windows), not(target_os = "solaris")))] + let mut build_lock; + #[cfg(all(any(unix, windows), not(target_os = "solaris")))] + let _build_lock_guard; + #[cfg(all(any(unix, windows), not(target_os = "solaris")))] + // Display PID of process holding the lock + // PID will be stored in a lock file + { + let path = config.out.join("lock"); + let pid = match fs::read_to_string(&path) { + Ok(contents) => contents, + Err(_) => String::new(), + }; + + build_lock = + fd_lock::RwLock::new(t!(fs::OpenOptions::new().write(true).create(true).open(&path))); + _build_lock_guard = match build_lock.try_write() { + Ok(mut lock) => { + t!(lock.write(&process::id().to_string().as_ref())); + lock + } + err => { + drop(err); + println!("WARNING: build directory locked by process {pid}, waiting for lock"); + let mut lock = t!(build_lock.write()); + t!(lock.write(&process::id().to_string().as_ref())); + lock + } + }; + } + + #[cfg(any(not(any(unix, windows)), target_os = "solaris"))] + println!("WARNING: file locking not supported for target, not locking build directory"); + + // check_version warnings are not printed during setup + let changelog_suggestion = + if matches!(config.cmd, Subcommand::Setup { .. }) { None } else { check_version(&config) }; + + // NOTE: Since `./configure` generates a `config.toml`, distro maintainers will see the + // changelog warning, not the `x.py setup` message. 
+ let suggest_setup = config.config.is_none() && !matches!(config.cmd, Subcommand::Setup { .. }); + if suggest_setup { + println!("WARNING: you have not made a `config.toml`"); + println!( + "HELP: consider running `./x.py setup` or copying `config.example.toml` by running \ + `cp config.example.toml config.toml`" + ); + } else if let Some(suggestion) = &changelog_suggestion { + println!("{suggestion}"); + } + + let pre_commit = config.src.join(".git").join("hooks").join("pre-commit"); + Build::new(config).build(); + + if suggest_setup { + println!("WARNING: you have not made a `config.toml`"); + println!( + "HELP: consider running `./x.py setup` or copying `config.example.toml` by running \ + `cp config.example.toml config.toml`" + ); + } else if let Some(suggestion) = &changelog_suggestion { + println!("{suggestion}"); + } + + // Give a warning if the pre-commit script is in pre-commit and not pre-push. + // HACK: Since the commit script uses hard links, we can't actually tell if it was installed by x.py setup or not. + // We could see if it's identical to src/etc/pre-push.sh, but pre-push may have been modified in the meantime. + // Instead, look for this comment, which is almost certainly not in any custom hook. + if fs::read_to_string(pre_commit).map_or(false, |contents| { + contents.contains("https://github.com/rust-lang/rust/issues/77620#issuecomment-705144570") + }) { + println!( + "WARNING: You have the pre-push script installed to .git/hooks/pre-commit. \ + Consider moving it to .git/hooks/pre-push instead, which runs less often." + ); + } + + if suggest_setup || changelog_suggestion.is_some() { + println!("NOTE: this message was printed twice to make it more likely to be seen"); + } +} + +fn check_version(config: &Config) -> Option { + let mut msg = String::new(); + + if config.changelog_seen.is_some() { + msg.push_str("WARNING: The use of `changelog-seen` is deprecated. Please refer to `change-id` option in `config.example.toml` instead.\n"); + } + + let latest_config_id = CONFIG_CHANGE_HISTORY.last().unwrap(); + if let Some(id) = config.change_id { + if &id == latest_config_id { + return None; + } + + let change_links: Vec = find_recent_config_change_ids(id) + .iter() + .map(|id| format!("https://github.com/rust-lang/rust/pull/{id}")) + .collect(); + if !change_links.is_empty() { + msg.push_str("WARNING: there have been changes to x.py since you last updated.\n"); + msg.push_str("To see more detail about these changes, visit the following PRs:\n"); + + for link in change_links { + msg.push_str(&format!(" - {link}\n")); + } + + msg.push_str("WARNING: there have been changes to x.py since you last updated.\n"); + + msg.push_str("NOTE: to silence this warning, "); + msg.push_str(&format!( + "update `config.toml` to use `change-id = {latest_config_id}` instead" + )); + } + } else { + msg.push_str("WARNING: The `change-id` is missing in the `config.toml`. This means that you will not be able to track the major changes made to the bootstrap configurations.\n"); + msg.push_str("NOTE: to silence this warning, "); + msg.push_str(&format!("add `change-id = {latest_config_id}` at the top of `config.toml`")); + }; + + Some(msg) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/rustc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/rustc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/rustc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/rustc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,406 @@ +//! 
Shim which is passed to Cargo as "rustc" when running the bootstrap. +//! +//! This shim will take care of some various tasks that our build process +//! requires that Cargo can't quite do through normal configuration: +//! +//! 1. When compiling build scripts and build dependencies, we need a guaranteed +//! full standard library available. The only compiler which actually has +//! this is the snapshot, so we detect this situation and always compile with +//! the snapshot compiler. +//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling +//! (and this slightly differs based on a whether we're using a snapshot or +//! not), so we do that all here. +//! +//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of +//! switching compilers for the bootstrap and for build scripts will probably +//! never get replaced. + +use std::env; +use std::path::PathBuf; +use std::process::{Child, Command}; +use std::time::Instant; + +use dylib_util::{dylib_path, dylib_path_var}; + +#[path = "../utils/bin_helpers.rs"] +mod bin_helpers; + +#[path = "../utils/dylib.rs"] +mod dylib_util; + +fn main() { + let args = env::args_os().skip(1).collect::>(); + let arg = |name| args.windows(2).find(|args| args[0] == name).and_then(|args| args[1].to_str()); + + // We don't use the stage in this shim, but let's parse it to make sure that we're invoked + // by bootstrap, or that we provide a helpful error message if not. + bin_helpers::parse_rustc_stage(); + let verbose = bin_helpers::parse_rustc_verbose(); + + // Detect whether or not we're a build script depending on whether --target + // is passed (a bit janky...) + let target = arg("--target"); + let version = args.iter().find(|w| &**w == "-vV"); + + // Use a different compiler for build scripts, since there may not yet be a + // libstd for the real compiler to use. However, if Cargo is attempting to + // determine the version of the compiler, the real compiler needs to be + // used. Currently, these two states are differentiated based on whether + // --target and -vV is/isn't passed. + let (rustc, libdir) = if target.is_none() && version.is_none() { + ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR") + } else { + ("RUSTC_REAL", "RUSTC_LIBDIR") + }; + + let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set"); + let on_fail = env::var_os("RUSTC_ON_FAIL").map(Command::new); + + let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc)); + let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir)); + let mut dylib_path = dylib_path(); + dylib_path.insert(0, PathBuf::from(&libdir)); + + let mut cmd = Command::new(rustc); + cmd.args(&args).env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); + + // Get the name of the crate we're compiling, if any. 
+ let crate_name = arg("--crate-name"); + + if let Some(crate_name) = crate_name { + if let Some(target) = env::var_os("RUSTC_TIME") { + if target == "all" + || target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name) + { + cmd.arg("-Ztime-passes"); + } + } + } + + // Print backtrace in case of ICE + if env::var("RUSTC_BACKTRACE_ON_ICE").is_ok() && env::var("RUST_BACKTRACE").is_err() { + cmd.env("RUST_BACKTRACE", "1"); + } + + if let Ok(lint_flags) = env::var("RUSTC_LINT_FLAGS") { + cmd.args(lint_flags.split_whitespace()); + } + + if target.is_some() { + // The stage0 compiler has a special sysroot distinct from what we + // actually downloaded, so we just always pass the `--sysroot` option, + // unless one is already set. + if !args.iter().any(|arg| arg == "--sysroot") { + cmd.arg("--sysroot").arg(&sysroot); + } + + // If we're compiling specifically the `panic_abort` crate then we pass + // the `-C panic=abort` option. Note that we do not do this for any + // other crate intentionally as this is the only crate for now that we + // ship with panic=abort. + // + // This... is a bit of a hack how we detect this. Ideally this + // information should be encoded in the crate I guess? Would likely + // require an RFC amendment to RFC 1513, however. + if crate_name == Some("panic_abort") { + cmd.arg("-C").arg("panic=abort"); + } + + // `-Ztls-model=initial-exec` must not be applied to proc-macros, see + // issue https://github.com/rust-lang/rust/issues/100530 + if env::var("RUSTC_TLS_MODEL_INITIAL_EXEC").is_ok() + && arg("--crate-type") != Some("proc-macro") + && !matches!(crate_name, Some("proc_macro2" | "quote" | "syn" | "synstructure")) + { + cmd.arg("-Ztls-model=initial-exec"); + } + } else { + // Find any host flags that were passed by bootstrap. + // The flags are stored in a RUSTC_HOST_FLAGS variable, separated by spaces. + if let Ok(flags) = std::env::var("RUSTC_HOST_FLAGS") { + for flag in flags.split(' ') { + cmd.arg(flag); + } + } + } + + if let Ok(map) = env::var("RUSTC_DEBUGINFO_MAP") { + cmd.arg("--remap-path-prefix").arg(&map); + } + // The remap flags for Cargo registry sources need to be passed after the remapping for the + // Rust source code directory, to handle cases when $CARGO_HOME is inside the source directory. + if let Ok(maps) = env::var("RUSTC_CARGO_REGISTRY_SRC_TO_REMAP") { + for map in maps.split('\t') { + cmd.arg("--remap-path-prefix").arg(map); + } + } + + // Force all crates compiled by this compiler to (a) be unstable and (b) + // allow the `rustc_private` feature to link to other unstable crates + // also in the sysroot. We also do this for host crates, since those + // may be proc macros, in which case we might ship them. + if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() { + cmd.arg("-Z").arg("force-unstable-if-unmarked"); + } + + // allow-features is handled from within this rustc wrapper because of + // issues with build scripts. Some packages use build scripts to + // dynamically detect if certain nightly features are available. + // There are different ways this causes problems: + // + // * rustix runs `rustc` on a small test program to see if the feature is + // available (and sets a `cfg` if it is). It does not honor + // CARGO_ENCODED_RUSTFLAGS. + // * proc-macro2 detects if `rustc -vV` says "nighty" or "dev" and enables + // nightly features. It will scan CARGO_ENCODED_RUSTFLAGS for + // -Zallow-features. Unfortunately CARGO_ENCODED_RUSTFLAGS is not set + // for build-dependencies when --target is used. 
+ // + // The issues above means we can't just use RUSTFLAGS, and we can't use + // `cargo -Zallow-features=…`. Passing it through here ensures that it + // always gets set. Unfortunately that also means we need to enable more + // features than we really want (like those for proc-macro2), but there + // isn't much of a way around it. + // + // I think it is unfortunate that build scripts are doing this at all, + // since changes to nightly features can cause crates to break even if the + // user didn't want or care about the use of the nightly features. I think + // nightly features should be opt-in only. Unfortunately the dynamic + // checks are now too wide spread that we just need to deal with it. + // + // If you want to try to remove this, I suggest working with the crate + // authors to remove the dynamic checking. Another option is to pursue + // https://github.com/rust-lang/cargo/issues/11244 and + // https://github.com/rust-lang/cargo/issues/4423, which will likely be + // very difficult, but could help expose -Zallow-features into build + // scripts so they could try to honor them. + if let Ok(allow_features) = env::var("RUSTC_ALLOW_FEATURES") { + cmd.arg(format!("-Zallow-features={allow_features}")); + } + + if let Ok(flags) = env::var("MAGIC_EXTRA_RUSTFLAGS") { + for flag in flags.split(' ') { + cmd.arg(flag); + } + } + + let is_test = args.iter().any(|a| a == "--test"); + if verbose > 2 { + let rust_env_vars = + env::vars().filter(|(k, _)| k.starts_with("RUST") || k.starts_with("CARGO")); + let prefix = if is_test { "[RUSTC-SHIM] rustc --test" } else { "[RUSTC-SHIM] rustc" }; + let prefix = match crate_name { + Some(crate_name) => format!("{prefix} {crate_name}"), + None => prefix.to_string(), + }; + for (i, (k, v)) in rust_env_vars.enumerate() { + eprintln!("{prefix} env[{i}]: {k:?}={v:?}"); + } + eprintln!("{} working directory: {}", prefix, env::current_dir().unwrap().display()); + eprintln!( + "{} command: {:?}={:?} {:?}", + prefix, + dylib_path_var(), + env::join_paths(&dylib_path).unwrap(), + cmd, + ); + eprintln!("{prefix} sysroot: {sysroot:?}"); + eprintln!("{prefix} libdir: {libdir:?}"); + } + + if env::var_os("RUSTC_BOLT_LINK_FLAGS").is_some() { + if let Some("rustc_driver") = crate_name { + cmd.arg("-Clink-args=-Wl,-q"); + } + } + + let start = Instant::now(); + let (child, status) = { + let errmsg = format!("\nFailed to run:\n{cmd:?}\n-------------"); + let mut child = cmd.spawn().expect(&errmsg); + let status = child.wait().expect(&errmsg); + (child, status) + }; + + if env::var_os("RUSTC_PRINT_STEP_TIMINGS").is_some() + || env::var_os("RUSTC_PRINT_STEP_RUSAGE").is_some() + { + if let Some(crate_name) = crate_name { + let dur = start.elapsed(); + // If the user requested resource usage data, then + // include that in addition to the timing output. + let rusage_data = + env::var_os("RUSTC_PRINT_STEP_RUSAGE").and_then(|_| format_rusage_data(child)); + eprintln!( + "[RUSTC-TIMING] {} test:{} {}.{:03}{}{}", + crate_name, + is_test, + dur.as_secs(), + dur.subsec_millis(), + if rusage_data.is_some() { " " } else { "" }, + rusage_data.unwrap_or(String::new()), + ); + } + } + + if status.success() { + std::process::exit(0); + // NOTE: everything below here is unreachable. do not put code that + // should run on success, after this block. 
+    }
+    if verbose > 0 {
+        println!("\nDid not run successfully: {status}\n{cmd:?}\n-------------");
+    }
+
+    if let Some(mut on_fail) = on_fail {
+        on_fail.status().expect("Could not run the on_fail command");
+    }
+
+    // Preserve the exit code. In case of signal, exit with 0xfe since it's
+    // awkward to preserve this status in a cross-platform way.
+    match status.code() {
+        Some(i) => std::process::exit(i),
+        None => {
+            eprintln!("rustc exited with {status}");
+            std::process::exit(0xfe);
+        }
+    }
+}
+
+#[cfg(all(not(unix), not(windows)))]
+// In the future we can add this for more platforms
+fn format_rusage_data(_child: Child) -> Option<String> {
+    None
+}
+
+#[cfg(windows)]
+fn format_rusage_data(child: Child) -> Option<String> {
+    use std::os::windows::io::AsRawHandle;
+
+    use windows::{
+        Win32::Foundation::HANDLE,
+        Win32::System::ProcessStatus::{
+            K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS, PROCESS_MEMORY_COUNTERS_EX,
+        },
+        Win32::System::Threading::GetProcessTimes,
+        Win32::System::Time::FileTimeToSystemTime,
+    };
+
+    let handle = HANDLE(child.as_raw_handle() as isize);
+
+    let mut user_filetime = Default::default();
+    let mut user_time = Default::default();
+    let mut kernel_filetime = Default::default();
+    let mut kernel_time = Default::default();
+    let mut memory_counters = PROCESS_MEMORY_COUNTERS::default();
+
+    unsafe {
+        GetProcessTimes(
+            handle,
+            &mut Default::default(),
+            &mut Default::default(),
+            &mut kernel_filetime,
+            &mut user_filetime,
+        )
+    }
+    .ok()?;
+    unsafe { FileTimeToSystemTime(&user_filetime, &mut user_time) }.ok()?;
+    unsafe { FileTimeToSystemTime(&kernel_filetime, &mut kernel_time) }.ok()?;
+
+    // Unlike on Linux with RUSAGE_CHILDREN, this will only return memory information for the process
+    // with the given handle and none of that process's children.
+    unsafe {
+        K32GetProcessMemoryInfo(
+            handle,
+            &mut memory_counters,
+            std::mem::size_of::<PROCESS_MEMORY_COUNTERS_EX>() as u32,
+        )
+    }
+    .ok()
+    .ok()?;
+
+    // Guide on interpreting these numbers:
+    // https://docs.microsoft.com/en-us/windows/win32/psapi/process-memory-usage-information
+    let peak_working_set = memory_counters.PeakWorkingSetSize / 1024;
+    let peak_page_file = memory_counters.PeakPagefileUsage / 1024;
+    let peak_paged_pool = memory_counters.QuotaPeakPagedPoolUsage / 1024;
+    let peak_nonpaged_pool = memory_counters.QuotaPeakNonPagedPoolUsage / 1024;
+    Some(format!(
+        "user: {USER_SEC}.{USER_USEC:03} \
+         sys: {SYS_SEC}.{SYS_USEC:03} \
+         peak working set (kb): {PEAK_WORKING_SET} \
+         peak page file usage (kb): {PEAK_PAGE_FILE} \
+         peak paged pool usage (kb): {PEAK_PAGED_POOL} \
+         peak non-paged pool usage (kb): {PEAK_NONPAGED_POOL} \
+         page faults: {PAGE_FAULTS}",
+        USER_SEC = user_time.wSecond + (user_time.wMinute * 60),
+        USER_USEC = user_time.wMilliseconds,
+        SYS_SEC = kernel_time.wSecond + (kernel_time.wMinute * 60),
+        SYS_USEC = kernel_time.wMilliseconds,
+        PEAK_WORKING_SET = peak_working_set,
+        PEAK_PAGE_FILE = peak_page_file,
+        PEAK_PAGED_POOL = peak_paged_pool,
+        PEAK_NONPAGED_POOL = peak_nonpaged_pool,
+        PAGE_FAULTS = memory_counters.PageFaultCount,
+    ))
+}
+
+#[cfg(unix)]
+/// Tries to build a string with human readable data for several of the rusage
+/// fields. Note that we are focusing mainly on data that we believe to be
+/// supplied on Linux (the `rusage` struct has other fields in it but they are
+/// currently unsupported by Linux).
+fn format_rusage_data(_child: Child) -> Option<String> {
+    let rusage: libc::rusage = unsafe {
+        let mut recv = std::mem::zeroed();
+        // -1 is RUSAGE_CHILDREN, which means to get the rusage for all children
+        // (and grandchildren, etc) processes that have respectively terminated
+        // and been waited for.
+        let retval = libc::getrusage(-1, &mut recv);
+        if retval != 0 {
+            return None;
+        }
+        recv
+    };
+    // Mac OS X reports the maxrss in bytes, not kb.
+    let divisor = if env::consts::OS == "macos" { 1024 } else { 1 };
+    let maxrss = (rusage.ru_maxrss + (divisor - 1)) / divisor;
+
+    let mut init_str = format!(
+        "user: {USER_SEC}.{USER_USEC:03} \
+         sys: {SYS_SEC}.{SYS_USEC:03} \
+         max rss (kb): {MAXRSS}",
+        USER_SEC = rusage.ru_utime.tv_sec,
+        USER_USEC = rusage.ru_utime.tv_usec,
+        SYS_SEC = rusage.ru_stime.tv_sec,
+        SYS_USEC = rusage.ru_stime.tv_usec,
+        MAXRSS = maxrss
+    );
+
+    // The remaining rusage stats vary in platform support. So we treat
+    // uniformly zero values in each category as "not worth printing", since it
+    // either means no events of that type occurred, or that the platform
+    // does not support it.
+
+    let minflt = rusage.ru_minflt;
+    let majflt = rusage.ru_majflt;
+    if minflt != 0 || majflt != 0 {
+        init_str.push_str(&format!(" page reclaims: {minflt} page faults: {majflt}"));
+    }
+
+    let inblock = rusage.ru_inblock;
+    let oublock = rusage.ru_oublock;
+    if inblock != 0 || oublock != 0 {
+        init_str.push_str(&format!(" fs block inputs: {inblock} fs block outputs: {oublock}"));
+    }
+
+    let nvcsw = rusage.ru_nvcsw;
+    let nivcsw = rusage.ru_nivcsw;
+    if nvcsw != 0 || nivcsw != 0 {
+        init_str.push_str(&format!(
+            " voluntary ctxt switches: {nvcsw} involuntary ctxt switches: {nivcsw}"
+        ));
+    }
+
+    return Some(init_str);
+}
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/rustdoc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/rustdoc.rs
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/rustdoc.rs 1970-01-01 00:00:00.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/rustdoc.rs 2023-12-21 16:55:28.000000000 +0000
@@ -0,0 +1,92 @@
+//! Shim which is passed to Cargo as "rustdoc" when running the bootstrap.
+//!
+//! See comments in `src/bootstrap/rustc.rs` for more information.
+
+use std::env;
+use std::ffi::OsString;
+use std::path::PathBuf;
+use std::process::Command;
+
+use dylib_util::{dylib_path, dylib_path_var};
+
+#[path = "../utils/bin_helpers.rs"]
+mod bin_helpers;
+
+#[path = "../utils/dylib.rs"]
+mod dylib_util;
+
+fn main() {
+    let args = env::args_os().skip(1).collect::<Vec<_>>();
+
+    let stage = bin_helpers::parse_rustc_stage();
+    let verbose = bin_helpers::parse_rustc_verbose();
+
+    let rustdoc = env::var_os("RUSTDOC_REAL").expect("RUSTDOC_REAL was not set");
+    let libdir = env::var_os("RUSTDOC_LIBDIR").expect("RUSTDOC_LIBDIR was not set");
+    let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
+
+    // Detect whether or not we're a build script depending on whether --target
+    // is passed (a bit janky...)
+    let target = args.windows(2).find(|w| &*w[0] == "--target").and_then(|w| w[1].to_str());
+
+    let mut dylib_path = dylib_path();
+    dylib_path.insert(0, PathBuf::from(libdir.clone()));
+
+    let mut cmd = Command::new(rustdoc);
+
+    if target.is_some() {
+        // The stage0 compiler has a special sysroot distinct from what we
+        // actually downloaded, so we just always pass the `--sysroot` option,
+        // unless one is already set.
+ if !args.iter().any(|arg| arg == "--sysroot") { + cmd.arg("--sysroot").arg(&sysroot); + } + } + + cmd.args(&args); + cmd.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); + + // Force all crates compiled by this compiler to (a) be unstable and (b) + // allow the `rustc_private` feature to link to other unstable crates + // also in the sysroot. + if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() { + cmd.arg("-Z").arg("force-unstable-if-unmarked"); + } + if let Some(linker) = env::var_os("RUSTDOC_LINKER") { + let mut arg = OsString::from("-Clinker="); + arg.push(&linker); + cmd.arg(arg); + } + if let Ok(no_threads) = env::var("RUSTDOC_LLD_NO_THREADS") { + cmd.arg("-Clink-arg=-fuse-ld=lld"); + cmd.arg(format!("-Clink-arg=-Wl,{no_threads}")); + } + // Cargo doesn't pass RUSTDOCFLAGS to proc_macros: + // https://github.com/rust-lang/cargo/issues/4423 + // Thus, if we are on stage 0, we explicitly set `--cfg=bootstrap`. + // We also declare that the flag is expected, which we need to do to not + // get warnings about it being unexpected. + if stage == "0" { + cmd.arg("--cfg=bootstrap"); + } + cmd.arg("-Zunstable-options"); + // #[cfg(bootstrap)] + cmd.arg("--check-cfg=values(bootstrap)"); + // cmd.arg("--check-cfg=cfg(bootstrap)"); + + if verbose > 1 { + eprintln!( + "rustdoc command: {:?}={:?} {:?}", + dylib_path_var(), + env::join_paths(&dylib_path).unwrap(), + cmd, + ); + eprintln!("sysroot: {sysroot:?}"); + eprintln!("libdir: {libdir:?}"); + } + + std::process::exit(match cmd.status() { + Ok(s) => s.code().unwrap_or(1), + Err(e) => panic!("\n\nfailed to run {cmd:?}: {e}\n\n"), + }) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/sccache-plus-cl.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/sccache-plus-cl.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/sccache-plus-cl.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/bin/sccache-plus-cl.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,38 @@ +use std::env; +use std::process::{self, Command}; + +fn main() { + let target = env::var("SCCACHE_TARGET").unwrap(); + // Locate the actual compiler that we're invoking + env::set_var("CC", env::var_os("SCCACHE_CC").unwrap()); + env::set_var("CXX", env::var_os("SCCACHE_CXX").unwrap()); + let mut cfg = cc::Build::new(); + cfg.cargo_metadata(false) + .out_dir("/") + .target(&target) + .host(&target) + .opt_level(0) + .warnings(false) + .debug(false); + let compiler = cfg.get_compiler(); + + // Invoke sccache with said compiler + let sccache_path = env::var_os("SCCACHE_PATH").unwrap(); + let mut cmd = Command::new(&sccache_path); + cmd.arg(compiler.path()); + for &(ref k, ref v) in compiler.env() { + cmd.env(k, v); + } + for arg in env::args().skip(1) { + cmd.arg(arg); + } + + if let Ok(s) = env::var("SCCACHE_EXTRA_ARGS") { + for s in s.split_whitespace() { + cmd.arg(s); + } + } + + let status = cmd.status().expect("failed to spawn"); + process::exit(status.code().unwrap_or(2)) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/check.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/check.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/check.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/check.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,544 @@ +//! Implementation of compiling the compiler and standard library, in "check"-based modes. 
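+
+// Illustrative sketch only (hypothetical helper, not used by the build): how the
+// lint-level options handled by `args()` below turn into clippy command-line
+// flags, i.e. each allowed/denied lint becomes a `-A…`/`-D…` string appended
+// after `--`.
+#[allow(dead_code)]
+fn sketch_clippy_lint_level_flags(allow: &[String], deny: &[String]) -> Vec<String> {
+    let mut flags = Vec::new();
+    flags.extend(allow.iter().map(|lint| format!("-A{lint}")));
+    flags.extend(deny.iter().map(|lint| format!("-D{lint}")));
+    // e.g. allow = ["clippy::collapsible_if"] yields "-Aclippy::collapsible_if"
+    flags
+}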
+ +use crate::core::build_steps::compile::{ + add_to_sysroot, run_cargo, rustc_cargo, rustc_cargo_env, std_cargo, +}; +use crate::core::build_steps::tool::{prepare_tool_cargo, SourceType}; +use crate::core::builder::{crate_description, Alias, Builder, Kind, RunConfig, ShouldRun, Step}; +use crate::core::config::TargetSelection; +use crate::utils::cache::Interned; +use crate::INTERNER; +use crate::{Compiler, Mode, Subcommand}; +use std::path::{Path, PathBuf}; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Std { + pub target: TargetSelection, + /// Whether to build only a subset of crates. + /// + /// This shouldn't be used from other steps; see the comment on [`compile::Rustc`]. + /// + /// [`compile::Rustc`]: crate::core::build_steps::compile::Rustc + crates: Interned>, +} + +/// Returns args for the subcommand itself (not for cargo) +fn args(builder: &Builder<'_>) -> Vec { + fn strings<'a>(arr: &'a [&str]) -> impl Iterator + 'a { + arr.iter().copied().map(String::from) + } + + if let Subcommand::Clippy { fix, allow, deny, warn, forbid, .. } = &builder.config.cmd { + // disable the most spammy clippy lints + let ignored_lints = vec![ + "many_single_char_names", // there are a lot in stdarch + "collapsible_if", + "type_complexity", + "missing_safety_doc", // almost 3K warnings + "too_many_arguments", + "needless_lifetimes", // people want to keep the lifetimes + "wrong_self_convention", + ]; + let mut args = vec![]; + if *fix { + #[rustfmt::skip] + args.extend(strings(&[ + "--fix", "-Zunstable-options", + // FIXME: currently, `--fix` gives an error while checking tests for libtest, + // possibly because libtest is not yet built in the sysroot. + // As a workaround, avoid checking tests and benches when passed --fix. + "--lib", "--bins", "--examples", + ])); + } + args.extend(strings(&["--", "--cap-lints", "warn"])); + args.extend(ignored_lints.iter().map(|lint| format!("-Aclippy::{}", lint))); + let mut clippy_lint_levels: Vec = Vec::new(); + allow.iter().for_each(|v| clippy_lint_levels.push(format!("-A{}", v))); + deny.iter().for_each(|v| clippy_lint_levels.push(format!("-D{}", v))); + warn.iter().for_each(|v| clippy_lint_levels.push(format!("-W{}", v))); + forbid.iter().for_each(|v| clippy_lint_levels.push(format!("-F{}", v))); + args.extend(clippy_lint_levels); + args.extend(builder.config.free_args.clone()); + args + } else { + builder.config.free_args.clone() + } +} + +fn cargo_subcommand(kind: Kind) -> &'static str { + match kind { + Kind::Check => "check", + Kind::Clippy => "clippy", + Kind::Fix => "fix", + _ => unreachable!(), + } +} + +impl Std { + pub fn new(target: TargetSelection) -> Self { + Self { target, crates: INTERNER.intern_list(vec![]) } + } +} + +impl Step for Std { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.crate_or_deps("sysroot").path("library") + } + + fn make_run(run: RunConfig<'_>) { + let crates = run.make_run_crates(Alias::Library); + run.builder.ensure(Std { target: run.target, crates }); + } + + fn run(self, builder: &Builder<'_>) { + builder.update_submodule(&Path::new("library").join("stdarch")); + + let target = self.target; + let compiler = builder.compiler(builder.top_stage, builder.config.build); + + let mut cargo = builder.cargo( + compiler, + Mode::Std, + SourceType::InTree, + target, + cargo_subcommand(builder.kind), + ); + std_cargo(builder, target, compiler.stage, &mut cargo); + if matches!(builder.config.cmd, Subcommand::Fix { .. 
}) { + // By default, cargo tries to fix all targets. Tell it not to fix tests until we've added `test` to the sysroot. + cargo.arg("--lib"); + } + + for krate in &*self.crates { + cargo.arg("-p").arg(krate); + } + + let _guard = builder.msg_check( + format_args!("library artifacts{}", crate_description(&self.crates)), + target, + ); + run_cargo( + builder, + cargo, + args(builder), + &libstd_stamp(builder, compiler, target), + vec![], + true, + false, + ); + + // We skip populating the sysroot in non-zero stage because that'll lead + // to rlib/rmeta conflicts if std gets built during this session. + if compiler.stage == 0 { + let libdir = builder.sysroot_libdir(compiler, target); + let hostdir = builder.sysroot_libdir(compiler, compiler.host); + add_to_sysroot(&builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target)); + } + drop(_guard); + + // don't run on std twice with x.py clippy + // don't check test dependencies if we haven't built libtest + if builder.kind == Kind::Clippy || !self.crates.iter().any(|krate| krate == "test") { + return; + } + + // Then run cargo again, once we've put the rmeta files for the library + // crates into the sysroot. This is needed because e.g., core's tests + // depend on `libtest` -- Cargo presumes it will exist, but it doesn't + // since we initialize with an empty sysroot. + // + // Currently only the "libtest" tree of crates does this. + let mut cargo = builder.cargo( + compiler, + Mode::Std, + SourceType::InTree, + target, + cargo_subcommand(builder.kind), + ); + + // If we're not in stage 0, tests and examples will fail to compile + // from `core` definitions being loaded from two different `libcore` + // .rmeta and .rlib files. + if compiler.stage == 0 { + cargo.arg("--all-targets"); + } + + std_cargo(builder, target, compiler.stage, &mut cargo); + + // Explicitly pass -p for all dependencies krates -- this will force cargo + // to also check the tests/benches/examples for these crates, rather + // than just the leaf crate. + for krate in &*self.crates { + cargo.arg("-p").arg(krate); + } + + let _guard = builder.msg_check("library test/bench/example targets", target); + run_cargo( + builder, + cargo, + args(builder), + &libstd_test_stamp(builder, compiler, target), + vec![], + true, + false, + ); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Rustc { + pub target: TargetSelection, + /// Whether to build only a subset of crates. + /// + /// This shouldn't be used from other steps; see the comment on [`compile::Rustc`]. + /// + /// [`compile::Rustc`]: crate::core::build_steps::compile::Rustc + crates: Interned>, +} + +impl Rustc { + pub fn new(target: TargetSelection, builder: &Builder<'_>) -> Self { + let crates = builder + .in_tree_crates("rustc-main", Some(target)) + .into_iter() + .map(|krate| krate.name.to_string()) + .collect(); + Self { target, crates: INTERNER.intern_list(crates) } + } +} + +impl Step for Rustc { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.crate_or_deps("rustc-main").path("compiler") + } + + fn make_run(run: RunConfig<'_>) { + let crates = run.make_run_crates(Alias::Compiler); + run.builder.ensure(Rustc { target: run.target, crates }); + } + + /// Builds the compiler. + /// + /// This will build the compiler for a particular stage of the build using + /// the `compiler` targeting the `target` architecture. The artifacts + /// created will also be linked into the sysroot directory. 
+ fn run(self, builder: &Builder<'_>) { + let compiler = builder.compiler(builder.top_stage, builder.config.build); + let target = self.target; + + if compiler.stage != 0 { + // If we're not in stage 0, then we won't have a std from the beta + // compiler around. That means we need to make sure there's one in + // the sysroot for the compiler to find. Otherwise, we're going to + // fail when building crates that need to generate code (e.g., build + // scripts and their dependencies). + builder.ensure(crate::core::build_steps::compile::Std::new(compiler, compiler.host)); + builder.ensure(crate::core::build_steps::compile::Std::new(compiler, target)); + } else { + builder.ensure(Std::new(target)); + } + + let mut cargo = builder.cargo( + compiler, + Mode::Rustc, + SourceType::InTree, + target, + cargo_subcommand(builder.kind), + ); + rustc_cargo(builder, &mut cargo, target, compiler.stage); + + // For ./x.py clippy, don't run with --all-targets because + // linting tests and benchmarks can produce very noisy results + if builder.kind != Kind::Clippy { + cargo.arg("--all-targets"); + } + + // Explicitly pass -p for all compiler crates -- this will force cargo + // to also check the tests/benches/examples for these crates, rather + // than just the leaf crate. + for krate in &*self.crates { + cargo.arg("-p").arg(krate); + } + + let _guard = builder.msg_check( + format_args!("compiler artifacts{}", crate_description(&self.crates)), + target, + ); + run_cargo( + builder, + cargo, + args(builder), + &librustc_stamp(builder, compiler, target), + vec![], + true, + false, + ); + + let libdir = builder.sysroot_libdir(compiler, target); + let hostdir = builder.sysroot_libdir(compiler, compiler.host); + add_to_sysroot(&builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target)); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct CodegenBackend { + pub target: TargetSelection, + pub backend: Interned, +} + +impl Step for CodegenBackend { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.paths(&["compiler/rustc_codegen_cranelift", "compiler/rustc_codegen_gcc"]) + } + + fn make_run(run: RunConfig<'_>) { + for &backend in &[INTERNER.intern_str("cranelift"), INTERNER.intern_str("gcc")] { + run.builder.ensure(CodegenBackend { target: run.target, backend }); + } + } + + fn run(self, builder: &Builder<'_>) { + // FIXME: remove once https://github.com/rust-lang/rust/issues/112393 is resolved + if builder.build.config.vendor && &self.backend == "gcc" { + println!("Skipping checking of `rustc_codegen_gcc` with vendoring enabled."); + return; + } + + let compiler = builder.compiler(builder.top_stage, builder.config.build); + let target = self.target; + let backend = self.backend; + + builder.ensure(Rustc::new(target, builder)); + + let mut cargo = builder.cargo( + compiler, + Mode::Codegen, + SourceType::InTree, + target, + cargo_subcommand(builder.kind), + ); + cargo + .arg("--manifest-path") + .arg(builder.src.join(format!("compiler/rustc_codegen_{backend}/Cargo.toml"))); + rustc_cargo_env(builder, &mut cargo, target, compiler.stage); + + let _guard = builder.msg_check(&backend, target); + + run_cargo( + builder, + cargo, + args(builder), + &codegen_backend_stamp(builder, compiler, target, backend), + vec![], + true, + false, + ); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct RustAnalyzer { + pub target: TargetSelection, +} + +impl Step for RustAnalyzer { + 
type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.path("src/tools/rust-analyzer").default_condition( + builder + .config + .tools + .as_ref() + .map_or(true, |tools| tools.iter().any(|tool| tool == "rust-analyzer")), + ) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustAnalyzer { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + let compiler = builder.compiler(builder.top_stage, builder.config.build); + let target = self.target; + + builder.ensure(Std::new(target)); + + let mut cargo = prepare_tool_cargo( + builder, + compiler, + Mode::ToolStd, + target, + cargo_subcommand(builder.kind), + "src/tools/rust-analyzer", + SourceType::InTree, + &["rust-analyzer/in-rust-tree".to_owned()], + ); + + cargo.allow_features(crate::core::build_steps::tool::RustAnalyzer::ALLOW_FEATURES); + + // For ./x.py clippy, don't check those targets because + // linting tests and benchmarks can produce very noisy results + if builder.kind != Kind::Clippy { + // can't use `--all-targets` because `--examples` doesn't work well + cargo.arg("--bins"); + cargo.arg("--tests"); + cargo.arg("--benches"); + } + + let _guard = builder.msg_check("rust-analyzer artifacts", target); + run_cargo( + builder, + cargo, + args(builder), + &stamp(builder, compiler, target), + vec![], + true, + false, + ); + + /// Cargo's output path in a given stage, compiled by a particular + /// compiler for the specified target. + fn stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf { + builder.cargo_out(compiler, Mode::ToolStd, target).join(".rust-analyzer-check.stamp") + } + } +} + +macro_rules! tool_check_step { + ($name:ident, $path:literal, $($alias:literal, )* $source_type:path $(, $default:literal )?) 
=> { + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub struct $name { + pub target: TargetSelection, + } + + impl Step for $name { + type Output = (); + const ONLY_HOSTS: bool = true; + // don't ever check out-of-tree tools by default, they'll fail when toolstate is broken + const DEFAULT: bool = matches!($source_type, SourceType::InTree) $( && $default )?; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.paths(&[ $path, $($alias),* ]) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure($name { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + let compiler = builder.compiler(builder.top_stage, builder.config.build); + let target = self.target; + + builder.ensure(Rustc::new(target, builder)); + + let mut cargo = prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + target, + cargo_subcommand(builder.kind), + $path, + $source_type, + &[], + ); + + // For ./x.py clippy, don't run with --all-targets because + // linting tests and benchmarks can produce very noisy results + if builder.kind != Kind::Clippy { + cargo.arg("--all-targets"); + } + + // Enable internal lints for clippy and rustdoc + // NOTE: this doesn't enable lints for any other tools unless they explicitly add `#![warn(rustc::internal)]` + // See https://github.com/rust-lang/rust/pull/80573#issuecomment-754010776 + cargo.rustflag("-Zunstable-options"); + let _guard = builder.msg_check(&concat!(stringify!($name), " artifacts").to_lowercase(), target); + run_cargo( + builder, + cargo, + args(builder), + &stamp(builder, compiler, target), + vec![], + true, + false, + ); + + /// Cargo's output path in a given stage, compiled by a particular + /// compiler for the specified target. + fn stamp( + builder: &Builder<'_>, + compiler: Compiler, + target: TargetSelection, + ) -> PathBuf { + builder + .cargo_out(compiler, Mode::ToolRustc, target) + .join(format!(".{}-check.stamp", stringify!($name).to_lowercase())) + } + } + } + }; +} + +tool_check_step!(Rustdoc, "src/tools/rustdoc", "src/librustdoc", SourceType::InTree); +// Clippy, miri and Rustfmt are hybrids. They are external tools, but use a git subtree instead +// of a submodule. Since the SourceType only drives the deny-warnings +// behavior, treat it as in-tree so that any new warnings in clippy will be +// rejected. +tool_check_step!(Clippy, "src/tools/clippy", SourceType::InTree); +tool_check_step!(Miri, "src/tools/miri", SourceType::InTree); +tool_check_step!(CargoMiri, "src/tools/miri/cargo-miri", SourceType::InTree); +tool_check_step!(Rls, "src/tools/rls", SourceType::InTree); +tool_check_step!(Rustfmt, "src/tools/rustfmt", SourceType::InTree); +tool_check_step!(MiroptTestTools, "src/tools/miropt-test-tools", SourceType::InTree); + +tool_check_step!(Bootstrap, "src/bootstrap", SourceType::InTree, false); + +/// Cargo's output path for the standard library in a given stage, compiled +/// by a particular compiler for the specified target. +fn libstd_stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf { + builder.cargo_out(compiler, Mode::Std, target).join(".libstd-check.stamp") +} + +/// Cargo's output path for the standard library in a given stage, compiled +/// by a particular compiler for the specified target. 
+fn libstd_test_stamp( + builder: &Builder<'_>, + compiler: Compiler, + target: TargetSelection, +) -> PathBuf { + builder.cargo_out(compiler, Mode::Std, target).join(".libstd-check-test.stamp") +} + +/// Cargo's output path for librustc in a given stage, compiled by a particular +/// compiler for the specified target. +fn librustc_stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf { + builder.cargo_out(compiler, Mode::Rustc, target).join(".librustc-check.stamp") +} + +/// Cargo's output path for librustc_codegen_llvm in a given stage, compiled by a particular +/// compiler for the specified target and backend. +fn codegen_backend_stamp( + builder: &Builder<'_>, + compiler: Compiler, + target: TargetSelection, + backend: Interned, +) -> PathBuf { + builder + .cargo_out(compiler, Mode::Codegen, target) + .join(format!(".librustc_codegen_{backend}-check.stamp")) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/clean.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/clean.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/clean.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/clean.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,242 @@ +//! Implementation of `make clean` in rustbuild. +//! +//! Responsible for cleaning out a build directory of all old and stale +//! artifacts to prepare for a fresh build. Currently doesn't remove the +//! `build/cache` directory (download cache) or the `build/$target/llvm` +//! directory unless the `--all` flag is present. + +use std::fs; +use std::io::{self, ErrorKind}; +use std::path::Path; + +use crate::core::builder::{crate_description, Builder, RunConfig, ShouldRun, Step}; +use crate::utils::cache::Interned; +use crate::utils::helpers::t; +use crate::{Build, Compiler, Mode, Subcommand}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct CleanAll {} + +impl Step for CleanAll { + const DEFAULT: bool = true; + type Output = (); + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(CleanAll {}) + } + + fn run(self, builder: &Builder<'_>) -> Self::Output { + let Subcommand::Clean { all, stage } = builder.config.cmd else { + unreachable!("wrong subcommand?") + }; + + if all && stage.is_some() { + panic!("--all and --stage can't be used at the same time for `x clean`"); + } + + clean(builder.build, all, stage) + } + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() // handled by DEFAULT + } +} + +macro_rules! clean_crate_tree { + ( $( $name:ident, $mode:path, $root_crate:literal);+ $(;)? 
) => { $( + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] + pub struct $name { + compiler: Compiler, + crates: Interned>, + } + + impl Step for $name { + type Output = (); + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let crates = run.builder.in_tree_crates($root_crate, None); + run.crates(crates) + } + + fn make_run(run: RunConfig<'_>) { + let builder = run.builder; + let compiler = builder.compiler(builder.top_stage, run.target); + builder.ensure(Self { crates: run.cargo_crates_in_set(), compiler }); + } + + fn run(self, builder: &Builder<'_>) -> Self::Output { + let compiler = self.compiler; + let target = compiler.host; + let mut cargo = builder.bare_cargo(compiler, $mode, target, "clean"); + + // Since https://github.com/rust-lang/rust/pull/111076 enables + // unstable cargo feature (`public-dependency`), we need to ensure + // that unstable features are enabled before reading libstd Cargo.toml. + cargo.env("RUSTC_BOOTSTRAP", "1"); + + for krate in &*self.crates { + cargo.arg("-p"); + cargo.arg(krate); + } + + builder.info(&format!( + "Cleaning{} stage{} {} artifacts ({} -> {})", + crate_description(&self.crates), compiler.stage, stringify!($name).to_lowercase(), &compiler.host, target, + )); + + // NOTE: doesn't use `run_cargo` because we don't want to save a stamp file, + // and doesn't use `stream_cargo` to avoid passing `--message-format` which `clean` doesn't accept. + builder.run(&mut cargo); + } + } + )+ } +} + +clean_crate_tree! { + Rustc, Mode::Rustc, "rustc-main"; + Std, Mode::Std, "sysroot"; +} + +fn clean(build: &Build, all: bool, stage: Option) { + if build.config.dry_run() { + return; + } + + rm_rf("tmp".as_ref()); + + // Clean the entire build directory + if all { + rm_rf(&build.out); + return; + } + + // Clean the target stage artifacts + if let Some(stage) = stage { + clean_specific_stage(build, stage); + return; + } + + // Follow the default behaviour + clean_default(build); +} + +fn clean_specific_stage(build: &Build, stage: u32) { + for host in &build.hosts { + let entries = match build.out.join(host.triple).read_dir() { + Ok(iter) => iter, + Err(_) => continue, + }; + + for entry in entries { + let entry = t!(entry); + let stage_prefix = format!("stage{}", stage); + + // if current entry is not related with the target stage, continue + if !entry.file_name().to_str().unwrap_or("").contains(&stage_prefix) { + continue; + } + + let path = t!(entry.path().canonicalize()); + rm_rf(&path); + } + } +} + +fn clean_default(build: &Build) { + rm_rf(&build.out.join("tmp")); + rm_rf(&build.out.join("dist")); + rm_rf(&build.out.join("rustfmt.stamp")); + + for host in &build.hosts { + let entries = match build.out.join(host.triple).read_dir() { + Ok(iter) => iter, + Err(_) => continue, + }; + + for entry in entries { + let entry = t!(entry); + if entry.file_name().to_str() == Some("llvm") { + continue; + } + let path = t!(entry.path().canonicalize()); + rm_rf(&path); + } + } +} + +fn rm_rf(path: &Path) { + match path.symlink_metadata() { + Err(e) => { + if e.kind() == ErrorKind::NotFound { + return; + } + panic!("failed to get metadata for file {}: {}", path.display(), e); + } + Ok(metadata) => { + if metadata.file_type().is_file() || metadata.file_type().is_symlink() { + do_op(path, "remove file", |p| { + fs::remove_file(p).or_else(|e| { + // Work around the fact that we cannot + // delete an executable while it runs on Windows. 
+ #[cfg(windows)] + if e.kind() == std::io::ErrorKind::PermissionDenied + && p.file_name().and_then(std::ffi::OsStr::to_str) + == Some("bootstrap.exe") + { + eprintln!("WARNING: failed to delete '{}'.", p.display()); + return Ok(()); + } + Err(e) + }) + }); + return; + } + + for file in t!(fs::read_dir(path)) { + rm_rf(&t!(file).path()); + } + do_op(path, "remove dir", |p| { + fs::remove_dir(p).or_else(|e| { + // Check for dir not empty on Windows + // FIXME: Once `ErrorKind::DirectoryNotEmpty` is stabilized, + // match on `e.kind()` instead. + #[cfg(windows)] + if e.raw_os_error() == Some(145) { + return Ok(()); + } + + Err(e) + }) + }); + } + }; +} + +fn do_op(path: &Path, desc: &str, mut f: F) +where + F: FnMut(&Path) -> io::Result<()>, +{ + match f(path) { + Ok(()) => {} + // On windows we can't remove a readonly file, and git will often clone files as readonly. + // As a result, we have some special logic to remove readonly files on windows. + // This is also the reason that we can't use things like fs::remove_dir_all(). + Err(ref e) if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied => { + let m = t!(path.symlink_metadata()); + let mut p = m.permissions(); + p.set_readonly(false); + t!(fs::set_permissions(path, p)); + f(path).unwrap_or_else(|e| { + // Delete symlinked directories on Windows + #[cfg(windows)] + if m.file_type().is_symlink() && path.is_dir() && fs::remove_dir(path).is_ok() { + return; + } + panic!("failed to {} {}: {}", desc, path.display(), e); + }); + } + Err(e) => { + panic!("failed to {} {}: {}", desc, path.display(), e); + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/compile.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/compile.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/compile.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/compile.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,2069 @@ +//! Implementation of compiling various phases of the compiler and standard +//! library. +//! +//! This module contains some of the real meat in the rustbuild build system +//! which is where Cargo is used to compile the standard library, libtest, and +//! the compiler. This module is also responsible for assembling the sysroot as it +//! goes along from the output of the previous stage. 
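+
+// Illustrative sketch only (hypothetical helper, not used by the build): how the
+// optional cargo feature string for the standard library is assembled in
+// `std_cargo` below; the feature names match the code further down, the inputs
+// here are simplified stand-ins for builder state.
+#[allow(dead_code)]
+fn sketch_std_feature_string(no_std_target: bool, have_compiler_rt: bool) -> String {
+    let mut features = String::new();
+    if no_std_target {
+        // no-std targets only get the memory intrinsics from compiler-builtins
+        features += " compiler-builtins-mem";
+    }
+    if have_compiler_rt {
+        // enables the optimized C implementations in compiler-builtins
+        features += " compiler-builtins-c";
+    }
+    features
+}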
+ +use std::borrow::Cow; +use std::collections::HashSet; +use std::env; +use std::ffi::OsStr; +use std::fs; +use std::io::prelude::*; +use std::io::BufReader; +use std::path::{Path, PathBuf}; +use std::process::{Command, Stdio}; +use std::str; + +use serde_derive::Deserialize; + +use crate::core::build_steps::dist; +use crate::core::build_steps::llvm; +use crate::core::build_steps::tool::SourceType; +use crate::core::builder::crate_description; +use crate::core::builder::Cargo; +use crate::core::builder::{Builder, Kind, PathSet, RunConfig, ShouldRun, Step, TaskPath}; +use crate::core::config::{DebuginfoLevel, LlvmLibunwind, RustcLto, TargetSelection}; +use crate::utils::cache::{Interned, INTERNER}; +use crate::utils::helpers::{ + exe, get_clang_cl_resource_dir, is_debug_info, is_dylib, output, symlink_dir, t, up_to_date, +}; +use crate::LLVM_TOOLS; +use crate::{CLang, Compiler, DependencyType, GitRepo, Mode}; +use filetime::FileTime; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Std { + pub target: TargetSelection, + pub compiler: Compiler, + /// Whether to build only a subset of crates in the standard library. + /// + /// This shouldn't be used from other steps; see the comment on [`Rustc`]. + crates: Interned>, + /// When using download-rustc, we need to use a new build of `std` for running unit tests of Std itself, + /// but we need to use the downloaded copy of std for linking to rustdoc. Allow this to be overriden by `builder.ensure` from other steps. + force_recompile: bool, + extra_rust_args: &'static [&'static str], +} + +impl Std { + pub fn new(compiler: Compiler, target: TargetSelection) -> Self { + Self { + target, + compiler, + crates: Default::default(), + force_recompile: false, + extra_rust_args: &[], + } + } + + pub fn force_recompile(compiler: Compiler, target: TargetSelection) -> Self { + Self { + target, + compiler, + crates: Default::default(), + force_recompile: true, + extra_rust_args: &[], + } + } + + pub fn new_with_extra_rust_args( + compiler: Compiler, + target: TargetSelection, + extra_rust_args: &'static [&'static str], + ) -> Self { + Self { + target, + compiler, + crates: Default::default(), + force_recompile: false, + extra_rust_args, + } + } +} + +impl Step for Std { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + // When downloading stage1, the standard library has already been copied to the sysroot, so + // there's no need to rebuild it. + let builder = run.builder; + run.crate_or_deps("sysroot") + .path("library") + .lazy_default_condition(Box::new(|| !builder.download_rustc())) + } + + fn make_run(run: RunConfig<'_>) { + // If the paths include "library", build the entire standard library. + let has_alias = + run.paths.iter().any(|set| set.assert_single_path().path.ends_with("library")); + let crates = if has_alias { Default::default() } else { run.cargo_crates_in_set() }; + + run.builder.ensure(Std { + compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()), + target: run.target, + crates, + force_recompile: false, + extra_rust_args: &[], + }); + } + + /// Builds the standard library. + /// + /// This will build the standard library for a particular stage of the build + /// using the `compiler` targeting the `target` architecture. The artifacts + /// created will also be linked into the sysroot directory. 
+ fn run(self, builder: &Builder<'_>) { + let target = self.target; + let compiler = self.compiler; + + // When using `download-rustc`, we already have artifacts for the host available. Don't + // recompile them. + if builder.download_rustc() && target == builder.build.build + // NOTE: the beta compiler may generate different artifacts than the downloaded compiler, so + // its artifacts can't be reused. + && compiler.stage != 0 + // This check is specific to testing std itself; see `test::Std` for more details. + && !self.force_recompile + { + cp_rustc_component_to_ci_sysroot( + builder, + compiler, + builder.config.ci_rust_std_contents(), + ); + return; + } + + if builder.config.keep_stage.contains(&compiler.stage) + || builder.config.keep_stage_std.contains(&compiler.stage) + { + builder.info("WARNING: Using a potentially old libstd. This may not behave well."); + + copy_third_party_objects(builder, &compiler, target); + copy_self_contained_objects(builder, &compiler, target); + + builder.ensure(StdLink::from_std(self, compiler)); + return; + } + + builder.update_submodule(&Path::new("library").join("stdarch")); + + // Profiler information requires LLVM's compiler-rt + if builder.config.profiler { + builder.update_submodule(&Path::new("src/llvm-project")); + } + + let mut target_deps = builder.ensure(StartupObjects { compiler, target }); + + let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); + if compiler_to_use != compiler { + builder.ensure(Std::new(compiler_to_use, target)); + let msg = if compiler_to_use.host == target { + format!( + "Uplifting library (stage{} -> stage{})", + compiler_to_use.stage, compiler.stage + ) + } else { + format!( + "Uplifting library (stage{}:{} -> stage{}:{})", + compiler_to_use.stage, compiler_to_use.host, compiler.stage, target + ) + }; + builder.info(&msg); + + // Even if we're not building std this stage, the new sysroot must + // still contain the third party objects needed by various targets. + copy_third_party_objects(builder, &compiler, target); + copy_self_contained_objects(builder, &compiler, target); + + builder.ensure(StdLink::from_std(self, compiler_to_use)); + return; + } + + target_deps.extend(copy_third_party_objects(builder, &compiler, target)); + target_deps.extend(copy_self_contained_objects(builder, &compiler, target)); + + // The LLD wrappers and `rust-lld` are self-contained linking components that can be + // necessary to link the stdlib on some targets. We'll also need to copy these binaries to + // the `stage0-sysroot` to ensure the linker is found when bootstrapping on such a target. + if compiler.stage == 0 && compiler.host == builder.config.build { + // We want to copy the host `bin` folder within the `rustlib` folder in the sysroot. 
+ let src_sysroot_bin = builder + .rustc_snapshot_sysroot() + .join("lib") + .join("rustlib") + .join(compiler.host.triple) + .join("bin"); + if src_sysroot_bin.exists() { + let target_sysroot_bin = + builder.sysroot_libdir(compiler, target).parent().unwrap().join("bin"); + t!(fs::create_dir_all(&target_sysroot_bin)); + builder.cp_r(&src_sysroot_bin, &target_sysroot_bin); + } + } + + let mut cargo = builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "build"); + std_cargo(builder, target, compiler.stage, &mut cargo); + for krate in &*self.crates { + cargo.arg("-p").arg(krate); + } + + // See src/bootstrap/synthetic_targets.rs + if target.is_synthetic() { + cargo.env("RUSTC_BOOTSTRAP_SYNTHETIC_TARGET", "1"); + } + for rustflag in self.extra_rust_args.into_iter() { + cargo.rustflag(rustflag); + } + + let _guard = builder.msg( + Kind::Build, + compiler.stage, + format_args!("library artifacts{}", crate_description(&self.crates)), + compiler.host, + target, + ); + run_cargo( + builder, + cargo, + vec![], + &libstd_stamp(builder, compiler, target), + target_deps, + false, + false, + ); + + builder.ensure(StdLink::from_std( + self, + builder.compiler(compiler.stage, builder.config.build), + )); + } +} + +fn copy_and_stamp( + builder: &Builder<'_>, + libdir: &Path, + sourcedir: &Path, + name: &str, + target_deps: &mut Vec<(PathBuf, DependencyType)>, + dependency_type: DependencyType, +) { + let target = libdir.join(name); + builder.copy(&sourcedir.join(name), &target); + + target_deps.push((target, dependency_type)); +} + +fn copy_llvm_libunwind(builder: &Builder<'_>, target: TargetSelection, libdir: &Path) -> PathBuf { + let libunwind_path = builder.ensure(llvm::Libunwind { target }); + let libunwind_source = libunwind_path.join("libunwind.a"); + let libunwind_target = libdir.join("libunwind.a"); + builder.copy(&libunwind_source, &libunwind_target); + libunwind_target +} + +/// Copies third party objects needed by various targets. +fn copy_third_party_objects( + builder: &Builder<'_>, + compiler: &Compiler, + target: TargetSelection, +) -> Vec<(PathBuf, DependencyType)> { + let mut target_deps = vec![]; + + if builder.config.sanitizers_enabled(target) && compiler.stage != 0 { + // The sanitizers are only copied in stage1 or above, + // to avoid creating dependency on LLVM. + target_deps.extend( + copy_sanitizers(builder, &compiler, target) + .into_iter() + .map(|d| (d, DependencyType::Target)), + ); + } + + if target == "x86_64-fortanix-unknown-sgx" + || builder.config.llvm_libunwind(target) == LlvmLibunwind::InTree + && (target.contains("linux") || target.contains("fuchsia")) + { + let libunwind_path = + copy_llvm_libunwind(builder, target, &builder.sysroot_libdir(*compiler, target)); + target_deps.push((libunwind_path, DependencyType::Target)); + } + + target_deps +} + +/// Copies third party objects needed by various targets for self-contained linkage. +fn copy_self_contained_objects( + builder: &Builder<'_>, + compiler: &Compiler, + target: TargetSelection, +) -> Vec<(PathBuf, DependencyType)> { + let libdir_self_contained = builder.sysroot_libdir(*compiler, target).join("self-contained"); + t!(fs::create_dir_all(&libdir_self_contained)); + let mut target_deps = vec![]; + + // Copies the libc and CRT objects. + // + // rustc historically provides a more self-contained installation for musl targets + // not requiring the presence of a native musl toolchain. For example, it can fall back + // to using gcc from a glibc-targeting toolchain for linking. 
+ // To do that we have to distribute musl startup objects as a part of Rust toolchain + // and link with them manually in the self-contained mode. + if target.contains("musl") && !target.contains("unikraft") { + let srcdir = builder.musl_libdir(target).unwrap_or_else(|| { + panic!("Target {:?} does not have a \"musl-libdir\" key", target.triple) + }); + for &obj in &["libc.a", "crt1.o", "Scrt1.o", "rcrt1.o", "crti.o", "crtn.o"] { + copy_and_stamp( + builder, + &libdir_self_contained, + &srcdir, + obj, + &mut target_deps, + DependencyType::TargetSelfContained, + ); + } + let crt_path = builder.ensure(llvm::CrtBeginEnd { target }); + for &obj in &["crtbegin.o", "crtbeginS.o", "crtend.o", "crtendS.o"] { + let src = crt_path.join(obj); + let target = libdir_self_contained.join(obj); + builder.copy(&src, &target); + target_deps.push((target, DependencyType::TargetSelfContained)); + } + + if !target.starts_with("s390x") { + let libunwind_path = copy_llvm_libunwind(builder, target, &libdir_self_contained); + target_deps.push((libunwind_path, DependencyType::TargetSelfContained)); + } + } else if target.contains("-wasi") { + let srcdir = builder + .wasi_root(target) + .unwrap_or_else(|| { + panic!("Target {:?} does not have a \"wasi-root\" key", target.triple) + }) + .join("lib") + .join(target.to_string().replace("-preview1", "")); + for &obj in &["libc.a", "crt1-command.o", "crt1-reactor.o"] { + copy_and_stamp( + builder, + &libdir_self_contained, + &srcdir, + obj, + &mut target_deps, + DependencyType::TargetSelfContained, + ); + } + } else if target.ends_with("windows-gnu") { + for obj in ["crt2.o", "dllcrt2.o"].iter() { + let src = compiler_file(builder, &builder.cc(target), target, CLang::C, obj); + let target = libdir_self_contained.join(obj); + builder.copy(&src, &target); + target_deps.push((target, DependencyType::TargetSelfContained)); + } + } + + target_deps +} + +/// Configure cargo to compile the standard library, adding appropriate env vars +/// and such. +pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, cargo: &mut Cargo) { + if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") { + cargo.env("MACOSX_DEPLOYMENT_TARGET", target); + } + + if let Some(path) = builder.config.profiler_path(target) { + cargo.env("LLVM_PROFILER_RT_LIB", path); + } + + // Determine if we're going to compile in optimized C intrinsics to + // the `compiler-builtins` crate. These intrinsics live in LLVM's + // `compiler-rt` repository, but our `src/llvm-project` submodule isn't + // always checked out, so we need to conditionally look for this. (e.g. if + // an external LLVM is used we skip the LLVM submodule checkout). + // + // Note that this shouldn't affect the correctness of `compiler-builtins`, + // but only its speed. Some intrinsics in C haven't been translated to Rust + // yet but that's pretty rare. Other intrinsics have optimized + // implementations in C which have only had slower versions ported to Rust, + // so we favor the C version where we can, but it's not critical. + // + // If `compiler-rt` is available ensure that the `c` feature of the + // `compiler-builtins` crate is enabled and it's configured to learn where + // `compiler-rt` is located. + let compiler_builtins_root = builder.src.join("src/llvm-project/compiler-rt"); + let compiler_builtins_c_feature = if compiler_builtins_root.exists() { + // Note that `libprofiler_builtins/build.rs` also computes this so if + // you're changing something here please also change that. 
+ cargo.env("RUST_COMPILER_RT_ROOT", &compiler_builtins_root); + " compiler-builtins-c" + } else { + "" + }; + + // `libtest` uses this to know whether or not to support + // `-Zunstable-options`. + if !builder.unstable_features() { + cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1"); + } + + let mut features = String::new(); + + if builder.no_std(target) == Some(true) { + features += " compiler-builtins-mem"; + if !target.starts_with("bpf") { + features.push_str(compiler_builtins_c_feature); + } + + // for no-std targets we only compile a few no_std crates + cargo + .args(&["-p", "alloc"]) + .arg("--manifest-path") + .arg(builder.src.join("library/alloc/Cargo.toml")) + .arg("--features") + .arg(features); + } else { + features += &builder.std_features(target); + features.push_str(compiler_builtins_c_feature); + + cargo + .arg("--features") + .arg(features) + .arg("--manifest-path") + .arg(builder.src.join("library/sysroot/Cargo.toml")); + + // Help the libc crate compile by assisting it in finding various + // sysroot native libraries. + if target.contains("musl") { + if let Some(p) = builder.musl_libdir(target) { + let root = format!("native={}", p.to_str().unwrap()); + cargo.rustflag("-L").rustflag(&root); + } + } + + if target.contains("-wasi") { + if let Some(p) = builder.wasi_root(target) { + let root = format!( + "native={}/lib/{}", + p.to_str().unwrap(), + target.to_string().replace("-preview1", "") + ); + cargo.rustflag("-L").rustflag(&root); + } + } + } + + // By default, rustc uses `-Cembed-bitcode=yes`, and Cargo overrides that + // with `-Cembed-bitcode=no` for non-LTO builds. However, libstd must be + // built with bitcode so that the produced rlibs can be used for both LTO + // builds (which use bitcode) and non-LTO builds (which use object code). + // So we override the override here! + // + // But we don't bother for the stage 0 compiler because it's never used + // with LTO. + if stage >= 1 { + cargo.rustflag("-Cembed-bitcode=yes"); + } + if builder.config.rust_lto == RustcLto::Off { + cargo.rustflag("-Clto=off"); + } + + // By default, rustc does not include unwind tables unless they are required + // for a particular target. They are not required by RISC-V targets, but + // compiling the standard library with them means that users can get + // backtraces without having to recompile the standard library themselves. + // + // This choice was discussed in https://github.com/rust-lang/rust/pull/69890 + if target.contains("riscv") { + cargo.rustflag("-Cforce-unwind-tables=yes"); + } + + let html_root = + format!("-Zcrate-attr=doc(html_root_url=\"{}/\")", builder.doc_rust_lang_org_channel(),); + cargo.rustflag(&html_root); + cargo.rustdocflag(&html_root); + + cargo.rustdocflag("-Zcrate-attr=warn(rust_2018_idioms)"); +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +struct StdLink { + pub compiler: Compiler, + pub target_compiler: Compiler, + pub target: TargetSelection, + /// Not actually used; only present to make sure the cache invalidation is correct. + crates: Interned>, + /// See [`Std::force_recompile`]. + force_recompile: bool, +} + +impl StdLink { + fn from_std(std: Std, host_compiler: Compiler) -> Self { + Self { + compiler: host_compiler, + target_compiler: std.compiler, + target: std.target, + crates: std.crates, + force_recompile: std.force_recompile, + } + } +} + +impl Step for StdLink { + type Output = (); + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() + } + + /// Link all libstd rlibs/dylibs into the sysroot location. 
+ /// + /// Links those artifacts generated by `compiler` to the `stage` compiler's + /// sysroot for the specified `host` and `target`. + /// + /// Note that this assumes that `compiler` has already generated the libstd + /// libraries for `target`, and this method will find them in the relevant + /// output directory. + fn run(self, builder: &Builder<'_>) { + let compiler = self.compiler; + let target_compiler = self.target_compiler; + let target = self.target; + + // NOTE: intentionally does *not* check `target == builder.build` to avoid having to add the same check in `test::Crate`. + let (libdir, hostdir) = if self.force_recompile && builder.download_rustc() { + // NOTE: copies part of `sysroot_libdir` to avoid having to add a new `force_recompile` argument there too + let lib = builder.sysroot_libdir_relative(self.compiler); + let sysroot = builder.ensure(crate::core::build_steps::compile::Sysroot { + compiler: self.compiler, + force_recompile: self.force_recompile, + }); + let libdir = sysroot.join(lib).join("rustlib").join(target.triple).join("lib"); + let hostdir = sysroot.join(lib).join("rustlib").join(compiler.host.triple).join("lib"); + (INTERNER.intern_path(libdir), INTERNER.intern_path(hostdir)) + } else { + let libdir = builder.sysroot_libdir(target_compiler, target); + let hostdir = builder.sysroot_libdir(target_compiler, compiler.host); + (libdir, hostdir) + }; + + add_to_sysroot(builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target)); + + // Special case for stage0, to make `rustup toolchain link` and `x dist --stage 0` + // work for stage0-sysroot. We only do this if the stage0 compiler comes from beta, + // and is not set to a custom path. + if compiler.stage == 0 + && builder + .build + .config + .initial_rustc + .starts_with(builder.out.join(&compiler.host.triple).join("stage0/bin")) + { + // Copy bin files from stage0/bin to stage0-sysroot/bin + let sysroot = builder.out.join(&compiler.host.triple).join("stage0-sysroot"); + + let host = compiler.host.triple; + let stage0_bin_dir = builder.out.join(&host).join("stage0/bin"); + let sysroot_bin_dir = sysroot.join("bin"); + t!(fs::create_dir_all(&sysroot_bin_dir)); + builder.cp_r(&stage0_bin_dir, &sysroot_bin_dir); + + // Copy all *.so files from stage0/lib to stage0-sysroot/lib + let stage0_lib_dir = builder.out.join(&host).join("stage0/lib"); + if let Ok(files) = fs::read_dir(&stage0_lib_dir) { + for file in files { + let file = t!(file); + let path = file.path(); + if path.is_file() && is_dylib(&file.file_name().into_string().unwrap()) { + builder.copy(&path, &sysroot.join("lib").join(path.file_name().unwrap())); + } + } + } + + // Copy codegen-backends from stage0 + let sysroot_codegen_backends = builder.sysroot_codegen_backends(compiler); + t!(fs::create_dir_all(&sysroot_codegen_backends)); + let stage0_codegen_backends = builder + .out + .join(&host) + .join("stage0/lib/rustlib") + .join(&host) + .join("codegen-backends"); + builder.cp_r(&stage0_codegen_backends, &sysroot_codegen_backends); + } + } +} + +/// Copies sanitizer runtime libraries into target libdir. 
+fn copy_sanitizers( + builder: &Builder<'_>, + compiler: &Compiler, + target: TargetSelection, +) -> Vec { + let runtimes: Vec = builder.ensure(llvm::Sanitizers { target }); + + if builder.config.dry_run() { + return Vec::new(); + } + + let mut target_deps = Vec::new(); + let libdir = builder.sysroot_libdir(*compiler, target); + + for runtime in &runtimes { + let dst = libdir.join(&runtime.name); + builder.copy(&runtime.path, &dst); + + // The `aarch64-apple-ios-macabi` and `x86_64-apple-ios-macabi` are also supported for + // sanitizers, but they share a sanitizer runtime with `${arch}-apple-darwin`, so we do + // not list them here to rename and sign the runtime library. + if target == "x86_64-apple-darwin" + || target == "aarch64-apple-darwin" + || target == "aarch64-apple-ios" + || target == "aarch64-apple-ios-sim" + || target == "x86_64-apple-ios" + { + // Update the library’s install name to reflect that it has been renamed. + apple_darwin_update_library_name(&dst, &format!("@rpath/{}", &runtime.name)); + // Upon renaming the install name, the code signature of the file will invalidate, + // so we will sign it again. + apple_darwin_sign_file(&dst); + } + + target_deps.push(dst); + } + + target_deps +} + +fn apple_darwin_update_library_name(library_path: &Path, new_name: &str) { + let status = Command::new("install_name_tool") + .arg("-id") + .arg(new_name) + .arg(library_path) + .status() + .expect("failed to execute `install_name_tool`"); + assert!(status.success()); +} + +fn apple_darwin_sign_file(file_path: &Path) { + let status = Command::new("codesign") + .arg("-f") // Force to rewrite the existing signature + .arg("-s") + .arg("-") + .arg(file_path) + .status() + .expect("failed to execute `codesign`"); + assert!(status.success()); +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct StartupObjects { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for StartupObjects { + type Output = Vec<(PathBuf, DependencyType)>; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("library/rtstartup") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(StartupObjects { + compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()), + target: run.target, + }); + } + + /// Builds and prepare startup objects like rsbegin.o and rsend.o + /// + /// These are primarily used on Windows right now for linking executables/dlls. + /// They don't require any library support as they're just plain old object + /// files, so we just use the nightly snapshot compiler to always build them (as + /// no other compilers are guaranteed to be available). 
+ fn run(self, builder: &Builder<'_>) -> Vec<(PathBuf, DependencyType)> { + let for_compiler = self.compiler; + let target = self.target; + if !target.ends_with("windows-gnu") { + return vec![]; + } + + let mut target_deps = vec![]; + + let src_dir = &builder.src.join("library").join("rtstartup"); + let dst_dir = &builder.native_dir(target).join("rtstartup"); + let sysroot_dir = &builder.sysroot_libdir(for_compiler, target); + t!(fs::create_dir_all(dst_dir)); + + for file in &["rsbegin", "rsend"] { + let src_file = &src_dir.join(file.to_string() + ".rs"); + let dst_file = &dst_dir.join(file.to_string() + ".o"); + if !up_to_date(src_file, dst_file) { + let mut cmd = Command::new(&builder.initial_rustc); + cmd.env("RUSTC_BOOTSTRAP", "1"); + if !builder.local_rebuild { + // a local_rebuild compiler already has stage1 features + cmd.arg("--cfg").arg("bootstrap"); + } + builder.run( + cmd.arg("--target") + .arg(target.rustc_target_arg()) + .arg("--emit=obj") + .arg("-o") + .arg(dst_file) + .arg(src_file), + ); + } + + let target = sysroot_dir.join((*file).to_string() + ".o"); + builder.copy(dst_file, &target); + target_deps.push((target, DependencyType::Target)); + } + + target_deps + } +} + +fn cp_rustc_component_to_ci_sysroot( + builder: &Builder<'_>, + compiler: Compiler, + contents: Vec, +) { + let sysroot = builder.ensure(Sysroot { compiler, force_recompile: false }); + let ci_rustc_dir = builder.config.ci_rustc_dir(); + + for file in contents { + let src = ci_rustc_dir.join(&file); + let dst = sysroot.join(file); + if src.is_dir() { + t!(fs::create_dir_all(dst)); + } else { + builder.copy(&src, &dst); + } + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Rustc { + pub target: TargetSelection, + pub compiler: Compiler, + /// Whether to build a subset of crates, rather than the whole compiler. + /// + /// This should only be requested by the user, not used within rustbuild itself. + /// Using it within rustbuild can lead to confusing situation where lints are replayed + /// in two different steps. + crates: Interned>, +} + +impl Rustc { + pub fn new(compiler: Compiler, target: TargetSelection) -> Self { + Self { target, compiler, crates: Default::default() } + } +} + +impl Step for Rustc { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = false; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let mut crates = run.builder.in_tree_crates("rustc-main", None); + for (i, krate) in crates.iter().enumerate() { + // We can't allow `build rustc` as an alias for this Step, because that's reserved by `Assemble`. + // Ideally Assemble would use `build compiler` instead, but that seems too confusing to be worth the breaking change. + if krate.name == "rustc-main" { + crates.swap_remove(i); + break; + } + } + run.crates(crates) + } + + fn make_run(run: RunConfig<'_>) { + let crates = run.cargo_crates_in_set(); + run.builder.ensure(Rustc { + compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()), + target: run.target, + crates, + }); + } + + /// Builds the compiler. + /// + /// This will build the compiler for a particular stage of the build using + /// the `compiler` targeting the `target` architecture. The artifacts + /// created will also be linked into the sysroot directory. + fn run(self, builder: &Builder<'_>) { + let compiler = self.compiler; + let target = self.target; + + // NOTE: the ABI of the beta compiler is different from the ABI of the downloaded compiler, + // so its artifacts can't be reused. 
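+        // (For reference: `download_rustc()` reflects the `rust.download-rustc` setting in
+        // `config.toml`; the exact accepted values are not spelled out here. This note is
+        // only a pointer to where the behaviour is configured.)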
+ if builder.download_rustc() && compiler.stage != 0 { + // Copy the existing artifacts instead of rebuilding them. + // NOTE: this path is only taken for tools linking to rustc-dev (including ui-fulldeps tests). + cp_rustc_component_to_ci_sysroot( + builder, + compiler, + builder.config.ci_rustc_dev_contents(), + ); + return; + } + + builder.ensure(Std::new(compiler, target)); + + if builder.config.keep_stage.contains(&compiler.stage) { + builder.info("WARNING: Using a potentially old librustc. This may not behave well."); + builder.info("WARNING: Use `--keep-stage-std` if you want to rebuild the compiler when it changes"); + builder.ensure(RustcLink::from_rustc(self, compiler)); + return; + } + + let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); + if compiler_to_use != compiler { + builder.ensure(Rustc::new(compiler_to_use, target)); + let msg = if compiler_to_use.host == target { + format!( + "Uplifting rustc (stage{} -> stage{})", + compiler_to_use.stage, + compiler.stage + 1 + ) + } else { + format!( + "Uplifting rustc (stage{}:{} -> stage{}:{})", + compiler_to_use.stage, + compiler_to_use.host, + compiler.stage + 1, + target + ) + }; + builder.info(&msg); + builder.ensure(RustcLink::from_rustc(self, compiler_to_use)); + return; + } + + // Ensure that build scripts and proc macros have a std / libproc_macro to link against. + builder.ensure(Std::new( + builder.compiler(self.compiler.stage, builder.config.build), + builder.config.build, + )); + + let mut cargo = builder.cargo(compiler, Mode::Rustc, SourceType::InTree, target, "build"); + rustc_cargo(builder, &mut cargo, target, compiler.stage); + + if builder.config.rust_profile_use.is_some() + && builder.config.rust_profile_generate.is_some() + { + panic!("Cannot use and generate PGO profiles at the same time"); + } + + // With LLD, we can use ICF (identical code folding) to reduce the executable size + // of librustc_driver/rustc and to improve i-cache utilization. + // + // -Wl,[link options] doesn't work on MSVC. However, /OPT:ICF (technically /OPT:REF,ICF) + // is already on by default in MSVC optimized builds, which is interpreted as --icf=all: + // https://github.com/llvm/llvm-project/blob/3329cec2f79185bafd678f310fafadba2a8c76d2/lld/COFF/Driver.cpp#L1746 + // https://github.com/rust-lang/rust/blob/f22819bcce4abaff7d1246a56eec493418f9f4ee/compiler/rustc_codegen_ssa/src/back/linker.rs#L827 + if builder.config.use_lld && !compiler.host.contains("msvc") { + cargo.rustflag("-Clink-args=-Wl,--icf=all"); + } + + let is_collecting = if let Some(path) = &builder.config.rust_profile_generate { + if compiler.stage == 1 { + cargo.rustflag(&format!("-Cprofile-generate={path}")); + // Apparently necessary to avoid overflowing the counters during + // a Cargo build profile + cargo.rustflag("-Cllvm-args=-vp-counters-per-site=4"); + true + } else { + false + } + } else if let Some(path) = &builder.config.rust_profile_use { + if compiler.stage == 1 { + cargo.rustflag(&format!("-Cprofile-use={path}")); + cargo.rustflag("-Cllvm-args=-pgo-warn-missing-function"); + true + } else { + false + } + } else { + false + }; + if is_collecting { + // Ensure paths to Rust sources are relative, not absolute. + cargo.rustflag(&format!( + "-Cllvm-args=-static-func-strip-dirname-prefix={}", + builder.config.src.components().count() + )); + } + + // We currently don't support cross-crate LTO in stage0. This also isn't hugely necessary + // and may just be a time sink. 
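+        // For illustration only, an assumed `config.toml` fragment and the flags the match
+        // below turns it into (not an exhaustive list of accepted values):
+        //
+        //     [rust]
+        //     lto = "thin-local"   # RustcLto::ThinLocal (default): no extra flags
+        //     lto = "thin"         # RustcLto::Thin: -Zdylib-lto -Clto=thin -Cembed-bitcode=yes
+        //     lto = "fat"          # RustcLto::Fat:  -Zdylib-lto -Clto=fat  -Cembed-bitcode=yes
+        //     lto = "off"          # RustcLto::Off:  -Clto=off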
+ if compiler.stage != 0 { + match builder.config.rust_lto { + RustcLto::Thin | RustcLto::Fat => { + // Since using LTO for optimizing dylibs is currently experimental, + // we need to pass -Zdylib-lto. + cargo.rustflag("-Zdylib-lto"); + // Cargo by default passes `-Cembed-bitcode=no` and doesn't pass `-Clto` when + // compiling dylibs (and their dependencies), even when LTO is enabled for the + // crate. Therefore, we need to override `-Clto` and `-Cembed-bitcode` here. + let lto_type = match builder.config.rust_lto { + RustcLto::Thin => "thin", + RustcLto::Fat => "fat", + _ => unreachable!(), + }; + cargo.rustflag(&format!("-Clto={lto_type}")); + cargo.rustflag("-Cembed-bitcode=yes"); + } + RustcLto::ThinLocal => { /* Do nothing, this is the default */ } + RustcLto::Off => { + cargo.rustflag("-Clto=off"); + } + } + } else if builder.config.rust_lto == RustcLto::Off { + cargo.rustflag("-Clto=off"); + } + + for krate in &*self.crates { + cargo.arg("-p").arg(krate); + } + + if builder.build.config.enable_bolt_settings && compiler.stage == 1 { + // Relocations are required for BOLT to work. + cargo.env("RUSTC_BOLT_LINK_FLAGS", "1"); + } + + let _guard = builder.msg_sysroot_tool( + Kind::Build, + compiler.stage, + format_args!("compiler artifacts{}", crate_description(&self.crates)), + compiler.host, + target, + ); + let stamp = librustc_stamp(builder, compiler, target); + run_cargo( + builder, + cargo, + vec![], + &stamp, + vec![], + false, + true, // Only ship rustc_driver.so and .rmeta files, not all intermediate .rlib files. + ); + + // When building `librustc_driver.so` (like `libLLVM.so`) on linux, it can contain + // unexpected debuginfo from dependencies, for example from the C++ standard library used in + // our LLVM wrapper. Unless we're explicitly requesting `librustc_driver` to be built with + // debuginfo (via the debuginfo level of the executables using it): strip this debuginfo + // away after the fact. 
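+        // (The two levels checked below correspond to the `rust.debuginfo-level-rustc` and
+        // `rust.debuginfo-level-tools` options in `config.toml`; option names assumed.)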
+ if builder.config.rust_debuginfo_level_rustc == DebuginfoLevel::None + && builder.config.rust_debuginfo_level_tools == DebuginfoLevel::None + { + let target_root_dir = stamp.parent().unwrap(); + let rustc_driver = target_root_dir.join("librustc_driver.so"); + strip_debug(builder, target, &rustc_driver); + } + + builder.ensure(RustcLink::from_rustc( + self, + builder.compiler(compiler.stage, builder.config.build), + )); + } +} + +pub fn rustc_cargo(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection, stage: u32) { + cargo + .arg("--features") + .arg(builder.rustc_features(builder.kind)) + .arg("--manifest-path") + .arg(builder.src.join("compiler/rustc/Cargo.toml")); + + cargo.rustdocflag("-Zcrate-attr=warn(rust_2018_idioms)"); + + rustc_cargo_env(builder, cargo, target, stage); +} + +pub fn rustc_cargo_env( + builder: &Builder<'_>, + cargo: &mut Cargo, + target: TargetSelection, + stage: u32, +) { + // Set some configuration variables picked up by build scripts and + // the compiler alike + cargo + .env("CFG_RELEASE", builder.rust_release()) + .env("CFG_RELEASE_CHANNEL", &builder.config.channel) + .env("CFG_VERSION", builder.rust_version()); + + if let Some(backend) = builder.config.default_codegen_backend() { + cargo.env("CFG_DEFAULT_CODEGEN_BACKEND", backend); + } + + let libdir_relative = builder.config.libdir_relative().unwrap_or_else(|| Path::new("lib")); + let target_config = builder.config.target_config.get(&target); + + cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative); + + if let Some(ref ver_date) = builder.rust_info().commit_date() { + cargo.env("CFG_VER_DATE", ver_date); + } + if let Some(ref ver_hash) = builder.rust_info().sha() { + cargo.env("CFG_VER_HASH", ver_hash); + } + if !builder.unstable_features() { + cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1"); + } + + // Prefer the current target's own default_linker, else a globally + // specified one. + if let Some(s) = target_config.and_then(|c| c.default_linker.as_ref()) { + cargo.env("CFG_DEFAULT_LINKER", s); + } else if let Some(ref s) = builder.config.rustc_default_linker { + cargo.env("CFG_DEFAULT_LINKER", s); + } + + if builder.config.rustc_parallel { + // keep in sync with `bootstrap/lib.rs:Build::rustc_features` + // `cfg` option for rustc, `features` option for cargo, for conditional compilation + cargo.rustflag("--cfg=parallel_compiler"); + cargo.rustdocflag("--cfg=parallel_compiler"); + } + if builder.config.rust_verify_llvm_ir { + cargo.env("RUSTC_VERIFY_LLVM_IR", "1"); + } + + // Note that this is disabled if LLVM itself is disabled or we're in a check + // build. If we are in a check build we still go ahead here presuming we've + // detected that LLVM is already built and good to go which helps prevent + // busting caches (e.g. like #71152). + if builder.config.llvm_enabled() { + let building_is_expensive = + crate::core::build_steps::llvm::prebuilt_llvm_config(builder, target).is_err(); + // `top_stage == stage` might be false for `check --stage 1`, if we are building the stage 1 compiler + let can_skip_build = builder.kind == Kind::Check && builder.top_stage == stage; + let should_skip_build = building_is_expensive && can_skip_build; + if !should_skip_build { + rustc_llvm_env(builder, cargo, target) + } + } +} + +/// Pass down configuration from the LLVM build into the build of +/// rustc_llvm and rustc_codegen_llvm. 
+fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection) { + let target_config = builder.config.target_config.get(&target); + + if builder.is_rust_llvm(target) { + cargo.env("LLVM_RUSTLLVM", "1"); + } + let llvm::LlvmResult { llvm_config, .. } = builder.ensure(llvm::Llvm { target }); + cargo.env("LLVM_CONFIG", &llvm_config); + if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { + cargo.env("CFG_LLVM_ROOT", s); + } + + // Some LLVM linker flags (-L and -l) may be needed to link `rustc_llvm`. Its build script + // expects these to be passed via the `LLVM_LINKER_FLAGS` env variable, separated by + // whitespace. + // + // For example: + // - on windows, when `clang-cl` is used with instrumentation, we need to manually add + // clang's runtime library resource directory so that the profiler runtime library can be + // found. This is to avoid the linker errors about undefined references to + // `__llvm_profile_instrument_memop` when linking `rustc_driver`. + let mut llvm_linker_flags = String::new(); + if builder.config.llvm_profile_generate && target.contains("msvc") { + if let Some(ref clang_cl_path) = builder.config.llvm_clang_cl { + // Add clang's runtime library directory to the search path + let clang_rt_dir = get_clang_cl_resource_dir(clang_cl_path); + llvm_linker_flags.push_str(&format!("-L{}", clang_rt_dir.display())); + } + } + + // The config can also specify its own llvm linker flags. + if let Some(ref s) = builder.config.llvm_ldflags { + if !llvm_linker_flags.is_empty() { + llvm_linker_flags.push_str(" "); + } + llvm_linker_flags.push_str(s); + } + + // Set the linker flags via the env var that `rustc_llvm`'s build script will read. + if !llvm_linker_flags.is_empty() { + cargo.env("LLVM_LINKER_FLAGS", llvm_linker_flags); + } + + // Building with a static libstdc++ is only supported on linux right now, + // not for MSVC or macOS + if builder.config.llvm_static_stdcpp + && !target.contains("freebsd") + && !target.contains("msvc") + && !target.contains("apple") + && !target.contains("solaris") + { + let file = compiler_file( + builder, + &builder.cxx(target).unwrap(), + target, + CLang::Cxx, + "libstdc++.a", + ); + cargo.env("LLVM_STATIC_STDCPP", file); + } + if builder.llvm_link_shared() { + cargo.env("LLVM_LINK_SHARED", "1"); + } + if builder.config.llvm_use_libcxx { + cargo.env("LLVM_USE_LIBCXX", "1"); + } + if builder.config.llvm_optimize && !builder.config.llvm_release_debuginfo { + cargo.env("LLVM_NDEBUG", "1"); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +struct RustcLink { + pub compiler: Compiler, + pub target_compiler: Compiler, + pub target: TargetSelection, + /// Not actually used; only present to make sure the cache invalidation is correct. 
+ crates: Interned>, +} + +impl RustcLink { + fn from_rustc(rustc: Rustc, host_compiler: Compiler) -> Self { + Self { + compiler: host_compiler, + target_compiler: rustc.compiler, + target: rustc.target, + crates: rustc.crates, + } + } +} + +impl Step for RustcLink { + type Output = (); + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() + } + + /// Same as `std_link`, only for librustc + fn run(self, builder: &Builder<'_>) { + let compiler = self.compiler; + let target_compiler = self.target_compiler; + let target = self.target; + add_to_sysroot( + builder, + &builder.sysroot_libdir(target_compiler, target), + &builder.sysroot_libdir(target_compiler, compiler.host), + &librustc_stamp(builder, compiler, target), + ); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct CodegenBackend { + pub target: TargetSelection, + pub compiler: Compiler, + pub backend: Interned, +} + +fn needs_codegen_config(run: &RunConfig<'_>) -> bool { + let mut needs_codegen_cfg = false; + for path_set in &run.paths { + needs_codegen_cfg = match path_set { + PathSet::Set(set) => set.iter().any(|p| is_codegen_cfg_needed(p, run)), + PathSet::Suite(suite) => is_codegen_cfg_needed(&suite, run), + } + } + needs_codegen_cfg +} + +pub(crate) const CODEGEN_BACKEND_PREFIX: &str = "rustc_codegen_"; + +fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool { + if path.path.to_str().unwrap().contains(&CODEGEN_BACKEND_PREFIX) { + let mut needs_codegen_backend_config = true; + for &backend in &run.builder.config.rust_codegen_backends { + if path + .path + .to_str() + .unwrap() + .ends_with(&(CODEGEN_BACKEND_PREFIX.to_owned() + &backend)) + { + needs_codegen_backend_config = false; + } + } + if needs_codegen_backend_config { + run.builder.info( + "WARNING: no codegen-backends config matched the requested path to build a codegen backend. \ + HELP: add backend to codegen-backends in config.toml.", + ); + return true; + } + } + + return false; +} + +impl Step for CodegenBackend { + type Output = (); + const ONLY_HOSTS: bool = true; + // Only the backends specified in the `codegen-backends` entry of `config.toml` are built. + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.paths(&["compiler/rustc_codegen_cranelift", "compiler/rustc_codegen_gcc"]) + } + + fn make_run(run: RunConfig<'_>) { + if needs_codegen_config(&run) { + return; + } + + for &backend in &run.builder.config.rust_codegen_backends { + if backend == "llvm" { + continue; // Already built as part of rustc + } + + run.builder.ensure(CodegenBackend { + target: run.target, + compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()), + backend, + }); + } + } + + fn run(self, builder: &Builder<'_>) { + let compiler = self.compiler; + let target = self.target; + let backend = self.backend; + + builder.ensure(Rustc::new(compiler, target)); + + if builder.config.keep_stage.contains(&compiler.stage) { + builder.info( + "WARNING: Using a potentially old codegen backend. \ + This may not behave well.", + ); + // Codegen backends are linked separately from this step today, so we don't do + // anything here. 
+ return; + } + + let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); + if compiler_to_use != compiler { + builder.ensure(CodegenBackend { compiler: compiler_to_use, target, backend }); + return; + } + + let out_dir = builder.cargo_out(compiler, Mode::Codegen, target); + + let mut cargo = builder.cargo(compiler, Mode::Codegen, SourceType::InTree, target, "build"); + cargo + .arg("--manifest-path") + .arg(builder.src.join(format!("compiler/rustc_codegen_{backend}/Cargo.toml"))); + rustc_cargo_env(builder, &mut cargo, target, compiler.stage); + + let tmp_stamp = out_dir.join(".tmp.stamp"); + + let _guard = builder.msg_build(compiler, format_args!("codegen backend {backend}"), target); + let files = run_cargo(builder, cargo, vec![], &tmp_stamp, vec![], false, false); + if builder.config.dry_run() { + return; + } + let mut files = files.into_iter().filter(|f| { + let filename = f.file_name().unwrap().to_str().unwrap(); + is_dylib(filename) && filename.contains("rustc_codegen_") + }); + let codegen_backend = match files.next() { + Some(f) => f, + None => panic!("no dylibs built for codegen backend?"), + }; + if let Some(f) = files.next() { + panic!( + "codegen backend built two dylibs:\n{}\n{}", + codegen_backend.display(), + f.display() + ); + } + let stamp = codegen_backend_stamp(builder, compiler, target, backend); + let codegen_backend = codegen_backend.to_str().unwrap(); + t!(fs::write(&stamp, &codegen_backend)); + } +} + +/// Creates the `codegen-backends` folder for a compiler that's about to be +/// assembled as a complete compiler. +/// +/// This will take the codegen artifacts produced by `compiler` and link them +/// into an appropriate location for `target_compiler` to be a functional +/// compiler. +fn copy_codegen_backends_to_sysroot( + builder: &Builder<'_>, + compiler: Compiler, + target_compiler: Compiler, +) { + let target = target_compiler.host; + + // Note that this step is different than all the other `*Link` steps in + // that it's not assembling a bunch of libraries but rather is primarily + // moving the codegen backend into place. The codegen backend of rustc is + // not linked into the main compiler by default but is rather dynamically + // selected at runtime for inclusion. + // + // Here we're looking for the output dylib of the `CodegenBackend` step and + // we're copying that into the `codegen-backends` folder. + let dst = builder.sysroot_codegen_backends(target_compiler); + t!(fs::create_dir_all(&dst), dst); + + if builder.config.dry_run() { + return; + } + + for backend in builder.config.rust_codegen_backends.iter() { + if backend == "llvm" { + continue; // Already built as part of rustc + } + + let stamp = codegen_backend_stamp(builder, compiler, target, *backend); + let dylib = t!(fs::read_to_string(&stamp)); + let file = Path::new(&dylib); + let filename = file.file_name().unwrap().to_str().unwrap(); + // change `librustc_codegen_cranelift-xxxxxx.so` to + // `librustc_codegen_cranelift-release.so` + let target_filename = { + let dash = filename.find('-').unwrap(); + let dot = filename.find('.').unwrap(); + format!("{}-{}{}", &filename[..dash], builder.rust_release(), &filename[dot..]) + }; + builder.copy(&file, &dst.join(target_filename)); + } +} + +/// Cargo's output path for the standard library in a given stage, compiled +/// by a particular compiler for the specified target. 
+pub fn libstd_stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf {
+    builder.cargo_out(compiler, Mode::Std, target).join(".libstd.stamp")
+}
+
+/// Cargo's output path for librustc in a given stage, compiled by a particular
+/// compiler for the specified target.
+pub fn librustc_stamp(
+    builder: &Builder<'_>,
+    compiler: Compiler,
+    target: TargetSelection,
+) -> PathBuf {
+    builder.cargo_out(compiler, Mode::Rustc, target).join(".librustc.stamp")
+}
+
+/// Cargo's output path for librustc_codegen_llvm in a given stage, compiled by a particular
+/// compiler for the specified target and backend.
+fn codegen_backend_stamp(
+    builder: &Builder<'_>,
+    compiler: Compiler,
+    target: TargetSelection,
+    backend: Interned<String>,
+) -> PathBuf {
+    builder
+        .cargo_out(compiler, Mode::Codegen, target)
+        .join(format!(".librustc_codegen_{backend}.stamp"))
+}
+
+pub fn compiler_file(
+    builder: &Builder<'_>,
+    compiler: &Path,
+    target: TargetSelection,
+    c: CLang,
+    file: &str,
+) -> PathBuf {
+    if builder.config.dry_run() {
+        return PathBuf::new();
+    }
+    let mut cmd = Command::new(compiler);
+    cmd.args(builder.cflags(target, GitRepo::Rustc, c));
+    cmd.arg(format!("-print-file-name={file}"));
+    let out = output(&mut cmd);
+    PathBuf::from(out.trim())
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Sysroot {
+    pub compiler: Compiler,
+    /// See [`Std::force_recompile`].
+    force_recompile: bool,
+}
+
+impl Sysroot {
+    pub(crate) fn new(compiler: Compiler) -> Self {
+        Sysroot { compiler, force_recompile: false }
+    }
+}
+
+impl Step for Sysroot {
+    type Output = Interned<PathBuf>;
+
+    fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+        run.never()
+    }
+
+    /// Returns the sysroot for the `compiler` specified that *this build system
+    /// generates*.
+    ///
+    /// That is, the sysroot for the stage0 compiler is not what the compiler
+    /// thinks it is by default, but it's the same as the default for stages
+    /// 1-3.
+    fn run(self, builder: &Builder<'_>) -> Interned<PathBuf> {
+        let compiler = self.compiler;
+        let host_dir = builder.out.join(&compiler.host.triple);
+
+        let sysroot_dir = |stage| {
+            if stage == 0 {
+                host_dir.join("stage0-sysroot")
+            } else if self.force_recompile && stage == compiler.stage {
+                host_dir.join(format!("stage{stage}-test-sysroot"))
+            } else if builder.download_rustc() && compiler.stage != builder.top_stage {
+                host_dir.join("ci-rustc-sysroot")
+            } else {
+                host_dir.join(format!("stage{}", stage))
+            }
+        };
+        let sysroot = sysroot_dir(compiler.stage);
+
+        builder.verbose(&format!("Removing sysroot {} to avoid caching bugs", sysroot.display()));
+        let _ = fs::remove_dir_all(&sysroot);
+        t!(fs::create_dir_all(&sysroot));
+
+        // In some cases (see https://github.com/rust-lang/rust/issues/109314), when the stage0
+        // compiler relies on a more recent version of LLVM than the beta compiler, it may not
+        // be able to locate the correct LLVM in the sysroot. This situation typically occurs
+        // when we upgrade the LLVM version while the beta compiler continues to use an older version.
+        //
+        // Make sure to add the correct version of LLVM into the stage0 sysroot.
+        if compiler.stage == 0 {
+            dist::maybe_install_llvm_target(builder, compiler.host, &sysroot);
+        }
+
+        // If we're downloading a compiler from CI, we can use the same compiler for all stages other than 0.
+ if builder.download_rustc() && compiler.stage != 0 { + assert_eq!( + builder.config.build, compiler.host, + "Cross-compiling is not yet supported with `download-rustc`", + ); + + // #102002, cleanup old toolchain folders when using download-rustc so people don't use them by accident. + for stage in 0..=2 { + if stage != compiler.stage { + let dir = sysroot_dir(stage); + if !dir.ends_with("ci-rustc-sysroot") { + let _ = fs::remove_dir_all(dir); + } + } + } + + // Copy the compiler into the correct sysroot. + // NOTE(#108767): We intentionally don't copy `rustc-dev` artifacts until they're requested with `builder.ensure(Rustc)`. + // This fixes an issue where we'd have multiple copies of libc in the sysroot with no way to tell which to load. + // There are a few quirks of bootstrap that interact to make this reliable: + // 1. The order `Step`s are run is hard-coded in `builder.rs` and not configurable. This + // avoids e.g. reordering `test::UiFulldeps` before `test::Ui` and causing the latter to + // fail because of duplicate metadata. + // 2. The sysroot is deleted and recreated between each invocation, so running `x test + // ui-fulldeps && x test ui` can't cause failures. + let mut filtered_files = Vec::new(); + let mut add_filtered_files = |suffix, contents| { + for path in contents { + let path = Path::new(&path); + if path.parent().map_or(false, |parent| parent.ends_with(&suffix)) { + filtered_files.push(path.file_name().unwrap().to_owned()); + } + } + }; + let suffix = format!("lib/rustlib/{}/lib", compiler.host); + add_filtered_files(suffix.as_str(), builder.config.ci_rustc_dev_contents()); + // NOTE: we can't copy std eagerly because `stage2-test-sysroot` needs to have only the + // newly compiled std, not the downloaded std. + add_filtered_files("lib", builder.config.ci_rust_std_contents()); + + let filtered_extensions = [ + OsStr::new("rmeta"), + OsStr::new("rlib"), + // FIXME: this is wrong when compiler.host != build, but we don't support that today + OsStr::new(std::env::consts::DLL_EXTENSION), + ]; + let ci_rustc_dir = builder.config.ci_rustc_dir(); + builder.cp_filtered(&ci_rustc_dir, &sysroot, &|path| { + if path.extension().map_or(true, |ext| !filtered_extensions.contains(&ext)) { + return true; + } + if !path.parent().map_or(true, |p| p.ends_with(&suffix)) { + return true; + } + if !filtered_files.iter().all(|f| f != path.file_name().unwrap()) { + builder.verbose_than(1, &format!("ignoring {}", path.display())); + false + } else { + true + } + }); + } + + // Symlink the source root into the same location inside the sysroot, + // where `rust-src` component would go (`$sysroot/lib/rustlib/src/rust`), + // so that any tools relying on `rust-src` also work for local builds, + // and also for translating the virtual `/rustc/$hash` back to the real + // directory (for running tests with `rust.remap-debuginfo = true`). + let sysroot_lib_rustlib_src = sysroot.join("lib/rustlib/src"); + t!(fs::create_dir_all(&sysroot_lib_rustlib_src)); + let sysroot_lib_rustlib_src_rust = sysroot_lib_rustlib_src.join("rust"); + if let Err(e) = symlink_dir(&builder.config, &builder.src, &sysroot_lib_rustlib_src_rust) { + eprintln!( + "WARNING: creating symbolic link `{}` to `{}` failed with {}", + sysroot_lib_rustlib_src_rust.display(), + builder.src.display(), + e, + ); + if builder.config.rust_remap_debuginfo { + eprintln!( + "WARNING: some `tests/ui` tests will fail when lacking `{}`", + sysroot_lib_rustlib_src_rust.display(), + ); + } + } + // Same for the rustc-src component. 
+ let sysroot_lib_rustlib_rustcsrc = sysroot.join("lib/rustlib/rustc-src"); + t!(fs::create_dir_all(&sysroot_lib_rustlib_rustcsrc)); + let sysroot_lib_rustlib_rustcsrc_rust = sysroot_lib_rustlib_rustcsrc.join("rust"); + if let Err(e) = + symlink_dir(&builder.config, &builder.src, &sysroot_lib_rustlib_rustcsrc_rust) + { + eprintln!( + "WARNING: creating symbolic link `{}` to `{}` failed with {}", + sysroot_lib_rustlib_rustcsrc_rust.display(), + builder.src.display(), + e, + ); + } + + INTERNER.intern_path(sysroot) + } +} + +#[derive(Debug, Copy, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)] +pub struct Assemble { + /// The compiler which we will produce in this step. Assemble itself will + /// take care of ensuring that the necessary prerequisites to do so exist, + /// that is, this target can be a stage2 compiler and Assemble will build + /// previous stages for you. + pub target_compiler: Compiler, +} + +impl Step for Assemble { + type Output = Compiler; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("compiler/rustc").path("compiler") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Assemble { + target_compiler: run.builder.compiler(run.builder.top_stage + 1, run.target), + }); + } + + /// Prepare a new compiler from the artifacts in `stage` + /// + /// This will assemble a compiler in `build/$host/stage$stage`. The compiler + /// must have been previously produced by the `stage - 1` builder.build + /// compiler. + fn run(self, builder: &Builder<'_>) -> Compiler { + let target_compiler = self.target_compiler; + + if target_compiler.stage == 0 { + assert_eq!( + builder.config.build, target_compiler.host, + "Cannot obtain compiler for non-native build triple at stage 0" + ); + // The stage 0 compiler for the build triple is always pre-built. + return target_compiler; + } + + // Get the compiler that we'll use to bootstrap ourselves. + // + // Note that this is where the recursive nature of the bootstrap + // happens, as this will request the previous stage's compiler on + // downwards to stage 0. + // + // Also note that we're building a compiler for the host platform. We + // only assume that we can run `build` artifacts, which means that to + // produce some other architecture compiler we need to start from + // `build` to get there. + // + // FIXME: It may be faster if we build just a stage 1 compiler and then + // use that to bootstrap this compiler forward. + let build_compiler = builder.compiler(target_compiler.stage - 1, builder.config.build); + + // If we're downloading a compiler from CI, we can use the same compiler for all stages other than 0. + if builder.download_rustc() { + let sysroot = + builder.ensure(Sysroot { compiler: target_compiler, force_recompile: false }); + // Ensure that `libLLVM.so` ends up in the newly created target directory, + // so that tools using `rustc_private` can use it. + dist::maybe_install_llvm_target(builder, target_compiler.host, &sysroot); + // Lower stages use `ci-rustc-sysroot`, not stageN + if target_compiler.stage == builder.top_stage { + builder.info(&format!("Creating a sysroot for stage{stage} compiler (use `rustup toolchain link 'name' build/host/stage{stage}`)", stage=target_compiler.stage)); + } + return target_compiler; + } + + // Build the libraries for this compiler to link to (i.e., the libraries + // it uses at runtime). NOTE: Crates the target compiler compiles don't + // link to these. (FIXME: Is that correct? 
It seems to be correct most + // of the time but I think we do link to these for stage2/bin compilers + // when not performing a full bootstrap). + builder.ensure(Rustc::new(build_compiler, target_compiler.host)); + + // FIXME: For now patch over problems noted in #90244 by early returning here, even though + // we've not properly assembled the target sysroot. A full fix is pending further investigation, + // for now full bootstrap usage is rare enough that this is OK. + if target_compiler.stage >= 3 && !builder.config.full_bootstrap { + return target_compiler; + } + + for &backend in builder.config.rust_codegen_backends.iter() { + if backend == "llvm" { + continue; // Already built as part of rustc + } + + builder.ensure(CodegenBackend { + compiler: build_compiler, + target: target_compiler.host, + backend, + }); + } + + let lld_install = if builder.config.lld_enabled { + Some(builder.ensure(llvm::Lld { target: target_compiler.host })) + } else { + None + }; + + let stage = target_compiler.stage; + let host = target_compiler.host; + let (host_info, dir_name) = if build_compiler.host == host { + ("".into(), "host".into()) + } else { + (format!(" ({host})"), host.to_string()) + }; + // NOTE: "Creating a sysroot" is somewhat inconsistent with our internal terminology, since + // sysroots can temporarily be empty until we put the compiler inside. However, + // `ensure(Sysroot)` isn't really something that's user facing, so there shouldn't be any + // ambiguity. + let msg = format!( + "Creating a sysroot for stage{stage} compiler{host_info} (use `rustup toolchain link 'name' build/{dir_name}/stage{stage}`)" + ); + builder.info(&msg); + + // Link in all dylibs to the libdir + let stamp = librustc_stamp(builder, build_compiler, target_compiler.host); + let proc_macros = builder + .read_stamp_file(&stamp) + .into_iter() + .filter_map(|(path, dependency_type)| { + if dependency_type == DependencyType::Host { + Some(path.file_name().unwrap().to_owned().into_string().unwrap()) + } else { + None + } + }) + .collect::>(); + + let sysroot = builder.sysroot(target_compiler); + let rustc_libdir = builder.rustc_libdir(target_compiler); + t!(fs::create_dir_all(&rustc_libdir)); + let src_libdir = builder.sysroot_libdir(build_compiler, host); + for f in builder.read_dir(&src_libdir) { + let filename = f.file_name().into_string().unwrap(); + if (is_dylib(&filename) || is_debug_info(&filename)) && !proc_macros.contains(&filename) + { + builder.copy(&f.path(), &rustc_libdir.join(&filename)); + } + } + + copy_codegen_backends_to_sysroot(builder, build_compiler, target_compiler); + + // We prepend this bin directory to the user PATH when linking Rust binaries. To + // avoid shadowing the system LLD we rename the LLD we provide to `rust-lld`. 
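+        // Roughly, this ends up producing a layout like the following (illustrative only,
+        // the exact file names depend on the host):
+        //     <sysroot>/lib/rustlib/<host>/bin/rust-lld
+        //     <sysroot>/lib/rustlib/<host>/bin/gcc-ld/<lld wrapper binaries>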
+ let libdir = builder.sysroot_libdir(target_compiler, target_compiler.host); + let libdir_bin = libdir.parent().unwrap().join("bin"); + t!(fs::create_dir_all(&libdir_bin)); + if let Some(lld_install) = lld_install { + let src_exe = exe("lld", target_compiler.host); + let dst_exe = exe("rust-lld", target_compiler.host); + builder.copy(&lld_install.join("bin").join(&src_exe), &libdir_bin.join(&dst_exe)); + let self_contained_lld_dir = libdir_bin.join("gcc-ld"); + t!(fs::create_dir(&self_contained_lld_dir)); + let lld_wrapper_exe = builder.ensure(crate::core::build_steps::tool::LldWrapper { + compiler: build_compiler, + target: target_compiler.host, + }); + for name in crate::LLD_FILE_NAMES { + builder.copy( + &lld_wrapper_exe, + &self_contained_lld_dir.join(exe(name, target_compiler.host)), + ); + } + } + + if builder.config.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) { + let llvm::LlvmResult { llvm_config, .. } = + builder.ensure(llvm::Llvm { target: target_compiler.host }); + if !builder.config.dry_run() { + let llvm_bin_dir = output(Command::new(llvm_config).arg("--bindir")); + let llvm_bin_dir = Path::new(llvm_bin_dir.trim()); + + // Since we've already built the LLVM tools, install them to the sysroot. + // This is the equivalent of installing the `llvm-tools-preview` component via + // rustup, and lets developers use a locally built toolchain to + // build projects that expect llvm tools to be present in the sysroot + // (e.g. the `bootimage` crate). + for tool in LLVM_TOOLS { + let tool_exe = exe(tool, target_compiler.host); + let src_path = llvm_bin_dir.join(&tool_exe); + // When using `download-ci-llvm`, some of the tools + // may not exist, so skip trying to copy them. + if src_path.exists() { + builder.copy(&src_path, &libdir_bin.join(&tool_exe)); + } + } + } + } + + // Ensure that `libLLVM.so` ends up in the newly build compiler directory, + // so that it can be found when the newly built `rustc` is run. + dist::maybe_install_llvm_runtime(builder, target_compiler.host, &sysroot); + dist::maybe_install_llvm_target(builder, target_compiler.host, &sysroot); + + // Link the compiler binary itself into place + let out_dir = builder.cargo_out(build_compiler, Mode::Rustc, host); + let rustc = out_dir.join(exe("rustc-main", host)); + let bindir = sysroot.join("bin"); + t!(fs::create_dir_all(&bindir)); + let compiler = builder.rustc(target_compiler); + builder.copy(&rustc, &compiler); + + target_compiler + } +} + +/// Link some files into a rustc sysroot. +/// +/// For a particular stage this will link the file listed in `stamp` into the +/// `sysroot_dst` provided. 
+pub fn add_to_sysroot( + builder: &Builder<'_>, + sysroot_dst: &Path, + sysroot_host_dst: &Path, + stamp: &Path, +) { + let self_contained_dst = &sysroot_dst.join("self-contained"); + t!(fs::create_dir_all(&sysroot_dst)); + t!(fs::create_dir_all(&sysroot_host_dst)); + t!(fs::create_dir_all(&self_contained_dst)); + for (path, dependency_type) in builder.read_stamp_file(stamp) { + let dst = match dependency_type { + DependencyType::Host => sysroot_host_dst, + DependencyType::Target => sysroot_dst, + DependencyType::TargetSelfContained => self_contained_dst, + }; + builder.copy(&path, &dst.join(path.file_name().unwrap())); + } +} + +pub fn run_cargo( + builder: &Builder<'_>, + cargo: Cargo, + tail_args: Vec, + stamp: &Path, + additional_target_deps: Vec<(PathBuf, DependencyType)>, + is_check: bool, + rlib_only_metadata: bool, +) -> Vec { + if builder.config.dry_run() { + return Vec::new(); + } + + // `target_root_dir` looks like $dir/$target/release + let target_root_dir = stamp.parent().unwrap(); + // `target_deps_dir` looks like $dir/$target/release/deps + let target_deps_dir = target_root_dir.join("deps"); + // `host_root_dir` looks like $dir/release + let host_root_dir = target_root_dir + .parent() + .unwrap() // chop off `release` + .parent() + .unwrap() // chop off `$target` + .join(target_root_dir.file_name().unwrap()); + + // Spawn Cargo slurping up its JSON output. We'll start building up the + // `deps` array of all files it generated along with a `toplevel` array of + // files we need to probe for later. + let mut deps = Vec::new(); + let mut toplevel = Vec::new(); + let ok = stream_cargo(builder, cargo, tail_args, &mut |msg| { + let (filenames, crate_types) = match msg { + CargoMessage::CompilerArtifact { + filenames, + target: CargoTarget { crate_types }, + .. + } => (filenames, crate_types), + _ => return, + }; + for filename in filenames { + // Skip files like executables + let mut keep = false; + if filename.ends_with(".lib") + || filename.ends_with(".a") + || is_debug_info(&filename) + || is_dylib(&filename) + { + // Always keep native libraries, rust dylibs and debuginfo + keep = true; + } + if is_check && filename.ends_with(".rmeta") { + // During check builds we need to keep crate metadata + keep = true; + } else if rlib_only_metadata { + if filename.contains("jemalloc_sys") + || filename.contains("rustc_smir") + || filename.contains("stable_mir") + { + // jemalloc_sys and rustc_smir are not linked into librustc_driver.so, + // so we need to distribute them as rlib to be able to use them. + keep |= filename.ends_with(".rlib"); + } else { + // Distribute the rest of the rustc crates as rmeta files only to reduce + // the tarball sizes by about 50%. The object files are linked into + // librustc_driver.so, so it is still possible to link against them. + keep |= filename.ends_with(".rmeta"); + } + } else { + // In all other cases keep all rlibs + keep |= filename.ends_with(".rlib"); + } + + if !keep { + continue; + } + + let filename = Path::new(&*filename); + + // If this was an output file in the "host dir" we don't actually + // worry about it, it's not relevant for us + if filename.starts_with(&host_root_dir) { + // Unless it's a proc macro used in the compiler + if crate_types.iter().any(|t| t == "proc-macro") { + deps.push((filename.to_path_buf(), DependencyType::Host)); + } + continue; + } + + // If this was output in the `deps` dir then this is a precise file + // name (hash included) so we start tracking it. 
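+            // (For example, a path of the rough shape
+            // `.../<target>/release/deps/libstd-<hash>.rlib`; shown only as an illustration.)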
+ if filename.starts_with(&target_deps_dir) { + deps.push((filename.to_path_buf(), DependencyType::Target)); + continue; + } + + // Otherwise this was a "top level artifact" which right now doesn't + // have a hash in the name, but there's a version of this file in + // the `deps` folder which *does* have a hash in the name. That's + // the one we'll want to we'll probe for it later. + // + // We do not use `Path::file_stem` or `Path::extension` here, + // because some generated files may have multiple extensions e.g. + // `std-.dll.lib` on Windows. The aforementioned methods only + // split the file name by the last extension (`.lib`) while we need + // to split by all extensions (`.dll.lib`). + let expected_len = t!(filename.metadata()).len(); + let filename = filename.file_name().unwrap().to_str().unwrap(); + let mut parts = filename.splitn(2, '.'); + let file_stem = parts.next().unwrap().to_owned(); + let extension = parts.next().unwrap().to_owned(); + + toplevel.push((file_stem, extension, expected_len)); + } + }); + + if !ok { + crate::exit!(1); + } + + // Ok now we need to actually find all the files listed in `toplevel`. We've + // got a list of prefix/extensions and we basically just need to find the + // most recent file in the `deps` folder corresponding to each one. + let contents = t!(target_deps_dir.read_dir()) + .map(|e| t!(e)) + .map(|e| (e.path(), e.file_name().into_string().unwrap(), t!(e.metadata()))) + .collect::>(); + for (prefix, extension, expected_len) in toplevel { + let candidates = contents.iter().filter(|&&(_, ref filename, ref meta)| { + meta.len() == expected_len + && filename + .strip_prefix(&prefix[..]) + .map(|s| s.starts_with('-') && s.ends_with(&extension[..])) + .unwrap_or(false) + }); + let max = candidates.max_by_key(|&&(_, _, ref metadata)| { + metadata.modified().expect("mtime should be available on all relevant OSes") + }); + let path_to_add = match max { + Some(triple) => triple.0.to_str().unwrap(), + None => panic!("no output generated for {prefix:?} {extension:?}"), + }; + if is_dylib(path_to_add) { + let candidate = format!("{path_to_add}.lib"); + let candidate = PathBuf::from(candidate); + if candidate.exists() { + deps.push((candidate, DependencyType::Target)); + } + } + deps.push((path_to_add.into(), DependencyType::Target)); + } + + deps.extend(additional_target_deps); + deps.sort(); + let mut new_contents = Vec::new(); + for (dep, dependency_type) in deps.iter() { + new_contents.extend(match *dependency_type { + DependencyType::Host => b"h", + DependencyType::Target => b"t", + DependencyType::TargetSelfContained => b"s", + }); + new_contents.extend(dep.to_str().unwrap().as_bytes()); + new_contents.extend(b"\0"); + } + t!(fs::write(&stamp, &new_contents)); + deps.into_iter().map(|(d, _)| d).collect() +} + +pub fn stream_cargo( + builder: &Builder<'_>, + cargo: Cargo, + tail_args: Vec, + cb: &mut dyn FnMut(CargoMessage<'_>), +) -> bool { + let mut cargo = Command::from(cargo); + if builder.config.dry_run() { + return true; + } + // Instruct Cargo to give us json messages on stdout, critically leaving + // stderr as piped so we can get those pretty colors. 
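+    // The assembled value looks like `--message-format=json-render-diagnostics` (or plain
+    // `json` when JSON output was requested), optionally with a `,json-diagnostic-<format>`
+    // suffix mirroring the configured error format; shown here only as an illustration.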
+    let mut message_format = if builder.config.json_output {
+        String::from("json")
+    } else {
+        String::from("json-render-diagnostics")
+    };
+    if let Some(s) = &builder.config.rustc_error_format {
+        message_format.push_str(",json-diagnostic-");
+        message_format.push_str(s);
+    }
+    cargo.arg("--message-format").arg(message_format).stdout(Stdio::piped());
+
+    for arg in tail_args {
+        cargo.arg(arg);
+    }
+
+    builder.verbose(&format!("running: {cargo:?}"));
+    let mut child = match cargo.spawn() {
+        Ok(child) => child,
+        Err(e) => panic!("failed to execute command: {cargo:?}\nERROR: {e}"),
+    };
+
+    // Spawn Cargo slurping up its JSON output. We'll start building up the
+    // `deps` array of all files it generated along with a `toplevel` array of
+    // files we need to probe for later.
+    let stdout = BufReader::new(child.stdout.take().unwrap());
+    for line in stdout.lines() {
+        let line = t!(line);
+        match serde_json::from_str::<CargoMessage<'_>>(&line) {
+            Ok(msg) => {
+                if builder.config.json_output {
+                    // Forward JSON to stdout.
+                    println!("{line}");
+                }
+                cb(msg)
+            }
+            // If this was informational, just print it out and continue
+            Err(_) => println!("{line}"),
+        }
+    }
+
+    // Make sure Cargo actually succeeded after we read all of its stdout.
+    let status = t!(child.wait());
+    if builder.is_verbose() && !status.success() {
+        eprintln!(
+            "command did not execute successfully: {cargo:?}\n\
+            expected success, got: {status}"
+        );
+    }
+    status.success()
+}
+
+#[derive(Deserialize)]
+pub struct CargoTarget<'a> {
+    crate_types: Vec<Cow<'a, str>>,
+}
+
+#[derive(Deserialize)]
+#[serde(tag = "reason", rename_all = "kebab-case")]
+pub enum CargoMessage<'a> {
+    CompilerArtifact {
+        package_id: Cow<'a, str>,
+        features: Vec<Cow<'a, str>>,
+        filenames: Vec<Cow<'a, str>>,
+        target: CargoTarget<'a>,
+    },
+    BuildScriptExecuted {
+        package_id: Cow<'a, str>,
+    },
+    BuildFinished {
+        success: bool,
+    },
+}
+
+pub fn strip_debug(builder: &Builder<'_>, target: TargetSelection, path: &Path) {
+    // FIXME: to make things simpler for now, limit this to the host and target where we know
+    // `strip -g` is both available and will fix the issue, i.e. on a x64 linux host that is not
+    // cross-compiling. Expand this to other appropriate targets in the future.
+    if target != "x86_64-unknown-linux-gnu" || target != builder.config.build || !path.exists() {
+        return;
+    }
+
+    let previous_mtime = FileTime::from_last_modification_time(&path.metadata().unwrap());
+    // NOTE: `output` will propagate any errors here.
+    output(Command::new("strip").arg("--strip-debug").arg(path));
+
+    // After running `strip`, we have to set the file modification time to what it was before,
+    // otherwise we risk Cargo invalidating its fingerprint and rebuilding the world next time
+    // bootstrap is invoked.
+    //
+    // An example of this is if we run this on librustc_driver.so. In the first invocation:
+    // - Cargo will build librustc_driver.so (mtime of 1)
+    // - Cargo will build rustc-main (mtime of 2)
+    // - Bootstrap will strip librustc_driver.so (changing the mtime to 3).
+    //
+    // In the second invocation of bootstrap, Cargo will see that the mtime of librustc_driver.so
+    // is greater than the mtime of rustc-main, and will rebuild rustc-main. That will then cause
+    // everything else (standard library, future stages...) to be rebuilt.
+ t!(filetime::set_file_mtime(path, previous_mtime)); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/dist.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/dist.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/dist.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/dist.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,2366 @@ +//! Implementation of the various distribution aspects of the compiler. +//! +//! This module is responsible for creating tarballs of the standard library, +//! compiler, and documentation. This ends up being what we distribute to +//! everyone as well. +//! +//! No tarball is actually created literally in this file, but rather we shell +//! out to `rust-installer` still. This may one day be replaced with bits and +//! pieces of `rustup.rs`! + +use std::collections::HashSet; +use std::env; +use std::ffi::OsStr; +use std::fs; +use std::io::Write; +use std::path::{Path, PathBuf}; +use std::process::Command; + +use object::read::archive::ArchiveFile; +use object::BinaryFormat; + +use crate::core::build_steps::compile; +use crate::core::build_steps::doc::DocumentationFormat; +use crate::core::build_steps::llvm; +use crate::core::build_steps::tool::{self, Tool}; +use crate::core::builder::{Builder, Kind, RunConfig, ShouldRun, Step}; +use crate::core::config::TargetSelection; +use crate::utils::cache::{Interned, INTERNER}; +use crate::utils::channel; +use crate::utils::helpers::{exe, is_dylib, output, t, target_supports_cranelift_backend, timeit}; +use crate::utils::tarball::{GeneratedTarball, OverlayKind, Tarball}; +use crate::{Compiler, DependencyType, Mode, LLVM_TOOLS}; + +pub fn pkgname(builder: &Builder<'_>, component: &str) -> String { + format!("{}-{}", component, builder.rust_package_vers()) +} + +pub(crate) fn distdir(builder: &Builder<'_>) -> PathBuf { + builder.out.join("dist") +} + +pub fn tmpdir(builder: &Builder<'_>) -> PathBuf { + builder.out.join("tmp/dist") +} + +fn should_build_extended_tool(builder: &Builder<'_>, tool: &str) -> bool { + if !builder.config.extended { + return false; + } + builder.config.tools.as_ref().map_or(true, |tools| tools.contains(tool)) +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Docs { + pub host: TargetSelection, +} + +impl Step for Docs { + type Output = Option; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let default = run.builder.config.docs; + run.alias("rust-docs").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Docs { host: run.target }); + } + + /// Builds the `rust-docs` installer component. 
+    fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
+        let host = self.host;
+        builder.default_doc(&[]);
+
+        let dest = "share/doc/rust/html";
+
+        let mut tarball = Tarball::new(builder, "rust-docs", &host.triple);
+        tarball.set_product_name("Rust Documentation");
+        tarball.add_bulk_dir(&builder.doc_out(host), dest);
+        tarball.add_file(&builder.src.join("src/doc/robots.txt"), dest, 0o644);
+        Some(tarball.generate())
+    }
+}
+
+#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct JsonDocs {
+    pub host: TargetSelection,
+}
+
+impl Step for JsonDocs {
+    type Output = Option<GeneratedTarball>;
+    const DEFAULT: bool = true;
+
+    fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+        let default = run.builder.config.docs;
+        run.alias("rust-docs-json").default_condition(default)
+    }
+
+    fn make_run(run: RunConfig<'_>) {
+        run.builder.ensure(JsonDocs { host: run.target });
+    }
+
+    /// Builds the `rust-docs-json` installer component.
+    fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
+        let host = self.host;
+        builder.ensure(crate::core::build_steps::doc::Std::new(
+            builder.top_stage,
+            host,
+            builder,
+            DocumentationFormat::JSON,
+        ));
+
+        let dest = "share/doc/rust/json";
+
+        let mut tarball = Tarball::new(builder, "rust-docs-json", &host.triple);
+        tarball.set_product_name("Rust Documentation In JSON Format");
+        tarball.is_preview(true);
+        tarball.add_bulk_dir(&builder.json_doc_out(host), dest);
+        Some(tarball.generate())
+    }
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct RustcDocs {
+    pub host: TargetSelection,
+}
+
+impl Step for RustcDocs {
+    type Output = Option<GeneratedTarball>;
+    const DEFAULT: bool = true;
+
+    fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+        let builder = run.builder;
+        run.alias("rustc-docs").default_condition(builder.config.compiler_docs)
+    }
+
+    fn make_run(run: RunConfig<'_>) {
+        run.builder.ensure(RustcDocs { host: run.target });
+    }
+
+    /// Builds the `rustc-docs` installer component.
+ fn run(self, builder: &Builder<'_>) -> Option { + let host = self.host; + builder.default_doc(&[]); + + let mut tarball = Tarball::new(builder, "rustc-docs", &host.triple); + tarball.set_product_name("Rustc Documentation"); + tarball.add_bulk_dir(&builder.compiler_doc_out(host), "share/doc/rust/html/rustc"); + Some(tarball.generate()) + } +} + +fn find_files(files: &[&str], path: &[PathBuf]) -> Vec { + let mut found = Vec::with_capacity(files.len()); + + for file in files { + let file_path = path.iter().map(|dir| dir.join(file)).find(|p| p.exists()); + + if let Some(file_path) = file_path { + found.push(file_path); + } else { + panic!("Could not find '{file}' in {path:?}"); + } + } + + found +} + +fn make_win_dist( + rust_root: &Path, + plat_root: &Path, + target: TargetSelection, + builder: &Builder<'_>, +) { + if builder.config.dry_run() { + return; + } + + //Ask gcc where it keeps its stuff + let mut cmd = Command::new(builder.cc(target)); + cmd.arg("-print-search-dirs"); + let gcc_out = output(&mut cmd); + + let mut bin_path: Vec<_> = env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect(); + let mut lib_path = Vec::new(); + + for line in gcc_out.lines() { + let idx = line.find(':').unwrap(); + let key = &line[..idx]; + let trim_chars: &[_] = &[' ', '=']; + let value = env::split_paths(line[(idx + 1)..].trim_start_matches(trim_chars)); + + if key == "programs" { + bin_path.extend(value); + } else if key == "libraries" { + lib_path.extend(value); + } + } + + let compiler = if target == "i686-pc-windows-gnu" { + "i686-w64-mingw32-gcc.exe" + } else if target == "x86_64-pc-windows-gnu" { + "x86_64-w64-mingw32-gcc.exe" + } else { + "gcc.exe" + }; + let target_tools = [compiler, "ld.exe", "dlltool.exe", "libwinpthread-1.dll"]; + let mut rustc_dlls = vec!["libwinpthread-1.dll"]; + if target.starts_with("i686-") { + rustc_dlls.push("libgcc_s_dw2-1.dll"); + } else { + rustc_dlls.push("libgcc_s_seh-1.dll"); + } + + // Libraries necessary to link the windows-gnu toolchains. + // System libraries will be preferred if they are available (see #67429). + let target_libs = [ + //MinGW libs + "libgcc.a", + "libgcc_eh.a", + "libgcc_s.a", + "libm.a", + "libmingw32.a", + "libmingwex.a", + "libstdc++.a", + "libiconv.a", + "libmoldname.a", + "libpthread.a", + //Windows import libs + //This should contain only the set of libraries necessary to link the standard library. 
+ "libadvapi32.a", + "libbcrypt.a", + "libcomctl32.a", + "libcomdlg32.a", + "libcredui.a", + "libcrypt32.a", + "libdbghelp.a", + "libgdi32.a", + "libimagehlp.a", + "libiphlpapi.a", + "libkernel32.a", + "libmsimg32.a", + "libmsvcrt.a", + "libntdll.a", + "libodbc32.a", + "libole32.a", + "liboleaut32.a", + "libopengl32.a", + "libpsapi.a", + "librpcrt4.a", + "libsecur32.a", + "libsetupapi.a", + "libshell32.a", + "libsynchronization.a", + "libuser32.a", + "libuserenv.a", + "libuuid.a", + "libwinhttp.a", + "libwinmm.a", + "libwinspool.a", + "libws2_32.a", + "libwsock32.a", + ]; + + //Find mingw artifacts we want to bundle + let target_tools = find_files(&target_tools, &bin_path); + let rustc_dlls = find_files(&rustc_dlls, &bin_path); + let target_libs = find_files(&target_libs, &lib_path); + + // Copy runtime dlls next to rustc.exe + let dist_bin_dir = rust_root.join("bin/"); + fs::create_dir_all(&dist_bin_dir).expect("creating dist_bin_dir failed"); + for src in rustc_dlls { + builder.copy_to_folder(&src, &dist_bin_dir); + } + + //Copy platform tools to platform-specific bin directory + let target_bin_dir = plat_root + .join("lib") + .join("rustlib") + .join(target.triple) + .join("bin") + .join("self-contained"); + fs::create_dir_all(&target_bin_dir).expect("creating target_bin_dir failed"); + for src in target_tools { + builder.copy_to_folder(&src, &target_bin_dir); + } + + // Warn windows-gnu users that the bundled GCC cannot compile C files + builder.create( + &target_bin_dir.join("GCC-WARNING.txt"), + "gcc.exe contained in this folder cannot be used for compiling C files - it is only \ + used as a linker. In order to be able to compile projects containing C code use \ + the GCC provided by MinGW or Cygwin.", + ); + + //Copy platform libs to platform-specific lib directory + let target_lib_dir = plat_root + .join("lib") + .join("rustlib") + .join(target.triple) + .join("lib") + .join("self-contained"); + fs::create_dir_all(&target_lib_dir).expect("creating target_lib_dir failed"); + for src in target_libs { + builder.copy_to_folder(&src, &target_lib_dir); + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Mingw { + pub host: TargetSelection, +} + +impl Step for Mingw { + type Output = Option; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("rust-mingw") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Mingw { host: run.target }); + } + + /// Builds the `rust-mingw` installer component. + /// + /// This contains all the bits and pieces to run the MinGW Windows targets + /// without any extra installed software (e.g., we bundle gcc, libraries, etc). + fn run(self, builder: &Builder<'_>) -> Option { + let host = self.host; + if !host.ends_with("pc-windows-gnu") || !builder.config.dist_include_mingw_linker { + return None; + } + + let mut tarball = Tarball::new(builder, "rust-mingw", &host.triple); + tarball.set_product_name("Rust MinGW"); + + // The first argument is a "temporary directory" which is just + // thrown away (this contains the runtime DLLs included in the rustc package + // above) and the second argument is where to place all the MinGW components + // (which is what we want). 
+ make_win_dist(&tmpdir(builder), tarball.image_dir(), host, &builder); + + Some(tarball.generate()) + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Rustc { + pub compiler: Compiler, +} + +impl Step for Rustc { + type Output = GeneratedTarball; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("rustc") + } + + fn make_run(run: RunConfig<'_>) { + run.builder + .ensure(Rustc { compiler: run.builder.compiler(run.builder.top_stage, run.target) }); + } + + /// Creates the `rustc` installer component. + fn run(self, builder: &Builder<'_>) -> GeneratedTarball { + let compiler = self.compiler; + let host = self.compiler.host; + + let tarball = Tarball::new(builder, "rustc", &host.triple); + + // Prepare the rustc "image", what will actually end up getting installed + prepare_image(builder, compiler, tarball.image_dir()); + + // On MinGW we've got a few runtime DLL dependencies that we need to + // include. The first argument to this script is where to put these DLLs + // (the image we're creating), and the second argument is a junk directory + // to ignore all other MinGW stuff the script creates. + // + // On 32-bit MinGW we're always including a DLL which needs some extra + // licenses to distribute. On 64-bit MinGW we don't actually distribute + // anything requiring us to distribute a license, but it's likely the + // install will *also* include the rust-mingw package, which also needs + // licenses, so to be safe we just include it here in all MinGW packages. + if host.ends_with("pc-windows-gnu") && builder.config.dist_include_mingw_linker { + make_win_dist(tarball.image_dir(), &tmpdir(builder), host, builder); + tarball.add_dir(builder.src.join("src/etc/third-party"), "share/doc"); + } + + return tarball.generate(); + + fn prepare_image(builder: &Builder<'_>, compiler: Compiler, image: &Path) { + let host = compiler.host; + let src = builder.sysroot(compiler); + + // Copy rustc/rustdoc binaries + t!(fs::create_dir_all(image.join("bin"))); + builder.cp_r(&src.join("bin"), &image.join("bin")); + + if builder + .config + .tools + .as_ref() + .map_or(true, |tools| tools.iter().any(|tool| tool == "rustdoc")) + { + let rustdoc = builder.rustdoc(compiler); + builder.install(&rustdoc, &image.join("bin"), 0o755); + } + + if let Some(ra_proc_macro_srv) = builder.ensure_if_default( + tool::RustAnalyzerProcMacroSrv { + compiler: builder.compiler_for( + compiler.stage, + builder.config.build, + compiler.host, + ), + target: compiler.host, + }, + builder.kind, + ) { + builder.install(&ra_proc_macro_srv, &image.join("libexec"), 0o755); + } + + let libdir_relative = builder.libdir_relative(compiler); + + // Copy runtime DLLs needed by the compiler + if libdir_relative.to_str() != Some("bin") { + let libdir = builder.rustc_libdir(compiler); + for entry in builder.read_dir(&libdir) { + let name = entry.file_name(); + if let Some(s) = name.to_str() { + if is_dylib(s) { + // Don't use custom libdir here because ^lib/ will be resolved again + // with installer + builder.install(&entry.path(), &image.join("lib"), 0o644); + } + } + } + } + + // Copy libLLVM.so to the lib dir as well, if needed. While not + // technically needed by rustc itself it's needed by lots of other + // components like the llvm tools and LLD. LLD is included below and + // tools/LLDB come later, so let's just throw it in the rustc + // component for now. 
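The loop above that copies the compiler's runtime DLLs relies on a bootstrap helper, `is_dylib`, to pick dynamic libraries out of the libdir. A rough standalone equivalent, assuming the usual platform suffixes (`.so`, `.dylib`, `.dll`); the directory used in `main` is purely illustrative:

```rust
use std::fs;
use std::path::Path;

/// Approximation of bootstrap's `is_dylib`: match the platform shared-library suffixes.
fn is_dylib(name: &str) -> bool {
    name.ends_with(".so") || name.ends_with(".dylib") || name.ends_with(".dll")
}

/// List the dynamic libraries in a directory, the way the image-preparation loop does.
fn list_dylibs(dir: &Path) -> std::io::Result<Vec<String>> {
    let mut found = Vec::new();
    for entry in fs::read_dir(dir)? {
        let entry = entry?;
        if let Some(name) = entry.file_name().to_str() {
            if is_dylib(name) {
                found.push(name.to_owned());
            }
        }
    }
    Ok(found)
}

fn main() -> std::io::Result<()> {
    // Illustrative path only; the real code walks `builder.rustc_libdir(compiler)`.
    for name in list_dylibs(Path::new("/usr/lib"))? {
        println!("{name}");
    }
    Ok(())
}
```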
+ maybe_install_llvm_runtime(builder, host, image); + + let dst_dir = image.join("lib/rustlib").join(&*host.triple).join("bin"); + t!(fs::create_dir_all(&dst_dir)); + + // Copy over lld if it's there + if builder.config.lld_enabled { + let src_dir = builder.sysroot_libdir(compiler, host).parent().unwrap().join("bin"); + let rust_lld = exe("rust-lld", compiler.host); + builder.copy(&src_dir.join(&rust_lld), &dst_dir.join(&rust_lld)); + let self_contained_lld_src_dir = src_dir.join("gcc-ld"); + let self_contained_lld_dst_dir = dst_dir.join("gcc-ld"); + t!(fs::create_dir(&self_contained_lld_dst_dir)); + for name in crate::LLD_FILE_NAMES { + let exe_name = exe(name, compiler.host); + builder.copy( + &self_contained_lld_src_dir.join(&exe_name), + &self_contained_lld_dst_dir.join(&exe_name), + ); + } + } + + // Man pages + t!(fs::create_dir_all(image.join("share/man/man1"))); + let man_src = builder.src.join("src/doc/man"); + let man_dst = image.join("share/man/man1"); + + // don't use our `bootstrap::{copy, cp_r}`, because those try + // to hardlink, and we don't want to edit the source templates + for file_entry in builder.read_dir(&man_src) { + let page_src = file_entry.path(); + let page_dst = man_dst.join(file_entry.file_name()); + let src_text = t!(std::fs::read_to_string(&page_src)); + let new_text = src_text.replace("<INSERT VERSION HERE>", &builder.version); + t!(std::fs::write(&page_dst, &new_text)); + t!(fs::copy(&page_src, &page_dst)); + } + + // Debugger scripts + builder + .ensure(DebuggerScripts { sysroot: INTERNER.intern_path(image.to_owned()), host }); + + // Misc license info + let cp = |file: &str| { + builder.install(&builder.src.join(file), &image.join("share/doc/rust"), 0o644); + }; + cp("COPYRIGHT"); + cp("LICENSE-APACHE"); + cp("LICENSE-MIT"); + cp("README.md"); + } + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct DebuggerScripts { + pub sysroot: Interned<PathBuf>, + pub host: TargetSelection, +} + +impl Step for DebuggerScripts { + type Output = (); + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() + } + + /// Copies debugger scripts for `target` into the `sysroot` specified. 
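The man-page loop above is plain string templating: each page in `src/doc/man` carries a version placeholder that is substituted before the page is written into the image. A self-contained sketch of the same idea; the destination path and sample version are illustrative:

```rust
use std::fs;
use std::path::Path;

/// Render every man page template in `src` into `dst`, substituting the version.
fn render_man_pages(src: &Path, dst: &Path, version: &str) -> std::io::Result<()> {
    fs::create_dir_all(dst)?;
    for entry in fs::read_dir(src)? {
        let entry = entry?;
        // `src/doc/man` contains only flat template files, so no recursion is needed.
        let text = fs::read_to_string(entry.path())?;
        let rendered = text.replace("<INSERT VERSION HERE>", version);
        fs::write(dst.join(entry.file_name()), rendered)?;
    }
    Ok(())
}

fn main() -> std::io::Result<()> {
    render_man_pages(
        Path::new("src/doc/man"),
        Path::new("image/share/man/man1"),
        "1.75.0",
    )
}
```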
+ fn run(self, builder: &Builder<'_>) { + let host = self.host; + let sysroot = self.sysroot; + let dst = sysroot.join("lib/rustlib/etc"); + t!(fs::create_dir_all(&dst)); + let cp_debugger_script = |file: &str| { + builder.install(&builder.src.join("src/etc/").join(file), &dst, 0o644); + }; + if host.contains("windows-msvc") { + // windbg debugger scripts + builder.install( + &builder.src.join("src/etc/rust-windbg.cmd"), + &sysroot.join("bin"), + 0o755, + ); + + cp_debugger_script("natvis/intrinsic.natvis"); + cp_debugger_script("natvis/liballoc.natvis"); + cp_debugger_script("natvis/libcore.natvis"); + cp_debugger_script("natvis/libstd.natvis"); + } else { + cp_debugger_script("rust_types.py"); + + // gdb debugger scripts + builder.install(&builder.src.join("src/etc/rust-gdb"), &sysroot.join("bin"), 0o755); + builder.install(&builder.src.join("src/etc/rust-gdbgui"), &sysroot.join("bin"), 0o755); + + cp_debugger_script("gdb_load_rust_pretty_printers.py"); + cp_debugger_script("gdb_lookup.py"); + cp_debugger_script("gdb_providers.py"); + + // lldb debugger scripts + builder.install(&builder.src.join("src/etc/rust-lldb"), &sysroot.join("bin"), 0o755); + + cp_debugger_script("lldb_lookup.py"); + cp_debugger_script("lldb_providers.py"); + cp_debugger_script("lldb_commands") + } + } +} + +fn skip_host_target_lib(builder: &Builder<'_>, compiler: Compiler) -> bool { + // The only true set of target libraries came from the build triple, so + // let's reduce redundant work by only producing archives from that host. + if compiler.host != builder.config.build { + builder.info("\tskipping, not a build host"); + true + } else { + false + } +} + +/// Check that all objects in rlibs for UEFI targets are COFF. This +/// ensures that the C compiler isn't producing ELF objects, which would +/// not link correctly with the COFF objects. +fn verify_uefi_rlib_format(builder: &Builder<'_>, target: TargetSelection, stamp: &Path) { + if !target.ends_with("-uefi") { + return; + } + + for (path, _) in builder.read_stamp_file(stamp) { + if path.extension() != Some(OsStr::new("rlib")) { + continue; + } + + let data = t!(fs::read(&path)); + let data = data.as_slice(); + let archive = t!(ArchiveFile::parse(data)); + for member in archive.members() { + let member = t!(member); + let member_data = t!(member.data(data)); + + let is_coff = match object::File::parse(member_data) { + Ok(member_file) => member_file.format() == BinaryFormat::Coff, + Err(_) => false, + }; + + if !is_coff { + let member_name = String::from_utf8_lossy(member.name()); + panic!("member {} in {} is not COFF", member_name, path.display()); + } + } + } +} + +/// Copy stamped files into an image's `target/lib` directory. 
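`verify_uefi_rlib_format` above walks each staged `.rlib`, parses it as an `ar` archive, and checks that every object member is COFF. The same check can be run by hand with the `object` crate (already a dependency of this code); the crate version in the comment is an assumption:

```rust
// Cargo.toml (assumed): object = "0.32"
use object::read::archive::ArchiveFile;
use object::{BinaryFormat, Object};
use std::{env, fs};

fn main() {
    let path = env::args().nth(1).expect("usage: check-coff <lib.rlib>");
    let data = fs::read(&path).expect("failed to read rlib");
    let archive = ArchiveFile::parse(&*data).expect("not an ar archive");
    for member in archive.members() {
        let member = member.expect("bad archive member");
        let member_data = member.data(&*data).expect("bad member data");
        // Members that are not object files fail to parse; the dist check above
        // treats those as "not COFF" as well.
        let is_coff = object::File::parse(member_data)
            .map(|f| f.format() == BinaryFormat::Coff)
            .unwrap_or(false);
        let name = String::from_utf8_lossy(member.name());
        println!("{name}: {}", if is_coff { "COFF" } else { "not COFF" });
    }
}
```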
+fn copy_target_libs(builder: &Builder<'_>, target: TargetSelection, image: &Path, stamp: &Path) { + let dst = image.join("lib/rustlib").join(target.triple).join("lib"); + let self_contained_dst = dst.join("self-contained"); + t!(fs::create_dir_all(&dst)); + t!(fs::create_dir_all(&self_contained_dst)); + for (path, dependency_type) in builder.read_stamp_file(stamp) { + if dependency_type == DependencyType::TargetSelfContained { + builder.copy(&path, &self_contained_dst.join(path.file_name().unwrap())); + } else if dependency_type == DependencyType::Target || builder.config.build == target { + builder.copy(&path, &dst.join(path.file_name().unwrap())); + } + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Std { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for Std { + type Output = Option; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("rust-std") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Std { + compiler: run.builder.compiler_for( + run.builder.top_stage, + run.builder.config.build, + run.target, + ), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let compiler = self.compiler; + let target = self.target; + + if skip_host_target_lib(builder, compiler) { + return None; + } + + builder.ensure(compile::Std::new(compiler, target)); + + let mut tarball = Tarball::new(builder, "rust-std", &target.triple); + tarball.include_target_in_component_name(true); + + let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); + let stamp = compile::libstd_stamp(builder, compiler_to_use, target); + verify_uefi_rlib_format(builder, target, &stamp); + copy_target_libs(builder, target, &tarball.image_dir(), &stamp); + + Some(tarball.generate()) + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustcDev { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for RustcDev { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("rustc-dev") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustcDev { + compiler: run.builder.compiler_for( + run.builder.top_stage, + run.builder.config.build, + run.target, + ), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let compiler = self.compiler; + let target = self.target; + if skip_host_target_lib(builder, compiler) { + return None; + } + + builder.ensure(compile::Rustc::new(compiler, target)); + + let tarball = Tarball::new(builder, "rustc-dev", &target.triple); + + let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target); + let stamp = compile::librustc_stamp(builder, compiler_to_use, target); + copy_target_libs(builder, target, tarball.image_dir(), &stamp); + + let src_files = &["Cargo.lock"]; + // This is the reduced set of paths which will become the rustc-dev component + // (essentially the compiler crates and all of their path dependencies). 
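`copy_target_libs` above routes every stamped artifact either into the target's `lib` directory or into its `self-contained` subdirectory (startup objects, bundled crt files, and similar). A trimmed-down model of that routing; `DependencyType` here is a simplified stand-in for the bootstrap enum of the same name, and the paths in `main` are illustrative:

```rust
use std::fs;
use std::path::{Path, PathBuf};

/// Simplified stand-in for bootstrap's `DependencyType`.
#[allow(dead_code)] // in the real code the variants come from the stamp file
enum DependencyType {
    Host,
    Target,
    TargetSelfContained,
}

fn copy_target_libs(
    files: &[(PathBuf, DependencyType)],
    image: &Path,
    triple: &str,
) -> std::io::Result<()> {
    let dst = image.join("lib/rustlib").join(triple).join("lib");
    let self_contained = dst.join("self-contained");
    fs::create_dir_all(&self_contained)?; // also creates `dst`
    for (path, kind) in files {
        let name = path.file_name().unwrap();
        let to = match kind {
            DependencyType::TargetSelfContained => self_contained.join(name),
            DependencyType::Target => dst.join(name),
            // Host-only artifacts are skipped here; the real code keeps them only
            // when the build triple is also the target triple.
            DependencyType::Host => continue,
        };
        fs::copy(path, to)?;
    }
    Ok(())
}

fn main() -> std::io::Result<()> {
    // The real file list comes from the stamp written by the `compile::Std` step.
    copy_target_libs(&[], Path::new("image"), "x86_64-unknown-linux-gnu")
}
```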
+ copy_src_dirs( + builder, + &builder.src, + &["compiler"], + &[], + &tarball.image_dir().join("lib/rustlib/rustc-src/rust"), + ); + for file in src_files { + tarball.add_file(builder.src.join(file), "lib/rustlib/rustc-src/rust", 0o644); + } + + Some(tarball.generate()) + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Analysis { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for Analysis { + type Output = Option; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let default = should_build_extended_tool(&run.builder, "analysis"); + run.alias("rust-analysis").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Analysis { + // Find the actual compiler (handling the full bootstrap option) which + // produced the save-analysis data because that data isn't copied + // through the sysroot uplifting. + compiler: run.builder.compiler_for( + run.builder.top_stage, + run.builder.config.build, + run.target, + ), + target: run.target, + }); + } + + /// Creates a tarball of (degenerate) save-analysis metadata, if available. + fn run(self, builder: &Builder<'_>) -> Option { + let compiler = self.compiler; + let target = self.target; + if compiler.host != builder.config.build { + return None; + } + + let src = builder + .stage_out(compiler, Mode::Std) + .join(target.triple) + .join(builder.cargo_dir()) + .join("deps") + .join("save-analysis"); + + // Write a file indicating that this component has been removed. + t!(std::fs::create_dir_all(&src)); + let mut removed = src.clone(); + removed.push("removed.json"); + let mut f = t!(std::fs::File::create(removed)); + t!(write!(f, r#"{{ "warning": "The `rust-analysis` component has been removed." }}"#)); + + let mut tarball = Tarball::new(builder, "rust-analysis", &target.triple); + tarball.include_target_in_component_name(true); + tarball.add_dir(src, format!("lib/rustlib/{}/analysis", target.triple)); + Some(tarball.generate()) + } +} + +/// Use the `builder` to make a filtered copy of `base`/X for X in (`src_dirs` - `exclude_dirs`) to +/// `dst_dir`. 
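`copy_src_dirs`, whose definition follows, delegates to `builder.cp_filtered`, that is, a recursive copy that drops VCS metadata, LLVM test inputs, and explicitly excluded paths. A minimal sketch of such a filtered copy using the `walkdir` crate (already a bootstrap dependency); the filter shown covers only the VCS and editor-backup part of the exclusion list:

```rust
use std::ffi::OsStr;
use std::fs;
use std::path::Path;
use walkdir::WalkDir;

/// Copy `src` into `dst`, keeping only relative paths accepted by `keep`.
fn cp_filtered(src: &Path, dst: &Path, keep: &dyn Fn(&Path) -> bool) -> std::io::Result<()> {
    for entry in WalkDir::new(src).into_iter().filter_map(Result::ok) {
        let rel = entry.path().strip_prefix(src).unwrap();
        if rel.as_os_str().is_empty() || !keep(rel) {
            continue;
        }
        let to = dst.join(rel);
        if entry.file_type().is_dir() {
            fs::create_dir_all(&to)?;
        } else if entry.file_type().is_file() {
            if let Some(parent) = to.parent() {
                fs::create_dir_all(parent)?;
            }
            fs::copy(entry.path(), &to)?;
        }
    }
    Ok(())
}

fn main() -> std::io::Result<()> {
    let vcs = [".git", ".hg", ".svn", "CVS"];
    cp_filtered(Path::new("library"), Path::new("image/lib/rustlib/src/rust/library"), &|p| {
        !p.iter().any(|c| vcs.iter().any(|v| c == OsStr::new(v)))
            && !p.to_string_lossy().ends_with('~')
    })
}
```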
+fn copy_src_dirs( + builder: &Builder<'_>, + base: &Path, + src_dirs: &[&str], + exclude_dirs: &[&str], + dst_dir: &Path, +) { + fn filter_fn(exclude_dirs: &[&str], dir: &str, path: &Path) -> bool { + let spath = match path.to_str() { + Some(path) => path, + None => return false, + }; + if spath.ends_with('~') || spath.ends_with(".pyc") { + return false; + } + + const LLVM_PROJECTS: &[&str] = &[ + "llvm-project/clang", + "llvm-project\\clang", + "llvm-project/libunwind", + "llvm-project\\libunwind", + "llvm-project/lld", + "llvm-project\\lld", + "llvm-project/lldb", + "llvm-project\\lldb", + "llvm-project/llvm", + "llvm-project\\llvm", + "llvm-project/compiler-rt", + "llvm-project\\compiler-rt", + "llvm-project/cmake", + "llvm-project\\cmake", + "llvm-project/runtimes", + "llvm-project\\runtimes", + ]; + if spath.contains("llvm-project") + && !spath.ends_with("llvm-project") + && !LLVM_PROJECTS.iter().any(|path| spath.contains(path)) + { + return false; + } + + const LLVM_TEST: &[&str] = &["llvm-project/llvm/test", "llvm-project\\llvm\\test"]; + if LLVM_TEST.iter().any(|path| spath.contains(path)) + && (spath.ends_with(".ll") || spath.ends_with(".td") || spath.ends_with(".s")) + { + return false; + } + + let full_path = Path::new(dir).join(path); + if exclude_dirs.iter().any(|excl| full_path == Path::new(excl)) { + return false; + } + + let excludes = [ + "CVS", + "RCS", + "SCCS", + ".git", + ".gitignore", + ".gitmodules", + ".gitattributes", + ".cvsignore", + ".svn", + ".arch-ids", + "{arch}", + "=RELEASE-ID", + "=meta-update", + "=update", + ".bzr", + ".bzrignore", + ".bzrtags", + ".hg", + ".hgignore", + ".hgrags", + "_darcs", + ]; + !path.iter().map(|s| s.to_str().unwrap()).any(|s| excludes.contains(&s)) + } + + // Copy the directories using our filter + for item in src_dirs { + let dst = &dst_dir.join(item); + t!(fs::create_dir_all(dst)); + builder.cp_filtered(&base.join(item), dst, &|path| filter_fn(exclude_dirs, item, path)); + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Src; + +impl Step for Src { + /// The output path of the src installer tarball + type Output = GeneratedTarball; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("rust-src") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Src); + } + + /// Creates the `rust-src` installer component + fn run(self, builder: &Builder<'_>) -> GeneratedTarball { + if !builder.config.dry_run() { + builder.update_submodule(&Path::new("src/llvm-project")); + } + + let tarball = Tarball::new_targetless(builder, "rust-src"); + + // A lot of tools expect the rust-src component to be entirely in this directory, so if you + // change that (e.g. by adding another directory `lib/rustlib/src/foo` or + // `lib/rustlib/src/rust/foo`), you will need to go around hunting for implicit assumptions + // and fix them... + // + // NOTE: if you update the paths here, you also should update the "virtual" path + // translation code in `imported_source_files` in `src/librustc_metadata/rmeta/decoder.rs` + let dst_src = tarball.image_dir().join("lib/rustlib/src/rust"); + + let src_files = ["Cargo.lock"]; + // This is the reduced set of paths which will become the rust-src component + // (essentially libstd and all of its path dependencies). + copy_src_dirs( + builder, + &builder.src, + &["library", "src/llvm-project/libunwind"], + &[ + // not needed and contains symlinks which rustup currently + // chokes on when unpacking. 
+ "library/backtrace/crates", + // these are 30MB combined and aren't necessary for building + // the standard library. + "library/stdarch/Cargo.toml", + "library/stdarch/crates/stdarch-verify", + "library/stdarch/crates/intrinsic-test", + ], + &dst_src, + ); + for file in src_files.iter() { + builder.copy(&builder.src.join(file), &dst_src.join(file)); + } + + tarball.generate() + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct PlainSourceTarball; + +impl Step for PlainSourceTarball { + /// Produces the location of the tarball generated + type Output = GeneratedTarball; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.alias("rustc-src").default_condition(builder.config.rust_dist_src) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(PlainSourceTarball); + } + + /// Creates the plain source tarball + fn run(self, builder: &Builder<'_>) -> GeneratedTarball { + // NOTE: This is a strange component in a lot of ways. It uses `src` as the target, which + // means neither rustup nor rustup-toolchain-install-master know how to download it. + // It also contains symbolic links, unlike other any other dist tarball. + // It's used for distros building rustc from source in a pre-vendored environment. + let mut tarball = Tarball::new(builder, "rustc", "src"); + tarball.permit_symlinks(true); + let plain_dst_src = tarball.image_dir(); + + // This is the set of root paths which will become part of the source package + let src_files = [ + "COPYRIGHT", + "LICENSE-APACHE", + "LICENSE-MIT", + "CONTRIBUTING.md", + "README.md", + "RELEASES.md", + "configure", + "x.py", + "config.example.toml", + "Cargo.toml", + "Cargo.lock", + ".gitmodules", + ]; + let src_dirs = ["src", "compiler", "library", "tests"]; + + copy_src_dirs(builder, &builder.src, &src_dirs, &[], &plain_dst_src); + + // Copy the files normally + for item in &src_files { + builder.copy(&builder.src.join(item), &plain_dst_src.join(item)); + } + + // Create the version file + builder.create(&plain_dst_src.join("version"), &builder.rust_version()); + if let Some(info) = builder.rust_info().info() { + channel::write_commit_hash_file(&plain_dst_src, &info.sha); + channel::write_commit_info_file(&plain_dst_src, info); + } + + // If we're building from git or tarball sources, we need to vendor + // a complete distribution. + if builder.rust_info().is_managed_git_subrepository() + || builder.rust_info().is_from_tarball() + { + if builder.rust_info().is_managed_git_subrepository() { + // Ensure we have the submodules checked out. + builder.update_submodule(Path::new("src/tools/cargo")); + } + + // Vendor all Cargo dependencies + let mut cmd = Command::new(&builder.initial_cargo); + cmd.arg("vendor") + .arg("--sync") + .arg(builder.src.join("./src/tools/cargo/Cargo.toml")) + .arg("--sync") + .arg(builder.src.join("./src/tools/rust-analyzer/Cargo.toml")) + .arg("--sync") + .arg(builder.src.join("./compiler/rustc_codegen_cranelift/Cargo.toml")) + .arg("--sync") + .arg(builder.src.join("./src/bootstrap/Cargo.toml")) + // Will read the libstd Cargo.toml + // which uses the unstable `public-dependency` feature. 
+ .env("RUSTC_BOOTSTRAP", "1") + .current_dir(&plain_dst_src); + + let config = if !builder.config.dry_run() { + t!(String::from_utf8(t!(cmd.output()).stdout)) + } else { + String::new() + }; + + let cargo_config_dir = plain_dst_src.join(".cargo"); + builder.create_dir(&cargo_config_dir); + builder.create(&cargo_config_dir.join("config.toml"), &config); + } + + tarball.bare() + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Cargo { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for Cargo { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let default = should_build_extended_tool(&run.builder, "cargo"); + run.alias("cargo").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Cargo { + compiler: run.builder.compiler_for( + run.builder.top_stage, + run.builder.config.build, + run.target, + ), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let compiler = self.compiler; + let target = self.target; + + let cargo = builder.ensure(tool::Cargo { compiler, target }); + let src = builder.src.join("src/tools/cargo"); + let etc = src.join("src/etc"); + + // Prepare the image directory + let mut tarball = Tarball::new(builder, "cargo", &target.triple); + tarball.set_overlay(OverlayKind::Cargo); + + tarball.add_file(&cargo, "bin", 0o755); + tarball.add_file(etc.join("_cargo"), "share/zsh/site-functions", 0o644); + tarball.add_renamed_file(etc.join("cargo.bashcomp.sh"), "etc/bash_completion.d", "cargo"); + tarball.add_dir(etc.join("man"), "share/man/man1"); + tarball.add_legal_and_readme_to("share/doc/cargo"); + + Some(tarball.generate()) + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Rls { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for Rls { + type Output = Option; + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let default = should_build_extended_tool(&run.builder, "rls"); + run.alias("rls").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Rls { + compiler: run.builder.compiler_for( + run.builder.top_stage, + run.builder.config.build, + run.target, + ), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let compiler = self.compiler; + let target = self.target; + + let rls = builder + .ensure(tool::Rls { compiler, target, extra_features: Vec::new() }) + .expect("rls expected to build"); + + let mut tarball = Tarball::new(builder, "rls", &target.triple); + tarball.set_overlay(OverlayKind::RLS); + tarball.is_preview(true); + tarball.add_file(rls, "bin", 0o755); + tarball.add_legal_and_readme_to("share/doc/rls"); + Some(tarball.generate()) + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustAnalyzer { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for RustAnalyzer { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let default = should_build_extended_tool(&run.builder, "rust-analyzer"); + run.alias("rust-analyzer").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustAnalyzer { + compiler: run.builder.compiler_for( + run.builder.top_stage, + 
run.builder.config.build, + run.target, + ), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let compiler = self.compiler; + let target = self.target; + + let rust_analyzer = builder + .ensure(tool::RustAnalyzer { compiler, target }) + .expect("rust-analyzer always builds"); + + let mut tarball = Tarball::new(builder, "rust-analyzer", &target.triple); + tarball.set_overlay(OverlayKind::RustAnalyzer); + tarball.is_preview(true); + tarball.add_file(rust_analyzer, "bin", 0o755); + tarball.add_legal_and_readme_to("share/doc/rust-analyzer"); + Some(tarball.generate()) + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Clippy { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for Clippy { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let default = should_build_extended_tool(&run.builder, "clippy"); + run.alias("clippy").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Clippy { + compiler: run.builder.compiler_for( + run.builder.top_stage, + run.builder.config.build, + run.target, + ), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let compiler = self.compiler; + let target = self.target; + + // Prepare the image directory + // We expect clippy to build, because we've exited this step above if tool + // state for clippy isn't testing. + let clippy = builder + .ensure(tool::Clippy { compiler, target, extra_features: Vec::new() }) + .expect("clippy expected to build - essential tool"); + let cargoclippy = builder + .ensure(tool::CargoClippy { compiler, target, extra_features: Vec::new() }) + .expect("clippy expected to build - essential tool"); + + let mut tarball = Tarball::new(builder, "clippy", &target.triple); + tarball.set_overlay(OverlayKind::Clippy); + tarball.is_preview(true); + tarball.add_file(clippy, "bin", 0o755); + tarball.add_file(cargoclippy, "bin", 0o755); + tarball.add_legal_and_readme_to("share/doc/clippy"); + Some(tarball.generate()) + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Miri { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for Miri { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let default = should_build_extended_tool(&run.builder, "miri"); + run.alias("miri").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Miri { + compiler: run.builder.compiler_for( + run.builder.top_stage, + run.builder.config.build, + run.target, + ), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + // This prevents miri from being built for "dist" or "install" + // on the stable/beta channels. It is a nightly-only tool and should + // not be included. 
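The guard below relies on `builder.build.unstable_features()`, which is driven by the release channel. A simplified model of that gate, assuming the usual channel names; the real method lives on bootstrap's `Build` type:

```rust
/// Rough model of bootstrap's channel gate: only nightly/dev builds may ship
/// nightly-only tools such as miri in their dist output.
fn unstable_features(channel: &str) -> bool {
    match channel {
        "stable" | "beta" => false,
        "nightly" | "dev" => true,
        // Unknown channels are treated conservatively in this sketch.
        _ => false,
    }
}

fn main() {
    for channel in ["stable", "beta", "nightly", "dev"] {
        println!("{channel}: dist miri = {}", unstable_features(channel));
    }
}
```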
+ if !builder.build.unstable_features() { + return None; + } + let compiler = self.compiler; + let target = self.target; + + let miri = builder.ensure(tool::Miri { compiler, target, extra_features: Vec::new() })?; + let cargomiri = + builder.ensure(tool::CargoMiri { compiler, target, extra_features: Vec::new() })?; + + let mut tarball = Tarball::new(builder, "miri", &target.triple); + tarball.set_overlay(OverlayKind::Miri); + tarball.is_preview(true); + tarball.add_file(miri, "bin", 0o755); + tarball.add_file(cargomiri, "bin", 0o755); + tarball.add_legal_and_readme_to("share/doc/miri"); + Some(tarball.generate()) + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct CodegenBackend { + pub compiler: Compiler, + pub backend: Interned, +} + +impl Step for CodegenBackend { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("compiler/rustc_codegen_cranelift") + } + + fn make_run(run: RunConfig<'_>) { + for &backend in &run.builder.config.rust_codegen_backends { + if backend == "llvm" { + continue; // Already built as part of rustc + } + + run.builder.ensure(CodegenBackend { + compiler: run.builder.compiler(run.builder.top_stage, run.target), + backend, + }); + } + } + + fn run(self, builder: &Builder<'_>) -> Option { + if builder.config.dry_run() { + return None; + } + + // This prevents rustc_codegen_cranelift from being built for "dist" + // or "install" on the stable/beta channels. It is not yet stable and + // should not be included. + if !builder.build.unstable_features() { + return None; + } + + if !builder.config.rust_codegen_backends.contains(&self.backend) { + return None; + } + + if self.backend == "cranelift" { + if !target_supports_cranelift_backend(self.compiler.host) { + builder.info("target not supported by rustc_codegen_cranelift. skipping"); + return None; + } + + if self.compiler.host.contains("windows") { + builder.info( + "dist currently disabled for windows by rustc_codegen_cranelift. 
skipping", + ); + return None; + } + } + + let compiler = self.compiler; + let backend = self.backend; + + let mut tarball = + Tarball::new(builder, &format!("rustc-codegen-{}", backend), &compiler.host.triple); + if backend == "cranelift" { + tarball.set_overlay(OverlayKind::RustcCodegenCranelift); + } else { + panic!("Unknown backend rustc_codegen_{}", backend); + } + tarball.is_preview(true); + tarball.add_legal_and_readme_to(format!("share/doc/rustc_codegen_{}", backend)); + + let src = builder.sysroot(compiler); + let backends_src = builder.sysroot_codegen_backends(compiler); + let backends_rel = backends_src + .strip_prefix(&src) + .unwrap() + .strip_prefix(builder.sysroot_libdir_relative(compiler)) + .unwrap(); + // Don't use custom libdir here because ^lib/ will be resolved again with installer + let backends_dst = PathBuf::from("lib").join(&backends_rel); + + let backend_name = format!("rustc_codegen_{}", backend); + let mut found_backend = false; + for backend in fs::read_dir(&backends_src).unwrap() { + let file_name = backend.unwrap().file_name(); + if file_name.to_str().unwrap().contains(&backend_name) { + tarball.add_file(backends_src.join(file_name), &backends_dst, 0o644); + found_backend = true; + } + } + assert!(found_backend); + + Some(tarball.generate()) + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Rustfmt { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for Rustfmt { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let default = should_build_extended_tool(&run.builder, "rustfmt"); + run.alias("rustfmt").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Rustfmt { + compiler: run.builder.compiler_for( + run.builder.top_stage, + run.builder.config.build, + run.target, + ), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let compiler = self.compiler; + let target = self.target; + + let rustfmt = builder + .ensure(tool::Rustfmt { compiler, target, extra_features: Vec::new() }) + .expect("rustfmt expected to build - essential tool"); + let cargofmt = builder + .ensure(tool::Cargofmt { compiler, target, extra_features: Vec::new() }) + .expect("cargo fmt expected to build - essential tool"); + let mut tarball = Tarball::new(builder, "rustfmt", &target.triple); + tarball.set_overlay(OverlayKind::Rustfmt); + tarball.is_preview(true); + tarball.add_file(rustfmt, "bin", 0o755); + tarball.add_file(cargofmt, "bin", 0o755); + tarball.add_legal_and_readme_to("share/doc/rustfmt"); + Some(tarball.generate()) + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustDemangler { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for RustDemangler { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + // While other tools use `should_build_extended_tool` to decide whether to be run by + // default or not, `rust-demangler` must be build when *either* it's enabled as a tool like + // the other ones or if `profiler = true`. Because we don't know the target at this stage + // we run the step by default when only `extended = true`, and decide whether to actually + // run it or not later. 
+ let default = run.builder.config.extended; + run.alias("rust-demangler").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustDemangler { + compiler: run.builder.compiler_for( + run.builder.top_stage, + run.builder.config.build, + run.target, + ), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let compiler = self.compiler; + let target = self.target; + + // Only build this extended tool if explicitly included in `tools`, or if `profiler = true` + let condition = should_build_extended_tool(builder, "rust-demangler") + || builder.config.profiler_enabled(target); + if builder.config.extended && !condition { + return None; + } + + let rust_demangler = builder + .ensure(tool::RustDemangler { compiler, target, extra_features: Vec::new() }) + .expect("rust-demangler expected to build - in-tree tool"); + + // Prepare the image directory + let mut tarball = Tarball::new(builder, "rust-demangler", &target.triple); + tarball.set_overlay(OverlayKind::RustDemangler); + tarball.is_preview(true); + tarball.add_file(&rust_demangler, "bin", 0o755); + tarball.add_legal_and_readme_to("share/doc/rust-demangler"); + Some(tarball.generate()) + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Extended { + stage: u32, + host: TargetSelection, + target: TargetSelection, +} + +impl Step for Extended { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.alias("extended").default_condition(builder.config.extended) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Extended { + stage: run.builder.top_stage, + host: run.builder.config.build, + target: run.target, + }); + } + + /// Creates a combined installer for the specified target in the provided stage. + fn run(self, builder: &Builder<'_>) { + let target = self.target; + let stage = self.stage; + let compiler = builder.compiler_for(self.stage, self.host, self.target); + + builder.info(&format!("Dist extended stage{} ({})", compiler.stage, target)); + + let mut tarballs = Vec::new(); + let mut built_tools = HashSet::new(); + macro_rules! add_component { + ($name:expr => $step:expr) => { + if let Some(tarball) = builder.ensure_if_default($step, Kind::Dist) { + tarballs.push(tarball); + built_tools.insert($name); + } + }; + } + + // When rust-std package split from rustc, we needed to ensure that during + // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering + // the std files during uninstall. To do this ensure that rustc comes + // before rust-std in the list below. 
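The `add_component!` macro defined above collects one tarball per optional tool and remembers which tools were actually built, so the pkg and msi stages later in this step can decide which installer fragments to emit. A much-simplified model of that bookkeeping; the types and the enabled/disabled flags are illustrative:

```rust
use std::collections::HashSet;

/// Stand-in for bootstrap's `GeneratedTarball`.
struct Tarball(String);

/// Stand-in for `builder.ensure_if_default(...)`: produce the component only if enabled.
fn ensure_if_default(name: &str, enabled: bool) -> Option<Tarball> {
    enabled.then(|| Tarball(format!("{name}.tar.xz")))
}

fn main() {
    let mut tarballs = Vec::new();
    let mut built_tools = HashSet::new();
    for (name, enabled) in [("cargo", true), ("clippy", true), ("miri", false)] {
        if let Some(tarball) = ensure_if_default(name, enabled) {
            tarballs.push(tarball);
            built_tools.insert(name);
        }
    }
    // The installer generation consults `built_tools` before referencing a
    // component group, so a missing tool never leaves a dangling reference.
    println!("packaged: {:?}", tarballs.iter().map(|t| &t.0).collect::<Vec<_>>());
    println!("built tools: {built_tools:?}");
}
```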
+ tarballs.push(builder.ensure(Rustc { compiler: builder.compiler(stage, target) })); + tarballs.push(builder.ensure(Std { compiler, target }).expect("missing std")); + + if target.ends_with("windows-gnu") { + tarballs.push(builder.ensure(Mingw { host: target }).expect("missing mingw")); + } + + add_component!("rust-docs" => Docs { host: target }); + add_component!("rust-json-docs" => JsonDocs { host: target }); + add_component!("rust-demangler"=> RustDemangler { compiler, target }); + add_component!("cargo" => Cargo { compiler, target }); + add_component!("rustfmt" => Rustfmt { compiler, target }); + add_component!("rls" => Rls { compiler, target }); + add_component!("rust-analyzer" => RustAnalyzer { compiler, target }); + add_component!("llvm-components" => LlvmTools { target }); + add_component!("clippy" => Clippy { compiler, target }); + add_component!("miri" => Miri { compiler, target }); + add_component!("analysis" => Analysis { compiler, target }); + add_component!("rustc-codegen-cranelift" => CodegenBackend { + compiler: builder.compiler(stage, target), + backend: INTERNER.intern_str("cranelift"), + }); + + let etc = builder.src.join("src/etc/installer"); + + // Avoid producing tarballs during a dry run. + if builder.config.dry_run() { + return; + } + + let tarball = Tarball::new(builder, "rust", &target.triple); + let generated = tarball.combine(&tarballs); + + let tmp = tmpdir(builder).join("combined-tarball"); + let work = generated.work_dir(); + + let mut license = String::new(); + license += &builder.read(&builder.src.join("COPYRIGHT")); + license += &builder.read(&builder.src.join("LICENSE-APACHE")); + license += &builder.read(&builder.src.join("LICENSE-MIT")); + license.push('\n'); + license.push('\n'); + + let rtf = r"{\rtf1\ansi\deff0{\fonttbl{\f0\fnil\fcharset0 Arial;}}\nowwrap\fs18"; + let mut rtf = rtf.to_string(); + rtf.push('\n'); + for line in license.lines() { + rtf.push_str(line); + rtf.push_str("\\line "); + } + rtf.push('}'); + + fn filter(contents: &str, marker: &str) -> String { + let start = format!("tool-{marker}-start"); + let end = format!("tool-{marker}-end"); + let mut lines = Vec::new(); + let mut omitted = false; + for line in contents.lines() { + if line.contains(&start) { + omitted = true; + } else if line.contains(&end) { + omitted = false; + } else if !omitted { + lines.push(line); + } + } + + lines.join("\n") + } + + let xform = |p: &Path| { + let mut contents = t!(fs::read_to_string(p)); + for tool in &["rust-demangler", "miri", "rust-docs"] { + if !built_tools.contains(tool) { + contents = filter(&contents, tool); + } + } + let ret = tmp.join(p.file_name().unwrap()); + t!(fs::write(&ret, &contents)); + ret + }; + + if target.contains("apple-darwin") { + builder.info("building pkg installer"); + let pkg = tmp.join("pkg"); + let _ = fs::remove_dir_all(&pkg); + + let pkgbuild = |component: &str| { + let mut cmd = Command::new("pkgbuild"); + cmd.arg("--identifier") + .arg(format!("org.rust-lang.{}", component)) + .arg("--scripts") + .arg(pkg.join(component)) + .arg("--nopayload") + .arg(pkg.join(component).with_extension("pkg")); + builder.run(&mut cmd); + }; + + let prepare = |name: &str| { + builder.create_dir(&pkg.join(name)); + builder.cp_r( + &work.join(&format!("{}-{}", pkgname(builder, name), target.triple)), + &pkg.join(name), + ); + builder.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755); + pkgbuild(name); + }; + prepare("rustc"); + prepare("cargo"); + prepare("rust-std"); + prepare("rust-analysis"); + prepare("clippy"); + 
prepare("rust-analyzer"); + for tool in &["rust-docs", "rust-demangler", "miri", "rustc-codegen-cranelift"] { + if built_tools.contains(tool) { + prepare(tool); + } + } + // create an 'uninstall' package + builder.install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755); + pkgbuild("uninstall"); + + builder.create_dir(&pkg.join("res")); + builder.create(&pkg.join("res/LICENSE.txt"), &license); + builder.install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644); + let mut cmd = Command::new("productbuild"); + cmd.arg("--distribution") + .arg(xform(&etc.join("pkg/Distribution.xml"))) + .arg("--resources") + .arg(pkg.join("res")) + .arg(distdir(builder).join(format!( + "{}-{}.pkg", + pkgname(builder, "rust"), + target.triple + ))) + .arg("--package-path") + .arg(&pkg); + let _time = timeit(builder); + builder.run(&mut cmd); + } + + if target.contains("windows") { + let exe = tmp.join("exe"); + let _ = fs::remove_dir_all(&exe); + + let prepare = |name: &str| { + builder.create_dir(&exe.join(name)); + let dir = if name == "rust-std" || name == "rust-analysis" { + format!("{}-{}", name, target.triple) + } else if name == "rust-analyzer" { + "rust-analyzer-preview".to_string() + } else if name == "clippy" { + "clippy-preview".to_string() + } else if name == "rust-demangler" { + "rust-demangler-preview".to_string() + } else if name == "miri" { + "miri-preview".to_string() + } else if name == "rustc-codegen-cranelift" { + // FIXME add installer support for cg_clif once it is ready to be distributed on + // windows. + unreachable!("cg_clif shouldn't be built for windows"); + } else { + name.to_string() + }; + builder.cp_r( + &work.join(&format!("{}-{}", pkgname(builder, name), target.triple)).join(dir), + &exe.join(name), + ); + builder.remove(&exe.join(name).join("manifest.in")); + }; + prepare("rustc"); + prepare("cargo"); + prepare("rust-analysis"); + prepare("rust-std"); + for tool in &["clippy", "rust-analyzer", "rust-docs", "rust-demangler", "miri"] { + if built_tools.contains(tool) { + prepare(tool); + } + } + if target.ends_with("windows-gnu") { + prepare("rust-mingw"); + } + + builder.install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644); + + // Generate msi installer + let wix_path = env::var_os("WIX") + .expect("`WIX` environment variable must be set for generating MSI installer(s)."); + let wix = PathBuf::from(wix_path); + let heat = wix.join("bin/heat.exe"); + let candle = wix.join("bin/candle.exe"); + let light = wix.join("bin/light.exe"); + + let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"]; + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rustc") + .args(&heat_flags) + .arg("-cg") + .arg("RustcGroup") + .arg("-dr") + .arg("Rustc") + .arg("-var") + .arg("var.RustcDir") + .arg("-out") + .arg(exe.join("RustcGroup.wxs")), + ); + if built_tools.contains("rust-docs") { + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-docs") + .args(&heat_flags) + .arg("-cg") + .arg("DocsGroup") + .arg("-dr") + .arg("Docs") + .arg("-var") + .arg("var.DocsDir") + .arg("-out") + .arg(exe.join("DocsGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/squash-components.xsl")), + ); + } + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("cargo") + .args(&heat_flags) + .arg("-cg") + .arg("CargoGroup") + .arg("-dr") + .arg("Cargo") + .arg("-var") + .arg("var.CargoDir") + .arg("-out") + .arg(exe.join("CargoGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); + 
builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-std") + .args(&heat_flags) + .arg("-cg") + .arg("StdGroup") + .arg("-dr") + .arg("Std") + .arg("-var") + .arg("var.StdDir") + .arg("-out") + .arg(exe.join("StdGroup.wxs")), + ); + if built_tools.contains("rust-analyzer") { + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-analyzer") + .args(&heat_flags) + .arg("-cg") + .arg("RustAnalyzerGroup") + .arg("-dr") + .arg("RustAnalyzer") + .arg("-var") + .arg("var.RustAnalyzerDir") + .arg("-out") + .arg(exe.join("RustAnalyzerGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); + } + if built_tools.contains("clippy") { + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("clippy") + .args(&heat_flags) + .arg("-cg") + .arg("ClippyGroup") + .arg("-dr") + .arg("Clippy") + .arg("-var") + .arg("var.ClippyDir") + .arg("-out") + .arg(exe.join("ClippyGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); + } + if built_tools.contains("rust-demangler") { + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-demangler") + .args(&heat_flags) + .arg("-cg") + .arg("RustDemanglerGroup") + .arg("-dr") + .arg("RustDemangler") + .arg("-var") + .arg("var.RustDemanglerDir") + .arg("-out") + .arg(exe.join("RustDemanglerGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); + } + if built_tools.contains("miri") { + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("miri") + .args(&heat_flags) + .arg("-cg") + .arg("MiriGroup") + .arg("-dr") + .arg("Miri") + .arg("-var") + .arg("var.MiriDir") + .arg("-out") + .arg(exe.join("MiriGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); + } + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-analysis") + .args(&heat_flags) + .arg("-cg") + .arg("AnalysisGroup") + .arg("-dr") + .arg("Analysis") + .arg("-var") + .arg("var.AnalysisDir") + .arg("-out") + .arg(exe.join("AnalysisGroup.wxs")) + .arg("-t") + .arg(etc.join("msi/remove-duplicates.xsl")), + ); + if target.ends_with("windows-gnu") { + builder.run( + Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-mingw") + .args(&heat_flags) + .arg("-cg") + .arg("GccGroup") + .arg("-dr") + .arg("Gcc") + .arg("-var") + .arg("var.GccDir") + .arg("-out") + .arg(exe.join("GccGroup.wxs")), + ); + } + + let candle = |input: &Path| { + let output = exe.join(input.file_stem().unwrap()).with_extension("wixobj"); + let arch = if target.contains("x86_64") { "x64" } else { "x86" }; + let mut cmd = Command::new(&candle); + cmd.current_dir(&exe) + .arg("-nologo") + .arg("-dRustcDir=rustc") + .arg("-dCargoDir=cargo") + .arg("-dStdDir=rust-std") + .arg("-dAnalysisDir=rust-analysis") + .arg("-arch") + .arg(&arch) + .arg("-out") + .arg(&output) + .arg(&input); + add_env(builder, &mut cmd, target); + + if built_tools.contains("clippy") { + cmd.arg("-dClippyDir=clippy"); + } + if built_tools.contains("rust-docs") { + cmd.arg("-dDocsDir=rust-docs"); + } + if built_tools.contains("rust-demangler") { + cmd.arg("-dRustDemanglerDir=rust-demangler"); + } + if built_tools.contains("rust-analyzer") { + cmd.arg("-dRustAnalyzerDir=rust-analyzer"); + } + if built_tools.contains("miri") { + cmd.arg("-dMiriDir=miri"); + } + if target.ends_with("windows-gnu") { + cmd.arg("-dGccDir=rust-mingw"); + } + builder.run(&mut cmd); + }; + candle(&xform(&etc.join("msi/rust.wxs"))); + 
candle(&etc.join("msi/ui.wxs")); + candle(&etc.join("msi/rustwelcomedlg.wxs")); + candle("RustcGroup.wxs".as_ref()); + if built_tools.contains("rust-docs") { + candle("DocsGroup.wxs".as_ref()); + } + candle("CargoGroup.wxs".as_ref()); + candle("StdGroup.wxs".as_ref()); + if built_tools.contains("clippy") { + candle("ClippyGroup.wxs".as_ref()); + } + if built_tools.contains("miri") { + candle("MiriGroup.wxs".as_ref()); + } + if built_tools.contains("rust-demangler") { + candle("RustDemanglerGroup.wxs".as_ref()); + } + if built_tools.contains("rust-analyzer") { + candle("RustAnalyzerGroup.wxs".as_ref()); + } + candle("AnalysisGroup.wxs".as_ref()); + + if target.ends_with("windows-gnu") { + candle("GccGroup.wxs".as_ref()); + } + + builder.create(&exe.join("LICENSE.rtf"), &rtf); + builder.install(&etc.join("gfx/banner.bmp"), &exe, 0o644); + builder.install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644); + + builder.info(&format!("building `msi` installer with {light:?}")); + let filename = format!("{}-{}.msi", pkgname(builder, "rust"), target.triple); + let mut cmd = Command::new(&light); + cmd.arg("-nologo") + .arg("-ext") + .arg("WixUIExtension") + .arg("-ext") + .arg("WixUtilExtension") + .arg("-out") + .arg(exe.join(&filename)) + .arg("rust.wixobj") + .arg("ui.wixobj") + .arg("rustwelcomedlg.wixobj") + .arg("RustcGroup.wixobj") + .arg("CargoGroup.wixobj") + .arg("StdGroup.wixobj") + .arg("AnalysisGroup.wixobj") + .current_dir(&exe); + + if built_tools.contains("clippy") { + cmd.arg("ClippyGroup.wixobj"); + } + if built_tools.contains("miri") { + cmd.arg("MiriGroup.wixobj"); + } + if built_tools.contains("rust-analyzer") { + cmd.arg("RustAnalyzerGroup.wixobj"); + } + if built_tools.contains("rust-demangler") { + cmd.arg("RustDemanglerGroup.wixobj"); + } + if built_tools.contains("rust-docs") { + cmd.arg("DocsGroup.wixobj"); + } + + if target.ends_with("windows-gnu") { + cmd.arg("GccGroup.wixobj"); + } + // ICE57 wrongly complains about the shortcuts + cmd.arg("-sice:ICE57"); + + let _time = timeit(builder); + builder.run(&mut cmd); + + if !builder.config.dry_run() { + t!(fs::rename(exe.join(&filename), distdir(builder).join(&filename))); + } + } + } +} + +fn add_env(builder: &Builder<'_>, cmd: &mut Command, target: TargetSelection) { + let mut parts = builder.version.split('.'); + cmd.env("CFG_RELEASE_INFO", builder.rust_version()) + .env("CFG_RELEASE_NUM", &builder.version) + .env("CFG_RELEASE", builder.rust_release()) + .env("CFG_VER_MAJOR", parts.next().unwrap()) + .env("CFG_VER_MINOR", parts.next().unwrap()) + .env("CFG_VER_PATCH", parts.next().unwrap()) + .env("CFG_VER_BUILD", "0") // just needed to build + .env("CFG_PACKAGE_VERS", builder.rust_package_vers()) + .env("CFG_PACKAGE_NAME", pkgname(builder, "rust")) + .env("CFG_BUILD", target.triple) + .env("CFG_CHANNEL", &builder.config.channel); + + if target.contains("windows-gnullvm") { + cmd.env("CFG_MINGW", "1").env("CFG_ABI", "LLVM"); + } else if target.contains("windows-gnu") { + cmd.env("CFG_MINGW", "1").env("CFG_ABI", "GNU"); + } else { + cmd.env("CFG_MINGW", "0").env("CFG_ABI", "MSVC"); + } +} + +fn install_llvm_file(builder: &Builder<'_>, source: &Path, destination: &Path) { + if builder.config.dry_run() { + return; + } + + builder.install(&source, destination, 0o644); +} + +/// Maybe add LLVM object files to the given destination lib-dir. Allows either static or dynamic linking. +/// +/// Returns whether the files were actually copied. 
+fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir: &Path) -> bool { + if let Some(config) = builder.config.target_config.get(&target) { + if config.llvm_config.is_some() && !builder.config.llvm_from_ci { + // If the LLVM was externally provided, then we don't currently copy + // artifacts into the sysroot. This is not necessarily the right + // choice (in particular, it will require the LLVM dylib to be in + // the linker's load path at runtime), but the common use case for + // external LLVMs is distribution provided LLVMs, and in that case + // they're usually in the standard search path (e.g., /usr/lib) and + // copying them here is going to cause problems as we may end up + // with the wrong files and isn't what distributions want. + // + // This behavior may be revisited in the future though. + // + // If the LLVM is coming from ourselves (just from CI) though, we + // still want to install it, as it otherwise won't be available. + return false; + } + } + + // On macOS, rustc (and LLVM tools) link to an unversioned libLLVM.dylib + // instead of libLLVM-11-rust-....dylib, as on linux. It's not entirely + // clear why this is the case, though. llvm-config will emit the versioned + // paths and we don't want those in the sysroot (as we're expecting + // unversioned paths). + if target.contains("apple-darwin") && builder.llvm_link_shared() { + let src_libdir = builder.llvm_out(target).join("lib"); + let llvm_dylib_path = src_libdir.join("libLLVM.dylib"); + if llvm_dylib_path.exists() { + builder.install(&llvm_dylib_path, dst_libdir, 0o644); + } + !builder.config.dry_run() + } else if let Ok(llvm::LlvmResult { llvm_config, .. }) = + llvm::prebuilt_llvm_config(builder, target) + { + let mut cmd = Command::new(llvm_config); + cmd.arg("--libfiles"); + builder.verbose(&format!("running {cmd:?}")); + let files = if builder.config.dry_run() { "".into() } else { output(&mut cmd) }; + let build_llvm_out = &builder.llvm_out(builder.config.build); + let target_llvm_out = &builder.llvm_out(target); + for file in files.trim_end().split(' ') { + // If we're not using a custom LLVM, make sure we package for the target. + let file = if let Ok(relative_path) = Path::new(file).strip_prefix(build_llvm_out) { + target_llvm_out.join(relative_path) + } else { + PathBuf::from(file) + }; + install_llvm_file(builder, &file, dst_libdir); + } + !builder.config.dry_run() + } else { + false + } +} + +/// Maybe add libLLVM.so to the target lib-dir for linking. +pub fn maybe_install_llvm_target(builder: &Builder<'_>, target: TargetSelection, sysroot: &Path) { + let dst_libdir = sysroot.join("lib/rustlib").join(&*target.triple).join("lib"); + // We do not need to copy LLVM files into the sysroot if it is not + // dynamically linked; it is already included into librustc_llvm + // statically. + if builder.llvm_link_shared() { + maybe_install_llvm(builder, target, &dst_libdir); + } +} + +/// Maybe add libLLVM.so to the runtime lib-dir for rustc itself. +pub fn maybe_install_llvm_runtime(builder: &Builder<'_>, target: TargetSelection, sysroot: &Path) { + let dst_libdir = + sysroot.join(builder.sysroot_libdir_relative(Compiler { stage: 1, host: target })); + // We do not need to copy LLVM files into the sysroot if it is not + // dynamically linked; it is already included into librustc_llvm + // statically. 
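On the non-macOS path above, `maybe_install_llvm` asks `llvm-config --libfiles` which files make up the LLVM libraries and installs each of them. The same query can be reproduced directly; the `llvm-config` location is an assumption, and, like the code above, the sketch assumes the printed paths contain no spaces:

```rust
use std::path::Path;
use std::process::Command;

/// Ask llvm-config for the library files that make up LLVM.
fn llvm_lib_files(llvm_config: &Path) -> std::io::Result<Vec<String>> {
    let out = Command::new(llvm_config).arg("--libfiles").output()?;
    // `--libfiles` prints one space-separated line of absolute paths.
    let stdout = String::from_utf8_lossy(&out.stdout);
    Ok(stdout.trim_end().split(' ').map(str::to_owned).collect())
}

fn main() -> std::io::Result<()> {
    for file in llvm_lib_files(Path::new("/usr/lib/llvm-17/bin/llvm-config"))? {
        println!("would install {file}");
    }
    Ok(())
}
```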
+ if builder.llvm_link_shared() { + maybe_install_llvm(builder, target, &dst_libdir); + } +} + +#[derive(Clone, Debug, Eq, Hash, PartialEq)] +pub struct LlvmTools { + pub target: TargetSelection, +} + +impl Step for LlvmTools { + type Output = Option; + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let default = should_build_extended_tool(&run.builder, "llvm-tools"); + // FIXME: allow using the names of the tools themselves? + run.alias("llvm-tools").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(LlvmTools { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let target = self.target; + + /* run only if llvm-config isn't used */ + if let Some(config) = builder.config.target_config.get(&target) { + if let Some(ref _s) = config.llvm_config { + builder.info(&format!("Skipping LlvmTools ({target}): external LLVM")); + return None; + } + } + + builder.ensure(crate::core::build_steps::llvm::Llvm { target }); + + let mut tarball = Tarball::new(builder, "llvm-tools", &target.triple); + tarball.set_overlay(OverlayKind::LLVM); + tarball.is_preview(true); + + // Prepare the image directory + let src_bindir = builder.llvm_out(target).join("bin"); + let dst_bindir = format!("lib/rustlib/{}/bin", target.triple); + for tool in LLVM_TOOLS { + let exe = src_bindir.join(exe(tool, target)); + tarball.add_file(&exe, &dst_bindir, 0o755); + } + + // Copy libLLVM.so to the target lib dir as well, so the RPATH like + // `$ORIGIN/../lib` can find it. It may also be used as a dependency + // of `rustc-dev` to support the inherited `-lLLVM` when using the + // compiler libraries. + maybe_install_llvm_target(builder, target, tarball.image_dir()); + + Some(tarball.generate()) + } +} + +// Tarball intended for internal consumption to ease rustc/std development. +// +// Should not be considered stable by end users. +#[derive(Clone, Debug, Eq, Hash, PartialEq)] +pub struct RustDev { + pub target: TargetSelection, +} + +impl Step for RustDev { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("rust-dev") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustDev { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let target = self.target; + + /* run only if llvm-config isn't used */ + if let Some(config) = builder.config.target_config.get(&target) { + if let Some(ref _s) = config.llvm_config { + builder.info(&format!("Skipping RustDev ({target}): external LLVM")); + return None; + } + } + + let mut tarball = Tarball::new(builder, "rust-dev", &target.triple); + tarball.set_overlay(OverlayKind::LLVM); + + builder.ensure(crate::core::build_steps::llvm::Llvm { target }); + + // We want to package `lld` to use it with `download-ci-llvm`. + builder.ensure(crate::core::build_steps::llvm::Lld { target }); + + let src_bindir = builder.llvm_out(target).join("bin"); + // If updating this, you likely want to change + // src/bootstrap/download-ci-llvm-stamp as well, otherwise local users + // will not pick up the extra file until LLVM gets bumped. + // We should include all the build artifacts obtained from a source build, + // so that you can use the downloadable LLVM as if you’ve just run a full source build. 
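The `LlvmTools` step above stages each tool under `lib/rustlib/<triple>/bin`, using the `exe` helper to append `.exe` on Windows triples. A small sketch of that layout computation, with a trimmed tool list standing in for `LLVM_TOOLS`:

```rust
use std::path::Path;

/// Simplified version of bootstrap's `exe`: add the platform executable suffix.
fn exe(name: &str, triple: &str) -> String {
    if triple.contains("windows") { format!("{name}.exe") } else { name.to_string() }
}

fn main() {
    let triple = "x86_64-pc-windows-gnu";
    for tool in ["llvm-nm", "llvm-objcopy", "llvm-objdump", "llvm-ar"] {
        let dst = Path::new("lib/rustlib").join(triple).join("bin").join(exe(tool, triple));
        println!("{}", dst.display());
    }
}
```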
+ if src_bindir.exists() { + for entry in walkdir::WalkDir::new(&src_bindir) { + let entry = t!(entry); + if entry.file_type().is_file() && !entry.path_is_symlink() { + let name = entry.file_name().to_str().unwrap(); + tarball.add_file(src_bindir.join(name), "bin", 0o755); + } + } + } + + // We don't build LLD on some platforms, so only add it if it exists + let lld_path = builder.lld_out(target).join("bin").join(exe("lld", target)); + if lld_path.exists() { + tarball.add_file(lld_path, "bin", 0o755); + } + + tarball.add_file(&builder.llvm_filecheck(target), "bin", 0o755); + + // Copy the include directory as well; needed mostly to build + // librustc_llvm properly (e.g., llvm-config.h is in here). But also + // just broadly useful to be able to link against the bundled LLVM. + tarball.add_dir(&builder.llvm_out(target).join("include"), "include"); + + // Copy libLLVM.so to the target lib dir as well, so the RPATH like + // `$ORIGIN/../lib` can find it. It may also be used as a dependency + // of `rustc-dev` to support the inherited `-lLLVM` when using the + // compiler libraries. + let dst_libdir = tarball.image_dir().join("lib"); + maybe_install_llvm(builder, target, &dst_libdir); + let link_type = if builder.llvm_link_shared() { "dynamic" } else { "static" }; + t!(std::fs::write(tarball.image_dir().join("link-type.txt"), link_type), dst_libdir); + + Some(tarball.generate()) + } +} + +// Tarball intended for internal consumption to ease rustc/std development. +// +// Should not be considered stable by end users. +#[derive(Clone, Debug, Eq, Hash, PartialEq)] +pub struct Bootstrap { + pub target: TargetSelection, +} + +impl Step for Bootstrap { + type Output = Option; + const DEFAULT: bool = false; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("bootstrap") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Bootstrap { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let target = self.target; + + let tarball = Tarball::new(builder, "bootstrap", &target.triple); + + let bootstrap_outdir = &builder.bootstrap_out; + for file in &["bootstrap", "rustc", "rustdoc", "sccache-plus-cl"] { + tarball.add_file(bootstrap_outdir.join(exe(file, target)), "bootstrap/bin", 0o755); + } + + Some(tarball.generate()) + } +} + +/// Tarball containing a prebuilt version of the build-manifest tool, intended to be used by the +/// release process to avoid cloning the monorepo and building stuff. +/// +/// Should not be considered stable by end users. +#[derive(Clone, Debug, Eq, Hash, PartialEq)] +pub struct BuildManifest { + pub target: TargetSelection, +} + +impl Step for BuildManifest { + type Output = GeneratedTarball; + const DEFAULT: bool = false; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("build-manifest") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(BuildManifest { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) -> GeneratedTarball { + let build_manifest = builder.tool_exe(Tool::BuildManifest); + + let tarball = Tarball::new(builder, "build-manifest", &self.target.triple); + tarball.add_file(&build_manifest, "bin", 0o755); + tarball.generate() + } +} + +/// Tarball containing artifacts necessary to reproduce the build of rustc. +/// +/// Currently this is the PGO profile data. +/// +/// Should not be considered stable by end users. 
+#[derive(Clone, Debug, Eq, Hash, PartialEq)] +pub struct ReproducibleArtifacts { + pub target: TargetSelection, +} + +impl Step for ReproducibleArtifacts { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("reproducible-artifacts") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(ReproducibleArtifacts { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) -> Self::Output { + let mut added_anything = false; + let tarball = Tarball::new(builder, "reproducible-artifacts", &self.target.triple); + if let Some(path) = builder.config.rust_profile_use.as_ref() { + tarball.add_file(path, ".", 0o644); + added_anything = true; + } + if let Some(path) = builder.config.llvm_profile_use.as_ref() { + tarball.add_file(path, ".", 0o644); + added_anything = true; + } + for profile in &builder.config.reproducible_artifacts { + tarball.add_file(profile, ".", 0o644); + added_anything = true; + } + if added_anything { Some(tarball.generate()) } else { None } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/doc.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/doc.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/doc.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/doc.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,1094 @@ +//! Documentation generation for rustbuilder. +//! +//! This module implements generation for all bits and pieces of documentation +//! for the Rust project. This notably includes suites like the rust book, the +//! nomicon, rust by example, standalone documentation, etc. +//! +//! Everything here is basically just a shim around calling either `rustbook` or +//! `rustdoc`. + +use std::fs; +use std::path::{Path, PathBuf}; + +use crate::core::build_steps::compile; +use crate::core::build_steps::tool::{self, prepare_tool_cargo, SourceType, Tool}; +use crate::core::builder::crate_description; +use crate::core::builder::{Alias, Builder, Compiler, Kind, RunConfig, ShouldRun, Step}; +use crate::core::config::{Config, TargetSelection}; +use crate::utils::cache::{Interned, INTERNER}; +use crate::utils::helpers::{dir_is_empty, symlink_dir, t, up_to_date}; +use crate::Mode; + +macro_rules! submodule_helper { + ($path:expr, submodule) => { + $path + }; + ($path:expr, submodule = $submodule:literal) => { + $submodule + }; +} + +macro_rules! book { + ($($name:ident, $path:expr, $book_name:expr $(, submodule $(= $submodule:literal)? )? ;)+) => { + $( + #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + pub struct $name { + target: TargetSelection, + } + + impl Step for $name { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.path($path).default_condition(builder.config.docs) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure($name { + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) { + $( + let path = Path::new(submodule_helper!( $path, submodule $( = $submodule )? )); + builder.update_submodule(&path); + )? 
+ builder.ensure(RustbookSrc { + target: self.target, + name: INTERNER.intern_str($book_name), + src: INTERNER.intern_path(builder.src.join($path)), + parent: Some(self), + }) + } + } + )+ + } +} + +// NOTE: When adding a book here, make sure to ALSO build the book by +// adding a build step in `src/bootstrap/builder.rs`! +// NOTE: Make sure to add the corresponding submodule when adding a new book. +// FIXME: Make checking for a submodule automatic somehow (maybe by having a list of all submodules +// and checking against it?). +book!( + CargoBook, "src/tools/cargo/src/doc", "cargo", submodule = "src/tools/cargo"; + ClippyBook, "src/tools/clippy/book", "clippy"; + EditionGuide, "src/doc/edition-guide", "edition-guide", submodule; + EmbeddedBook, "src/doc/embedded-book", "embedded-book", submodule; + Nomicon, "src/doc/nomicon", "nomicon", submodule; + Reference, "src/doc/reference", "reference", submodule; + RustByExample, "src/doc/rust-by-example", "rust-by-example", submodule; + RustdocBook, "src/doc/rustdoc", "rustdoc"; + StyleGuide, "src/doc/style-guide", "style-guide"; +); + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct UnstableBook { + target: TargetSelection, +} + +impl Step for UnstableBook { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.path("src/doc/unstable-book").default_condition(builder.config.docs) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(UnstableBook { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + builder.ensure(UnstableBookGen { target: self.target }); + builder.ensure(RustbookSrc { + target: self.target, + name: INTERNER.intern_str("unstable-book"), + src: INTERNER.intern_path(builder.md_doc_out(self.target).join("unstable-book")), + parent: Some(self), + }) + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +struct RustbookSrc { + target: TargetSelection, + name: Interned, + src: Interned, + parent: Option
<P>, +} + +impl<P: Step> Step for RustbookSrc<P>
{ + type Output = (); + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() + } + + /// Invoke `rustbook` for `target` for the doc book `name` from the `src` path. + /// + /// This will not actually generate any documentation if the documentation has + /// already been generated. + fn run(self, builder: &Builder<'_>) { + let target = self.target; + let name = self.name; + let src = self.src; + let out = builder.doc_out(target); + t!(fs::create_dir_all(&out)); + + let out = out.join(name); + let index = out.join("index.html"); + let rustbook = builder.tool_exe(Tool::Rustbook); + let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook); + + if !builder.config.dry_run() && !(up_to_date(&src, &index) || up_to_date(&rustbook, &index)) + { + builder.info(&format!("Rustbook ({target}) - {name}")); + let _ = fs::remove_dir_all(&out); + + builder.run(rustbook_cmd.arg("build").arg(&src).arg("-d").arg(out)); + } + + if self.parent.is_some() { + builder.maybe_open_in_browser::
<P>
(index) + } + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct TheBook { + compiler: Compiler, + target: TargetSelection, +} + +impl Step for TheBook { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.path("src/doc/book").default_condition(builder.config.docs) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(TheBook { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + target: run.target, + }); + } + + /// Builds the book and associated stuff. + /// + /// We need to build: + /// + /// * Book + /// * Older edition redirects + /// * Version info and CSS + /// * Index page + /// * Redirect pages + fn run(self, builder: &Builder<'_>) { + let relative_path = Path::new("src").join("doc").join("book"); + builder.update_submodule(&relative_path); + + let compiler = self.compiler; + let target = self.target; + + let absolute_path = builder.src.join(&relative_path); + let redirect_path = absolute_path.join("redirects"); + if !absolute_path.exists() + || !redirect_path.exists() + || dir_is_empty(&absolute_path) + || dir_is_empty(&redirect_path) + { + eprintln!("Please checkout submodule: {}", relative_path.display()); + crate::exit!(1); + } + // build book + builder.ensure(RustbookSrc { + target, + name: INTERNER.intern_str("book"), + src: INTERNER.intern_path(absolute_path.clone()), + parent: Some(self), + }); + + // building older edition redirects + for edition in &["first-edition", "second-edition", "2018-edition"] { + builder.ensure(RustbookSrc { + target, + name: INTERNER.intern_string(format!("book/{edition}")), + src: INTERNER.intern_path(absolute_path.join(edition)), + // There should only be one book that is marked as the parent for each target, so + // treat the other editions as not having a parent. 
+ parent: Option::::None, + }); + } + + // build the version info page and CSS + let shared_assets = builder.ensure(SharedAssets { target }); + + // build the command first so we don't nest GHA groups + builder.rustdoc_cmd(compiler); + + // build the redirect pages + let _guard = builder.msg_doc(compiler, "book redirect pages", target); + for file in t!(fs::read_dir(redirect_path)) { + let file = t!(file); + let path = file.path(); + let path = path.to_str().unwrap(); + + invoke_rustdoc(builder, compiler, &shared_assets, target, path); + } + } +} + +fn invoke_rustdoc( + builder: &Builder<'_>, + compiler: Compiler, + shared_assets: &SharedAssetsPaths, + target: TargetSelection, + markdown: &str, +) { + let out = builder.doc_out(target); + + let path = builder.src.join("src/doc").join(markdown); + + let header = builder.src.join("src/doc/redirect.inc"); + let footer = builder.src.join("src/doc/footer.inc"); + + let mut cmd = builder.rustdoc_cmd(compiler); + + let out = out.join("book"); + + cmd.arg("--html-after-content") + .arg(&footer) + .arg("--html-before-content") + .arg(&shared_assets.version_info) + .arg("--html-in-header") + .arg(&header) + .arg("--markdown-no-toc") + .arg("--markdown-playground-url") + .arg("https://play.rust-lang.org/") + .arg("-o") + .arg(&out) + .arg(&path) + .arg("--markdown-css") + .arg("../rust.css"); + + if !builder.config.docs_minification { + cmd.arg("-Z").arg("unstable-options").arg("--disable-minification"); + } + + builder.run(&mut cmd); +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Standalone { + compiler: Compiler, + target: TargetSelection, +} + +impl Step for Standalone { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.path("src/doc").alias("standalone").default_condition(builder.config.docs) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Standalone { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + target: run.target, + }); + } + + /// Generates all standalone documentation as compiled by the rustdoc in `stage` + /// for the `target` into `out`. + /// + /// This will list all of `src/doc` looking for markdown files and appropriately + /// perform transformations like substituting `VERSION`, `SHORT_HASH`, and + /// `STAMP` along with providing the various header/footer HTML we've customized. + /// + /// In the end, this is just a glorified wrapper around rustdoc! 
+ fn run(self, builder: &Builder<'_>) { + let target = self.target; + let compiler = self.compiler; + let _guard = builder.msg_doc(compiler, "standalone", target); + let out = builder.doc_out(target); + t!(fs::create_dir_all(&out)); + + let version_info = builder.ensure(SharedAssets { target: self.target }).version_info; + + let favicon = builder.src.join("src/doc/favicon.inc"); + let footer = builder.src.join("src/doc/footer.inc"); + let full_toc = builder.src.join("src/doc/full-toc.inc"); + + for file in t!(fs::read_dir(builder.src.join("src/doc"))) { + let file = t!(file); + let path = file.path(); + let filename = path.file_name().unwrap().to_str().unwrap(); + if !filename.ends_with(".md") || filename == "README.md" { + continue; + } + + let html = out.join(filename).with_extension("html"); + let rustdoc = builder.rustdoc(compiler); + if up_to_date(&path, &html) + && up_to_date(&footer, &html) + && up_to_date(&favicon, &html) + && up_to_date(&full_toc, &html) + && (builder.config.dry_run() || up_to_date(&version_info, &html)) + && (builder.config.dry_run() || up_to_date(&rustdoc, &html)) + { + continue; + } + + let mut cmd = builder.rustdoc_cmd(compiler); + // Needed for --index-page flag + cmd.arg("-Z").arg("unstable-options"); + + cmd.arg("--html-after-content") + .arg(&footer) + .arg("--html-before-content") + .arg(&version_info) + .arg("--html-in-header") + .arg(&favicon) + .arg("--markdown-no-toc") + .arg("--index-page") + .arg(&builder.src.join("src/doc/index.md")) + .arg("--markdown-playground-url") + .arg("https://play.rust-lang.org/") + .arg("-o") + .arg(&out) + .arg(&path); + + if !builder.config.docs_minification { + cmd.arg("--disable-minification"); + } + + if filename == "not_found.md" { + cmd.arg("--markdown-css").arg("https://doc.rust-lang.org/rust.css"); + } else { + cmd.arg("--markdown-css").arg("rust.css"); + } + builder.run(&mut cmd); + } + + // We open doc/index.html as the default if invoked as `x.py doc --open` + // with no particular explicit doc requested (e.g. library/core). + if builder.paths.is_empty() || builder.was_invoked_explicitly::(Kind::Doc) { + let index = out.join("index.html"); + builder.open_in_browser(&index); + } + } +} + +#[derive(Debug, Clone)] +pub struct SharedAssetsPaths { + pub version_info: PathBuf, +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct SharedAssets { + target: TargetSelection, +} + +impl Step for SharedAssets { + type Output = SharedAssetsPaths; + const DEFAULT: bool = false; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + // Other tasks depend on this, no need to execute it on its own + run.never() + } + + // Generate shared resources used by other pieces of documentation. 
+ fn run(self, builder: &Builder<'_>) -> Self::Output { + let out = builder.doc_out(self.target); + + let version_input = builder.src.join("src").join("doc").join("version_info.html.template"); + let version_info = out.join("version_info.html"); + if !builder.config.dry_run() && !up_to_date(&version_input, &version_info) { + let info = t!(fs::read_to_string(&version_input)) + .replace("VERSION", &builder.rust_release()) + .replace("SHORT_HASH", builder.rust_info().sha_short().unwrap_or("")) + .replace("STAMP", builder.rust_info().sha().unwrap_or("")); + t!(fs::write(&version_info, &info)); + } + + builder.copy(&builder.src.join("src").join("doc").join("rust.css"), &out.join("rust.css")); + + SharedAssetsPaths { version_info } + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Std { + pub stage: u32, + pub target: TargetSelection, + pub format: DocumentationFormat, + crates: Interned>, +} + +impl Std { + pub(crate) fn new( + stage: u32, + target: TargetSelection, + builder: &Builder<'_>, + format: DocumentationFormat, + ) -> Self { + let crates = builder + .in_tree_crates("sysroot", Some(target)) + .into_iter() + .map(|krate| krate.name.to_string()) + .collect(); + Std { stage, target, format, crates: INTERNER.intern_list(crates) } + } +} + +impl Step for Std { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.crate_or_deps("sysroot").path("library").default_condition(builder.config.docs) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Std { + stage: run.builder.top_stage, + target: run.target, + format: if run.builder.config.cmd.json() { + DocumentationFormat::JSON + } else { + DocumentationFormat::HTML + }, + crates: run.make_run_crates(Alias::Library), + }); + } + + /// Compile all standard library documentation. + /// + /// This will generate all documentation for the standard library and its + /// dependencies. This is largely just a wrapper around `cargo doc`. + fn run(self, builder: &Builder<'_>) { + let stage = self.stage; + let target = self.target; + let out = match self.format { + DocumentationFormat::HTML => builder.doc_out(target), + DocumentationFormat::JSON => builder.json_doc_out(target), + }; + + t!(fs::create_dir_all(&out)); + + if self.format == DocumentationFormat::HTML { + builder.ensure(SharedAssets { target: self.target }); + } + + let index_page = builder + .src + .join("src/doc/index.md") + .into_os_string() + .into_string() + .expect("non-utf8 paths are unsupported"); + let mut extra_args = match self.format { + DocumentationFormat::HTML => { + vec!["--markdown-css", "rust.css", "--markdown-no-toc", "--index-page", &index_page] + } + DocumentationFormat::JSON => vec!["--output-format", "json"], + }; + + if !builder.config.docs_minification { + extra_args.push("--disable-minification"); + } + + doc_std(builder, self.format, stage, target, &out, &extra_args, &self.crates); + + // Don't open if the format is json + if let DocumentationFormat::JSON = self.format { + return; + } + + if builder.paths.iter().any(|path| path.ends_with("library")) { + // For `x.py doc library --open`, open `std` by default. 
+ let index = out.join("std").join("index.html"); + builder.open_in_browser(index); + } else { + for requested_crate in &*self.crates { + if STD_PUBLIC_CRATES.iter().any(|&k| k == requested_crate) { + let index = out.join(requested_crate).join("index.html"); + builder.open_in_browser(index); + break; + } + } + } + } +} + +/// Name of the crates that are visible to consumers of the standard library. +/// Documentation for internal crates is handled by the rustc step, so internal crates will show +/// up there. +/// +/// Order here is important! +/// Crates need to be processed starting from the leaves, otherwise rustdoc will not +/// create correct links between crates because rustdoc depends on the +/// existence of the output directories to know if it should be a local +/// or remote link. +const STD_PUBLIC_CRATES: [&str; 5] = ["core", "alloc", "std", "proc_macro", "test"]; + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum DocumentationFormat { + HTML, + JSON, +} + +impl DocumentationFormat { + fn as_str(&self) -> &str { + match self { + DocumentationFormat::HTML => "HTML", + DocumentationFormat::JSON => "JSON", + } + } +} + +/// Build the documentation for public standard library crates. +fn doc_std( + builder: &Builder<'_>, + format: DocumentationFormat, + stage: u32, + target: TargetSelection, + out: &Path, + extra_args: &[&str], + requested_crates: &[String], +) { + if builder.no_std(target) == Some(true) { + panic!( + "building std documentation for no_std target {target} is not supported\n\ + Set `docs = false` in the config to disable documentation, or pass `--skip library`." + ); + } + + let compiler = builder.compiler(stage, builder.config.build); + + let target_doc_dir_name = if format == DocumentationFormat::JSON { "json-doc" } else { "doc" }; + let target_dir = + builder.stage_out(compiler, Mode::Std).join(target.triple).join(target_doc_dir_name); + + // This is directory where the compiler will place the output of the command. + // We will then copy the files from this directory into the final `out` directory, the specified + // as a function parameter. + let out_dir = target_dir.join(target.triple).join("doc"); + + let mut cargo = builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "doc"); + compile::std_cargo(builder, target, compiler.stage, &mut cargo); + cargo + .arg("--no-deps") + .arg("--target-dir") + .arg(&*target_dir.to_string_lossy()) + .arg("-Zskip-rustdoc-fingerprint") + .rustdocflag("-Z") + .rustdocflag("unstable-options") + .rustdocflag("--resource-suffix") + .rustdocflag(&builder.version); + for arg in extra_args { + cargo.rustdocflag(arg); + } + + if builder.config.library_docs_private_items { + cargo.rustdocflag("--document-private-items").rustdocflag("--document-hidden-items"); + } + + for krate in requested_crates { + if krate == "sysroot" { + // The sysroot crate is an implementation detail, don't include it in public docs. 
+ continue; + } + cargo.arg("-p").arg(krate); + } + + let description = + format!("library{} in {} format", crate_description(&requested_crates), format.as_str()); + let _guard = builder.msg_doc(compiler, &description, target); + + builder.run(&mut cargo.into()); + builder.cp_r(&out_dir, &out); +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Rustc { + pub stage: u32, + pub target: TargetSelection, + crates: Interned>, +} + +impl Rustc { + pub(crate) fn new(stage: u32, target: TargetSelection, builder: &Builder<'_>) -> Self { + let crates = builder + .in_tree_crates("rustc-main", Some(target)) + .into_iter() + .map(|krate| krate.name.to_string()) + .collect(); + Self { stage, target, crates: INTERNER.intern_list(crates) } + } +} + +impl Step for Rustc { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.crate_or_deps("rustc-main") + .path("compiler") + .default_condition(builder.config.compiler_docs) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Rustc { + stage: run.builder.top_stage, + target: run.target, + crates: run.make_run_crates(Alias::Compiler), + }); + } + + /// Generates compiler documentation. + /// + /// This will generate all documentation for compiler and dependencies. + /// Compiler documentation is distributed separately, so we make sure + /// we do not merge it with the other documentation from std, test and + /// proc_macros. This is largely just a wrapper around `cargo doc`. + fn run(self, builder: &Builder<'_>) { + let stage = self.stage; + let target = self.target; + + // This is the intended out directory for compiler documentation. + let out = builder.compiler_doc_out(target); + t!(fs::create_dir_all(&out)); + + // Build the standard library, so that proc-macros can use it. + // (Normally, only the metadata would be necessary, but proc-macros are special since they run at compile-time.) + let compiler = builder.compiler(stage, builder.config.build); + builder.ensure(compile::Std::new(compiler, builder.config.build)); + + let _guard = builder.msg_sysroot_tool( + Kind::Doc, + stage, + &format!("compiler{}", crate_description(&self.crates)), + compiler.host, + target, + ); + + // Build cargo command. + let mut cargo = builder.cargo(compiler, Mode::Rustc, SourceType::InTree, target, "doc"); + cargo.rustdocflag("--document-private-items"); + // Since we always pass --document-private-items, there's no need to warn about linking to private items. + cargo.rustdocflag("-Arustdoc::private-intra-doc-links"); + cargo.rustdocflag("--enable-index-page"); + cargo.rustdocflag("-Zunstable-options"); + cargo.rustdocflag("-Znormalize-docs"); + cargo.rustdocflag("--show-type-layout"); + cargo.rustdocflag("--generate-link-to-definition"); + compile::rustc_cargo(builder, &mut cargo, target, compiler.stage); + cargo.arg("-Zunstable-options"); + cargo.arg("-Zskip-rustdoc-fingerprint"); + + // Only include compiler crates, no dependencies of those, such as `libc`. + // Do link to dependencies on `docs.rs` however using `rustdoc-map`. + cargo.arg("--no-deps"); + cargo.arg("-Zrustdoc-map"); + + // FIXME: `-Zrustdoc-map` does not yet correctly work for transitive dependencies, + // once this is no longer an issue the special case for `ena` can be removed. 
+ cargo.rustdocflag("--extern-html-root-url"); + cargo.rustdocflag("ena=https://docs.rs/ena/latest/"); + + let mut to_open = None; + + let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target.triple).join("doc"); + for krate in &*self.crates { + // Create all crate output directories first to make sure rustdoc uses + // relative links. + // FIXME: Cargo should probably do this itself. + let dir_name = krate.replace("-", "_"); + t!(fs::create_dir_all(out_dir.join(&*dir_name))); + cargo.arg("-p").arg(krate); + if to_open.is_none() { + to_open = Some(dir_name); + } + } + + // This uses a shared directory so that librustdoc documentation gets + // correctly built and merged with the rustc documentation. + // + // This is needed because rustdoc is built in a different directory from + // rustc. rustdoc needs to be able to see everything, for example when + // merging the search index, or generating local (relative) links. + symlink_dir_force(&builder.config, &out, &out_dir); + // Cargo puts proc macros in `target/doc` even if you pass `--target` + // explicitly (https://github.com/rust-lang/cargo/issues/7677). + let proc_macro_out_dir = builder.stage_out(compiler, Mode::Rustc).join("doc"); + symlink_dir_force(&builder.config, &out, &proc_macro_out_dir); + + builder.run(&mut cargo.into()); + + if !builder.config.dry_run() { + // Sanity check on linked compiler crates + for krate in &*self.crates { + let dir_name = krate.replace("-", "_"); + // Making sure the directory exists and is not empty. + assert!(out.join(&*dir_name).read_dir().unwrap().next().is_some()); + } + } + + if builder.paths.iter().any(|path| path.ends_with("compiler")) { + // For `x.py doc compiler --open`, open `rustc_middle` by default. + let index = out.join("rustc_middle").join("index.html"); + builder.open_in_browser(index); + } else if let Some(krate) = to_open { + // Let's open the first crate documentation page: + let index = out.join(krate).join("index.html"); + builder.open_in_browser(index); + } + } +} + +macro_rules! tool_doc { + ( + $tool: ident, + $should_run: literal, + $path: literal, + $(rustc_tool = $rustc_tool:literal, )? + $(in_tree = $in_tree:literal ,)? + $(is_library = $is_library:expr,)? + $(crates = $crates:expr)? + ) => { + #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + pub struct $tool { + target: TargetSelection, + } + + impl Step for $tool { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.crate_or_deps($should_run).default_condition(builder.config.compiler_docs) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure($tool { target: run.target }); + } + + /// Generates compiler documentation. + /// + /// This will generate all documentation for compiler and dependencies. + /// Compiler documentation is distributed separately, so we make sure + /// we do not merge it with the other documentation from std, test and + /// proc_macros. This is largely just a wrapper around `cargo doc`. + fn run(self, builder: &Builder<'_>) { + let stage = builder.top_stage; + let target = self.target; + + // This is the intended out directory for compiler documentation. + let out = builder.compiler_doc_out(target); + t!(fs::create_dir_all(&out)); + + let compiler = builder.compiler(stage, builder.config.build); + builder.ensure(compile::Std::new(compiler, target)); + + if true $(&& $rustc_tool)? { + // Build rustc docs so that we generate relative links. 
+ builder.ensure(Rustc::new(stage, target, builder)); + + // Rustdoc needs the rustc sysroot available to build. + // FIXME: is there a way to only ensure `check::Rustc` here? Last time I tried it failed + // with strange errors, but only on a full bors test ... + builder.ensure(compile::Rustc::new(compiler, target)); + } + + let source_type = if true $(&& $in_tree)? { + SourceType::InTree + } else { + SourceType::Submodule + }; + + // Build cargo command. + let mut cargo = prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + target, + "doc", + $path, + source_type, + &[], + ); + + cargo.arg("-Zskip-rustdoc-fingerprint"); + // Only include compiler crates, no dependencies of those, such as `libc`. + cargo.arg("--no-deps"); + + if false $(|| $is_library)? { + cargo.arg("--lib"); + } + + $(for krate in $crates { + cargo.arg("-p").arg(krate); + })? + + cargo.rustdocflag("--document-private-items"); + // Since we always pass --document-private-items, there's no need to warn about linking to private items. + cargo.rustdocflag("-Arustdoc::private-intra-doc-links"); + cargo.rustdocflag("--enable-index-page"); + cargo.rustdocflag("--show-type-layout"); + cargo.rustdocflag("--generate-link-to-definition"); + cargo.rustdocflag("-Zunstable-options"); + + let out_dir = builder.stage_out(compiler, Mode::ToolRustc).join(target.triple).join("doc"); + $(for krate in $crates { + let dir_name = krate.replace("-", "_"); + t!(fs::create_dir_all(out_dir.join(&*dir_name))); + })? + + // Symlink compiler docs to the output directory of rustdoc documentation. + symlink_dir_force(&builder.config, &out, &out_dir); + let proc_macro_out_dir = builder.stage_out(compiler, Mode::ToolRustc).join("doc"); + symlink_dir_force(&builder.config, &out, &proc_macro_out_dir); + + let _guard = builder.msg_doc(compiler, stringify!($tool).to_lowercase(), target); + builder.run(&mut cargo.into()); + + if !builder.config.dry_run() { + // Sanity check on linked doc directories + $(for krate in $crates { + let dir_name = krate.replace("-", "_"); + // Making sure the directory exists and is not empty. + assert!(out.join(&*dir_name).read_dir().unwrap().next().is_some()); + })? + } + } + } + } +} + +tool_doc!(Rustdoc, "rustdoc-tool", "src/tools/rustdoc", crates = ["rustdoc", "rustdoc-json-types"]); +tool_doc!( + Rustfmt, + "rustfmt-nightly", + "src/tools/rustfmt", + crates = ["rustfmt-nightly", "rustfmt-config_proc_macro"] +); +tool_doc!(Clippy, "clippy", "src/tools/clippy", crates = ["clippy_config", "clippy_utils"]); +tool_doc!(Miri, "miri", "src/tools/miri", crates = ["miri"]); +tool_doc!( + Cargo, + "cargo", + "src/tools/cargo", + rustc_tool = false, + in_tree = false, + crates = [ + "cargo", + "cargo-platform", + "cargo-util", + "crates-io", + "cargo-test-macro", + "cargo-test-support", + "cargo-credential", + "mdman", + // FIXME: this trips a license check in tidy. 
+ // "resolver-tests", + ] +); +tool_doc!(Tidy, "tidy", "src/tools/tidy", rustc_tool = false, crates = ["tidy"]); +tool_doc!( + Bootstrap, + "bootstrap", + "src/bootstrap", + rustc_tool = false, + is_library = true, + crates = ["bootstrap"] +); + +#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct ErrorIndex { + pub target: TargetSelection, +} + +impl Step for ErrorIndex { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.path("src/tools/error_index_generator").default_condition(builder.config.docs) + } + + fn make_run(run: RunConfig<'_>) { + let target = run.target; + run.builder.ensure(ErrorIndex { target }); + } + + /// Generates the HTML rendered error-index by running the + /// `error_index_generator` tool. + fn run(self, builder: &Builder<'_>) { + builder.info(&format!("Documenting error index ({})", self.target)); + let out = builder.doc_out(self.target); + t!(fs::create_dir_all(&out)); + let mut index = tool::ErrorIndex::command(builder); + index.arg("html"); + index.arg(out); + index.arg(&builder.version); + + builder.run(&mut index); + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct UnstableBookGen { + target: TargetSelection, +} + +impl Step for UnstableBookGen { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.path("src/tools/unstable-book-gen").default_condition(builder.config.docs) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(UnstableBookGen { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + let target = self.target; + + builder.info(&format!("Generating unstable book md files ({target})")); + let out = builder.md_doc_out(target).join("unstable-book"); + builder.create_dir(&out); + builder.remove_dir(&out); + let mut cmd = builder.tool_cmd(Tool::UnstableBookGen); + cmd.arg(builder.src.join("library")); + cmd.arg(builder.src.join("compiler")); + cmd.arg(builder.src.join("src")); + cmd.arg(out); + + builder.run(&mut cmd); + } +} + +fn symlink_dir_force(config: &Config, original: &Path, link: &Path) { + if config.dry_run() { + return; + } + if let Ok(m) = fs::symlink_metadata(link) { + if m.file_type().is_dir() { + t!(fs::remove_dir_all(link)); + } else { + // handle directory junctions on windows by falling back to + // `remove_dir`. + t!(fs::remove_file(link).or_else(|_| fs::remove_dir(link))); + } + } + + t!( + symlink_dir(config, original, link), + format!("failed to create link from {} -> {}", link.display(), original.display()) + ); +} + +#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustcBook { + pub compiler: Compiler, + pub target: TargetSelection, + pub validate: bool, +} + +impl Step for RustcBook { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.path("src/doc/rustc").default_condition(builder.config.docs) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustcBook { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + target: run.target, + validate: false, + }); + } + + /// Builds the rustc book. 
+ /// + /// The lints are auto-generated by a tool, and then merged into the book + /// in the "md-doc" directory in the build output directory. Then + /// "rustbook" is used to convert it to HTML. + fn run(self, builder: &Builder<'_>) { + let out_base = builder.md_doc_out(self.target).join("rustc"); + t!(fs::create_dir_all(&out_base)); + let out_listing = out_base.join("src/lints"); + builder.cp_r(&builder.src.join("src/doc/rustc"), &out_base); + builder.info(&format!("Generating lint docs ({})", self.target)); + + let rustc = builder.rustc(self.compiler); + // The tool runs `rustc` for extracting output examples, so it needs a + // functional sysroot. + builder.ensure(compile::Std::new(self.compiler, self.target)); + let mut cmd = builder.tool_cmd(Tool::LintDocs); + cmd.arg("--src"); + cmd.arg(builder.src.join("compiler")); + cmd.arg("--out"); + cmd.arg(&out_listing); + cmd.arg("--rustc"); + cmd.arg(&rustc); + cmd.arg("--rustc-target").arg(&self.target.rustc_target_arg()); + if builder.is_verbose() { + cmd.arg("--verbose"); + } + if self.validate { + cmd.arg("--validate"); + } + // We need to validate nightly features, even on the stable channel. + // Set this unconditionally as the stage0 compiler may be being used to + // document. + cmd.env("RUSTC_BOOTSTRAP", "1"); + + // If the lib directories are in an unusual location (changed in + // config.toml), then this needs to explicitly update the dylib search + // path. + builder.add_rustc_lib_path(self.compiler, &mut cmd); + let doc_generator_guard = builder.msg( + Kind::Run, + self.compiler.stage, + "lint-docs", + self.compiler.host, + self.target, + ); + builder.run(&mut cmd); + drop(doc_generator_guard); + + // Run rustbook/mdbook to generate the HTML pages. + builder.ensure(RustbookSrc { + target: self.target, + name: INTERNER.intern_str("rustc"), + src: INTERNER.intern_path(out_base), + parent: Some(self), + }); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/format.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/format.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/format.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/format.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,322 @@ +//! Runs rustfmt on the repository. 
+ +use crate::core::builder::Builder; +use crate::utils::helpers::{output, program_out_of_date, t}; +use build_helper::ci::CiEnv; +use build_helper::git::get_git_modified_files; +use ignore::WalkBuilder; +use std::collections::VecDeque; +use std::path::{Path, PathBuf}; +use std::process::{Command, Stdio}; +use std::sync::mpsc::SyncSender; + +fn rustfmt(src: &Path, rustfmt: &Path, paths: &[PathBuf], check: bool) -> impl FnMut(bool) -> bool { + let mut cmd = Command::new(&rustfmt); + // avoid the submodule config paths from coming into play, + // we only allow a single global config for the workspace for now + cmd.arg("--config-path").arg(&src.canonicalize().unwrap()); + cmd.arg("--edition").arg("2021"); + cmd.arg("--unstable-features"); + cmd.arg("--skip-children"); + if check { + cmd.arg("--check"); + } + cmd.args(paths); + let cmd_debug = format!("{cmd:?}"); + let mut cmd = cmd.spawn().expect("running rustfmt"); + // poor man's async: return a closure that'll wait for rustfmt's completion + move |block: bool| -> bool { + if !block { + match cmd.try_wait() { + Ok(Some(_)) => {} + _ => return false, + } + } + let status = cmd.wait().unwrap(); + if !status.success() { + eprintln!( + "Running `{}` failed.\nIf you're running `tidy`, \ + try again with `--bless`. Or, if you just want to format \ + code, run `./x.py fmt` instead.", + cmd_debug, + ); + crate::exit!(1); + } + true + } +} + +fn get_rustfmt_version(build: &Builder<'_>) -> Option<(String, PathBuf)> { + let stamp_file = build.out.join("rustfmt.stamp"); + + let mut cmd = Command::new(match build.initial_rustfmt() { + Some(p) => p, + None => return None, + }); + cmd.arg("--version"); + let output = match cmd.output() { + Ok(status) => status, + Err(_) => return None, + }; + if !output.status.success() { + return None; + } + Some((String::from_utf8(output.stdout).unwrap(), stamp_file)) +} + +/// Return whether the format cache can be reused. +fn verify_rustfmt_version(build: &Builder<'_>) -> bool { + let Some((version, stamp_file)) = get_rustfmt_version(build) else { + return false; + }; + !program_out_of_date(&stamp_file, &version) +} + +/// Updates the last rustfmt version used +fn update_rustfmt_version(build: &Builder<'_>) { + let Some((version, stamp_file)) = get_rustfmt_version(build) else { + return; + }; + t!(std::fs::write(stamp_file, version)) +} + +/// Returns the Rust files modified between the `merge-base` of HEAD and +/// rust-lang/master and what is now on the disk. +/// +/// Returns `None` if all files should be formatted. 
+fn get_modified_rs_files(build: &Builder<'_>) -> Result>, String> { + if !verify_rustfmt_version(build) { + return Ok(None); + } + + get_git_modified_files(&build.config.git_config(), Some(&build.config.src), &vec!["rs"]) +} + +#[derive(serde_derive::Deserialize)] +struct RustfmtConfig { + ignore: Vec, +} + +pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) { + if build.config.dry_run() { + return; + } + let mut builder = ignore::types::TypesBuilder::new(); + builder.add_defaults(); + builder.select("rust"); + let matcher = builder.build().unwrap(); + let rustfmt_config = build.src.join("rustfmt.toml"); + if !rustfmt_config.exists() { + eprintln!("Not running formatting checks; rustfmt.toml does not exist."); + eprintln!("This may happen in distributed tarballs."); + return; + } + let rustfmt_config = t!(std::fs::read_to_string(&rustfmt_config)); + let rustfmt_config: RustfmtConfig = t!(toml::from_str(&rustfmt_config)); + let mut fmt_override = ignore::overrides::OverrideBuilder::new(&build.src); + for ignore in rustfmt_config.ignore { + fmt_override.add(&format!("!{ignore}")).expect(&ignore); + } + let git_available = match Command::new("git") + .arg("--version") + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .status() + { + Ok(status) => status.success(), + Err(_) => false, + }; + + if git_available { + let in_working_tree = match build + .config + .git() + .arg("rev-parse") + .arg("--is-inside-work-tree") + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .status() + { + Ok(status) => status.success(), + Err(_) => false, + }; + if in_working_tree { + let untracked_paths_output = output( + build.config.git().arg("status").arg("--porcelain").arg("--untracked-files=normal"), + ); + let untracked_paths = untracked_paths_output + .lines() + .filter(|entry| entry.starts_with("??")) + .map(|entry| { + entry.split(' ').nth(1).expect("every git status entry should list a path") + }); + let mut untracked_count = 0; + for untracked_path in untracked_paths { + println!("skip untracked path {untracked_path} during rustfmt invocations"); + // The leading `/` makes it an exact match against the + // repository root, rather than a glob. Without that, if you + // have `foo.rs` in the repository root it will also match + // against anything like `compiler/rustc_foo/src/foo.rs`, + // preventing the latter from being formatted. + untracked_count += 1; + fmt_override.add(&format!("!/{untracked_path}")).expect(&untracked_path); + } + // Only check modified files locally to speed up runtime. + // We still check all files in CI to avoid bugs in `get_modified_rs_files` letting regressions slip through; + // we also care about CI time less since this is still very fast compared to building the compiler. 
+ if !CiEnv::is_ci() && paths.is_empty() { + match get_modified_rs_files(build) { + Ok(Some(files)) => { + if files.len() <= 10 { + for file in &files { + println!("formatting modified file {file}"); + } + } else { + let pluralized = |count| if count > 1 { "files" } else { "file" }; + let untracked_msg = if untracked_count == 0 { + "".to_string() + } else { + format!( + ", skipped {} untracked {}", + untracked_count, + pluralized(untracked_count), + ) + }; + println!( + "formatting {} modified {}{}", + files.len(), + pluralized(files.len()), + untracked_msg + ); + } + for file in files { + fmt_override.add(&format!("/{file}")).expect(&file); + } + } + Ok(None) => {} + Err(err) => { + println!( + "WARN: Something went wrong when running git commands:\n{err}\n\ + Falling back to formatting all files." + ); + } + } + } + } else { + println!("Not in git tree. Skipping git-aware format checks"); + } + } else { + println!("Could not find usable git. Skipping git-aware format checks"); + } + + let fmt_override = fmt_override.build().unwrap(); + + let rustfmt_path = build.initial_rustfmt().unwrap_or_else(|| { + eprintln!("./x.py fmt is not supported on this channel"); + crate::exit!(1); + }); + assert!(rustfmt_path.exists(), "{}", rustfmt_path.display()); + let src = build.src.clone(); + let (tx, rx): (SyncSender, _) = std::sync::mpsc::sync_channel(128); + let walker = match paths.get(0) { + Some(first) => { + let find_shortcut_candidates = |p: &PathBuf| { + let mut candidates = Vec::new(); + for candidate in WalkBuilder::new(src.clone()).max_depth(Some(3)).build() { + if let Ok(entry) = candidate { + if let Some(dir_name) = p.file_name() { + if entry.path().is_dir() && entry.file_name() == dir_name { + candidates.push(entry.into_path()); + } + } + } + } + candidates + }; + + // Only try to look for shortcut candidates for single component paths like + // `std` and not for e.g. relative paths like `../library/std`. + let should_look_for_shortcut_dir = |p: &PathBuf| p.components().count() == 1; + + let mut walker = if should_look_for_shortcut_dir(first) { + if let [single_candidate] = &find_shortcut_candidates(first)[..] { + WalkBuilder::new(single_candidate) + } else { + WalkBuilder::new(first) + } + } else { + WalkBuilder::new(src.join(first)) + }; + + for path in &paths[1..] { + if should_look_for_shortcut_dir(path) { + if let [single_candidate] = &find_shortcut_candidates(path)[..] { + walker.add(single_candidate); + } else { + walker.add(path); + } + } else { + walker.add(src.join(path)); + } + } + + walker + } + None => WalkBuilder::new(src.clone()), + } + .types(matcher) + .overrides(fmt_override) + .build_parallel(); + + // there is a lot of blocking involved in spawning a child process and reading files to format. 
+ // spawn more processes than available concurrency to keep the CPU busy + let max_processes = build.jobs() as usize * 2; + + // spawn child processes on a separate thread so we can batch entries we have received from ignore + let thread = std::thread::spawn(move || { + let mut children = VecDeque::new(); + while let Ok(path) = rx.recv() { + // try getting a few more paths from the channel to amortize the overhead of spawning processes + let paths: Vec<_> = rx.try_iter().take(7).chain(std::iter::once(path)).collect(); + + let child = rustfmt(&src, &rustfmt_path, paths.as_slice(), check); + children.push_back(child); + + // poll completion before waiting + for i in (0..children.len()).rev() { + if children[i](false) { + children.swap_remove_back(i); + break; + } + } + + if children.len() >= max_processes { + // await oldest child + children.pop_front().unwrap()(true); + } + } + + // await remaining children + for mut child in children { + child(true); + } + }); + + walker.run(|| { + let tx = tx.clone(); + Box::new(move |entry| { + let entry = t!(entry); + if entry.file_type().map_or(false, |t| t.is_file()) { + t!(tx.send(entry.into_path())); + } + ignore::WalkState::Continue + }) + }); + + drop(tx); + + thread.join().unwrap(); + if !check { + update_rustfmt_version(build); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/install.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/install.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/install.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/install.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,332 @@ +//! Implementation of the install aspects of the compiler. +//! +//! This module is responsible for installing the standard library, +//! compiler, and documentation. + +use std::env; +use std::fs; +use std::path::{Component, Path, PathBuf}; +use std::process::Command; + +use crate::core::build_steps::dist; +use crate::core::builder::{Builder, RunConfig, ShouldRun, Step}; +use crate::core::config::{Config, TargetSelection}; +use crate::utils::helpers::t; +use crate::utils::tarball::GeneratedTarball; +use crate::INTERNER; +use crate::{Compiler, Kind}; + +#[cfg(target_os = "illumos")] +const SHELL: &str = "bash"; +#[cfg(not(target_os = "illumos"))] +const SHELL: &str = "sh"; + +// We have to run a few shell scripts, which choke quite a bit on both `\` +// characters and on `C:\` paths, so normalize both of them away. +fn sanitize_sh(path: &Path) -> String { + let path = path.to_str().unwrap().replace("\\", "/"); + return change_drive(unc_to_lfs(&path)).unwrap_or(path); + + fn unc_to_lfs(s: &str) -> &str { + s.strip_prefix("//?/").unwrap_or(s) + } + + fn change_drive(s: &str) -> Option { + let mut ch = s.chars(); + let drive = ch.next().unwrap_or('C'); + if ch.next() != Some(':') { + return None; + } + if ch.next() != Some('/') { + return None; + } + Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..])) + } +} + +fn is_dir_writable_for_user(dir: &PathBuf) -> bool { + let tmp = dir.join(".tmp"); + match fs::create_dir_all(&tmp) { + Ok(_) => { + fs::remove_dir_all(tmp).unwrap(); + true + } + Err(e) => { + if e.kind() == std::io::ErrorKind::PermissionDenied { + false + } else { + panic!("Failed the write access check for the current user. 
{}", e); + } + } + } +} + +fn install_sh( + builder: &Builder<'_>, + package: &str, + stage: u32, + host: Option, + tarball: &GeneratedTarball, +) { + let _guard = builder.msg(Kind::Install, stage, package, host, host); + + let prefix = default_path(&builder.config.prefix, "/usr/local"); + let sysconfdir = prefix.join(default_path(&builder.config.sysconfdir, "/etc")); + let destdir_env = env::var_os("DESTDIR").map(PathBuf::from); + + // Sanity checks on the write access of user. + // + // When the `DESTDIR` environment variable is present, there is no point to + // check write access for `prefix` and `sysconfdir` individually, as they + // are combined with the path from the `DESTDIR` environment variable. In + // this case, we only need to check the `DESTDIR` path, disregarding the + // `prefix` and `sysconfdir` paths. + if let Some(destdir) = &destdir_env { + assert!(is_dir_writable_for_user(destdir), "User doesn't have write access on DESTDIR."); + } else { + assert!( + is_dir_writable_for_user(&prefix), + "User doesn't have write access on `install.prefix` path in the `config.toml`.", + ); + assert!( + is_dir_writable_for_user(&sysconfdir), + "User doesn't have write access on `install.sysconfdir` path in `config.toml`." + ); + } + + let datadir = prefix.join(default_path(&builder.config.datadir, "share")); + let docdir = prefix.join(default_path(&builder.config.docdir, "share/doc/rust")); + let mandir = prefix.join(default_path(&builder.config.mandir, "share/man")); + let libdir = prefix.join(default_path(&builder.config.libdir, "lib")); + let bindir = prefix.join(&builder.config.bindir); // Default in config.rs + + let empty_dir = builder.out.join("tmp/empty_dir"); + t!(fs::create_dir_all(&empty_dir)); + + let mut cmd = Command::new(SHELL); + cmd.current_dir(&empty_dir) + .arg(sanitize_sh(&tarball.decompressed_output().join("install.sh"))) + .arg(format!("--prefix={}", prepare_dir(&destdir_env, prefix))) + .arg(format!("--sysconfdir={}", prepare_dir(&destdir_env, sysconfdir))) + .arg(format!("--datadir={}", prepare_dir(&destdir_env, datadir))) + .arg(format!("--docdir={}", prepare_dir(&destdir_env, docdir))) + .arg(format!("--bindir={}", prepare_dir(&destdir_env, bindir))) + .arg(format!("--libdir={}", prepare_dir(&destdir_env, libdir))) + .arg(format!("--mandir={}", prepare_dir(&destdir_env, mandir))) + .arg("--disable-ldconfig"); + builder.run(&mut cmd); + t!(fs::remove_dir_all(&empty_dir)); +} + +fn default_path(config: &Option, default: &str) -> PathBuf { + config.as_ref().cloned().unwrap_or_else(|| PathBuf::from(default)) +} + +fn prepare_dir(destdir_env: &Option, mut path: PathBuf) -> String { + // The DESTDIR environment variable is a standard way to install software in a subdirectory + // while keeping the original directory structure, even if the prefix or other directories + // contain absolute paths. + // + // More information on the environment variable is available here: + // https://www.gnu.org/prep/standards/html_node/DESTDIR.html + if let Some(destdir) = destdir_env { + let without_destdir = path.clone(); + path = destdir.clone(); + // Custom .join() which ignores disk roots. + for part in without_destdir.components() { + if let Component::Normal(s) = part { + path.push(s) + } + } + } + + // The installation command is not executed from the current directory, but from a temporary + // directory. To prevent relative paths from breaking this converts relative paths to absolute + // paths. 
std::fs::canonicalize is not used as that requires the path to actually be present. + if path.is_relative() { + path = std::env::current_dir().expect("failed to get the current directory").join(path); + assert!(path.is_absolute(), "could not make the path relative"); + } + + sanitize_sh(&path) +} + +macro_rules! install { + (($sel:ident, $builder:ident, $_config:ident), + $($name:ident, + $condition_name: ident = $path_or_alias: literal, + $default_cond:expr, + only_hosts: $only_hosts:expr, + $run_item:block $(, $c:ident)*;)+) => { + $( + #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + pub struct $name { + pub compiler: Compiler, + pub target: TargetSelection, + } + + impl $name { + #[allow(dead_code)] + fn should_build(config: &Config) -> bool { + config.extended && config.tools.as_ref() + .map_or(true, |t| t.contains($path_or_alias)) + } + } + + impl Step for $name { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = $only_hosts; + $(const $c: bool = true;)* + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let $_config = &run.builder.config; + run.$condition_name($path_or_alias).default_condition($default_cond) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure($name { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + target: run.target, + }); + } + + fn run($sel, $builder: &Builder<'_>) { + $run_item + } + })+ + } +} + +install!((self, builder, _config), + Docs, path = "src/doc", _config.docs, only_hosts: false, { + let tarball = builder.ensure(dist::Docs { host: self.target }).expect("missing docs"); + install_sh(builder, "docs", self.compiler.stage, Some(self.target), &tarball); + }; + Std, path = "library/std", true, only_hosts: false, { + for target in &builder.targets { + // `expect` should be safe, only None when host != build, but this + // only runs when host == build + let tarball = builder.ensure(dist::Std { + compiler: self.compiler, + target: *target + }).expect("missing std"); + install_sh(builder, "std", self.compiler.stage, Some(*target), &tarball); + } + }; + Cargo, alias = "cargo", Self::should_build(_config), only_hosts: true, { + let tarball = builder + .ensure(dist::Cargo { compiler: self.compiler, target: self.target }) + .expect("missing cargo"); + install_sh(builder, "cargo", self.compiler.stage, Some(self.target), &tarball); + }; + RustAnalyzer, alias = "rust-analyzer", Self::should_build(_config), only_hosts: true, { + if let Some(tarball) = + builder.ensure(dist::RustAnalyzer { compiler: self.compiler, target: self.target }) + { + install_sh(builder, "rust-analyzer", self.compiler.stage, Some(self.target), &tarball); + } else { + builder.info( + &format!("skipping Install rust-analyzer stage{} ({})", self.compiler.stage, self.target), + ); + } + }; + Clippy, alias = "clippy", Self::should_build(_config), only_hosts: true, { + let tarball = builder + .ensure(dist::Clippy { compiler: self.compiler, target: self.target }) + .expect("missing clippy"); + install_sh(builder, "clippy", self.compiler.stage, Some(self.target), &tarball); + }; + Miri, alias = "miri", Self::should_build(_config), only_hosts: true, { + if let Some(tarball) = builder.ensure(dist::Miri { compiler: self.compiler, target: self.target }) { + install_sh(builder, "miri", self.compiler.stage, Some(self.target), &tarball); + } else { + // Miri is only available on nightly + builder.info( + &format!("skipping Install miri stage{} ({})", self.compiler.stage, self.target), + ); + } + }; + LlvmTools, alias = 
"llvm-tools", Self::should_build(_config), only_hosts: true, { + if let Some(tarball) = builder.ensure(dist::LlvmTools { target: self.target }) { + install_sh(builder, "llvm-tools", self.compiler.stage, Some(self.target), &tarball); + } else { + builder.info( + &format!("skipping llvm-tools stage{} ({}): external LLVM", self.compiler.stage, self.target), + ); + } + }; + Rustfmt, alias = "rustfmt", Self::should_build(_config), only_hosts: true, { + if let Some(tarball) = builder.ensure(dist::Rustfmt { + compiler: self.compiler, + target: self.target + }) { + install_sh(builder, "rustfmt", self.compiler.stage, Some(self.target), &tarball); + } else { + builder.info( + &format!("skipping Install Rustfmt stage{} ({})", self.compiler.stage, self.target), + ); + } + }; + RustDemangler, alias = "rust-demangler", Self::should_build(_config), only_hosts: true, { + // NOTE: Even though `should_build` may return true for `extended` default tools, + // dist::RustDemangler may still return None, unless the target-dependent `profiler` config + // is also true, or the `tools` array explicitly includes "rust-demangler". + if let Some(tarball) = builder.ensure(dist::RustDemangler { + compiler: self.compiler, + target: self.target + }) { + install_sh(builder, "rust-demangler", self.compiler.stage, Some(self.target), &tarball); + } else { + builder.info( + &format!("skipping Install RustDemangler stage{} ({})", + self.compiler.stage, self.target), + ); + } + }; + Rustc, path = "compiler/rustc", true, only_hosts: true, { + let tarball = builder.ensure(dist::Rustc { + compiler: builder.compiler(builder.top_stage, self.target), + }); + install_sh(builder, "rustc", self.compiler.stage, Some(self.target), &tarball); + }; + RustcCodegenCranelift, alias = "rustc-codegen-cranelift", Self::should_build(_config), only_hosts: true, { + if let Some(tarball) = builder.ensure(dist::CodegenBackend { + compiler: self.compiler, + backend: INTERNER.intern_str("cranelift"), + }) { + install_sh(builder, "rustc-codegen-cranelift", self.compiler.stage, Some(self.target), &tarball); + } else { + builder.info( + &format!("skipping Install CodegenBackend(\"cranelift\") stage{} ({})", + self.compiler.stage, self.target), + ); + } + }; +); + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Src { + pub stage: u32, +} + +impl Step for Src { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let config = &run.builder.config; + let cond = config.extended && config.tools.as_ref().map_or(true, |t| t.contains("src")); + run.path("src").default_condition(cond) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Src { stage: run.builder.top_stage }); + } + + fn run(self, builder: &Builder<'_>) { + let tarball = builder.ensure(dist::Src); + install_sh(builder, "src", self.stage, None, &tarball); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/llvm.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/llvm.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/llvm.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/llvm.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,1366 @@ +//! Compilation of native dependencies like LLVM. +//! +//! Native projects like LLVM unfortunately aren't suited just yet for +//! compilation in build scripts that Cargo has. This is because the +//! 
compilation takes a *very* long time but also because we don't want to
+//! compile LLVM 3 times as part of a normal bootstrap (we want it cached).
+//!
+//! LLVM and compiler-rt are essentially just wired up to everything else to
+//! ensure that they're always in place if needed.
+
+use std::env;
+use std::env::consts::EXE_EXTENSION;
+use std::ffi::{OsStr, OsString};
+use std::fs::{self, File};
+use std::io;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use crate::core::builder::{Builder, RunConfig, ShouldRun, Step};
+use crate::core::config::{Config, TargetSelection};
+use crate::utils::channel;
+use crate::utils::helpers::{self, exe, get_clang_cl_resource_dir, output, t, up_to_date};
+use crate::{CLang, GitRepo, Kind};
+
+use build_helper::ci::CiEnv;
+use build_helper::git::get_git_merge_base;
+
+#[derive(Clone)]
+pub struct LlvmResult {
+    /// Path to llvm-config binary.
+    /// NB: This is always the host llvm-config!
+    pub llvm_config: PathBuf,
+    /// Path to LLVM cmake directory for the target.
+    pub llvm_cmake_dir: PathBuf,
+}
+
+pub struct Meta {
+    stamp: HashStamp,
+    res: LlvmResult,
+    out_dir: PathBuf,
+    root: String,
+}
+
+// Linker flags to pass to LLVM's CMake invocation.
+#[derive(Debug, Clone, Default)]
+struct LdFlags {
+    // CMAKE_EXE_LINKER_FLAGS
+    exe: OsString,
+    // CMAKE_SHARED_LINKER_FLAGS
+    shared: OsString,
+    // CMAKE_MODULE_LINKER_FLAGS
+    module: OsString,
+}
+
+impl LdFlags {
+    fn push_all(&mut self, s: impl AsRef<OsStr>) {
+        let s = s.as_ref();
+        self.exe.push(" ");
+        self.exe.push(s);
+        self.shared.push(" ");
+        self.shared.push(s);
+        self.module.push(" ");
+        self.module.push(s);
+    }
+}
+
+/// This returns whether we've already previously built LLVM.
+///
+/// It's used to avoid busting caches during x.py check -- if we've already built
+/// LLVM, it's fine for us to not try to avoid doing so.
+///
+/// This will return the llvm-config if it can get it (but it will not build it
+/// if not).
+pub fn prebuilt_llvm_config(
+    builder: &Builder<'_>,
+    target: TargetSelection,
+) -> Result<LlvmResult, Meta> {
+    builder.config.maybe_download_ci_llvm();
+
+    // If we're using a custom LLVM bail out here, but we can only use a
+    // custom LLVM for the build triple.
+    if let Some(config) = builder.config.target_config.get(&target) {
+        if let Some(ref s) = config.llvm_config {
+            check_llvm_version(builder, s);
+            let llvm_config = s.to_path_buf();
+            let mut llvm_cmake_dir = llvm_config.clone();
+            llvm_cmake_dir.pop();
+            llvm_cmake_dir.pop();
+            llvm_cmake_dir.push("lib");
+            llvm_cmake_dir.push("cmake");
+            llvm_cmake_dir.push("llvm");
+            return Ok(LlvmResult { llvm_config, llvm_cmake_dir });
+        }
+    }
+
+    let root = "src/llvm-project/llvm";
+    let out_dir = builder.llvm_out(target);
+
+    let mut llvm_config_ret_dir = builder.llvm_out(builder.config.build);
+    if !builder.config.build.contains("msvc") || builder.ninja() {
+        llvm_config_ret_dir.push("build");
+    }
+    llvm_config_ret_dir.push("bin");
+    let build_llvm_config = llvm_config_ret_dir.join(exe("llvm-config", builder.config.build));
+    let llvm_cmake_dir = out_dir.join("lib/cmake/llvm");
+    let res = LlvmResult { llvm_config: build_llvm_config, llvm_cmake_dir };
+
+    let stamp = out_dir.join("llvm-finished-building");
+    let stamp = HashStamp::new(stamp, builder.in_tree_llvm_info.sha());
+
+    if stamp.is_done() {
+        if stamp.hash.is_none() {
+            builder.info(
+                "Could not determine the LLVM submodule commit hash. 
\ + Assuming that an LLVM rebuild is not necessary.", + ); + builder.info(&format!( + "To force LLVM to rebuild, remove the file `{}`", + stamp.path.display() + )); + } + return Ok(res); + } + + Err(Meta { stamp, res, out_dir, root: root.into() }) +} + +/// This retrieves the LLVM sha we *want* to use, according to git history. +pub(crate) fn detect_llvm_sha(config: &Config, is_git: bool) -> String { + let llvm_sha = if is_git { + // We proceed in 2 steps. First we get the closest commit that is actually upstream. Then we + // walk back further to the last bors merge commit that actually changed LLVM. The first + // step will fail on CI because only the `auto` branch exists; we just fall back to `HEAD` + // in that case. + let closest_upstream = get_git_merge_base(&config.git_config(), Some(&config.src)) + .unwrap_or_else(|_| "HEAD".into()); + let mut rev_list = config.git(); + rev_list.args(&[ + PathBuf::from("rev-list"), + format!("--author={}", config.stage0_metadata.config.git_merge_commit_email).into(), + "-n1".into(), + "--first-parent".into(), + closest_upstream.into(), + "--".into(), + config.src.join("src/llvm-project"), + config.src.join("src/bootstrap/download-ci-llvm-stamp"), + // the LLVM shared object file is named `LLVM-12-rust-{version}-nightly` + config.src.join("src/version"), + ]); + output(&mut rev_list).trim().to_owned() + } else if let Some(info) = channel::read_commit_info_file(&config.src) { + info.sha.trim().to_owned() + } else { + "".to_owned() + }; + + if llvm_sha.is_empty() { + eprintln!("error: could not find commit hash for downloading LLVM"); + eprintln!("HELP: maybe your repository history is too shallow?"); + eprintln!("HELP: consider disabling `download-ci-llvm`"); + eprintln!("HELP: or fetch enough history to include one upstream commit"); + panic!(); + } + + llvm_sha +} + +/// Returns whether the CI-found LLVM is currently usable. +/// +/// This checks both the build triple platform to confirm we're usable at all, +/// and then verifies if the current HEAD matches the detected LLVM SHA head, +/// in which case LLVM is indicated as not available. 
+pub(crate) fn is_ci_llvm_available(config: &Config, asserts: bool) -> bool { + // This is currently all tier 1 targets and tier 2 targets with host tools + // (since others may not have CI artifacts) + // https://doc.rust-lang.org/rustc/platform-support.html#tier-1 + let supported_platforms = [ + // tier 1 + ("aarch64-unknown-linux-gnu", false), + ("i686-pc-windows-gnu", false), + ("i686-pc-windows-msvc", false), + ("i686-unknown-linux-gnu", false), + ("x86_64-unknown-linux-gnu", true), + ("x86_64-apple-darwin", true), + ("x86_64-pc-windows-gnu", true), + ("x86_64-pc-windows-msvc", true), + // tier 2 with host tools + ("aarch64-apple-darwin", false), + ("aarch64-pc-windows-msvc", false), + ("aarch64-unknown-linux-musl", false), + ("arm-unknown-linux-gnueabi", false), + ("arm-unknown-linux-gnueabihf", false), + ("armv7-unknown-linux-gnueabihf", false), + ("loongarch64-unknown-linux-gnu", false), + ("mips-unknown-linux-gnu", false), + ("mips64-unknown-linux-gnuabi64", false), + ("mips64el-unknown-linux-gnuabi64", false), + ("mipsel-unknown-linux-gnu", false), + ("powerpc-unknown-linux-gnu", false), + ("powerpc64-unknown-linux-gnu", false), + ("powerpc64le-unknown-linux-gnu", false), + ("riscv64gc-unknown-linux-gnu", false), + ("s390x-unknown-linux-gnu", false), + ("x86_64-unknown-freebsd", false), + ("x86_64-unknown-illumos", false), + ("x86_64-unknown-linux-musl", false), + ("x86_64-unknown-netbsd", false), + ]; + + if !supported_platforms.contains(&(&*config.build.triple, asserts)) + && (asserts || !supported_platforms.contains(&(&*config.build.triple, true))) + { + return false; + } + + if is_ci_llvm_modified(config) { + eprintln!("Detected LLVM as non-available: running in CI and modified LLVM in this change"); + return false; + } + + true +} + +/// Returns true if we're running in CI with modified LLVM (and thus can't download it) +pub(crate) fn is_ci_llvm_modified(config: &Config) -> bool { + CiEnv::is_ci() && config.rust_info.is_managed_git_subrepository() && { + // We assume we have access to git, so it's okay to unconditionally pass + // `true` here. + let llvm_sha = detect_llvm_sha(config, true); + let head_sha = output(config.git().arg("rev-parse").arg("HEAD")); + let head_sha = head_sha.trim(); + llvm_sha == head_sha + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Llvm { + pub target: TargetSelection, +} + +impl Step for Llvm { + type Output = LlvmResult; + + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/llvm-project").path("src/llvm-project/llvm") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Llvm { target: run.target }); + } + + /// Compile LLVM for `target`. + fn run(self, builder: &Builder<'_>) -> LlvmResult { + let target = self.target; + let target_native = if self.target.starts_with("riscv") { + // RISC-V target triples in Rust is not named the same as C compiler target triples. + // This converts Rust RISC-V target triples to C compiler triples. + let idx = target.triple.find('-').unwrap(); + + format!("riscv{}{}", &target.triple[5..7], &target.triple[idx..]) + } else if self.target.starts_with("powerpc") && self.target.ends_with("freebsd") { + // FreeBSD 13 had incompatible ABI changes on all PowerPC platforms. + // Set the version suffix to 13.0 so the correct target details are used. 
+ format!("{}{}", self.target, "13.0") + } else { + target.to_string() + }; + + let Meta { stamp, res, out_dir, root } = match prebuilt_llvm_config(builder, target) { + Ok(p) => return p, + Err(m) => m, + }; + + builder.update_submodule(&Path::new("src").join("llvm-project")); + if builder.llvm_link_shared() && target.contains("windows") { + panic!("shared linking to LLVM is not currently supported on {}", target.triple); + } + + let _guard = builder.msg_unstaged(Kind::Build, "LLVM", target); + t!(stamp.remove()); + let _time = helpers::timeit(&builder); + t!(fs::create_dir_all(&out_dir)); + + // https://llvm.org/docs/CMake.html + let mut cfg = cmake::Config::new(builder.src.join(root)); + let mut ldflags = LdFlags::default(); + + let profile = match (builder.config.llvm_optimize, builder.config.llvm_release_debuginfo) { + (false, _) => "Debug", + (true, false) => "Release", + (true, true) => "RelWithDebInfo", + }; + + // NOTE: remember to also update `config.example.toml` when changing the + // defaults! + let llvm_targets = match &builder.config.llvm_targets { + Some(s) => s, + None => { + "AArch64;ARM;BPF;Hexagon;LoongArch;MSP430;Mips;NVPTX;PowerPC;RISCV;\ + Sparc;SystemZ;WebAssembly;X86" + } + }; + + let llvm_exp_targets = match builder.config.llvm_experimental_targets { + Some(ref s) => s, + None => "AVR;M68k;CSKY", + }; + + let assertions = if builder.config.llvm_assertions { "ON" } else { "OFF" }; + let plugins = if builder.config.llvm_plugins { "ON" } else { "OFF" }; + let enable_tests = if builder.config.llvm_tests { "ON" } else { "OFF" }; + let enable_warnings = if builder.config.llvm_enable_warnings { "ON" } else { "OFF" }; + + cfg.out_dir(&out_dir) + .profile(profile) + .define("LLVM_ENABLE_ASSERTIONS", assertions) + .define("LLVM_UNREACHABLE_OPTIMIZE", "OFF") + .define("LLVM_ENABLE_PLUGINS", plugins) + .define("LLVM_TARGETS_TO_BUILD", llvm_targets) + .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets) + .define("LLVM_INCLUDE_EXAMPLES", "OFF") + .define("LLVM_INCLUDE_DOCS", "OFF") + .define("LLVM_INCLUDE_BENCHMARKS", "OFF") + .define("LLVM_INCLUDE_TESTS", enable_tests) + .define("LLVM_ENABLE_TERMINFO", "OFF") + .define("LLVM_ENABLE_LIBEDIT", "OFF") + .define("LLVM_ENABLE_BINDINGS", "OFF") + .define("LLVM_ENABLE_Z3_SOLVER", "OFF") + .define("LLVM_PARALLEL_COMPILE_JOBS", builder.jobs().to_string()) + .define("LLVM_TARGET_ARCH", target_native.split('-').next().unwrap()) + .define("LLVM_DEFAULT_TARGET_TRIPLE", target_native) + .define("LLVM_ENABLE_WARNINGS", enable_warnings); + + // Parts of our test suite rely on the `FileCheck` tool, which is built by default in + // `build/$TARGET/llvm/build/bin` is but *not* then installed to `build/$TARGET/llvm/bin`. + // This flag makes sure `FileCheck` is copied in the final binaries directory. + cfg.define("LLVM_INSTALL_UTILS", "ON"); + + if builder.config.llvm_profile_generate { + cfg.define("LLVM_BUILD_INSTRUMENTED", "IR"); + if let Ok(llvm_profile_dir) = std::env::var("LLVM_PROFILE_DIR") { + cfg.define("LLVM_PROFILE_DATA_DIR", llvm_profile_dir); + } + cfg.define("LLVM_BUILD_RUNTIME", "No"); + } + if let Some(path) = builder.config.llvm_profile_use.as_ref() { + cfg.define("LLVM_PROFDATA_FILE", &path); + } + + // Disable zstd to avoid a dependency on libzstd.so. + cfg.define("LLVM_ENABLE_ZSTD", "OFF"); + + if !target.contains("windows") { + cfg.define("LLVM_ENABLE_ZLIB", "ON"); + } else { + cfg.define("LLVM_ENABLE_ZLIB", "OFF"); + } + + // Are we compiling for iOS/tvOS/watchOS? 
+ if target.contains("apple-ios") + || target.contains("apple-tvos") + || target.contains("apple-watchos") + { + // These two defines prevent CMake from automatically trying to add a MacOSX sysroot, which leads to a compiler error. + cfg.define("CMAKE_OSX_SYSROOT", "/"); + cfg.define("CMAKE_OSX_DEPLOYMENT_TARGET", ""); + // Prevent cmake from adding -bundle to CFLAGS automatically, which leads to a compiler error because "-bitcode_bundle" also gets added. + cfg.define("LLVM_ENABLE_PLUGINS", "OFF"); + // Zlib fails to link properly, leading to a compiler error. + cfg.define("LLVM_ENABLE_ZLIB", "OFF"); + } + + // This setting makes the LLVM tools link to the dynamic LLVM library, + // which saves both memory during parallel links and overall disk space + // for the tools. We don't do this on every platform as it doesn't work + // equally well everywhere. + if builder.llvm_link_shared() { + cfg.define("LLVM_LINK_LLVM_DYLIB", "ON"); + } + + if (target.starts_with("riscv") || target.starts_with("csky")) + && !target.contains("freebsd") + && !target.contains("openbsd") + && !target.contains("netbsd") + { + // RISC-V and CSKY GCC erroneously requires linking against + // `libatomic` when using 1-byte and 2-byte C++ + // atomics but the LLVM build system check cannot + // detect this. Therefore it is set manually here. + // Some BSD uses Clang as its system compiler and + // provides no libatomic in its base system so does + // not want this. + ldflags.exe.push(" -latomic"); + ldflags.shared.push(" -latomic"); + } + + if target.starts_with("mips") && target.contains("netbsd") { + // LLVM wants 64-bit atomics, while mipsel is 32-bit only, so needs -latomic + ldflags.exe.push(" -latomic"); + ldflags.shared.push(" -latomic"); + } + + if target.contains("msvc") { + cfg.define("LLVM_USE_CRT_DEBUG", "MT"); + cfg.define("LLVM_USE_CRT_RELEASE", "MT"); + cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT"); + cfg.static_crt(true); + } + + if target.starts_with("i686") { + cfg.define("LLVM_BUILD_32_BITS", "ON"); + } + + let mut enabled_llvm_projects = Vec::new(); + + if helpers::forcing_clang_based_tests() { + enabled_llvm_projects.push("clang"); + enabled_llvm_projects.push("compiler-rt"); + } + + if builder.config.llvm_polly { + enabled_llvm_projects.push("polly"); + } + + if builder.config.llvm_clang { + enabled_llvm_projects.push("clang"); + } + + // We want libxml to be disabled. + // See https://github.com/rust-lang/rust/pull/50104 + cfg.define("LLVM_ENABLE_LIBXML2", "OFF"); + + if !enabled_llvm_projects.is_empty() { + enabled_llvm_projects.sort(); + enabled_llvm_projects.dedup(); + cfg.define("LLVM_ENABLE_PROJECTS", enabled_llvm_projects.join(";")); + } + + if let Some(num_linkers) = builder.config.llvm_link_jobs { + if num_linkers > 0 { + cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string()); + } + } + + // https://llvm.org/docs/HowToCrossCompileLLVM.html + if target != builder.config.build { + let LlvmResult { llvm_config, .. 
} = + builder.ensure(Llvm { target: builder.config.build }); + if !builder.config.dry_run() { + let llvm_bindir = output(Command::new(&llvm_config).arg("--bindir")); + let host_bin = Path::new(llvm_bindir.trim()); + cfg.define( + "LLVM_TABLEGEN", + host_bin.join("llvm-tblgen").with_extension(EXE_EXTENSION), + ); + // LLVM_NM is required for cross compiling using MSVC + cfg.define("LLVM_NM", host_bin.join("llvm-nm").with_extension(EXE_EXTENSION)); + } + cfg.define("LLVM_CONFIG_PATH", llvm_config); + if builder.config.llvm_clang { + let build_bin = builder.llvm_out(builder.config.build).join("build").join("bin"); + let clang_tblgen = build_bin.join("clang-tblgen").with_extension(EXE_EXTENSION); + if !builder.config.dry_run() && !clang_tblgen.exists() { + panic!("unable to find {}", clang_tblgen.display()); + } + cfg.define("CLANG_TABLEGEN", clang_tblgen); + } + } + + let llvm_version_suffix = if let Some(ref suffix) = builder.config.llvm_version_suffix { + // Allow version-suffix="" to not define a version suffix at all. + if !suffix.is_empty() { Some(suffix.to_string()) } else { None } + } else if builder.config.channel == "dev" { + // Changes to a version suffix require a complete rebuild of the LLVM. + // To avoid rebuilds during a time of version bump, don't include rustc + // release number on the dev channel. + Some("-rust-dev".to_string()) + } else { + Some(format!("-rust-{}-{}", builder.version, builder.config.channel)) + }; + if let Some(ref suffix) = llvm_version_suffix { + cfg.define("LLVM_VERSION_SUFFIX", suffix); + } + + configure_cmake(builder, target, &mut cfg, true, ldflags, &[]); + configure_llvm(builder, target, &mut cfg); + + for (key, val) in &builder.config.llvm_build_config { + cfg.define(key, val); + } + + if builder.config.dry_run() { + return res; + } + + cfg.build(); + + // Helper to find the name of LLVM's shared library on darwin and linux. + let find_llvm_lib_name = |extension| { + let mut cmd = Command::new(&res.llvm_config); + let version = output(cmd.arg("--version")); + let major = version.split('.').next().unwrap(); + + match &llvm_version_suffix { + Some(version_suffix) => format!("libLLVM-{major}{version_suffix}.{extension}"), + None => format!("libLLVM-{major}.{extension}"), + } + }; + + // When building LLVM with LLVM_LINK_LLVM_DYLIB for macOS, an unversioned + // libLLVM.dylib will be built. However, llvm-config will still look + // for a versioned path like libLLVM-14.dylib. Manually create a symbolic + // link to make llvm-config happy. + if builder.llvm_link_shared() && target.contains("apple-darwin") { + let lib_name = find_llvm_lib_name("dylib"); + let lib_llvm = out_dir.join("build").join("lib").join(lib_name); + if !lib_llvm.exists() { + t!(builder.symlink_file("libLLVM.dylib", &lib_llvm)); + } + } + + // When building LLVM as a shared library on linux, it can contain unexpected debuginfo: + // some can come from the C++ standard library. Unless we're explicitly requesting LLVM to + // be built with debuginfo, strip it away after the fact, to make dist artifacts smaller. + if builder.llvm_link_shared() + && builder.config.llvm_optimize + && !builder.config.llvm_release_debuginfo + { + // Find the name of the LLVM shared library that we just built. + let lib_name = find_llvm_lib_name("so"); + + // If the shared library exists in LLVM's `/build/lib/` or `/lib/` folders, strip its + // debuginfo. 
+ crate::core::build_steps::compile::strip_debug( + builder, + target, + &out_dir.join("lib").join(&lib_name), + ); + crate::core::build_steps::compile::strip_debug( + builder, + target, + &out_dir.join("build").join("lib").join(&lib_name), + ); + } + + t!(stamp.write()); + + res + } +} + +fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) { + if builder.config.dry_run() { + return; + } + + let mut cmd = Command::new(llvm_config); + let version = output(cmd.arg("--version")); + let mut parts = version.split('.').take(2).filter_map(|s| s.parse::().ok()); + if let (Some(major), Some(_minor)) = (parts.next(), parts.next()) { + if major >= 15 { + return; + } + } + panic!("\n\nbad LLVM version: {version}, need >=15.0\n\n") +} + +fn configure_cmake( + builder: &Builder<'_>, + target: TargetSelection, + cfg: &mut cmake::Config, + use_compiler_launcher: bool, + mut ldflags: LdFlags, + extra_compiler_flags: &[&str], +) { + // Do not print installation messages for up-to-date files. + // LLVM and LLD builds can produce a lot of those and hit CI limits on log size. + cfg.define("CMAKE_INSTALL_MESSAGE", "LAZY"); + + // Do not allow the user's value of DESTDIR to influence where + // LLVM will install itself. LLVM must always be installed in our + // own build directories. + cfg.env("DESTDIR", ""); + + if builder.ninja() { + cfg.generator("Ninja"); + } + cfg.target(&target.triple).host(&builder.config.build.triple); + + if target != builder.config.build { + cfg.define("CMAKE_CROSSCOMPILING", "True"); + + if target.contains("netbsd") { + cfg.define("CMAKE_SYSTEM_NAME", "NetBSD"); + } else if target.contains("dragonfly") { + cfg.define("CMAKE_SYSTEM_NAME", "DragonFly"); + } else if target.contains("freebsd") { + cfg.define("CMAKE_SYSTEM_NAME", "FreeBSD"); + } else if target.contains("windows") { + cfg.define("CMAKE_SYSTEM_NAME", "Windows"); + } else if target.contains("haiku") { + cfg.define("CMAKE_SYSTEM_NAME", "Haiku"); + } else if target.contains("solaris") || target.contains("illumos") { + cfg.define("CMAKE_SYSTEM_NAME", "SunOS"); + } else if target.contains("linux") { + cfg.define("CMAKE_SYSTEM_NAME", "Linux"); + } else { + builder.info(&format!( + "could not determine CMAKE_SYSTEM_NAME from the target `{target}`, build may fail", + )); + } + + // When cross-compiling we should also set CMAKE_SYSTEM_VERSION, but in + // that case like CMake we cannot easily determine system version either. + // + // Since, the LLVM itself makes rather limited use of version checks in + // CMakeFiles (and then only in tests), and so far no issues have been + // reported, the system version is currently left unset. + + if target.contains("darwin") { + // Make sure that CMake does not build universal binaries on macOS. + // Explicitly specify the one single target architecture. + if target.starts_with("aarch64") { + // macOS uses a different name for building arm64 + cfg.define("CMAKE_OSX_ARCHITECTURES", "arm64"); + } else if target.starts_with("i686") { + // macOS uses a different name for building i386 + cfg.define("CMAKE_OSX_ARCHITECTURES", "i386"); + } else { + cfg.define("CMAKE_OSX_ARCHITECTURES", target.triple.split('-').next().unwrap()); + } + } + } + + let sanitize_cc = |cc: &Path| { + if target.contains("msvc") { + OsString::from(cc.to_str().unwrap().replace("\\", "/")) + } else { + cc.as_os_str().to_owned() + } + }; + + // MSVC with CMake uses msbuild by default which doesn't respect these + // vars that we'd otherwise configure. In that case we just skip this + // entirely. 
+ if target.contains("msvc") && !builder.ninja() { + return; + } + + let (cc, cxx) = match builder.config.llvm_clang_cl { + Some(ref cl) => (cl.into(), cl.into()), + None => (builder.cc(target), builder.cxx(target).unwrap()), + }; + + // Handle msvc + ninja + ccache specially (this is what the bots use) + if target.contains("msvc") && builder.ninja() && builder.config.ccache.is_some() { + let mut wrap_cc = env::current_exe().expect("failed to get cwd"); + wrap_cc.set_file_name("sccache-plus-cl.exe"); + + cfg.define("CMAKE_C_COMPILER", sanitize_cc(&wrap_cc)) + .define("CMAKE_CXX_COMPILER", sanitize_cc(&wrap_cc)); + cfg.env("SCCACHE_PATH", builder.config.ccache.as_ref().unwrap()) + .env("SCCACHE_TARGET", target.triple) + .env("SCCACHE_CC", &cc) + .env("SCCACHE_CXX", &cxx); + + // Building LLVM on MSVC can be a little ludicrous at times. We're so far + // off the beaten path here that I'm not really sure this is even half + // supported any more. Here we're trying to: + // + // * Build LLVM on MSVC + // * Build LLVM with `clang-cl` instead of `cl.exe` + // * Build a project with `sccache` + // * Build for 32-bit as well + // * Build with Ninja + // + // For `cl.exe` there are different binaries to compile 32/64 bit which + // we use but for `clang-cl` there's only one which internally + // multiplexes via flags. As a result it appears that CMake's detection + // of a compiler's architecture and such on MSVC **doesn't** pass any + // custom flags we pass in CMAKE_CXX_FLAGS below. This means that if we + // use `clang-cl.exe` it's always diagnosed as a 64-bit compiler which + // definitely causes problems since all the env vars are pointing to + // 32-bit libraries. + // + // To hack around this... again... we pass an argument that's + // unconditionally passed in the sccache shim. This'll get CMake to + // correctly diagnose it's doing a 32-bit compilation and LLVM will + // internally configure itself appropriately. + if builder.config.llvm_clang_cl.is_some() && target.contains("i686") { + cfg.env("SCCACHE_EXTRA_ARGS", "-m32"); + } + } else { + // If ccache is configured we inform the build a little differently how + // to invoke ccache while also invoking our compilers. 
+ if use_compiler_launcher { + if let Some(ref ccache) = builder.config.ccache { + cfg.define("CMAKE_C_COMPILER_LAUNCHER", ccache) + .define("CMAKE_CXX_COMPILER_LAUNCHER", ccache); + } + } + cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc)) + .define("CMAKE_CXX_COMPILER", sanitize_cc(&cxx)) + .define("CMAKE_ASM_COMPILER", sanitize_cc(&cc)); + } + + cfg.build_arg("-j").build_arg(builder.jobs().to_string()); + let mut cflags: OsString = builder.cflags(target, GitRepo::Llvm, CLang::C).join(" ").into(); + if let Some(ref s) = builder.config.llvm_cflags { + cflags.push(" "); + cflags.push(s); + } + + if builder.config.llvm_clang_cl.is_some() { + cflags.push(&format!(" --target={target}")); + } + for flag in extra_compiler_flags { + cflags.push(&format!(" {flag}")); + } + cfg.define("CMAKE_C_FLAGS", cflags); + let mut cxxflags: OsString = builder.cflags(target, GitRepo::Llvm, CLang::Cxx).join(" ").into(); + if let Some(ref s) = builder.config.llvm_cxxflags { + cxxflags.push(" "); + cxxflags.push(s); + } + if builder.config.llvm_clang_cl.is_some() { + cxxflags.push(&format!(" --target={target}")); + } + for flag in extra_compiler_flags { + cxxflags.push(&format!(" {flag}")); + } + cfg.define("CMAKE_CXX_FLAGS", cxxflags); + if let Some(ar) = builder.ar(target) { + if ar.is_absolute() { + // LLVM build breaks if `CMAKE_AR` is a relative path, for some reason it + // tries to resolve this path in the LLVM build directory. + cfg.define("CMAKE_AR", sanitize_cc(&ar)); + } + } + + if let Some(ranlib) = builder.ranlib(target) { + if ranlib.is_absolute() { + // LLVM build breaks if `CMAKE_RANLIB` is a relative path, for some reason it + // tries to resolve this path in the LLVM build directory. + cfg.define("CMAKE_RANLIB", sanitize_cc(&ranlib)); + } + } + + if let Some(ref flags) = builder.config.llvm_ldflags { + ldflags.push_all(flags); + } + + if let Some(flags) = get_var("LDFLAGS", &builder.config.build.triple, &target.triple) { + ldflags.push_all(&flags); + } + + // For distribution we want the LLVM tools to be *statically* linked to libstdc++. + // We also do this if the user explicitly requested static libstdc++. + if builder.config.llvm_static_stdcpp + && !target.contains("msvc") + && !target.contains("netbsd") + && !target.contains("solaris") + { + if target.contains("apple") || target.contains("windows") { + ldflags.push_all("-static-libstdc++"); + } else { + ldflags.push_all("-Wl,-Bsymbolic -static-libstdc++"); + } + } + + cfg.define("CMAKE_SHARED_LINKER_FLAGS", &ldflags.shared); + cfg.define("CMAKE_MODULE_LINKER_FLAGS", &ldflags.module); + cfg.define("CMAKE_EXE_LINKER_FLAGS", &ldflags.exe); + + if env::var_os("SCCACHE_ERROR_LOG").is_some() { + cfg.env("RUSTC_LOG", "sccache=warn"); + } +} + +fn configure_llvm(builder: &Builder<'_>, target: TargetSelection, cfg: &mut cmake::Config) { + // ThinLTO is only available when building with LLVM, enabling LLD is required. + // Apple's linker ld64 supports ThinLTO out of the box though, so don't use LLD on Darwin. 
+ if builder.config.llvm_thin_lto { + cfg.define("LLVM_ENABLE_LTO", "Thin"); + if !target.contains("apple") { + cfg.define("LLVM_ENABLE_LLD", "ON"); + } + } + + if let Some(ref linker) = builder.config.llvm_use_linker { + cfg.define("LLVM_USE_LINKER", linker); + } + + if builder.config.llvm_allow_old_toolchain { + cfg.define("LLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN", "YES"); + } +} + +// Adapted from https://github.com/alexcrichton/cc-rs/blob/fba7feded71ee4f63cfe885673ead6d7b4f2f454/src/lib.rs#L2347-L2365 +fn get_var(var_base: &str, host: &str, target: &str) -> Option { + let kind = if host == target { "HOST" } else { "TARGET" }; + let target_u = target.replace("-", "_"); + env::var_os(&format!("{var_base}_{target}")) + .or_else(|| env::var_os(&format!("{}_{}", var_base, target_u))) + .or_else(|| env::var_os(&format!("{}_{}", kind, var_base))) + .or_else(|| env::var_os(var_base)) +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Lld { + pub target: TargetSelection, +} + +impl Step for Lld { + type Output = PathBuf; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/llvm-project/lld") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Lld { target: run.target }); + } + + /// Compile LLD for `target`. + fn run(self, builder: &Builder<'_>) -> PathBuf { + if builder.config.dry_run() { + return PathBuf::from("lld-out-dir-test-gen"); + } + let target = self.target; + + let LlvmResult { llvm_config, llvm_cmake_dir } = builder.ensure(Llvm { target }); + + // The `dist` step packages LLD next to LLVM's binaries for download-ci-llvm. The root path + // we usually expect here is `./build/$triple/ci-llvm/`, with the binaries in its `bin` + // subfolder. We check if that's the case, and if LLD's binary already exists there next to + // `llvm-config`: if so, we can use it instead of building LLVM/LLD from source. + let ci_llvm_bin = llvm_config.parent().unwrap(); + if ci_llvm_bin.is_dir() && ci_llvm_bin.file_name().unwrap() == "bin" { + let lld_path = ci_llvm_bin.join(exe("lld", target)); + if lld_path.exists() { + // The following steps copying `lld` as `rust-lld` to the sysroot, expect it in the + // `bin` subfolder of this step's out dir. + return ci_llvm_bin.parent().unwrap().to_path_buf(); + } + } + + let out_dir = builder.lld_out(target); + let done_stamp = out_dir.join("lld-finished-building"); + if done_stamp.exists() { + return out_dir; + } + + let _guard = builder.msg_unstaged(Kind::Build, "LLD", target); + let _time = helpers::timeit(&builder); + t!(fs::create_dir_all(&out_dir)); + + let mut cfg = cmake::Config::new(builder.src.join("src/llvm-project/lld")); + let mut ldflags = LdFlags::default(); + + // When building LLD as part of a build with instrumentation on windows, for example + // when doing PGO on CI, cmake or clang-cl don't automatically link clang's + // profiler runtime in. In that case, we need to manually ask cmake to do it, to avoid + // linking errors, much like LLVM's cmake setup does in that situation. + if builder.config.llvm_profile_generate && target.contains("msvc") { + if let Some(clang_cl_path) = builder.config.llvm_clang_cl.as_ref() { + // Find clang's runtime library directory and push that as a search path to the + // cmake linker flags. 
+ let clang_rt_dir = get_clang_cl_resource_dir(clang_cl_path); + ldflags.push_all(&format!("/libpath:{}", clang_rt_dir.display())); + } + } + + // LLD is built as an LLVM tool, but is distributed outside of the `llvm-tools` component, + // which impacts where it expects to find LLVM's shared library. This causes #80703. + // + // LLD is distributed at "$root/lib/rustlib/$host/bin/rust-lld", but the `libLLVM-*.so` it + // needs is distributed at "$root/lib". The default rpath of "$ORIGIN/../lib" points at the + // lib path for LLVM tools, not the one for rust binaries. + // + // (The `llvm-tools` component copies the .so there for the other tools, and with that + // component installed, one can successfully invoke `rust-lld` directly without rustup's + // `LD_LIBRARY_PATH` overrides) + // + if builder.config.rpath_enabled(target) + && helpers::use_host_linker(target) + && builder.config.llvm_link_shared() + && target.contains("linux") + { + // So we inform LLD where it can find LLVM's libraries by adding an rpath entry to the + // expected parent `lib` directory. + // + // Be careful when changing this path, we need to ensure it's quoted or escaped: + // `$ORIGIN` would otherwise be expanded when the `LdFlags` are passed verbatim to + // cmake. + ldflags.push_all("-Wl,-rpath,'$ORIGIN/../../../'"); + } + + configure_cmake(builder, target, &mut cfg, true, ldflags, &[]); + configure_llvm(builder, target, &mut cfg); + + // Re-use the same flags as llvm to control the level of debug information + // generated for lld. + let profile = match (builder.config.llvm_optimize, builder.config.llvm_release_debuginfo) { + (false, _) => "Debug", + (true, false) => "Release", + (true, true) => "RelWithDebInfo", + }; + + cfg.out_dir(&out_dir) + .profile(profile) + .define("LLVM_CMAKE_DIR", llvm_cmake_dir) + .define("LLVM_INCLUDE_TESTS", "OFF"); + + if target != builder.config.build { + // Use the host llvm-tblgen binary. + cfg.define( + "LLVM_TABLEGEN_EXE", + llvm_config.with_file_name("llvm-tblgen").with_extension(EXE_EXTENSION), + ); + } + + cfg.build(); + + t!(File::create(&done_stamp)); + out_dir + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Sanitizers { + pub target: TargetSelection, +} + +impl Step for Sanitizers { + type Output = Vec; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("sanitizers") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Sanitizers { target: run.target }); + } + + /// Builds sanitizer runtime libraries. + fn run(self, builder: &Builder<'_>) -> Self::Output { + let compiler_rt_dir = builder.src.join("src/llvm-project/compiler-rt"); + if !compiler_rt_dir.exists() { + return Vec::new(); + } + + let out_dir = builder.native_dir(self.target).join("sanitizers"); + let runtimes = supported_sanitizers(&out_dir, self.target, &builder.config.channel); + if runtimes.is_empty() { + return runtimes; + } + + let LlvmResult { llvm_config, .. 
} = builder.ensure(Llvm { target: builder.config.build }); + if builder.config.dry_run() { + return runtimes; + } + + let stamp = out_dir.join("sanitizers-finished-building"); + let stamp = HashStamp::new(stamp, builder.in_tree_llvm_info.sha()); + + if stamp.is_done() { + if stamp.hash.is_none() { + builder.info(&format!( + "Rebuild sanitizers by removing the file `{}`", + stamp.path.display() + )); + } + return runtimes; + } + + let _guard = builder.msg_unstaged(Kind::Build, "sanitizers", self.target); + t!(stamp.remove()); + let _time = helpers::timeit(&builder); + + let mut cfg = cmake::Config::new(&compiler_rt_dir); + cfg.profile("Release"); + cfg.define("CMAKE_C_COMPILER_TARGET", self.target.triple); + cfg.define("COMPILER_RT_BUILD_BUILTINS", "OFF"); + cfg.define("COMPILER_RT_BUILD_CRT", "OFF"); + cfg.define("COMPILER_RT_BUILD_LIBFUZZER", "OFF"); + cfg.define("COMPILER_RT_BUILD_PROFILE", "OFF"); + cfg.define("COMPILER_RT_BUILD_SANITIZERS", "ON"); + cfg.define("COMPILER_RT_BUILD_XRAY", "OFF"); + cfg.define("COMPILER_RT_DEFAULT_TARGET_ONLY", "ON"); + cfg.define("COMPILER_RT_USE_LIBCXX", "OFF"); + cfg.define("LLVM_CONFIG_PATH", &llvm_config); + + // On Darwin targets the sanitizer runtimes are build as universal binaries. + // Unfortunately sccache currently lacks support to build them successfully. + // Disable compiler launcher on Darwin targets to avoid potential issues. + let use_compiler_launcher = !self.target.contains("apple-darwin"); + let extra_compiler_flags: &[&str] = + if self.target.contains("apple") { &["-fembed-bitcode=off"] } else { &[] }; + configure_cmake( + builder, + self.target, + &mut cfg, + use_compiler_launcher, + LdFlags::default(), + extra_compiler_flags, + ); + + t!(fs::create_dir_all(&out_dir)); + cfg.out_dir(out_dir); + + for runtime in &runtimes { + cfg.build_target(&runtime.cmake_target); + cfg.build(); + } + t!(stamp.write()); + + runtimes + } +} + +#[derive(Clone, Debug)] +pub struct SanitizerRuntime { + /// CMake target used to build the runtime. + pub cmake_target: String, + /// Path to the built runtime library. + pub path: PathBuf, + /// Library filename that will be used rustc. + pub name: String, +} + +/// Returns sanitizers available on a given target. 
+fn supported_sanitizers(
+    out_dir: &Path,
+    target: TargetSelection,
+    channel: &str,
+) -> Vec<SanitizerRuntime> {
+    let darwin_libs = |os: &str, components: &[&str]| -> Vec<SanitizerRuntime> {
+        components
+            .iter()
+            .map(move |c| SanitizerRuntime {
+                cmake_target: format!("clang_rt.{}_{}_dynamic", c, os),
+                path: out_dir
+                    .join(&format!("build/lib/darwin/libclang_rt.{}_{}_dynamic.dylib", c, os)),
+                name: format!("librustc-{}_rt.{}.dylib", channel, c),
+            })
+            .collect()
+    };
+
+    let common_libs = |os: &str, arch: &str, components: &[&str]| -> Vec<SanitizerRuntime> {
+        components
+            .iter()
+            .map(move |c| SanitizerRuntime {
+                cmake_target: format!("clang_rt.{}-{}", c, arch),
+                path: out_dir.join(&format!("build/lib/{}/libclang_rt.{}-{}.a", os, c, arch)),
+                name: format!("librustc-{}_rt.{}.a", channel, c),
+            })
+            .collect()
+    };
+
+    match &*target.triple {
+        "aarch64-apple-darwin" => darwin_libs("osx", &["asan", "lsan", "tsan"]),
+        "aarch64-apple-ios" => darwin_libs("ios", &["asan", "tsan"]),
+        "aarch64-apple-ios-sim" => darwin_libs("iossim", &["asan", "tsan"]),
+        "aarch64-apple-ios-macabi" => darwin_libs("osx", &["asan", "lsan", "tsan"]),
+        "aarch64-unknown-fuchsia" => common_libs("fuchsia", "aarch64", &["asan"]),
+        "aarch64-unknown-linux-gnu" => {
+            common_libs("linux", "aarch64", &["asan", "lsan", "msan", "tsan", "hwasan"])
+        }
+        "aarch64-unknown-linux-ohos" => {
+            common_libs("linux", "aarch64", &["asan", "lsan", "msan", "tsan", "hwasan"])
+        }
+        "x86_64-apple-darwin" => darwin_libs("osx", &["asan", "lsan", "tsan"]),
+        "x86_64-unknown-fuchsia" => common_libs("fuchsia", "x86_64", &["asan"]),
+        "x86_64-apple-ios" => darwin_libs("iossim", &["asan", "tsan"]),
+        "x86_64-apple-ios-macabi" => darwin_libs("osx", &["asan", "lsan", "tsan"]),
+        "x86_64-unknown-freebsd" => common_libs("freebsd", "x86_64", &["asan", "msan", "tsan"]),
+        "x86_64-unknown-netbsd" => {
+            common_libs("netbsd", "x86_64", &["asan", "lsan", "msan", "tsan"])
+        }
+        "x86_64-unknown-illumos" => common_libs("illumos", "x86_64", &["asan"]),
+        "x86_64-pc-solaris" => common_libs("solaris", "x86_64", &["asan"]),
+        "x86_64-unknown-linux-gnu" => {
+            common_libs("linux", "x86_64", &["asan", "lsan", "msan", "safestack", "tsan"])
+        }
+        "x86_64-unknown-linux-musl" => {
+            common_libs("linux", "x86_64", &["asan", "lsan", "msan", "tsan"])
+        }
+        "s390x-unknown-linux-gnu" => {
+            common_libs("linux", "s390x", &["asan", "lsan", "msan", "tsan"])
+        }
+        "s390x-unknown-linux-musl" => {
+            common_libs("linux", "s390x", &["asan", "lsan", "msan", "tsan"])
+        }
+        "x86_64-unknown-linux-ohos" => {
+            common_libs("linux", "x86_64", &["asan", "lsan", "msan", "tsan"])
+        }
+        _ => Vec::new(),
+    }
+}
+
+struct HashStamp {
+    path: PathBuf,
+    hash: Option<Vec<u8>>,
+}
+
+impl HashStamp {
+    fn new(path: PathBuf, hash: Option<&str>) -> Self {
+        HashStamp { path, hash: hash.map(|s| s.as_bytes().to_owned()) }
+    }
+
+    fn is_done(&self) -> bool {
+        match fs::read(&self.path) {
+            Ok(h) => self.hash.as_deref().unwrap_or(b"") == h.as_slice(),
+            Err(e) if e.kind() == io::ErrorKind::NotFound => false,
+            Err(e) => {
+                panic!("failed to read stamp file `{}`: {}", self.path.display(), e);
+            }
+        }
+    }
+
+    fn remove(&self) -> io::Result<()> {
+        match fs::remove_file(&self.path) {
+            Ok(()) => Ok(()),
+            Err(e) => {
+                if e.kind() == io::ErrorKind::NotFound {
+                    Ok(())
+                } else {
+                    Err(e)
+                }
+            }
+        }
+    }
+
+    fn write(&self) -> io::Result<()> {
+        fs::write(&self.path, self.hash.as_deref().unwrap_or(b""))
+    }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct CrtBeginEnd {
+    pub target: TargetSelection,
+}
+
+impl Step for 
CrtBeginEnd { + type Output = PathBuf; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/llvm-project/compiler-rt/lib/crt") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(CrtBeginEnd { target: run.target }); + } + + /// Build crtbegin.o/crtend.o for musl target. + fn run(self, builder: &Builder<'_>) -> Self::Output { + builder.update_submodule(&Path::new("src/llvm-project")); + + let out_dir = builder.native_dir(self.target).join("crt"); + + if builder.config.dry_run() { + return out_dir; + } + + let crtbegin_src = builder.src.join("src/llvm-project/compiler-rt/lib/builtins/crtbegin.c"); + let crtend_src = builder.src.join("src/llvm-project/compiler-rt/lib/builtins/crtend.c"); + if up_to_date(&crtbegin_src, &out_dir.join("crtbegin.o")) + && up_to_date(&crtend_src, &out_dir.join("crtendS.o")) + { + return out_dir; + } + + let _guard = builder.msg_unstaged(Kind::Build, "crtbegin.o and crtend.o", self.target); + t!(fs::create_dir_all(&out_dir)); + + let mut cfg = cc::Build::new(); + + if let Some(ar) = builder.ar(self.target) { + cfg.archiver(ar); + } + cfg.compiler(builder.cc(self.target)); + cfg.cargo_metadata(false) + .out_dir(&out_dir) + .target(&self.target.triple) + .host(&builder.config.build.triple) + .warnings(false) + .debug(false) + .opt_level(3) + .file(crtbegin_src) + .file(crtend_src); + + // Those flags are defined in src/llvm-project/compiler-rt/lib/crt/CMakeLists.txt + // Currently only consumer of those objects is musl, which use .init_array/.fini_array + // instead of .ctors/.dtors + cfg.flag("-std=c11") + .define("CRT_HAS_INITFINI_ARRAY", None) + .define("EH_USE_FRAME_REGISTRY", None); + + cfg.compile("crt"); + + t!(fs::copy(out_dir.join("crtbegin.o"), out_dir.join("crtbeginS.o"))); + t!(fs::copy(out_dir.join("crtend.o"), out_dir.join("crtendS.o"))); + out_dir + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Libunwind { + pub target: TargetSelection, +} + +impl Step for Libunwind { + type Output = PathBuf; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/llvm-project/libunwind") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Libunwind { target: run.target }); + } + + /// Build libunwind.a + fn run(self, builder: &Builder<'_>) -> Self::Output { + builder.update_submodule(&Path::new("src/llvm-project")); + + if builder.config.dry_run() { + return PathBuf::new(); + } + + let out_dir = builder.native_dir(self.target).join("libunwind"); + let root = builder.src.join("src/llvm-project/libunwind"); + + if up_to_date(&root, &out_dir.join("libunwind.a")) { + return out_dir; + } + + let _guard = builder.msg_unstaged(Kind::Build, "libunwind.a", self.target); + t!(fs::create_dir_all(&out_dir)); + + let mut cc_cfg = cc::Build::new(); + let mut cpp_cfg = cc::Build::new(); + + cpp_cfg.cpp(true); + cpp_cfg.cpp_set_stdlib(None); + cpp_cfg.flag("-nostdinc++"); + cpp_cfg.flag("-fno-exceptions"); + cpp_cfg.flag("-fno-rtti"); + cpp_cfg.flag_if_supported("-fvisibility-global-new-delete-hidden"); + + for cfg in [&mut cc_cfg, &mut cpp_cfg].iter_mut() { + if let Some(ar) = builder.ar(self.target) { + cfg.archiver(ar); + } + cfg.target(&self.target.triple); + cfg.host(&builder.config.build.triple); + cfg.warnings(false); + cfg.debug(false); + // get_compiler() need set opt_level first. 
+ cfg.opt_level(3); + cfg.flag("-fstrict-aliasing"); + cfg.flag("-funwind-tables"); + cfg.flag("-fvisibility=hidden"); + cfg.define("_LIBUNWIND_DISABLE_VISIBILITY_ANNOTATIONS", None); + cfg.include(root.join("include")); + cfg.cargo_metadata(false); + cfg.out_dir(&out_dir); + + if self.target.contains("x86_64-fortanix-unknown-sgx") { + cfg.static_flag(true); + cfg.flag("-fno-stack-protector"); + cfg.flag("-ffreestanding"); + cfg.flag("-fexceptions"); + + // easiest way to undefine since no API available in cc::Build to undefine + cfg.flag("-U_FORTIFY_SOURCE"); + cfg.define("_FORTIFY_SOURCE", "0"); + cfg.define("RUST_SGX", "1"); + cfg.define("__NO_STRING_INLINES", None); + cfg.define("__NO_MATH_INLINES", None); + cfg.define("_LIBUNWIND_IS_BAREMETAL", None); + cfg.define("__LIBUNWIND_IS_NATIVE_ONLY", None); + cfg.define("NDEBUG", None); + } + if self.target.contains("windows") { + cfg.define("_LIBUNWIND_HIDE_SYMBOLS", "1"); + cfg.define("_LIBUNWIND_IS_NATIVE_ONLY", "1"); + } + } + + cc_cfg.compiler(builder.cc(self.target)); + if let Ok(cxx) = builder.cxx(self.target) { + cpp_cfg.compiler(cxx); + } else { + cc_cfg.compiler(builder.cc(self.target)); + } + + // Don't set this for clang + // By default, Clang builds C code in GNU C17 mode. + // By default, Clang builds C++ code according to the C++98 standard, + // with many C++11 features accepted as extensions. + if cc_cfg.get_compiler().is_like_gnu() { + cc_cfg.flag("-std=c99"); + } + if cpp_cfg.get_compiler().is_like_gnu() { + cpp_cfg.flag("-std=c++11"); + } + + if self.target.contains("x86_64-fortanix-unknown-sgx") || self.target.contains("musl") { + // use the same GCC C compiler command to compile C++ code so we do not need to setup the + // C++ compiler env variables on the builders. + // Don't set this for clang++, as clang++ is able to compile this without libc++. 
+ if cpp_cfg.get_compiler().is_like_gnu() { + cpp_cfg.cpp(false); + cpp_cfg.compiler(builder.cc(self.target)); + } + } + + let mut c_sources = vec![ + "Unwind-sjlj.c", + "UnwindLevel1-gcc-ext.c", + "UnwindLevel1.c", + "UnwindRegistersRestore.S", + "UnwindRegistersSave.S", + ]; + + let cpp_sources = vec!["Unwind-EHABI.cpp", "Unwind-seh.cpp", "libunwind.cpp"]; + let cpp_len = cpp_sources.len(); + + if self.target.contains("x86_64-fortanix-unknown-sgx") { + c_sources.push("UnwindRustSgx.c"); + } + + for src in c_sources { + cc_cfg.file(root.join("src").join(src).canonicalize().unwrap()); + } + + for src in &cpp_sources { + cpp_cfg.file(root.join("src").join(src).canonicalize().unwrap()); + } + + cpp_cfg.compile("unwind-cpp"); + + // FIXME: https://github.com/alexcrichton/cc-rs/issues/545#issuecomment-679242845 + let mut count = 0; + for entry in fs::read_dir(&out_dir).unwrap() { + let file = entry.unwrap().path().canonicalize().unwrap(); + if file.is_file() && file.extension() == Some(OsStr::new("o")) { + // file name starts with "Unwind-EHABI", "Unwind-seh" or "libunwind" + let file_name = file.file_name().unwrap().to_str().expect("UTF-8 file name"); + if cpp_sources.iter().any(|f| file_name.starts_with(&f[..f.len() - 4])) { + cc_cfg.object(&file); + count += 1; + } + } + } + assert_eq!(cpp_len, count, "Can't get object files from {:?}", &out_dir); + + cc_cfg.compile("unwind"); + out_dir + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +pub(crate) mod check; +pub(crate) mod clean; +pub(crate) mod compile; +pub(crate) mod dist; +pub(crate) mod doc; +pub(crate) mod format; +pub(crate) mod install; +pub(crate) mod llvm; +pub(crate) mod run; +pub(crate) mod setup; +pub(crate) mod suggest; +pub(crate) mod synthetic_targets; +pub(crate) mod test; +pub(crate) mod tool; +pub(crate) mod toolstate; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/run.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/run.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/run.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/run.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,301 @@ +use std::path::PathBuf; +use std::process::Command; + +use crate::core::build_steps::dist::distdir; +use crate::core::build_steps::test; +use crate::core::build_steps::tool::{self, SourceType, Tool}; +use crate::core::builder::{Builder, RunConfig, ShouldRun, Step}; +use crate::core::config::flags::get_completion; +use crate::core::config::TargetSelection; +use crate::utils::helpers::output; +use crate::Mode; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct ExpandYamlAnchors; + +impl Step for ExpandYamlAnchors { + type Output = (); + + /// Runs the `expand-yaml_anchors` tool. + /// + /// This tool in `src/tools` reads the CI configuration files written in YAML and expands the + /// anchors in them, since GitHub Actions doesn't support them. 
+ fn run(self, builder: &Builder<'_>) { + builder.info("Expanding YAML anchors in the GitHub Actions configuration"); + builder.run_delaying_failure( + &mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src), + ); + } + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/expand-yaml-anchors") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(ExpandYamlAnchors); + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct BuildManifest; + +impl Step for BuildManifest { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/build-manifest") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(BuildManifest); + } + + fn run(self, builder: &Builder<'_>) { + // This gets called by `promote-release` + // (https://github.com/rust-lang/promote-release). + let mut cmd = builder.tool_cmd(Tool::BuildManifest); + let sign = builder.config.dist_sign_folder.as_ref().unwrap_or_else(|| { + panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n") + }); + let addr = builder.config.dist_upload_addr.as_ref().unwrap_or_else(|| { + panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n") + }); + + let today = output(Command::new("date").arg("+%Y-%m-%d")); + + cmd.arg(sign); + cmd.arg(distdir(builder)); + cmd.arg(today.trim()); + cmd.arg(addr); + cmd.arg(&builder.config.channel); + + builder.create_dir(&distdir(builder)); + builder.run(&mut cmd); + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct BumpStage0; + +impl Step for BumpStage0 { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/bump-stage0") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(BumpStage0); + } + + fn run(self, builder: &Builder<'_>) -> Self::Output { + let mut cmd = builder.tool_cmd(Tool::BumpStage0); + cmd.args(builder.config.args()); + builder.run(&mut cmd); + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct ReplaceVersionPlaceholder; + +impl Step for ReplaceVersionPlaceholder { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/replace-version-placeholder") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(ReplaceVersionPlaceholder); + } + + fn run(self, builder: &Builder<'_>) -> Self::Output { + let mut cmd = builder.tool_cmd(Tool::ReplaceVersionPlaceholder); + cmd.arg(&builder.src); + builder.run(&mut cmd); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Miri { + stage: u32, + host: TargetSelection, + target: TargetSelection, +} + +impl Step for Miri { + type Output = (); + const ONLY_HOSTS: bool = false; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/miri") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Miri { + stage: run.builder.top_stage, + host: run.build_triple(), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) { + let stage = self.stage; + let host = self.host; + let target = self.target; + let compiler = builder.compiler(stage, host); + + let miri = builder + .ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() }) + .expect("in-tree tool"); + let miri_sysroot = test::Miri::build_miri_sysroot(builder, compiler, &miri, target); + + // # Run 
miri. + // Running it via `cargo run` as that figures out the right dylib path. + // add_rustc_lib_path does not add the path that contains librustc_driver-<...>.so. + let mut miri = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + host, + "run", + "src/tools/miri", + SourceType::InTree, + &[], + ); + miri.add_rustc_lib_path(builder, compiler); + // Forward arguments. + miri.arg("--").arg("--target").arg(target.rustc_target_arg()); + miri.args(builder.config.args()); + + // miri tests need to know about the stage sysroot + miri.env("MIRI_SYSROOT", &miri_sysroot); + + let mut miri = Command::from(miri); + builder.run(&mut miri); + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct CollectLicenseMetadata; + +impl Step for CollectLicenseMetadata { + type Output = PathBuf; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/collect-license-metadata") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(CollectLicenseMetadata); + } + + fn run(self, builder: &Builder<'_>) -> Self::Output { + let Some(reuse) = &builder.config.reuse else { + panic!("REUSE is required to collect the license metadata"); + }; + + // Temporary location, it will be moved to src/etc once it's accurate. + let dest = builder.out.join("license-metadata.json"); + + let mut cmd = builder.tool_cmd(Tool::CollectLicenseMetadata); + cmd.env("REUSE_EXE", reuse); + cmd.env("DEST", &dest); + builder.run(&mut cmd); + + dest + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct GenerateCopyright; + +impl Step for GenerateCopyright { + type Output = PathBuf; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/generate-copyright") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(GenerateCopyright); + } + + fn run(self, builder: &Builder<'_>) -> Self::Output { + let license_metadata = builder.ensure(CollectLicenseMetadata); + + // Temporary location, it will be moved to the proper one once it's accurate. + let dest = builder.out.join("COPYRIGHT.md"); + + let mut cmd = builder.tool_cmd(Tool::GenerateCopyright); + cmd.env("LICENSE_METADATA", &license_metadata); + cmd.env("DEST", &dest); + builder.run(&mut cmd); + + dest + } +} + +#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +pub struct GenerateWindowsSys; + +impl Step for GenerateWindowsSys { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/generate-windows-sys") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(GenerateWindowsSys); + } + + fn run(self, builder: &Builder<'_>) { + let mut cmd = builder.tool_cmd(Tool::GenerateWindowsSys); + cmd.arg(&builder.src); + builder.run(&mut cmd); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct GenerateCompletions; + +macro_rules! generate_completions { + ( $( ( $shell:ident, $filename:expr ) ),* ) => { + $( + if let Some(comp) = get_completion($shell, &$filename) { + std::fs::write(&$filename, comp).expect(&format!("writing {} completion", stringify!($shell))); + } + )* + }; +} + +impl Step for GenerateCompletions { + type Output = (); + + /// Uses `clap_complete` to generate shell completions. 
+ fn run(self, builder: &Builder<'_>) { + use clap_complete::shells::{Bash, Fish, PowerShell, Zsh}; + + generate_completions!( + (Bash, builder.src.join("src/etc/completions/x.py.sh")), + (Zsh, builder.src.join("src/etc/completions/x.py.zsh")), + (Fish, builder.src.join("src/etc/completions/x.py.fish")), + (PowerShell, builder.src.join("src/etc/completions/x.py.ps1")) + ); + } + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("generate-completions") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(GenerateCompletions); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/setup.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/setup.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/setup.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/setup.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,623 @@ +use crate::core::builder::{Builder, RunConfig, ShouldRun, Step}; +use crate::Config; +use crate::{t, CONFIG_CHANGE_HISTORY}; +use sha2::Digest; +use std::env::consts::EXE_SUFFIX; +use std::fmt::Write as _; +use std::fs::File; +use std::io::Write; +use std::path::{Path, PathBuf, MAIN_SEPARATOR}; +use std::process::Command; +use std::str::FromStr; +use std::{fmt, fs, io}; + +#[cfg(test)] +#[path = "../../tests/setup.rs"] +mod tests; + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub enum Profile { + Compiler, + Codegen, + Library, + Tools, + Dist, + None, +} + +/// A list of historical hashes of `src/etc/rust_analyzer_settings.json`. +/// New entries should be appended whenever this is updated so we can detect +/// outdated vs. user-modified settings files. +static SETTINGS_HASHES: &[&str] = &[ + "ea67e259dedf60d4429b6c349a564ffcd1563cf41c920a856d1f5b16b4701ac8", + "56e7bf011c71c5d81e0bf42e84938111847a810eee69d906bba494ea90b51922", + "af1b5efe196aed007577899db9dae15d6dbc923d6fa42fa0934e68617ba9bbe0", + "3468fea433c25fff60be6b71e8a215a732a7b1268b6a83bf10d024344e140541", + "47d227f424bf889b0d899b9cc992d5695e1b78c406e183cd78eafefbe5488923", + "b526bd58d0262dd4dda2bff5bc5515b705fb668a46235ace3e057f807963a11a", +]; +static RUST_ANALYZER_SETTINGS: &str = include_str!("../../../../etc/rust_analyzer_settings.json"); + +impl Profile { + fn include_path(&self, src_path: &Path) -> PathBuf { + PathBuf::from(format!("{}/src/bootstrap/defaults/config.{}.toml", src_path.display(), self)) + } + + pub fn all() -> impl Iterator { + use Profile::*; + // N.B. these are ordered by how they are displayed, not alphabetically + [Library, Compiler, Codegen, Tools, Dist, None].iter().copied() + } + + pub fn purpose(&self) -> String { + use Profile::*; + match self { + Library => "Contribute to the standard library", + Compiler => "Contribute to the compiler itself", + Codegen => "Contribute to the compiler, and also modify LLVM or codegen", + Tools => "Contribute to tools which depend on the compiler, but do not modify it directly (e.g. 
rustdoc, clippy, miri)", + Dist => "Install Rust from source", + None => "Do not modify `config.toml`" + } + .to_string() + } + + pub fn all_for_help(indent: &str) -> String { + let mut out = String::new(); + for choice in Profile::all() { + writeln!(&mut out, "{}{}: {}", indent, choice, choice.purpose()).unwrap(); + } + out + } + + pub fn as_str(&self) -> &'static str { + match self { + Profile::Compiler => "compiler", + Profile::Codegen => "codegen", + Profile::Library => "library", + Profile::Tools => "tools", + Profile::Dist => "dist", + Profile::None => "none", + } + } +} + +impl FromStr for Profile { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "lib" | "library" => Ok(Profile::Library), + "compiler" => Ok(Profile::Compiler), + "llvm" | "codegen" => Ok(Profile::Codegen), + "maintainer" | "dist" | "user" => Ok(Profile::Dist), + "tools" | "tool" | "rustdoc" | "clippy" | "miri" | "rustfmt" | "rls" => { + Ok(Profile::Tools) + } + "none" => Ok(Profile::None), + _ => Err(format!("unknown profile: '{s}'")), + } + } +} + +impl fmt::Display for Profile { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.as_str()) + } +} + +impl Step for Profile { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(mut run: ShouldRun<'_>) -> ShouldRun<'_> { + for choice in Profile::all() { + run = run.alias(choice.as_str()); + } + run + } + + fn make_run(run: RunConfig<'_>) { + if run.builder.config.dry_run() { + return; + } + + let path = &run.builder.config.config.clone().unwrap_or(PathBuf::from("config.toml")); + if path.exists() { + eprintln!(); + eprintln!( + "ERROR: you asked for a new config file, but one already exists at `{}`", + t!(path.canonicalize()).display() + ); + + match prompt_user( + "Do you wish to override the existing configuration (which will allow the setup process to continue)?: [y/N]", + ) { + Ok(Some(PromptResult::Yes)) => { + t!(fs::remove_file(path)); + } + _ => { + println!("Exiting."); + crate::exit!(1); + } + } + } + + // for Profile, `run.paths` will have 1 and only 1 element + // this is because we only accept at most 1 path from user input. + // If user calls `x.py setup` without arguments, the interactive TUI + // will guide user to provide one. + let profile = if run.paths.len() > 1 { + // HACK: `builder` runs this step with all paths if no path was passed. + t!(interactive_path()) + } else { + run.paths + .first() + .unwrap() + .assert_single_path() + .path + .as_path() + .as_os_str() + .to_str() + .unwrap() + .parse() + .unwrap() + }; + + run.builder.ensure(profile); + } + + fn run(self, builder: &Builder<'_>) { + // During ./x.py setup once you select the codegen profile. + // The submodule will be downloaded. It does not work in the + // tarball case since they don't include Git and submodules + // are already included. 
+ if !builder.rust_info().is_from_tarball() { + if self == Profile::Codegen { + builder.update_submodule(&Path::new("src/llvm-project")); + } + } + setup(&builder.build.config, self) + } +} + +pub fn setup(config: &Config, profile: Profile) { + let suggestions: &[&str] = match profile { + Profile::Codegen | Profile::Compiler | Profile::None => &["check", "build", "test"], + Profile::Tools => &[ + "check", + "build", + "test tests/rustdoc*", + "test src/tools/clippy", + "test src/tools/miri", + "test src/tools/rustfmt", + ], + Profile::Library => &["check", "build", "test library/std", "doc"], + Profile::Dist => &["dist", "build"], + }; + + println!(); + + println!("To get started, try one of the following commands:"); + for cmd in suggestions { + println!("- `x.py {cmd}`"); + } + + if profile != Profile::Dist { + println!( + "For more suggestions, see https://rustc-dev-guide.rust-lang.org/building/suggested.html" + ); + } + + if profile == Profile::Tools { + eprintln!(); + eprintln!( + "NOTE: the `tools` profile sets up the `stage2` toolchain (use \ + `rustup toolchain link 'name' build/host/stage2` to use rustc)" + ) + } + + let path = &config.config.clone().unwrap_or(PathBuf::from("config.toml")); + setup_config_toml(path, profile, config); +} + +fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) { + if profile == Profile::None { + return; + } + + let latest_change_id = CONFIG_CHANGE_HISTORY.last().unwrap(); + let settings = format!( + "# Includes one of the default files in src/bootstrap/defaults\n\ + profile = \"{profile}\"\n\ + change-id = {latest_change_id}\n" + ); + + t!(fs::write(path, settings)); + + let include_path = profile.include_path(&config.src); + println!("`x.py` will now use the configuration at {}", include_path.display()); +} + +/// Creates a toolchain link for stage1 using `rustup` +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Link; +impl Step for Link { + type Output = (); + const DEFAULT: bool = true; + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("link") + } + fn make_run(run: RunConfig<'_>) { + if run.builder.config.dry_run() { + return; + } + if let [cmd] = &run.paths[..] 
{ + if cmd.assert_single_path().path.as_path().as_os_str() == "link" { + run.builder.ensure(Link); + } + } + } + fn run(self, builder: &Builder<'_>) -> Self::Output { + let config = &builder.config; + if config.dry_run() { + return; + } + let stage_path = + ["build", config.build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string()); + + if !rustup_installed() { + eprintln!("`rustup` is not installed; cannot link `stage1` toolchain"); + } else if stage_dir_exists(&stage_path[..]) && !config.dry_run() { + attempt_toolchain_link(&stage_path[..]); + } + } +} + +fn rustup_installed() -> bool { + Command::new("rustup") + .arg("--version") + .stdout(std::process::Stdio::null()) + .output() + .map_or(false, |output| output.status.success()) +} + +fn stage_dir_exists(stage_path: &str) -> bool { + match fs::create_dir(&stage_path) { + Ok(_) => true, + Err(_) => Path::new(&stage_path).exists(), + } +} + +fn attempt_toolchain_link(stage_path: &str) { + if toolchain_is_linked() { + return; + } + + if !ensure_stage1_toolchain_placeholder_exists(stage_path) { + eprintln!( + "Failed to create a template for stage 1 toolchain or confirm that it already exists" + ); + return; + } + + if try_link_toolchain(&stage_path) { + println!( + "Added `stage1` rustup toolchain; try `cargo +stage1 build` on a separate rust project to run a newly-built toolchain" + ); + } else { + eprintln!("`rustup` failed to link stage 1 build to `stage1` toolchain"); + eprintln!( + "To manually link stage 1 build to `stage1` toolchain, run:\n + `rustup toolchain link stage1 {}`", + &stage_path + ); + } +} + +fn toolchain_is_linked() -> bool { + match Command::new("rustup") + .args(&["toolchain", "list"]) + .stdout(std::process::Stdio::piped()) + .output() + { + Ok(toolchain_list) => { + if !String::from_utf8_lossy(&toolchain_list.stdout).contains("stage1") { + return false; + } + // The toolchain has already been linked. + println!( + "`stage1` toolchain already linked; not attempting to link `stage1` toolchain" + ); + } + Err(_) => { + // In this case, we don't know if the `stage1` toolchain has been linked; + // but `rustup` failed, so let's not go any further. + println!( + "`rustup` failed to list current toolchains; not attempting to link `stage1` toolchain" + ); + } + } + true +} + +fn try_link_toolchain(stage_path: &str) -> bool { + Command::new("rustup") + .stdout(std::process::Stdio::null()) + .args(&["toolchain", "link", "stage1", &stage_path]) + .output() + .map_or(false, |output| output.status.success()) +} + +fn ensure_stage1_toolchain_placeholder_exists(stage_path: &str) -> bool { + let pathbuf = PathBuf::from(stage_path); + + if fs::create_dir_all(pathbuf.join("lib")).is_err() { + return false; + }; + + let pathbuf = pathbuf.join("bin"); + if fs::create_dir_all(&pathbuf).is_err() { + return false; + }; + + let pathbuf = pathbuf.join(format!("rustc{EXE_SUFFIX}")); + + if pathbuf.exists() { + return true; + } + + // Take care not to overwrite the file + let result = File::options().append(true).create(true).open(&pathbuf); + if result.is_err() { + return false; + } + + return true; +} + +// Used to get the path for `Subcommand::Setup` +pub fn interactive_path() -> io::Result { + fn abbrev_all() -> impl Iterator { + ('a'..) + .zip(1..) 
+ .map(|(letter, number)| (letter.to_string(), number.to_string())) + .zip(Profile::all()) + } + + fn parse_with_abbrev(input: &str) -> Result { + let input = input.trim().to_lowercase(); + for ((letter, number), profile) in abbrev_all() { + if input == letter || input == number { + return Ok(profile); + } + } + input.parse() + } + + println!("Welcome to the Rust project! What do you want to do with x.py?"); + for ((letter, _), profile) in abbrev_all() { + println!("{}) {}: {}", letter, profile, profile.purpose()); + } + let template = loop { + print!( + "Please choose one ({}): ", + abbrev_all().map(|((l, _), _)| l).collect::>().join("/") + ); + io::stdout().flush()?; + let mut input = String::new(); + io::stdin().read_line(&mut input)?; + if input.is_empty() { + eprintln!("EOF on stdin, when expecting answer to question. Giving up."); + crate::exit!(1); + } + break match parse_with_abbrev(&input) { + Ok(profile) => profile, + Err(err) => { + eprintln!("ERROR: {err}"); + eprintln!("NOTE: press Ctrl+C to exit"); + continue; + } + }; + }; + Ok(template) +} + +#[derive(PartialEq)] +enum PromptResult { + Yes, // y/Y/yes + No, // n/N/no + Print, // p/P/print +} + +/// Prompt a user for a answer, looping until they enter an accepted input or nothing +fn prompt_user(prompt: &str) -> io::Result> { + let mut input = String::new(); + loop { + print!("{prompt} "); + io::stdout().flush()?; + input.clear(); + io::stdin().read_line(&mut input)?; + match input.trim().to_lowercase().as_str() { + "y" | "yes" => return Ok(Some(PromptResult::Yes)), + "n" | "no" => return Ok(Some(PromptResult::No)), + "p" | "print" => return Ok(Some(PromptResult::Print)), + "" => return Ok(None), + _ => { + eprintln!("ERROR: unrecognized option '{}'", input.trim()); + eprintln!("NOTE: press Ctrl+C to exit"); + } + }; + } +} + +/// Installs `src/etc/pre-push.sh` as a Git hook +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Hook; + +impl Step for Hook { + type Output = (); + const DEFAULT: bool = true; + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("hook") + } + fn make_run(run: RunConfig<'_>) { + if run.builder.config.dry_run() { + return; + } + if let [cmd] = &run.paths[..] { + if cmd.assert_single_path().path.as_path().as_os_str() == "hook" { + run.builder.ensure(Hook); + } + } + } + fn run(self, builder: &Builder<'_>) -> Self::Output { + let config = &builder.config; + if config.dry_run() { + return; + } + t!(install_git_hook_maybe(&config)); + } +} + +// install a git hook to automatically run tidy, if they want +fn install_git_hook_maybe(config: &Config) -> io::Result<()> { + let git = t!(config.git().args(&["rev-parse", "--git-common-dir"]).output().map(|output| { + assert!(output.status.success(), "failed to run `git`"); + PathBuf::from(t!(String::from_utf8(output.stdout)).trim()) + })); + let hooks_dir = git.join("hooks"); + let dst = hooks_dir.join("pre-push"); + if dst.exists() { + // The git hook has already been set up, or the user already has a custom hook. + return Ok(()); + } + + println!( + "\nRust's CI will automatically fail if it doesn't pass `tidy`, the internal tool for ensuring code quality. +If you'd like, x.py can install a git hook for you that will automatically run `test tidy` before +pushing your code to ensure your code is up to par. If you decide later that this behavior is +undesirable, simply delete the `pre-push` file from .git/hooks." + ); + + if prompt_user("Would you like to install the git hook?: [y/N]")? 
!= Some(PromptResult::Yes) { + println!("Ok, skipping installation!"); + return Ok(()); + } + if !hooks_dir.exists() { + // We need to (try to) create the hooks directory first. + let _ = fs::create_dir(hooks_dir); + } + let src = config.src.join("src").join("etc").join("pre-push.sh"); + match fs::hard_link(src, &dst) { + Err(e) => { + eprintln!( + "ERROR: could not create hook {}: do you already have the git hook installed?\n{}", + dst.display(), + e + ); + return Err(e); + } + Ok(_) => println!("Linked `src/etc/pre-push.sh` to `.git/hooks/pre-push`"), + }; + Ok(()) +} + +/// Sets up or displays `src/etc/rust_analyzer_settings.json` +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct Vscode; + +impl Step for Vscode { + type Output = (); + const DEFAULT: bool = true; + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("vscode") + } + fn make_run(run: RunConfig<'_>) { + if run.builder.config.dry_run() { + return; + } + if let [cmd] = &run.paths[..] { + if cmd.assert_single_path().path.as_path().as_os_str() == "vscode" { + run.builder.ensure(Vscode); + } + } + } + fn run(self, builder: &Builder<'_>) -> Self::Output { + let config = &builder.config; + if config.dry_run() { + return; + } + t!(create_vscode_settings_maybe(&config)); + } +} + +/// Create a `.vscode/settings.json` file for rustc development, or just print it +fn create_vscode_settings_maybe(config: &Config) -> io::Result<()> { + let (current_hash, historical_hashes) = SETTINGS_HASHES.split_last().unwrap(); + let vscode_settings = config.src.join(".vscode").join("settings.json"); + // If None, no settings.json exists + // If Some(true), is a previous version of settings.json + // If Some(false), is not a previous version (i.e. user modified) + // If it's up to date we can just skip this + let mut mismatched_settings = None; + if let Ok(current) = fs::read_to_string(&vscode_settings) { + let mut hasher = sha2::Sha256::new(); + hasher.update(&current); + let hash = hex::encode(hasher.finalize().as_slice()); + if hash == *current_hash { + return Ok(()); + } else if historical_hashes.contains(&hash.as_str()) { + mismatched_settings = Some(true); + } else { + mismatched_settings = Some(false); + } + } + println!( + "\nx.py can automatically install the recommended `.vscode/settings.json` file for rustc development" + ); + match mismatched_settings { + Some(true) => eprintln!( + "WARNING: existing `.vscode/settings.json` is out of date, x.py will update it" + ), + Some(false) => eprintln!( + "WARNING: existing `.vscode/settings.json` has been modified by user, x.py will back it up and replace it" + ), + _ => (), + } + let should_create = match prompt_user( + "Would you like to create/update `settings.json`, or only print suggested settings?: [y/p/N]", + )?
{ + Some(PromptResult::Yes) => true, + Some(PromptResult::Print) => false, + _ => { + println!("Ok, skipping settings!"); + return Ok(()); + } + }; + if should_create { + let path = config.src.join(".vscode"); + if !path.exists() { + fs::create_dir(&path)?; + } + let verb = match mismatched_settings { + // exists but outdated, we can replace this + Some(true) => "Updated", + // exists but user modified, back it up + Some(false) => { + // exists and is not current version or outdated, so back it up + let mut backup = vscode_settings.clone(); + backup.set_extension("json.bak"); + eprintln!("WARNING: copying `settings.json` to `settings.json.bak`"); + fs::copy(&vscode_settings, &backup)?; + "Updated" + } + _ => "Created", + }; + fs::write(&vscode_settings, &RUST_ANALYZER_SETTINGS)?; + println!("{verb} `.vscode/settings.json`"); + } else { + println!("\n{RUST_ANALYZER_SETTINGS}"); + } + Ok(()) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/suggest.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/suggest.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/suggest.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/suggest.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,78 @@ +#![cfg_attr(feature = "build-metrics", allow(unused))] + +use clap::Parser; +use std::path::PathBuf; +use std::str::FromStr; + +use crate::core::build_steps::tool::Tool; +use crate::core::builder::Builder; + +/// Suggests a list of possible `x.py` commands to run based on modified files in branch. +pub fn suggest(builder: &Builder<'_>, run: bool) { + let git_config = builder.config.git_config(); + let suggestions = builder + .tool_cmd(Tool::SuggestTests) + .env("SUGGEST_TESTS_GIT_REPOSITORY", git_config.git_repository) + .env("SUGGEST_TESTS_NIGHTLY_BRANCH", git_config.nightly_branch) + .output() + .expect("failed to run `suggest-tests` tool"); + + if !suggestions.status.success() { + println!("failed to run `suggest-tests` tool ({})", suggestions.status); + println!( + "`suggest_tests` stdout:\n{}`suggest_tests` stderr:\n{}", + String::from_utf8(suggestions.stdout).unwrap(), + String::from_utf8(suggestions.stderr).unwrap() + ); + panic!("failed to run `suggest-tests`"); + } + + let suggestions = String::from_utf8(suggestions.stdout).unwrap(); + let suggestions = suggestions + .lines() + .map(|line| { + let mut sections = line.split_ascii_whitespace(); + + // this code expects one suggestion per line in the following format: + // {some number of flags} [optional stage number] + let cmd = sections.next().unwrap(); + let stage = sections.next_back().map(|s| str::parse(s).ok()).flatten(); + let paths: Vec = sections.map(|p| PathBuf::from_str(p).unwrap()).collect(); + + (cmd, stage, paths) + }) + .collect::>(); + + if !suggestions.is_empty() { + println!("==== SUGGESTIONS ===="); + for sug in &suggestions { + print!("x {} ", sug.0); + if let Some(stage) = sug.1 { + print!("--stage {stage} "); + } + + for path in &sug.2 { + print!("{} ", path.display()); + } + println!(); + } + println!("====================="); + } else { + println!("No suggestions found!"); + return; + } + + if run { + for sug in suggestions { + let mut build: crate::Build = builder.build.clone(); + build.config.paths = sug.2; + build.config.cmd = crate::core::config::flags::Flags::parse_from(["x.py", sug.0]).cmd; + if let Some(stage) = sug.1 { + build.config.stage = stage; + } + build.build(); + } + } else { + println!("HELP: 
consider using the `--run` flag to automatically run suggested tests"); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/synthetic_targets.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/synthetic_targets.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/synthetic_targets.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/synthetic_targets.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,82 @@ +//! In some cases, parts of bootstrap need to change part of a target spec just for one or a few +//! steps. Adding these targets to rustc proper would "leak" this implementation detail of +//! bootstrap, and would make it more complex to apply additional changes if the need arises. +//! +//! To address that problem, this module implements support for "synthetic targets". Synthetic +//! targets are custom target specs generated using builtin target specs as their base. You can use +//! one of the target specs already defined in this module, or create new ones by adding a new step +//! that calls create_synthetic_target. + +use crate::core::builder::{Builder, ShouldRun, Step}; +use crate::core::config::TargetSelection; +use crate::Compiler; +use std::process::{Command, Stdio}; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub(crate) struct MirOptPanicAbortSyntheticTarget { + pub(crate) compiler: Compiler, + pub(crate) base: TargetSelection, +} + +impl Step for MirOptPanicAbortSyntheticTarget { + type Output = TargetSelection; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = false; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() + } + + fn run(self, builder: &Builder<'_>) -> Self::Output { + create_synthetic_target(builder, self.compiler, "miropt-abort", self.base, |spec| { + spec.insert("panic-strategy".into(), "abort".into()); + }) + } +} + +fn create_synthetic_target( + builder: &Builder<'_>, + compiler: Compiler, + suffix: &str, + base: TargetSelection, + customize: impl FnOnce(&mut serde_json::Map), +) -> TargetSelection { + if base.contains("synthetic") { + // This check is not strictly needed, but nothing currently needs recursive synthetic + // targets. If the need arises, removing this in the future *SHOULD* be safe. + panic!("cannot create synthetic targets with other synthetic targets as their base"); + } + + let name = format!("{base}-synthetic-{suffix}"); + let path = builder.out.join("synthetic-target-specs").join(format!("{name}.json")); + std::fs::create_dir_all(path.parent().unwrap()).unwrap(); + + if builder.config.dry_run() { + std::fs::write(&path, b"dry run\n").unwrap(); + return TargetSelection::create_synthetic(&name, path.to_str().unwrap()); + } + + let mut cmd = Command::new(builder.rustc(compiler)); + cmd.arg("--target").arg(base.rustc_target_arg()); + cmd.args(["-Zunstable-options", "--print", "target-spec-json"]); + cmd.stdout(Stdio::piped()); + + let output = cmd.spawn().unwrap().wait_with_output().unwrap(); + if !output.status.success() { + panic!("failed to gather the target spec for {base}"); + } + + let mut spec: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); + let spec_map = spec.as_object_mut().unwrap(); + + // The `is-builtin` attribute of a spec needs to be removed, otherwise rustc will complain. 
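A free-standing sketch of the recipe `create_synthetic_target` follows above: print a builtin spec as JSON, drop `is-builtin`, adjust one field, and write the result to disk. It assumes the `serde_json` crate and a nightly `rustc` on `PATH`; the target triple and output file name are illustrative, not bootstrap's.

use std::process::Command;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Ask rustc for the JSON spec of an existing target.
    let out = Command::new("rustc")
        .args(["-Zunstable-options", "--print", "target-spec-json"])
        .args(["--target", "x86_64-unknown-linux-gnu"])
        .output()?;
    assert!(out.status.success(), "failed to print the target spec");

    let mut spec: serde_json::Value = serde_json::from_slice(&out.stdout)?;
    let map = spec.as_object_mut().ok_or("target spec is not a JSON object")?;

    // rustc refuses custom specs that still claim to be builtin.
    map.remove("is-builtin");
    // The one customization this sketch applies.
    map.insert("panic-strategy".into(), "abort".into());

    std::fs::write("x86_64-synthetic-abort.json", serde_json::to_vec_pretty(&spec)?)?;
    Ok(())
}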
+ spec_map.remove("is-builtin"); + + customize(spec_map); + + std::fs::write(&path, &serde_json::to_vec_pretty(&spec).unwrap()).unwrap(); + let target = TargetSelection::create_synthetic(&name, path.to_str().unwrap()); + crate::utils::cc_detect::find_target(builder, target); + + target +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/test.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/test.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/test.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/test.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,3315 @@ +//! Implementation of the test-related targets of the build system. +//! +//! This file implements the various regression test suites that we execute on +//! our CI. + +use std::env; +use std::ffi::OsStr; +use std::ffi::OsString; +use std::fs; +use std::iter; +use std::path::{Path, PathBuf}; +use std::process::{Command, Stdio}; + +use clap_complete::shells; + +use crate::core::build_steps::compile; +use crate::core::build_steps::dist; +use crate::core::build_steps::doc::DocumentationFormat; +use crate::core::build_steps::llvm; +use crate::core::build_steps::synthetic_targets::MirOptPanicAbortSyntheticTarget; +use crate::core::build_steps::tool::{self, SourceType, Tool}; +use crate::core::build_steps::toolstate::ToolState; +use crate::core::builder::crate_description; +use crate::core::builder::{Builder, Compiler, Kind, RunConfig, ShouldRun, Step}; +use crate::core::config::flags::get_completion; +use crate::core::config::flags::Subcommand; +use crate::core::config::TargetSelection; +use crate::utils; +use crate::utils::cache::{Interned, INTERNER}; +use crate::utils::exec::BootstrapCommand; +use crate::utils::helpers::{ + self, add_link_lib_path, dylib_path, dylib_path_var, output, t, + target_supports_cranelift_backend, up_to_date, +}; +use crate::utils::render_tests::{add_flags_and_try_run_tests, try_run_tests}; +use crate::{envify, CLang, DocTests, GitRepo, Mode}; + +const ADB_TEST_DIR: &str = "/data/local/tmp/work"; + +// mir-opt tests have different variants depending on whether a target is 32bit or 64bit, and +// blessing them requires blessing with each target. To aid developers, when blessing the mir-opt +// test suite the corresponding target of the opposite pointer size is also blessed. +// +// This array serves as the known mappings between 32bit and 64bit targets. If you're developing on +// a target where a target with the opposite pointer size exists, feel free to add it here. +const MIR_OPT_BLESS_TARGET_MAPPING: &[(&str, &str)] = &[ + // (32bit, 64bit) + ("i686-unknown-linux-gnu", "x86_64-unknown-linux-gnu"), + ("i686-unknown-linux-musl", "x86_64-unknown-linux-musl"), + ("i686-pc-windows-msvc", "x86_64-pc-windows-msvc"), + ("i686-pc-windows-gnu", "x86_64-pc-windows-gnu"), + ("i686-apple-darwin", "x86_64-apple-darwin"), + // ARM Macs don't have a corresponding 32-bit target that they can (easily) + // build for, so there is no entry for "aarch64-apple-darwin" here. 
+]; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct CrateBootstrap { + path: Interned, + host: TargetSelection, +} + +impl Step for CrateBootstrap { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/jsondoclint") + .path("src/tools/suggest-tests") + .path("src/tools/replace-version-placeholder") + .alias("tidyselftest") + } + + fn make_run(run: RunConfig<'_>) { + for path in run.paths { + let path = INTERNER.intern_path(path.assert_single_path().path.clone()); + run.builder.ensure(CrateBootstrap { host: run.target, path }); + } + } + + fn run(self, builder: &Builder<'_>) { + let bootstrap_host = builder.config.build; + let compiler = builder.compiler(0, bootstrap_host); + let mut path = self.path.to_str().unwrap(); + if path == "tidyselftest" { + path = "src/tools/tidy"; + } + + let cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolBootstrap, + bootstrap_host, + "test", + path, + SourceType::InTree, + &[], + ); + let crate_name = path.rsplit_once('/').unwrap().1; + run_cargo_test(cargo, &[], &[], crate_name, crate_name, compiler, bootstrap_host, builder); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Linkcheck { + host: TargetSelection, +} + +impl Step for Linkcheck { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler. + /// + /// This tool in `src/tools` will verify the validity of all our links in the + /// documentation to ensure we don't have a bunch of dead ones. + fn run(self, builder: &Builder<'_>) { + let host = self.host; + let hosts = &builder.hosts; + let targets = &builder.targets; + + // if we have different hosts and targets, some things may be built for + // the host (e.g. rustc) and others for the target (e.g. std). The + // documentation built for each will contain broken links to + // docs built for the other platform (e.g. rustc linking to cargo) + if (hosts != targets) && !hosts.is_empty() && !targets.is_empty() { + panic!( + "Linkcheck currently does not support builds with different hosts and targets. +You can skip linkcheck with --skip src/tools/linkchecker" + ); + } + + builder.info(&format!("Linkcheck ({host})")); + + // Test the linkchecker itself. + let bootstrap_host = builder.config.build; + let compiler = builder.compiler(0, bootstrap_host); + + let cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolBootstrap, + bootstrap_host, + "test", + "src/tools/linkchecker", + SourceType::InTree, + &[], + ); + run_cargo_test( + cargo, + &[], + &[], + "linkchecker", + "linkchecker self tests", + compiler, + bootstrap_host, + builder, + ); + + if builder.doc_tests == DocTests::No { + return; + } + + // Build all the default documentation. + builder.default_doc(&[]); + + // Build the linkchecker before calling `msg`, since GHA doesn't support nested groups. + let mut linkchecker = builder.tool_cmd(Tool::Linkchecker); + + // Run the linkchecker. 
+ let _guard = + builder.msg(Kind::Test, compiler.stage, "Linkcheck", bootstrap_host, bootstrap_host); + let _time = helpers::timeit(&builder); + builder.run_delaying_failure(linkchecker.arg(builder.out.join(host.triple).join("doc"))); + } + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + let run = run.path("src/tools/linkchecker"); + run.default_condition(builder.config.docs) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Linkcheck { host: run.target }); + } +} + +fn check_if_tidy_is_installed() -> bool { + Command::new("tidy") + .arg("--version") + .stdout(Stdio::null()) + .status() + .map_or(false, |status| status.success()) +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct HtmlCheck { + target: TargetSelection, +} + +impl Step for HtmlCheck { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let run = run.path("src/tools/html-checker"); + run.lazy_default_condition(Box::new(check_if_tidy_is_installed)) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(HtmlCheck { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + if !check_if_tidy_is_installed() { + eprintln!("not running HTML-check tool because `tidy` is missing"); + eprintln!( + "Note that `tidy` is not the in-tree `src/tools/tidy` but needs to be installed" + ); + panic!("Cannot run html-check tests"); + } + // Ensure that a few different kinds of documentation are available. + builder.default_doc(&[]); + builder.ensure(crate::core::build_steps::doc::Rustc::new( + builder.top_stage, + self.target, + builder, + )); + + builder.run_delaying_failure( + builder.tool_cmd(Tool::HtmlChecker).arg(builder.doc_out(self.target)), + ); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Cargotest { + stage: u32, + host: TargetSelection, +} + +impl Step for Cargotest { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/cargotest") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Cargotest { stage: run.builder.top_stage, host: run.target }); + } + + /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler. + /// + /// This tool in `src/tools` will check out a few Rust projects and run `cargo + /// test` to ensure that we don't regress the test suites there. + fn run(self, builder: &Builder<'_>) { + let compiler = builder.compiler(self.stage, self.host); + builder.ensure(compile::Rustc::new(compiler, compiler.host)); + let cargo = builder.ensure(tool::Cargo { compiler, target: compiler.host }); + + // Note that this is a short, cryptic, and not scoped directory name. This + // is currently to minimize the length of path on Windows where we otherwise + // quickly run into path name limit constraints. 
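`check_if_tidy_is_installed` above probes for an external binary by running it with `--version` and checking only the exit status; the same probe as a stand-alone sketch (the tool names checked here are arbitrary):

use std::process::{Command, Stdio};

fn is_installed(tool: &str) -> bool {
    Command::new(tool)
        .arg("--version")
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()
        .map_or(false, |status| status.success())
}

fn main() {
    for tool in ["tidy", "node", "npm"] {
        println!("{tool}: {}", if is_installed(tool) { "found" } else { "missing" });
    }
}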
+ let out_dir = builder.out.join("ct"); + t!(fs::create_dir_all(&out_dir)); + + let _time = helpers::timeit(&builder); + let mut cmd = builder.tool_cmd(Tool::CargoTest); + builder.run_delaying_failure( + cmd.arg(&cargo) + .arg(&out_dir) + .args(builder.config.test_args()) + .env("RUSTC", builder.rustc(compiler)) + .env("RUSTDOC", builder.rustdoc(compiler)), + ); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Cargo { + stage: u32, + host: TargetSelection, +} + +impl Step for Cargo { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/cargo") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Cargo { stage: run.builder.top_stage, host: run.target }); + } + + /// Runs `cargo test` for `cargo` packaged with Rust. + fn run(self, builder: &Builder<'_>) { + let compiler = builder.compiler(self.stage, self.host); + + builder.ensure(tool::Cargo { compiler, target: self.host }); + let cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + self.host, + "test", + "src/tools/cargo", + SourceType::Submodule, + &[], + ); + + // NOTE: can't use `run_cargo_test` because we need to overwrite `PATH` + let mut cargo = prepare_cargo_test(cargo, &[], &[], "cargo", compiler, self.host, builder); + + // Don't run cross-compile tests, we may not have cross-compiled libstd libs + // available. + cargo.env("CFG_DISABLE_CROSS_TESTS", "1"); + // Forcibly disable tests using nightly features since any changes to + // those features won't be able to land. + cargo.env("CARGO_TEST_DISABLE_NIGHTLY", "1"); + cargo.env("PATH", &path_for_cargo(builder, compiler)); + + #[cfg(feature = "build-metrics")] + builder.metrics.begin_test_suite( + build_helper::metrics::TestSuiteMetadata::CargoPackage { + crates: vec!["cargo".into()], + target: self.host.triple.to_string(), + host: self.host.triple.to_string(), + stage: self.stage, + }, + builder, + ); + + let _time = helpers::timeit(&builder); + add_flags_and_try_run_tests(builder, &mut cargo); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct RustAnalyzer { + stage: u32, + host: TargetSelection, +} + +impl Step for RustAnalyzer { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/rust-analyzer") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Self { stage: run.builder.top_stage, host: run.target }); + } + + /// Runs `cargo test` for rust-analyzer + fn run(self, builder: &Builder<'_>) { + let stage = self.stage; + let host = self.host; + let compiler = builder.compiler(stage, host); + + // We don't need to build the whole Rust Analyzer for the proc-macro-srv test suite, + // but we do need the standard library to be present. + builder.ensure(compile::Std::new(compiler, host)); + + let workspace_path = "src/tools/rust-analyzer"; + // until the whole RA test suite runs on `i686`, we only run + // `proc-macro-srv` tests + let crate_path = "src/tools/rust-analyzer/crates/proc-macro-srv"; + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolStd, + host, + "test", + crate_path, + SourceType::InTree, + &["sysroot-abi".to_owned()], + ); + cargo.allow_features(tool::RustAnalyzer::ALLOW_FEATURES); + + let dir = builder.src.join(workspace_path); + // needed by rust-analyzer to find its own text fixtures, cf. 
+ // https://github.com/rust-analyzer/expect-test/issues/33 + cargo.env("CARGO_WORKSPACE_DIR", &dir); + + // RA's test suite tries to write to the source directory, that can't + // work in Rust CI + cargo.env("SKIP_SLOW_TESTS", "1"); + + cargo.add_rustc_lib_path(builder, compiler); + run_cargo_test(cargo, &[], &[], "rust-analyzer", "rust-analyzer", compiler, host, builder); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Rustfmt { + stage: u32, + host: TargetSelection, +} + +impl Step for Rustfmt { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/rustfmt") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Rustfmt { stage: run.builder.top_stage, host: run.target }); + } + + /// Runs `cargo test` for rustfmt. + fn run(self, builder: &Builder<'_>) { + let stage = self.stage; + let host = self.host; + let compiler = builder.compiler(stage, host); + + builder + .ensure(tool::Rustfmt { compiler, target: self.host, extra_features: Vec::new() }) + .expect("in-tree tool"); + + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + host, + "test", + "src/tools/rustfmt", + SourceType::InTree, + &[], + ); + + let dir = testdir(builder, compiler.host); + t!(fs::create_dir_all(&dir)); + cargo.env("RUSTFMT_TEST_DIR", dir); + + cargo.add_rustc_lib_path(builder, compiler); + + run_cargo_test(cargo, &[], &[], "rustfmt", "rustfmt", compiler, host, builder); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct RustDemangler { + stage: u32, + host: TargetSelection, +} + +impl Step for RustDemangler { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/rust-demangler") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustDemangler { stage: run.builder.top_stage, host: run.target }); + } + + /// Runs `cargo test` for rust-demangler. + fn run(self, builder: &Builder<'_>) { + let stage = self.stage; + let host = self.host; + let compiler = builder.compiler(stage, host); + + let rust_demangler = builder + .ensure(tool::RustDemangler { compiler, target: self.host, extra_features: Vec::new() }) + .expect("in-tree tool"); + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + host, + "test", + "src/tools/rust-demangler", + SourceType::InTree, + &[], + ); + + let dir = testdir(builder, compiler.host); + t!(fs::create_dir_all(&dir)); + + cargo.env("RUST_DEMANGLER_DRIVER_PATH", rust_demangler); + cargo.add_rustc_lib_path(builder, compiler); + + run_cargo_test( + cargo, + &[], + &[], + "rust-demangler", + "rust-demangler", + compiler, + host, + builder, + ); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Miri { + stage: u32, + host: TargetSelection, + target: TargetSelection, +} + +impl Miri { + /// Run `cargo miri setup` for the given target, return where the Miri sysroot was put. 
+ pub fn build_miri_sysroot( + builder: &Builder<'_>, + compiler: Compiler, + miri: &Path, + target: TargetSelection, + ) -> String { + let miri_sysroot = builder.out.join(compiler.host.triple).join("miri-sysroot"); + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + compiler.host, + "run", + "src/tools/miri/cargo-miri", + SourceType::InTree, + &[], + ); + cargo.add_rustc_lib_path(builder, compiler); + cargo.arg("--").arg("miri").arg("setup"); + cargo.arg("--target").arg(target.rustc_target_arg()); + + // Tell `cargo miri setup` where to find the sources. + cargo.env("MIRI_LIB_SRC", builder.src.join("library")); + // Tell it where to find Miri. + cargo.env("MIRI", &miri); + // Tell it where to put the sysroot. + cargo.env("MIRI_SYSROOT", &miri_sysroot); + // Debug things. + cargo.env("RUST_BACKTRACE", "1"); + + let mut cargo = Command::from(cargo); + let _guard = builder.msg( + Kind::Build, + compiler.stage + 1, + "miri sysroot", + compiler.host, + compiler.host, + ); + builder.run(&mut cargo); + + // # Determine where Miri put its sysroot. + // To this end, we run `cargo miri setup --print-sysroot` and capture the output. + // (We do this separately from the above so that when the setup actually + // happens we get some output.) + // We re-use the `cargo` from above. + cargo.arg("--print-sysroot"); + + // FIXME: Is there a way in which we can re-use the usual `run` helpers? + if builder.config.dry_run() { + String::new() + } else { + builder.verbose(&format!("running: {cargo:?}")); + let out = + cargo.output().expect("We already ran `cargo miri setup` before and that worked"); + assert!(out.status.success(), "`cargo miri setup` returned with non-0 exit code"); + // Output is "\n". + let stdout = String::from_utf8(out.stdout) + .expect("`cargo miri setup` stdout is not valid UTF-8"); + let sysroot = stdout.trim_end(); + builder.verbose(&format!("`cargo miri setup --print-sysroot` said: {sysroot:?}")); + sysroot.to_owned() + } + } +} + +impl Step for Miri { + type Output = (); + const ONLY_HOSTS: bool = false; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/miri") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Miri { + stage: run.builder.top_stage, + host: run.build_triple(), + target: run.target, + }); + } + + /// Runs `cargo test` for miri. + fn run(self, builder: &Builder<'_>) { + let stage = self.stage; + let host = self.host; + let target = self.target; + let compiler = builder.compiler(stage, host); + // We need the stdlib for the *next* stage, as it was built with this compiler that also built Miri. + // Except if we are at stage 2, the bootstrap loop is complete and we can stick with our current stage. + let compiler_std = builder.compiler(if stage < 2 { stage + 1 } else { stage }, host); + + let miri = builder + .ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() }) + .expect("in-tree tool"); + let _cargo_miri = builder + .ensure(tool::CargoMiri { compiler, target: self.host, extra_features: Vec::new() }) + .expect("in-tree tool"); + // The stdlib we need might be at a different stage. And just asking for the + // sysroot does not seem to populate it, so we do that first. + builder.ensure(compile::Std::new(compiler_std, host)); + let sysroot = builder.sysroot(compiler_std); + // We also need a Miri sysroot. + let miri_sysroot = Miri::build_miri_sysroot(builder, compiler, &miri, target); + + // # Run `cargo test`. 
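`build_miri_sysroot` above re-runs the setup command with `--print-sysroot` and keeps the trimmed stdout as the sysroot path; a simplified, free-standing sketch of that capture step, using plain `rustc` instead of bootstrap's helpers:

use std::process::Command;

fn print_sysroot(rustc: &str) -> String {
    let out = Command::new(rustc)
        .args(["--print", "sysroot"])
        .output()
        .expect("failed to spawn rustc");
    assert!(out.status.success(), "`--print sysroot` returned a non-0 exit code");
    // The command prints the path followed by a newline; keep only the path.
    String::from_utf8(out.stdout)
        .expect("printed sysroot is not valid UTF-8")
        .trim_end()
        .to_owned()
}

fn main() {
    println!("sysroot: {}", print_sysroot("rustc"));
}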
+ let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + host, + "test", + "src/tools/miri", + SourceType::InTree, + &[], + ); + let _guard = builder.msg_sysroot_tool(Kind::Test, compiler.stage, "miri", host, target); + + cargo.add_rustc_lib_path(builder, compiler); + + // miri tests need to know about the stage sysroot + cargo.env("MIRI_SYSROOT", &miri_sysroot); + cargo.env("MIRI_HOST_SYSROOT", sysroot); + cargo.env("MIRI", &miri); + if builder.config.locked_deps { + // enforce lockfiles + cargo.env("CARGO_EXTRA_FLAGS", "--locked"); + } + + // Set the target. + cargo.env("MIRI_TEST_TARGET", target.rustc_target_arg()); + + // This can NOT be `run_cargo_test` since the Miri test runner + // does not understand the flags added by `add_flags_and_try_run_test`. + let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder); + { + let _time = helpers::timeit(&builder); + builder.run(&mut cargo); + } + + // Run it again for mir-opt-level 4 to catch some miscompilations. + if builder.config.test_args().is_empty() { + cargo.env("MIRIFLAGS", "-O -Zmir-opt-level=4 -Cdebug-assertions=yes"); + // Optimizations can change backtraces + cargo.env("MIRI_SKIP_UI_CHECKS", "1"); + // `MIRI_SKIP_UI_CHECKS` and `RUSTC_BLESS` are incompatible + cargo.env_remove("RUSTC_BLESS"); + // Optimizations can change error locations and remove UB so don't run `fail` tests. + cargo.args(&["tests/pass", "tests/panic"]); + + let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder); + { + let _time = helpers::timeit(&builder); + builder.run(&mut cargo); + } + } + + // # Run `cargo miri test`. + // This is just a smoke test (Miri's own CI invokes this in a bunch of different ways and ensures + // that we get the desired output), but that is sufficient to make sure that the libtest harness + // itself executes properly under Miri. + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + host, + "run", + "src/tools/miri/cargo-miri", + SourceType::Submodule, + &[], + ); + cargo.add_rustc_lib_path(builder, compiler); + cargo.arg("--").arg("miri").arg("test"); + if builder.config.locked_deps { + cargo.arg("--locked"); + } + cargo + .arg("--manifest-path") + .arg(builder.src.join("src/tools/miri/test-cargo-miri/Cargo.toml")); + cargo.arg("--target").arg(target.rustc_target_arg()); + cargo.arg("--tests"); // don't run doctests, they are too confused by the staging + cargo.arg("--").args(builder.config.test_args()); + + // Tell `cargo miri` where to find things. + cargo.env("MIRI_SYSROOT", &miri_sysroot); + cargo.env("MIRI_HOST_SYSROOT", sysroot); + cargo.env("MIRI", &miri); + // Debug things. + cargo.env("RUST_BACKTRACE", "1"); + + let mut cargo = Command::from(cargo); + { + let _time = helpers::timeit(&builder); + builder.run(&mut cargo); + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct CompiletestTest { + host: TargetSelection, +} + +impl Step for CompiletestTest { + type Output = (); + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/compiletest") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(CompiletestTest { host: run.target }); + } + + /// Runs `cargo test` for compiletest. + fn run(self, builder: &Builder<'_>) { + let host = self.host; + let compiler = builder.compiler(builder.top_stage, host); + + // We need `ToolStd` for the locally-built sysroot because + // compiletest uses unstable features of the `test` crate. 
+ builder.ensure(compile::Std::new(compiler, host)); + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolStd, + host, + "test", + "src/tools/compiletest", + SourceType::InTree, + &[], + ); + cargo.allow_features("test"); + run_cargo_test( + cargo, + &[], + &[], + "compiletest", + "compiletest self test", + compiler, + host, + builder, + ); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Clippy { + stage: u32, + host: TargetSelection, +} + +impl Step for Clippy { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = false; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/clippy") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Clippy { stage: run.builder.top_stage, host: run.target }); + } + + /// Runs `cargo test` for clippy. + fn run(self, builder: &Builder<'_>) { + let stage = self.stage; + let host = self.host; + let compiler = builder.compiler(stage, host); + + builder + .ensure(tool::Clippy { compiler, target: self.host, extra_features: Vec::new() }) + .expect("in-tree tool"); + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + host, + "test", + "src/tools/clippy", + SourceType::InTree, + &[], + ); + + cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler)); + cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler)); + let host_libs = builder.stage_out(compiler, Mode::ToolRustc).join(builder.cargo_dir()); + cargo.env("HOST_LIBS", host_libs); + + cargo.add_rustc_lib_path(builder, compiler); + let mut cargo = prepare_cargo_test(cargo, &[], &[], "clippy", compiler, host, builder); + + let _guard = builder.msg_sysroot_tool(Kind::Test, compiler.stage, "clippy", host, host); + + // Clippy reports errors if it blessed the outputs + if builder.run_cmd(BootstrapCommand::from(&mut cargo).allow_failure()) { + // The tests succeeded; nothing to do. + return; + } + + if !builder.config.cmd.bless() { + crate::exit!(1); + } + } +} + +fn path_for_cargo(builder: &Builder<'_>, compiler: Compiler) -> OsString { + // Configure PATH to find the right rustc. NB. we have to use PATH + // and not RUSTC because the Cargo test suite has tests that will + // fail if rustc is not spelled `rustc`. 
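The comment above explains why the Cargo test suite must find `rustc` via `PATH`; the body that follows builds that `PATH` by prepending the sysroot's `bin` directory. A stand-alone sketch of the same `std::env` plumbing (the prepended directory is made up):

use std::env;
use std::ffi::OsString;
use std::iter;
use std::path::PathBuf;

fn prepend_to_path(dir: PathBuf) -> OsString {
    let old_path = env::var_os("PATH").unwrap_or_default();
    // join_paths only fails if an entry contains the platform's separator character.
    env::join_paths(iter::once(dir).chain(env::split_paths(&old_path)))
        .expect("invalid PATH entry")
}

fn main() {
    let path = prepend_to_path(PathBuf::from("/opt/custom-rustc/bin"));
    println!("{}", path.to_string_lossy());
}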
+ let path = builder.sysroot(compiler).join("bin"); + let old_path = env::var_os("PATH").unwrap_or_default(); + env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("") +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustdocTheme { + pub compiler: Compiler, +} + +impl Step for RustdocTheme { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/rustdoc-themes") + } + + fn make_run(run: RunConfig<'_>) { + let compiler = run.builder.compiler(run.builder.top_stage, run.target); + + run.builder.ensure(RustdocTheme { compiler }); + } + + fn run(self, builder: &Builder<'_>) { + let rustdoc = builder.bootstrap_out.join("rustdoc"); + let mut cmd = builder.tool_cmd(Tool::RustdocTheme); + cmd.arg(rustdoc.to_str().unwrap()) + .arg(builder.src.join("src/librustdoc/html/static/css/rustdoc.css").to_str().unwrap()) + .env("RUSTC_STAGE", self.compiler.stage.to_string()) + .env("RUSTC_SYSROOT", builder.sysroot(self.compiler)) + .env("RUSTDOC_LIBDIR", builder.sysroot_libdir(self.compiler, self.compiler.host)) + .env("CFG_RELEASE_CHANNEL", &builder.config.channel) + .env("RUSTDOC_REAL", builder.rustdoc(self.compiler)) + .env("RUSTC_BOOTSTRAP", "1"); + if let Some(linker) = builder.linker(self.compiler.host) { + cmd.env("RUSTDOC_LINKER", linker); + } + if builder.is_fuse_ld_lld(self.compiler.host) { + cmd.env( + "RUSTDOC_LLD_NO_THREADS", + helpers::lld_flag_no_threads(self.compiler.host.contains("windows")), + ); + } + builder.run_delaying_failure(&mut cmd); + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustdocJSStd { + pub target: TargetSelection, +} + +impl Step for RustdocJSStd { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let default = run.builder.config.nodejs.is_some(); + run.suite_path("tests/rustdoc-js-std").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustdocJSStd { target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + let nodejs = + builder.config.nodejs.as_ref().expect("need nodejs to run rustdoc-js-std tests"); + let mut command = Command::new(nodejs); + command + .arg(builder.src.join("src/tools/rustdoc-js/tester.js")) + .arg("--crate-name") + .arg("std") + .arg("--resource-suffix") + .arg(&builder.version) + .arg("--doc-folder") + .arg(builder.doc_out(self.target)) + .arg("--test-folder") + .arg(builder.src.join("tests/rustdoc-js-std")); + for path in &builder.paths { + if let Some(p) = helpers::is_valid_test_suite_arg(path, "tests/rustdoc-js-std", builder) + { + if !p.ends_with(".js") { + eprintln!("A non-js file was given: `{}`", path.display()); + panic!("Cannot run rustdoc-js-std tests"); + } + command.arg("--test-file").arg(path); + } + } + builder.ensure(crate::core::build_steps::doc::Std::new( + builder.top_stage, + self.target, + builder, + DocumentationFormat::HTML, + )); + let _guard = builder.msg( + Kind::Test, + builder.top_stage, + "rustdoc-js-std", + builder.config.build, + self.target, + ); + builder.run(&mut command); + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustdocJSNotStd { + pub target: TargetSelection, + pub compiler: Compiler, +} + +impl Step for RustdocJSNotStd { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let 
default = run.builder.config.nodejs.is_some(); + run.suite_path("tests/rustdoc-js").default_condition(default) + } + + fn make_run(run: RunConfig<'_>) { + let compiler = run.builder.compiler(run.builder.top_stage, run.build_triple()); + run.builder.ensure(RustdocJSNotStd { target: run.target, compiler }); + } + + fn run(self, builder: &Builder<'_>) { + builder.ensure(Compiletest { + compiler: self.compiler, + target: self.target, + mode: "js-doc-test", + suite: "rustdoc-js", + path: "tests/rustdoc-js", + compare_mode: None, + }); + } +} + +fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option { + let mut command = Command::new(&npm); + command.arg("list").arg("--parseable").arg("--long").arg("--depth=0"); + if global { + command.arg("--global"); + } + let lines = command + .output() + .map(|output| String::from_utf8_lossy(&output.stdout).into_owned()) + .unwrap_or(String::new()); + lines + .lines() + .find_map(|l| l.split(':').nth(1)?.strip_prefix("browser-ui-test@")) + .map(|v| v.to_owned()) +} + +fn get_browser_ui_test_version(npm: &Path) -> Option { + get_browser_ui_test_version_inner(npm, false) + .or_else(|| get_browser_ui_test_version_inner(npm, true)) +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustdocGUI { + pub target: TargetSelection, + pub compiler: Compiler, +} + +impl Step for RustdocGUI { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + let run = run.suite_path("tests/rustdoc-gui"); + run.lazy_default_condition(Box::new(move || { + builder.config.nodejs.is_some() + && builder + .config + .npm + .as_ref() + .map(|p| get_browser_ui_test_version(p).is_some()) + .unwrap_or(false) + })) + } + + fn make_run(run: RunConfig<'_>) { + let compiler = run.builder.compiler(run.builder.top_stage, run.build_triple()); + run.builder.ensure(RustdocGUI { target: run.target, compiler }); + } + + fn run(self, builder: &Builder<'_>) { + builder.ensure(compile::Std::new(self.compiler, self.target)); + + let mut cmd = builder.tool_cmd(Tool::RustdocGUITest); + + let out_dir = builder.test_out(self.target).join("rustdoc-gui"); + builder.clear_if_dirty(&out_dir, &builder.rustdoc(self.compiler)); + + if let Some(src) = builder.config.src.to_str() { + cmd.arg("--rust-src").arg(src); + } + + if let Some(out_dir) = out_dir.to_str() { + cmd.arg("--out-dir").arg(out_dir); + } + + if let Some(initial_cargo) = builder.config.initial_cargo.to_str() { + cmd.arg("--initial-cargo").arg(initial_cargo); + } + + cmd.arg("--jobs").arg(builder.jobs().to_string()); + + cmd.env("RUSTDOC", builder.rustdoc(self.compiler)) + .env("RUSTC", builder.rustc(self.compiler)); + + for path in &builder.paths { + if let Some(p) = helpers::is_valid_test_suite_arg(path, "tests/rustdoc-gui", builder) { + if !p.ends_with(".goml") { + eprintln!("A non-goml file was given: `{}`", path.display()); + panic!("Cannot run rustdoc-gui tests"); + } + if let Some(name) = path.file_name().and_then(|f| f.to_str()) { + cmd.arg("--goml-file").arg(name); + } + } + } + + for test_arg in builder.config.test_args() { + cmd.arg("--test-arg").arg(test_arg); + } + + if let Some(ref nodejs) = builder.config.nodejs { + cmd.arg("--nodejs").arg(nodejs); + } + + if let Some(ref npm) = builder.config.npm { + cmd.arg("--npm").arg(npm); + } + + let _time = helpers::timeit(&builder); + let _guard = builder.msg_sysroot_tool( + Kind::Test, + self.compiler.stage, + "rustdoc-gui", + self.compiler.host, + 
self.target, + ); + try_run_tests(builder, &mut cmd, true); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Tidy; + +impl Step for Tidy { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + /// Runs the `tidy` tool. + /// + /// This tool in `src/tools` checks up on various bits and pieces of style and + /// otherwise just implements a few lint-like checks that are specific to the + /// compiler itself. + /// + /// Once tidy passes, this step also runs `fmt --check` if tests are being run + /// for the `dev` or `nightly` channels. + fn run(self, builder: &Builder<'_>) { + let mut cmd = builder.tool_cmd(Tool::Tidy); + cmd.arg(&builder.src); + cmd.arg(&builder.initial_cargo); + cmd.arg(&builder.out); + // Tidy is heavily IO constrained. Still respect `-j`, but use a higher limit if `jobs` hasn't been configured. + let jobs = builder.config.jobs.unwrap_or_else(|| { + 8 * std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32 + }); + cmd.arg(jobs.to_string()); + if builder.is_verbose() { + cmd.arg("--verbose"); + } + if builder.config.cmd.bless() { + cmd.arg("--bless"); + } + if let Some(s) = builder.config.cmd.extra_checks() { + cmd.arg(format!("--extra-checks={s}")); + } + let mut args = std::env::args_os(); + if let Some(_) = args.find(|arg| arg == OsStr::new("--")) { + cmd.arg("--"); + cmd.args(args); + } + + if builder.config.channel == "dev" || builder.config.channel == "nightly" { + builder.info("fmt check"); + if builder.initial_rustfmt().is_none() { + let inferred_rustfmt_dir = builder.initial_rustc.parent().unwrap(); + eprintln!( + "\ +ERROR: no `rustfmt` binary found in {PATH} +INFO: `rust.channel` is currently set to \"{CHAN}\" +HELP: if you are testing a beta branch, set `rust.channel` to \"beta\" in the `config.toml` file +HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to `x.py test`", + PATH = inferred_rustfmt_dir.display(), + CHAN = builder.config.channel, + ); + crate::exit!(1); + } + crate::core::build_steps::format::format(&builder, !builder.config.cmd.bless(), &[]); + } + + builder.info("tidy check"); + builder.run_delaying_failure(&mut cmd); + + builder.ensure(ExpandYamlAnchors); + + builder.info("x.py completions check"); + let [bash, zsh, fish, powershell] = ["x.py.sh", "x.py.zsh", "x.py.fish", "x.py.ps1"] + .map(|filename| builder.src.join("src/etc/completions").join(filename)); + if builder.config.cmd.bless() { + builder.ensure(crate::core::build_steps::run::GenerateCompletions); + } else if get_completion(shells::Bash, &bash).is_some() + || get_completion(shells::Fish, &fish).is_some() + || get_completion(shells::PowerShell, &powershell).is_some() + || crate::flags::get_completion(shells::Zsh, &zsh).is_some() + { + eprintln!( + "x.py completions were changed; run `x.py run generate-completions` to update them" + ); + crate::exit!(1); + } + } + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/tidy") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Tidy); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct ExpandYamlAnchors; + +impl Step for ExpandYamlAnchors { + type Output = (); + const ONLY_HOSTS: bool = true; + + /// Ensure the `generate-ci-config` tool was run locally. + /// + /// The tool in `src/tools` reads the CI definition in `src/ci/builders.yml` and generates the + /// appropriate configuration for all our CI providers. 
This step ensures the tool was called + /// by the user before committing CI changes. + fn run(self, builder: &Builder<'_>) { + // NOTE: `.github/` is not included in dist-src tarballs + if !builder.src.join(".github/workflows/ci.yml").exists() { + builder.info("Skipping YAML anchors check: GitHub Actions config not found"); + return; + } + builder.info("Ensuring the YAML anchors in the GitHub Actions config were expanded"); + builder.run_delaying_failure( + &mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src), + ); + } + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/expand-yaml-anchors") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(ExpandYamlAnchors); + } +} + +fn testdir(builder: &Builder<'_>, host: TargetSelection) -> PathBuf { + builder.out.join(host.triple).join("test") +} + +macro_rules! default_test { + ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => { + test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: false }); + }; +} + +macro_rules! default_test_with_compare_mode { + ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, + compare_mode: $compare_mode:expr }) => { + test_with_compare_mode!($name { + path: $path, + mode: $mode, + suite: $suite, + default: true, + host: false, + compare_mode: $compare_mode + }); + }; +} + +macro_rules! host_test { + ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => { + test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: true }); + }; +} + +macro_rules! test { + ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, default: $default:expr, + host: $host:expr }) => { + test_definitions!($name { + path: $path, + mode: $mode, + suite: $suite, + default: $default, + host: $host, + compare_mode: None + }); + }; +} + +macro_rules! test_with_compare_mode { + ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, default: $default:expr, + host: $host:expr, compare_mode: $compare_mode:expr }) => { + test_definitions!($name { + path: $path, + mode: $mode, + suite: $suite, + default: $default, + host: $host, + compare_mode: Some($compare_mode) + }); + }; +} + +macro_rules! test_definitions { + ($name:ident { + path: $path:expr, + mode: $mode:expr, + suite: $suite:expr, + default: $default:expr, + host: $host:expr, + compare_mode: $compare_mode:expr + }) => { + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub struct $name { + pub compiler: Compiler, + pub target: TargetSelection, + } + + impl Step for $name { + type Output = (); + const DEFAULT: bool = $default; + const ONLY_HOSTS: bool = $host; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.suite_path($path) + } + + fn make_run(run: RunConfig<'_>) { + let compiler = run.builder.compiler(run.builder.top_stage, run.build_triple()); + + run.builder.ensure($name { compiler, target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + builder.ensure(Compiletest { + compiler: self.compiler, + target: self.target, + mode: $mode, + suite: $suite, + path: $path, + compare_mode: $compare_mode, + }) + } + } + }; +} + +/// Declares an alias for running the [`Coverage`] tests in only one mode. +/// Adapted from [`test_definitions`]. +macro_rules! coverage_test_alias { + ($name:ident { + alias_and_mode: $alias_and_mode:expr, + default: $default:expr, + only_hosts: $only_hosts:expr $(,)? 
+ }) => { + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub struct $name { + pub compiler: Compiler, + pub target: TargetSelection, + } + + impl $name { + const MODE: &'static str = $alias_and_mode; + } + + impl Step for $name { + type Output = (); + const DEFAULT: bool = $default; + const ONLY_HOSTS: bool = $only_hosts; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias($alias_and_mode) + } + + fn make_run(run: RunConfig<'_>) { + let compiler = run.builder.compiler(run.builder.top_stage, run.build_triple()); + + run.builder.ensure($name { compiler, target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + Coverage { compiler: self.compiler, target: self.target } + .run_unified_suite(builder, Self::MODE) + } + } + }; +} + +default_test!(Ui { path: "tests/ui", mode: "ui", suite: "ui" }); + +default_test!(RunPassValgrind { + path: "tests/run-pass-valgrind", + mode: "run-pass-valgrind", + suite: "run-pass-valgrind" +}); + +default_test!(Codegen { path: "tests/codegen", mode: "codegen", suite: "codegen" }); + +default_test!(CodegenUnits { + path: "tests/codegen-units", + mode: "codegen-units", + suite: "codegen-units" +}); + +default_test!(Incremental { path: "tests/incremental", mode: "incremental", suite: "incremental" }); + +default_test_with_compare_mode!(Debuginfo { + path: "tests/debuginfo", + mode: "debuginfo", + suite: "debuginfo", + compare_mode: "split-dwarf" +}); + +host_test!(UiFullDeps { path: "tests/ui-fulldeps", mode: "ui", suite: "ui-fulldeps" }); + +host_test!(Rustdoc { path: "tests/rustdoc", mode: "rustdoc", suite: "rustdoc" }); +host_test!(RustdocUi { path: "tests/rustdoc-ui", mode: "ui", suite: "rustdoc-ui" }); + +host_test!(RustdocJson { path: "tests/rustdoc-json", mode: "rustdoc-json", suite: "rustdoc-json" }); + +host_test!(Pretty { path: "tests/pretty", mode: "pretty", suite: "pretty" }); + +default_test!(RunMake { path: "tests/run-make", mode: "run-make", suite: "run-make" }); + +host_test!(RunMakeFullDeps { + path: "tests/run-make-fulldeps", + mode: "run-make", + suite: "run-make-fulldeps" +}); + +default_test!(Assembly { path: "tests/assembly", mode: "assembly", suite: "assembly" }); + +/// Custom test step that is responsible for running the coverage tests +/// in multiple different modes. +/// +/// Each individual mode also has its own alias that will run the tests in +/// just that mode. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Coverage { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Coverage { + const PATH: &'static str = "tests/coverage"; + const SUITE: &'static str = "coverage"; + + fn run_unified_suite(&self, builder: &Builder<'_>, mode: &'static str) { + builder.ensure(Compiletest { + compiler: self.compiler, + target: self.target, + mode, + suite: Self::SUITE, + path: Self::PATH, + compare_mode: None, + }) + } +} + +impl Step for Coverage { + type Output = (); + const DEFAULT: bool = false; + const ONLY_HOSTS: bool = false; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.suite_path(Self::PATH) + } + + fn make_run(run: RunConfig<'_>) { + let compiler = run.builder.compiler(run.builder.top_stage, run.build_triple()); + + run.builder.ensure(Coverage { compiler, target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + self.run_unified_suite(builder, CoverageMap::MODE); + self.run_unified_suite(builder, CoverageRun::MODE); + } +} + +// Aliases for running the coverage tests in only one mode. 
+coverage_test_alias!(CoverageMap { + alias_and_mode: "coverage-map", + default: true, + only_hosts: false, +}); +coverage_test_alias!(CoverageRun { + alias_and_mode: "coverage-run", + default: true, + only_hosts: true, +}); + +host_test!(CoverageRunRustdoc { + path: "tests/coverage-run-rustdoc", + mode: "coverage-run", + suite: "coverage-run-rustdoc" +}); + +// For the mir-opt suite we do not use macros, as we need custom behavior when blessing. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct MirOpt { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for MirOpt { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = false; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.suite_path("tests/mir-opt") + } + + fn make_run(run: RunConfig<'_>) { + let compiler = run.builder.compiler(run.builder.top_stage, run.build_triple()); + run.builder.ensure(MirOpt { compiler, target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + let run = |target| { + builder.ensure(Compiletest { + compiler: self.compiler, + target, + mode: "mir-opt", + suite: "mir-opt", + path: "tests/mir-opt", + compare_mode: None, + }) + }; + + // We use custom logic to bless the mir-opt suite: mir-opt tests have multiple variants + // (32bit vs 64bit, and panic=abort vs panic=unwind), and all of them needs to be blessed. + // When blessing, we try best-effort to also bless the other variants, to aid developers. + if builder.config.cmd.bless() { + let targets = MIR_OPT_BLESS_TARGET_MAPPING + .iter() + .filter(|(target_32bit, target_64bit)| { + *target_32bit == &*self.target.triple || *target_64bit == &*self.target.triple + }) + .next() + .map(|(target_32bit, target_64bit)| { + let target_32bit = TargetSelection::from_user(target_32bit); + let target_64bit = TargetSelection::from_user(target_64bit); + + // Running compiletest requires a C compiler to be available, but it might not + // have been detected by bootstrap if the target we're testing wasn't in the + // --target flags. + if !builder.cc.borrow().contains_key(&target_32bit) { + utils::cc_detect::find_target(builder, target_32bit); + } + if !builder.cc.borrow().contains_key(&target_64bit) { + utils::cc_detect::find_target(builder, target_64bit); + } + + vec![target_32bit, target_64bit] + }) + .unwrap_or_else(|| { + eprintln!( + "\ +Note that not all variants of mir-opt tests are going to be blessed, as no mapping between +a 32bit and a 64bit target was found for {target}. +You can add that mapping by changing MIR_OPT_BLESS_TARGET_MAPPING in src/bootstrap/test.rs", + target = self.target, + ); + vec![self.target] + }); + + for target in targets { + run(target); + + let panic_abort_target = builder.ensure(MirOptPanicAbortSyntheticTarget { + compiler: self.compiler, + base: target, + }); + run(panic_abort_target); + } + } else { + run(self.target); + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +struct Compiletest { + compiler: Compiler, + target: TargetSelection, + mode: &'static str, + suite: &'static str, + path: &'static str, + compare_mode: Option<&'static str>, +} + +impl Step for Compiletest { + type Output = (); + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() + } + + /// Executes the `compiletest` tool to run a suite of tests. + /// + /// Compiles all tests with `compiler` for `target` with the specified + /// compiletest `mode` and `suite` arguments. For example `mode` can be + /// "run-pass" or `suite` can be something like `debuginfo`. 
+ fn run(self, builder: &Builder<'_>) { + if builder.top_stage == 0 && env::var("COMPILETEST_FORCE_STAGE0").is_err() { + eprintln!("\ +ERROR: `--stage 0` runs compiletest on the beta compiler, not your local changes, and will almost always cause tests to fail +HELP: to test the compiler, use `--stage 1` instead +HELP: to test the standard library, use `--stage 0 library/std` instead +NOTE: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `COMPILETEST_FORCE_STAGE0=1`." + ); + crate::exit!(1); + } + + let mut compiler = self.compiler; + let target = self.target; + let mode = self.mode; + let suite = self.suite; + + // Path for test suite + let suite_path = self.path; + + // Skip codegen tests if they aren't enabled in configuration. + if !builder.config.codegen_tests && suite == "codegen" { + return; + } + + // Support stage 1 ui-fulldeps. This is somewhat complicated: ui-fulldeps tests for the most + // part test the *API* of the compiler, not how it compiles a given file. As a result, we + // can run them against the stage 1 sources as long as we build them with the stage 0 + // bootstrap compiler. + // NOTE: Only stage 1 is special cased because we need the rustc_private artifacts to match the + // running compiler in stage 2 when plugins run. + let stage_id = if suite == "ui-fulldeps" && compiler.stage == 1 { + compiler = builder.compiler(compiler.stage - 1, target); + format!("stage{}-{}", compiler.stage + 1, target) + } else { + format!("stage{}-{}", compiler.stage, target) + }; + + if suite.ends_with("fulldeps") { + builder.ensure(compile::Rustc::new(compiler, target)); + } + + if suite == "debuginfo" { + builder + .ensure(dist::DebuggerScripts { sysroot: builder.sysroot(compiler), host: target }); + } + + builder.ensure(compile::Std::new(compiler, target)); + // ensure that `libproc_macro` is available on the host. + builder.ensure(compile::Std::new(compiler, compiler.host)); + + // Also provide `rust_test_helpers` for the host. + builder.ensure(TestHelpers { target: compiler.host }); + + // As well as the target, except for plain wasm32, which can't build it + if !target.contains("wasm") || target.contains("emscripten") { + builder.ensure(TestHelpers { target }); + } + + builder.ensure(RemoteCopyLibs { compiler, target }); + + let mut cmd = builder.tool_cmd(Tool::Compiletest); + + // compiletest currently has... a lot of arguments, so let's just pass all + // of them! + + cmd.arg("--compile-lib-path").arg(builder.rustc_libdir(compiler)); + cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target)); + cmd.arg("--rustc-path").arg(builder.rustc(compiler)); + + let is_rustdoc = suite.ends_with("rustdoc-ui") || suite.ends_with("rustdoc-js"); + + // Avoid depending on rustdoc when we don't need it. 
+ if mode == "rustdoc" + || mode == "run-make" + || (mode == "ui" && is_rustdoc) + || mode == "js-doc-test" + || mode == "rustdoc-json" + || suite == "coverage-run-rustdoc" + { + cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler)); + } + + if mode == "rustdoc-json" { + // Use the beta compiler for jsondocck + let json_compiler = compiler.with_stage(0); + cmd.arg("--jsondocck-path") + .arg(builder.ensure(tool::JsonDocCk { compiler: json_compiler, target })); + cmd.arg("--jsondoclint-path") + .arg(builder.ensure(tool::JsonDocLint { compiler: json_compiler, target })); + } + + if mode == "coverage-map" { + let coverage_dump = builder.ensure(tool::CoverageDump { + compiler: compiler.with_stage(0), + target: compiler.host, + }); + cmd.arg("--coverage-dump-path").arg(coverage_dump); + } + + if mode == "coverage-run" { + // The demangler doesn't need the current compiler, so we can avoid + // unnecessary rebuilds by using the bootstrap compiler instead. + let rust_demangler = builder + .ensure(tool::RustDemangler { + compiler: compiler.with_stage(0), + target: compiler.host, + extra_features: Vec::new(), + }) + .expect("in-tree tool"); + cmd.arg("--rust-demangler-path").arg(rust_demangler); + } + + cmd.arg("--src-base").arg(builder.src.join("tests").join(suite)); + cmd.arg("--build-base").arg(testdir(builder, compiler.host).join(suite)); + + // When top stage is 0, that means that we're testing an externally provided compiler. + // In that case we need to use its specific sysroot for tests to pass. + let sysroot = if builder.top_stage == 0 { + builder.initial_sysroot.clone() + } else { + builder.sysroot(compiler).to_path_buf() + }; + cmd.arg("--sysroot-base").arg(sysroot); + cmd.arg("--stage-id").arg(stage_id); + cmd.arg("--suite").arg(suite); + cmd.arg("--mode").arg(mode); + cmd.arg("--target").arg(target.rustc_target_arg()); + cmd.arg("--host").arg(&*compiler.host.triple); + cmd.arg("--llvm-filecheck").arg(builder.llvm_filecheck(builder.config.build)); + + if builder.config.cmd.bless() { + cmd.arg("--bless"); + } + + if builder.config.cmd.force_rerun() { + cmd.arg("--force-rerun"); + } + + let compare_mode = + builder.config.cmd.compare_mode().or_else(|| { + if builder.config.test_compare_mode { self.compare_mode } else { None } + }); + + if let Some(ref pass) = builder.config.cmd.pass() { + cmd.arg("--pass"); + cmd.arg(pass); + } + + if let Some(ref run) = builder.config.cmd.run() { + cmd.arg("--run"); + cmd.arg(run); + } + + if let Some(ref nodejs) = builder.config.nodejs { + cmd.arg("--nodejs").arg(nodejs); + } else if mode == "js-doc-test" { + panic!("need nodejs to run js-doc-test suite"); + } + if let Some(ref npm) = builder.config.npm { + cmd.arg("--npm").arg(npm); + } + if builder.config.rust_optimize_tests { + cmd.arg("--optimize-tests"); + } + if builder.config.cmd.only_modified() { + cmd.arg("--only-modified"); + } + + let mut flags = if is_rustdoc { Vec::new() } else { vec!["-Crpath".to_string()] }; + flags.push(format!("-Cdebuginfo={}", builder.config.rust_debuginfo_level_tests)); + flags.extend(builder.config.cmd.rustc_args().iter().map(|s| s.to_string())); + + if let Some(linker) = builder.linker(target) { + cmd.arg("--target-linker").arg(linker); + } + if let Some(linker) = builder.linker(compiler.host) { + cmd.arg("--host-linker").arg(linker); + } + + let mut hostflags = flags.clone(); + hostflags.push(format!("-Lnative={}", builder.test_helpers_out(compiler.host).display())); + hostflags.extend(builder.lld_flags(compiler.host)); + for flag in hostflags { + 
cmd.arg("--host-rustcflags").arg(flag); + } + + let mut targetflags = flags; + targetflags.push(format!("-Lnative={}", builder.test_helpers_out(target).display())); + targetflags.extend(builder.lld_flags(target)); + for flag in targetflags { + cmd.arg("--target-rustcflags").arg(flag); + } + + cmd.arg("--python").arg(builder.python()); + + if let Some(ref gdb) = builder.config.gdb { + cmd.arg("--gdb").arg(gdb); + } + + let run = |cmd: &mut Command| { + cmd.output().map(|output| { + String::from_utf8_lossy(&output.stdout) + .lines() + .next() + .unwrap_or_else(|| panic!("{:?} failed {:?}", cmd, output)) + .to_string() + }) + }; + let lldb_exe = "lldb"; + let lldb_version = Command::new(lldb_exe) + .arg("--version") + .output() + .map(|output| String::from_utf8_lossy(&output.stdout).to_string()) + .ok(); + if let Some(ref vers) = lldb_version { + cmd.arg("--lldb-version").arg(vers); + let lldb_python_dir = run(Command::new(lldb_exe).arg("-P")).ok(); + if let Some(ref dir) = lldb_python_dir { + cmd.arg("--lldb-python-dir").arg(dir); + } + } + + if helpers::forcing_clang_based_tests() { + let clang_exe = builder.llvm_out(target).join("bin").join("clang"); + cmd.arg("--run-clang-based-tests-with").arg(clang_exe); + } + + for exclude in &builder.config.skip { + cmd.arg("--skip"); + cmd.arg(&exclude); + } + + // Get paths from cmd args + let paths = match &builder.config.cmd { + Subcommand::Test { .. } => &builder.config.paths[..], + _ => &[], + }; + + // Get test-args by striping suite path + let mut test_args: Vec<&str> = paths + .iter() + .filter_map(|p| helpers::is_valid_test_suite_arg(p, suite_path, builder)) + .collect(); + + test_args.append(&mut builder.config.test_args()); + + // On Windows, replace forward slashes in test-args by backslashes + // so the correct filters are passed to libtest + if cfg!(windows) { + let test_args_win: Vec = + test_args.iter().map(|s| s.replace("/", "\\")).collect(); + cmd.args(&test_args_win); + } else { + cmd.args(&test_args); + } + + if builder.is_verbose() { + cmd.arg("--verbose"); + } + + cmd.arg("--json"); + + let mut llvm_components_passed = false; + let mut copts_passed = false; + if builder.config.llvm_enabled() { + let llvm::LlvmResult { llvm_config, .. } = + builder.ensure(llvm::Llvm { target: builder.config.build }); + if !builder.config.dry_run() { + let llvm_version = output(Command::new(&llvm_config).arg("--version")); + let llvm_components = output(Command::new(&llvm_config).arg("--components")); + // Remove trailing newline from llvm-config output. + cmd.arg("--llvm-version") + .arg(llvm_version.trim()) + .arg("--llvm-components") + .arg(llvm_components.trim()); + llvm_components_passed = true; + } + if !builder.is_rust_llvm(target) { + cmd.arg("--system-llvm"); + } + + // Tests that use compiler libraries may inherit the `-lLLVM` link + // requirement, but the `-L` library path is not propagated across + // separate compilations. We can add LLVM's library path to the + // platform-specific environment variable as a workaround. + if !builder.config.dry_run() && suite.ends_with("fulldeps") { + let llvm_libdir = output(Command::new(&llvm_config).arg("--libdir")); + add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cmd); + } + + if !builder.config.dry_run() + && (matches!(suite, "run-make" | "run-make-fulldeps") || mode == "coverage-run") + { + // The llvm/bin directory contains many useful cross-platform + // tools. Pass the path to run-make tests so they can use them. 
+ // (The coverage-run tests also need these tools to process + // coverage reports.) + let llvm_bin_path = llvm_config + .parent() + .expect("Expected llvm-config to be contained in directory"); + assert!(llvm_bin_path.is_dir()); + cmd.arg("--llvm-bin-dir").arg(llvm_bin_path); + } + + if !builder.config.dry_run() && matches!(suite, "run-make" | "run-make-fulldeps") { + // If LLD is available, add it to the PATH + if builder.config.lld_enabled { + let lld_install_root = + builder.ensure(llvm::Lld { target: builder.config.build }); + + let lld_bin_path = lld_install_root.join("bin"); + + let old_path = env::var_os("PATH").unwrap_or_default(); + let new_path = env::join_paths( + std::iter::once(lld_bin_path).chain(env::split_paths(&old_path)), + ) + .expect("Could not add LLD bin path to PATH"); + cmd.env("PATH", new_path); + } + } + } + + // Only pass correct values for these flags for the `run-make` suite as it + // requires that a C++ compiler was configured which isn't always the case. + if !builder.config.dry_run() && matches!(suite, "run-make" | "run-make-fulldeps") { + cmd.arg("--cc") + .arg(builder.cc(target)) + .arg("--cxx") + .arg(builder.cxx(target).unwrap()) + .arg("--cflags") + .arg(builder.cflags(target, GitRepo::Rustc, CLang::C).join(" ")) + .arg("--cxxflags") + .arg(builder.cflags(target, GitRepo::Rustc, CLang::Cxx).join(" ")); + copts_passed = true; + if let Some(ar) = builder.ar(target) { + cmd.arg("--ar").arg(ar); + } + } + + if !llvm_components_passed { + cmd.arg("--llvm-components").arg(""); + } + if !copts_passed { + cmd.arg("--cc") + .arg("") + .arg("--cxx") + .arg("") + .arg("--cflags") + .arg("") + .arg("--cxxflags") + .arg(""); + } + + if builder.remote_tested(target) { + cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient)); + } + + // Running a C compiler on MSVC requires a few env vars to be set, to be + // sure to set them here. + // + // Note that if we encounter `PATH` we make sure to append to our own `PATH` + // rather than stomp over it. + if !builder.config.dry_run() && target.contains("msvc") { + for &(ref k, ref v) in builder.cc.borrow()[&target].env() { + if k != "PATH" { + cmd.env(k, v); + } + } + } + cmd.env("RUSTC_BOOTSTRAP", "1"); + // Override the rustc version used in symbol hashes to reduce the amount of normalization + // needed when diffing test output. 
+ cmd.env("RUSTC_FORCE_RUSTC_VERSION", "compiletest"); + cmd.env("DOC_RUST_LANG_ORG_CHANNEL", builder.doc_rust_lang_org_channel()); + builder.add_rust_test_threads(&mut cmd); + + if builder.config.sanitizers_enabled(target) { + cmd.env("RUSTC_SANITIZER_SUPPORT", "1"); + } + + if builder.config.profiler_enabled(target) { + cmd.env("RUSTC_PROFILER_SUPPORT", "1"); + } + + cmd.env("RUST_TEST_TMPDIR", builder.tempdir()); + + cmd.arg("--adb-path").arg("adb"); + cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR); + if target.contains("android") && !builder.config.dry_run() { + // Assume that cc for this target comes from the android sysroot + cmd.arg("--android-cross-path") + .arg(builder.cc(target).parent().unwrap().parent().unwrap()); + } else { + cmd.arg("--android-cross-path").arg(""); + } + + if builder.config.cmd.rustfix_coverage() { + cmd.arg("--rustfix-coverage"); + } + + cmd.env("BOOTSTRAP_CARGO", &builder.initial_cargo); + + cmd.arg("--channel").arg(&builder.config.channel); + + if !builder.config.omit_git_hash { + cmd.arg("--git-hash"); + } + + let git_config = builder.config.git_config(); + cmd.arg("--git-repository").arg(git_config.git_repository); + cmd.arg("--nightly-branch").arg(git_config.nightly_branch); + + builder.ci_env.force_coloring_in_ci(&mut cmd); + + #[cfg(feature = "build-metrics")] + builder.metrics.begin_test_suite( + build_helper::metrics::TestSuiteMetadata::Compiletest { + suite: suite.into(), + mode: mode.into(), + compare_mode: None, + target: self.target.triple.to_string(), + host: self.compiler.host.triple.to_string(), + stage: self.compiler.stage, + }, + builder, + ); + + let _group = builder.msg( + Kind::Test, + compiler.stage, + &format!("compiletest suite={suite} mode={mode}"), + compiler.host, + target, + ); + try_run_tests(builder, &mut cmd, false); + + if let Some(compare_mode) = compare_mode { + cmd.arg("--compare-mode").arg(compare_mode); + + #[cfg(feature = "build-metrics")] + builder.metrics.begin_test_suite( + build_helper::metrics::TestSuiteMetadata::Compiletest { + suite: suite.into(), + mode: mode.into(), + compare_mode: Some(compare_mode.into()), + target: self.target.triple.to_string(), + host: self.compiler.host.triple.to_string(), + stage: self.compiler.stage, + }, + builder, + ); + + builder.info(&format!( + "Check compiletest suite={} mode={} compare_mode={} ({} -> {})", + suite, mode, compare_mode, &compiler.host, target + )); + let _time = helpers::timeit(&builder); + try_run_tests(builder, &mut cmd, false); + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct BookTest { + compiler: Compiler, + path: PathBuf, + name: &'static str, + is_ext_doc: bool, +} + +impl Step for BookTest { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() + } + + /// Runs the documentation tests for a book in `src/doc`. + /// + /// This uses the `rustdoc` that sits next to `compiler`. + fn run(self, builder: &Builder<'_>) { + // External docs are different from local because: + // - Some books need pre-processing by mdbook before being tested. + // - They need to save their state to toolstate. + // - They are only tested on the "checktools" builders. + // + // The local docs are tested by default, and we don't want to pay the + // cost of building mdbook, so they use `rustdoc --test` directly. + // Also, the unstable book is special because SUMMARY.md is generated, + // so it is easier to just run `rustdoc` on its files. 
+ if self.is_ext_doc { + self.run_ext_doc(builder); + } else { + self.run_local_doc(builder); + } + } +} + +impl BookTest { + /// This runs the equivalent of `mdbook test` (via the rustbook wrapper) + /// which in turn runs `rustdoc --test` on each file in the book. + fn run_ext_doc(self, builder: &Builder<'_>) { + let compiler = self.compiler; + + builder.ensure(compile::Std::new(compiler, compiler.host)); + + // mdbook just executes a binary named "rustdoc", so we need to update + // PATH so that it points to our rustdoc. + let mut rustdoc_path = builder.rustdoc(compiler); + rustdoc_path.pop(); + let old_path = env::var_os("PATH").unwrap_or_default(); + let new_path = env::join_paths(iter::once(rustdoc_path).chain(env::split_paths(&old_path))) + .expect("could not add rustdoc to PATH"); + + let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook); + let path = builder.src.join(&self.path); + // Books often have feature-gated example text. + rustbook_cmd.env("RUSTC_BOOTSTRAP", "1"); + rustbook_cmd.env("PATH", new_path).arg("test").arg(path); + builder.add_rust_test_threads(&mut rustbook_cmd); + let _guard = builder.msg( + Kind::Test, + compiler.stage, + format_args!("mdbook {}", self.path.display()), + compiler.host, + compiler.host, + ); + let _time = helpers::timeit(&builder); + let toolstate = if builder.run_delaying_failure(&mut rustbook_cmd) { + ToolState::TestPass + } else { + ToolState::TestFail + }; + builder.save_toolstate(self.name, toolstate); + } + + /// This runs `rustdoc --test` on all `.md` files in the path. + fn run_local_doc(self, builder: &Builder<'_>) { + let compiler = self.compiler; + let host = self.compiler.host; + + builder.ensure(compile::Std::new(compiler, host)); + + let _guard = + builder.msg(Kind::Test, compiler.stage, &format!("book {}", self.name), host, host); + + // Do a breadth-first traversal of the `src/doc` directory and just run + // tests for all files that end in `*.md` + let mut stack = vec![builder.src.join(self.path)]; + let _time = helpers::timeit(&builder); + let mut files = Vec::new(); + while let Some(p) = stack.pop() { + if p.is_dir() { + stack.extend(t!(p.read_dir()).map(|p| t!(p).path())); + continue; + } + + if p.extension().and_then(|s| s.to_str()) != Some("md") { + continue; + } + + files.push(p); + } + + files.sort(); + + for file in files { + markdown_test(builder, compiler, &file); + } + } +} + +macro_rules! 
test_book { + ($($name:ident, $path:expr, $book_name:expr, default=$default:expr;)+) => { + $( + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub struct $name { + compiler: Compiler, + } + + impl Step for $name { + type Output = (); + const DEFAULT: bool = $default; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path($path) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure($name { + compiler: run.builder.compiler(run.builder.top_stage, run.target), + }); + } + + fn run(self, builder: &Builder<'_>) { + builder.ensure(BookTest { + compiler: self.compiler, + path: PathBuf::from($path), + name: $book_name, + is_ext_doc: !$default, + }); + } + } + )+ + } +} + +test_book!( + Nomicon, "src/doc/nomicon", "nomicon", default=false; + Reference, "src/doc/reference", "reference", default=false; + RustdocBook, "src/doc/rustdoc", "rustdoc", default=true; + RustcBook, "src/doc/rustc", "rustc", default=true; + RustByExample, "src/doc/rust-by-example", "rust-by-example", default=false; + EmbeddedBook, "src/doc/embedded-book", "embedded-book", default=false; + TheBook, "src/doc/book", "book", default=false; + UnstableBook, "src/doc/unstable-book", "unstable-book", default=true; + EditionGuide, "src/doc/edition-guide", "edition-guide", default=false; +); + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct ErrorIndex { + compiler: Compiler, +} + +impl Step for ErrorIndex { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/error_index_generator") + } + + fn make_run(run: RunConfig<'_>) { + // error_index_generator depends on librustdoc. Use the compiler that + // is normally used to build rustdoc for other tests (like compiletest + // tests in tests/rustdoc) so that it shares the same artifacts. + let compiler = run.builder.compiler(run.builder.top_stage, run.builder.config.build); + run.builder.ensure(ErrorIndex { compiler }); + } + + /// Runs the error index generator tool to execute the tests located in the error + /// index. + /// + /// The `error_index_generator` tool lives in `src/tools` and is used to + /// generate a markdown file from the error indexes of the code base which is + /// then passed to `rustdoc --test`. + fn run(self, builder: &Builder<'_>) { + let compiler = self.compiler; + + let dir = testdir(builder, compiler.host); + t!(fs::create_dir_all(&dir)); + let output = dir.join("error-index.md"); + + let mut tool = tool::ErrorIndex::command(builder); + tool.arg("markdown").arg(&output); + + let guard = + builder.msg(Kind::Test, compiler.stage, "error-index", compiler.host, compiler.host); + let _time = helpers::timeit(&builder); + builder.run_quiet(&mut tool); + drop(guard); + // The tests themselves need to link to std, so make sure it is + // available. 
+ builder.ensure(compile::Std::new(compiler, compiler.host)); + markdown_test(builder, compiler, &output); + } +} + +fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) -> bool { + if let Ok(contents) = fs::read_to_string(markdown) { + if !contents.contains("```") { + return true; + } + } + + builder.verbose(&format!("doc tests for: {}", markdown.display())); + let mut cmd = builder.rustdoc_cmd(compiler); + builder.add_rust_test_threads(&mut cmd); + // allow for unstable options such as new editions + cmd.arg("-Z"); + cmd.arg("unstable-options"); + cmd.arg("--test"); + cmd.arg(markdown); + cmd.env("RUSTC_BOOTSTRAP", "1"); + + let test_args = builder.config.test_args().join(" "); + cmd.arg("--test-args").arg(test_args); + + if builder.config.verbose_tests { + builder.run_delaying_failure(&mut cmd) + } else { + builder.run_quiet_delaying_failure(&mut cmd) + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct RustcGuide; + +impl Step for RustcGuide { + type Output = (); + const DEFAULT: bool = false; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/doc/rustc-dev-guide") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustcGuide); + } + + fn run(self, builder: &Builder<'_>) { + let relative_path = Path::new("src").join("doc").join("rustc-dev-guide"); + builder.update_submodule(&relative_path); + + let src = builder.src.join(relative_path); + let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook); + let toolstate = if builder.run_delaying_failure(rustbook_cmd.arg("linkcheck").arg(&src)) { + ToolState::TestPass + } else { + ToolState::TestFail + }; + builder.save_toolstate("rustc-dev-guide", toolstate); + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct CrateLibrustc { + compiler: Compiler, + target: TargetSelection, + crates: Vec>, +} + +impl Step for CrateLibrustc { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.crate_or_deps("rustc-main") + } + + fn make_run(run: RunConfig<'_>) { + let builder = run.builder; + let host = run.build_triple(); + let compiler = builder.compiler_for(builder.top_stage, host, host); + let crates = run + .paths + .iter() + .map(|p| builder.crate_paths[&p.assert_single_path().path].clone()) + .collect(); + + builder.ensure(CrateLibrustc { compiler, target: run.target, crates }); + } + + fn run(self, builder: &Builder<'_>) { + builder.ensure(Crate { + compiler: self.compiler, + target: self.target, + mode: Mode::Rustc, + crates: self.crates, + }); + } +} + +/// Given a `cargo test` subcommand, add the appropriate flags and run it. +/// +/// Returns whether the test succeeded. 
+fn run_cargo_test<'a>( + cargo: impl Into, + libtest_args: &[&str], + crates: &[Interned], + primary_crate: &str, + description: impl Into>, + compiler: Compiler, + target: TargetSelection, + builder: &Builder<'_>, +) -> bool { + let mut cargo = + prepare_cargo_test(cargo, libtest_args, crates, primary_crate, compiler, target, builder); + let _time = helpers::timeit(&builder); + let _group = description.into().and_then(|what| { + builder.msg_sysroot_tool(Kind::Test, compiler.stage, what, compiler.host, target) + }); + + #[cfg(feature = "build-metrics")] + builder.metrics.begin_test_suite( + build_helper::metrics::TestSuiteMetadata::CargoPackage { + crates: crates.iter().map(|c| c.to_string()).collect(), + target: target.triple.to_string(), + host: compiler.host.triple.to_string(), + stage: compiler.stage, + }, + builder, + ); + add_flags_and_try_run_tests(builder, &mut cargo) +} + +/// Given a `cargo test` subcommand, pass it the appropriate test flags given a `builder`. +fn prepare_cargo_test( + cargo: impl Into, + libtest_args: &[&str], + crates: &[Interned], + primary_crate: &str, + compiler: Compiler, + target: TargetSelection, + builder: &Builder<'_>, +) -> Command { + let mut cargo = cargo.into(); + + // Propegate `--bless` if it has not already been set/unset + // Any tools that want to use this should bless if `RUSTC_BLESS` is set to + // anything other than `0`. + if builder.config.cmd.bless() && !cargo.get_envs().any(|v| v.0 == "RUSTC_BLESS") { + cargo.env("RUSTC_BLESS", "Gesundheit"); + } + + // Pass in some standard flags then iterate over the graph we've discovered + // in `cargo metadata` with the maps above and figure out what `-p` + // arguments need to get passed. + if builder.kind == Kind::Test && !builder.fail_fast { + cargo.arg("--no-fail-fast"); + } + match builder.doc_tests { + DocTests::Only => { + cargo.arg("--doc"); + } + DocTests::No => { + let krate = &builder + .crates + .get(&INTERNER.intern_str(primary_crate)) + .unwrap_or_else(|| panic!("missing crate {primary_crate}")); + if krate.has_lib { + cargo.arg("--lib"); + } + cargo.args(&["--bins", "--examples", "--tests", "--benches"]); + } + DocTests::Yes => {} + } + + for &krate in crates { + cargo.arg("-p").arg(krate); + } + + cargo.arg("--").args(&builder.config.test_args()).args(libtest_args); + if !builder.config.verbose_tests { + cargo.arg("--quiet"); + } + + // The tests are going to run with the *target* libraries, so we need to + // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent. + // + // Note that to run the compiler we need to run with the *host* libraries, + // but our wrapper scripts arrange for that to be the case anyway. 
+ let mut dylib_path = dylib_path(); + dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target))); + cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); + + if target.contains("emscripten") { + cargo.env( + format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)), + builder.config.nodejs.as_ref().expect("nodejs not configured"), + ); + } else if target.starts_with("wasm32") { + let node = builder.config.nodejs.as_ref().expect("nodejs not configured"); + let runner = format!("{} {}/src/etc/wasm32-shim.js", node.display(), builder.src.display()); + cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)), &runner); + } else if builder.remote_tested(target) { + cargo.env( + format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)), + format!("{} run 0", builder.tool_exe(Tool::RemoteTestClient).display()), + ); + } + + cargo +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Crate { + pub compiler: Compiler, + pub target: TargetSelection, + pub mode: Mode, + pub crates: Vec>, +} + +impl Step for Crate { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.crate_or_deps("sysroot") + } + + fn make_run(run: RunConfig<'_>) { + let builder = run.builder; + let host = run.build_triple(); + let compiler = builder.compiler_for(builder.top_stage, host, host); + let crates = run + .paths + .iter() + .map(|p| builder.crate_paths[&p.assert_single_path().path].clone()) + .collect(); + + builder.ensure(Crate { compiler, target: run.target, mode: Mode::Std, crates }); + } + + /// Runs all unit tests plus documentation tests for a given crate defined + /// by a `Cargo.toml` (single manifest) + /// + /// This is what runs tests for crates like the standard library, compiler, etc. + /// It essentially is the driver for running `cargo test`. + /// + /// Currently this runs all tests for a DAG by passing a bunch of `-p foo` + /// arguments, and those arguments are discovered from `cargo metadata`. + fn run(self, builder: &Builder<'_>) { + let compiler = self.compiler; + let target = self.target; + let mode = self.mode; + + // See [field@compile::Std::force_recompile]. + builder.ensure(compile::Std::force_recompile(compiler, target)); + builder.ensure(RemoteCopyLibs { compiler, target }); + + // If we're not doing a full bootstrap but we're testing a stage2 + // version of libstd, then what we're actually testing is the libstd + // produced in stage1. Reflect that here by updating the compiler that + // we're working with automatically. + let compiler = builder.compiler_for(compiler.stage, compiler.host, target); + + let mut cargo = + builder.cargo(compiler, mode, SourceType::InTree, target, builder.kind.as_str()); + match mode { + Mode::Std => { + compile::std_cargo(builder, target, compiler.stage, &mut cargo); + // `std_cargo` actually does the wrong thing: it passes `--sysroot build/host/stage2`, + // but we want to use the force-recompile std we just built in `build/host/stage2-test-sysroot`. + // Override it. 
+ if builder.download_rustc() && compiler.stage > 0 { + let sysroot = builder + .out + .join(compiler.host.triple) + .join(format!("stage{}-test-sysroot", compiler.stage)); + cargo.env("RUSTC_SYSROOT", sysroot); + } + } + Mode::Rustc => { + compile::rustc_cargo(builder, &mut cargo, target, compiler.stage); + } + _ => panic!("can only test libraries"), + }; + + run_cargo_test( + cargo, + &[], + &self.crates, + &self.crates[0], + &*crate_description(&self.crates), + compiler, + target, + builder, + ); + } +} + +/// Rustdoc is special in various ways, which is why this step is different from `Crate`. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct CrateRustdoc { + host: TargetSelection, +} + +impl Step for CrateRustdoc { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.paths(&["src/librustdoc", "src/tools/rustdoc"]) + } + + fn make_run(run: RunConfig<'_>) { + let builder = run.builder; + + builder.ensure(CrateRustdoc { host: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + let target = self.host; + + let compiler = if builder.download_rustc() { + builder.compiler(builder.top_stage, target) + } else { + // Use the previous stage compiler to reuse the artifacts that are + // created when running compiletest for tests/rustdoc. If this used + // `compiler`, then it would cause rustdoc to be built *again*, which + // isn't really necessary. + builder.compiler_for(builder.top_stage, target, target) + }; + // NOTE: normally `ensure(Rustc)` automatically runs `ensure(Std)` for us. However, when + // using `download-rustc`, the rustc_private artifacts may be in a *different sysroot* from + // the target rustdoc (`ci-rustc-sysroot` vs `stage2`). In that case, we need to ensure this + // explicitly to make sure it ends up in the stage2 sysroot. + builder.ensure(compile::Std::new(compiler, target)); + builder.ensure(compile::Rustc::new(compiler, target)); + + let mut cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + target, + builder.kind.as_str(), + "src/tools/rustdoc", + SourceType::InTree, + &[], + ); + if self.host.contains("musl") { + cargo.arg("'-Ctarget-feature=-crt-static'"); + } + + // This is needed for running doctests on librustdoc. This is a bit of + // an unfortunate interaction with how bootstrap works and how cargo + // sets up the dylib path, and the fact that the doctest (in + // html/markdown.rs) links to rustc-private libs. For stage1, the + // compiler host dylibs (in stage1/lib) are not the same as the target + // dylibs (in stage1/lib/rustlib/...). This is different from a normal + // rust distribution where they are the same. + // + // On the cargo side, normal tests use `target_process` which handles + // setting up the dylib for a *target* (stage1/lib/rustlib/... in this + // case). However, for doctests it uses `rustdoc_process` which only + // sets up the dylib path for the *host* (stage1/lib), which is the + // wrong directory. + // + // Recall that we special-cased `compiler_for(top_stage)` above, so we always use stage1. + // + // It should be considered to just stop running doctests on + // librustdoc. There is only one test, and it doesn't look too + // important. There might be other ways to avoid this, but it seems + // pretty convoluted. + // + // See also https://github.com/rust-lang/rust/issues/13983 where the + // host vs target dylibs for rustdoc are consistently tricky to deal + // with. 
+ // + // Note that this set the host libdir for `download_rustc`, which uses a normal rust distribution. + let libdir = if builder.download_rustc() { + builder.rustc_libdir(compiler) + } else { + builder.sysroot_libdir(compiler, target).to_path_buf() + }; + let mut dylib_path = dylib_path(); + dylib_path.insert(0, PathBuf::from(&*libdir)); + cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); + + run_cargo_test( + cargo, + &[], + &[INTERNER.intern_str("rustdoc:0.0.0")], + "rustdoc", + "rustdoc", + compiler, + target, + builder, + ); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct CrateRustdocJsonTypes { + host: TargetSelection, +} + +impl Step for CrateRustdocJsonTypes { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/rustdoc-json-types") + } + + fn make_run(run: RunConfig<'_>) { + let builder = run.builder; + + builder.ensure(CrateRustdocJsonTypes { host: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + let target = self.host; + + // Use the previous stage compiler to reuse the artifacts that are + // created when running compiletest for tests/rustdoc. If this used + // `compiler`, then it would cause rustdoc to be built *again*, which + // isn't really necessary. + let compiler = builder.compiler_for(builder.top_stage, target, target); + builder.ensure(compile::Rustc::new(compiler, target)); + + let cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolRustc, + target, + builder.kind.as_str(), + "src/rustdoc-json-types", + SourceType::InTree, + &[], + ); + + // FIXME: this looks very wrong, libtest doesn't accept `-C` arguments and the quotes are fishy. + let libtest_args = if self.host.contains("musl") { + ["'-Ctarget-feature=-crt-static'"].as_slice() + } else { + &[] + }; + + run_cargo_test( + cargo, + libtest_args, + &[INTERNER.intern_str("rustdoc-json-types")], + "rustdoc-json-types", + "rustdoc-json-types", + compiler, + target, + builder, + ); + } +} + +/// Some test suites are run inside emulators or on remote devices, and most +/// of our test binaries are linked dynamically which means we need to ship +/// the standard library and such to the emulator ahead of time. This step +/// represents this and is a dependency of all test suites. +/// +/// Most of the time this is a no-op. For some steps such as shipping data to +/// QEMU we have to build our own tools so we've got conditional dependencies +/// on those programs as well. Note that the remote test client is built for +/// the build target (us) and the server is built for the target. 
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct RemoteCopyLibs { + compiler: Compiler, + target: TargetSelection, +} + +impl Step for RemoteCopyLibs { + type Output = (); + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() + } + + fn run(self, builder: &Builder<'_>) { + let compiler = self.compiler; + let target = self.target; + if !builder.remote_tested(target) { + return; + } + + builder.ensure(compile::Std::new(compiler, target)); + + builder.info(&format!("REMOTE copy libs to emulator ({target})")); + + let server = builder.ensure(tool::RemoteTestServer { compiler, target }); + + // Spawn the emulator and wait for it to come online + let tool = builder.tool_exe(Tool::RemoteTestClient); + let mut cmd = Command::new(&tool); + cmd.arg("spawn-emulator").arg(target.triple).arg(&server).arg(builder.tempdir()); + if let Some(rootfs) = builder.qemu_rootfs(target) { + cmd.arg(rootfs); + } + builder.run(&mut cmd); + + // Push all our dylibs to the emulator + for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) { + let f = t!(f); + let name = f.file_name().into_string().unwrap(); + if helpers::is_dylib(&name) { + builder.run(Command::new(&tool).arg("push").arg(f.path())); + } + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Distcheck; + +impl Step for Distcheck { + type Output = (); + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("distcheck") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Distcheck); + } + + /// Runs "distcheck", a 'make check' from a tarball + fn run(self, builder: &Builder<'_>) { + builder.info("Distcheck"); + let dir = builder.tempdir().join("distcheck"); + let _ = fs::remove_dir_all(&dir); + t!(fs::create_dir_all(&dir)); + + // Guarantee that these are built before we begin running. + builder.ensure(dist::PlainSourceTarball); + builder.ensure(dist::Src); + + let mut cmd = Command::new("tar"); + cmd.arg("-xf") + .arg(builder.ensure(dist::PlainSourceTarball).tarball()) + .arg("--strip-components=1") + .current_dir(&dir); + builder.run(&mut cmd); + builder.run( + Command::new("./configure") + .args(&builder.config.configure_args) + .arg("--enable-vendor") + .current_dir(&dir), + ); + builder.run( + Command::new(helpers::make(&builder.config.build.triple)) + .arg("check") + .current_dir(&dir), + ); + + // Now make sure that rust-src has all of libstd's dependencies + builder.info("Distcheck rust-src"); + let dir = builder.tempdir().join("distcheck-src"); + let _ = fs::remove_dir_all(&dir); + t!(fs::create_dir_all(&dir)); + + let mut cmd = Command::new("tar"); + cmd.arg("-xf") + .arg(builder.ensure(dist::Src).tarball()) + .arg("--strip-components=1") + .current_dir(&dir); + builder.run(&mut cmd); + + let toml = dir.join("rust-src/lib/rustlib/src/rust/library/std/Cargo.toml"); + builder.run( + Command::new(&builder.initial_cargo) + // Will read the libstd Cargo.toml + // which uses the unstable `public-dependency` feature. + .env("RUSTC_BOOTSTRAP", "1") + .arg("generate-lockfile") + .arg("--manifest-path") + .arg(&toml) + .current_dir(&dir), + ); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Bootstrap; + +impl Step for Bootstrap { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + /// Tests the build system itself. 
+ fn run(self, builder: &Builder<'_>) { + let host = builder.config.build; + let compiler = builder.compiler(0, host); + let _guard = builder.msg(Kind::Test, 0, "bootstrap", host, host); + + let mut check_bootstrap = Command::new(&builder.python()); + check_bootstrap + .args(["-m", "unittest", "bootstrap_test.py"]) + .env("BUILD_DIR", &builder.out) + .env("BUILD_PLATFORM", &builder.build.build.triple) + .current_dir(builder.src.join("src/bootstrap/")); + // NOTE: we intentionally don't pass test_args here because the args for unittest and cargo test are mutually incompatible. + // Use `python -m unittest` manually if you want to pass arguments. + builder.run_delaying_failure(&mut check_bootstrap); + + let mut cmd = Command::new(&builder.initial_cargo); + cmd.arg("test") + .current_dir(builder.src.join("src/bootstrap")) + .env("RUSTFLAGS", "-Cdebuginfo=2") + .env("CARGO_TARGET_DIR", builder.out.join("bootstrap")) + .env("RUSTC_BOOTSTRAP", "1") + .env("RUSTDOC", builder.rustdoc(compiler)) + .env("RUSTC", &builder.initial_rustc); + if let Some(flags) = option_env!("RUSTFLAGS") { + // Use the same rustc flags for testing as for "normal" compilation, + // so that Cargo doesn’t recompile the entire dependency graph every time: + // https://github.com/rust-lang/rust/issues/49215 + cmd.env("RUSTFLAGS", flags); + } + // rustbuild tests are racy on directory creation so just run them one at a time. + // Since there's not many this shouldn't be a problem. + run_cargo_test(cmd, &["--test-threads=1"], &[], "bootstrap", None, compiler, host, builder); + } + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/bootstrap") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Bootstrap); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct TierCheck { + pub compiler: Compiler, +} + +impl Step for TierCheck { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/tier-check") + } + + fn make_run(run: RunConfig<'_>) { + let compiler = + run.builder.compiler_for(run.builder.top_stage, run.builder.build.build, run.target); + run.builder.ensure(TierCheck { compiler }); + } + + /// Tests the Platform Support page in the rustc book. 
+ fn run(self, builder: &Builder<'_>) { + builder.ensure(compile::Std::new(self.compiler, self.compiler.host)); + let mut cargo = tool::prepare_tool_cargo( + builder, + self.compiler, + Mode::ToolStd, + self.compiler.host, + "run", + "src/tools/tier-check", + SourceType::InTree, + &[], + ); + cargo.arg(builder.src.join("src/doc/rustc/src/platform-support.md")); + cargo.arg(&builder.rustc(self.compiler)); + if builder.is_verbose() { + cargo.arg("--verbose"); + } + + let _guard = builder.msg( + Kind::Test, + self.compiler.stage, + "platform support check", + self.compiler.host, + self.compiler.host, + ); + builder.run_delaying_failure(&mut cargo.into()); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct LintDocs { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for LintDocs { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/lint-docs") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(LintDocs { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + target: run.target, + }); + } + + /// Tests that the lint examples in the rustc book generate the correct + /// lints and have the expected format. + fn run(self, builder: &Builder<'_>) { + builder.ensure(crate::core::build_steps::doc::RustcBook { + compiler: self.compiler, + target: self.target, + validate: true, + }); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct RustInstaller; + +impl Step for RustInstaller { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + /// Ensure the version placeholder replacement tool builds + fn run(self, builder: &Builder<'_>) { + let bootstrap_host = builder.config.build; + let compiler = builder.compiler(0, bootstrap_host); + let cargo = tool::prepare_tool_cargo( + builder, + compiler, + Mode::ToolBootstrap, + bootstrap_host, + "test", + "src/tools/rust-installer", + SourceType::InTree, + &[], + ); + + let _guard = builder.msg( + Kind::Test, + compiler.stage, + "rust-installer", + bootstrap_host, + bootstrap_host, + ); + run_cargo_test(cargo, &[], &[], "installer", None, compiler, bootstrap_host, builder); + + // We currently don't support running the test.sh script outside linux(?) environments. + // Eventually this should likely migrate to #[test]s in rust-installer proper rather than a + // set of scripts, which will likely allow dropping this if. 
+ if bootstrap_host != "x86_64-unknown-linux-gnu" { + return; + } + + let mut cmd = + std::process::Command::new(builder.src.join("src/tools/rust-installer/test.sh")); + let tmpdir = testdir(builder, compiler.host).join("rust-installer"); + let _ = std::fs::remove_dir_all(&tmpdir); + let _ = std::fs::create_dir_all(&tmpdir); + cmd.current_dir(&tmpdir); + cmd.env("CARGO_TARGET_DIR", tmpdir.join("cargo-target")); + cmd.env("CARGO", &builder.initial_cargo); + cmd.env("RUSTC", &builder.initial_rustc); + cmd.env("TMP_DIR", &tmpdir); + builder.run_delaying_failure(&mut cmd); + } + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/rust-installer") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Self); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct TestHelpers { + pub target: TargetSelection, +} + +impl Step for TestHelpers { + type Output = (); + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("tests/auxiliary/rust_test_helpers.c") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(TestHelpers { target: run.target }) + } + + /// Compiles the `rust_test_helpers.c` library which we used in various + /// `run-pass` tests for ABI testing. + fn run(self, builder: &Builder<'_>) { + if builder.config.dry_run() { + return; + } + // The x86_64-fortanix-unknown-sgx target doesn't have a working C + // toolchain. However, some x86_64 ELF objects can be linked + // without issues. Use this hack to compile the test helpers. + let target = if self.target == "x86_64-fortanix-unknown-sgx" { + TargetSelection::from_user("x86_64-unknown-linux-gnu") + } else { + self.target + }; + let dst = builder.test_helpers_out(target); + let src = builder.src.join("tests/auxiliary/rust_test_helpers.c"); + if up_to_date(&src, &dst.join("librust_test_helpers.a")) { + return; + } + + let _guard = builder.msg_unstaged(Kind::Build, "test helpers", target); + t!(fs::create_dir_all(&dst)); + let mut cfg = cc::Build::new(); + + // We may have found various cross-compilers a little differently due to our + // extra configuration, so inform cc of these compilers. Note, though, that + // on MSVC we still need cc's detection of env vars (ugh). + if !target.contains("msvc") { + if let Some(ar) = builder.ar(target) { + cfg.archiver(ar); + } + cfg.compiler(builder.cc(target)); + } + cfg.cargo_metadata(false) + .out_dir(&dst) + .target(&target.triple) + .host(&builder.config.build.triple) + .opt_level(0) + .warnings(false) + .debug(false) + .file(builder.src.join("tests/auxiliary/rust_test_helpers.c")) + .compile("rust_test_helpers"); + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct CodegenCranelift { + compiler: Compiler, + target: TargetSelection, +} + +impl Step for CodegenCranelift { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.paths(&["compiler/rustc_codegen_cranelift"]) + } + + fn make_run(run: RunConfig<'_>) { + let builder = run.builder; + let host = run.build_triple(); + let compiler = run.builder.compiler_for(run.builder.top_stage, host, host); + + if builder.doc_tests == DocTests::Only { + return; + } + + if !target_supports_cranelift_backend(run.target) { + builder.info("target not supported by rustc_codegen_cranelift. skipping"); + return; + } + + if builder.remote_tested(run.target) { + builder.info("remote testing is not supported by rustc_codegen_cranelift. 
skipping"); + return; + } + + if !builder.config.rust_codegen_backends.contains(&INTERNER.intern_str("cranelift")) { + builder.info("cranelift not in rust.codegen-backends. skipping"); + return; + } + + builder.ensure(CodegenCranelift { compiler, target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + let compiler = self.compiler; + let target = self.target; + + builder.ensure(compile::Std::new(compiler, target)); + + // If we're not doing a full bootstrap but we're testing a stage2 + // version of libstd, then what we're actually testing is the libstd + // produced in stage1. Reflect that here by updating the compiler that + // we're working with automatically. + let compiler = builder.compiler_for(compiler.stage, compiler.host, target); + + let build_cargo = || { + let mut cargo = builder.cargo( + compiler, + Mode::Codegen, // Must be codegen to ensure dlopen on compiled dylibs works + SourceType::InTree, + target, + "run", + ); + cargo.current_dir(&builder.src.join("compiler/rustc_codegen_cranelift")); + cargo + .arg("--manifest-path") + .arg(builder.src.join("compiler/rustc_codegen_cranelift/build_system/Cargo.toml")); + compile::rustc_cargo_env(builder, &mut cargo, target, compiler.stage); + + // Avoid incremental cache issues when changing rustc + cargo.env("CARGO_BUILD_INCREMENTAL", "false"); + + cargo + }; + + builder.info(&format!( + "{} cranelift stage{} ({} -> {})", + Kind::Test.description(), + compiler.stage, + &compiler.host, + target + )); + let _time = helpers::timeit(&builder); + + // FIXME handle vendoring for source tarballs before removing the --skip-test below + let download_dir = builder.out.join("cg_clif_download"); + + // FIXME: Uncomment the `prepare` command below once vendoring is implemented. + /* + let mut prepare_cargo = build_cargo(); + prepare_cargo.arg("--").arg("prepare").arg("--download-dir").arg(&download_dir); + #[allow(deprecated)] + builder.config.try_run(&mut prepare_cargo.into()).unwrap(); + */ + + let mut cargo = build_cargo(); + cargo + .arg("--") + .arg("test") + .arg("--download-dir") + .arg(&download_dir) + .arg("--out-dir") + .arg(builder.stage_out(compiler, Mode::ToolRustc).join("cg_clif")) + .arg("--no-unstable-features") + .arg("--use-backend") + .arg("cranelift") + // Avoid having to vendor the standard library dependencies + .arg("--sysroot") + .arg("llvm") + // These tests depend on crates that are not yet vendored + // FIXME remove once vendoring is handled + .arg("--skip-test") + .arg("testsuite.extended_sysroot"); + cargo.args(builder.config.test_args()); + + let mut cmd: Command = cargo.into(); + builder.run_cmd(BootstrapCommand::from(&mut cmd).fail_fast()); + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct CodegenGCC { + compiler: Compiler, + target: TargetSelection, +} + +impl Step for CodegenGCC { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.paths(&["compiler/rustc_codegen_gcc"]) + } + + fn make_run(run: RunConfig<'_>) { + let builder = run.builder; + let host = run.build_triple(); + let compiler = run.builder.compiler_for(run.builder.top_stage, host, host); + + if builder.doc_tests == DocTests::Only { + return; + } + + let triple = run.target.triple; + let target_supported = + if triple.contains("linux") { triple.contains("x86_64") } else { false }; + if !target_supported { + builder.info("target not supported by rustc_codegen_gcc. 
skipping"); + return; + } + + if builder.remote_tested(run.target) { + builder.info("remote testing is not supported by rustc_codegen_gcc. skipping"); + return; + } + + if !builder.config.rust_codegen_backends.contains(&INTERNER.intern_str("gcc")) { + builder.info("gcc not in rust.codegen-backends. skipping"); + return; + } + + builder.ensure(CodegenGCC { compiler, target: run.target }); + } + + fn run(self, builder: &Builder<'_>) { + let compiler = self.compiler; + let target = self.target; + + builder.ensure(compile::Std::new_with_extra_rust_args( + compiler, + target, + &["-Csymbol-mangling-version=v0", "-Cpanic=abort"], + )); + + // If we're not doing a full bootstrap but we're testing a stage2 + // version of libstd, then what we're actually testing is the libstd + // produced in stage1. Reflect that here by updating the compiler that + // we're working with automatically. + let compiler = builder.compiler_for(compiler.stage, compiler.host, target); + + let build_cargo = || { + let mut cargo = builder.cargo( + compiler, + Mode::Codegen, // Must be codegen to ensure dlopen on compiled dylibs works + SourceType::InTree, + target, + "run", + ); + cargo.current_dir(&builder.src.join("compiler/rustc_codegen_gcc")); + cargo + .arg("--manifest-path") + .arg(builder.src.join("compiler/rustc_codegen_gcc/build_system/Cargo.toml")); + compile::rustc_cargo_env(builder, &mut cargo, target, compiler.stage); + + // Avoid incremental cache issues when changing rustc + cargo.env("CARGO_BUILD_INCREMENTAL", "false"); + cargo.rustflag("-Cpanic=abort"); + + cargo + }; + + builder.info(&format!( + "{} GCC stage{} ({} -> {})", + Kind::Test.description(), + compiler.stage, + &compiler.host, + target + )); + let _time = helpers::timeit(&builder); + + // FIXME: Uncomment the `prepare` command below once vendoring is implemented. 
+ /* + let mut prepare_cargo = build_cargo(); + prepare_cargo.arg("--").arg("prepare"); + #[allow(deprecated)] + builder.config.try_run(&mut prepare_cargo.into()).unwrap(); + */ + + let mut cargo = build_cargo(); + + cargo + .arg("--") + .arg("test") + .arg("--use-system-gcc") + .arg("--use-backend") + .arg("gcc") + .arg("--out-dir") + .arg(builder.stage_out(compiler, Mode::ToolRustc).join("cg_gcc")) + .arg("--release") + .arg("--no-default-features") + .arg("--mini-tests") + .arg("--std-tests"); + cargo.args(builder.config.test_args()); + + let mut cmd: Command = cargo.into(); + builder.run_cmd(BootstrapCommand::from(&mut cmd).fail_fast()); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/tool.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/tool.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/tool.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/tool.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,849 @@ +use std::env; +use std::fs; +use std::path::PathBuf; +use std::process::Command; + +use crate::core::build_steps::compile; +use crate::core::build_steps::toolstate::ToolState; +use crate::core::builder::{Builder, Cargo as CargoCommand, RunConfig, ShouldRun, Step}; +use crate::core::config::TargetSelection; +use crate::utils::channel::GitInfo; +use crate::utils::exec::BootstrapCommand; +use crate::utils::helpers::{add_dylib_path, exe, t}; +use crate::Compiler; +use crate::Mode; +use crate::{gha, Kind}; + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub enum SourceType { + InTree, + Submodule, +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +struct ToolBuild { + compiler: Compiler, + target: TargetSelection, + tool: &'static str, + path: &'static str, + mode: Mode, + is_optional_tool: bool, + source_type: SourceType, + extra_features: Vec, + /// Nightly-only features that are allowed (comma-separated list). + allow_features: &'static str, +} + +impl Builder<'_> { + #[track_caller] + fn msg_tool( + &self, + mode: Mode, + tool: &str, + build_stage: u32, + host: &TargetSelection, + target: &TargetSelection, + ) -> Option { + match mode { + // depends on compiler stage, different to host compiler + Mode::ToolRustc => self.msg_sysroot_tool( + Kind::Build, + build_stage, + format_args!("tool {tool}"), + *host, + *target, + ), + // doesn't depend on compiler, same as host compiler + _ => self.msg(Kind::Build, build_stage, format_args!("tool {tool}"), *host, *target), + } + } +} + +impl Step for ToolBuild { + type Output = Option; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() + } + + /// Builds a tool in `src/tools` + /// + /// This will build the specified tool with the specified `host` compiler in + /// `stage` into the normal cargo output directory. 
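    /// A typical caller (an illustrative sketch; the field values are examples, not a fixed
    /// recipe) looks like:
    ///
    ///     builder.ensure(ToolBuild {
    ///         compiler,
    ///         target,
    ///         tool: "tidy",
    ///         path: "src/tools/tidy",
    ///         mode: Mode::ToolBootstrap,
    ///         is_optional_tool: false,
    ///         source_type: SourceType::InTree,
    ///         extra_features: vec![],
    ///         allow_features: "",
    ///     });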
+ fn run(self, builder: &Builder<'_>) -> Option { + let compiler = self.compiler; + let target = self.target; + let mut tool = self.tool; + let path = self.path; + let is_optional_tool = self.is_optional_tool; + + match self.mode { + Mode::ToolRustc => { + builder.ensure(compile::Std::new(compiler, compiler.host)); + builder.ensure(compile::Rustc::new(compiler, target)); + } + Mode::ToolStd => builder.ensure(compile::Std::new(compiler, target)), + Mode::ToolBootstrap => {} // uses downloaded stage0 compiler libs + _ => panic!("unexpected Mode for tool build"), + } + + let mut cargo = prepare_tool_cargo( + builder, + compiler, + self.mode, + target, + "build", + path, + self.source_type, + &self.extra_features, + ); + if !self.allow_features.is_empty() { + cargo.allow_features(self.allow_features); + } + let _guard = builder.msg_tool( + self.mode, + self.tool, + self.compiler.stage, + &self.compiler.host, + &self.target, + ); + + let mut cargo = Command::from(cargo); + // we check this in `is_optional_tool` in a second + let is_expected = builder.run_cmd(BootstrapCommand::from(&mut cargo).allow_failure()); + + builder.save_toolstate( + tool, + if is_expected { ToolState::TestFail } else { ToolState::BuildFail }, + ); + + if !is_expected { + if !is_optional_tool { + crate::exit!(1); + } else { + None + } + } else { + // HACK(#82501): on Windows, the tools directory gets added to PATH when running tests, and + // compiletest confuses HTML tidy with the in-tree tidy. Name the in-tree tidy something + // different so the problem doesn't come up. + if tool == "tidy" { + tool = "rust-tidy"; + } + let cargo_out = builder.cargo_out(compiler, self.mode, target).join(exe(tool, target)); + let bin = builder.tools_dir(compiler).join(exe(tool, target)); + builder.copy(&cargo_out, &bin); + Some(bin) + } + } +} + +pub fn prepare_tool_cargo( + builder: &Builder<'_>, + compiler: Compiler, + mode: Mode, + target: TargetSelection, + command: &'static str, + path: &str, + source_type: SourceType, + extra_features: &[String], +) -> CargoCommand { + let mut cargo = builder.cargo(compiler, mode, source_type, target, command); + let dir = builder.src.join(path); + cargo.arg("--manifest-path").arg(dir.join("Cargo.toml")); + + let mut features = extra_features.to_vec(); + if builder.build.config.cargo_native_static { + if path.ends_with("cargo") + || path.ends_with("rls") + || path.ends_with("clippy") + || path.ends_with("miri") + || path.ends_with("rustfmt") + { + cargo.env("LIBZ_SYS_STATIC", "1"); + } + if path.ends_with("cargo") { + features.push("all-static".to_string()); + } + } + + // clippy tests need to know about the stage sysroot. Set them consistently while building to + // avoid rebuilding when running tests. + cargo.env("SYSROOT", builder.sysroot(compiler)); + + // if tools are using lzma we want to force the build script to build its + // own copy + cargo.env("LZMA_API_STATIC", "1"); + + // CFG_RELEASE is needed by rustfmt (and possibly other tools) which + // import rustc-ap-rustc_attr which requires this to be set for the + // `#[cfg(version(...))]` attribute. 
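    // (For context: `#[cfg(version("..."))]` is the nightly attribute that gates items on the
    // compiler version, e.g. `#[cfg(version("1.75.0"))] fn newer_api() {}`; illustrative only.)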
+ cargo.env("CFG_RELEASE", builder.rust_release()); + cargo.env("CFG_RELEASE_CHANNEL", &builder.config.channel); + cargo.env("CFG_VERSION", builder.rust_version()); + cargo.env("CFG_RELEASE_NUM", &builder.version); + cargo.env("DOC_RUST_LANG_ORG_CHANNEL", builder.doc_rust_lang_org_channel()); + if let Some(ref ver_date) = builder.rust_info().commit_date() { + cargo.env("CFG_VER_DATE", ver_date); + } + if let Some(ref ver_hash) = builder.rust_info().sha() { + cargo.env("CFG_VER_HASH", ver_hash); + } + + let info = GitInfo::new(builder.config.omit_git_hash, &dir); + if let Some(sha) = info.sha() { + cargo.env("CFG_COMMIT_HASH", sha); + } + if let Some(sha_short) = info.sha_short() { + cargo.env("CFG_SHORT_COMMIT_HASH", sha_short); + } + if let Some(date) = info.commit_date() { + cargo.env("CFG_COMMIT_DATE", date); + } + if !features.is_empty() { + cargo.arg("--features").arg(&features.join(", ")); + } + cargo +} + +macro_rules! bootstrap_tool { + ($( + $name:ident, $path:expr, $tool_name:expr + $(,is_external_tool = $external:expr)* + $(,is_unstable_tool = $unstable:expr)* + $(,allow_features = $allow_features:expr)? + ; + )+) => { + #[derive(Copy, PartialEq, Eq, Clone)] + pub enum Tool { + $( + $name, + )+ + } + + impl<'a> Builder<'a> { + pub fn tool_exe(&self, tool: Tool) -> PathBuf { + match tool { + $(Tool::$name => + self.ensure($name { + compiler: self.compiler(0, self.config.build), + target: self.config.build, + }), + )+ + } + } + } + + $( + #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + pub struct $name { + pub compiler: Compiler, + pub target: TargetSelection, + } + + impl Step for $name { + type Output = PathBuf; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path($path) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure($name { + // snapshot compiler + compiler: run.builder.compiler(0, run.builder.config.build), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> PathBuf { + builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: $tool_name, + mode: if false $(|| $unstable)* { + // use in-tree libraries for unstable features + Mode::ToolStd + } else { + Mode::ToolBootstrap + }, + path: $path, + is_optional_tool: false, + source_type: if false $(|| $external)* { + SourceType::Submodule + } else { + SourceType::InTree + }, + extra_features: vec![], + allow_features: concat!($($allow_features)*), + }).expect("expected to build -- essential tool") + } + } + )+ + } +} + +bootstrap_tool!( + Rustbook, "src/tools/rustbook", "rustbook"; + UnstableBookGen, "src/tools/unstable-book-gen", "unstable-book-gen"; + Tidy, "src/tools/tidy", "tidy"; + Linkchecker, "src/tools/linkchecker", "linkchecker"; + CargoTest, "src/tools/cargotest", "cargotest"; + Compiletest, "src/tools/compiletest", "compiletest", is_unstable_tool = true, allow_features = "test"; + BuildManifest, "src/tools/build-manifest", "build-manifest"; + RemoteTestClient, "src/tools/remote-test-client", "remote-test-client"; + RustInstaller, "src/tools/rust-installer", "rust-installer"; + RustdocTheme, "src/tools/rustdoc-themes", "rustdoc-themes"; + ExpandYamlAnchors, "src/tools/expand-yaml-anchors", "expand-yaml-anchors"; + LintDocs, "src/tools/lint-docs", "lint-docs"; + JsonDocCk, "src/tools/jsondocck", "jsondocck"; + JsonDocLint, "src/tools/jsondoclint", "jsondoclint"; + HtmlChecker, "src/tools/html-checker", "html-checker"; + BumpStage0, "src/tools/bump-stage0", "bump-stage0"; + ReplaceVersionPlaceholder, "src/tools/replace-version-placeholder", 
"replace-version-placeholder"; + CollectLicenseMetadata, "src/tools/collect-license-metadata", "collect-license-metadata"; + GenerateCopyright, "src/tools/generate-copyright", "generate-copyright"; + SuggestTests, "src/tools/suggest-tests", "suggest-tests"; + GenerateWindowsSys, "src/tools/generate-windows-sys", "generate-windows-sys"; + RustdocGUITest, "src/tools/rustdoc-gui-test", "rustdoc-gui-test", is_unstable_tool = true, allow_features = "test"; + OptimizedDist, "src/tools/opt-dist", "opt-dist"; + CoverageDump, "src/tools/coverage-dump", "coverage-dump"; +); + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] +pub struct ErrorIndex { + pub compiler: Compiler, +} + +impl ErrorIndex { + pub fn command(builder: &Builder<'_>) -> Command { + // Error-index-generator links with the rustdoc library, so we need to add `rustc_lib_paths` + // for rustc_private and libLLVM.so, and `sysroot_lib` for libstd, etc. + let host = builder.config.build; + let compiler = builder.compiler_for(builder.top_stage, host, host); + let mut cmd = Command::new(builder.ensure(ErrorIndex { compiler })); + let mut dylib_paths = builder.rustc_lib_paths(compiler); + dylib_paths.push(PathBuf::from(&builder.sysroot_libdir(compiler, compiler.host))); + add_dylib_path(dylib_paths, &mut cmd); + cmd + } +} + +impl Step for ErrorIndex { + type Output = PathBuf; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/error_index_generator") + } + + fn make_run(run: RunConfig<'_>) { + // Compile the error-index in the same stage as rustdoc to avoid + // recompiling rustdoc twice if we can. + // + // NOTE: This `make_run` isn't used in normal situations, only if you + // manually build the tool with `x.py build + // src/tools/error-index-generator` which almost nobody does. + // Normally, `x.py test` or `x.py doc` will use the + // `ErrorIndex::command` function instead. + let compiler = + run.builder.compiler(run.builder.top_stage.saturating_sub(1), run.builder.config.build); + run.builder.ensure(ErrorIndex { compiler }); + } + + fn run(self, builder: &Builder<'_>) -> PathBuf { + builder + .ensure(ToolBuild { + compiler: self.compiler, + target: self.compiler.host, + tool: "error_index_generator", + mode: Mode::ToolRustc, + path: "src/tools/error_index_generator", + is_optional_tool: false, + source_type: SourceType::InTree, + extra_features: Vec::new(), + allow_features: "", + }) + .expect("expected to build -- essential tool") + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RemoteTestServer { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for RemoteTestServer { + type Output = PathBuf; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/remote-test-server") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RemoteTestServer { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> PathBuf { + builder + .ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "remote-test-server", + mode: Mode::ToolStd, + path: "src/tools/remote-test-server", + is_optional_tool: false, + source_type: SourceType::InTree, + extra_features: Vec::new(), + allow_features: "", + }) + .expect("expected to build -- essential tool") + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] +pub struct Rustdoc { + /// This should only ever be 0 or 2. 
+ /// We sometimes want to reference the "bootstrap" rustdoc, which is why this option is here. + pub compiler: Compiler, +} + +impl Step for Rustdoc { + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/rustdoc").path("src/librustdoc") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Rustdoc { + // NOTE: this is somewhat unique in that we actually want a *target* + // compiler here, because rustdoc *is* a compiler. We won't be using + // this as the compiler to build with, but rather this is "what + // compiler are we producing"? + compiler: run.builder.compiler(run.builder.top_stage, run.target), + }); + } + + fn run(self, builder: &Builder<'_>) -> PathBuf { + let target_compiler = self.compiler; + if target_compiler.stage == 0 { + if !target_compiler.is_snapshot(builder) { + panic!("rustdoc in stage 0 must be snapshot rustdoc"); + } + return builder.initial_rustc.with_file_name(exe("rustdoc", target_compiler.host)); + } + let target = target_compiler.host; + // Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise + // we'd have stageN/bin/rustc and stageN/bin/rustdoc be effectively different stage + // compilers, which isn't what we want. Rustdoc should be linked in the same way as the + // rustc compiler it's paired with, so it must be built with the previous stage compiler. + let build_compiler = builder.compiler(target_compiler.stage - 1, builder.config.build); + + // When using `download-rustc` and a stage0 build_compiler, copying rustc doesn't actually + // build stage0 libstd (because the libstd in sysroot has the wrong ABI). Explicitly build + // it. + builder.ensure(compile::Std::new(build_compiler, target_compiler.host)); + builder.ensure(compile::Rustc::new(build_compiler, target_compiler.host)); + // NOTE: this implies that `download-rustc` is pretty useless when compiling with the stage0 + // compiler, since you do just as much work. + if !builder.config.dry_run() && builder.download_rustc() && build_compiler.stage == 0 { + println!( + "WARNING: `download-rustc` does nothing when building stage1 tools; consider using `--stage 2` instead" + ); + } + + // The presence of `target_compiler` ensures that the necessary libraries (codegen backends, + // compiler libraries, ...) are built. Rustdoc does not require the presence of any + // libraries within sysroot_libdir (i.e., rustlib), though doctests may want it (since + // they'll be linked to those libraries). As such, don't explicitly `ensure` any additional + // libraries here. The intuition here is that If we've built a compiler, we should be able + // to build rustdoc. + // + let mut features = Vec::new(); + if builder.config.jemalloc { + features.push("jemalloc".to_string()); + } + + let mut cargo = prepare_tool_cargo( + builder, + build_compiler, + Mode::ToolRustc, + target, + "build", + "src/tools/rustdoc", + SourceType::InTree, + features.as_slice(), + ); + + if builder.config.rustc_parallel { + cargo.rustflag("--cfg=parallel_compiler"); + } + + let _guard = builder.msg_tool( + Mode::ToolRustc, + "rustdoc", + build_compiler.stage, + &self.compiler.host, + &target, + ); + builder.run(&mut cargo.into()); + + // Cargo adds a number of paths to the dylib search path on windows, which results in + // the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the "tool" + // rustdoc a different name. 
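        // (Concretely, the artifact built here is `rustdoc_tool_binary`, or
        // `rustdoc_tool_binary.exe` on Windows, and it is only renamed back to `rustdoc` when
        // copied into the sysroot's bin directory below.)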
+ let tool_rustdoc = builder + .cargo_out(build_compiler, Mode::ToolRustc, target) + .join(exe("rustdoc_tool_binary", target_compiler.host)); + + // don't create a stage0-sysroot/bin directory. + if target_compiler.stage > 0 { + let sysroot = builder.sysroot(target_compiler); + let bindir = sysroot.join("bin"); + t!(fs::create_dir_all(&bindir)); + let bin_rustdoc = bindir.join(exe("rustdoc", target_compiler.host)); + let _ = fs::remove_file(&bin_rustdoc); + builder.copy(&tool_rustdoc, &bin_rustdoc); + bin_rustdoc + } else { + tool_rustdoc + } + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Cargo { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for Cargo { + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.path("src/tools/cargo").default_condition( + builder.config.extended + && builder.config.tools.as_ref().map_or( + true, + // If `tools` is set, search list for this tool. + |tools| tools.iter().any(|tool| tool == "cargo"), + ), + ) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(Cargo { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> PathBuf { + let cargo_bin_path = builder + .ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "cargo", + mode: Mode::ToolRustc, + path: "src/tools/cargo", + is_optional_tool: false, + source_type: SourceType::Submodule, + extra_features: Vec::new(), + allow_features: "", + }) + .expect("expected to build -- essential tool"); + cargo_bin_path + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct LldWrapper { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for LldWrapper { + type Output = PathBuf; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() + } + + fn run(self, builder: &Builder<'_>) -> PathBuf { + let src_exe = builder + .ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "lld-wrapper", + mode: Mode::ToolStd, + path: "src/tools/lld-wrapper", + is_optional_tool: false, + source_type: SourceType::InTree, + extra_features: Vec::new(), + allow_features: "", + }) + .expect("expected to build -- essential tool"); + + src_exe + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustAnalyzer { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl RustAnalyzer { + pub const ALLOW_FEATURES: &'static str = + "proc_macro_internals,proc_macro_diagnostic,proc_macro_span,proc_macro_span_shrink"; +} + +impl Step for RustAnalyzer { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.path("src/tools/rust-analyzer").default_condition( + builder.config.extended + && builder + .config + .tools + .as_ref() + .map_or(true, |tools| tools.iter().any(|tool| tool == "rust-analyzer")), + ) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustAnalyzer { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "rust-analyzer", + mode: Mode::ToolStd, + path: "src/tools/rust-analyzer", + extra_features: 
vec!["rust-analyzer/in-rust-tree".to_owned()], + is_optional_tool: false, + source_type: SourceType::InTree, + allow_features: RustAnalyzer::ALLOW_FEATURES, + }) + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustAnalyzerProcMacroSrv { + pub compiler: Compiler, + pub target: TargetSelection, +} + +impl Step for RustAnalyzerProcMacroSrv { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + // Allow building `rust-analyzer-proc-macro-srv` both as part of the `rust-analyzer` and as a stand-alone tool. + run.path("src/tools/rust-analyzer") + .path("src/tools/rust-analyzer/crates/proc-macro-srv-cli") + .default_condition(builder.config.tools.as_ref().map_or(true, |tools| { + tools + .iter() + .any(|tool| tool == "rust-analyzer" || tool == "rust-analyzer-proc-macro-srv") + })) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(RustAnalyzerProcMacroSrv { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + target: run.target, + }); + } + + fn run(self, builder: &Builder<'_>) -> Option { + let path = builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "rust-analyzer-proc-macro-srv", + mode: Mode::ToolStd, + path: "src/tools/rust-analyzer/crates/proc-macro-srv-cli", + extra_features: vec!["sysroot-abi".to_owned()], + is_optional_tool: false, + source_type: SourceType::InTree, + allow_features: RustAnalyzer::ALLOW_FEATURES, + })?; + + // Copy `rust-analyzer-proc-macro-srv` to `/libexec/` + // so that r-a can use it. + let libexec_path = builder.sysroot(self.compiler).join("libexec"); + t!(fs::create_dir_all(&libexec_path)); + builder.copy(&path, &libexec_path.join("rust-analyzer-proc-macro-srv")); + + Some(path) + } +} + +macro_rules! tool_extended { + (($sel:ident, $builder:ident), + $($name:ident, + $path:expr, + $tool_name:expr, + stable = $stable:expr + $(,tool_std = $tool_std:literal)? + $(,allow_features = $allow_features:expr)? + $(,add_bins_to_sysroot = $add_bins_to_sysroot:expr)? + ;)+) => { + $( + #[derive(Debug, Clone, Hash, PartialEq, Eq)] + pub struct $name { + pub compiler: Compiler, + pub target: TargetSelection, + pub extra_features: Vec, + } + + impl Step for $name { + type Output = Option; + const DEFAULT: bool = true; // Overwritten below + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + let builder = run.builder; + run.path($path).default_condition( + builder.config.extended + && builder.config.tools.as_ref().map_or( + // By default, on nightly/dev enable all tools, else only + // build stable tools. + $stable || builder.build.unstable_features(), + // If `tools` is set, search list for this tool. + |tools| { + tools.iter().any(|tool| match tool.as_ref() { + "clippy" => $tool_name == "clippy-driver", + x => $tool_name == x, + }) + }), + ) + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure($name { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), + target: run.target, + extra_features: Vec::new(), + }); + } + + #[allow(unused_mut)] + fn run(mut $sel, $builder: &Builder<'_>) -> Option { + let tool = $builder.ensure(ToolBuild { + compiler: $sel.compiler, + target: $sel.target, + tool: $tool_name, + mode: if false $(|| $tool_std)? 
{ Mode::ToolStd } else { Mode::ToolRustc }, + path: $path, + extra_features: $sel.extra_features, + is_optional_tool: true, + source_type: SourceType::InTree, + allow_features: concat!($($allow_features)*), + })?; + + if (false $(|| !$add_bins_to_sysroot.is_empty())?) && $sel.compiler.stage > 0 { + let bindir = $builder.sysroot($sel.compiler).join("bin"); + t!(fs::create_dir_all(&bindir)); + + #[allow(unused_variables)] + let tools_out = $builder + .cargo_out($sel.compiler, Mode::ToolRustc, $sel.target); + + $(for add_bin in $add_bins_to_sysroot { + let bin_source = tools_out.join(exe(add_bin, $sel.target)); + let bin_destination = bindir.join(exe(add_bin, $sel.compiler.host)); + $builder.copy(&bin_source, &bin_destination); + })? + + let tool = bindir.join(exe($tool_name, $sel.compiler.host)); + Some(tool) + } else { + Some(tool) + } + } + } + )+ + } +} + +// NOTE: tools need to be also added to `Builder::get_step_descriptions` in `builder.rs` +// to make `./x.py build ` work. +// NOTE: Most submodule updates for tools are handled by bootstrap.py, since they're needed just to +// invoke Cargo to build bootstrap. See the comment there for more details. +tool_extended!((self, builder), + Cargofmt, "src/tools/rustfmt", "cargo-fmt", stable=true; + CargoClippy, "src/tools/clippy", "cargo-clippy", stable=true; + Clippy, "src/tools/clippy", "clippy-driver", stable=true, add_bins_to_sysroot = ["clippy-driver", "cargo-clippy"]; + Miri, "src/tools/miri", "miri", stable=false, add_bins_to_sysroot = ["miri"]; + CargoMiri, "src/tools/miri/cargo-miri", "cargo-miri", stable=true, add_bins_to_sysroot = ["cargo-miri"]; + // FIXME: tool_std is not quite right, we shouldn't allow nightly features. + // But `builder.cargo` doesn't know how to handle ToolBootstrap in stages other than 0, + // and this is close enough for now. + Rls, "src/tools/rls", "rls", stable=true, tool_std=true; + RustDemangler, "src/tools/rust-demangler", "rust-demangler", stable=false, tool_std=true; + Rustfmt, "src/tools/rustfmt", "rustfmt", stable=true, add_bins_to_sysroot = ["rustfmt", "cargo-fmt"]; +); + +impl<'a> Builder<'a> { + /// Gets a `Command` which is ready to run `tool` in `stage` built for + /// `host`. + pub fn tool_cmd(&self, tool: Tool) -> Command { + let mut cmd = Command::new(self.tool_exe(tool)); + let compiler = self.compiler(0, self.config.build); + let host = &compiler.host; + // Prepares the `cmd` provided to be able to run the `compiler` provided. + // + // Notably this munges the dynamic library lookup path to point to the + // right location to run `compiler`. + let mut lib_paths: Vec = vec![ + self.build.rustc_snapshot_libdir(), + self.cargo_out(compiler, Mode::ToolBootstrap, *host).join("deps"), + ]; + + // On MSVC a tool may invoke a C compiler (e.g., compiletest in run-make + // mode) and that C compiler may need some extra PATH modification. Do + // so here. + if compiler.host.contains("msvc") { + let curpaths = env::var_os("PATH").unwrap_or_default(); + let curpaths = env::split_paths(&curpaths).collect::>(); + for &(ref k, ref v) in self.cc.borrow()[&compiler.host].env() { + if k != "PATH" { + continue; + } + for path in env::split_paths(v) { + if !curpaths.contains(&path) { + lib_paths.push(path); + } + } + } + } + + add_dylib_path(lib_paths, &mut cmd); + + // Provide a RUSTC for this command to use. 
+ cmd.env("RUSTC", &self.initial_rustc); + + cmd + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/toolstate.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/toolstate.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/toolstate.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/build_steps/toolstate.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,478 @@ +use crate::core::builder::{Builder, RunConfig, ShouldRun, Step}; +use crate::utils::helpers::t; +use serde_derive::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::env; +use std::fmt; +use std::fs; +use std::io::{Seek, SeekFrom}; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::time; + +// Each cycle is 42 days long (6 weeks); the last week is 35..=42 then. +const BETA_WEEK_START: u64 = 35; + +#[cfg(target_os = "linux")] +const OS: Option<&str> = Some("linux"); + +#[cfg(windows)] +const OS: Option<&str> = Some("windows"); + +#[cfg(all(not(target_os = "linux"), not(windows)))] +const OS: Option<&str> = None; + +type ToolstateData = HashMap, ToolState>; + +#[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, PartialOrd)] +#[serde(rename_all = "kebab-case")] +/// Whether a tool can be compiled, tested or neither +pub enum ToolState { + /// The tool compiles successfully, but the test suite fails + TestFail = 1, + /// The tool compiles successfully and its test suite passes + TestPass = 2, + /// The tool can't even be compiled + BuildFail = 0, +} + +impl fmt::Display for ToolState { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "{}", + match self { + ToolState::TestFail => "test-fail", + ToolState::TestPass => "test-pass", + ToolState::BuildFail => "build-fail", + } + ) + } +} + +/// Number of days after the last promotion of beta. +/// Its value is 41 on the Tuesday where "Promote master to beta (T-2)" happens. +/// The Wednesday after this has value 0. +/// We track this value to prevent regressing tools in the last week of the 6-week cycle. +fn days_since_beta_promotion() -> u64 { + let since_epoch = t!(time::SystemTime::UNIX_EPOCH.elapsed()); + (since_epoch.as_secs() / 86400 - 20) % 42 +} + +// These tools must test-pass on the beta/stable channels. +// +// On the nightly channel, their build step must be attempted, but they may not +// be able to build successfully. +static STABLE_TOOLS: &[(&str, &str)] = &[ + ("book", "src/doc/book"), + ("nomicon", "src/doc/nomicon"), + ("reference", "src/doc/reference"), + ("rust-by-example", "src/doc/rust-by-example"), + ("edition-guide", "src/doc/edition-guide"), +]; + +// These tools are permitted to not build on the beta/stable channels. +// +// We do require that we checked whether they build or not on the tools builder, +// though, as otherwise we will be unable to file an issue if they start +// failing. 
+static NIGHTLY_TOOLS: &[(&str, &str)] = &[ + ("embedded-book", "src/doc/embedded-book"), + // ("rustc-dev-guide", "src/doc/rustc-dev-guide"), +]; + +fn print_error(tool: &str, submodule: &str) { + eprintln!(); + eprintln!("We detected that this PR updated '{tool}', but its tests failed."); + eprintln!(); + eprintln!("If you do intend to update '{tool}', please check the error messages above and"); + eprintln!("commit another update."); + eprintln!(); + eprintln!("If you do NOT intend to update '{tool}', please ensure you did not accidentally"); + eprintln!("change the submodule at '{submodule}'. You may ask your reviewer for the"); + eprintln!("proper steps."); + crate::exit!(3); +} + +fn check_changed_files(toolstates: &HashMap, ToolState>) { + // Changed files + let output = std::process::Command::new("git") + .arg("diff") + .arg("--name-status") + .arg("HEAD") + .arg("HEAD^") + .output(); + let output = match output { + Ok(o) => o, + Err(e) => { + eprintln!("Failed to get changed files: {e:?}"); + crate::exit!(1); + } + }; + + let output = t!(String::from_utf8(output.stdout)); + + for (tool, submodule) in STABLE_TOOLS.iter().chain(NIGHTLY_TOOLS.iter()) { + let changed = output.lines().any(|l| l.starts_with('M') && l.ends_with(submodule)); + eprintln!("Verifying status of {tool}..."); + if !changed { + continue; + } + + eprintln!("This PR updated '{submodule}', verifying if status is 'test-pass'..."); + if toolstates[*tool] != ToolState::TestPass { + print_error(tool, submodule); + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct ToolStateCheck; + +impl Step for ToolStateCheck { + type Output = (); + + /// Checks tool state status. + /// + /// This is intended to be used in the `checktools.sh` script. To use + /// this, set `save-toolstates` in `config.toml` so that tool status will + /// be saved to a JSON file. Then, run `x.py test --no-fail-fast` for all + /// of the tools to populate the JSON file. After that is done, this + /// command can be run to check for any status failures, and exits with an + /// error if there are any. + /// + /// This also handles publishing the results to the `history` directory of + /// the toolstate repo + /// if the env var `TOOLSTATE_PUBLISH` is set. Note that there is a + /// *separate* step of updating the `latest.json` file and creating GitHub + /// issues and comments in `src/ci/publish_toolstate.sh`, which is only + /// performed on master. (The shell/python code is intended to be migrated + /// here eventually.) + /// + /// The rules for failure are: + /// * If the PR modifies a tool, the status must be test-pass. + /// NOTE: There is intent to change this, see + /// . + /// * All "stable" tools must be test-pass on the stable or beta branches. + /// * During beta promotion week, a PR is not allowed to "regress" a + /// stable tool. That is, the status is not allowed to get worse + /// (test-pass to test-fail or build-fail). 
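    /// A typical invocation (illustrative) is to set `rust.save-toolstates` in `config.toml`,
    /// run `./x.py test --no-fail-fast` for the tools, and then run:
    ///
    ///     ./x.py test check-tools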
+ fn run(self, builder: &Builder<'_>) { + if builder.config.dry_run() { + return; + } + + let days_since_beta_promotion = days_since_beta_promotion(); + let in_beta_week = days_since_beta_promotion >= BETA_WEEK_START; + let is_nightly = !(builder.config.channel == "beta" || builder.config.channel == "stable"); + let toolstates = builder.toolstates(); + + let mut did_error = false; + + for (tool, _) in STABLE_TOOLS.iter().chain(NIGHTLY_TOOLS.iter()) { + if !toolstates.contains_key(*tool) { + did_error = true; + eprintln!("ERROR: Tool `{tool}` was not recorded in tool state."); + } + } + + if did_error { + crate::exit!(1); + } + + check_changed_files(&toolstates); + checkout_toolstate_repo(); + let old_toolstate = read_old_toolstate(); + + for (tool, _) in STABLE_TOOLS.iter() { + let state = toolstates[*tool]; + + if state != ToolState::TestPass { + if !is_nightly { + did_error = true; + eprintln!("ERROR: Tool `{tool}` should be test-pass but is {state}"); + } else if in_beta_week { + let old_state = old_toolstate + .iter() + .find(|ts| ts.tool == *tool) + .expect("latest.json missing tool") + .state(); + if state < old_state { + did_error = true; + eprintln!( + "ERROR: Tool `{tool}` has regressed from {old_state} to {state} during beta week." + ); + } else { + // This warning only appears in the logs, which most + // people won't read. It's mostly here for testing and + // debugging. + eprintln!( + "WARNING: Tool `{tool}` is not test-pass (is `{state}`), \ + this should be fixed before beta is branched." + ); + } + } + // `publish_toolstate.py` is responsible for updating + // `latest.json` and creating comments/issues warning people + // if there is a regression. That all happens in a separate CI + // job on the master branch once the PR has passed all tests + // on the `auto` branch. + } + } + + if did_error { + crate::exit!(1); + } + + if builder.config.channel == "nightly" && env::var_os("TOOLSTATE_PUBLISH").is_some() { + commit_toolstate_change(&toolstates); + } + } + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.alias("check-tools") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(ToolStateCheck); + } +} + +impl Builder<'_> { + fn toolstates(&self) -> HashMap, ToolState> { + if let Some(ref path) = self.config.save_toolstates { + if let Some(parent) = path.parent() { + // Ensure the parent directory always exists + t!(std::fs::create_dir_all(parent)); + } + let mut file = + t!(fs::OpenOptions::new().create(true).write(true).read(true).open(path)); + + serde_json::from_reader(&mut file).unwrap_or_default() + } else { + Default::default() + } + } + + /// Updates the actual toolstate of a tool. + /// + /// The toolstates are saved to the file specified by the key + /// `rust.save-toolstates` in `config.toml`. If unspecified, nothing will be + /// done. The file is updated immediately after this function completes. + pub fn save_toolstate(&self, tool: &str, state: ToolState) { + use std::io::Write; + + // If we're in a dry run setting we don't want to save toolstates as + // that means if we e.g. panic down the line it'll look like we tested + // everything (but we actually haven't). + if self.config.dry_run() { + return; + } + // Toolstate isn't tracked for clippy or rustfmt, but since most tools do, we avoid checking + // in all the places we could save toolstate and just do so here. 
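        // (For reference, the file written below is a flat JSON map from tool name to state,
        // e.g. `{"book":"test-pass","miri":"test-fail"}`; illustrative contents.)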
+ if tool == "clippy-driver" || tool == "rustfmt" { + return; + } + if let Some(ref path) = self.config.save_toolstates { + if let Some(parent) = path.parent() { + // Ensure the parent directory always exists + t!(std::fs::create_dir_all(parent)); + } + let mut file = + t!(fs::OpenOptions::new().create(true).read(true).write(true).open(path)); + + let mut current_toolstates: HashMap, ToolState> = + serde_json::from_reader(&mut file).unwrap_or_default(); + current_toolstates.insert(tool.into(), state); + t!(file.seek(SeekFrom::Start(0))); + t!(file.set_len(0)); + t!(serde_json::to_writer(&file, ¤t_toolstates)); + t!(writeln!(file)); // make sure this ends in a newline + } + } +} + +fn toolstate_repo() -> String { + env::var("TOOLSTATE_REPO") + .unwrap_or_else(|_| "https://github.com/rust-lang-nursery/rust-toolstate.git".to_string()) +} + +/// Directory where the toolstate repo is checked out. +const TOOLSTATE_DIR: &str = "rust-toolstate"; + +/// Checks out the toolstate repo into `TOOLSTATE_DIR`. +fn checkout_toolstate_repo() { + if let Ok(token) = env::var("TOOLSTATE_REPO_ACCESS_TOKEN") { + prepare_toolstate_config(&token); + } + if Path::new(TOOLSTATE_DIR).exists() { + eprintln!("Cleaning old toolstate directory..."); + t!(fs::remove_dir_all(TOOLSTATE_DIR)); + } + + let status = Command::new("git") + .arg("clone") + .arg("--depth=1") + .arg(toolstate_repo()) + .arg(TOOLSTATE_DIR) + .status(); + let success = match status { + Ok(s) => s.success(), + Err(_) => false, + }; + if !success { + panic!("git clone unsuccessful (status: {status:?})"); + } +} + +/// Sets up config and authentication for modifying the toolstate repo. +fn prepare_toolstate_config(token: &str) { + fn git_config(key: &str, value: &str) { + let status = Command::new("git").arg("config").arg("--global").arg(key).arg(value).status(); + let success = match status { + Ok(s) => s.success(), + Err(_) => false, + }; + if !success { + panic!("git config key={key} value={value} failed (status: {status:?})"); + } + } + + // If changing anything here, then please check that `src/ci/publish_toolstate.sh` is up to date + // as well. + git_config("user.email", "7378925+rust-toolstate-update@users.noreply.github.com"); + git_config("user.name", "Rust Toolstate Update"); + git_config("credential.helper", "store"); + + let credential = format!("https://{token}:x-oauth-basic@github.com\n",); + let git_credential_path = PathBuf::from(t!(env::var("HOME"))).join(".git-credentials"); + t!(fs::write(&git_credential_path, credential)); +} + +/// Reads the latest toolstate from the toolstate repo. +fn read_old_toolstate() -> Vec { + let latest_path = Path::new(TOOLSTATE_DIR).join("_data").join("latest.json"); + let old_toolstate = t!(fs::read(latest_path)); + t!(serde_json::from_slice(&old_toolstate)) +} + +/// This function `commit_toolstate_change` provides functionality for pushing a change +/// to the `rust-toolstate` repository. +/// +/// The function relies on a GitHub bot user, which should have a Personal access +/// token defined in the environment variable $TOOLSTATE_REPO_ACCESS_TOKEN. If for +/// some reason you need to change the token, please update the Azure Pipelines +/// variable group. +/// +/// 1. Generate a new Personal access token: +/// +/// * Login to the bot account, and go to Settings -> Developer settings -> +/// Personal access tokens +/// * Click "Generate new token" +/// * Enable the "public_repo" permission, then click "Generate token" +/// * Copy the generated token (should be a 40-digit hexadecimal number). 
+/// Save it somewhere secure, as the token would be gone once you leave +/// the page. +/// +/// 2. Update the variable group in Azure Pipelines +/// +/// * Ping a member of the infrastructure team to do this. +/// +/// 4. Replace the email address below if the bot account identity is changed +/// +/// * See +/// if a private email by GitHub is wanted. +fn commit_toolstate_change(current_toolstate: &ToolstateData) { + let message = format!("({} CI update)", OS.expect("linux/windows only")); + let mut success = false; + for _ in 1..=5 { + // Upload the test results (the new commit-to-toolstate mapping) to the toolstate repo. + // This does *not* change the "current toolstate"; that only happens post-landing + // via `src/ci/docker/publish_toolstate.sh`. + publish_test_results(¤t_toolstate); + + // `git commit` failing means nothing to commit. + let status = t!(Command::new("git") + .current_dir(TOOLSTATE_DIR) + .arg("commit") + .arg("-a") + .arg("-m") + .arg(&message) + .status()); + if !status.success() { + success = true; + break; + } + + let status = t!(Command::new("git") + .current_dir(TOOLSTATE_DIR) + .arg("push") + .arg("origin") + .arg("master") + .status()); + // If we successfully push, exit. + if status.success() { + success = true; + break; + } + eprintln!("Sleeping for 3 seconds before retrying push"); + std::thread::sleep(std::time::Duration::from_secs(3)); + let status = t!(Command::new("git") + .current_dir(TOOLSTATE_DIR) + .arg("fetch") + .arg("origin") + .arg("master") + .status()); + assert!(status.success()); + let status = t!(Command::new("git") + .current_dir(TOOLSTATE_DIR) + .arg("reset") + .arg("--hard") + .arg("origin/master") + .status()); + assert!(status.success()); + } + + if !success { + panic!("Failed to update toolstate repository with new data"); + } +} + +/// Updates the "history" files with the latest results. +/// +/// These results will later be promoted to `latest.json` by the +/// `publish_toolstate.py` script if the PR passes all tests and is merged to +/// master. 
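/// Each line appended to `history/<os>.tsv` has the shape (illustrative):
///
///     <commit sha>\t{"book":"test-pass","nomicon":"test-pass",...}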
+fn publish_test_results(current_toolstate: &ToolstateData) { + let commit = t!(std::process::Command::new("git").arg("rev-parse").arg("HEAD").output()); + let commit = t!(String::from_utf8(commit.stdout)); + + let toolstate_serialized = t!(serde_json::to_string(¤t_toolstate)); + + let history_path = Path::new(TOOLSTATE_DIR) + .join("history") + .join(format!("{}.tsv", OS.expect("linux/windows only"))); + let mut file = t!(fs::read_to_string(&history_path)); + let end_of_first_line = file.find('\n').unwrap(); + file.insert_str(end_of_first_line, &format!("\n{}\t{}", commit.trim(), toolstate_serialized)); + t!(fs::write(&history_path, file)); +} + +#[derive(Debug, Deserialize)] +struct RepoState { + tool: String, + windows: ToolState, + linux: ToolState, +} + +impl RepoState { + fn state(&self) -> ToolState { + if cfg!(target_os = "linux") { + self.linux + } else if cfg!(windows) { + self.windows + } else { + unimplemented!() + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/builder.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/builder.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/builder.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/builder.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,2392 @@ +use std::any::{type_name, Any}; +use std::cell::{Cell, RefCell}; +use std::collections::BTreeSet; +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fmt::{Debug, Write}; +use std::fs::{self, File}; +use std::hash::Hash; +use std::io::{BufRead, BufReader}; +use std::ops::Deref; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::time::{Duration, Instant}; + +use crate::core::build_steps::llvm; +use crate::core::build_steps::tool::{self, SourceType}; +use crate::core::build_steps::{check, clean, compile, dist, doc, install, run, setup, test}; +use crate::core::config::flags::{Color, Subcommand}; +use crate::core::config::{DryRun, SplitDebuginfo, TargetSelection}; +use crate::utils::cache::{Cache, Interned, INTERNER}; +use crate::utils::helpers::{self, add_dylib_path, add_link_lib_path, exe, libdir, output, t}; +use crate::Crate; +use crate::EXTRA_CHECK_CFGS; +use crate::{Build, CLang, DocTests, GitRepo, Mode}; + +pub use crate::Compiler; +// FIXME: +// - use std::lazy for `Lazy` +// - use std::cell for `OnceCell` +// Once they get stabilized and reach beta. +use clap::ValueEnum; +use once_cell::sync::{Lazy, OnceCell}; + +#[cfg(test)] +#[path = "../tests/builder.rs"] +mod tests; + +pub struct Builder<'a> { + pub build: &'a Build, + pub top_stage: u32, + pub kind: Kind, + cache: Cache, + stack: RefCell>>, + time_spent_on_dependencies: Cell, + pub paths: Vec, +} + +impl<'a> Deref for Builder<'a> { + type Target = Build; + + fn deref(&self) -> &Self::Target { + self.build + } +} + +pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash { + /// `PathBuf` when directories are created or to return a `Compiler` once + /// it's been assembled. + type Output: Clone; + + /// Whether this step is run by default as part of its respective phase. + /// `true` here can still be overwritten by `should_run` calling `default_condition`. + const DEFAULT: bool = false; + + /// If true, then this rule should be skipped if --target was specified, but --host was not + const ONLY_HOSTS: bool = false; + + /// Primary function to execute this rule. Can call `builder.ensure()` + /// with other steps to run those. 
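    /// A common pattern (an illustrative sketch, not a requirement) is to ensure prerequisite
    /// steps before doing this step's own work:
    ///
    ///     fn run(self, builder: &Builder<'_>) {
    ///         builder.ensure(compile::Std::new(self.compiler, self.target));
    ///         // ...perform the step's actual work...
    ///     }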
+ fn run(self, builder: &Builder<'_>) -> Self::Output; + + /// When bootstrap is passed a set of paths, this controls whether this rule + /// will execute. However, it does not get called in a "default" context + /// when we are not passed any paths; in that case, `make_run` is called + /// directly. + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_>; + + /// Builds up a "root" rule, either as a default rule or from a path passed + /// to us. + /// + /// When path is `None`, we are executing in a context where no paths were + /// passed. When `./x.py build` is run, for example, this rule could get + /// called if it is in the correct list below with a path of `None`. + fn make_run(_run: RunConfig<'_>) { + // It is reasonable to not have an implementation of make_run for rules + // who do not want to get called from the root context. This means that + // they are likely dependencies (e.g., sysroot creation) or similar, and + // as such calling them from ./x.py isn't logical. + unimplemented!() + } +} + +pub struct RunConfig<'a> { + pub builder: &'a Builder<'a>, + pub target: TargetSelection, + pub paths: Vec, +} + +impl RunConfig<'_> { + pub fn build_triple(&self) -> TargetSelection { + self.builder.build.build + } + + /// Return a list of crate names selected by `run.paths`. + #[track_caller] + pub fn cargo_crates_in_set(&self) -> Interned> { + let mut crates = Vec::new(); + for krate in &self.paths { + let path = krate.assert_single_path(); + let Some(crate_name) = self.builder.crate_paths.get(&path.path) else { + panic!("missing crate for path {}", path.path.display()) + }; + crates.push(crate_name.to_string()); + } + INTERNER.intern_list(crates) + } + + /// Given an `alias` selected by the `Step` and the paths passed on the command line, + /// return a list of the crates that should be built. + /// + /// Normally, people will pass *just* `library` if they pass it. + /// But it's possible (although strange) to pass something like `library std core`. + /// Build all crates anyway, as if they hadn't passed the other args. + pub fn make_run_crates(&self, alias: Alias) -> Interned> { + let has_alias = + self.paths.iter().any(|set| set.assert_single_path().path.ends_with(alias.as_str())); + if !has_alias { + return self.cargo_crates_in_set(); + } + + let crates = match alias { + Alias::Library => self.builder.in_tree_crates("sysroot", Some(self.target)), + Alias::Compiler => self.builder.in_tree_crates("rustc-main", Some(self.target)), + }; + + let crate_names = crates.into_iter().map(|krate| krate.name.to_string()).collect(); + INTERNER.intern_list(crate_names) + } +} + +#[derive(Debug, Copy, Clone)] +pub enum Alias { + Library, + Compiler, +} + +impl Alias { + fn as_str(self) -> &'static str { + match self { + Alias::Library => "library", + Alias::Compiler => "compiler", + } + } +} + +/// A description of the crates in this set, suitable for passing to `builder.info`. +/// +/// `crates` should be generated by [`RunConfig::cargo_crates_in_set`]. +pub fn crate_description(crates: &[impl AsRef]) -> String { + if crates.is_empty() { + return "".into(); + } + + let mut descr = String::from(" {"); + descr.push_str(crates[0].as_ref()); + for krate in &crates[1..] 
{ + descr.push_str(", "); + descr.push_str(krate.as_ref()); + } + descr.push('}'); + descr +} + +struct StepDescription { + default: bool, + only_hosts: bool, + should_run: fn(ShouldRun<'_>) -> ShouldRun<'_>, + make_run: fn(RunConfig<'_>), + name: &'static str, + kind: Kind, +} + +#[derive(Clone, PartialOrd, Ord, PartialEq, Eq)] +pub struct TaskPath { + pub path: PathBuf, + pub kind: Option, +} + +impl Debug for TaskPath { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if let Some(kind) = &self.kind { + write!(f, "{}::", kind.as_str())?; + } + write!(f, "{}", self.path.display()) + } +} + +/// Collection of paths used to match a task rule. +#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)] +pub enum PathSet { + /// A collection of individual paths or aliases. + /// + /// These are generally matched as a path suffix. For example, a + /// command-line value of `std` will match if `library/std` is in the + /// set. + /// + /// NOTE: the paths within a set should always be aliases of one another. + /// For example, `src/librustdoc` and `src/tools/rustdoc` should be in the same set, + /// but `library/core` and `library/std` generally should not, unless there's no way (for that Step) + /// to build them separately. + Set(BTreeSet), + /// A "suite" of paths. + /// + /// These can match as a path suffix (like `Set`), or as a prefix. For + /// example, a command-line value of `tests/ui/abi/variadic-ffi.rs` + /// will match `tests/ui`. A command-line value of `ui` would also + /// match `tests/ui`. + Suite(TaskPath), +} + +impl PathSet { + fn empty() -> PathSet { + PathSet::Set(BTreeSet::new()) + } + + fn one>(path: P, kind: Kind) -> PathSet { + let mut set = BTreeSet::new(); + set.insert(TaskPath { path: path.into(), kind: Some(kind) }); + PathSet::Set(set) + } + + fn has(&self, needle: &Path, module: Kind) -> bool { + match self { + PathSet::Set(set) => set.iter().any(|p| Self::check(p, needle, module)), + PathSet::Suite(suite) => Self::check(suite, needle, module), + } + } + + // internal use only + fn check(p: &TaskPath, needle: &Path, module: Kind) -> bool { + if let Some(p_kind) = &p.kind { + p.path.ends_with(needle) && *p_kind == module + } else { + p.path.ends_with(needle) + } + } + + /// Return all `TaskPath`s in `Self` that contain any of the `needles`, removing the + /// matched needles. + /// + /// This is used for `StepDescription::krate`, which passes all matching crates at once to + /// `Step::make_run`, rather than calling it many times with a single crate. + /// See `tests.rs` for examples. + fn intersection_removing_matches(&self, needles: &mut Vec<&Path>, module: Kind) -> PathSet { + let mut check = |p| { + for (i, n) in needles.iter().enumerate() { + let matched = Self::check(p, n, module); + if matched { + needles.remove(i); + return true; + } + } + false + }; + match self { + PathSet::Set(set) => PathSet::Set(set.iter().filter(|&p| check(p)).cloned().collect()), + PathSet::Suite(suite) => { + if check(suite) { + self.clone() + } else { + PathSet::empty() + } + } + } + } + + /// A convenience wrapper for Steps which know they have no aliases and all their sets contain only a single path. + /// + /// This can be used with [`ShouldRun::crate_or_deps`], [`ShouldRun::path`], or [`ShouldRun::alias`]. 
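    /// For example (illustrative), a set registered via `run.path("src/tools/tidy")` holds a
    /// single `TaskPath`, which this helper returns; calling it on a set with several paths
    /// (or on a `Suite`) panics.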
+ #[track_caller] + pub fn assert_single_path(&self) -> &TaskPath { + match self { + PathSet::Set(set) => { + assert_eq!(set.len(), 1, "called assert_single_path on multiple paths"); + set.iter().next().unwrap() + } + PathSet::Suite(_) => unreachable!("called assert_single_path on a Suite path"), + } + } +} + +impl StepDescription { + fn from(kind: Kind) -> StepDescription { + StepDescription { + default: S::DEFAULT, + only_hosts: S::ONLY_HOSTS, + should_run: S::should_run, + make_run: S::make_run, + name: std::any::type_name::(), + kind, + } + } + + fn maybe_run(&self, builder: &Builder<'_>, mut pathsets: Vec) { + pathsets.retain(|set| !self.is_excluded(builder, set)); + + if pathsets.is_empty() { + return; + } + + // Determine the targets participating in this rule. + let targets = if self.only_hosts { &builder.hosts } else { &builder.targets }; + + for target in targets { + let run = RunConfig { builder, paths: pathsets.clone(), target: *target }; + (self.make_run)(run); + } + } + + fn is_excluded(&self, builder: &Builder<'_>, pathset: &PathSet) -> bool { + if builder.config.skip.iter().any(|e| pathset.has(&e, builder.kind)) { + if !matches!(builder.config.dry_run, DryRun::SelfCheck) { + println!("Skipping {pathset:?} because it is excluded"); + } + return true; + } + + if !builder.config.skip.is_empty() && !matches!(builder.config.dry_run, DryRun::SelfCheck) { + builder.verbose(&format!( + "{:?} not skipped for {:?} -- not in {:?}", + pathset, self.name, builder.config.skip + )); + } + false + } + + fn run(v: &[StepDescription], builder: &Builder<'_>, paths: &[PathBuf]) { + let should_runs = v + .iter() + .map(|desc| (desc.should_run)(ShouldRun::new(builder, desc.kind))) + .collect::>(); + + // sanity checks on rules + for (desc, should_run) in v.iter().zip(&should_runs) { + assert!( + !should_run.paths.is_empty(), + "{:?} should have at least one pathset", + desc.name + ); + } + + if paths.is_empty() || builder.config.include_default_paths { + for (desc, should_run) in v.iter().zip(&should_runs) { + if desc.default && should_run.is_really_default() { + desc.maybe_run(builder, should_run.paths.iter().cloned().collect()); + } + } + } + + // strip CurDir prefix if present + let mut paths: Vec<_> = + paths.into_iter().map(|p| p.strip_prefix(".").unwrap_or(p)).collect(); + + // Handle all test suite paths. + // (This is separate from the loop below to avoid having to handle multiple paths in `is_suite_path` somehow.) + paths.retain(|path| { + for (desc, should_run) in v.iter().zip(&should_runs) { + if let Some(suite) = should_run.is_suite_path(&path) { + desc.maybe_run(builder, vec![suite.clone()]); + return false; + } + } + true + }); + + if paths.is_empty() { + return; + } + + // Handle all PathSets. 
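+        // Editor's note (illustrative, not part of the upstream change): for a
+        // hypothetical invocation such as `./x.py build library/std src/tools/cargo`,
+        // neither argument is a suite path, so both reach this loop. Each
+        // StepDescription claims whatever paths its ShouldRun matches (removing
+        // them from `paths`), and anything still left over afterwards triggers
+        // the "no rules matched" error below.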
+ for (desc, should_run) in v.iter().zip(&should_runs) { + let pathsets = should_run.pathset_for_paths_removing_matches(&mut paths, desc.kind); + if !pathsets.is_empty() { + desc.maybe_run(builder, pathsets); + } + } + + if !paths.is_empty() { + eprintln!("ERROR: no `{}` rules matched {:?}", builder.kind.as_str(), paths,); + eprintln!( + "HELP: run `x.py {} --help --verbose` to show a list of available paths", + builder.kind.as_str() + ); + eprintln!( + "NOTE: if you are adding a new Step to bootstrap itself, make sure you register it with `describe!`" + ); + crate::exit!(1); + } + } +} + +enum ReallyDefault<'a> { + Bool(bool), + Lazy(Lazy bool + 'a>>), +} + +pub struct ShouldRun<'a> { + pub builder: &'a Builder<'a>, + kind: Kind, + + // use a BTreeSet to maintain sort order + paths: BTreeSet, + + // If this is a default rule, this is an additional constraint placed on + // its run. Generally something like compiler docs being enabled. + is_really_default: ReallyDefault<'a>, +} + +impl<'a> ShouldRun<'a> { + fn new(builder: &'a Builder<'_>, kind: Kind) -> ShouldRun<'a> { + ShouldRun { + builder, + kind, + paths: BTreeSet::new(), + is_really_default: ReallyDefault::Bool(true), // by default no additional conditions + } + } + + pub fn default_condition(mut self, cond: bool) -> Self { + self.is_really_default = ReallyDefault::Bool(cond); + self + } + + pub fn lazy_default_condition(mut self, lazy_cond: Box bool + 'a>) -> Self { + self.is_really_default = ReallyDefault::Lazy(Lazy::new(lazy_cond)); + self + } + + pub fn is_really_default(&self) -> bool { + match &self.is_really_default { + ReallyDefault::Bool(val) => *val, + ReallyDefault::Lazy(lazy) => *lazy.deref(), + } + } + + /// Indicates it should run if the command-line selects the given crate or + /// any of its (local) dependencies. + /// + /// `make_run` will be called a single time with all matching command-line paths. + pub fn crate_or_deps(self, name: &str) -> Self { + let crates = self.builder.in_tree_crates(name, None); + self.crates(crates) + } + + /// Indicates it should run if the command-line selects any of the given crates. + /// + /// `make_run` will be called a single time with all matching command-line paths. + /// + /// Prefer [`ShouldRun::crate_or_deps`] to this function where possible. + pub(crate) fn crates(mut self, crates: Vec<&Crate>) -> Self { + for krate in crates { + let path = krate.local_path(self.builder); + self.paths.insert(PathSet::one(path, self.kind)); + } + self + } + + // single alias, which does not correspond to any on-disk path + pub fn alias(mut self, alias: &str) -> Self { + // exceptional case for `Kind::Setup` because its `library` + // and `compiler` options would otherwise naively match with + // `compiler` and `library` folders respectively. + assert!( + self.kind == Kind::Setup || !self.builder.src.join(alias).exists(), + "use `builder.path()` for real paths: {alias}" + ); + self.paths.insert(PathSet::Set( + std::iter::once(TaskPath { path: alias.into(), kind: Some(self.kind) }).collect(), + )); + self + } + + // single, non-aliased path + pub fn path(self, path: &str) -> Self { + self.paths(&[path]) + } + + /// Multiple aliases for the same job. + /// + /// This differs from [`path`] in that multiple calls to path will end up calling `make_run` + /// multiple times, whereas a single call to `paths` will only ever generate a single call to + /// `paths`. + /// + /// This is analogous to `all_krates`, although `all_krates` is gone now. Prefer [`path`] where possible. 
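+    ///
+    /// Illustrative sketch (editor's note, not part of the upstream change): a
+    /// step whose sources live in two directories that are aliases of one
+    /// another can register them together, so that either command-line path
+    /// results in a single `make_run` call (the directories below are
+    /// hypothetical):
+    ///
+    /// ```ignore (illustrative)
+    /// fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+    ///     run.paths(&["src/tools/example", "src/tools/example-support"])
+    /// }
+    /// ```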
+ /// + /// [`path`]: ShouldRun::path + pub fn paths(mut self, paths: &[&str]) -> Self { + static SUBMODULES_PATHS: OnceCell> = OnceCell::new(); + + let init_submodules_paths = |src: &PathBuf| { + let file = File::open(src.join(".gitmodules")).unwrap(); + + let mut submodules_paths = vec![]; + for line in BufReader::new(file).lines() { + if let Ok(line) = line { + let line = line.trim(); + + if line.starts_with("path") { + let actual_path = + line.split(' ').last().expect("Couldn't get value of path"); + submodules_paths.push(actual_path.to_owned()); + } + } + } + + submodules_paths + }; + + let submodules_paths = + SUBMODULES_PATHS.get_or_init(|| init_submodules_paths(&self.builder.src)); + + self.paths.insert(PathSet::Set( + paths + .iter() + .map(|p| { + // assert only if `p` isn't submodule + if submodules_paths.iter().find(|sm_p| p.contains(*sm_p)).is_none() { + assert!( + self.builder.src.join(p).exists(), + "`should_run.paths` should correspond to real on-disk paths - use `alias` if there is no relevant path: {}", + p + ); + } + + TaskPath { path: p.into(), kind: Some(self.kind) } + }) + .collect(), + )); + self + } + + /// Handles individual files (not directories) within a test suite. + fn is_suite_path(&self, requested_path: &Path) -> Option<&PathSet> { + self.paths.iter().find(|pathset| match pathset { + PathSet::Suite(suite) => requested_path.starts_with(&suite.path), + PathSet::Set(_) => false, + }) + } + + pub fn suite_path(mut self, suite: &str) -> Self { + self.paths.insert(PathSet::Suite(TaskPath { path: suite.into(), kind: Some(self.kind) })); + self + } + + // allows being more explicit about why should_run in Step returns the value passed to it + pub fn never(mut self) -> ShouldRun<'a> { + self.paths.insert(PathSet::empty()); + self + } + + /// Given a set of requested paths, return the subset which match the Step for this `ShouldRun`, + /// removing the matches from `paths`. + /// + /// NOTE: this returns multiple PathSets to allow for the possibility of multiple units of work + /// within the same step. For example, `test::Crate` allows testing multiple crates in the same + /// cargo invocation, which are put into separate sets because they aren't aliases. + /// + /// The reason we return PathSet instead of PathBuf is to allow for aliases that mean the same thing + /// (for now, just `all_krates` and `paths`, but we may want to add an `aliases` function in the future?) 
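+    ///
+    /// Illustrative sketch (editor's note, not part of the upstream change;
+    /// `should_run` here is a hypothetical `ShouldRun` that registered
+    /// `library/std`):
+    ///
+    /// ```ignore (illustrative)
+    /// let mut requested: Vec<&Path> = vec![Path::new("library/std")];
+    /// let sets = should_run.pathset_for_paths_removing_matches(&mut requested, Kind::Build);
+    /// assert_eq!(sets.len(), 1);      // one PathSet matched the request
+    /// assert!(requested.is_empty());  // the matched path was consumed
+    /// ```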
+ fn pathset_for_paths_removing_matches( + &self, + paths: &mut Vec<&Path>, + kind: Kind, + ) -> Vec { + let mut sets = vec![]; + for pathset in &self.paths { + let subset = pathset.intersection_removing_matches(paths, kind); + if subset != PathSet::empty() { + sets.push(subset); + } + } + sets + } +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)] +pub enum Kind { + #[clap(alias = "b")] + Build, + #[clap(alias = "c")] + Check, + Clippy, + Fix, + Format, + #[clap(alias = "t")] + Test, + Bench, + #[clap(alias = "d")] + Doc, + Clean, + Dist, + Install, + #[clap(alias = "r")] + Run, + Setup, + Suggest, +} + +impl Kind { + pub fn parse(string: &str) -> Option { + // these strings, including the one-letter aliases, must match the x.py help text + Some(match string { + "build" | "b" => Kind::Build, + "check" | "c" => Kind::Check, + "clippy" => Kind::Clippy, + "fix" => Kind::Fix, + "fmt" => Kind::Format, + "test" | "t" => Kind::Test, + "bench" => Kind::Bench, + "doc" | "d" => Kind::Doc, + "clean" => Kind::Clean, + "dist" => Kind::Dist, + "install" => Kind::Install, + "run" | "r" => Kind::Run, + "setup" => Kind::Setup, + "suggest" => Kind::Suggest, + _ => return None, + }) + } + + pub fn as_str(&self) -> &'static str { + match self { + Kind::Build => "build", + Kind::Check => "check", + Kind::Clippy => "clippy", + Kind::Fix => "fix", + Kind::Format => "fmt", + Kind::Test => "test", + Kind::Bench => "bench", + Kind::Doc => "doc", + Kind::Clean => "clean", + Kind::Dist => "dist", + Kind::Install => "install", + Kind::Run => "run", + Kind::Setup => "setup", + Kind::Suggest => "suggest", + } + } + + pub fn description(&self) -> String { + match self { + Kind::Test => "Testing", + Kind::Bench => "Benchmarking", + Kind::Doc => "Documenting", + Kind::Run => "Running", + Kind::Suggest => "Suggesting", + _ => { + let title_letter = self.as_str()[0..1].to_ascii_uppercase(); + return format!("{title_letter}{}ing", &self.as_str()[1..]); + } + } + .to_owned() + } +} + +impl<'a> Builder<'a> { + fn get_step_descriptions(kind: Kind) -> Vec { + macro_rules! describe { + ($($rule:ty),+ $(,)?) 
=> {{ + vec![$(StepDescription::from::<$rule>(kind)),+] + }}; + } + match kind { + Kind::Build => describe!( + compile::Std, + compile::Rustc, + compile::Assemble, + compile::CodegenBackend, + compile::StartupObjects, + tool::BuildManifest, + tool::Rustbook, + tool::ErrorIndex, + tool::UnstableBookGen, + tool::Tidy, + tool::Linkchecker, + tool::CargoTest, + tool::Compiletest, + tool::RemoteTestServer, + tool::RemoteTestClient, + tool::RustInstaller, + tool::Cargo, + tool::Rls, + tool::RustAnalyzer, + tool::RustAnalyzerProcMacroSrv, + tool::RustDemangler, + tool::Rustdoc, + tool::Clippy, + tool::CargoClippy, + llvm::Llvm, + llvm::Sanitizers, + tool::Rustfmt, + tool::Miri, + tool::CargoMiri, + llvm::Lld, + llvm::CrtBeginEnd, + tool::RustdocGUITest, + tool::OptimizedDist, + tool::CoverageDump, + ), + Kind::Check | Kind::Clippy | Kind::Fix => describe!( + check::Std, + check::Rustc, + check::Rustdoc, + check::CodegenBackend, + check::Clippy, + check::Miri, + check::CargoMiri, + check::MiroptTestTools, + check::Rls, + check::Rustfmt, + check::RustAnalyzer, + check::Bootstrap + ), + Kind::Test => describe!( + crate::core::build_steps::toolstate::ToolStateCheck, + test::ExpandYamlAnchors, + test::Tidy, + test::Ui, + test::RunPassValgrind, + test::Coverage, + test::CoverageMap, + test::CoverageRun, + test::MirOpt, + test::Codegen, + test::CodegenUnits, + test::Assembly, + test::Incremental, + test::Debuginfo, + test::UiFullDeps, + test::CodegenCranelift, + test::CodegenGCC, + test::Rustdoc, + test::CoverageRunRustdoc, + test::Pretty, + test::Crate, + test::CrateLibrustc, + test::CrateRustdoc, + test::CrateRustdocJsonTypes, + test::CrateBootstrap, + test::Linkcheck, + test::TierCheck, + test::Cargotest, + test::Cargo, + test::RustAnalyzer, + test::ErrorIndex, + test::Distcheck, + test::RunMakeFullDeps, + test::Nomicon, + test::Reference, + test::RustdocBook, + test::RustByExample, + test::TheBook, + test::UnstableBook, + test::RustcBook, + test::LintDocs, + test::RustcGuide, + test::EmbeddedBook, + test::EditionGuide, + test::Rustfmt, + test::Miri, + test::Clippy, + test::RustDemangler, + test::CompiletestTest, + test::RustdocJSStd, + test::RustdocJSNotStd, + test::RustdocGUI, + test::RustdocTheme, + test::RustdocUi, + test::RustdocJson, + test::HtmlCheck, + test::RustInstaller, + // Run bootstrap close to the end as it's unlikely to fail + test::Bootstrap, + // Run run-make last, since these won't pass without make on Windows + test::RunMake, + ), + Kind::Bench => describe!(test::Crate, test::CrateLibrustc), + Kind::Doc => describe!( + doc::UnstableBook, + doc::UnstableBookGen, + doc::TheBook, + doc::Standalone, + doc::Std, + doc::Rustc, + doc::Rustdoc, + doc::Rustfmt, + doc::ErrorIndex, + doc::Nomicon, + doc::Reference, + doc::RustdocBook, + doc::RustByExample, + doc::RustcBook, + doc::Cargo, + doc::CargoBook, + doc::Clippy, + doc::ClippyBook, + doc::Miri, + doc::EmbeddedBook, + doc::EditionGuide, + doc::StyleGuide, + doc::Tidy, + doc::Bootstrap, + ), + Kind::Dist => describe!( + dist::Docs, + dist::RustcDocs, + dist::JsonDocs, + dist::Mingw, + dist::Rustc, + dist::CodegenBackend, + dist::Std, + dist::RustcDev, + dist::Analysis, + dist::Src, + dist::Cargo, + dist::Rls, + dist::RustAnalyzer, + dist::Rustfmt, + dist::RustDemangler, + dist::Clippy, + dist::Miri, + dist::LlvmTools, + dist::RustDev, + dist::Bootstrap, + dist::Extended, + // It seems that PlainSourceTarball somehow changes how some of the tools + // perceive their dependencies (see #93033) which would invalidate fingerprints + // and 
force us to rebuild tools after vendoring dependencies. + // To work around this, create the Tarball after building all the tools. + dist::PlainSourceTarball, + dist::BuildManifest, + dist::ReproducibleArtifacts, + ), + Kind::Install => describe!( + install::Docs, + install::Std, + install::Cargo, + install::RustAnalyzer, + install::Rustfmt, + install::RustDemangler, + install::Clippy, + install::Miri, + install::LlvmTools, + install::Src, + install::Rustc + ), + Kind::Run => describe!( + run::ExpandYamlAnchors, + run::BuildManifest, + run::BumpStage0, + run::ReplaceVersionPlaceholder, + run::Miri, + run::CollectLicenseMetadata, + run::GenerateCopyright, + run::GenerateWindowsSys, + run::GenerateCompletions, + ), + Kind::Setup => describe!(setup::Profile, setup::Hook, setup::Link, setup::Vscode), + Kind::Clean => describe!(clean::CleanAll, clean::Rustc, clean::Std), + // special-cased in Build::build() + Kind::Format | Kind::Suggest => vec![], + } + } + + pub fn get_help(build: &Build, kind: Kind) -> Option { + let step_descriptions = Builder::get_step_descriptions(kind); + if step_descriptions.is_empty() { + return None; + } + + let builder = Self::new_internal(build, kind, vec![]); + let builder = &builder; + // The "build" kind here is just a placeholder, it will be replaced with something else in + // the following statement. + let mut should_run = ShouldRun::new(builder, Kind::Build); + for desc in step_descriptions { + should_run.kind = desc.kind; + should_run = (desc.should_run)(should_run); + } + let mut help = String::from("Available paths:\n"); + let mut add_path = |path: &Path| { + t!(write!(help, " ./x.py {} {}\n", kind.as_str(), path.display())); + }; + for pathset in should_run.paths { + match pathset { + PathSet::Set(set) => { + for path in set { + add_path(&path.path); + } + } + PathSet::Suite(path) => { + add_path(&path.path.join("...")); + } + } + } + Some(help) + } + + fn new_internal(build: &Build, kind: Kind, paths: Vec) -> Builder<'_> { + Builder { + build, + top_stage: build.config.stage, + kind, + cache: Cache::new(), + stack: RefCell::new(Vec::new()), + time_spent_on_dependencies: Cell::new(Duration::new(0, 0)), + paths, + } + } + + pub fn new(build: &Build) -> Builder<'_> { + let paths = &build.config.paths; + let (kind, paths) = match build.config.cmd { + Subcommand::Build => (Kind::Build, &paths[..]), + Subcommand::Check { .. } => (Kind::Check, &paths[..]), + Subcommand::Clippy { .. } => (Kind::Clippy, &paths[..]), + Subcommand::Fix => (Kind::Fix, &paths[..]), + Subcommand::Doc { .. } => (Kind::Doc, &paths[..]), + Subcommand::Test { .. } => (Kind::Test, &paths[..]), + Subcommand::Bench { .. } => (Kind::Bench, &paths[..]), + Subcommand::Dist => (Kind::Dist, &paths[..]), + Subcommand::Install => (Kind::Install, &paths[..]), + Subcommand::Run { .. } => (Kind::Run, &paths[..]), + Subcommand::Clean { .. } => (Kind::Clean, &paths[..]), + Subcommand::Format { .. } => (Kind::Format, &[][..]), + Subcommand::Suggest { .. 
} => (Kind::Suggest, &[][..]), + Subcommand::Setup { profile: ref path } => ( + Kind::Setup, + path.as_ref().map_or([].as_slice(), |path| std::slice::from_ref(path)), + ), + }; + + Self::new_internal(build, kind, paths.to_owned()) + } + + pub fn execute_cli(&self) { + self.run_step_descriptions(&Builder::get_step_descriptions(self.kind), &self.paths); + } + + pub fn default_doc(&self, paths: &[PathBuf]) { + self.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), paths); + } + + pub fn doc_rust_lang_org_channel(&self) -> String { + let channel = match &*self.config.channel { + "stable" => &self.version, + "beta" => "beta", + "nightly" | "dev" => "nightly", + // custom build of rustdoc maybe? link to the latest stable docs just in case + _ => "stable", + }; + "https://doc.rust-lang.org/".to_owned() + channel + } + + fn run_step_descriptions(&self, v: &[StepDescription], paths: &[PathBuf]) { + StepDescription::run(v, self, paths); + } + + /// Obtain a compiler at a given stage and for a given host. Explicitly does + /// not take `Compiler` since all `Compiler` instances are meant to be + /// obtained through this function, since it ensures that they are valid + /// (i.e., built and assembled). + pub fn compiler(&self, stage: u32, host: TargetSelection) -> Compiler { + self.ensure(compile::Assemble { target_compiler: Compiler { stage, host } }) + } + + /// Similar to `compiler`, except handles the full-bootstrap option to + /// silently use the stage1 compiler instead of a stage2 compiler if one is + /// requested. + /// + /// Note that this does *not* have the side effect of creating + /// `compiler(stage, host)`, unlike `compiler` above which does have such + /// a side effect. The returned compiler here can only be used to compile + /// new artifacts, it can't be used to rely on the presence of a particular + /// sysroot. + /// + /// See `force_use_stage1` and `force_use_stage2` for documentation on what each argument is. + pub fn compiler_for( + &self, + stage: u32, + host: TargetSelection, + target: TargetSelection, + ) -> Compiler { + if self.build.force_use_stage2(stage) { + self.compiler(2, self.config.build) + } else if self.build.force_use_stage1(stage, target) { + self.compiler(1, self.config.build) + } else { + self.compiler(stage, host) + } + } + + pub fn sysroot(&self, compiler: Compiler) -> Interned { + self.ensure(compile::Sysroot::new(compiler)) + } + + /// Returns the libdir where the standard library and other artifacts are + /// found for a compiler's sysroot. + pub fn sysroot_libdir(&self, compiler: Compiler, target: TargetSelection) -> Interned { + #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + struct Libdir { + compiler: Compiler, + target: TargetSelection, + } + impl Step for Libdir { + type Output = Interned; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.never() + } + + fn run(self, builder: &Builder<'_>) -> Interned { + let lib = builder.sysroot_libdir_relative(self.compiler); + let sysroot = builder + .sysroot(self.compiler) + .join(lib) + .join("rustlib") + .join(self.target.triple) + .join("lib"); + // Avoid deleting the rustlib/ directory we just copied + // (in `impl Step for Sysroot`). + if !builder.download_rustc() { + builder.verbose(&format!( + "Removing sysroot {} to avoid caching bugs", + sysroot.display() + )); + let _ = fs::remove_dir_all(&sysroot); + t!(fs::create_dir_all(&sysroot)); + } + + if self.compiler.stage == 0 { + // The stage 0 compiler for the build triple is always pre-built. 
+ // Ensure that `libLLVM.so` ends up in the target libdir, so that ui-fulldeps tests can use it when run. + dist::maybe_install_llvm_target( + builder, + self.compiler.host, + &builder.sysroot(self.compiler), + ); + } + + INTERNER.intern_path(sysroot) + } + } + self.ensure(Libdir { compiler, target }) + } + + pub fn sysroot_codegen_backends(&self, compiler: Compiler) -> PathBuf { + self.sysroot_libdir(compiler, compiler.host).with_file_name("codegen-backends") + } + + /// Returns the compiler's libdir where it stores the dynamic libraries that + /// it itself links against. + /// + /// For example this returns `/lib` on Unix and `/bin` on + /// Windows. + pub fn rustc_libdir(&self, compiler: Compiler) -> PathBuf { + if compiler.is_snapshot(self) { + self.rustc_snapshot_libdir() + } else { + match self.config.libdir_relative() { + Some(relative_libdir) if compiler.stage >= 1 => { + self.sysroot(compiler).join(relative_libdir) + } + _ => self.sysroot(compiler).join(libdir(compiler.host)), + } + } + } + + /// Returns the compiler's relative libdir where it stores the dynamic libraries that + /// it itself links against. + /// + /// For example this returns `lib` on Unix and `bin` on + /// Windows. + pub fn libdir_relative(&self, compiler: Compiler) -> &Path { + if compiler.is_snapshot(self) { + libdir(self.config.build).as_ref() + } else { + match self.config.libdir_relative() { + Some(relative_libdir) if compiler.stage >= 1 => relative_libdir, + _ => libdir(compiler.host).as_ref(), + } + } + } + + /// Returns the compiler's relative libdir where the standard library and other artifacts are + /// found for a compiler's sysroot. + /// + /// For example this returns `lib` on Unix and Windows. + pub fn sysroot_libdir_relative(&self, compiler: Compiler) -> &Path { + match self.config.libdir_relative() { + Some(relative_libdir) if compiler.stage >= 1 => relative_libdir, + _ if compiler.stage == 0 => &self.build.initial_libdir, + _ => Path::new("lib"), + } + } + + pub fn rustc_lib_paths(&self, compiler: Compiler) -> Vec { + let mut dylib_dirs = vec![self.rustc_libdir(compiler)]; + + // Ensure that the downloaded LLVM libraries can be found. + if self.config.llvm_from_ci { + let ci_llvm_lib = self.out.join(&*compiler.host.triple).join("ci-llvm").join("lib"); + dylib_dirs.push(ci_llvm_lib); + } + + dylib_dirs + } + + /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic + /// library lookup path. + pub fn add_rustc_lib_path(&self, compiler: Compiler, cmd: &mut Command) { + // Windows doesn't need dylib path munging because the dlls for the + // compiler live next to the compiler and the system will find them + // automatically. + if cfg!(windows) { + return; + } + + add_dylib_path(self.rustc_lib_paths(compiler), cmd); + } + + /// Gets a path to the compiler specified. + pub fn rustc(&self, compiler: Compiler) -> PathBuf { + if compiler.is_snapshot(self) { + self.initial_rustc.clone() + } else { + self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host)) + } + } + + /// Gets the paths to all of the compiler's codegen backends. 
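+    ///
+    /// Illustrative sketch (editor's note, not part of the upstream change):
+    /// this is how `Builder::cargo` later walks the backends to bust stale
+    /// caches:
+    ///
+    /// ```ignore (illustrative; mirrors the use in `Builder::cargo`)
+    /// for backend in builder.codegen_backends(compiler) {
+    ///     builder.clear_if_dirty(&out_dir, &backend);
+    /// }
+    /// ```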
+ fn codegen_backends(&self, compiler: Compiler) -> impl Iterator { + fs::read_dir(self.sysroot_codegen_backends(compiler)) + .into_iter() + .flatten() + .filter_map(Result::ok) + .map(|entry| entry.path()) + } + + pub fn rustdoc(&self, compiler: Compiler) -> PathBuf { + self.ensure(tool::Rustdoc { compiler }) + } + + pub fn rustdoc_cmd(&self, compiler: Compiler) -> Command { + let mut cmd = Command::new(&self.bootstrap_out.join("rustdoc")); + cmd.env("RUSTC_STAGE", compiler.stage.to_string()) + .env("RUSTC_SYSROOT", self.sysroot(compiler)) + // Note that this is *not* the sysroot_libdir because rustdoc must be linked + // equivalently to rustc. + .env("RUSTDOC_LIBDIR", self.rustc_libdir(compiler)) + .env("CFG_RELEASE_CHANNEL", &self.config.channel) + .env("RUSTDOC_REAL", self.rustdoc(compiler)) + .env("RUSTC_BOOTSTRAP", "1"); + + cmd.arg("-Wrustdoc::invalid_codeblock_attributes"); + + if self.config.deny_warnings { + cmd.arg("-Dwarnings"); + } + cmd.arg("-Znormalize-docs"); + + // Remove make-related flags that can cause jobserver problems. + cmd.env_remove("MAKEFLAGS"); + cmd.env_remove("MFLAGS"); + + if let Some(linker) = self.linker(compiler.host) { + cmd.env("RUSTDOC_LINKER", linker); + } + cmd + } + + /// Return the path to `llvm-config` for the target, if it exists. + /// + /// Note that this returns `None` if LLVM is disabled, or if we're in a + /// check build or dry-run, where there's no need to build all of LLVM. + fn llvm_config(&self, target: TargetSelection) -> Option { + if self.config.llvm_enabled() && self.kind != Kind::Check && !self.config.dry_run() { + let llvm::LlvmResult { llvm_config, .. } = self.ensure(llvm::Llvm { target }); + if llvm_config.is_file() { + return Some(llvm_config); + } + } + None + } + + /// Like `cargo`, but only passes flags that are valid for all commands. + pub fn bare_cargo( + &self, + compiler: Compiler, + mode: Mode, + target: TargetSelection, + cmd: &str, + ) -> Command { + let mut cargo = Command::new(&self.initial_cargo); + // Run cargo from the source root so it can find .cargo/config. + // This matters when using vendoring and the working directory is outside the repository. + cargo.current_dir(&self.src); + + let out_dir = self.stage_out(compiler, mode); + cargo.env("CARGO_TARGET_DIR", &out_dir).arg(cmd); + + // Found with `rg "init_env_logger\("`. If anyone uses `init_env_logger` + // from out of tree it shouldn't matter, since x.py is only used for + // building in-tree. + let color_logs = ["RUSTDOC_LOG_COLOR", "RUSTC_LOG_COLOR", "RUST_LOG_COLOR"]; + match self.build.config.color { + Color::Always => { + cargo.arg("--color=always"); + for log in &color_logs { + cargo.env(log, "always"); + } + } + Color::Never => { + cargo.arg("--color=never"); + for log in &color_logs { + cargo.env(log, "never"); + } + } + Color::Auto => {} // nothing to do + } + + if cmd != "install" { + cargo.arg("--target").arg(target.rustc_target_arg()); + } else { + assert_eq!(target, compiler.host); + } + + if self.config.rust_optimize.is_release() { + // FIXME: cargo bench/install do not accept `--release` + if cmd != "bench" && cmd != "install" { + cargo.arg("--release"); + } + } + + // Remove make-related flags to ensure Cargo can correctly set things up + cargo.env_remove("MAKEFLAGS"); + cargo.env_remove("MFLAGS"); + + cargo + } + + /// Prepares an invocation of `cargo` to be run. + /// + /// This will create a `Command` that represents a pending execution of + /// Cargo. 
This cargo will be configured to use `compiler` as the actual + /// rustc compiler, its output will be scoped by `mode`'s output directory, + /// it will pass the `--target` flag for the specified `target`, and will be + /// executing the Cargo command `cmd`. + pub fn cargo( + &self, + compiler: Compiler, + mode: Mode, + source_type: SourceType, + target: TargetSelection, + cmd: &str, + ) -> Cargo { + let mut cargo = self.bare_cargo(compiler, mode, target, cmd); + let out_dir = self.stage_out(compiler, mode); + + let mut hostflags = HostFlags::default(); + + // Codegen backends are not yet tracked by -Zbinary-dep-depinfo, + // so we need to explicitly clear out if they've been updated. + for backend in self.codegen_backends(compiler) { + self.clear_if_dirty(&out_dir, &backend); + } + + if cmd == "doc" || cmd == "rustdoc" { + let my_out = match mode { + // This is the intended out directory for compiler documentation. + Mode::Rustc | Mode::ToolRustc => self.compiler_doc_out(target), + Mode::Std => { + if self.config.cmd.json() { + out_dir.join(target.triple).join("json-doc") + } else { + out_dir.join(target.triple).join("doc") + } + } + _ => panic!("doc mode {mode:?} not expected"), + }; + let rustdoc = self.rustdoc(compiler); + self.clear_if_dirty(&my_out, &rustdoc); + } + + let profile_var = |name: &str| { + let profile = if self.config.rust_optimize.is_release() { "RELEASE" } else { "DEV" }; + format!("CARGO_PROFILE_{}_{}", profile, name) + }; + + // See comment in rustc_llvm/build.rs for why this is necessary, largely llvm-config + // needs to not accidentally link to libLLVM in stage0/lib. + cargo.env("REAL_LIBRARY_PATH_VAR", &helpers::dylib_path_var()); + if let Some(e) = env::var_os(helpers::dylib_path_var()) { + cargo.env("REAL_LIBRARY_PATH", e); + } + + // Set a flag for `check`/`clippy`/`fix`, so that certain build + // scripts can do less work (i.e. not building/requiring LLVM). + if cmd == "check" || cmd == "clippy" || cmd == "fix" { + // If we've not yet built LLVM, or it's stale, then bust + // the rustc_llvm cache. That will always work, even though it + // may mean that on the next non-check build we'll need to rebuild + // rustc_llvm. But if LLVM is stale, that'll be a tiny amount + // of work comparatively, and we'd likely need to rebuild it anyway, + // so that's okay. + if crate::core::build_steps::llvm::prebuilt_llvm_config(self, target).is_err() { + cargo.env("RUST_CHECK", "1"); + } + } + + let stage = if compiler.stage == 0 && self.local_rebuild { + // Assume the local-rebuild rustc already has stage1 features. + 1 + } else { + compiler.stage + }; + + let mut rustflags = Rustflags::new(target); + if stage != 0 { + if let Ok(s) = env::var("CARGOFLAGS_NOT_BOOTSTRAP") { + cargo.args(s.split_whitespace()); + } + rustflags.env("RUSTFLAGS_NOT_BOOTSTRAP"); + } else { + if let Ok(s) = env::var("CARGOFLAGS_BOOTSTRAP") { + cargo.args(s.split_whitespace()); + } + rustflags.env("RUSTFLAGS_BOOTSTRAP"); + if cmd == "clippy" { + // clippy overwrites sysroot if we pass it to cargo. + // Pass it directly to clippy instead. + // NOTE: this can't be fixed in clippy because we explicitly don't set `RUSTC`, + // so it has no way of knowing the sysroot. + rustflags.arg("--sysroot"); + rustflags.arg( + self.sysroot(compiler) + .as_os_str() + .to_str() + .expect("sysroot must be valid UTF-8"), + ); + // Only run clippy on a very limited subset of crates (in particular, not build scripts). 
+ cargo.arg("-Zunstable-options"); + // Explicitly does *not* set `--cfg=bootstrap`, since we're using a nightly clippy. + let host_version = Command::new("rustc").arg("--version").output().map_err(|_| ()); + let output = host_version.and_then(|output| { + if output.status.success() { + Ok(output) + } else { + Err(()) + } + }).unwrap_or_else(|_| { + eprintln!( + "ERROR: `x.py clippy` requires a host `rustc` toolchain with the `clippy` component" + ); + eprintln!("HELP: try `rustup component add clippy`"); + crate::exit!(1); + }); + if !t!(std::str::from_utf8(&output.stdout)).contains("nightly") { + rustflags.arg("--cfg=bootstrap"); + } + } else { + rustflags.arg("--cfg=bootstrap"); + } + } + + let use_new_symbol_mangling = match self.config.rust_new_symbol_mangling { + Some(setting) => { + // If an explicit setting is given, use that + setting + } + None => { + if mode == Mode::Std { + // The standard library defaults to the legacy scheme + false + } else { + // The compiler and tools default to the new scheme + true + } + } + }; + + // By default, windows-rs depends on a native library that doesn't get copied into the + // sysroot. Passing this cfg enables raw-dylib support instead, which makes the native + // library unnecessary. This can be removed when windows-rs enables raw-dylib + // unconditionally. + if let Mode::Rustc | Mode::ToolRustc = mode { + rustflags.arg("--cfg=windows_raw_dylib"); + } + + if use_new_symbol_mangling { + rustflags.arg("-Csymbol-mangling-version=v0"); + } else { + rustflags.arg("-Csymbol-mangling-version=legacy"); + rustflags.arg("-Zunstable-options"); + } + + // #[cfg(bootstrap)] + let use_new_check_cfg_syntax = self.local_rebuild; + + // Enable compile-time checking of `cfg` names, values and Cargo `features`. + // + // Note: `std`, `alloc` and `core` imports some dependencies by #[path] (like + // backtrace, core_simd, std_float, ...), those dependencies have their own + // features but cargo isn't involved in the #[path] process and so cannot pass the + // complete list of features, so for that reason we don't enable checking of + // features for std crates. + if use_new_check_cfg_syntax { + cargo.arg("-Zcheck-cfg"); + if mode == Mode::Std { + rustflags.arg("--check-cfg=cfg(feature,values(any()))"); + } + } else { + cargo.arg(if mode != Mode::Std { + "-Zcheck-cfg=names,values,output,features" + } else { + "-Zcheck-cfg=names,values,output" + }); + } + + // Add extra cfg not defined in/by rustc + // + // Note: Although it would seems that "-Zunstable-options" to `rustflags` is useless as + // cargo would implicitly add it, it was discover that sometimes bootstrap only use + // `rustflags` without `cargo` making it required. 
+ rustflags.arg("-Zunstable-options"); + for (restricted_mode, name, values) in EXTRA_CHECK_CFGS { + if *restricted_mode == None || *restricted_mode == Some(mode) { + // Creating a string of the values by concatenating each value: + // ',"tvos","watchos"' or '' (nothing) when there are no values + let values = match values { + Some(values) => values + .iter() + .map(|val| [",", "\"", val, "\""]) + .flatten() + .collect::(), + None => String::new(), + }; + if use_new_check_cfg_syntax { + let values = values.strip_prefix(",").unwrap_or(&values); // remove the first `,` + rustflags.arg(&format!("--check-cfg=cfg({name},values({values}))")); + } else { + rustflags.arg(&format!("--check-cfg=values({name}{values})")); + } + } + } + + // FIXME(rust-lang/cargo#5754) we shouldn't be using special command arguments + // to the host invocation here, but rather Cargo should know what flags to pass rustc + // itself. + if stage == 0 { + hostflags.arg("--cfg=bootstrap"); + } + // Cargo doesn't pass RUSTFLAGS to proc_macros: + // https://github.com/rust-lang/cargo/issues/4423 + // Thus, if we are on stage 0, we explicitly set `--cfg=bootstrap`. + // We also declare that the flag is expected, which we need to do to not + // get warnings about it being unexpected. + hostflags.arg("-Zunstable-options"); + if use_new_check_cfg_syntax { + hostflags.arg("--check-cfg=cfg(bootstrap)"); + } else { + hostflags.arg("--check-cfg=values(bootstrap)"); + } + + // FIXME: It might be better to use the same value for both `RUSTFLAGS` and `RUSTDOCFLAGS`, + // but this breaks CI. At the very least, stage0 `rustdoc` needs `--cfg bootstrap`. See + // #71458. + let mut rustdocflags = rustflags.clone(); + rustdocflags.propagate_cargo_env("RUSTDOCFLAGS"); + if stage == 0 { + rustdocflags.env("RUSTDOCFLAGS_BOOTSTRAP"); + } else { + rustdocflags.env("RUSTDOCFLAGS_NOT_BOOTSTRAP"); + } + + if let Ok(s) = env::var("CARGOFLAGS") { + cargo.args(s.split_whitespace()); + } + + match mode { + Mode::Std | Mode::ToolBootstrap | Mode::ToolStd => {} + Mode::Rustc | Mode::Codegen | Mode::ToolRustc => { + // Build proc macros both for the host and the target + if target != compiler.host && cmd != "check" { + cargo.arg("-Zdual-proc-macros"); + rustflags.arg("-Zdual-proc-macros"); + } + } + } + + // This tells Cargo (and in turn, rustc) to output more complete + // dependency information. Most importantly for rustbuild, this + // includes sysroot artifacts, like libstd, which means that we don't + // need to track those in rustbuild (an error prone process!). This + // feature is currently unstable as there may be some bugs and such, but + // it represents a big improvement in rustbuild's reliability on + // rebuilds, so we're using it here. + // + // For some additional context, see #63470 (the PR originally adding + // this), as well as #63012 which is the tracking issue for this + // feature on the rustc side. + cargo.arg("-Zbinary-dep-depinfo"); + let allow_features = match mode { + Mode::ToolBootstrap | Mode::ToolStd => { + // Restrict the allowed features so we don't depend on nightly + // accidentally. + // + // binary-dep-depinfo is used by rustbuild itself for all + // compilations. + // + // Lots of tools depend on proc_macro2 and proc-macro-error. + // Those have build scripts which assume nightly features are + // available if the `rustc` version is "nighty" or "dev". See + // bin/rustc.rs for why that is a problem. Instead of labeling + // those features for each individual tool that needs them, + // just blanket allow them here. 
+ // + // If this is ever removed, be sure to add something else in + // its place to keep the restrictions in place (or make a way + // to unset RUSTC_BOOTSTRAP). + "binary-dep-depinfo,proc_macro_span,proc_macro_span_shrink,proc_macro_diagnostic" + .to_string() + } + Mode::Std | Mode::Rustc | Mode::Codegen | Mode::ToolRustc => String::new(), + }; + + cargo.arg("-j").arg(self.jobs().to_string()); + + // FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005 + // Force cargo to output binaries with disambiguating hashes in the name + let mut metadata = if compiler.stage == 0 { + // Treat stage0 like a special channel, whether it's a normal prior- + // release rustc or a local rebuild with the same version, so we + // never mix these libraries by accident. + "bootstrap".to_string() + } else { + self.config.channel.to_string() + }; + // We want to make sure that none of the dependencies between + // std/test/rustc unify with one another. This is done for weird linkage + // reasons but the gist of the problem is that if librustc, libtest, and + // libstd all depend on libc from crates.io (which they actually do) we + // want to make sure they all get distinct versions. Things get really + // weird if we try to unify all these dependencies right now, namely + // around how many times the library is linked in dynamic libraries and + // such. If rustc were a static executable or if we didn't ship dylibs + // this wouldn't be a problem, but we do, so it is. This is in general + // just here to make sure things build right. If you can remove this and + // things still build right, please do! + match mode { + Mode::Std => metadata.push_str("std"), + // When we're building rustc tools, they're built with a search path + // that contains things built during the rustc build. For example, + // bitflags is built during the rustc build, and is a dependency of + // rustdoc as well. We're building rustdoc in a different target + // directory, though, which means that Cargo will rebuild the + // dependency. When we go on to build rustdoc, we'll look for + // bitflags, and find two different copies: one built during the + // rustc step and one that we just built. This isn't always a + // problem, somehow -- not really clear why -- but we know that this + // fixes things. + Mode::ToolRustc => metadata.push_str("tool-rustc"), + // Same for codegen backends. + Mode::Codegen => metadata.push_str("codegen"), + _ => {} + } + cargo.env("__CARGO_DEFAULT_LIB_METADATA", &metadata); + + if cmd == "clippy" { + rustflags.arg("-Zforce-unstable-if-unmarked"); + } + + rustflags.arg("-Zmacro-backtrace"); + + let want_rustdoc = self.doc_tests != DocTests::No; + + // We synthetically interpret a stage0 compiler used to build tools as a + // "raw" compiler in that it's the exact snapshot we download. Normally + // the stage0 build means it uses libraries build by the stage0 + // compiler, but for tools we just use the precompiled libraries that + // we've downloaded + let use_snapshot = mode == Mode::ToolBootstrap; + assert!(!use_snapshot || stage == 0 || self.local_rebuild); + + let maybe_sysroot = self.sysroot(compiler); + let sysroot = if use_snapshot { self.rustc_snapshot_sysroot() } else { &maybe_sysroot }; + let libdir = self.rustc_libdir(compiler); + + // Clear the output directory if the real rustc we're using has changed; + // Cargo cannot detect this as it thinks rustc is bootstrap/debug/rustc. 
+ // + // Avoid doing this during dry run as that usually means the relevant + // compiler is not yet linked/copied properly. + // + // Only clear out the directory if we're compiling std; otherwise, we + // should let Cargo take care of things for us (via depdep info) + if !self.config.dry_run() && mode == Mode::Std && cmd == "build" { + self.clear_if_dirty(&out_dir, &self.rustc(compiler)); + } + + // Customize the compiler we're running. Specify the compiler to cargo + // as our shim and then pass it some various options used to configure + // how the actual compiler itself is called. + // + // These variables are primarily all read by + // src/bootstrap/bin/{rustc.rs,rustdoc.rs} + cargo + .env("RUSTBUILD_NATIVE_DIR", self.native_dir(target)) + .env("RUSTC_REAL", self.rustc(compiler)) + .env("RUSTC_STAGE", stage.to_string()) + .env("RUSTC_SYSROOT", &sysroot) + .env("RUSTC_LIBDIR", &libdir) + .env("RUSTDOC", self.bootstrap_out.join("rustdoc")) + .env( + "RUSTDOC_REAL", + if cmd == "doc" || cmd == "rustdoc" || (cmd == "test" && want_rustdoc) { + self.rustdoc(compiler) + } else { + PathBuf::from("/path/to/nowhere/rustdoc/not/required") + }, + ) + .env("RUSTC_ERROR_METADATA_DST", self.extended_error_dir()) + .env("RUSTC_BREAK_ON_ICE", "1"); + // Clippy support is a hack and uses the default `cargo-clippy` in path. + // Don't override RUSTC so that the `cargo-clippy` in path will be run. + if cmd != "clippy" { + cargo.env("RUSTC", self.bootstrap_out.join("rustc")); + } + + // Dealing with rpath here is a little special, so let's go into some + // detail. First off, `-rpath` is a linker option on Unix platforms + // which adds to the runtime dynamic loader path when looking for + // dynamic libraries. We use this by default on Unix platforms to ensure + // that our nightlies behave the same on Windows, that is they work out + // of the box. This can be disabled by setting `rpath = false` in `[rust]` + // table of `config.toml` + // + // Ok, so the astute might be wondering "why isn't `-C rpath` used + // here?" and that is indeed a good question to ask. This codegen + // option is the compiler's current interface to generating an rpath. + // Unfortunately it doesn't quite suffice for us. The flag currently + // takes no value as an argument, so the compiler calculates what it + // should pass to the linker as `-rpath`. This unfortunately is based on + // the **compile time** directory structure which when building with + // Cargo will be very different than the runtime directory structure. + // + // All that's a really long winded way of saying that if we use + // `-Crpath` then the executables generated have the wrong rpath of + // something like `$ORIGIN/deps` when in fact the way we distribute + // rustc requires the rpath to be `$ORIGIN/../lib`. + // + // So, all in all, to set up the correct rpath we pass the linker + // argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it + // fun to pass a flag to a tool to pass a flag to pass a flag to a tool + // to change a flag in a binary? + if self.config.rpath_enabled(target) && helpers::use_host_linker(target) { + let libdir = self.sysroot_libdir_relative(compiler).to_str().unwrap(); + let rpath = if target.contains("apple") { + // Note that we need to take one extra step on macOS to also pass + // `-Wl,-instal_name,@rpath/...` to get things to work right. To + // do that we pass a weird flag to the compiler to get it to do + // so. 
Note that this is definitely a hack, and we should likely + // flesh out rpath support more fully in the future. + rustflags.arg("-Zosx-rpath-install-name"); + Some(format!("-Wl,-rpath,@loader_path/../{libdir}")) + } else if !target.contains("windows") + && !target.contains("aix") + && !target.contains("xous") + { + rustflags.arg("-Clink-args=-Wl,-z,origin"); + Some(format!("-Wl,-rpath,$ORIGIN/../{libdir}")) + } else { + None + }; + if let Some(rpath) = rpath { + rustflags.arg(&format!("-Clink-args={rpath}")); + } + } + + if let Some(host_linker) = self.linker(compiler.host) { + hostflags.arg(format!("-Clinker={}", host_linker.display())); + } + if self.is_fuse_ld_lld(compiler.host) { + hostflags.arg("-Clink-args=-fuse-ld=lld"); + } + + if let Some(target_linker) = self.linker(target) { + let target = crate::envify(&target.triple); + cargo.env(&format!("CARGO_TARGET_{target}_LINKER"), target_linker); + } + if self.is_fuse_ld_lld(target) { + rustflags.arg("-Clink-args=-fuse-ld=lld"); + } + self.lld_flags(target).for_each(|flag| { + rustdocflags.arg(&flag); + }); + + if !(["build", "check", "clippy", "fix", "rustc"].contains(&cmd)) && want_rustdoc { + cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(compiler)); + } + + let debuginfo_level = match mode { + Mode::Rustc | Mode::Codegen => self.config.rust_debuginfo_level_rustc, + Mode::Std => self.config.rust_debuginfo_level_std, + Mode::ToolBootstrap | Mode::ToolStd | Mode::ToolRustc => { + self.config.rust_debuginfo_level_tools + } + }; + cargo.env(profile_var("DEBUG"), debuginfo_level.to_string()); + if let Some(opt_level) = &self.config.rust_optimize.get_opt_level() { + cargo.env(profile_var("OPT_LEVEL"), opt_level); + } + if !self.config.dry_run() && self.cc.borrow()[&target].args().iter().any(|arg| arg == "-gz") + { + rustflags.arg("-Clink-arg=-gz"); + } + cargo.env( + profile_var("DEBUG_ASSERTIONS"), + if mode == Mode::Std { + self.config.rust_debug_assertions_std.to_string() + } else { + self.config.rust_debug_assertions.to_string() + }, + ); + cargo.env( + profile_var("OVERFLOW_CHECKS"), + if mode == Mode::Std { + self.config.rust_overflow_checks_std.to_string() + } else { + self.config.rust_overflow_checks.to_string() + }, + ); + + let split_debuginfo_is_stable = target.contains("linux") + || target.contains("apple") + || (target.contains("msvc") + && self.config.rust_split_debuginfo == SplitDebuginfo::Packed) + || (target.contains("windows") + && self.config.rust_split_debuginfo == SplitDebuginfo::Off); + + if !split_debuginfo_is_stable { + rustflags.arg("-Zunstable-options"); + } + match self.config.rust_split_debuginfo { + SplitDebuginfo::Packed => rustflags.arg("-Csplit-debuginfo=packed"), + SplitDebuginfo::Unpacked => rustflags.arg("-Csplit-debuginfo=unpacked"), + SplitDebuginfo::Off => rustflags.arg("-Csplit-debuginfo=off"), + }; + + if self.config.cmd.bless() { + // Bless `expect!` tests. 
+ cargo.env("UPDATE_EXPECT", "1"); + } + + if !mode.is_tool() { + cargo.env("RUSTC_FORCE_UNSTABLE", "1"); + } + + if let Some(x) = self.crt_static(target) { + if x { + rustflags.arg("-Ctarget-feature=+crt-static"); + } else { + rustflags.arg("-Ctarget-feature=-crt-static"); + } + } + + if let Some(x) = self.crt_static(compiler.host) { + let sign = if x { "+" } else { "-" }; + hostflags.arg(format!("-Ctarget-feature={sign}crt-static")); + } + + if let Some(map_to) = self.build.debuginfo_map_to(GitRepo::Rustc) { + let map = format!("{}={}", self.build.src.display(), map_to); + cargo.env("RUSTC_DEBUGINFO_MAP", map); + + // `rustc` needs to know the virtual `/rustc/$hash` we're mapping to, + // in order to opportunistically reverse it later. + cargo.env("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR", map_to); + } + + if self.config.rust_remap_debuginfo { + // FIXME: handle vendored sources + let registry_src = t!(home::cargo_home()).join("registry").join("src"); + let mut env_var = OsString::new(); + for entry in t!(std::fs::read_dir(registry_src)) { + if !env_var.is_empty() { + env_var.push("\t"); + } + env_var.push(t!(entry).path()); + env_var.push("=/rust/deps"); + } + cargo.env("RUSTC_CARGO_REGISTRY_SRC_TO_REMAP", env_var); + } + + // Enable usage of unstable features + cargo.env("RUSTC_BOOTSTRAP", "1"); + self.add_rust_test_threads(&mut cargo); + + // Almost all of the crates that we compile as part of the bootstrap may + // have a build script, including the standard library. To compile a + // build script, however, it itself needs a standard library! This + // introduces a bit of a pickle when we're compiling the standard + // library itself. + // + // To work around this we actually end up using the snapshot compiler + // (stage0) for compiling build scripts of the standard library itself. + // The stage0 compiler is guaranteed to have a libstd available for use. + // + // For other crates, however, we know that we've already got a standard + // library up and running, so we can use the normal compiler to compile + // build scripts in that situation. + if mode == Mode::Std { + cargo + .env("RUSTC_SNAPSHOT", &self.initial_rustc) + .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir()); + } else { + cargo + .env("RUSTC_SNAPSHOT", self.rustc(compiler)) + .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler)); + } + + // Tools that use compiler libraries may inherit the `-lLLVM` link + // requirement, but the `-L` library path is not propagated across + // separate Cargo projects. We can add LLVM's library path to the + // platform-specific environment variable as a workaround. + if mode == Mode::ToolRustc || mode == Mode::Codegen { + if let Some(llvm_config) = self.llvm_config(target) { + let llvm_libdir = output(Command::new(&llvm_config).arg("--libdir")); + add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cargo); + } + } + + // Compile everything except libraries and proc macros with the more + // efficient initial-exec TLS model. This doesn't work with `dlopen`, + // so we can't use it by default in general, but we can use it for tools + // and our own internal libraries. + if !mode.must_support_dlopen() && !target.triple.starts_with("powerpc-") { + cargo.env("RUSTC_TLS_MODEL_INITIAL_EXEC", "1"); + } + + // Ignore incremental modes except for stage0, since we're + // not guaranteeing correctness across builds if the compiler + // is changing under your feet. 
+ if self.config.incremental && compiler.stage == 0 { + cargo.env("CARGO_INCREMENTAL", "1"); + } else { + // Don't rely on any default setting for incr. comp. in Cargo + cargo.env("CARGO_INCREMENTAL", "0"); + } + + if let Some(ref on_fail) = self.config.on_fail { + cargo.env("RUSTC_ON_FAIL", on_fail); + } + + if self.config.print_step_timings { + cargo.env("RUSTC_PRINT_STEP_TIMINGS", "1"); + } + + if self.config.print_step_rusage { + cargo.env("RUSTC_PRINT_STEP_RUSAGE", "1"); + } + + if self.config.backtrace_on_ice { + cargo.env("RUSTC_BACKTRACE_ON_ICE", "1"); + } + + cargo.env("RUSTC_VERBOSE", self.verbosity.to_string()); + + // Downstream forks of the Rust compiler might want to use a custom libc to add support for + // targets that are not yet available upstream. Adding a patch to replace libc with a + // custom one would cause compilation errors though, because Cargo would interpret the + // custom libc as part of the workspace, and apply the check-cfg lints on it. + // + // The libc build script emits check-cfg flags only when this environment variable is set, + // so this line allows the use of custom libcs. + cargo.env("LIBC_CHECK_CFG", "1"); + + if source_type == SourceType::InTree { + let mut lint_flags = Vec::new(); + // When extending this list, add the new lints to the RUSTFLAGS of the + // build_bootstrap function of src/bootstrap/bootstrap.py as well as + // some code doesn't go through this `rustc` wrapper. + lint_flags.push("-Wrust_2018_idioms"); + lint_flags.push("-Wunused_lifetimes"); + lint_flags.push("-Wsemicolon_in_expressions_from_macros"); + + if self.config.deny_warnings { + lint_flags.push("-Dwarnings"); + rustdocflags.arg("-Dwarnings"); + } + + // This does not use RUSTFLAGS due to caching issues with Cargo. + // Clippy is treated as an "in tree" tool, but shares the same + // cache as other "submodule" tools. With these options set in + // RUSTFLAGS, that causes *every* shared dependency to be rebuilt. + // By injecting this into the rustc wrapper, this circumvents + // Cargo's fingerprint detection. This is fine because lint flags + // are always ignored in dependencies. Eventually this should be + // fixed via better support from Cargo. + cargo.env("RUSTC_LINT_FLAGS", lint_flags.join(" ")); + + rustdocflags.arg("-Wrustdoc::invalid_codeblock_attributes"); + } + + if mode == Mode::Rustc { + rustflags.arg("-Zunstable-options"); + rustflags.arg("-Wrustc::internal"); + } + + // Throughout the build Cargo can execute a number of build scripts + // compiling C/C++ code and we need to pass compilers, archivers, flags, etc + // obtained previously to those build scripts. + // Build scripts use either the `cc` crate or `configure/make` so we pass + // the options through environment variables that are fetched and understood by both. + // + // FIXME: the guard against msvc shouldn't need to be here + if target.contains("msvc") { + if let Some(ref cl) = self.config.llvm_clang_cl { + cargo.env("CC", cl).env("CXX", cl); + } + } else { + let ccache = self.config.ccache.as_ref(); + let ccacheify = |s: &Path| { + let ccache = match ccache { + Some(ref s) => s, + None => return s.display().to_string(), + }; + // FIXME: the cc-rs crate only recognizes the literal strings + // `ccache` and `sccache` when doing caching compilations, so we + // mirror that here. It should probably be fixed upstream to + // accept a new env var or otherwise work with custom ccache + // vars. + match &ccache[..] 
{ + "ccache" | "sccache" => format!("{} {}", ccache, s.display()), + _ => s.display().to_string(), + } + }; + let triple_underscored = target.triple.replace("-", "_"); + let cc = ccacheify(&self.cc(target)); + cargo.env(format!("CC_{triple_underscored}"), &cc); + + let cflags = self.cflags(target, GitRepo::Rustc, CLang::C).join(" "); + cargo.env(format!("CFLAGS_{triple_underscored}"), &cflags); + + if let Some(ar) = self.ar(target) { + let ranlib = format!("{} s", ar.display()); + cargo + .env(format!("AR_{triple_underscored}"), ar) + .env(format!("RANLIB_{triple_underscored}"), ranlib); + } + + if let Ok(cxx) = self.cxx(target) { + let cxx = ccacheify(&cxx); + let cxxflags = self.cflags(target, GitRepo::Rustc, CLang::Cxx).join(" "); + cargo + .env(format!("CXX_{triple_underscored}"), &cxx) + .env(format!("CXXFLAGS_{triple_underscored}"), cxxflags); + } + } + + // If Control Flow Guard is enabled, pass the `control-flow-guard` flag to rustc + // when compiling the standard library, since this might be linked into the final outputs + // produced by rustc. Since this mitigation is only available on Windows, only enable it + // for the standard library in case the compiler is run on a non-Windows platform. + // This is not needed for stage 0 artifacts because these will only be used for building + // the stage 1 compiler. + if cfg!(windows) + && mode == Mode::Std + && self.config.control_flow_guard + && compiler.stage >= 1 + { + rustflags.arg("-Ccontrol-flow-guard"); + } + + // For `cargo doc` invocations, make rustdoc print the Rust version into the docs + // This replaces spaces with tabs because RUSTDOCFLAGS does not + // support arguments with regular spaces. Hopefully someday Cargo will + // have space support. + let rust_version = self.rust_version().replace(' ', "\t"); + rustdocflags.arg("--crate-version").arg(&rust_version); + + // Environment variables *required* throughout the build + // + // FIXME: should update code to not require this env var + cargo.env("CFG_COMPILER_HOST_TRIPLE", target.triple); + + // Set this for all builds to make sure doc builds also get it. + cargo.env("CFG_RELEASE_CHANNEL", &self.config.channel); + + // This one's a bit tricky. As of the time of this writing the compiler + // links to the `winapi` crate on crates.io. This crate provides raw + // bindings to Windows system functions, sort of like libc does for + // Unix. This crate also, however, provides "import libraries" for the + // MinGW targets. There's an import library per dll in the windows + // distribution which is what's linked to. These custom import libraries + // are used because the winapi crate can reference Windows functions not + // present in the MinGW import libraries. + // + // For example MinGW may ship libdbghelp.a, but it may not have + // references to all the functions in the dbghelp dll. Instead the + // custom import library for dbghelp in the winapi crates has all this + // information. + // + // Unfortunately for us though the import libraries are linked by + // default via `-ldylib=winapi_foo`. That is, they're linked with the + // `dylib` type with a `winapi_` prefix (so the winapi ones don't + // conflict with the system MinGW ones). This consequently means that + // the binaries we ship of things like rustc_codegen_llvm (aka the rustc_codegen_llvm + // DLL) when linked against *again*, for example with procedural macros + // or plugins, will trigger the propagation logic of `-ldylib`, passing + // `-lwinapi_foo` to the linker again. 
This isn't actually available in + // our distribution, however, so the link fails. + // + // To solve this problem we tell winapi to not use its bundled import + // libraries. This means that it will link to the system MinGW import + // libraries by default, and the `-ldylib=foo` directives will still get + // passed to the final linker, but they'll look like `-lfoo` which can + // be resolved because MinGW has the import library. The downside is we + // don't get newer functions from Windows, but we don't use any of them + // anyway. + if !mode.is_tool() { + cargo.env("WINAPI_NO_BUNDLED_LIBRARIES", "1"); + } + + for _ in 0..self.verbosity { + cargo.arg("-v"); + } + + match (mode, self.config.rust_codegen_units_std, self.config.rust_codegen_units) { + (Mode::Std, Some(n), _) | (_, _, Some(n)) => { + cargo.env(profile_var("CODEGEN_UNITS"), n.to_string()); + } + _ => { + // Don't set anything + } + } + + if self.config.locked_deps { + cargo.arg("--locked"); + } + if self.config.vendor || self.is_sudo { + cargo.arg("--frozen"); + } + + // Try to use a sysroot-relative bindir, in case it was configured absolutely. + cargo.env("RUSTC_INSTALL_BINDIR", self.config.bindir_relative()); + + self.ci_env.force_coloring_in_ci(&mut cargo); + + // When we build Rust dylibs they're all intended for intermediate + // usage, so make sure we pass the -Cprefer-dynamic flag instead of + // linking all deps statically into the dylib. + if matches!(mode, Mode::Std | Mode::Rustc) { + rustflags.arg("-Cprefer-dynamic"); + } + + // When building incrementally we default to a lower ThinLTO import limit + // (unless explicitly specified otherwise). This will produce a somewhat + // slower code but give way better compile times. + { + let limit = match self.config.rust_thin_lto_import_instr_limit { + Some(limit) => Some(limit), + None if self.config.incremental => Some(10), + _ => None, + }; + + if let Some(limit) = limit { + if stage == 0 || self.config.default_codegen_backend().unwrap_or_default() == "llvm" + { + rustflags.arg(&format!("-Cllvm-args=-import-instr-limit={limit}")); + } + } + } + + if matches!(mode, Mode::Std) { + if let Some(mir_opt_level) = self.config.rust_validate_mir_opts { + rustflags.arg("-Zvalidate-mir"); + rustflags.arg(&format!("-Zmir-opt-level={mir_opt_level}")); + } + // Always enable inlining MIR when building the standard library. + // Without this flag, MIR inlining is disabled when incremental compilation is enabled. + // That causes some mir-opt tests which inline functions from the standard library to + // break when incremental compilation is enabled. So this overrides the "no inlining + // during incremental builds" heuristic for the standard library. + rustflags.arg("-Zinline-mir"); + } + + // set rustc args passed from command line + let rustc_args = + self.config.cmd.rustc_args().iter().map(|s| s.to_string()).collect::>(); + if !rustc_args.is_empty() { + cargo.env("RUSTFLAGS", &rustc_args.join(" ")); + } + + Cargo { command: cargo, rustflags, rustdocflags, hostflags, allow_features } + } + + /// Ensure that a given step is built, returning its output. This will + /// cache the step, so it is safe (and good!) to call this as often as + /// needed to ensure that all dependencies are built. 
+ pub fn ensure(&'a self, step: S) -> S::Output { + { + let mut stack = self.stack.borrow_mut(); + for stack_step in stack.iter() { + // should skip + if stack_step.downcast_ref::().map_or(true, |stack_step| *stack_step != step) { + continue; + } + let mut out = String::new(); + out += &format!("\n\nCycle in build detected when adding {step:?}\n"); + for el in stack.iter().rev() { + out += &format!("\t{el:?}\n"); + } + panic!("{}", out); + } + if let Some(out) = self.cache.get(&step) { + self.verbose_than(1, &format!("{}c {:?}", " ".repeat(stack.len()), step)); + + return out; + } + self.verbose_than(1, &format!("{}> {:?}", " ".repeat(stack.len()), step)); + stack.push(Box::new(step.clone())); + } + + #[cfg(feature = "build-metrics")] + self.metrics.enter_step(&step, self); + + let (out, dur) = { + let start = Instant::now(); + let zero = Duration::new(0, 0); + let parent = self.time_spent_on_dependencies.replace(zero); + let out = step.clone().run(self); + let dur = start.elapsed(); + let deps = self.time_spent_on_dependencies.replace(parent + dur); + (out, dur - deps) + }; + + if self.config.print_step_timings && !self.config.dry_run() { + let step_string = format!("{step:?}"); + let brace_index = step_string.find("{").unwrap_or(0); + let type_string = type_name::(); + println!( + "[TIMING] {} {} -- {}.{:03}", + &type_string.strip_prefix("bootstrap::").unwrap_or(type_string), + &step_string[brace_index..], + dur.as_secs(), + dur.subsec_millis() + ); + } + + #[cfg(feature = "build-metrics")] + self.metrics.exit_step(self); + + { + let mut stack = self.stack.borrow_mut(); + let cur_step = stack.pop().expect("step stack empty"); + assert_eq!(cur_step.downcast_ref(), Some(&step)); + } + self.verbose_than(1, &format!("{}< {:?}", " ".repeat(self.stack.borrow().len()), step)); + self.cache.put(step, out.clone()); + out + } + + /// Ensure that a given step is built *only if it's supposed to be built by default*, returning + /// its output. This will cache the step, so it's safe (and good!) to call this as often as + /// needed to ensure that all dependencies are build. + pub(crate) fn ensure_if_default>>( + &'a self, + step: S, + kind: Kind, + ) -> S::Output { + let desc = StepDescription::from::(kind); + let should_run = (desc.should_run)(ShouldRun::new(self, desc.kind)); + + // Avoid running steps contained in --skip + for pathset in &should_run.paths { + if desc.is_excluded(self, pathset) { + return None; + } + } + + // Only execute if it's supposed to run as default + if desc.default && should_run.is_really_default() { self.ensure(step) } else { None } + } + + /// Checks if any of the "should_run" paths is in the `Builder` paths. 
+ pub(crate) fn was_invoked_explicitly(&'a self, kind: Kind) -> bool { + let desc = StepDescription::from::(kind); + let should_run = (desc.should_run)(ShouldRun::new(self, desc.kind)); + + for path in &self.paths { + if should_run.paths.iter().any(|s| s.has(path, desc.kind)) + && !desc.is_excluded( + self, + &PathSet::Suite(TaskPath { path: path.clone(), kind: Some(desc.kind) }), + ) + { + return true; + } + } + + false + } + + pub(crate) fn maybe_open_in_browser(&self, path: impl AsRef) { + if self.was_invoked_explicitly::(Kind::Doc) { + self.open_in_browser(path); + } + } + + pub(crate) fn open_in_browser(&self, path: impl AsRef) { + if self.config.dry_run() || !self.config.cmd.open() { + return; + } + + let path = path.as_ref(); + self.info(&format!("Opening doc {}", path.display())); + if let Err(err) = opener::open(path) { + self.info(&format!("{err}\n")); + } + } +} + +/// Represents flag values in `String` form with whitespace delimiter to pass it to the compiler later. +/// +/// `-Z crate-attr` flags will be applied recursively on the target code using the `rustc_parse::parser::Parser`. +/// See `rustc_builtin_macros::cmdline_attrs::inject` for more information. +#[derive(Debug, Clone)] +struct Rustflags(String, TargetSelection); + +impl Rustflags { + fn new(target: TargetSelection) -> Rustflags { + let mut ret = Rustflags(String::new(), target); + ret.propagate_cargo_env("RUSTFLAGS"); + ret + } + + /// By default, cargo will pick up on various variables in the environment. However, bootstrap + /// reuses those variables to pass additional flags to rustdoc, so by default they get overridden. + /// Explicitly add back any previous value in the environment. + /// + /// `prefix` is usually `RUSTFLAGS` or `RUSTDOCFLAGS`. + fn propagate_cargo_env(&mut self, prefix: &str) { + // Inherit `RUSTFLAGS` by default ... + self.env(prefix); + + // ... and also handle target-specific env RUSTFLAGS if they're configured. + let target_specific = format!("CARGO_TARGET_{}_{}", crate::envify(&self.1.triple), prefix); + self.env(&target_specific); + } + + fn env(&mut self, env: &str) { + if let Ok(s) = env::var(env) { + for part in s.split(' ') { + self.arg(part); + } + } + } + + fn arg(&mut self, arg: &str) -> &mut Self { + assert_eq!(arg.split(' ').count(), 1); + if !self.0.is_empty() { + self.0.push(' '); + } + self.0.push_str(arg); + self + } +} + +/// Flags that are passed to the `rustc` shim binary. +/// These flags will only be applied when compiling host code, i.e. when +/// `--target` is unset. +#[derive(Debug, Default)] +pub struct HostFlags { + rustc: Vec, +} + +impl HostFlags { + const SEPARATOR: &'static str = " "; + + /// Adds a host rustc flag. + fn arg>(&mut self, flag: S) { + let value = flag.into().trim().to_string(); + assert!(!value.contains(Self::SEPARATOR)); + self.rustc.push(value); + } + + /// Encodes all the flags into a single string. 
+ fn encode(self) -> String { + self.rustc.join(Self::SEPARATOR) + } +} + +#[derive(Debug)] +pub struct Cargo { + command: Command, + rustflags: Rustflags, + rustdocflags: Rustflags, + hostflags: HostFlags, + allow_features: String, +} + +impl Cargo { + pub fn rustdocflag(&mut self, arg: &str) -> &mut Cargo { + self.rustdocflags.arg(arg); + self + } + pub fn rustflag(&mut self, arg: &str) -> &mut Cargo { + self.rustflags.arg(arg); + self + } + + pub fn arg(&mut self, arg: impl AsRef) -> &mut Cargo { + self.command.arg(arg.as_ref()); + self + } + + pub fn args(&mut self, args: I) -> &mut Cargo + where + I: IntoIterator, + S: AsRef, + { + for arg in args { + self.arg(arg.as_ref()); + } + self + } + + pub fn env(&mut self, key: impl AsRef, value: impl AsRef) -> &mut Cargo { + // These are managed through rustflag/rustdocflag interfaces. + assert_ne!(key.as_ref(), "RUSTFLAGS"); + assert_ne!(key.as_ref(), "RUSTDOCFLAGS"); + self.command.env(key.as_ref(), value.as_ref()); + self + } + + pub fn add_rustc_lib_path(&mut self, builder: &Builder<'_>, compiler: Compiler) { + builder.add_rustc_lib_path(compiler, &mut self.command); + } + + pub fn current_dir(&mut self, dir: &Path) -> &mut Cargo { + self.command.current_dir(dir); + self + } + + /// Adds nightly-only features that this invocation is allowed to use. + /// + /// By default, all nightly features are allowed. Once this is called, it + /// will be restricted to the given set. + pub fn allow_features(&mut self, features: &str) -> &mut Cargo { + if !self.allow_features.is_empty() { + self.allow_features.push(','); + } + self.allow_features.push_str(features); + self + } +} + +impl From for Command { + fn from(mut cargo: Cargo) -> Command { + let rustflags = &cargo.rustflags.0; + if !rustflags.is_empty() { + cargo.command.env("RUSTFLAGS", rustflags); + } + + let rustdocflags = &cargo.rustdocflags.0; + if !rustdocflags.is_empty() { + cargo.command.env("RUSTDOCFLAGS", rustdocflags); + } + + let encoded_hostflags = cargo.hostflags.encode(); + if !encoded_hostflags.is_empty() { + cargo.command.env("RUSTC_HOST_FLAGS", encoded_hostflags); + } + + if !cargo.allow_features.is_empty() { + cargo.command.env("RUSTC_ALLOW_FEATURES", cargo.allow_features); + } + + cargo.command + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/config.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/config.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/config.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/config.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,2208 @@ +//! Serialized configuration of a build. +//! +//! This module implements parsing `config.toml` configuration files to tweak +//! how the build runs. 
+ +#[cfg(test)] +#[path = "../../tests/config.rs"] +mod tests; + +use std::cell::{Cell, RefCell}; +use std::cmp; +use std::collections::{HashMap, HashSet}; +use std::env; +use std::fmt::{self, Display}; +use std::fs; +use std::io::IsTerminal; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::str::FromStr; + +use crate::core::build_steps::compile::CODEGEN_BACKEND_PREFIX; +use crate::core::build_steps::llvm; +use crate::core::config::flags::{Color, Flags, Warnings}; +use crate::utils::cache::{Interned, INTERNER}; +use crate::utils::channel::{self, GitInfo}; +use crate::utils::helpers::{exe, output, t}; +use build_helper::exit; +use once_cell::sync::OnceCell; +use semver::Version; +use serde::{Deserialize, Deserializer}; +use serde_derive::Deserialize; + +pub use crate::core::config::flags::Subcommand; +use build_helper::git::GitConfig; + +macro_rules! check_ci_llvm { + ($name:expr) => { + assert!( + $name.is_none(), + "setting {} is incompatible with download-ci-llvm.", + stringify!($name) + ); + }; +} + +#[derive(Clone, Default)] +pub enum DryRun { + /// This isn't a dry run. + #[default] + Disabled, + /// This is a dry run enabled by bootstrap itself, so it can verify that no work is done. + SelfCheck, + /// This is a dry run enabled by the `--dry-run` flag. + UserSelected, +} + +#[derive(Copy, Clone, Default, PartialEq, Eq)] +pub enum DebuginfoLevel { + #[default] + None, + LineTablesOnly, + Limited, + Full, +} + +// NOTE: can't derive(Deserialize) because the intermediate trip through toml::Value only +// deserializes i64, and derive() only generates visit_u64 +impl<'de> Deserialize<'de> for DebuginfoLevel { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + use serde::de::Error; + + Ok(match Deserialize::deserialize(deserializer)? { + StringOrInt::String("none") | StringOrInt::Int(0) => DebuginfoLevel::None, + StringOrInt::String("line-tables-only") => DebuginfoLevel::LineTablesOnly, + StringOrInt::String("limited") | StringOrInt::Int(1) => DebuginfoLevel::Limited, + StringOrInt::String("full") | StringOrInt::Int(2) => DebuginfoLevel::Full, + StringOrInt::Int(n) => { + let other = serde::de::Unexpected::Signed(n); + return Err(D::Error::invalid_value(other, &"expected 0, 1, or 2")); + } + StringOrInt::String(s) => { + let other = serde::de::Unexpected::Str(s); + return Err(D::Error::invalid_value( + other, + &"expected none, line-tables-only, limited, or full", + )); + } + }) + } +} + +/// Suitable for passing to `-C debuginfo` +impl Display for DebuginfoLevel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use DebuginfoLevel::*; + f.write_str(match self { + None => "0", + LineTablesOnly => "line-tables-only", + Limited => "1", + Full => "2", + }) + } +} + +/// Global configuration for the entire build and/or bootstrap. +/// +/// This structure is parsed from `config.toml`, and some of the fields are inferred from `git` or build-time parameters. +/// +/// Note that this structure is not decoded directly into, but rather it is +/// filled out from the decoded forms of the structs below. For documentation +/// each field, see the corresponding fields in +/// `config.example.toml`. +#[derive(Default, Clone)] +pub struct Config { + pub changelog_seen: Option, // FIXME: Deprecated field. Remove it at 2024. + pub change_id: Option, + pub ccache: Option, + /// Call Build::ninja() instead of this. 
+ pub ninja_in_file: bool, + pub verbose: usize, + pub submodules: Option, + pub compiler_docs: bool, + pub library_docs_private_items: bool, + pub docs_minification: bool, + pub docs: bool, + pub locked_deps: bool, + pub vendor: bool, + pub target_config: HashMap, + pub full_bootstrap: bool, + pub extended: bool, + pub tools: Option>, + pub sanitizers: bool, + pub profiler: bool, + pub omit_git_hash: bool, + pub skip: Vec, + pub include_default_paths: bool, + pub rustc_error_format: Option, + pub json_output: bool, + pub test_compare_mode: bool, + pub color: Color, + pub patch_binaries_for_nix: Option, + pub stage0_metadata: Stage0Metadata, + pub android_ndk: Option, + + pub stdout_is_tty: bool, + pub stderr_is_tty: bool, + + pub on_fail: Option, + pub stage: u32, + pub keep_stage: Vec, + pub keep_stage_std: Vec, + pub src: PathBuf, + /// defaults to `config.toml` + pub config: Option, + pub jobs: Option, + pub cmd: Subcommand, + pub incremental: bool, + pub dry_run: DryRun, + /// Arguments appearing after `--` to be forwarded to tools, + /// e.g. `--fix-broken` or test arguments. + pub free_args: Vec, + + /// `None` if we shouldn't download CI compiler artifacts, or the commit to download if we should. + #[cfg(not(test))] + download_rustc_commit: Option, + #[cfg(test)] + pub download_rustc_commit: Option, + + pub deny_warnings: bool, + pub backtrace_on_ice: bool, + + // llvm codegen options + pub llvm_assertions: bool, + pub llvm_tests: bool, + pub llvm_plugins: bool, + pub llvm_optimize: bool, + pub llvm_thin_lto: bool, + pub llvm_release_debuginfo: bool, + pub llvm_static_stdcpp: bool, + /// `None` if `llvm_from_ci` is true and we haven't yet downloaded llvm. + #[cfg(not(test))] + llvm_link_shared: Cell>, + #[cfg(test)] + pub llvm_link_shared: Cell>, + pub llvm_clang_cl: Option, + pub llvm_targets: Option, + pub llvm_experimental_targets: Option, + pub llvm_link_jobs: Option, + pub llvm_version_suffix: Option, + pub llvm_use_linker: Option, + pub llvm_allow_old_toolchain: bool, + pub llvm_polly: bool, + pub llvm_clang: bool, + pub llvm_enable_warnings: bool, + pub llvm_from_ci: bool, + pub llvm_build_config: HashMap, + + pub use_lld: bool, + pub lld_enabled: bool, + pub llvm_tools_enabled: bool, + + pub llvm_cflags: Option, + pub llvm_cxxflags: Option, + pub llvm_ldflags: Option, + pub llvm_use_libcxx: bool, + + // rust codegen options + pub rust_optimize: RustOptimize, + pub rust_codegen_units: Option, + pub rust_codegen_units_std: Option, + pub rust_debug_assertions: bool, + pub rust_debug_assertions_std: bool, + pub rust_overflow_checks: bool, + pub rust_overflow_checks_std: bool, + pub rust_debug_logging: bool, + pub rust_debuginfo_level_rustc: DebuginfoLevel, + pub rust_debuginfo_level_std: DebuginfoLevel, + pub rust_debuginfo_level_tools: DebuginfoLevel, + pub rust_debuginfo_level_tests: DebuginfoLevel, + pub rust_split_debuginfo: SplitDebuginfo, + pub rust_rpath: bool, + pub rustc_parallel: bool, + pub rustc_default_linker: Option, + pub rust_optimize_tests: bool, + pub rust_dist_src: bool, + pub rust_codegen_backends: Vec>, + pub rust_verify_llvm_ir: bool, + pub rust_thin_lto_import_instr_limit: Option, + pub rust_remap_debuginfo: bool, + pub rust_new_symbol_mangling: Option, + pub rust_profile_use: Option, + pub rust_profile_generate: Option, + pub rust_lto: RustcLto, + pub rust_validate_mir_opts: Option, + pub llvm_profile_use: Option, + pub llvm_profile_generate: bool, + pub llvm_libunwind_default: Option, + pub enable_bolt_settings: bool, + + pub reproducible_artifacts: 
Vec, + + pub build: TargetSelection, + pub hosts: Vec, + pub targets: Vec, + pub local_rebuild: bool, + pub jemalloc: bool, + pub control_flow_guard: bool, + + // dist misc + pub dist_sign_folder: Option, + pub dist_upload_addr: Option, + pub dist_compression_formats: Option>, + pub dist_compression_profile: String, + pub dist_include_mingw_linker: bool, + + // libstd features + pub backtrace: bool, // support for RUST_BACKTRACE + + // misc + pub low_priority: bool, + pub channel: String, + pub description: Option, + pub verbose_tests: bool, + pub save_toolstates: Option, + pub print_step_timings: bool, + pub print_step_rusage: bool, + pub missing_tools: bool, + + // Fallback musl-root for all targets + pub musl_root: Option, + pub prefix: Option, + pub sysconfdir: Option, + pub datadir: Option, + pub docdir: Option, + pub bindir: PathBuf, + pub libdir: Option, + pub mandir: Option, + pub codegen_tests: bool, + pub nodejs: Option, + pub npm: Option, + pub gdb: Option, + pub python: Option, + pub reuse: Option, + pub cargo_native_static: bool, + pub configure_args: Vec, + pub out: PathBuf, + pub rust_info: channel::GitInfo, + + // These are either the stage0 downloaded binaries or the locally installed ones. + pub initial_cargo: PathBuf, + pub initial_rustc: PathBuf, + + #[cfg(not(test))] + initial_rustfmt: RefCell, + #[cfg(test)] + pub initial_rustfmt: RefCell, + + pub paths: Vec, +} + +#[derive(Default, Deserialize, Clone)] +pub struct Stage0Metadata { + pub compiler: CompilerMetadata, + pub config: Stage0Config, + pub checksums_sha256: HashMap, + pub rustfmt: Option, +} +#[derive(Default, Deserialize, Clone)] +pub struct CompilerMetadata { + pub date: String, + pub version: String, +} + +#[derive(Default, Deserialize, Clone)] +pub struct Stage0Config { + pub dist_server: String, + pub artifacts_server: String, + pub artifacts_with_llvm_assertions_server: String, + pub git_merge_commit_email: String, + pub git_repository: String, + pub nightly_branch: String, +} +#[derive(Default, Deserialize, Clone)] +pub struct RustfmtMetadata { + pub date: String, + pub version: String, +} + +#[derive(Clone, Debug, Default)] +pub enum RustfmtState { + SystemToolchain(PathBuf), + Downloaded(PathBuf), + Unavailable, + #[default] + LazyEvaluated, +} + +#[derive(Debug, Default, Clone, Copy, PartialEq)] +pub enum LlvmLibunwind { + #[default] + No, + InTree, + System, +} + +impl FromStr for LlvmLibunwind { + type Err = String; + + fn from_str(value: &str) -> Result { + match value { + "no" => Ok(Self::No), + "in-tree" => Ok(Self::InTree), + "system" => Ok(Self::System), + invalid => Err(format!("Invalid value '{invalid}' for rust.llvm-libunwind config.")), + } + } +} + +#[derive(Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum SplitDebuginfo { + Packed, + Unpacked, + #[default] + Off, +} + +impl std::str::FromStr for SplitDebuginfo { + type Err = (); + + fn from_str(s: &str) -> Result { + match s { + "packed" => Ok(SplitDebuginfo::Packed), + "unpacked" => Ok(SplitDebuginfo::Unpacked), + "off" => Ok(SplitDebuginfo::Off), + _ => Err(()), + } + } +} + +impl SplitDebuginfo { + /// Returns the default `-Csplit-debuginfo` value for the current target. See the comment for + /// `rust.split-debuginfo` in `config.example.toml`. + fn default_for_platform(target: &str) -> Self { + if target.contains("apple") { + SplitDebuginfo::Unpacked + } else if target.contains("windows") { + SplitDebuginfo::Packed + } else { + SplitDebuginfo::Off + } + } +} + +/// LTO mode used for compiling rustc itself. 
+#[derive(Default, Clone, PartialEq, Debug)] +pub enum RustcLto { + Off, + #[default] + ThinLocal, + Thin, + Fat, +} + +impl std::str::FromStr for RustcLto { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "thin-local" => Ok(RustcLto::ThinLocal), + "thin" => Ok(RustcLto::Thin), + "fat" => Ok(RustcLto::Fat), + "off" => Ok(RustcLto::Off), + _ => Err(format!("Invalid value for rustc LTO: {s}")), + } + } +} + +#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct TargetSelection { + pub triple: Interned, + file: Option>, + synthetic: bool, +} + +/// Newtype over `Vec` so we can implement custom parsing logic +#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +pub struct TargetSelectionList(Vec); + +pub fn target_selection_list(s: &str) -> Result { + Ok(TargetSelectionList( + s.split(",").filter(|s| !s.is_empty()).map(TargetSelection::from_user).collect(), + )) +} + +impl TargetSelection { + pub fn from_user(selection: &str) -> Self { + let path = Path::new(selection); + + let (triple, file) = if path.exists() { + let triple = path + .file_stem() + .expect("Target specification file has no file stem") + .to_str() + .expect("Target specification file stem is not UTF-8"); + + (triple, Some(selection)) + } else { + (selection, None) + }; + + let triple = INTERNER.intern_str(triple); + let file = file.map(|f| INTERNER.intern_str(f)); + + Self { triple, file, synthetic: false } + } + + pub fn create_synthetic(triple: &str, file: &str) -> Self { + Self { + triple: INTERNER.intern_str(triple), + file: Some(INTERNER.intern_str(file)), + synthetic: true, + } + } + + pub fn rustc_target_arg(&self) -> &str { + self.file.as_ref().unwrap_or(&self.triple) + } + + pub fn contains(&self, needle: &str) -> bool { + self.triple.contains(needle) + } + + pub fn starts_with(&self, needle: &str) -> bool { + self.triple.starts_with(needle) + } + + pub fn ends_with(&self, needle: &str) -> bool { + self.triple.ends_with(needle) + } + + // See src/bootstrap/synthetic_targets.rs + pub fn is_synthetic(&self) -> bool { + self.synthetic + } +} + +impl fmt::Display for TargetSelection { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.triple)?; + if let Some(file) = self.file { + write!(f, "({file})")?; + } + Ok(()) + } +} + +impl fmt::Debug for TargetSelection { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{self}") + } +} + +impl PartialEq<&str> for TargetSelection { + fn eq(&self, other: &&str) -> bool { + self.triple == *other + } +} + +/// Per-target configuration stored in the global configuration structure. +#[derive(Default, Clone)] +pub struct Target { + /// Some(path to llvm-config) if using an external LLVM. + pub llvm_config: Option, + pub llvm_has_rust_patches: Option, + /// Some(path to FileCheck) if one was specified. 
+ pub llvm_filecheck: Option, + pub llvm_libunwind: Option, + pub cc: Option, + pub cxx: Option, + pub ar: Option, + pub ranlib: Option, + pub default_linker: Option, + pub linker: Option, + pub sanitizers: Option, + pub profiler: Option, + pub rpath: Option, + pub crt_static: Option, + pub musl_root: Option, + pub musl_libdir: Option, + pub wasi_root: Option, + pub qemu_rootfs: Option, + pub no_std: bool, +} + +impl Target { + pub fn from_triple(triple: &str) -> Self { + let mut target: Self = Default::default(); + if triple.contains("-none") || triple.contains("nvptx") || triple.contains("switch") { + target.no_std = true; + } + target + } +} +/// Structure of the `config.toml` file that configuration is read from. +/// +/// This structure uses `Decodable` to automatically decode a TOML configuration +/// file into this format, and then this is traversed and written into the above +/// `Config` structure. +#[derive(Deserialize, Default)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +pub(crate) struct TomlConfig { + changelog_seen: Option, // FIXME: Deprecated field. Remove it at 2024. + change_id: Option, + build: Option, + install: Option, + llvm: Option, + rust: Option, + target: Option>, + dist: Option, + profile: Option, +} + +/// Describes how to handle conflicts in merging two [`TomlConfig`] +#[derive(Copy, Clone, Debug)] +enum ReplaceOpt { + /// Silently ignore a duplicated value + IgnoreDuplicate, + /// Override the current value, even if it's `Some` + Override, + /// Exit with an error on duplicate values + ErrorOnDuplicate, +} + +trait Merge { + fn merge(&mut self, other: Self, replace: ReplaceOpt); +} + +impl Merge for TomlConfig { + fn merge( + &mut self, + TomlConfig { + build, + install, + llvm, + rust, + dist, + target, + profile: _, + changelog_seen, + change_id, + }: Self, + replace: ReplaceOpt, + ) { + fn do_merge(x: &mut Option, y: Option, replace: ReplaceOpt) { + if let Some(new) = y { + if let Some(original) = x { + original.merge(new, replace); + } else { + *x = Some(new); + } + } + } + self.changelog_seen.merge(changelog_seen, replace); + self.change_id.merge(change_id, replace); + do_merge(&mut self.build, build, replace); + do_merge(&mut self.install, install, replace); + do_merge(&mut self.llvm, llvm, replace); + do_merge(&mut self.rust, rust, replace); + do_merge(&mut self.dist, dist, replace); + assert!(target.is_none(), "merging target-specific config is not currently supported"); + } +} + +// We are using a decl macro instead of a derive proc macro here to reduce the compile time of +// rustbuild. +macro_rules! define_config { + ($(#[$attr:meta])* struct $name:ident { + $($field:ident: Option<$field_ty:ty> = $field_key:literal,)* + }) => { + $(#[$attr])* + struct $name { + $($field: Option<$field_ty>,)* + } + + impl Merge for $name { + fn merge(&mut self, other: Self, replace: ReplaceOpt) { + $( + match replace { + ReplaceOpt::IgnoreDuplicate => { + if self.$field.is_none() { + self.$field = other.$field; + } + }, + ReplaceOpt::Override => { + if other.$field.is_some() { + self.$field = other.$field; + } + } + ReplaceOpt::ErrorOnDuplicate => { + if other.$field.is_some() { + if self.$field.is_some() { + if cfg!(test) { + panic!("overriding existing option") + } else { + eprintln!("overriding existing option: `{}`", stringify!($field)); + exit!(2); + } + } else { + self.$field = other.$field; + } + } + } + } + )* + } + } + + // The following is a trimmed version of what serde_derive generates. 
All parts not relevant + // for toml deserialization have been removed. This reduces the binary size and improves + // compile time of rustbuild. + impl<'de> Deserialize<'de> for $name { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct Field; + impl<'de> serde::de::Visitor<'de> for Field { + type Value = $name; + fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(concat!("struct ", stringify!($name))) + } + + #[inline] + fn visit_map(self, mut map: A) -> Result + where + A: serde::de::MapAccess<'de>, + { + $(let mut $field: Option<$field_ty> = None;)* + while let Some(key) = + match serde::de::MapAccess::next_key::(&mut map) { + Ok(val) => val, + Err(err) => { + return Err(err); + } + } + { + match &*key { + $($field_key => { + if $field.is_some() { + return Err(::duplicate_field( + $field_key, + )); + } + $field = match serde::de::MapAccess::next_value::<$field_ty>( + &mut map, + ) { + Ok(val) => Some(val), + Err(err) => { + return Err(err); + } + }; + })* + key => { + return Err(serde::de::Error::unknown_field(key, FIELDS)); + } + } + } + Ok($name { $($field),* }) + } + } + const FIELDS: &'static [&'static str] = &[ + $($field_key,)* + ]; + Deserializer::deserialize_struct( + deserializer, + stringify!($name), + FIELDS, + Field, + ) + } + } + } +} + +impl Merge for Option { + fn merge(&mut self, other: Self, replace: ReplaceOpt) { + match replace { + ReplaceOpt::IgnoreDuplicate => { + if self.is_none() { + *self = other; + } + } + ReplaceOpt::Override => { + if other.is_some() { + *self = other; + } + } + ReplaceOpt::ErrorOnDuplicate => { + if other.is_some() { + if self.is_some() { + if cfg!(test) { + panic!("overriding existing option") + } else { + eprintln!("overriding existing option"); + exit!(2); + } + } else { + *self = other; + } + } + } + } + } +} + +define_config! { + /// TOML representation of various global build decisions. 
+ #[derive(Default)] + struct Build { + build: Option = "build", + host: Option> = "host", + target: Option> = "target", + build_dir: Option = "build-dir", + cargo: Option = "cargo", + rustc: Option = "rustc", + rustfmt: Option = "rustfmt", + docs: Option = "docs", + compiler_docs: Option = "compiler-docs", + library_docs_private_items: Option = "library-docs-private-items", + docs_minification: Option = "docs-minification", + submodules: Option = "submodules", + gdb: Option = "gdb", + nodejs: Option = "nodejs", + npm: Option = "npm", + python: Option = "python", + reuse: Option = "reuse", + locked_deps: Option = "locked-deps", + vendor: Option = "vendor", + full_bootstrap: Option = "full-bootstrap", + extended: Option = "extended", + tools: Option> = "tools", + verbose: Option = "verbose", + sanitizers: Option = "sanitizers", + profiler: Option = "profiler", + cargo_native_static: Option = "cargo-native-static", + low_priority: Option = "low-priority", + configure_args: Option> = "configure-args", + local_rebuild: Option = "local-rebuild", + print_step_timings: Option = "print-step-timings", + print_step_rusage: Option = "print-step-rusage", + check_stage: Option = "check-stage", + doc_stage: Option = "doc-stage", + build_stage: Option = "build-stage", + test_stage: Option = "test-stage", + install_stage: Option = "install-stage", + dist_stage: Option = "dist-stage", + bench_stage: Option = "bench-stage", + patch_binaries_for_nix: Option = "patch-binaries-for-nix", + // NOTE: only parsed by bootstrap.py, `--feature build-metrics` enables metrics unconditionally + metrics: Option = "metrics", + android_ndk: Option = "android-ndk", + } +} + +define_config! { + /// TOML representation of various global install decisions. + struct Install { + prefix: Option = "prefix", + sysconfdir: Option = "sysconfdir", + docdir: Option = "docdir", + bindir: Option = "bindir", + libdir: Option = "libdir", + mandir: Option = "mandir", + datadir: Option = "datadir", + } +} + +define_config! { + /// TOML representation of how the LLVM build is configured. + struct Llvm { + optimize: Option = "optimize", + thin_lto: Option = "thin-lto", + release_debuginfo: Option = "release-debuginfo", + assertions: Option = "assertions", + tests: Option = "tests", + plugins: Option = "plugins", + ccache: Option = "ccache", + static_libstdcpp: Option = "static-libstdcpp", + ninja: Option = "ninja", + targets: Option = "targets", + experimental_targets: Option = "experimental-targets", + link_jobs: Option = "link-jobs", + link_shared: Option = "link-shared", + version_suffix: Option = "version-suffix", + clang_cl: Option = "clang-cl", + cflags: Option = "cflags", + cxxflags: Option = "cxxflags", + ldflags: Option = "ldflags", + use_libcxx: Option = "use-libcxx", + use_linker: Option = "use-linker", + allow_old_toolchain: Option = "allow-old-toolchain", + polly: Option = "polly", + clang: Option = "clang", + enable_warnings: Option = "enable-warnings", + download_ci_llvm: Option = "download-ci-llvm", + build_config: Option> = "build-config", + } +} + +define_config! 
{ + struct Dist { + sign_folder: Option = "sign-folder", + gpg_password_file: Option = "gpg-password-file", + upload_addr: Option = "upload-addr", + src_tarball: Option = "src-tarball", + missing_tools: Option = "missing-tools", + compression_formats: Option> = "compression-formats", + compression_profile: Option = "compression-profile", + include_mingw_linker: Option = "include-mingw-linker", + } +} + +#[derive(Clone, Debug, Deserialize)] +#[serde(untagged)] +pub enum StringOrBool { + String(String), + Bool(bool), +} + +impl Default for StringOrBool { + fn default() -> StringOrBool { + StringOrBool::Bool(false) + } +} + +impl StringOrBool { + fn is_string_or_true(&self) -> bool { + matches!(self, Self::String(_) | Self::Bool(true)) + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum RustOptimize { + String(String), + Int(u8), + Bool(bool), +} + +impl Default for RustOptimize { + fn default() -> RustOptimize { + RustOptimize::Bool(false) + } +} + +impl<'de> Deserialize<'de> for RustOptimize { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_any(OptimizeVisitor) + } +} + +struct OptimizeVisitor; + +impl<'de> serde::de::Visitor<'de> for OptimizeVisitor { + type Value = RustOptimize; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str(r#"one of: 0, 1, 2, 3, "s", "z", true, false"#) + } + + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, + { + if ["s", "z"].iter().find(|x| **x == value).is_some() { + Ok(RustOptimize::String(value.to_string())) + } else { + Err(format_optimize_error_msg(value)).map_err(serde::de::Error::custom) + } + } + + fn visit_i64(self, value: i64) -> Result + where + E: serde::de::Error, + { + if matches!(value, 0..=3) { + Ok(RustOptimize::Int(value as u8)) + } else { + Err(format_optimize_error_msg(value)).map_err(serde::de::Error::custom) + } + } + + fn visit_bool(self, value: bool) -> Result + where + E: serde::de::Error, + { + Ok(RustOptimize::Bool(value)) + } +} + +fn format_optimize_error_msg(v: impl std::fmt::Display) -> String { + format!( + r#"unrecognized option for rust optimize: "{v}", expected one of 0, 1, 2, 3, "s", "z", true, false"# + ) +} + +impl RustOptimize { + pub(crate) fn is_release(&self) -> bool { + match &self { + RustOptimize::Bool(true) | RustOptimize::String(_) => true, + RustOptimize::Int(i) => *i > 0, + RustOptimize::Bool(false) => false, + } + } + + pub(crate) fn get_opt_level(&self) -> Option { + match &self { + RustOptimize::String(s) => Some(s.clone()), + RustOptimize::Int(i) => Some(i.to_string()), + RustOptimize::Bool(_) => None, + } + } +} + +#[derive(Deserialize)] +#[serde(untagged)] +enum StringOrInt<'a> { + String(&'a str), + Int(i64), +} +define_config! { + /// TOML representation of how the Rust build is configured. 
+ struct Rust { + optimize: Option = "optimize", + debug: Option = "debug", + codegen_units: Option = "codegen-units", + codegen_units_std: Option = "codegen-units-std", + debug_assertions: Option = "debug-assertions", + debug_assertions_std: Option = "debug-assertions-std", + overflow_checks: Option = "overflow-checks", + overflow_checks_std: Option = "overflow-checks-std", + debug_logging: Option = "debug-logging", + debuginfo_level: Option = "debuginfo-level", + debuginfo_level_rustc: Option = "debuginfo-level-rustc", + debuginfo_level_std: Option = "debuginfo-level-std", + debuginfo_level_tools: Option = "debuginfo-level-tools", + debuginfo_level_tests: Option = "debuginfo-level-tests", + split_debuginfo: Option = "split-debuginfo", + run_dsymutil: Option = "run-dsymutil", + backtrace: Option = "backtrace", + incremental: Option = "incremental", + parallel_compiler: Option = "parallel-compiler", + default_linker: Option = "default-linker", + channel: Option = "channel", + description: Option = "description", + musl_root: Option = "musl-root", + rpath: Option = "rpath", + verbose_tests: Option = "verbose-tests", + optimize_tests: Option = "optimize-tests", + codegen_tests: Option = "codegen-tests", + omit_git_hash: Option = "omit-git-hash", + dist_src: Option = "dist-src", + save_toolstates: Option = "save-toolstates", + codegen_backends: Option> = "codegen-backends", + lld: Option = "lld", + use_lld: Option = "use-lld", + llvm_tools: Option = "llvm-tools", + deny_warnings: Option = "deny-warnings", + backtrace_on_ice: Option = "backtrace-on-ice", + verify_llvm_ir: Option = "verify-llvm-ir", + thin_lto_import_instr_limit: Option = "thin-lto-import-instr-limit", + remap_debuginfo: Option = "remap-debuginfo", + jemalloc: Option = "jemalloc", + test_compare_mode: Option = "test-compare-mode", + llvm_libunwind: Option = "llvm-libunwind", + control_flow_guard: Option = "control-flow-guard", + new_symbol_mangling: Option = "new-symbol-mangling", + profile_generate: Option = "profile-generate", + profile_use: Option = "profile-use", + // ignored; this is set from an env var set by bootstrap.py + download_rustc: Option = "download-rustc", + lto: Option = "lto", + validate_mir_opts: Option = "validate-mir-opts", + } +} + +define_config! { + /// TOML representation of how each build target is configured. 
+ struct TomlTarget { + cc: Option = "cc", + cxx: Option = "cxx", + ar: Option = "ar", + ranlib: Option = "ranlib", + default_linker: Option = "default-linker", + linker: Option = "linker", + llvm_config: Option = "llvm-config", + llvm_has_rust_patches: Option = "llvm-has-rust-patches", + llvm_filecheck: Option = "llvm-filecheck", + llvm_libunwind: Option = "llvm-libunwind", + sanitizers: Option = "sanitizers", + profiler: Option = "profiler", + rpath: Option = "rpath", + crt_static: Option = "crt-static", + musl_root: Option = "musl-root", + musl_libdir: Option = "musl-libdir", + wasi_root: Option = "wasi-root", + qemu_rootfs: Option = "qemu-rootfs", + no_std: Option = "no-std", + } +} + +impl Config { + pub fn default_opts() -> Config { + let mut config = Config::default(); + config.llvm_optimize = true; + config.ninja_in_file = true; + config.llvm_static_stdcpp = false; + config.backtrace = true; + config.rust_optimize = RustOptimize::Bool(true); + config.rust_optimize_tests = true; + config.submodules = None; + config.docs = true; + config.docs_minification = true; + config.rust_rpath = true; + config.channel = "dev".to_string(); + config.codegen_tests = true; + config.rust_dist_src = true; + config.rust_codegen_backends = vec![INTERNER.intern_str("llvm")]; + config.deny_warnings = true; + config.bindir = "bin".into(); + config.dist_include_mingw_linker = true; + config.dist_compression_profile = "fast".into(); + config.rustc_parallel = true; + + config.stdout_is_tty = std::io::stdout().is_terminal(); + config.stderr_is_tty = std::io::stderr().is_terminal(); + + // set by build.rs + config.build = TargetSelection::from_user(&env!("BUILD_TRIPLE")); + + let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + // Undo `src/bootstrap` + config.src = manifest_dir.parent().unwrap().parent().unwrap().to_owned(); + config.out = PathBuf::from("build"); + + config + } + + pub fn parse(args: &[String]) -> Config { + #[cfg(test)] + fn get_toml(_: &Path) -> TomlConfig { + TomlConfig::default() + } + + #[cfg(not(test))] + fn get_toml(file: &Path) -> TomlConfig { + let contents = + t!(fs::read_to_string(file), format!("config file {} not found", file.display())); + // Deserialize to Value and then TomlConfig to prevent the Deserialize impl of + // TomlConfig and sub types to be monomorphized 5x by toml. + toml::from_str(&contents) + .and_then(|table: toml::Value| TomlConfig::deserialize(table)) + .unwrap_or_else(|err| { + eprintln!("failed to parse TOML configuration '{}': {err}", file.display()); + exit!(2); + }) + } + Self::parse_inner(args, get_toml) + } + + fn parse_inner(args: &[String], get_toml: impl Fn(&Path) -> TomlConfig) -> Config { + let mut flags = Flags::parse(&args); + let mut config = Config::default_opts(); + + // Set flags. 
+ config.paths = std::mem::take(&mut flags.paths); + config.skip = flags.skip.into_iter().chain(flags.exclude).collect(); + config.include_default_paths = flags.include_default_paths; + config.rustc_error_format = flags.rustc_error_format; + config.json_output = flags.json_output; + config.on_fail = flags.on_fail; + config.jobs = Some(threads_from_config(flags.jobs as u32)); + config.cmd = flags.cmd; + config.incremental = flags.incremental; + config.dry_run = if flags.dry_run { DryRun::UserSelected } else { DryRun::Disabled }; + config.keep_stage = flags.keep_stage; + config.keep_stage_std = flags.keep_stage_std; + config.color = flags.color; + config.free_args = std::mem::take(&mut flags.free_args); + config.llvm_profile_use = flags.llvm_profile_use; + config.llvm_profile_generate = flags.llvm_profile_generate; + config.enable_bolt_settings = flags.enable_bolt_settings; + + // Infer the rest of the configuration. + + // Infer the source directory. This is non-trivial because we want to support a downloaded bootstrap binary, + // running on a completely machine from where it was compiled. + let mut cmd = Command::new("git"); + // NOTE: we cannot support running from outside the repository because the only path we have available + // is set at compile time, which can be wrong if bootstrap was downloaded from source. + // We still support running outside the repository if we find we aren't in a git directory. + cmd.arg("rev-parse").arg("--show-toplevel"); + // Discard stderr because we expect this to fail when building from a tarball. + let output = cmd + .stderr(std::process::Stdio::null()) + .output() + .ok() + .and_then(|output| if output.status.success() { Some(output) } else { None }); + if let Some(output) = output { + let git_root = String::from_utf8(output.stdout).unwrap(); + // We need to canonicalize this path to make sure it uses backslashes instead of forward slashes. + let git_root = PathBuf::from(git_root.trim()).canonicalize().unwrap(); + let s = git_root.to_str().unwrap(); + + // Bootstrap is quite bad at handling /? in front of paths + let src = match s.strip_prefix("\\\\?\\") { + Some(p) => PathBuf::from(p), + None => PathBuf::from(git_root), + }; + // If this doesn't have at least `stage0.json`, we guessed wrong. This can happen when, + // for example, the build directory is inside of another unrelated git directory. + // In that case keep the original `CARGO_MANIFEST_DIR` handling. + // + // NOTE: this implies that downloadable bootstrap isn't supported when the build directory is outside + // the source directory. We could fix that by setting a variable from all three of python, ./x, and x.ps1. + if src.join("src").join("stage0.json").exists() { + config.src = src; + } + } else { + // We're building from a tarball, not git sources. + // We don't support pre-downloaded bootstrap in this case. + } + + if cfg!(test) { + // Use the build directory of the original x.py invocation, so that we can set `initial_rustc` properly. + config.out = Path::new( + &env::var_os("CARGO_TARGET_DIR").expect("cargo test directly is not supported"), + ) + .parent() + .unwrap() + .to_path_buf(); + } + + let stage0_json = t!(std::fs::read(&config.src.join("src").join("stage0.json"))); + + config.stage0_metadata = t!(serde_json::from_slice::(&stage0_json)); + + // Read from `--config`, then `RUST_BOOTSTRAP_CONFIG`, then `./config.toml`, then `config.toml` in the root directory. 
+ let toml_path = flags + .config + .clone() + .or_else(|| env::var_os("RUST_BOOTSTRAP_CONFIG").map(PathBuf::from)); + let using_default_path = toml_path.is_none(); + let mut toml_path = toml_path.unwrap_or_else(|| PathBuf::from("config.toml")); + if using_default_path && !toml_path.exists() { + toml_path = config.src.join(toml_path); + } + + // Give a hard error if `--config` or `RUST_BOOTSTRAP_CONFIG` are set to a missing path, + // but not if `config.toml` hasn't been created. + let mut toml = if !using_default_path || toml_path.exists() { + config.config = Some(toml_path.clone()); + get_toml(&toml_path) + } else { + config.config = None; + TomlConfig::default() + }; + + if let Some(include) = &toml.profile { + // Allows creating alias for profile names, allowing + // profiles to be renamed while maintaining back compatibility + // Keep in sync with `profile_aliases` in bootstrap.py + let profile_aliases = HashMap::from([("user", "dist")]); + let include = match profile_aliases.get(include.as_str()) { + Some(alias) => alias, + None => include.as_str(), + }; + let mut include_path = config.src.clone(); + include_path.push("src"); + include_path.push("bootstrap"); + include_path.push("defaults"); + include_path.push(format!("config.{include}.toml")); + let included_toml = get_toml(&include_path); + toml.merge(included_toml, ReplaceOpt::IgnoreDuplicate); + } + + let mut override_toml = TomlConfig::default(); + for option in flags.set.iter() { + fn get_table(option: &str) -> Result { + toml::from_str(&option) + .and_then(|table: toml::Value| TomlConfig::deserialize(table)) + } + + let mut err = match get_table(option) { + Ok(v) => { + override_toml.merge(v, ReplaceOpt::ErrorOnDuplicate); + continue; + } + Err(e) => e, + }; + // We want to be able to set string values without quotes, + // like in `configure.py`. Try adding quotes around the right hand side + if let Some((key, value)) = option.split_once("=") { + if !value.contains('"') { + match get_table(&format!(r#"{key}="{value}""#)) { + Ok(v) => { + override_toml.merge(v, ReplaceOpt::ErrorOnDuplicate); + continue; + } + Err(e) => err = e, + } + } + } + eprintln!("failed to parse override `{option}`: `{err}"); + exit!(2) + } + toml.merge(override_toml, ReplaceOpt::Override); + + config.changelog_seen = toml.changelog_seen; + config.change_id = toml.change_id; + + let build = toml.build.unwrap_or_default(); + if let Some(file_build) = build.build { + config.build = TargetSelection::from_user(&file_build); + }; + + set(&mut config.out, flags.build_dir.or_else(|| build.build_dir.map(PathBuf::from))); + // NOTE: Bootstrap spawns various commands with different working directories. + // To avoid writing to random places on the file system, `config.out` needs to be an absolute path. + if !config.out.is_absolute() { + // `canonicalize` requires the path to already exist. Use our vendored copy of `absolute` instead. 
+ config.out = crate::utils::helpers::absolute(&config.out); + } + + config.initial_rustc = if let Some(rustc) = build.rustc { + if !flags.skip_stage0_validation { + config.check_build_rustc_version(&rustc); + } + PathBuf::from(rustc) + } else { + config.download_beta_toolchain(); + config.out.join(config.build.triple).join("stage0/bin/rustc") + }; + + config.initial_cargo = build + .cargo + .map(|cargo| { + t!(PathBuf::from(cargo).canonicalize(), "`initial_cargo` not found on disk") + }) + .unwrap_or_else(|| config.out.join(config.build.triple).join("stage0/bin/cargo")); + + // NOTE: it's important this comes *after* we set `initial_rustc` just above. + if config.dry_run() { + let dir = config.out.join("tmp-dry-run"); + t!(fs::create_dir_all(&dir)); + config.out = dir; + } + + config.hosts = if let Some(TargetSelectionList(arg_host)) = flags.host { + arg_host + } else if let Some(file_host) = build.host { + file_host.iter().map(|h| TargetSelection::from_user(h)).collect() + } else { + vec![config.build] + }; + config.targets = if let Some(TargetSelectionList(arg_target)) = flags.target { + arg_target + } else if let Some(file_target) = build.target { + file_target.iter().map(|h| TargetSelection::from_user(h)).collect() + } else { + // If target is *not* configured, then default to the host + // toolchains. + config.hosts.clone() + }; + + config.nodejs = build.nodejs.map(PathBuf::from); + config.npm = build.npm.map(PathBuf::from); + config.gdb = build.gdb.map(PathBuf::from); + config.python = build.python.map(PathBuf::from); + config.reuse = build.reuse.map(PathBuf::from); + config.submodules = build.submodules; + config.android_ndk = build.android_ndk; + set(&mut config.low_priority, build.low_priority); + set(&mut config.compiler_docs, build.compiler_docs); + set(&mut config.library_docs_private_items, build.library_docs_private_items); + set(&mut config.docs_minification, build.docs_minification); + set(&mut config.docs, build.docs); + set(&mut config.locked_deps, build.locked_deps); + set(&mut config.vendor, build.vendor); + set(&mut config.full_bootstrap, build.full_bootstrap); + set(&mut config.extended, build.extended); + config.tools = build.tools; + set(&mut config.verbose, build.verbose); + set(&mut config.sanitizers, build.sanitizers); + set(&mut config.profiler, build.profiler); + set(&mut config.cargo_native_static, build.cargo_native_static); + set(&mut config.configure_args, build.configure_args); + set(&mut config.local_rebuild, build.local_rebuild); + set(&mut config.print_step_timings, build.print_step_timings); + set(&mut config.print_step_rusage, build.print_step_rusage); + config.patch_binaries_for_nix = build.patch_binaries_for_nix; + + config.verbose = cmp::max(config.verbose, flags.verbose as usize); + + if let Some(install) = toml.install { + config.prefix = install.prefix.map(PathBuf::from); + config.sysconfdir = install.sysconfdir.map(PathBuf::from); + config.datadir = install.datadir.map(PathBuf::from); + config.docdir = install.docdir.map(PathBuf::from); + set(&mut config.bindir, install.bindir.map(PathBuf::from)); + config.libdir = install.libdir.map(PathBuf::from); + config.mandir = install.mandir.map(PathBuf::from); + } + + // Store off these values as options because if they're not provided + // we'll infer default values for them later + let mut llvm_assertions = None; + let mut llvm_tests = None; + let mut llvm_plugins = None; + let mut debug = None; + let mut debug_assertions = None; + let mut debug_assertions_std = None; + let mut overflow_checks = 
None; + let mut overflow_checks_std = None; + let mut debug_logging = None; + let mut debuginfo_level = None; + let mut debuginfo_level_rustc = None; + let mut debuginfo_level_std = None; + let mut debuginfo_level_tools = None; + let mut debuginfo_level_tests = None; + let mut optimize = None; + let mut omit_git_hash = None; + + if let Some(rust) = toml.rust { + set(&mut config.channel, rust.channel); + + config.download_rustc_commit = config.download_ci_rustc_commit(rust.download_rustc); + // This list is incomplete, please help by expanding it! + if config.download_rustc_commit.is_some() { + // We need the channel used by the downloaded compiler to match the one we set for rustdoc; + // otherwise rustdoc-ui tests break. + let ci_channel = t!(fs::read_to_string(config.src.join("src/ci/channel"))); + let ci_channel = ci_channel.trim_end(); + if config.channel != ci_channel + && !(config.channel == "dev" && ci_channel == "nightly") + { + panic!( + "setting rust.channel={} is incompatible with download-rustc", + config.channel + ); + } + } + + debug = rust.debug; + debug_assertions = rust.debug_assertions; + debug_assertions_std = rust.debug_assertions_std; + overflow_checks = rust.overflow_checks; + overflow_checks_std = rust.overflow_checks_std; + debug_logging = rust.debug_logging; + debuginfo_level = rust.debuginfo_level; + debuginfo_level_rustc = rust.debuginfo_level_rustc; + debuginfo_level_std = rust.debuginfo_level_std; + debuginfo_level_tools = rust.debuginfo_level_tools; + debuginfo_level_tests = rust.debuginfo_level_tests; + + config.rust_split_debuginfo = rust + .split_debuginfo + .as_deref() + .map(SplitDebuginfo::from_str) + .map(|v| v.expect("invalid value for rust.split_debuginfo")) + .unwrap_or(SplitDebuginfo::default_for_platform(&config.build.triple)); + optimize = rust.optimize; + omit_git_hash = rust.omit_git_hash; + config.rust_new_symbol_mangling = rust.new_symbol_mangling; + set(&mut config.rust_optimize_tests, rust.optimize_tests); + set(&mut config.codegen_tests, rust.codegen_tests); + set(&mut config.rust_rpath, rust.rpath); + set(&mut config.jemalloc, rust.jemalloc); + set(&mut config.test_compare_mode, rust.test_compare_mode); + set(&mut config.backtrace, rust.backtrace); + config.description = rust.description; + set(&mut config.rust_dist_src, rust.dist_src); + set(&mut config.verbose_tests, rust.verbose_tests); + // in the case "false" is set explicitly, do not overwrite the command line args + if let Some(true) = rust.incremental { + config.incremental = true; + } + set(&mut config.use_lld, rust.use_lld); + set(&mut config.lld_enabled, rust.lld); + set(&mut config.llvm_tools_enabled, rust.llvm_tools); + config.rustc_parallel = rust + .parallel_compiler + .unwrap_or(config.channel == "dev" || config.channel == "nightly"); + config.rustc_default_linker = rust.default_linker; + config.musl_root = rust.musl_root.map(PathBuf::from); + config.save_toolstates = rust.save_toolstates.map(PathBuf::from); + set( + &mut config.deny_warnings, + match flags.warnings { + Warnings::Deny => Some(true), + Warnings::Warn => Some(false), + Warnings::Default => rust.deny_warnings, + }, + ); + set(&mut config.backtrace_on_ice, rust.backtrace_on_ice); + set(&mut config.rust_verify_llvm_ir, rust.verify_llvm_ir); + config.rust_thin_lto_import_instr_limit = rust.thin_lto_import_instr_limit; + set(&mut config.rust_remap_debuginfo, rust.remap_debuginfo); + set(&mut config.control_flow_guard, rust.control_flow_guard); + config.llvm_libunwind_default = rust + .llvm_libunwind + .map(|v| 
v.parse().expect("failed to parse rust.llvm-libunwind")); + + if let Some(ref backends) = rust.codegen_backends { + let available_backends = vec!["llvm", "cranelift", "gcc"]; + + config.rust_codegen_backends = backends.iter().map(|s| { + if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) { + if available_backends.contains(&backend) { + panic!("Invalid value '{s}' for 'rust.codegen-backends'. Instead, please use '{backend}'."); + } else { + println!("HELP: '{s}' for 'rust.codegen-backends' might fail. \ + Codegen backends are mostly defined without the '{CODEGEN_BACKEND_PREFIX}' prefix. \ + In this case, it would be referred to as '{backend}'."); + } + } + + INTERNER.intern_str(s) + }).collect(); + } + + config.rust_codegen_units = rust.codegen_units.map(threads_from_config); + config.rust_codegen_units_std = rust.codegen_units_std.map(threads_from_config); + config.rust_profile_use = flags.rust_profile_use.or(rust.profile_use); + config.rust_profile_generate = flags.rust_profile_generate.or(rust.profile_generate); + config.rust_lto = rust + .lto + .as_deref() + .map(|value| RustcLto::from_str(value).unwrap()) + .unwrap_or_default(); + config.rust_validate_mir_opts = rust.validate_mir_opts; + } else { + config.rust_profile_use = flags.rust_profile_use; + config.rust_profile_generate = flags.rust_profile_generate; + } + + config.reproducible_artifacts = flags.reproducible_artifact; + + // rust_info must be set before is_ci_llvm_available() is called. + let default = config.channel == "dev"; + config.omit_git_hash = omit_git_hash.unwrap_or(default); + config.rust_info = GitInfo::new(config.omit_git_hash, &config.src); + + if let Some(llvm) = toml.llvm { + match llvm.ccache { + Some(StringOrBool::String(ref s)) => config.ccache = Some(s.to_string()), + Some(StringOrBool::Bool(true)) => { + config.ccache = Some("ccache".to_string()); + } + Some(StringOrBool::Bool(false)) | None => {} + } + set(&mut config.ninja_in_file, llvm.ninja); + llvm_assertions = llvm.assertions; + llvm_tests = llvm.tests; + llvm_plugins = llvm.plugins; + set(&mut config.llvm_optimize, llvm.optimize); + set(&mut config.llvm_thin_lto, llvm.thin_lto); + set(&mut config.llvm_release_debuginfo, llvm.release_debuginfo); + set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp); + if let Some(v) = llvm.link_shared { + config.llvm_link_shared.set(Some(v)); + } + config.llvm_targets = llvm.targets.clone(); + config.llvm_experimental_targets = llvm.experimental_targets.clone(); + config.llvm_link_jobs = llvm.link_jobs; + config.llvm_version_suffix = llvm.version_suffix.clone(); + config.llvm_clang_cl = llvm.clang_cl.clone(); + + config.llvm_cflags = llvm.cflags.clone(); + config.llvm_cxxflags = llvm.cxxflags.clone(); + config.llvm_ldflags = llvm.ldflags.clone(); + set(&mut config.llvm_use_libcxx, llvm.use_libcxx); + config.llvm_use_linker = llvm.use_linker.clone(); + config.llvm_allow_old_toolchain = llvm.allow_old_toolchain.unwrap_or(false); + config.llvm_polly = llvm.polly.unwrap_or(false); + config.llvm_clang = llvm.clang.unwrap_or(false); + config.llvm_enable_warnings = llvm.enable_warnings.unwrap_or(false); + config.llvm_build_config = llvm.build_config.clone().unwrap_or(Default::default()); + + let asserts = llvm_assertions.unwrap_or(false); + config.llvm_from_ci = config.parse_download_ci_llvm(llvm.download_ci_llvm, asserts); + + if config.llvm_from_ci { + // None of the LLVM options, except assertions, are supported + // when using downloaded LLVM. 
We could just ignore these but + // that's potentially confusing, so force them to not be + // explicitly set. The defaults and CI defaults don't + // necessarily match but forcing people to match (somewhat + // arbitrary) CI configuration locally seems bad/hard. + check_ci_llvm!(llvm.optimize); + check_ci_llvm!(llvm.thin_lto); + check_ci_llvm!(llvm.release_debuginfo); + // CI-built LLVM can be either dynamic or static. We won't know until we download it. + check_ci_llvm!(llvm.link_shared); + check_ci_llvm!(llvm.static_libstdcpp); + check_ci_llvm!(llvm.targets); + check_ci_llvm!(llvm.experimental_targets); + check_ci_llvm!(llvm.link_jobs); + check_ci_llvm!(llvm.clang_cl); + check_ci_llvm!(llvm.version_suffix); + check_ci_llvm!(llvm.cflags); + check_ci_llvm!(llvm.cxxflags); + check_ci_llvm!(llvm.ldflags); + check_ci_llvm!(llvm.use_libcxx); + check_ci_llvm!(llvm.use_linker); + check_ci_llvm!(llvm.allow_old_toolchain); + check_ci_llvm!(llvm.polly); + check_ci_llvm!(llvm.clang); + check_ci_llvm!(llvm.build_config); + check_ci_llvm!(llvm.plugins); + } + + // NOTE: can never be hit when downloading from CI, since we call `check_ci_llvm!(thin_lto)` above. + if config.llvm_thin_lto && llvm.link_shared.is_none() { + // If we're building with ThinLTO on, by default we want to link + // to LLVM shared, to avoid re-doing ThinLTO (which happens in + // the link step) with each stage. + config.llvm_link_shared.set(Some(true)); + } + } else { + config.llvm_from_ci = config.channel == "dev" + && crate::core::build_steps::llvm::is_ci_llvm_available(&config, false); + } + + if let Some(t) = toml.target { + for (triple, cfg) in t { + let mut target = Target::from_triple(&triple); + + if let Some(ref s) = cfg.llvm_config { + if config.download_rustc_commit.is_some() && triple == &*config.build.triple { + panic!( + "setting llvm_config for the host is incompatible with download-rustc" + ); + } + target.llvm_config = Some(config.src.join(s)); + } + target.llvm_has_rust_patches = cfg.llvm_has_rust_patches; + if let Some(ref s) = cfg.llvm_filecheck { + target.llvm_filecheck = Some(config.src.join(s)); + } + target.llvm_libunwind = cfg + .llvm_libunwind + .as_ref() + .map(|v| v.parse().expect("failed to parse rust.llvm-libunwind")); + if let Some(s) = cfg.no_std { + target.no_std = s; + } + target.cc = cfg.cc.map(PathBuf::from); + target.cxx = cfg.cxx.map(PathBuf::from); + target.ar = cfg.ar.map(PathBuf::from); + target.ranlib = cfg.ranlib.map(PathBuf::from); + target.linker = cfg.linker.map(PathBuf::from); + target.crt_static = cfg.crt_static; + target.musl_root = cfg.musl_root.map(PathBuf::from); + target.musl_libdir = cfg.musl_libdir.map(PathBuf::from); + target.wasi_root = cfg.wasi_root.map(PathBuf::from); + target.qemu_rootfs = cfg.qemu_rootfs.map(PathBuf::from); + target.sanitizers = cfg.sanitizers; + target.profiler = cfg.profiler; + target.rpath = cfg.rpath; + + config.target_config.insert(TargetSelection::from_user(&triple), target); + } + } + + if config.llvm_from_ci { + let triple = &config.build.triple; + let ci_llvm_bin = config.ci_llvm_root().join("bin"); + let build_target = config + .target_config + .entry(config.build) + .or_insert_with(|| Target::from_triple(&triple)); + + check_ci_llvm!(build_target.llvm_config); + check_ci_llvm!(build_target.llvm_filecheck); + build_target.llvm_config = Some(ci_llvm_bin.join(exe("llvm-config", config.build))); + build_target.llvm_filecheck = Some(ci_llvm_bin.join(exe("FileCheck", config.build))); + } + + if let Some(t) = toml.dist { + config.dist_sign_folder = 
t.sign_folder.map(PathBuf::from); + config.dist_upload_addr = t.upload_addr; + config.dist_compression_formats = t.compression_formats; + set(&mut config.dist_compression_profile, t.compression_profile); + set(&mut config.rust_dist_src, t.src_tarball); + set(&mut config.missing_tools, t.missing_tools); + set(&mut config.dist_include_mingw_linker, t.include_mingw_linker) + } + + if let Some(r) = build.rustfmt { + *config.initial_rustfmt.borrow_mut() = if r.exists() { + RustfmtState::SystemToolchain(r) + } else { + RustfmtState::Unavailable + }; + } + + // Now that we've reached the end of our configuration, infer the + // default values for all options that we haven't otherwise stored yet. + + config.llvm_assertions = llvm_assertions.unwrap_or(false); + config.llvm_tests = llvm_tests.unwrap_or(false); + config.llvm_plugins = llvm_plugins.unwrap_or(false); + config.rust_optimize = optimize.unwrap_or(RustOptimize::Bool(true)); + + let default = debug == Some(true); + config.rust_debug_assertions = debug_assertions.unwrap_or(default); + config.rust_debug_assertions_std = + debug_assertions_std.unwrap_or(config.rust_debug_assertions); + config.rust_overflow_checks = overflow_checks.unwrap_or(default); + config.rust_overflow_checks_std = + overflow_checks_std.unwrap_or(config.rust_overflow_checks); + + config.rust_debug_logging = debug_logging.unwrap_or(config.rust_debug_assertions); + + let with_defaults = |debuginfo_level_specific: Option<_>| { + debuginfo_level_specific.or(debuginfo_level).unwrap_or(if debug == Some(true) { + DebuginfoLevel::Limited + } else { + DebuginfoLevel::None + }) + }; + config.rust_debuginfo_level_rustc = with_defaults(debuginfo_level_rustc); + config.rust_debuginfo_level_std = with_defaults(debuginfo_level_std); + config.rust_debuginfo_level_tools = with_defaults(debuginfo_level_tools); + config.rust_debuginfo_level_tests = debuginfo_level_tests.unwrap_or(DebuginfoLevel::None); + + let download_rustc = config.download_rustc_commit.is_some(); + // See https://github.com/rust-lang/compiler-team/issues/326 + config.stage = match config.cmd { + Subcommand::Check { .. } => flags.stage.or(build.check_stage).unwrap_or(0), + // `download-rustc` only has a speed-up for stage2 builds. Default to stage2 unless explicitly overridden. + Subcommand::Doc { .. } => { + flags.stage.or(build.doc_stage).unwrap_or(if download_rustc { 2 } else { 0 }) + } + Subcommand::Build { .. } => { + flags.stage.or(build.build_stage).unwrap_or(if download_rustc { 2 } else { 1 }) + } + Subcommand::Test { .. } => { + flags.stage.or(build.test_stage).unwrap_or(if download_rustc { 2 } else { 1 }) + } + Subcommand::Bench { .. } => flags.stage.or(build.bench_stage).unwrap_or(2), + Subcommand::Dist { .. } => flags.stage.or(build.dist_stage).unwrap_or(2), + Subcommand::Install { .. } => flags.stage.or(build.install_stage).unwrap_or(2), + // These are all bootstrap tools, which don't depend on the compiler. + // The stage we pass shouldn't matter, but use 0 just in case. + Subcommand::Clean { .. } + | Subcommand::Clippy { .. } + | Subcommand::Fix { .. } + | Subcommand::Run { .. } + | Subcommand::Setup { .. } + | Subcommand::Format { .. } + | Subcommand::Suggest { .. } => flags.stage.unwrap_or(0), + }; + + // CI should always run stage 2 builds, unless it specifically states otherwise + #[cfg(not(test))] + if flags.stage.is_none() && crate::CiEnv::current() != crate::CiEnv::None { + match config.cmd { + Subcommand::Test { .. } + | Subcommand::Doc { .. } + | Subcommand::Build { .. } + | Subcommand::Bench { .. 
} + | Subcommand::Dist { .. } + | Subcommand::Install { .. } => { + assert_eq!( + config.stage, 2, + "x.py should be run with `--stage 2` on CI, but was run with `--stage {}`", + config.stage, + ); + } + Subcommand::Clean { .. } + | Subcommand::Check { .. } + | Subcommand::Clippy { .. } + | Subcommand::Fix { .. } + | Subcommand::Run { .. } + | Subcommand::Setup { .. } + | Subcommand::Format { .. } + | Subcommand::Suggest { .. } => {} + } + } + + config + } + + pub(crate) fn dry_run(&self) -> bool { + match self.dry_run { + DryRun::Disabled => false, + DryRun::SelfCheck | DryRun::UserSelected => true, + } + } + + /// Runs a command, printing out nice contextual information if it fails. + /// Exits if the command failed to execute at all, otherwise returns its + /// `status.success()`. + #[deprecated = "use `Builder::try_run` instead where possible"] + pub(crate) fn try_run(&self, cmd: &mut Command) -> Result<(), ()> { + if self.dry_run() { + return Ok(()); + } + self.verbose(&format!("running: {cmd:?}")); + build_helper::util::try_run(cmd, self.is_verbose()) + } + + /// A git invocation which runs inside the source directory. + /// + /// Use this rather than `Command::new("git")` in order to support out-of-tree builds. + pub(crate) fn git(&self) -> Command { + let mut git = Command::new("git"); + git.current_dir(&self.src); + git + } + + pub(crate) fn test_args(&self) -> Vec<&str> { + let mut test_args = match self.cmd { + Subcommand::Test { ref test_args, .. } | Subcommand::Bench { ref test_args, .. } => { + test_args.iter().flat_map(|s| s.split_whitespace()).collect() + } + _ => vec![], + }; + test_args.extend(self.free_args.iter().map(|s| s.as_str())); + test_args + } + + pub(crate) fn args(&self) -> Vec<&str> { + let mut args = match self.cmd { + Subcommand::Run { ref args, .. } => { + args.iter().flat_map(|s| s.split_whitespace()).collect() + } + _ => vec![], + }; + args.extend(self.free_args.iter().map(|s| s.as_str())); + args + } + + /// Bootstrap embeds a version number into the name of shared libraries it uploads in CI. + /// Return the version it would have used for the given commit. 
+ pub(crate) fn artifact_version_part(&self, commit: &str) -> String { + let (channel, version) = if self.rust_info.is_managed_git_subrepository() { + let mut channel = self.git(); + channel.arg("show").arg(format!("{commit}:src/ci/channel")); + let channel = output(&mut channel); + let mut version = self.git(); + version.arg("show").arg(format!("{commit}:src/version")); + let version = output(&mut version); + (channel.trim().to_owned(), version.trim().to_owned()) + } else { + let channel = fs::read_to_string(self.src.join("src/ci/channel")); + let version = fs::read_to_string(self.src.join("src/version")); + match (channel, version) { + (Ok(channel), Ok(version)) => { + (channel.trim().to_owned(), version.trim().to_owned()) + } + (channel, version) => { + let src = self.src.display(); + eprintln!("ERROR: failed to determine artifact channel and/or version"); + eprintln!( + "HELP: consider using a git checkout or ensure these files are readable" + ); + if let Err(channel) = channel { + eprintln!("reading {src}/src/ci/channel failed: {channel:?}"); + } + if let Err(version) = version { + eprintln!("reading {src}/src/version failed: {version:?}"); + } + panic!(); + } + } + }; + + match channel.as_str() { + "stable" => version, + "beta" => channel, + "nightly" => channel, + other => unreachable!("{:?} is not recognized as a valid channel", other), + } + } + + /// Try to find the relative path of `bindir`, otherwise return it in full. + pub fn bindir_relative(&self) -> &Path { + let bindir = &self.bindir; + if bindir.is_absolute() { + // Try to make it relative to the prefix. + if let Some(prefix) = &self.prefix { + if let Ok(stripped) = bindir.strip_prefix(prefix) { + return stripped; + } + } + } + bindir + } + + /// Try to find the relative path of `libdir`. + pub fn libdir_relative(&self) -> Option<&Path> { + let libdir = self.libdir.as_ref()?; + if libdir.is_relative() { + Some(libdir) + } else { + // Try to make it relative to the prefix. + libdir.strip_prefix(self.prefix.as_ref()?).ok() + } + } + + /// The absolute path to the downloaded LLVM artifacts. + pub(crate) fn ci_llvm_root(&self) -> PathBuf { + assert!(self.llvm_from_ci); + self.out.join(&*self.build.triple).join("ci-llvm") + } + + /// Directory where the extracted `rustc-dev` component is stored. + pub(crate) fn ci_rustc_dir(&self) -> PathBuf { + assert!(self.download_rustc()); + self.out.join(self.build.triple).join("ci-rustc") + } + + /// Determine whether llvm should be linked dynamically. + /// + /// If `false`, llvm should be linked statically. + /// This is computed on demand since LLVM might have to first be downloaded from CI. + pub(crate) fn llvm_link_shared(&self) -> bool { + let mut opt = self.llvm_link_shared.get(); + if opt.is_none() && self.dry_run() { + // just assume static for now - dynamic linking isn't supported on all platforms + return false; + } + + let llvm_link_shared = *opt.get_or_insert_with(|| { + if self.llvm_from_ci { + self.maybe_download_ci_llvm(); + let ci_llvm = self.ci_llvm_root(); + let link_type = t!( + std::fs::read_to_string(ci_llvm.join("link-type.txt")), + format!("CI llvm missing: {}", ci_llvm.display()) + ); + link_type == "dynamic" + } else { + // unclear how thought-through this default is, but it maintains compatibility with + // previous behavior + false + } + }); + self.llvm_link_shared.set(opt); + llvm_link_shared + } + + /// Return whether we will use a downloaded, pre-compiled version of rustc, or just build from source. 
+ pub(crate) fn download_rustc(&self) -> bool { + self.download_rustc_commit().is_some() + } + + pub(crate) fn download_rustc_commit(&self) -> Option<&str> { + static DOWNLOAD_RUSTC: OnceCell> = OnceCell::new(); + if self.dry_run() && DOWNLOAD_RUSTC.get().is_none() { + // avoid trying to actually download the commit + return self.download_rustc_commit.as_deref(); + } + + DOWNLOAD_RUSTC + .get_or_init(|| match &self.download_rustc_commit { + None => None, + Some(commit) => { + self.download_ci_rustc(commit); + Some(commit.clone()) + } + }) + .as_deref() + } + + pub(crate) fn initial_rustfmt(&self) -> Option { + match &mut *self.initial_rustfmt.borrow_mut() { + RustfmtState::SystemToolchain(p) | RustfmtState::Downloaded(p) => Some(p.clone()), + RustfmtState::Unavailable => None, + r @ RustfmtState::LazyEvaluated => { + if self.dry_run() { + return Some(PathBuf::new()); + } + let path = self.maybe_download_rustfmt(); + *r = if let Some(p) = &path { + RustfmtState::Downloaded(p.clone()) + } else { + RustfmtState::Unavailable + }; + path + } + } + } + + pub fn verbose(&self, msg: &str) { + if self.verbose > 0 { + println!("{msg}"); + } + } + + pub fn sanitizers_enabled(&self, target: TargetSelection) -> bool { + self.target_config.get(&target).map(|t| t.sanitizers).flatten().unwrap_or(self.sanitizers) + } + + pub fn any_sanitizers_enabled(&self) -> bool { + self.target_config.values().any(|t| t.sanitizers == Some(true)) || self.sanitizers + } + + pub fn profiler_path(&self, target: TargetSelection) -> Option<&str> { + match self.target_config.get(&target)?.profiler.as_ref()? { + StringOrBool::String(s) => Some(s), + StringOrBool::Bool(_) => None, + } + } + + pub fn profiler_enabled(&self, target: TargetSelection) -> bool { + self.target_config + .get(&target) + .and_then(|t| t.profiler.as_ref()) + .map(StringOrBool::is_string_or_true) + .unwrap_or(self.profiler) + } + + pub fn any_profiler_enabled(&self) -> bool { + self.target_config.values().any(|t| matches!(&t.profiler, Some(p) if p.is_string_or_true())) + || self.profiler + } + + pub fn rpath_enabled(&self, target: TargetSelection) -> bool { + self.target_config.get(&target).map(|t| t.rpath).flatten().unwrap_or(self.rust_rpath) + } + + pub fn llvm_enabled(&self) -> bool { + self.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) + } + + pub fn llvm_libunwind(&self, target: TargetSelection) -> LlvmLibunwind { + self.target_config + .get(&target) + .and_then(|t| t.llvm_libunwind) + .or(self.llvm_libunwind_default) + .unwrap_or(if target.contains("fuchsia") { + LlvmLibunwind::InTree + } else { + LlvmLibunwind::No + }) + } + + pub fn submodules(&self, rust_info: &GitInfo) -> bool { + self.submodules.unwrap_or(rust_info.is_managed_git_subrepository()) + } + + pub fn default_codegen_backend(&self) -> Option> { + self.rust_codegen_backends.get(0).cloned() + } + + pub fn git_config(&self) -> GitConfig<'_> { + GitConfig { + git_repository: &self.stage0_metadata.config.git_repository, + nightly_branch: &self.stage0_metadata.config.nightly_branch, + } + } + + pub fn check_build_rustc_version(&self, rustc_path: &str) { + if self.dry_run() { + return; + } + + // check rustc version is same or lower with 1 apart from the building one + let mut cmd = Command::new(rustc_path); + cmd.arg("--version"); + let rustc_output = output(&mut cmd) + .lines() + .next() + .unwrap() + .split(' ') + .nth(1) + .unwrap() + .split('-') + .next() + .unwrap() + .to_owned(); + let rustc_version = Version::parse(&rustc_output.trim()).unwrap(); + let source_version = 
+ Version::parse(&fs::read_to_string(self.src.join("src/version")).unwrap().trim()) + .unwrap(); + if !(source_version == rustc_version + || (source_version.major == rustc_version.major + && (source_version.minor == rustc_version.minor + || source_version.minor == rustc_version.minor + 1))) + { + let prev_version = format!("{}.{}.x", source_version.major, source_version.minor - 1); + eprintln!( + "Unexpected rustc version: {rustc_version}, we should use {prev_version}/{source_version} to build source with {source_version}" + ); + exit!(1); + } + } + + /// Returns the commit to download, or `None` if we shouldn't download CI artifacts. + fn download_ci_rustc_commit(&self, download_rustc: Option) -> Option { + // If `download-rustc` is not set, default to rebuilding. + let if_unchanged = match download_rustc { + None | Some(StringOrBool::Bool(false)) => return None, + Some(StringOrBool::Bool(true)) => false, + Some(StringOrBool::String(s)) if s == "if-unchanged" => true, + Some(StringOrBool::String(other)) => { + panic!("unrecognized option for download-rustc: {other}") + } + }; + + // Handle running from a directory other than the top level + let top_level = output(self.git().args(&["rev-parse", "--show-toplevel"])); + let top_level = top_level.trim_end(); + let compiler = format!("{top_level}/compiler/"); + let library = format!("{top_level}/library/"); + + // Look for a version to compare to based on the current commit. + // Only commits merged by bors will have CI artifacts. + let merge_base = output( + self.git() + .arg("rev-list") + .arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email)) + .args(&["-n1", "--first-parent", "HEAD"]), + ); + let commit = merge_base.trim_end(); + if commit.is_empty() { + println!("ERROR: could not find commit hash for downloading rustc"); + println!("HELP: maybe your repository history is too shallow?"); + println!("HELP: consider disabling `download-rustc`"); + println!("HELP: or fetch enough history to include one upstream commit"); + crate::exit!(1); + } + + // Warn if there were changes to the compiler or standard library since the ancestor commit. + let has_changes = !t!(self + .git() + .args(&["diff-index", "--quiet", &commit, "--", &compiler, &library]) + .status()) + .success(); + if has_changes { + if if_unchanged { + if self.verbose > 0 { + println!( + "WARNING: saw changes to compiler/ or library/ since {commit}; \ + ignoring `download-rustc`" + ); + } + return None; + } + println!( + "WARNING: `download-rustc` is enabled, but there are changes to \ + compiler/ or library/" + ); + } + + Some(commit.to_string()) + } + + fn parse_download_ci_llvm( + &self, + download_ci_llvm: Option, + asserts: bool, + ) -> bool { + match download_ci_llvm { + None => self.channel == "dev" && llvm::is_ci_llvm_available(&self, asserts), + Some(StringOrBool::Bool(b)) => b, + Some(StringOrBool::String(s)) if s == "if-available" => { + llvm::is_ci_llvm_available(&self, asserts) + } + Some(StringOrBool::String(s)) if s == "if-unchanged" => { + // Git is needed to track modifications here, but tarball source is not available. + // If not modified here or built through tarball source, we maintain consistency + // with '"if available"'. + if !self.rust_info.is_from_tarball() + && self + .last_modified_commit(&["src/llvm-project"], "download-ci-llvm", true) + .is_none() + { + // there are some untracked changes in the the given paths. 
+ false + } else { + llvm::is_ci_llvm_available(&self, asserts) + } + } + Some(StringOrBool::String(other)) => { + panic!("unrecognized option for download-ci-llvm: {:?}", other) + } + } + } + + /// Returns the last commit in which any of `modified_paths` were changed, + /// or `None` if there are untracked changes in the working directory and `if_unchanged` is true. + pub fn last_modified_commit( + &self, + modified_paths: &[&str], + option_name: &str, + if_unchanged: bool, + ) -> Option { + // Handle running from a directory other than the top level + let top_level = output(self.git().args(&["rev-parse", "--show-toplevel"])); + let top_level = top_level.trim_end(); + + // Look for a version to compare to based on the current commit. + // Only commits merged by bors will have CI artifacts. + let merge_base = output( + self.git() + .arg("rev-list") + .arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email)) + .args(&["-n1", "--first-parent", "HEAD"]), + ); + let commit = merge_base.trim_end(); + if commit.is_empty() { + println!("error: could not find commit hash for downloading components from CI"); + println!("help: maybe your repository history is too shallow?"); + println!("help: consider disabling `{option_name}`"); + println!("help: or fetch enough history to include one upstream commit"); + crate::exit!(1); + } + + // Warn if there were changes to the compiler or standard library since the ancestor commit. + let mut git = self.git(); + git.args(&["diff-index", "--quiet", &commit, "--"]); + + for path in modified_paths { + git.arg(format!("{top_level}/{path}")); + } + + let has_changes = !t!(git.status()).success(); + if has_changes { + if if_unchanged { + if self.verbose > 0 { + println!( + "warning: saw changes to one of {modified_paths:?} since {commit}; \ + ignoring `{option_name}`" + ); + } + return None; + } + println!( + "warning: `{option_name}` is enabled, but there are changes to one of {modified_paths:?}" + ); + } + + Some(commit.to_string()) + } +} + +fn set(field: &mut T, val: Option) { + if let Some(v) = val { + *field = v; + } +} + +fn threads_from_config(v: u32) -> u32 { + match v { + 0 => std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32, + n => n, + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/flags.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/flags.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/flags.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/flags.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,571 @@ +//! Command-line interface of the rustbuild build system. +//! +//! This module implements the command-line parsing of the build system which +//! has various flags to configure how it's run. + +use std::path::{Path, PathBuf}; + +use clap::{CommandFactory, Parser, ValueEnum}; + +use crate::core::build_steps::setup::Profile; +use crate::core::builder::{Builder, Kind}; +use crate::core::config::{target_selection_list, Config, TargetSelectionList}; +use crate::{Build, DocTests}; + +#[derive(Copy, Clone, Default, Debug, ValueEnum)] +pub enum Color { + Always, + Never, + #[default] + Auto, +} + +/// Whether to deny warnings, emit them as warnings, or use the default behavior +#[derive(Copy, Clone, Default, Debug, ValueEnum)] +pub enum Warnings { + Deny, + Warn, + #[default] + Default, +} + +/// Deserialized version of all flags for this compile. 
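As a rough usage sketch (not part of this patch): the `Flags` struct defined just below is what x.py's command line is deserialized into via clap, so a call like the hypothetical one here populates its fields. The argument values are illustrative only; field types are as declared below.

    // Hypothetical illustration of how an x.py invocation maps onto `Flags`.
    // Equivalent to running: ./x.py build --stage 1 library/std
    let flags = Flags::parse(&[
        "build".into(),
        "--stage".into(),
        "1".into(),
        "library/std".into(),
    ]);
    assert_eq!(flags.stage, Some(1));
    assert_eq!(flags.paths, vec![PathBuf::from("library/std")]);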
+#[derive(Debug, Parser)] +#[clap( + override_usage = "x.py [options] [...]", + disable_help_subcommand(true), + about = "", + next_line_help(false) +)] +pub struct Flags { + #[command(subcommand)] + pub cmd: Subcommand, + + #[arg(global(true), short, long, action = clap::ArgAction::Count)] + /// use verbose output (-vv for very verbose) + pub verbose: u8, // each extra -v after the first is passed to Cargo + #[arg(global(true), short, long)] + /// use incremental compilation + pub incremental: bool, + #[arg(global(true), long, value_hint = clap::ValueHint::FilePath, value_name = "FILE")] + /// TOML configuration file for build + pub config: Option, + #[arg(global(true), long, value_hint = clap::ValueHint::DirPath, value_name = "DIR")] + /// Build directory, overrides `build.build-dir` in `config.toml` + pub build_dir: Option, + + #[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "BUILD")] + /// build target of the stage0 compiler + pub build: Option, + + #[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "HOST", value_parser = target_selection_list)] + /// host targets to build + pub host: Option, + + #[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "TARGET", value_parser = target_selection_list)] + /// target targets to build + pub target: Option, + + #[arg(global(true), long, value_name = "PATH")] + /// build paths to exclude + pub exclude: Vec, // keeping for client backward compatibility + #[arg(global(true), long, value_name = "PATH")] + /// build paths to skip + pub skip: Vec, + #[arg(global(true), long)] + /// include default paths in addition to the provided ones + pub include_default_paths: bool, + + #[arg(global(true), value_hint = clap::ValueHint::Other, long)] + pub rustc_error_format: Option, + + #[arg(global(true), long, value_hint = clap::ValueHint::CommandString, value_name = "CMD")] + /// command to run on failure + pub on_fail: Option, + #[arg(global(true), long)] + /// dry run; don't build anything + pub dry_run: bool, + #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")] + /// stage to build (indicates compiler to use/test, e.g., stage 0 uses the + /// bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.) + pub stage: Option, + + #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")] + /// stage(s) to keep without recompiling + /// (pass multiple times to keep e.g., both stages 0 and 1) + pub keep_stage: Vec, + #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")] + /// stage(s) of the standard library to keep without recompiling + /// (pass multiple times to keep e.g., both stages 0 and 1) + pub keep_stage_std: Vec, + #[arg(global(true), long, value_hint = clap::ValueHint::DirPath, value_name = "DIR")] + /// path to the root of the rust checkout + pub src: Option, + + #[arg( + global(true), + short, + long, + value_hint = clap::ValueHint::Other, + default_value_t = std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get), + value_name = "JOBS" + )] + /// number of jobs to run in parallel + pub jobs: usize, + // This overrides the deny-warnings configuration option, + // which passes -Dwarnings to the compiler invocations. 
+ #[arg(global(true), long)] + #[clap(value_enum, default_value_t=Warnings::Default, value_name = "deny|warn")] + /// if value is deny, will deny warnings + /// if value is warn, will emit warnings + /// otherwise, use the default configured behaviour + pub warnings: Warnings, + + #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "FORMAT")] + /// rustc error format + pub error_format: Option, + #[arg(global(true), long)] + /// use message-format=json + pub json_output: bool, + + #[arg(global(true), long, value_name = "STYLE")] + #[clap(value_enum, default_value_t = Color::Auto)] + /// whether to use color in cargo and rustc output + pub color: Color, + + /// whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml + #[arg(global(true), long, value_name = "VALUE")] + pub llvm_skip_rebuild: Option, + /// generate PGO profile with rustc build + #[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")] + pub rust_profile_generate: Option, + /// use PGO profile for rustc build + #[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")] + pub rust_profile_use: Option, + /// use PGO profile for LLVM build + #[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")] + pub llvm_profile_use: Option, + // LLVM doesn't support a custom location for generating profile + // information. + // + // llvm_out/build/profiles/ is the location this writes to. + /// generate PGO profile with llvm built for rustc + #[arg(global(true), long)] + pub llvm_profile_generate: bool, + /// Enable BOLT link flags + #[arg(global(true), long)] + pub enable_bolt_settings: bool, + /// Skip stage0 compiler validation + #[arg(global(true), long)] + pub skip_stage0_validation: bool, + /// Additional reproducible artifacts that should be added to the reproducible artifacts archive. 
+ #[arg(global(true), long)] + pub reproducible_artifact: Vec, + #[arg(global(true))] + /// paths for the subcommand + pub paths: Vec, + /// override options in config.toml + #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "section.option=value")] + pub set: Vec, + /// arguments passed to subcommands + #[arg(global(true), last(true), value_name = "ARGS")] + pub free_args: Vec, +} + +impl Flags { + pub fn parse(args: &[String]) -> Self { + let first = String::from("x.py"); + let it = std::iter::once(&first).chain(args.iter()); + // We need to check for ` -h -v`, in which case we list the paths + #[derive(Parser)] + #[clap(disable_help_flag(true))] + struct HelpVerboseOnly { + #[arg(short, long)] + help: bool, + #[arg(global(true), short, long, action = clap::ArgAction::Count)] + pub verbose: u8, + #[arg(value_enum)] + cmd: Kind, + } + if let Ok(HelpVerboseOnly { help: true, verbose: 1.., cmd: subcommand }) = + HelpVerboseOnly::try_parse_from(it.clone()) + { + println!("NOTE: updating submodules before printing available paths"); + let config = Config::parse(&[String::from("build")]); + let build = Build::new(config); + let paths = Builder::get_help(&build, subcommand); + if let Some(s) = paths { + println!("{s}"); + } else { + panic!("No paths available for subcommand `{}`", subcommand.as_str()); + } + crate::exit!(0); + } + + Flags::parse_from(it) + } +} + +#[derive(Debug, Clone, Default, clap::Subcommand)] +pub enum Subcommand { + #[clap(aliases = ["b"], long_about = "\n + Arguments: + This subcommand accepts a number of paths to directories to the crates + and/or artifacts to compile. For example, for a quick build of a usable + compiler: + ./x.py build --stage 1 library/std + This will build a compiler and standard library from the local source code. + Once this is done, build/$ARCH/stage1 contains a usable compiler. + If no arguments are passed then the default artifacts for that stage are + compiled. For example: + ./x.py build --stage 0 + ./x.py build ")] + /// Compile either the compiler or libraries + #[default] + Build, + #[clap(aliases = ["c"], long_about = "\n + Arguments: + This subcommand accepts a number of paths to directories to the crates + and/or artifacts to compile. For example: + ./x.py check library/std + If no arguments are passed then many artifacts are checked.")] + /// Compile either the compiler or libraries, using cargo check + Check { + #[arg(long)] + /// Check all targets + all_targets: bool, + }, + /// Run Clippy (uses rustup/cargo-installed clippy binary) + #[clap(long_about = "\n + Arguments: + This subcommand accepts a number of paths to directories to the crates + and/or artifacts to run clippy against. 
For example: + ./x.py clippy library/core + ./x.py clippy library/core library/proc_macro")] + Clippy { + #[arg(long)] + fix: bool, + /// clippy lints to allow + #[arg(global(true), short = 'A', action = clap::ArgAction::Append, value_name = "LINT")] + allow: Vec, + /// clippy lints to deny + #[arg(global(true), short = 'D', action = clap::ArgAction::Append, value_name = "LINT")] + deny: Vec, + /// clippy lints to warn on + #[arg(global(true), short = 'W', action = clap::ArgAction::Append, value_name = "LINT")] + warn: Vec, + /// clippy lints to forbid + #[arg(global(true), short = 'F', action = clap::ArgAction::Append, value_name = "LINT")] + forbid: Vec, + }, + /// Run cargo fix + #[clap(long_about = "\n + Arguments: + This subcommand accepts a number of paths to directories to the crates + and/or artifacts to run `cargo fix` against. For example: + ./x.py fix library/core + ./x.py fix library/core library/proc_macro")] + Fix, + #[clap( + name = "fmt", + long_about = "\n + Arguments: + This subcommand optionally accepts a `--check` flag which succeeds if formatting is correct and + fails if it is not. For example: + ./x.py fmt + ./x.py fmt --check" + )] + /// Run rustfmt + Format { + /// check formatting instead of applying + #[arg(long)] + check: bool, + }, + #[clap(aliases = ["d"], long_about = "\n + Arguments: + This subcommand accepts a number of paths to directories of documentation + to build. For example: + ./x.py doc src/doc/book + ./x.py doc src/doc/nomicon + ./x.py doc src/doc/book library/std + ./x.py doc library/std --json + ./x.py doc library/std --open + If no arguments are passed then everything is documented: + ./x.py doc + ./x.py doc --stage 1")] + /// Build documentation + Doc { + #[arg(long)] + /// open the docs in a browser + open: bool, + #[arg(long)] + /// render the documentation in JSON format in addition to the usual HTML format + json: bool, + }, + #[clap(aliases = ["t"], long_about = "\n + Arguments: + This subcommand accepts a number of paths to test directories that + should be compiled and run. For example: + ./x.py test tests/ui + ./x.py test library/std --test-args hash_map + ./x.py test library/std --stage 0 --no-doc + ./x.py test tests/ui --bless + ./x.py test tests/ui --compare-mode next-solver + Note that `test tests/* --stage N` does NOT depend on `build compiler/rustc --stage N`; + just like `build library/std --stage N` it tests the compiler produced by the previous + stage. + Execute tool tests with a tool name argument: + ./x.py test tidy + If no arguments are passed then the complete artifacts for that stage are + compiled and tested. + ./x.py test + ./x.py test --stage 1")] + /// Build and run some test suites + Test { + #[arg(long)] + /// run all tests regardless of failure + no_fail_fast: bool, + #[arg(long, value_name = "SUBSTRING")] + /// skips tests matching SUBSTRING, if supported by test tool. May be passed multiple times + skip: Vec, + #[arg(long, value_name = "ARGS", allow_hyphen_values(true))] + /// extra arguments to be passed for the test tool being used + /// (e.g. 
libtest, compiletest or rustdoc) + test_args: Vec, + /// extra options to pass the compiler when running tests + #[arg(long, value_name = "ARGS", allow_hyphen_values(true))] + rustc_args: Vec, + #[arg(long)] + /// do not run doc tests + no_doc: bool, + #[arg(long)] + /// only run doc tests + doc: bool, + #[arg(long)] + /// whether to automatically update stderr/stdout files + bless: bool, + #[arg(long)] + /// comma-separated list of other files types to check (accepts py, py:lint, + /// py:fmt, shell) + extra_checks: Option, + #[arg(long)] + /// rerun tests even if the inputs are unchanged + force_rerun: bool, + #[arg(long)] + /// only run tests that result has been changed + only_modified: bool, + #[arg(long, value_name = "COMPARE MODE")] + /// mode describing what file the actual ui output will be compared to + compare_mode: Option, + #[arg(long, value_name = "check | build | run")] + /// force {check,build,run}-pass tests to this mode. + pass: Option, + #[arg(long, value_name = "auto | always | never")] + /// whether to execute run-* tests + run: Option, + #[arg(long)] + /// enable this to generate a Rustfix coverage file, which is saved in + /// `//rustfix_missing_coverage.txt` + rustfix_coverage: bool, + }, + /// Build and run some benchmarks + Bench { + #[arg(long, allow_hyphen_values(true))] + test_args: Vec, + }, + /// Clean out build directories + Clean { + #[arg(long)] + /// Clean the entire build directory (not used by default) + all: bool, + #[arg(long, value_name = "N")] + /// Clean a specific stage without touching other artifacts. By default, every stage is cleaned if this option is not used. + stage: Option, + }, + /// Build distribution artifacts + Dist, + /// Install distribution artifacts + Install, + #[clap(aliases = ["r"], long_about = "\n + Arguments: + This subcommand accepts a number of paths to tools to build and run. For + example: + ./x.py run src/tools/expand-yaml-anchors + At least a tool needs to be called.")] + /// Run tools contained in this repository + Run { + /// arguments for the tool + #[arg(long, allow_hyphen_values(true))] + args: Vec, + }, + /// Set up the environment for development + #[clap(long_about = format!( + "\n +x.py setup creates a `config.toml` which changes the defaults for x.py itself, +as well as setting up a git pre-push hook, VS Code config and toolchain link. +Arguments: + This subcommand accepts a 'profile' to use for builds. For example: + ./x.py setup library + The profile is optional and you will be prompted interactively if it is not given. + The following profiles are available: +{} + To only set up the git hook, VS Code config or toolchain link, you may use + ./x.py setup hook + ./x.py setup vscode + ./x.py setup link", Profile::all_for_help(" ").trim_end()))] + Setup { + /// Either the profile for `config.toml` or another setup action. + /// May be omitted to set up interactively + #[arg(value_name = "|hook|vscode|link")] + profile: Option, + }, + /// Suggest a subset of tests to run, based on modified files + #[clap(long_about = "\n")] + Suggest { + /// run suggested tests + #[arg(long)] + run: bool, + }, +} + +impl Subcommand { + pub fn kind(&self) -> Kind { + match self { + Subcommand::Bench { .. } => Kind::Bench, + Subcommand::Build { .. } => Kind::Build, + Subcommand::Check { .. } => Kind::Check, + Subcommand::Clippy { .. } => Kind::Clippy, + Subcommand::Doc { .. } => Kind::Doc, + Subcommand::Fix { .. } => Kind::Fix, + Subcommand::Format { .. } => Kind::Format, + Subcommand::Test { .. 
} => Kind::Test, + Subcommand::Clean { .. } => Kind::Clean, + Subcommand::Dist { .. } => Kind::Dist, + Subcommand::Install { .. } => Kind::Install, + Subcommand::Run { .. } => Kind::Run, + Subcommand::Setup { .. } => Kind::Setup, + Subcommand::Suggest { .. } => Kind::Suggest, + } + } + + pub fn rustc_args(&self) -> Vec<&str> { + match *self { + Subcommand::Test { ref rustc_args, .. } => { + rustc_args.iter().flat_map(|s| s.split_whitespace()).collect() + } + _ => vec![], + } + } + + pub fn fail_fast(&self) -> bool { + match *self { + Subcommand::Test { no_fail_fast, .. } => !no_fail_fast, + _ => false, + } + } + + pub fn doc_tests(&self) -> DocTests { + match *self { + Subcommand::Test { doc, no_doc, .. } => { + if doc { + DocTests::Only + } else if no_doc { + DocTests::No + } else { + DocTests::Yes + } + } + _ => DocTests::Yes, + } + } + + pub fn bless(&self) -> bool { + match *self { + Subcommand::Test { bless, .. } => bless, + _ => false, + } + } + + pub fn extra_checks(&self) -> Option<&str> { + match *self { + Subcommand::Test { ref extra_checks, .. } => extra_checks.as_ref().map(String::as_str), + _ => None, + } + } + + pub fn only_modified(&self) -> bool { + match *self { + Subcommand::Test { only_modified, .. } => only_modified, + _ => false, + } + } + + pub fn force_rerun(&self) -> bool { + match *self { + Subcommand::Test { force_rerun, .. } => force_rerun, + _ => false, + } + } + + pub fn rustfix_coverage(&self) -> bool { + match *self { + Subcommand::Test { rustfix_coverage, .. } => rustfix_coverage, + _ => false, + } + } + + pub fn compare_mode(&self) -> Option<&str> { + match *self { + Subcommand::Test { ref compare_mode, .. } => compare_mode.as_ref().map(|s| &s[..]), + _ => None, + } + } + + pub fn pass(&self) -> Option<&str> { + match *self { + Subcommand::Test { ref pass, .. } => pass.as_ref().map(|s| &s[..]), + _ => None, + } + } + + pub fn run(&self) -> Option<&str> { + match *self { + Subcommand::Test { ref run, .. } => run.as_ref().map(|s| &s[..]), + _ => None, + } + } + + pub fn open(&self) -> bool { + match *self { + Subcommand::Doc { open, .. } => open, + _ => false, + } + } + + pub fn json(&self) -> bool { + match *self { + Subcommand::Doc { json, .. } => json, + _ => false, + } + } +} + +/// Returns the shell completion for a given shell, if the result differs from the current +/// content of `path`. If `path` does not exist, always returns `Some`. 
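A minimal usage sketch of `get_completion` (defined just below), assuming a hypothetical call site and the Bash generator from `clap_complete`; the completion-script path here is illustrative, and bootstrap's real callers may differ.

    // Regenerate the checked-in Bash completion script only when it is stale.
    let path = std::path::Path::new("src/etc/completions/x.py.sh");
    if let Some(updated) = get_completion(clap_complete::shells::Bash, path) {
        std::fs::write(path, updated).expect("failed to update completion script");
    }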
+pub fn get_completion(shell: G, path: &Path) -> Option { + let mut cmd = Flags::command(); + let current = if !path.exists() { + String::new() + } else { + std::fs::read_to_string(path).unwrap_or_else(|_| { + eprintln!("couldn't read {}", path.display()); + crate::exit!(1) + }) + }; + let mut buf = Vec::new(); + clap_complete::generate(shell, &mut cmd, "x.py", &mut buf); + if buf == current.as_bytes() { + return None; + } + Some(String::from_utf8(buf).expect("completion script should be UTF-8")) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/config/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,4 @@ +pub(crate) mod config; +pub(crate) mod flags; + +pub use config::*; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/download.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/download.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/download.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/download.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,701 @@ +use std::{ + env, + ffi::{OsStr, OsString}, + fs::{self, File}, + io::{BufRead, BufReader, BufWriter, ErrorKind, Write}, + path::{Path, PathBuf}, + process::{Command, Stdio}, +}; + +use build_helper::ci::CiEnv; +use once_cell::sync::OnceCell; +use xz2::bufread::XzDecoder; + +use crate::core::build_steps::llvm::detect_llvm_sha; +use crate::core::config::RustfmtMetadata; +use crate::utils::helpers::{check_run, exe, program_out_of_date}; +use crate::{t, Config}; + +static SHOULD_FIX_BINS_AND_DYLIBS: OnceCell = OnceCell::new(); + +/// `Config::try_run` wrapper for this module to avoid warnings on `try_run`, since we don't have access to a `builder` yet. +fn try_run(config: &Config, cmd: &mut Command) -> Result<(), ()> { + #[allow(deprecated)] + config.try_run(cmd) +} + +/// Generic helpers that are useful anywhere in bootstrap. +impl Config { + pub fn is_verbose(&self) -> bool { + self.verbose > 0 + } + + pub(crate) fn create(&self, path: &Path, s: &str) { + if self.dry_run() { + return; + } + t!(fs::write(path, s)); + } + + pub(crate) fn remove(&self, f: &Path) { + if self.dry_run() { + return; + } + fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {:?}", f)); + } + + /// Create a temporary directory in `out` and return its path. + /// + /// NOTE: this temporary directory is shared between all steps; + /// if you need an empty directory, create a new subdirectory inside it. + pub(crate) fn tempdir(&self) -> PathBuf { + let tmp = self.out.join("tmp"); + t!(fs::create_dir_all(&tmp)); + tmp + } + + /// Runs a command, printing out nice contextual information if it fails. + /// Returns false if do not execute at all, otherwise returns its + /// `status.success()`. 
+ pub(crate) fn check_run(&self, cmd: &mut Command) -> bool { + if self.dry_run() { + return true; + } + self.verbose(&format!("running: {cmd:?}")); + check_run(cmd, self.is_verbose()) + } + + /// Whether or not `fix_bin_or_dylib` needs to be run; can only be true + /// on NixOS + fn should_fix_bins_and_dylibs(&self) -> bool { + let val = *SHOULD_FIX_BINS_AND_DYLIBS.get_or_init(|| { + match Command::new("uname").arg("-s").stderr(Stdio::inherit()).output() { + Err(_) => return false, + Ok(output) if !output.status.success() => return false, + Ok(output) => { + let mut os_name = output.stdout; + if os_name.last() == Some(&b'\n') { + os_name.pop(); + } + if os_name != b"Linux" { + return false; + } + } + } + + // If the user has asked binaries to be patched for Nix, then + // don't check for NixOS or `/lib`. + // NOTE: this intentionally comes after the Linux check: + // - patchelf only works with ELF files, so no need to run it on Mac or Windows + // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc. + if let Some(explicit_value) = self.patch_binaries_for_nix { + return explicit_value; + } + + // Use `/etc/os-release` instead of `/etc/NIXOS`. + // The latter one does not exist on NixOS when using tmpfs as root. + let is_nixos = match File::open("/etc/os-release") { + Err(e) if e.kind() == ErrorKind::NotFound => false, + Err(e) => panic!("failed to access /etc/os-release: {}", e), + Ok(os_release) => BufReader::new(os_release).lines().any(|l| { + let l = l.expect("reading /etc/os-release"); + matches!(l.trim(), "ID=nixos" | "ID='nixos'" | "ID=\"nixos\"") + }), + }; + if !is_nixos { + let in_nix_shell = env::var("IN_NIX_SHELL"); + if let Ok(in_nix_shell) = in_nix_shell { + eprintln!( + "The IN_NIX_SHELL environment variable is `{in_nix_shell}`; \ + you may need to set `patch-binaries-for-nix=true` in config.toml" + ); + } + } + is_nixos + }); + if val { + eprintln!("INFO: You seem to be using Nix."); + } + val + } + + /// Modifies the interpreter section of 'fname' to fix the dynamic linker, + /// or the RPATH section, to fix the dynamic library search path + /// + /// This is only required on NixOS and uses the PatchELF utility to + /// change the interpreter/RPATH of ELF executables. + /// + /// Please see for more information + fn fix_bin_or_dylib(&self, fname: &Path) { + assert_eq!(SHOULD_FIX_BINS_AND_DYLIBS.get(), Some(&true)); + println!("attempting to patch {}", fname.display()); + + // Only build `.nix-deps` once. + static NIX_DEPS_DIR: OnceCell = OnceCell::new(); + let mut nix_build_succeeded = true; + let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| { + // Run `nix-build` to "build" each dependency (which will likely reuse + // the existing `/nix/store` copy, or at most download a pre-built copy). + // + // Importantly, we create a gc-root called `.nix-deps` in the `build/` + // directory, but still reference the actual `/nix/store` path in the rpath + // as it makes it significantly more robust against changes to the location of + // the `.nix-deps` location. + // + // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`). + // zlib: Needed as a system dependency of `libLLVM-*.so`. + // patchelf: Needed for patching ELF binaries (see doc comment above). 
+ let nix_deps_dir = self.out.join(".nix-deps"); + const NIX_EXPR: &str = " + with (import {}); + symlinkJoin { + name = \"rust-stage0-dependencies\"; + paths = [ + zlib + patchelf + stdenv.cc.bintools + ]; + } + "; + nix_build_succeeded = try_run( + self, + Command::new("nix-build").args(&[ + Path::new("-E"), + Path::new(NIX_EXPR), + Path::new("-o"), + &nix_deps_dir, + ]), + ) + .is_ok(); + nix_deps_dir + }); + if !nix_build_succeeded { + return; + } + + let mut patchelf = Command::new(nix_deps_dir.join("bin/patchelf")); + let rpath_entries = { + // ORIGIN is a relative default, all binary and dynamic libraries we ship + // appear to have this (even when `../lib` is redundant). + // NOTE: there are only two paths here, delimited by a `:` + let mut entries = OsString::from("$ORIGIN/../lib:"); + entries.push(t!(fs::canonicalize(nix_deps_dir)).join("lib")); + entries + }; + patchelf.args(&[OsString::from("--set-rpath"), rpath_entries]); + if !fname.extension().map_or(false, |ext| ext == "so") { + // Finally, set the correct .interp for binaries + let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker"); + // FIXME: can we support utf8 here? `args` doesn't accept Vec, only OsString ... + let dynamic_linker = t!(String::from_utf8(t!(fs::read(dynamic_linker_path)))); + patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]); + } + + let _ = try_run(self, patchelf.arg(fname)); + } + + fn download_file(&self, url: &str, dest_path: &Path, help_on_error: &str) { + self.verbose(&format!("download {url}")); + // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/. + let tempfile = self.tempdir().join(dest_path.file_name().unwrap()); + // While bootstrap itself only supports http and https downloads, downstream forks might + // need to download components from other protocols. The match allows them adding more + // protocols without worrying about merge conflicts if we change the HTTP implementation. + match url.split_once("://").map(|(proto, _)| proto) { + Some("http") | Some("https") => { + self.download_http_with_retries(&tempfile, url, help_on_error) + } + Some(other) => panic!("unsupported protocol {other} in {url}"), + None => panic!("no protocol in {url}"), + } + t!(std::fs::rename(&tempfile, dest_path)); + } + + fn download_http_with_retries(&self, tempfile: &Path, url: &str, help_on_error: &str) { + println!("downloading {url}"); + // Try curl. If that fails and we are on windows, fallback to PowerShell. + let mut curl = Command::new("curl"); + curl.args(&[ + "-y", + "30", + "-Y", + "10", // timeout if speed is < 10 bytes/sec for > 30 seconds + "--connect-timeout", + "30", // timeout if cannot connect within 30 seconds + "-o", + tempfile.to_str().unwrap(), + "--retry", + "3", + "-SRf", + ]); + // Don't print progress in CI; the \r wrapping looks bad and downloads don't take long enough for progress to be useful. 
+ if CiEnv::is_ci() { + curl.arg("-s"); + } else { + curl.arg("--progress-bar"); + } + curl.arg(url); + if !self.check_run(&mut curl) { + if self.build.contains("windows-msvc") { + eprintln!("Fallback to PowerShell"); + for _ in 0..3 { + if try_run(self, Command::new("PowerShell.exe").args(&[ + "/nologo", + "-Command", + "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;", + &format!( + "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')", + url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"), + ), + ])).is_err() { + return; + } + eprintln!("\nspurious failure, trying again"); + } + } + if !help_on_error.is_empty() { + eprintln!("{help_on_error}"); + } + crate::exit!(1); + } + } + + fn unpack(&self, tarball: &Path, dst: &Path, pattern: &str) { + eprintln!("extracting {} to {}", tarball.display(), dst.display()); + if !dst.exists() { + t!(fs::create_dir_all(dst)); + } + + // `tarball` ends with `.tar.xz`; strip that suffix + // example: `rust-dev-nightly-x86_64-unknown-linux-gnu` + let uncompressed_filename = + Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap(); + let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap()); + + // decompress the file + let data = t!(File::open(tarball), format!("file {} not found", tarball.display())); + let decompressor = XzDecoder::new(BufReader::new(data)); + + let mut tar = tar::Archive::new(decompressor); + + // `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding + // it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow. + // Cache the entries when we extract it so we only have to read it once. + let mut recorded_entries = + if dst.ends_with("ci-rustc") { recorded_entries(dst, pattern) } else { None }; + + for member in t!(tar.entries()) { + let mut member = t!(member); + let original_path = t!(member.path()).into_owned(); + // skip the top-level directory + if original_path == directory_prefix { + continue; + } + let mut short_path = t!(original_path.strip_prefix(directory_prefix)); + if !short_path.starts_with(pattern) { + continue; + } + short_path = t!(short_path.strip_prefix(pattern)); + let dst_path = dst.join(short_path); + self.verbose(&format!("extracting {} to {}", original_path.display(), dst.display())); + if !t!(member.unpack_in(dst)) { + panic!("path traversal attack ??"); + } + if let Some(record) = &mut recorded_entries { + t!(writeln!(record, "{}", short_path.to_str().unwrap())); + } + let src_path = dst.join(original_path); + if src_path.is_dir() && dst_path.exists() { + continue; + } + t!(fs::rename(src_path, dst_path)); + } + let dst_dir = dst.join(directory_prefix); + if dst_dir.exists() { + t!(fs::remove_dir_all(&dst_dir), format!("failed to remove {}", dst_dir.display())); + } + } + + /// Returns whether the SHA256 checksum of `path` matches `expected`. 
+ pub(crate) fn verify(&self, path: &Path, expected: &str) -> bool { + use sha2::Digest; + + self.verbose(&format!("verifying {}", path.display())); + + if self.dry_run() { + return false; + } + + let mut hasher = sha2::Sha256::new(); + + let file = t!(File::open(path)); + let mut reader = BufReader::new(file); + + loop { + let buffer = t!(reader.fill_buf()); + let l = buffer.len(); + // break if EOF + if l == 0 { + break; + } + hasher.update(buffer); + reader.consume(l); + } + + let checksum = hex::encode(hasher.finalize().as_slice()); + let verified = checksum == expected; + + if !verified { + println!( + "invalid checksum: \n\ + found: {checksum}\n\ + expected: {expected}", + ); + } + + verified + } +} + +fn recorded_entries(dst: &Path, pattern: &str) -> Option> { + let name = if pattern == "rustc-dev" { + ".rustc-dev-contents" + } else if pattern.starts_with("rust-std") { + ".rust-std-contents" + } else { + return None; + }; + Some(BufWriter::new(t!(File::create(dst.join(name))))) +} + +enum DownloadSource { + CI, + Dist, +} + +/// Functions that are only ever called once, but named for clarify and to avoid thousand-line functions. +impl Config { + pub(crate) fn maybe_download_rustfmt(&self) -> Option { + let RustfmtMetadata { date, version } = self.stage0_metadata.rustfmt.as_ref()?; + let channel = format!("{version}-{date}"); + + let host = self.build; + let bin_root = self.out.join(host.triple).join("rustfmt"); + let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); + let rustfmt_stamp = bin_root.join(".rustfmt-stamp"); + if rustfmt_path.exists() && !program_out_of_date(&rustfmt_stamp, &channel) { + return Some(rustfmt_path); + } + + self.download_component( + DownloadSource::Dist, + format!("rustfmt-{version}-{build}.tar.xz", build = host.triple), + "rustfmt-preview", + &date, + "rustfmt", + ); + self.download_component( + DownloadSource::Dist, + format!("rustc-{version}-{build}.tar.xz", build = host.triple), + "rustc", + &date, + "rustfmt", + ); + + if self.should_fix_bins_and_dylibs() { + self.fix_bin_or_dylib(&bin_root.join("bin").join("rustfmt")); + self.fix_bin_or_dylib(&bin_root.join("bin").join("cargo-fmt")); + let lib_dir = bin_root.join("lib"); + for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { + let lib = t!(lib); + if lib.path().extension() == Some(OsStr::new("so")) { + self.fix_bin_or_dylib(&lib.path()); + } + } + } + + self.create(&rustfmt_stamp, &channel); + Some(rustfmt_path) + } + + pub(crate) fn ci_rust_std_contents(&self) -> Vec { + self.ci_component_contents(".rust-std-contents") + } + + pub(crate) fn ci_rustc_dev_contents(&self) -> Vec { + self.ci_component_contents(".rustc-dev-contents") + } + + fn ci_component_contents(&self, stamp_file: &str) -> Vec { + assert!(self.download_rustc()); + if self.dry_run() { + return vec![]; + } + + let ci_rustc_dir = self.ci_rustc_dir(); + let stamp_file = ci_rustc_dir.join(stamp_file); + let contents_file = t!(File::open(&stamp_file), stamp_file.display().to_string()); + t!(BufReader::new(contents_file).lines().collect()) + } + + pub(crate) fn download_ci_rustc(&self, commit: &str) { + self.verbose(&format!("using downloaded stage2 artifacts from CI (commit {commit})")); + + let version = self.artifact_version_part(commit); + // download-rustc doesn't need its own cargo, it can just use beta's. But it does need the + // `rustc_private` crates for tools. 
+ let extra_components = ["rustc-dev"]; + + self.download_toolchain( + &version, + "ci-rustc", + &format!("{commit}-{}", self.llvm_assertions), + &extra_components, + Self::download_ci_component, + ); + } + + pub(crate) fn download_beta_toolchain(&self) { + self.verbose("downloading stage0 beta artifacts"); + + let date = &self.stage0_metadata.compiler.date; + let version = &self.stage0_metadata.compiler.version; + let extra_components = ["cargo"]; + + let download_beta_component = |config: &Config, filename, prefix: &_, date: &_| { + config.download_component(DownloadSource::Dist, filename, prefix, date, "stage0") + }; + + self.download_toolchain( + version, + "stage0", + date, + &extra_components, + download_beta_component, + ); + } + + fn download_toolchain( + &self, + version: &str, + sysroot: &str, + stamp_key: &str, + extra_components: &[&str], + download_component: fn(&Config, String, &str, &str), + ) { + let host = self.build.triple; + let bin_root = self.out.join(host).join(sysroot); + let rustc_stamp = bin_root.join(".rustc-stamp"); + + if !bin_root.join("bin").join(exe("rustc", self.build)).exists() + || program_out_of_date(&rustc_stamp, stamp_key) + { + if bin_root.exists() { + t!(fs::remove_dir_all(&bin_root)); + } + let filename = format!("rust-std-{version}-{host}.tar.xz"); + let pattern = format!("rust-std-{host}"); + download_component(self, filename, &pattern, stamp_key); + let filename = format!("rustc-{version}-{host}.tar.xz"); + download_component(self, filename, "rustc", stamp_key); + + for component in extra_components { + let filename = format!("{component}-{version}-{host}.tar.xz"); + download_component(self, filename, component, stamp_key); + } + + if self.should_fix_bins_and_dylibs() { + self.fix_bin_or_dylib(&bin_root.join("bin").join("rustc")); + self.fix_bin_or_dylib(&bin_root.join("bin").join("rustdoc")); + self.fix_bin_or_dylib( + &bin_root.join("libexec").join("rust-analyzer-proc-macro-srv"), + ); + let lib_dir = bin_root.join("lib"); + for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { + let lib = t!(lib); + if lib.path().extension() == Some(OsStr::new("so")) { + self.fix_bin_or_dylib(&lib.path()); + } + } + } + + t!(fs::write(rustc_stamp, stamp_key)); + } + } + + /// Download a single component of a CI-built toolchain (not necessarily a published nightly). 
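`download_toolchain` above decides whether to re-download a toolchain by comparing a stamp file against the current cache key via `program_out_of_date`. A minimal sketch of that pattern follows, using a hypothetical helper name (the real helper lives in `utils::helpers` and may differ in detail):

    // Re-download when the recorded key no longer matches the wanted one.
    fn stamp_out_of_date(stamp: &std::path::Path, key: &str) -> bool {
        match std::fs::read_to_string(stamp) {
            Ok(prev) => prev.trim() != key,
            Err(_) => true, // missing or unreadable stamp: treat as stale
        }
    }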
+ // NOTE: intentionally takes an owned string to avoid downloading multiple times by accident + fn download_ci_component(&self, filename: String, prefix: &str, commit_with_assertions: &str) { + Self::download_component( + self, + DownloadSource::CI, + filename, + prefix, + commit_with_assertions, + "ci-rustc", + ) + } + + fn download_component( + &self, + mode: DownloadSource, + filename: String, + prefix: &str, + key: &str, + destination: &str, + ) { + let cache_dst = self.out.join("cache"); + let cache_dir = cache_dst.join(key); + if !cache_dir.exists() { + t!(fs::create_dir_all(&cache_dir)); + } + + let bin_root = self.out.join(self.build.triple).join(destination); + let tarball = cache_dir.join(&filename); + let (base_url, url, should_verify) = match mode { + DownloadSource::CI => { + let dist_server = if self.llvm_assertions { + self.stage0_metadata.config.artifacts_with_llvm_assertions_server.clone() + } else { + self.stage0_metadata.config.artifacts_server.clone() + }; + let url = format!( + "{}/{filename}", + key.strip_suffix(&format!("-{}", self.llvm_assertions)).unwrap() + ); + (dist_server, url, false) + } + DownloadSource::Dist => { + let dist_server = env::var("RUSTUP_DIST_SERVER") + .unwrap_or(self.stage0_metadata.config.dist_server.to_string()); + // NOTE: make `dist` part of the URL because that's how it's stored in src/stage0.json + (dist_server, format!("dist/{key}/{filename}"), true) + } + }; + + // For the beta compiler, put special effort into ensuring the checksums are valid. + // FIXME: maybe we should do this for download-rustc as well? but it would be a pain to update + // this on each and every nightly ... + let checksum = if should_verify { + let error = format!( + "src/stage0.json doesn't contain a checksum for {url}. \ + Pre-built artifacts might not be available for this \ + target at this time, see https://doc.rust-lang.org/nightly\ + /rustc/platform-support.html for more information." 
+ ); + let sha256 = self.stage0_metadata.checksums_sha256.get(&url).expect(&error); + if tarball.exists() { + if self.verify(&tarball, sha256) { + self.unpack(&tarball, &bin_root, prefix); + return; + } else { + self.verbose(&format!( + "ignoring cached file {} due to failed verification", + tarball.display() + )); + self.remove(&tarball); + } + } + Some(sha256) + } else if tarball.exists() { + self.unpack(&tarball, &bin_root, prefix); + return; + } else { + None + }; + + let mut help_on_error = ""; + if destination == "ci-rustc" { + help_on_error = "ERROR: failed to download pre-built rustc from CI + +NOTE: old builds get deleted after a certain time +HELP: if trying to compile an old commit of rustc, disable `download-rustc` in config.toml: + +[rust] +download-rustc = false +"; + } + self.download_file(&format!("{base_url}/{url}"), &tarball, help_on_error); + if let Some(sha256) = checksum { + if !self.verify(&tarball, sha256) { + panic!("failed to verify {}", tarball.display()); + } + } + + self.unpack(&tarball, &bin_root, prefix); + } + + pub(crate) fn maybe_download_ci_llvm(&self) { + if !self.llvm_from_ci { + return; + } + let llvm_root = self.ci_llvm_root(); + let llvm_stamp = llvm_root.join(".llvm-stamp"); + let llvm_sha = detect_llvm_sha(&self, self.rust_info.is_managed_git_subrepository()); + let key = format!("{}{}", llvm_sha, self.llvm_assertions); + if program_out_of_date(&llvm_stamp, &key) && !self.dry_run() { + self.download_ci_llvm(&llvm_sha); + if self.should_fix_bins_and_dylibs() { + for entry in t!(fs::read_dir(llvm_root.join("bin"))) { + self.fix_bin_or_dylib(&t!(entry).path()); + } + } + + // Update the timestamp of llvm-config to force rustc_llvm to be + // rebuilt. This is a hacky workaround for a deficiency in Cargo where + // the rerun-if-changed directive doesn't handle changes very well. + // https://github.com/rust-lang/cargo/issues/10791 + // Cargo only compares the timestamp of the file relative to the last + // time `rustc_llvm` build script ran. However, the timestamps of the + // files in the tarball are in the past, so it doesn't trigger a + // rebuild. 
+ let now = filetime::FileTime::from_system_time(std::time::SystemTime::now()); + let llvm_config = llvm_root.join("bin").join(exe("llvm-config", self.build)); + t!(filetime::set_file_times(&llvm_config, now, now)); + + if self.should_fix_bins_and_dylibs() { + let llvm_lib = llvm_root.join("lib"); + for entry in t!(fs::read_dir(&llvm_lib)) { + let lib = t!(entry).path(); + if lib.extension().map_or(false, |ext| ext == "so") { + self.fix_bin_or_dylib(&lib); + } + } + } + + t!(fs::write(llvm_stamp, key)); + } + } + + fn download_ci_llvm(&self, llvm_sha: &str) { + let llvm_assertions = self.llvm_assertions; + + let cache_prefix = format!("llvm-{llvm_sha}-{llvm_assertions}"); + let cache_dst = self.out.join("cache"); + let rustc_cache = cache_dst.join(cache_prefix); + if !rustc_cache.exists() { + t!(fs::create_dir_all(&rustc_cache)); + } + let base = if llvm_assertions { + &self.stage0_metadata.config.artifacts_with_llvm_assertions_server + } else { + &self.stage0_metadata.config.artifacts_server + }; + let version = self.artifact_version_part(llvm_sha); + let filename = format!("rust-dev-{}-{}.tar.xz", version, self.build.triple); + let tarball = rustc_cache.join(&filename); + if !tarball.exists() { + let help_on_error = "ERROR: failed to download llvm from ci + + HELP: old builds get deleted after a certain time + HELP: if trying to compile an old commit of rustc, disable `download-ci-llvm` in config.toml: + + [llvm] + download-ci-llvm = false + "; + self.download_file(&format!("{base}/{llvm_sha}/{filename}"), &tarball, help_on_error); + } + let llvm_root = self.ci_llvm_root(); + self.unpack(&tarball, &llvm_root, "rust-dev"); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/metadata.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/metadata.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/metadata.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/metadata.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,101 @@ +use std::path::PathBuf; +use std::process::Command; + +use serde_derive::Deserialize; + +use crate::utils::cache::INTERNER; +use crate::utils::helpers::output; +use crate::{t, Build, Crate}; + +/// For more information, see the output of +/// +#[derive(Debug, Deserialize)] +struct Output { + packages: Vec, +} + +/// For more information, see the output of +/// +#[derive(Debug, Deserialize)] +struct Package { + name: String, + source: Option, + manifest_path: String, + dependencies: Vec, + targets: Vec, +} + +/// For more information, see the output of +/// +#[derive(Debug, Deserialize)] +struct Dependency { + name: String, + source: Option, +} + +#[derive(Debug, Deserialize)] +struct Target { + kind: Vec, +} + +/// Collects and stores package metadata of each workspace members into `build`, +/// by executing `cargo metadata` commands. 
+pub fn build(build: &mut Build) { + for package in workspace_members(build) { + if package.source.is_none() { + let name = INTERNER.intern_string(package.name); + let mut path = PathBuf::from(package.manifest_path); + path.pop(); + let deps = package + .dependencies + .into_iter() + .filter(|dep| dep.source.is_none()) + .map(|dep| INTERNER.intern_string(dep.name)) + .collect(); + let has_lib = package.targets.iter().any(|t| t.kind.iter().any(|k| k == "lib")); + let krate = Crate { name, deps, path, has_lib }; + let relative_path = krate.local_path(build); + build.crates.insert(name, krate); + let existing_path = build.crate_paths.insert(relative_path, name); + assert!( + existing_path.is_none(), + "multiple crates with the same path: {}", + existing_path.unwrap() + ); + } + } +} + +/// Invokes `cargo metadata` to get package metadata of each workspace member. +/// +/// Note that `src/tools/cargo` is no longer a workspace member but we still +/// treat it as one here, by invoking an additional `cargo metadata` command. +fn workspace_members(build: &Build) -> impl Iterator { + let collect_metadata = |manifest_path| { + let mut cargo = Command::new(&build.initial_cargo); + cargo + // Will read the libstd Cargo.toml + // which uses the unstable `public-dependency` feature. + .env("RUSTC_BOOTSTRAP", "1") + .arg("metadata") + .arg("--format-version") + .arg("1") + .arg("--no-deps") + .arg("--manifest-path") + .arg(build.src.join(manifest_path)); + let metadata_output = output(&mut cargo); + let Output { packages, .. } = t!(serde_json::from_str(&metadata_output)); + packages + }; + + // Collects `metadata.packages` from all workspaces. + let packages = collect_metadata("Cargo.toml"); + let cargo_packages = collect_metadata("src/tools/cargo/Cargo.toml"); + let ra_packages = collect_metadata("src/tools/rust-analyzer/Cargo.toml"); + let bootstrap_packages = collect_metadata("src/bootstrap/Cargo.toml"); + + // We only care about the root package from `src/tool/cargo` workspace. + let cargo_package = cargo_packages.into_iter().find(|pkg| pkg.name == "cargo").into_iter(); + + packages.into_iter().chain(cargo_package).chain(ra_packages).chain(bootstrap_packages) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,6 @@ +pub(crate) mod build_steps; +pub(crate) mod builder; +pub(crate) mod config; +pub(crate) mod download; +pub(crate) mod metadata; +pub(crate) mod sanity; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/sanity.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/sanity.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/sanity.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/core/sanity.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,267 @@ +//! Sanity checking performed by rustbuild before actually executing anything. +//! +//! This module contains the implementation of ensuring that the build +//! environment looks reasonable before progressing. This will verify that +//! various programs like git and python exist, along with ensuring that all C +//! compilers for cross-compiling are found. +//! +//! In theory if we get past this phase it's a bug if a build fails, but in +//! practice that's likely not true! 
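
The metadata module above shells out to `cargo metadata --format-version 1 --no-deps`, deserializes only the fields rustbuild needs, and keeps the packages whose `source` is `None` (workspace-local crates). A reduced sketch of that round trip, assuming the `serde_derive` and `serde_json` crates; the structs are trimmed to three fields and the manifest path is only an example:

```rust
use std::process::Command;

use serde_derive::Deserialize;

/// Trimmed-down mirror of the `Output`/`Package` structs used by the module above.
#[derive(Debug, Deserialize)]
struct Metadata {
    packages: Vec<Package>,
}

#[derive(Debug, Deserialize)]
struct Package {
    name: String,
    source: Option<String>,
    manifest_path: String,
}

fn local_packages(manifest_path: &str) -> Vec<Package> {
    let out = Command::new("cargo")
        // Mirrors the original: the standard library manifest uses unstable Cargo features.
        .env("RUSTC_BOOTSTRAP", "1")
        .args(["metadata", "--format-version", "1", "--no-deps", "--manifest-path", manifest_path])
        .output()
        .expect("failed to run `cargo metadata`");
    let metadata: Metadata =
        serde_json::from_slice(&out.stdout).expect("unexpected `cargo metadata` output");
    // Registry and git dependencies carry a `source`; workspace-local packages do not.
    metadata.packages.into_iter().filter(|pkg| pkg.source.is_none()).collect()
}

fn main() {
    for pkg in local_packages("Cargo.toml") {
        println!("{} ({})", pkg.name, pkg.manifest_path);
    }
}
```

Unknown JSON fields are ignored by serde's derived `Deserialize`, which is what lets a struct this small consume the full `cargo metadata` output.
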
+ +use std::collections::HashMap; +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fs; +use std::path::PathBuf; +use std::process::Command; + +use crate::core::config::Target; +use crate::utils::cache::INTERNER; +use crate::utils::helpers::output; +use crate::Build; + +pub struct Finder { + cache: HashMap>, + path: OsString, +} + +impl Finder { + pub fn new() -> Self { + Self { cache: HashMap::new(), path: env::var_os("PATH").unwrap_or_default() } + } + + pub fn maybe_have>(&mut self, cmd: S) -> Option { + let cmd: OsString = cmd.into(); + let path = &self.path; + self.cache + .entry(cmd.clone()) + .or_insert_with(|| { + for path in env::split_paths(path) { + let target = path.join(&cmd); + let mut cmd_exe = cmd.clone(); + cmd_exe.push(".exe"); + + if target.is_file() // some/path/git + || path.join(&cmd_exe).exists() // some/path/git.exe + || target.join(&cmd_exe).exists() + // some/path/git/git.exe + { + return Some(target); + } + } + None + }) + .clone() + } + + pub fn must_have>(&mut self, cmd: S) -> PathBuf { + self.maybe_have(&cmd).unwrap_or_else(|| { + panic!("\n\ncouldn't find required command: {:?}\n\n", cmd.as_ref()); + }) + } +} + +pub fn check(build: &mut Build) { + let skip_target_sanity = + env::var_os("BOOTSTRAP_SKIP_TARGET_SANITY").is_some_and(|s| s == "1" || s == "true"); + + let path = env::var_os("PATH").unwrap_or_default(); + // On Windows, quotes are invalid characters for filename paths, and if + // one is present as part of the PATH then that can lead to the system + // being unable to identify the files properly. See + // https://github.com/rust-lang/rust/issues/34959 for more details. + if cfg!(windows) && path.to_string_lossy().contains('\"') { + panic!("PATH contains invalid character '\"'"); + } + + let mut cmd_finder = Finder::new(); + // If we've got a git directory we're gonna need git to update + // submodules and learn about various other aspects. + if build.rust_info().is_managed_git_subrepository() { + cmd_finder.must_have("git"); + } + + // We need cmake, but only if we're actually building LLVM or sanitizers. + let building_llvm = build.config.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) + && build + .hosts + .iter() + .map(|host| { + build + .config + .target_config + .get(host) + .map(|config| config.llvm_config.is_none()) + .unwrap_or(true) + }) + .any(|build_llvm_ourselves| build_llvm_ourselves); + + let need_cmake = building_llvm || build.config.any_sanitizers_enabled(); + if need_cmake && cmd_finder.maybe_have("cmake").is_none() { + eprintln!( + " +Couldn't find required command: cmake + +You should install cmake, or set `download-ci-llvm = true` in the +`[llvm]` section of `config.toml` to download LLVM rather +than building it. 
+" + ); + crate::exit!(1); + } + + build.config.python = build + .config + .python + .take() + .map(|p| cmd_finder.must_have(p)) + .or_else(|| env::var_os("BOOTSTRAP_PYTHON").map(PathBuf::from)) // set by bootstrap.py + .or_else(|| cmd_finder.maybe_have("python")) + .or_else(|| cmd_finder.maybe_have("python3")) + .or_else(|| cmd_finder.maybe_have("python2")); + + build.config.nodejs = build + .config + .nodejs + .take() + .map(|p| cmd_finder.must_have(p)) + .or_else(|| cmd_finder.maybe_have("node")) + .or_else(|| cmd_finder.maybe_have("nodejs")); + + build.config.npm = build + .config + .npm + .take() + .map(|p| cmd_finder.must_have(p)) + .or_else(|| cmd_finder.maybe_have("npm")); + + build.config.gdb = build + .config + .gdb + .take() + .map(|p| cmd_finder.must_have(p)) + .or_else(|| cmd_finder.maybe_have("gdb")); + + build.config.reuse = build + .config + .reuse + .take() + .map(|p| cmd_finder.must_have(p)) + .or_else(|| cmd_finder.maybe_have("reuse")); + + // We're gonna build some custom C code here and there, host triples + // also build some C++ shims for LLVM so we need a C++ compiler. + for target in &build.targets { + // On emscripten we don't actually need the C compiler to just + // build the target artifacts, only for testing. For the sake + // of easier bot configuration, just skip detection. + if target.contains("emscripten") { + continue; + } + + // We don't use a C compiler on wasm32 + if target.contains("wasm32") { + continue; + } + + // Some environments don't want or need these tools, such as when testing Miri. + // FIXME: it would be better to refactor this code to split necessary setup from pure sanity + // checks, and have a regular flag for skipping the latter. Also see + // . + if skip_target_sanity { + continue; + } + + if !build.config.dry_run() { + cmd_finder.must_have(build.cc(*target)); + if let Some(ar) = build.ar(*target) { + cmd_finder.must_have(ar); + } + } + } + + for host in &build.hosts { + if !build.config.dry_run() { + cmd_finder.must_have(build.cxx(*host).unwrap()); + } + } + + if build.config.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) { + // Externally configured LLVM requires FileCheck to exist + let filecheck = build.llvm_filecheck(build.build); + if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests { + panic!("FileCheck executable {filecheck:?} does not exist"); + } + } + + for target in &build.targets { + build + .config + .target_config + .entry(*target) + .or_insert_with(|| Target::from_triple(&target.triple)); + + if (target.contains("-none-") || target.contains("nvptx")) + && build.no_std(*target) == Some(false) + { + panic!("All the *-none-* and nvptx* targets are no-std targets") + } + + // Some environments don't want or need these tools, such as when testing Miri. + // FIXME: it would be better to refactor this code to split necessary setup from pure sanity + // checks, and have a regular flag for skipping the latter. Also see + // . + if skip_target_sanity { + continue; + } + + // Make sure musl-root is valid. 
+ if target.contains("musl") && !target.contains("unikraft") { + // If this is a native target (host is also musl) and no musl-root is given, + // fall back to the system toolchain in /usr before giving up + if build.musl_root(*target).is_none() && build.config.build == *target { + let target = build.config.target_config.entry(*target).or_default(); + target.musl_root = Some("/usr".into()); + } + match build.musl_libdir(*target) { + Some(libdir) => { + if fs::metadata(libdir.join("libc.a")).is_err() { + panic!("couldn't find libc.a in musl libdir: {}", libdir.display()); + } + } + None => panic!( + "when targeting MUSL either the rust.musl-root \ + option or the target.$TARGET.musl-root option must \ + be specified in config.toml" + ), + } + } + + if need_cmake && target.contains("msvc") { + // There are three builds of cmake on windows: MSVC, MinGW, and + // Cygwin. The Cygwin build does not have generators for Visual + // Studio, so detect that here and error. + let out = output(Command::new("cmake").arg("--help")); + if !out.contains("Visual Studio") { + panic!( + " +cmake does not support Visual Studio generators. + +This is likely due to it being an msys/cygwin build of cmake, +rather than the required windows version, built using MinGW +or Visual Studio. + +If you are building under msys2 try installing the mingw-w64-x86_64-cmake +package instead of cmake: + +$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake +" + ); + } + } + } + + if let Some(ref s) = build.config.ccache { + cmd_finder.must_have(s); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/lib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,1874 @@ +//! Implementation of rustbuild, the Rust build system. +//! +//! This module, and its descendants, are the implementation of the Rust build +//! system. Most of this build system is backed by Cargo but the outer layer +//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo +//! builds, building artifacts like LLVM, etc. The goals of rustbuild are: +//! +//! * To be an easily understandable, easily extensible, and maintainable build +//! system. +//! * Leverage standard tools in the Rust ecosystem to build the compiler, aka +//! crates.io and Cargo. +//! * A standard interface to build across all platforms, including MSVC +//! +//! ## Further information +//! +//! More documentation can be found in each respective module below, and you can +//! also check out the `src/bootstrap/README.md` file for more information. 
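
The `Finder` used by the sanity check above caches one PATH probe per command, so repeated `maybe_have`/`must_have` calls are cheap, and it also tries the `.exe` spelling so the same lookup works on Windows. A self-contained sketch of that probe using only the standard library; the probed tool names are illustrative:

```rust
use std::collections::HashMap;
use std::env;
use std::ffi::{OsStr, OsString};
use std::path::PathBuf;

struct Finder {
    cache: HashMap<OsString, Option<PathBuf>>,
    path: OsString,
}

impl Finder {
    fn new() -> Self {
        Self { cache: HashMap::new(), path: env::var_os("PATH").unwrap_or_default() }
    }

    fn maybe_have(&mut self, cmd: impl AsRef<OsStr>) -> Option<PathBuf> {
        let cmd = cmd.as_ref().to_os_string();
        let path = &self.path;
        self.cache
            .entry(cmd.clone())
            .or_insert_with(|| {
                for dir in env::split_paths(path) {
                    let candidate = dir.join(&cmd);
                    let mut with_exe = cmd.clone();
                    with_exe.push(".exe");
                    // Accept either `dir/cmd` or `dir/cmd.exe`.
                    if candidate.is_file() || dir.join(&with_exe).is_file() {
                        return Some(candidate);
                    }
                }
                None
            })
            .clone()
    }
}

fn main() {
    let mut finder = Finder::new();
    for tool in ["git", "cmake", "python3"] {
        match finder.maybe_have(tool) {
            Some(path) => println!("found {tool} at {}", path.display()),
            None => println!("{tool} not found on PATH"),
        }
    }
}
```

Caching `Option<PathBuf>` rather than just hits means a missing tool is also remembered, so the PATH is never walked twice for the same command.
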
+ +use std::cell::{Cell, RefCell}; +use std::collections::{HashMap, HashSet}; +use std::env; +use std::fmt::Display; +use std::fs::{self, File}; +use std::io; +use std::path::{Path, PathBuf}; +use std::process::{Command, Output, Stdio}; +use std::str; + +use build_helper::ci::{gha, CiEnv}; +use build_helper::exit; +use build_helper::util::fail; +use filetime::FileTime; +use once_cell::sync::OnceCell; +use termcolor::{ColorChoice, StandardStream, WriteColor}; +use utils::channel::GitInfo; + +use crate::core::builder; +use crate::core::builder::Kind; +use crate::core::config::flags; +use crate::core::config::{DryRun, Target}; +use crate::core::config::{LlvmLibunwind, TargetSelection}; +use crate::utils::cache::{Interned, INTERNER}; +use crate::utils::exec::{BehaviorOnFailure, BootstrapCommand, OutputMode}; +use crate::utils::helpers::{self, dir_is_empty, exe, libdir, mtime, output, symlink_dir}; + +mod core; +mod utils; + +pub use crate::core::builder::PathSet; +pub use crate::core::config::flags::Subcommand; +pub use crate::core::config::Config; + +const LLVM_TOOLS: &[&str] = &[ + "llvm-cov", // used to generate coverage report + "llvm-nm", // used to inspect binaries; it shows symbol names, their sizes and visibility + "llvm-objcopy", // used to transform ELFs into binary format which flashing tools consume + "llvm-objdump", // used to disassemble programs + "llvm-profdata", // used to inspect and merge files generated by profiles + "llvm-readobj", // used to get information from ELFs/objects that the other tools don't provide + "llvm-size", // used to prints the size of the linker sections of a program + "llvm-strip", // used to discard symbols from binary files to reduce their size + "llvm-ar", // used for creating and modifying archive files + "llvm-as", // used to convert LLVM assembly to LLVM bitcode + "llvm-dis", // used to disassemble LLVM bitcode + "llc", // used to compile LLVM bytecode + "opt", // used to optimize LLVM bytecode +]; + +/// LLD file names for all flavors. +const LLD_FILE_NAMES: &[&str] = &["ld.lld", "ld64.lld", "lld-link", "wasm-ld"]; + +/// Keeps track of major changes made to the bootstrap configuration. +/// +/// These values also represent the IDs of the PRs that caused major changes. +/// You can visit `https://github.com/rust-lang/rust/pull/{any-id-from-the-list}` to +/// check for more details regarding each change. +/// +/// If you make any major changes (such as adding new values or changing default values), +/// please ensure that the associated PR ID is added to the end of this list. +/// This is necessary because the list must be sorted by the merge date. 
+pub const CONFIG_CHANGE_HISTORY: &[usize] = &[115898, 116998, 117435, 116881]; + +/// Extra --check-cfg to add when building +/// (Mode restriction, config name, config values (if any)) +const EXTRA_CHECK_CFGS: &[(Option, &str, Option<&[&'static str]>)] = &[ + (None, "bootstrap", None), + (Some(Mode::Rustc), "parallel_compiler", None), + (Some(Mode::ToolRustc), "parallel_compiler", None), + (Some(Mode::Codegen), "parallel_compiler", None), + (Some(Mode::Std), "stdarch_intel_sde", None), + (Some(Mode::Std), "no_fp_fmt_parse", None), + (Some(Mode::Std), "no_global_oom_handling", None), + (Some(Mode::Std), "no_rc", None), + (Some(Mode::Std), "no_sync", None), + (Some(Mode::Std), "freebsd12", None), + (Some(Mode::Std), "freebsd13", None), + (Some(Mode::Std), "backtrace_in_libstd", None), + /* Extra values not defined in the built-in targets yet, but used in std */ + (Some(Mode::Std), "target_env", Some(&["libnx"])), + // (Some(Mode::Std), "target_os", Some(&[])), + (Some(Mode::Std), "target_arch", Some(&["asmjs", "spirv", "nvptx", "xtensa"])), + /* Extra names used by dependencies */ + // FIXME: Used by serde_json, but we should not be triggering on external dependencies. + (Some(Mode::Rustc), "no_btreemap_remove_entry", None), + (Some(Mode::ToolRustc), "no_btreemap_remove_entry", None), + // FIXME: Used by crossbeam-utils, but we should not be triggering on external dependencies. + (Some(Mode::Rustc), "crossbeam_loom", None), + (Some(Mode::ToolRustc), "crossbeam_loom", None), + // FIXME: Used by proc-macro2, but we should not be triggering on external dependencies. + (Some(Mode::Rustc), "span_locations", None), + (Some(Mode::ToolRustc), "span_locations", None), + // FIXME: Used by rustix, but we should not be triggering on external dependencies. + (Some(Mode::Rustc), "rustix_use_libc", None), + (Some(Mode::ToolRustc), "rustix_use_libc", None), + // FIXME: Used by filetime, but we should not be triggering on external dependencies. + (Some(Mode::Rustc), "emulate_second_only_system", None), + (Some(Mode::ToolRustc), "emulate_second_only_system", None), + // Needed to avoid the need to copy windows.lib into the sysroot. + (Some(Mode::Rustc), "windows_raw_dylib", None), + (Some(Mode::ToolRustc), "windows_raw_dylib", None), +]; + +/// A structure representing a Rust compiler. +/// +/// Each compiler has a `stage` that it is associated with and a `host` that +/// corresponds to the platform the compiler runs on. This structure is used as +/// a parameter to many methods below. +#[derive(Eq, PartialOrd, Ord, PartialEq, Clone, Copy, Hash, Debug)] +pub struct Compiler { + stage: u32, + host: TargetSelection, +} + +#[derive(PartialEq, Eq, Copy, Clone, Debug)] +pub enum DocTests { + /// Run normal tests and doc tests (default). + Yes, + /// Do not run any doc tests. + No, + /// Only run doc tests. + Only, +} + +pub enum GitRepo { + Rustc, + Llvm, +} + +/// Global configuration for the build system. +/// +/// This structure transitively contains all configuration for the build system. +/// All filesystem-encoded configuration is in `config`, all flags are in +/// `flags`, and then parsed or probed information is listed in the keys below. +/// +/// This structure is a parameter of almost all methods in the build system, +/// although most functions are implemented as free functions rather than +/// methods specifically on this structure itself (to make it easier to +/// organize). +#[derive(Clone)] +pub struct Build { + /// User-specified configuration from `config.toml`. 
+ config: Config, + + // Version information + version: String, + + // Properties derived from the above configuration + src: PathBuf, + out: PathBuf, + bootstrap_out: PathBuf, + cargo_info: GitInfo, + rust_analyzer_info: GitInfo, + clippy_info: GitInfo, + miri_info: GitInfo, + rustfmt_info: GitInfo, + in_tree_llvm_info: GitInfo, + local_rebuild: bool, + fail_fast: bool, + doc_tests: DocTests, + verbosity: usize, + + // Targets for which to build + build: TargetSelection, + hosts: Vec, + targets: Vec, + + initial_rustc: PathBuf, + initial_cargo: PathBuf, + initial_lld: PathBuf, + initial_libdir: PathBuf, + initial_sysroot: PathBuf, + + // Runtime state filled in later on + // C/C++ compilers and archiver for all targets + cc: RefCell>, + cxx: RefCell>, + ar: RefCell>, + ranlib: RefCell>, + // Miscellaneous + // allow bidirectional lookups: both name -> path and path -> name + crates: HashMap, Crate>, + crate_paths: HashMap>, + is_sudo: bool, + ci_env: CiEnv, + delayed_failures: RefCell>, + prerelease_version: Cell>, + + #[cfg(feature = "build-metrics")] + metrics: crate::utils::metrics::BuildMetrics, +} + +#[derive(Debug, Clone)] +struct Crate { + name: Interned, + deps: HashSet>, + path: PathBuf, + has_lib: bool, +} + +impl Crate { + fn local_path(&self, build: &Build) -> PathBuf { + self.path.strip_prefix(&build.config.src).unwrap().into() + } +} + +/// When building Rust various objects are handled differently. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub enum DependencyType { + /// Libraries originating from proc-macros. + Host, + /// Typical Rust libraries. + Target, + /// Non Rust libraries and objects shipped to ease usage of certain targets. + TargetSelfContained, +} + +/// The various "modes" of invoking Cargo. +/// +/// These entries currently correspond to the various output directories of the +/// build system, with each mod generating output in a different directory. +#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub enum Mode { + /// Build the standard library, placing output in the "stageN-std" directory. + Std, + + /// Build librustc, and compiler libraries, placing output in the "stageN-rustc" directory. + Rustc, + + /// Build a codegen backend for rustc, placing the output in the "stageN-codegen" directory. + Codegen, + + /// Build a tool, placing output in the "stage0-bootstrap-tools" + /// directory. This is for miscellaneous sets of tools that are built + /// using the bootstrap stage0 compiler in its entirety (target libraries + /// and all). Typically these tools compile with stable Rust. + ToolBootstrap, + + /// Build a tool which uses the locally built std, placing output in the + /// "stageN-tools" directory. Its usage is quite rare, mainly used by + /// compiletest which needs libtest. + ToolStd, + + /// Build a tool which uses the locally built rustc and the target std, + /// placing the output in the "stageN-tools" directory. This is used for + /// anything that needs a fully functional rustc, such as rustdoc, clippy, + /// cargo, rls, rustfmt, miri, etc. + ToolRustc, +} + +impl Mode { + pub fn is_tool(&self) -> bool { + matches!(self, Mode::ToolBootstrap | Mode::ToolRustc | Mode::ToolStd) + } + + pub fn must_support_dlopen(&self) -> bool { + matches!(self, Mode::Std | Mode::Codegen) + } +} + +pub enum CLang { + C, + Cxx, +} + +macro_rules! forward { + ( $( $fn:ident( $($param:ident: $ty:ty),* ) $( -> $ret:ty)? ),+ $(,)? ) => { + impl Build { + $( fn $fn(&self, $($param: $ty),* ) $( -> $ret)? 
{ + self.config.$fn( $($param),* ) + } )+ + } + } +} + +forward! { + verbose(msg: &str), + is_verbose() -> bool, + create(path: &Path, s: &str), + remove(f: &Path), + tempdir() -> PathBuf, + llvm_link_shared() -> bool, + download_rustc() -> bool, + initial_rustfmt() -> Option, +} + +impl Build { + /// Creates a new set of build configuration from the `flags` on the command + /// line and the filesystem `config`. + /// + /// By default all build output will be placed in the current directory. + pub fn new(mut config: Config) -> Build { + let src = config.src.clone(); + let out = config.out.clone(); + + #[cfg(unix)] + // keep this consistent with the equivalent check in x.py: + // https://github.com/rust-lang/rust/blob/a8a33cf27166d3eabaffc58ed3799e054af3b0c6/src/bootstrap/bootstrap.py#L796-L797 + let is_sudo = match env::var_os("SUDO_USER") { + Some(_sudo_user) => { + // SAFETY: getuid() system call is always successful and no return value is reserved + // to indicate an error. + // + // For more context, see https://man7.org/linux/man-pages/man2/geteuid.2.html + let uid = unsafe { libc::getuid() }; + uid == 0 + } + None => false, + }; + #[cfg(not(unix))] + let is_sudo = false; + + let omit_git_hash = config.omit_git_hash; + let rust_info = GitInfo::new(omit_git_hash, &src); + let cargo_info = GitInfo::new(omit_git_hash, &src.join("src/tools/cargo")); + let rust_analyzer_info = GitInfo::new(omit_git_hash, &src.join("src/tools/rust-analyzer")); + let clippy_info = GitInfo::new(omit_git_hash, &src.join("src/tools/clippy")); + let miri_info = GitInfo::new(omit_git_hash, &src.join("src/tools/miri")); + let rustfmt_info = GitInfo::new(omit_git_hash, &src.join("src/tools/rustfmt")); + + // we always try to use git for LLVM builds + let in_tree_llvm_info = GitInfo::new(false, &src.join("src/llvm-project")); + + let initial_target_libdir_str = if config.dry_run() { + "/dummy/lib/path/to/lib/".to_string() + } else { + output( + Command::new(&config.initial_rustc) + .arg("--target") + .arg(config.build.rustc_target_arg()) + .arg("--print") + .arg("target-libdir"), + ) + }; + let initial_target_dir = Path::new(&initial_target_libdir_str).parent().unwrap(); + let initial_lld = initial_target_dir.join("bin").join("rust-lld"); + + let initial_sysroot = if config.dry_run() { + "/dummy".to_string() + } else { + output(Command::new(&config.initial_rustc).arg("--print").arg("sysroot")) + } + .trim() + .to_string(); + + let initial_libdir = initial_target_dir + .parent() + .unwrap() + .parent() + .unwrap() + .strip_prefix(&initial_sysroot) + .unwrap() + .to_path_buf(); + + let version = std::fs::read_to_string(src.join("src").join("version")) + .expect("failed to read src/version"); + let version = version.trim(); + + let bootstrap_out = std::env::current_exe() + .expect("could not determine path to running process") + .parent() + .unwrap() + .to_path_buf(); + if !bootstrap_out.join(exe("rustc", config.build)).exists() && !cfg!(test) { + // this restriction can be lifted whenever https://github.com/rust-lang/rfcs/pull/3028 is implemented + panic!( + "`rustc` not found in {}, run `cargo build --bins` before `cargo run`", + bootstrap_out.display() + ) + } + + if rust_info.is_from_tarball() && config.description.is_none() { + config.description = Some("built from a source tarball".to_owned()); + } + + let mut build = Build { + initial_rustc: config.initial_rustc.clone(), + initial_cargo: config.initial_cargo.clone(), + initial_lld, + initial_libdir, + initial_sysroot: initial_sysroot.into(), + local_rebuild: 
config.local_rebuild, + fail_fast: config.cmd.fail_fast(), + doc_tests: config.cmd.doc_tests(), + verbosity: config.verbose, + + build: config.build, + hosts: config.hosts.clone(), + targets: config.targets.clone(), + + config, + version: version.to_string(), + src, + out, + bootstrap_out, + + cargo_info, + rust_analyzer_info, + clippy_info, + miri_info, + rustfmt_info, + in_tree_llvm_info, + cc: RefCell::new(HashMap::new()), + cxx: RefCell::new(HashMap::new()), + ar: RefCell::new(HashMap::new()), + ranlib: RefCell::new(HashMap::new()), + crates: HashMap::new(), + crate_paths: HashMap::new(), + is_sudo, + ci_env: CiEnv::current(), + delayed_failures: RefCell::new(Vec::new()), + prerelease_version: Cell::new(None), + + #[cfg(feature = "build-metrics")] + metrics: crate::utils::metrics::BuildMetrics::init(), + }; + + // If local-rust is the same major.minor as the current version, then force a + // local-rebuild + let local_version_verbose = + output(Command::new(&build.initial_rustc).arg("--version").arg("--verbose")); + let local_release = local_version_verbose + .lines() + .filter_map(|x| x.strip_prefix("release:")) + .next() + .unwrap() + .trim(); + if local_release.split('.').take(2).eq(version.split('.').take(2)) { + build.verbose(&format!("auto-detected local-rebuild {local_release}")); + build.local_rebuild = true; + } + + build.verbose("finding compilers"); + utils::cc_detect::find(&build); + // When running `setup`, the profile is about to change, so any requirements we have now may + // be different on the next invocation. Don't check for them until the next time x.py is + // run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing. + // + // Similarly, for `setup` we don't actually need submodules or cargo metadata. + if !matches!(build.config.cmd, Subcommand::Setup { .. }) { + build.verbose("running sanity check"); + crate::core::sanity::check(&mut build); + + // Make sure we update these before gathering metadata so we don't get an error about missing + // Cargo.toml files. + let rust_submodules = ["src/tools/cargo", "library/backtrace", "library/stdarch"]; + for s in rust_submodules { + build.update_submodule(Path::new(s)); + } + // Now, update all existing submodules. + build.update_existing_submodules(); + + build.verbose("learning about cargo"); + crate::core::metadata::build(&mut build); + } + + // Make a symbolic link so we can use a consistent directory in the documentation. + let build_triple = build.out.join(&build.build.triple); + t!(fs::create_dir_all(&build_triple)); + let host = build.out.join("host"); + if host.is_symlink() { + // Left over from a previous build; overwrite it. + // This matters if `build.build` has changed between invocations. + #[cfg(windows)] + t!(fs::remove_dir(&host)); + #[cfg(not(windows))] + t!(fs::remove_file(&host)); + } + t!( + symlink_dir(&build.config, &build_triple, &host), + format!("symlink_dir({} => {}) failed", host.display(), build_triple.display()) + ); + + build + } + + // modified from `check_submodule` and `update_submodule` in bootstrap.py + /// Given a path to the directory of a submodule, update it. + /// + /// `relative_path` should be relative to the root of the git repository, not an absolute path. 
+ pub(crate) fn update_submodule(&self, relative_path: &Path) { + if !self.config.submodules(&self.rust_info()) { + return; + } + + let absolute_path = self.config.src.join(relative_path); + + // NOTE: The check for the empty directory is here because when running x.py the first time, + // the submodule won't be checked out. Check it out now so we can build it. + if !GitInfo::new(false, &absolute_path).is_managed_git_subrepository() + && !dir_is_empty(&absolute_path) + { + return; + } + + // check_submodule + let checked_out_hash = + output(Command::new("git").args(&["rev-parse", "HEAD"]).current_dir(&absolute_path)); + // update_submodules + let recorded = output( + Command::new("git") + .args(&["ls-tree", "HEAD"]) + .arg(relative_path) + .current_dir(&self.config.src), + ); + let actual_hash = recorded + .split_whitespace() + .nth(2) + .unwrap_or_else(|| panic!("unexpected output `{}`", recorded)); + + // update_submodule + if actual_hash == checked_out_hash.trim_end() { + // already checked out + return; + } + + println!("Updating submodule {}", relative_path.display()); + self.run( + Command::new("git") + .args(&["submodule", "-q", "sync"]) + .arg(relative_path) + .current_dir(&self.config.src), + ); + + // Try passing `--progress` to start, then run git again without if that fails. + let update = |progress: bool| { + // Git is buggy and will try to fetch submodules from the tracking branch for *this* repository, + // even though that has no relation to the upstream for the submodule. + let current_branch = { + let output = self + .config + .git() + .args(["symbolic-ref", "--short", "HEAD"]) + .stderr(Stdio::inherit()) + .output(); + let output = t!(output); + if output.status.success() { + Some(String::from_utf8(output.stdout).unwrap().trim().to_owned()) + } else { + None + } + }; + + let mut git = self.config.git(); + if let Some(branch) = current_branch { + // If there is a tag named after the current branch, git will try to disambiguate by prepending `heads/` to the branch name. + // This syntax isn't accepted by `branch.{branch}`. Strip it. + let branch = branch.strip_prefix("heads/").unwrap_or(&branch); + git.arg("-c").arg(format!("branch.{branch}.remote=origin")); + } + git.args(&["submodule", "update", "--init", "--recursive", "--depth=1"]); + if progress { + git.arg("--progress"); + } + git.arg(relative_path); + git + }; + // NOTE: doesn't use `try_run` because this shouldn't print an error if it fails. + if !update(true).status().map_or(false, |status| status.success()) { + self.run(&mut update(false)); + } + + // Save any local changes, but avoid running `git stash pop` if there are none (since it will exit with an error). 
+ // diff-index reports the modifications through the exit status + let has_local_modifications = !self.run_cmd( + BootstrapCommand::from( + Command::new("git") + .args(&["diff-index", "--quiet", "HEAD"]) + .current_dir(&absolute_path), + ) + .allow_failure() + .output_mode(match self.is_verbose() { + true => OutputMode::PrintAll, + false => OutputMode::PrintOutput, + }), + ); + if has_local_modifications { + self.run(Command::new("git").args(&["stash", "push"]).current_dir(&absolute_path)); + } + + self.run(Command::new("git").args(&["reset", "-q", "--hard"]).current_dir(&absolute_path)); + self.run(Command::new("git").args(&["clean", "-qdfx"]).current_dir(&absolute_path)); + + if has_local_modifications { + self.run(Command::new("git").args(&["stash", "pop"]).current_dir(absolute_path)); + } + } + + /// If any submodule has been initialized already, sync it unconditionally. + /// This avoids contributors checking in a submodule change by accident. + pub fn update_existing_submodules(&self) { + // Avoid running git when there isn't a git checkout. + if !self.config.submodules(&self.rust_info()) { + return; + } + let output = output( + self.config + .git() + .args(&["config", "--file"]) + .arg(&self.config.src.join(".gitmodules")) + .args(&["--get-regexp", "path"]), + ); + for line in output.lines() { + // Look for `submodule.$name.path = $path` + // Sample output: `submodule.src/rust-installer.path src/tools/rust-installer` + let submodule = Path::new(line.splitn(2, ' ').nth(1).unwrap()); + // Don't update the submodule unless it's already been cloned. + if GitInfo::new(false, submodule).is_managed_git_subrepository() { + self.update_submodule(submodule); + } + } + } + + /// Executes the entire build, as configured by the flags and configuration. + pub fn build(&mut self) { + unsafe { + crate::utils::job::setup(self); + } + + // Download rustfmt early so that it can be used in rust-analyzer configs. + let _ = &builder::Builder::new(&self).initial_rustfmt(); + + // hardcoded subcommands + match &self.config.cmd { + Subcommand::Format { check } => { + return core::build_steps::format::format( + &builder::Builder::new(&self), + *check, + &self.config.paths, + ); + } + Subcommand::Suggest { run } => { + return core::build_steps::suggest::suggest(&builder::Builder::new(&self), *run); + } + _ => (), + } + + { + let builder = builder::Builder::new(&self); + if let Some(path) = builder.paths.get(0) { + if path == Path::new("nonexistent/path/to/trigger/cargo/metadata") { + return; + } + } + } + + if !self.config.dry_run() { + { + self.config.dry_run = DryRun::SelfCheck; + let builder = builder::Builder::new(&self); + builder.execute_cli(); + } + self.config.dry_run = DryRun::Disabled; + let builder = builder::Builder::new(&self); + builder.execute_cli(); + } else { + let builder = builder::Builder::new(&self); + builder.execute_cli(); + } + + // Check for postponed failures from `test --no-fail-fast`. + let failures = self.delayed_failures.borrow(); + if failures.len() > 0 { + eprintln!("\n{} command(s) did not execute successfully:\n", failures.len()); + for failure in failures.iter() { + eprintln!(" - {failure}\n"); + } + exit!(1); + } + + #[cfg(feature = "build-metrics")] + self.metrics.persist(self); + } + + /// Clear out `dir` if `input` is newer. + /// + /// After this executes, it will also ensure that `dir` exists. 
+ fn clear_if_dirty(&self, dir: &Path, input: &Path) -> bool { + let stamp = dir.join(".stamp"); + let mut cleared = false; + if mtime(&stamp) < mtime(input) { + self.verbose(&format!("Dirty - {}", dir.display())); + let _ = fs::remove_dir_all(dir); + cleared = true; + } else if stamp.exists() { + return cleared; + } + t!(fs::create_dir_all(dir)); + t!(File::create(stamp)); + cleared + } + + fn rust_info(&self) -> &GitInfo { + &self.config.rust_info + } + + /// Gets the space-separated set of activated features for the standard + /// library. + fn std_features(&self, target: TargetSelection) -> String { + let mut features = " panic-unwind".to_string(); + + match self.config.llvm_libunwind(target) { + LlvmLibunwind::InTree => features.push_str(" llvm-libunwind"), + LlvmLibunwind::System => features.push_str(" system-llvm-libunwind"), + LlvmLibunwind::No => {} + } + if self.config.backtrace { + features.push_str(" backtrace"); + } + if self.config.profiler_enabled(target) { + features.push_str(" profiler"); + } + features + } + + /// Gets the space-separated set of activated features for the compiler. + fn rustc_features(&self, kind: Kind) -> String { + let mut features = vec![]; + if self.config.jemalloc { + features.push("jemalloc"); + } + if self.config.llvm_enabled() || kind == Kind::Check { + features.push("llvm"); + } + // keep in sync with `bootstrap/compile.rs:rustc_cargo_env` + if self.config.rustc_parallel { + features.push("rustc_use_parallel_compiler"); + } + + // If debug logging is on, then we want the default for tracing: + // https://github.com/tokio-rs/tracing/blob/3dd5c03d907afdf2c39444a29931833335171554/tracing/src/level_filters.rs#L26 + // which is everything (including debug/trace/etc.) + // if its unset, if debug_assertions is on, then debug_logging will also be on + // as well as tracing *ignoring* this feature when debug_assertions is on + if !self.config.rust_debug_logging { + features.push("max_level_info"); + } + + features.join(" ") + } + + /// Component directory that Cargo will produce output into (e.g. + /// release/debug) + fn cargo_dir(&self) -> &'static str { + if self.config.rust_optimize.is_release() { "release" } else { "debug" } + } + + fn tools_dir(&self, compiler: Compiler) -> PathBuf { + let out = self + .out + .join(&*compiler.host.triple) + .join(format!("stage{}-tools-bin", compiler.stage)); + t!(fs::create_dir_all(&out)); + out + } + + /// Returns the root directory for all output generated in a particular + /// stage when running with a particular host compiler. + /// + /// The mode indicates what the root directory is for. + fn stage_out(&self, compiler: Compiler, mode: Mode) -> PathBuf { + let suffix = match mode { + Mode::Std => "-std", + Mode::Rustc => "-rustc", + Mode::Codegen => "-codegen", + Mode::ToolBootstrap => "-bootstrap-tools", + Mode::ToolStd | Mode::ToolRustc => "-tools", + }; + self.out.join(&*compiler.host.triple).join(format!("stage{}{}", compiler.stage, suffix)) + } + + /// Returns the root output directory for all Cargo output in a given stage, + /// running a particular compiler, whether or not we're building the + /// standard library, and targeting the specified architecture. + fn cargo_out(&self, compiler: Compiler, mode: Mode, target: TargetSelection) -> PathBuf { + self.stage_out(compiler, mode).join(&*target.triple).join(self.cargo_dir()) + } + + /// Root output directory for LLVM compiled for `target` + /// + /// Note that if LLVM is configured externally then the directory returned + /// will likely be empty. 
+ fn llvm_out(&self, target: TargetSelection) -> PathBuf { + self.out.join(&*target.triple).join("llvm") + } + + fn lld_out(&self, target: TargetSelection) -> PathBuf { + self.out.join(&*target.triple).join("lld") + } + + /// Output directory for all documentation for a target + fn doc_out(&self, target: TargetSelection) -> PathBuf { + self.out.join(&*target.triple).join("doc") + } + + /// Output directory for all JSON-formatted documentation for a target + fn json_doc_out(&self, target: TargetSelection) -> PathBuf { + self.out.join(&*target.triple).join("json-doc") + } + + fn test_out(&self, target: TargetSelection) -> PathBuf { + self.out.join(&*target.triple).join("test") + } + + /// Output directory for all documentation for a target + fn compiler_doc_out(&self, target: TargetSelection) -> PathBuf { + self.out.join(&*target.triple).join("compiler-doc") + } + + /// Output directory for some generated md crate documentation for a target (temporary) + fn md_doc_out(&self, target: TargetSelection) -> Interned { + INTERNER.intern_path(self.out.join(&*target.triple).join("md-doc")) + } + + /// Returns `true` if no custom `llvm-config` is set for the specified target. + /// + /// If no custom `llvm-config` was specified then Rust's llvm will be used. + fn is_rust_llvm(&self, target: TargetSelection) -> bool { + match self.config.target_config.get(&target) { + Some(Target { llvm_has_rust_patches: Some(patched), .. }) => *patched, + Some(Target { llvm_config, .. }) => { + // If the user set llvm-config we assume Rust is not patched, + // but first check to see if it was configured by llvm-from-ci. + (self.config.llvm_from_ci && target == self.config.build) || llvm_config.is_none() + } + None => true, + } + } + + /// Returns the path to `FileCheck` binary for the specified target + fn llvm_filecheck(&self, target: TargetSelection) -> PathBuf { + let target_config = self.config.target_config.get(&target); + if let Some(s) = target_config.and_then(|c| c.llvm_filecheck.as_ref()) { + s.to_path_buf() + } else if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { + let llvm_bindir = output(Command::new(s).arg("--bindir")); + let filecheck = Path::new(llvm_bindir.trim()).join(exe("FileCheck", target)); + if filecheck.exists() { + filecheck + } else { + // On Fedora the system LLVM installs FileCheck in the + // llvm subdirectory of the libdir. + let llvm_libdir = output(Command::new(s).arg("--libdir")); + let lib_filecheck = + Path::new(llvm_libdir.trim()).join("llvm").join(exe("FileCheck", target)); + if lib_filecheck.exists() { + lib_filecheck + } else { + // Return the most normal file name, even though + // it doesn't exist, so that any error message + // refers to that. + filecheck + } + } + } else { + let base = self.llvm_out(target).join("build"); + let base = if !self.ninja() && target.contains("msvc") { + if self.config.llvm_optimize { + if self.config.llvm_release_debuginfo { + base.join("RelWithDebInfo") + } else { + base.join("Release") + } + } else { + base.join("Debug") + } + } else { + base + }; + base.join("bin").join(exe("FileCheck", target)) + } + } + + /// Directory for libraries built from C/C++ code and shared between stages. 
+ fn native_dir(&self, target: TargetSelection) -> PathBuf { + self.out.join(&*target.triple).join("native") + } + + /// Root output directory for rust_test_helpers library compiled for + /// `target` + fn test_helpers_out(&self, target: TargetSelection) -> PathBuf { + self.native_dir(target).join("rust-test-helpers") + } + + /// Adds the `RUST_TEST_THREADS` env var if necessary + fn add_rust_test_threads(&self, cmd: &mut Command) { + if env::var_os("RUST_TEST_THREADS").is_none() { + cmd.env("RUST_TEST_THREADS", self.jobs().to_string()); + } + } + + /// Returns the libdir of the snapshot compiler. + fn rustc_snapshot_libdir(&self) -> PathBuf { + self.rustc_snapshot_sysroot().join(libdir(self.config.build)) + } + + /// Returns the sysroot of the snapshot compiler. + fn rustc_snapshot_sysroot(&self) -> &Path { + static SYSROOT_CACHE: OnceCell = once_cell::sync::OnceCell::new(); + SYSROOT_CACHE.get_or_init(|| { + let mut rustc = Command::new(&self.initial_rustc); + rustc.args(&["--print", "sysroot"]); + output(&mut rustc).trim().into() + }) + } + + /// Runs a command, printing out nice contextual information if it fails. + fn run(&self, cmd: &mut Command) { + self.run_cmd(BootstrapCommand::from(cmd).fail_fast().output_mode( + match self.is_verbose() { + true => OutputMode::PrintAll, + false => OutputMode::PrintOutput, + }, + )); + } + + /// Runs a command, printing out contextual info if it fails, and delaying errors until the build finishes. + pub(crate) fn run_delaying_failure(&self, cmd: &mut Command) -> bool { + self.run_cmd(BootstrapCommand::from(cmd).delay_failure().output_mode( + match self.is_verbose() { + true => OutputMode::PrintAll, + false => OutputMode::PrintOutput, + }, + )) + } + + /// Runs a command, printing out nice contextual information if it fails. + fn run_quiet(&self, cmd: &mut Command) { + self.run_cmd( + BootstrapCommand::from(cmd).fail_fast().output_mode(OutputMode::SuppressOnSuccess), + ); + } + + /// Runs a command, printing out nice contextual information if it fails. + /// Exits if the command failed to execute at all, otherwise returns its + /// `status.success()`. + fn run_quiet_delaying_failure(&self, cmd: &mut Command) -> bool { + self.run_cmd( + BootstrapCommand::from(cmd).delay_failure().output_mode(OutputMode::SuppressOnSuccess), + ) + } + + /// A centralized function for running commands that do not return output. 
+ pub(crate) fn run_cmd<'a, C: Into>>(&self, cmd: C) -> bool { + if self.config.dry_run() { + return true; + } + + let command = cmd.into(); + self.verbose(&format!("running: {command:?}")); + + let (output, print_error) = match command.output_mode { + mode @ (OutputMode::PrintAll | OutputMode::PrintOutput) => ( + command.command.status().map(|status| Output { + status, + stdout: Vec::new(), + stderr: Vec::new(), + }), + matches!(mode, OutputMode::PrintAll), + ), + OutputMode::SuppressOnSuccess => (command.command.output(), true), + }; + + let output = match output { + Ok(output) => output, + Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}", command, e)), + }; + let result = if !output.status.success() { + if print_error { + println!( + "\n\ncommand did not execute successfully: {:?}\n\ + expected success, got: {}\n\n\ + stdout ----\n{}\n\ + stderr ----\n{}\n\n", + command.command, + output.status, + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ); + } + Err(()) + } else { + Ok(()) + }; + + match result { + Ok(_) => true, + Err(_) => { + match command.failure_behavior { + BehaviorOnFailure::DelayFail => { + if self.fail_fast { + exit!(1); + } + + let mut failures = self.delayed_failures.borrow_mut(); + failures.push(format!("{command:?}")); + } + BehaviorOnFailure::Exit => { + exit!(1); + } + BehaviorOnFailure::Ignore => {} + } + false + } + } + } + + pub fn is_verbose_than(&self, level: usize) -> bool { + self.verbosity > level + } + + /// Prints a message if this build is configured in more verbose mode than `level`. + fn verbose_than(&self, level: usize, msg: &str) { + if self.is_verbose_than(level) { + println!("{msg}"); + } + } + + fn info(&self, msg: &str) { + match self.config.dry_run { + DryRun::SelfCheck => (), + DryRun::Disabled | DryRun::UserSelected => { + println!("{msg}"); + } + } + } + + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] + fn msg_check( + &self, + what: impl Display, + target: impl Into>, + ) -> Option { + self.msg(Kind::Check, self.config.stage, what, self.config.build, target) + } + + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] + fn msg_doc( + &self, + compiler: Compiler, + what: impl Display, + target: impl Into> + Copy, + ) -> Option { + self.msg(Kind::Doc, compiler.stage, what, compiler.host, target.into()) + } + + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] + fn msg_build( + &self, + compiler: Compiler, + what: impl Display, + target: impl Into>, + ) -> Option { + self.msg(Kind::Build, compiler.stage, what, compiler.host, target) + } + + /// Return a `Group` guard for a [`Step`] that is built for each `--stage`. 
+ /// + /// [`Step`]: crate::core::builder::Step + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] + fn msg( + &self, + action: impl Into, + stage: u32, + what: impl Display, + host: impl Into>, + target: impl Into>, + ) -> Option { + let action = action.into().description(); + let msg = |fmt| format!("{action} stage{stage} {what}{fmt}"); + let msg = if let Some(target) = target.into() { + let host = host.into().unwrap(); + if host == target { + msg(format_args!(" ({target})")) + } else { + msg(format_args!(" ({host} -> {target})")) + } + } else { + msg(format_args!("")) + }; + self.group(&msg) + } + + /// Return a `Group` guard for a [`Step`] that is only built once and isn't affected by `--stage`. + /// + /// [`Step`]: crate::core::builder::Step + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] + fn msg_unstaged( + &self, + action: impl Into, + what: impl Display, + target: TargetSelection, + ) -> Option { + let action = action.into().description(); + let msg = format!("{action} {what} for {target}"); + self.group(&msg) + } + + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] + fn msg_sysroot_tool( + &self, + action: impl Into, + stage: u32, + what: impl Display, + host: TargetSelection, + target: TargetSelection, + ) -> Option { + let action = action.into().description(); + let msg = |fmt| format!("{action} {what} {fmt}"); + let msg = if host == target { + msg(format_args!("(stage{stage} -> stage{}, {target})", stage + 1)) + } else { + msg(format_args!("(stage{stage}:{host} -> stage{}:{target})", stage + 1)) + }; + self.group(&msg) + } + + #[track_caller] + fn group(&self, msg: &str) -> Option { + match self.config.dry_run { + DryRun::SelfCheck => None, + DryRun::Disabled | DryRun::UserSelected => Some(gha::group(&msg)), + } + } + + /// Returns the number of parallel jobs that have been configured for this + /// build. + fn jobs(&self) -> u32 { + self.config.jobs.unwrap_or_else(|| { + std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32 + }) + } + + fn debuginfo_map_to(&self, which: GitRepo) -> Option { + if !self.config.rust_remap_debuginfo { + return None; + } + + match which { + GitRepo::Rustc => { + let sha = self.rust_sha().unwrap_or(&self.version); + Some(format!("/rustc/{sha}")) + } + GitRepo::Llvm => Some(String::from("/rustc/llvm")), + } + } + + /// Returns the path to the C compiler for the target specified. + fn cc(&self, target: TargetSelection) -> PathBuf { + if self.config.dry_run() { + return PathBuf::new(); + } + self.cc.borrow()[&target].path().into() + } + + /// Returns a list of flags to pass to the C compiler for the target + /// specified. + fn cflags(&self, target: TargetSelection, which: GitRepo, c: CLang) -> Vec { + if self.config.dry_run() { + return Vec::new(); + } + let base = match c { + CLang::C => self.cc.borrow()[&target].clone(), + CLang::Cxx => self.cxx.borrow()[&target].clone(), + }; + + // Filter out -O and /O (the optimization flags) that we picked up from + // cc-rs because the build scripts will determine that for themselves. + let mut base = base + .args() + .iter() + .map(|s| s.to_string_lossy().into_owned()) + .filter(|s| !s.starts_with("-O") && !s.starts_with("/O")) + .collect::>(); + + // If we're compiling C++ on macOS then we add a flag indicating that + // we want libc++ (more filled out than libstdc++), ensuring that + // LLVM/etc are all properly compiled. 
+ if matches!(c, CLang::Cxx) && target.contains("apple-darwin") { + base.push("-stdlib=libc++".into()); + } + + // Work around an apparently bad MinGW / GCC optimization, + // See: https://lists.llvm.org/pipermail/cfe-dev/2016-December/051980.html + // See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78936 + if &*target.triple == "i686-pc-windows-gnu" { + base.push("-fno-omit-frame-pointer".into()); + } + + if let Some(map_to) = self.debuginfo_map_to(which) { + let map = format!("{}={}", self.src.display(), map_to); + let cc = self.cc(target); + if cc.ends_with("clang") || cc.ends_with("gcc") { + base.push(format!("-fdebug-prefix-map={map}")); + } else if cc.ends_with("clang-cl.exe") { + base.push("-Xclang".into()); + base.push(format!("-fdebug-prefix-map={map}")); + } + } + base + } + + /// Returns the path to the `ar` archive utility for the target specified. + fn ar(&self, target: TargetSelection) -> Option { + if self.config.dry_run() { + return None; + } + self.ar.borrow().get(&target).cloned() + } + + /// Returns the path to the `ranlib` utility for the target specified. + fn ranlib(&self, target: TargetSelection) -> Option { + if self.config.dry_run() { + return None; + } + self.ranlib.borrow().get(&target).cloned() + } + + /// Returns the path to the C++ compiler for the target specified. + fn cxx(&self, target: TargetSelection) -> Result { + if self.config.dry_run() { + return Ok(PathBuf::new()); + } + match self.cxx.borrow().get(&target) { + Some(p) => Ok(p.path().into()), + None => Err(format!("target `{target}` is not configured as a host, only as a target")), + } + } + + /// Returns the path to the linker for the given target if it needs to be overridden. + fn linker(&self, target: TargetSelection) -> Option { + if self.config.dry_run() { + return Some(PathBuf::new()); + } + if let Some(linker) = self.config.target_config.get(&target).and_then(|c| c.linker.clone()) + { + Some(linker) + } else if target.contains("vxworks") { + // need to use CXX compiler as linker to resolve the exception functions + // that are only existed in CXX libraries + Some(self.cxx.borrow()[&target].path().into()) + } else if target != self.config.build + && helpers::use_host_linker(target) + && !target.contains("msvc") + { + Some(self.cc(target)) + } else if self.config.use_lld && !self.is_fuse_ld_lld(target) && self.build == target { + Some(self.initial_lld.clone()) + } else { + None + } + } + + // LLD is used through `-fuse-ld=lld` rather than directly. + // Only MSVC targets use LLD directly at the moment. 
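// A minimal sketch of the effect of the two helpers below, assuming use-lld is
// enabled for a non-MSVC target: lld_flags yields at most two rustc arguments,
//     -Clink-arg=-fuse-ld=lld
//     -Clink-arg=-Wl,<no-threads flag>
// where the exact no-threads spelling comes from helpers::lld_flag_no_threads
// and depends on whether the target is a Windows one.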
+ fn is_fuse_ld_lld(&self, target: TargetSelection) -> bool { + self.config.use_lld && !target.contains("msvc") + } + + fn lld_flags(&self, target: TargetSelection) -> impl Iterator { + let mut options = [None, None]; + + if self.config.use_lld { + if self.is_fuse_ld_lld(target) { + options[0] = Some("-Clink-arg=-fuse-ld=lld".to_string()); + } + + let no_threads = helpers::lld_flag_no_threads(target.contains("windows")); + options[1] = Some(format!("-Clink-arg=-Wl,{no_threads}")); + } + + IntoIterator::into_iter(options).flatten() + } + + /// Returns if this target should statically link the C runtime, if specified + fn crt_static(&self, target: TargetSelection) -> Option { + if target.contains("pc-windows-msvc") { + Some(true) + } else { + self.config.target_config.get(&target).and_then(|t| t.crt_static) + } + } + + /// Returns the "musl root" for this `target`, if defined + fn musl_root(&self, target: TargetSelection) -> Option<&Path> { + self.config + .target_config + .get(&target) + .and_then(|t| t.musl_root.as_ref()) + .or_else(|| self.config.musl_root.as_ref()) + .map(|p| &**p) + } + + /// Returns the "musl libdir" for this `target`. + fn musl_libdir(&self, target: TargetSelection) -> Option { + let t = self.config.target_config.get(&target)?; + if let libdir @ Some(_) = &t.musl_libdir { + return libdir.clone(); + } + self.musl_root(target).map(|root| root.join("lib")) + } + + /// Returns the sysroot for the wasi target, if defined + fn wasi_root(&self, target: TargetSelection) -> Option<&Path> { + self.config.target_config.get(&target).and_then(|t| t.wasi_root.as_ref()).map(|p| &**p) + } + + /// Returns `true` if this is a no-std `target`, if defined + fn no_std(&self, target: TargetSelection) -> Option { + self.config.target_config.get(&target).map(|t| t.no_std) + } + + /// Returns `true` if the target will be tested using the `remote-test-client` + /// and `remote-test-server` binaries. + fn remote_tested(&self, target: TargetSelection) -> bool { + self.qemu_rootfs(target).is_some() + || target.contains("android") + || env::var_os("TEST_DEVICE_ADDR").is_some() + } + + /// Returns the root of the "rootfs" image that this target will be using, + /// if one was configured. + /// + /// If `Some` is returned then that means that tests for this target are + /// emulated with QEMU and binaries will need to be shipped to the emulator. + fn qemu_rootfs(&self, target: TargetSelection) -> Option<&Path> { + self.config.target_config.get(&target).and_then(|t| t.qemu_rootfs.as_ref()).map(|p| &**p) + } + + /// Path to the python interpreter to use + fn python(&self) -> &Path { + if self.config.build.ends_with("apple-darwin") { + // Force /usr/bin/python3 on macOS for LLDB tests because we're loading the + // LLDB plugin's compiled module which only works with the system python + // (namely not Homebrew-installed python) + Path::new("/usr/bin/python3") + } else { + self.config + .python + .as_ref() + .expect("python is required for running LLDB or rustdoc tests") + } + } + + /// Temporary directory that extended error information is emitted to. + fn extended_error_dir(&self) -> PathBuf { + self.out.join("tmp/extended-error-metadata") + } + + /// Tests whether the `compiler` compiling for `target` should be forced to + /// use a stage1 compiler instead. + /// + /// Currently, by default, the build system does not perform a "full + /// bootstrap" by default where we compile the compiler three times. + /// Instead, we compile the compiler two times. 
The final stage (stage2) + /// just copies the libraries from the previous stage, which is what this + /// method detects. + /// + /// Here we return `true` if: + /// + /// * The build isn't performing a full bootstrap + /// * The `compiler` is in the final stage, 2 + /// * We're not cross-compiling, so the artifacts are already available in + /// stage1 + /// + /// When all of these conditions are met the build will lift artifacts from + /// the previous stage forward. + fn force_use_stage1(&self, stage: u32, target: TargetSelection) -> bool { + !self.config.full_bootstrap + && !self.config.download_rustc() + && stage >= 2 + && (self.hosts.iter().any(|h| *h == target) || target == self.build) + } + + /// Checks whether the `compiler` compiling for `target` should be forced to + /// use a stage2 compiler instead. + /// + /// When we download the pre-compiled version of rustc and compiler stage is >= 2, + /// it should be forced to use a stage2 compiler. + fn force_use_stage2(&self, stage: u32) -> bool { + self.config.download_rustc() && stage >= 2 + } + + /// Given `num` in the form "a.b.c" return a "release string" which + /// describes the release version number. + /// + /// For example on nightly this returns "a.b.c-nightly", on beta it returns + /// "a.b.c-beta.1" and on stable it just returns "a.b.c". + fn release(&self, num: &str) -> String { + match &self.config.channel[..] { + "stable" => num.to_string(), + "beta" => { + if !self.config.omit_git_hash { + format!("{}-beta.{}", num, self.beta_prerelease_version()) + } else { + format!("{num}-beta") + } + } + "nightly" => format!("{num}-nightly"), + _ => format!("{num}-dev"), + } + } + + fn beta_prerelease_version(&self) -> u32 { + fn extract_beta_rev_from_file>(version_file: P) -> Option { + let version = fs::read_to_string(version_file).ok()?; + + helpers::extract_beta_rev(&version) + } + + if let Some(s) = self.prerelease_version.get() { + return s; + } + + // First check if there is a version file available. + // If available, we read the beta revision from that file. + // This only happens when building from a source tarball when Git should not be used. + let count = extract_beta_rev_from_file(self.src.join("version")).unwrap_or_else(|| { + // Figure out how many merge commits happened since we branched off master. + // That's our beta number! + // (Note that we use a `..` range, not the `...` symmetric difference.) + output(self.config.git().arg("rev-list").arg("--count").arg("--merges").arg(format!( + "refs/remotes/origin/{}..HEAD", + self.config.stage0_metadata.config.nightly_branch + ))) + }); + let n = count.trim().parse().unwrap(); + self.prerelease_version.set(Some(n)); + n + } + + /// Returns the value of `release` above for Rust itself. + fn rust_release(&self) -> String { + self.release(&self.version) + } + + /// Returns the "package version" for a component given the `num` release + /// number. + /// + /// The package version is typically what shows up in the names of tarballs. + /// For channels like beta/nightly it's just the channel name, otherwise + /// it's the `num` provided. + fn package_vers(&self, num: &str) -> String { + match &self.config.channel[..] { + "stable" => num.to_string(), + "beta" => "beta".to_string(), + "nightly" => "nightly".to_string(), + _ => format!("{num}-dev"), + } + } + + /// Returns the value of `package_vers` above for Rust itself. 
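// A quick summary of the two strings derived above, assuming a hypothetical
// `num` of "1.75.0":
//
//   channel    release()           package_vers()
//   stable     "1.75.0"            "1.75.0"
//   beta       "1.75.0-beta.N"     "beta"
//   nightly    "1.75.0-nightly"    "nightly"
//   (other)    "1.75.0-dev"        "1.75.0-dev"
//
// where N is the merge-commit count computed by beta_prerelease_version (or the
// bare "-beta" suffix when omit-git-hash is set).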
+ fn rust_package_vers(&self) -> String { + self.package_vers(&self.version) + } + + /// Returns the `version` string associated with this compiler for Rust + /// itself. + /// + /// Note that this is a descriptive string which includes the commit date, + /// sha, version, etc. + fn rust_version(&self) -> String { + let mut version = self.rust_info().version(self, &self.version); + if let Some(ref s) = self.config.description { + version.push_str(" ("); + version.push_str(s); + version.push(')'); + } + version + } + + /// Returns the full commit hash. + fn rust_sha(&self) -> Option<&str> { + self.rust_info().sha() + } + + /// Returns the `a.b.c` version that the given package is at. + fn release_num(&self, package: &str) -> String { + let toml_file_name = self.src.join(&format!("src/tools/{package}/Cargo.toml")); + let toml = t!(fs::read_to_string(&toml_file_name)); + for line in toml.lines() { + if let Some(stripped) = + line.strip_prefix("version = \"").and_then(|s| s.strip_suffix("\"")) + { + return stripped.to_owned(); + } + } + + panic!("failed to find version in {package}'s Cargo.toml") + } + + /// Returns `true` if unstable features should be enabled for the compiler + /// we're building. + fn unstable_features(&self) -> bool { + match &self.config.channel[..] { + "stable" | "beta" => false, + "nightly" | _ => true, + } + } + + /// Returns a Vec of all the dependencies of the given root crate, + /// including transitive dependencies and the root itself. Only includes + /// "local" crates (those in the local source tree, not from a registry). + fn in_tree_crates(&self, root: &str, target: Option) -> Vec<&Crate> { + let mut ret = Vec::new(); + let mut list = vec![INTERNER.intern_str(root)]; + let mut visited = HashSet::new(); + while let Some(krate) = list.pop() { + let krate = self + .crates + .get(&krate) + .unwrap_or_else(|| panic!("metadata missing for {krate}: {:?}", self.crates)); + ret.push(krate); + for dep in &krate.deps { + if !self.crates.contains_key(dep) { + // Ignore non-workspace members. + continue; + } + // Don't include optional deps if their features are not + // enabled. Ideally this would be computed from `cargo + // metadata --features …`, but that is somewhat slow. In + // the future, we may want to consider just filtering all + // build and dev dependencies in metadata::build. + if visited.insert(dep) + && (dep != "profiler_builtins" + || target + .map(|t| self.config.profiler_enabled(t)) + .unwrap_or_else(|| self.config.any_profiler_enabled())) + && (dep != "rustc_codegen_llvm" || self.config.llvm_enabled()) + { + list.push(*dep); + } + } + } + ret.sort_unstable_by_key(|krate| krate.name); // reproducible order needed for tests + ret + } + + fn read_stamp_file(&self, stamp: &Path) -> Vec<(PathBuf, DependencyType)> { + if self.config.dry_run() { + return Vec::new(); + } + + if !stamp.exists() { + eprintln!( + "ERROR: Unable to find the stamp file {}, did you try to keep a nonexistent build stage?", + stamp.display() + ); + crate::exit!(1); + } + + let mut paths = Vec::new(); + let contents = t!(fs::read(stamp), &stamp); + // This is the method we use for extracting paths from the stamp file passed to us. See + // run_cargo for more information (in compile.rs). 
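// A minimal sketch of the stamp-file layout decoded below: the file is a flat
// byte buffer of NUL-separated entries, each a one-byte tag followed by a path.
//   'h' -> DependencyType::Host
//   's' -> DependencyType::TargetSelfContained
//   't' -> DependencyType::Target
// For example, a hypothetical entry b"t/build/stage1-std/libstd.rlib" decodes to
// (PathBuf::from("/build/stage1-std/libstd.rlib"), DependencyType::Target).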
+ for part in contents.split(|b| *b == 0) { + if part.is_empty() { + continue; + } + let dependency_type = match part[0] as char { + 'h' => DependencyType::Host, + 's' => DependencyType::TargetSelfContained, + 't' => DependencyType::Target, + _ => unreachable!(), + }; + let path = PathBuf::from(t!(str::from_utf8(&part[1..]))); + paths.push((path, dependency_type)); + } + paths + } + + /// Copies a file from `src` to `dst` + pub fn copy(&self, src: &Path, dst: &Path) { + self.copy_internal(src, dst, false); + } + + fn copy_internal(&self, src: &Path, dst: &Path, dereference_symlinks: bool) { + if self.config.dry_run() { + return; + } + self.verbose_than(1, &format!("Copy {src:?} to {dst:?}")); + if src == dst { + return; + } + let _ = fs::remove_file(&dst); + let metadata = t!(src.symlink_metadata()); + let mut src = src.to_path_buf(); + if metadata.file_type().is_symlink() { + if dereference_symlinks { + src = t!(fs::canonicalize(src)); + } else { + let link = t!(fs::read_link(src)); + t!(self.symlink_file(link, dst)); + return; + } + } + if let Ok(()) = fs::hard_link(&src, dst) { + // Attempt to "easy copy" by creating a hard link + // (symlinks don't work on windows), but if that fails + // just fall back to a slow `copy` operation. + } else { + if let Err(e) = fs::copy(&src, dst) { + panic!("failed to copy `{}` to `{}`: {}", src.display(), dst.display(), e) + } + t!(fs::set_permissions(dst, metadata.permissions())); + let atime = FileTime::from_last_access_time(&metadata); + let mtime = FileTime::from_last_modification_time(&metadata); + t!(filetime::set_file_times(dst, atime, mtime)); + } + } + + /// Copies the `src` directory recursively to `dst`. Both are assumed to exist + /// when this function is called. + pub fn cp_r(&self, src: &Path, dst: &Path) { + if self.config.dry_run() { + return; + } + for f in self.read_dir(src) { + let path = f.path(); + let name = path.file_name().unwrap(); + let dst = dst.join(name); + if t!(f.file_type()).is_dir() { + t!(fs::create_dir_all(&dst)); + self.cp_r(&path, &dst); + } else { + let _ = fs::remove_file(&dst); + self.copy(&path, &dst); + } + } + } + + /// Copies the `src` directory recursively to `dst`. Both are assumed to exist + /// when this function is called. Unwanted files or directories can be skipped + /// by returning `false` from the filter function. 
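// A hypothetical usage sketch: copy a tree while skipping VCS metadata and any
// `target` directory, where the closure receives paths relative to `src`:
//     build.cp_filtered(&src_dir, &dst_dir, &|p| {
//         !p.starts_with(".git") && !p.starts_with("target")
//     });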
+ pub fn cp_filtered(&self, src: &Path, dst: &Path, filter: &dyn Fn(&Path) -> bool) { + // Immediately recurse with an empty relative path + self.recurse_(src, dst, Path::new(""), filter) + } + + // Inner function does the actual work + fn recurse_(&self, src: &Path, dst: &Path, relative: &Path, filter: &dyn Fn(&Path) -> bool) { + for f in self.read_dir(src) { + let path = f.path(); + let name = path.file_name().unwrap(); + let dst = dst.join(name); + let relative = relative.join(name); + // Only copy file or directory if the filter function returns true + if filter(&relative) { + if t!(f.file_type()).is_dir() { + let _ = fs::remove_dir_all(&dst); + self.create_dir(&dst); + self.recurse_(&path, &dst, &relative, filter); + } else { + let _ = fs::remove_file(&dst); + self.copy(&path, &dst); + } + } + } + } + + fn copy_to_folder(&self, src: &Path, dest_folder: &Path) { + let file_name = src.file_name().unwrap(); + let dest = dest_folder.join(file_name); + self.copy(src, &dest); + } + + fn install(&self, src: &Path, dstdir: &Path, perms: u32) { + if self.config.dry_run() { + return; + } + let dst = dstdir.join(src.file_name().unwrap()); + self.verbose_than(1, &format!("Install {src:?} to {dst:?}")); + t!(fs::create_dir_all(dstdir)); + if !src.exists() { + panic!("ERROR: File \"{}\" not found!", src.display()); + } + self.copy_internal(src, &dst, true); + chmod(&dst, perms); + } + + fn read(&self, path: &Path) -> String { + if self.config.dry_run() { + return String::new(); + } + t!(fs::read_to_string(path)) + } + + fn create_dir(&self, dir: &Path) { + if self.config.dry_run() { + return; + } + t!(fs::create_dir_all(dir)) + } + + fn remove_dir(&self, dir: &Path) { + if self.config.dry_run() { + return; + } + t!(fs::remove_dir_all(dir)) + } + + fn read_dir(&self, dir: &Path) -> impl Iterator { + let iter = match fs::read_dir(dir) { + Ok(v) => v, + Err(_) if self.config.dry_run() => return vec![].into_iter(), + Err(err) => panic!("could not read dir {dir:?}: {err:?}"), + }; + iter.map(|e| t!(e)).collect::>().into_iter() + } + + fn symlink_file, Q: AsRef>(&self, src: P, link: Q) -> io::Result<()> { + #[cfg(unix)] + use std::os::unix::fs::symlink as symlink_file; + #[cfg(windows)] + use std::os::windows::fs::symlink_file; + if !self.config.dry_run() { symlink_file(src.as_ref(), link.as_ref()) } else { Ok(()) } + } + + /// Returns if config.ninja is enabled, and checks for ninja existence, + /// exiting with a nicer error message if not. + fn ninja(&self) -> bool { + let mut cmd_finder = crate::core::sanity::Finder::new(); + + if self.config.ninja_in_file { + // Some Linux distros rename `ninja` to `ninja-build`. + // CMake can work with either binary name. + if cmd_finder.maybe_have("ninja-build").is_none() + && cmd_finder.maybe_have("ninja").is_none() + { + eprintln!( + " +Couldn't find required command: ninja (or ninja-build) + +You should install ninja as described at +, +or set `ninja = false` in the `[llvm]` section of `config.toml`. +Alternatively, set `download-ci-llvm = true` in that `[llvm]` section +to download LLVM rather than building it. +" + ); + exit!(1); + } + } + + // If ninja isn't enabled but we're building for MSVC then we try + // doubly hard to enable it. It was realized in #43767 that the msbuild + // CMake generator for MSVC doesn't respect configuration options like + // disabling LLVM assertions, which can often be quite important! + // + // In these cases we automatically enable Ninja if we find it in the + // environment. 
+ if !self.config.ninja_in_file + && self.config.build.contains("msvc") + && cmd_finder.maybe_have("ninja").is_some() + { + return true; + } + + self.config.ninja_in_file + } + + pub fn colored_stdout R>(&self, f: F) -> R { + self.colored_stream_inner(StandardStream::stdout, self.config.stdout_is_tty, f) + } + + pub fn colored_stderr R>(&self, f: F) -> R { + self.colored_stream_inner(StandardStream::stderr, self.config.stderr_is_tty, f) + } + + fn colored_stream_inner(&self, constructor: C, is_tty: bool, f: F) -> R + where + C: Fn(ColorChoice) -> StandardStream, + F: FnOnce(&mut dyn WriteColor) -> R, + { + let choice = match self.config.color { + flags::Color::Always => ColorChoice::Always, + flags::Color::Never => ColorChoice::Never, + flags::Color::Auto if !is_tty => ColorChoice::Never, + flags::Color::Auto => ColorChoice::Auto, + }; + let mut stream = constructor(choice); + let result = f(&mut stream); + stream.reset().unwrap(); + result + } +} + +#[cfg(unix)] +fn chmod(path: &Path, perms: u32) { + use std::os::unix::fs::*; + t!(fs::set_permissions(path, fs::Permissions::from_mode(perms))); +} +#[cfg(windows)] +fn chmod(_path: &Path, _perms: u32) {} + +impl Compiler { + pub fn with_stage(mut self, stage: u32) -> Compiler { + self.stage = stage; + self + } + + /// Returns `true` if this is a snapshot compiler for `build`'s configuration + pub fn is_snapshot(&self, build: &Build) -> bool { + self.stage == 0 && self.host == build.build + } + + /// Returns if this compiler should be treated as a final stage one in the + /// current build session. + /// This takes into account whether we're performing a full bootstrap or + /// not; don't directly compare the stage with `2`! + pub fn is_final_stage(&self, build: &Build) -> bool { + let final_stage = if build.config.full_bootstrap { 2 } else { 1 }; + self.stage >= final_stage + } +} + +fn envify(s: &str) -> String { + s.chars() + .map(|c| match c { + '-' => '_', + c => c, + }) + .flat_map(|c| c.to_uppercase()) + .collect() +} + +pub fn find_recent_config_change_ids(current_id: usize) -> Vec { + if !CONFIG_CHANGE_HISTORY.contains(¤t_id) { + // If the current change-id is greater than the most recent one, return + // an empty list (it may be due to switching from a recent branch to an + // older one); otherwise, return the full list (assuming the user provided + // the incorrect change-id by accident). 
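// A worked example of the rules above, assuming a hypothetical
// CONFIG_CHANGE_HISTORY of [10, 20, 30]:
//   current_id = 20 -> [30]            (only the entries newer than the known id)
//   current_id = 99 -> []              (newer than anything recorded, likely a branch switch)
//   current_id = 5  -> [10, 20, 30]    (unknown id, so return the full history)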
+ if let Some(max_id) = CONFIG_CHANGE_HISTORY.iter().max() { + if ¤t_id > max_id { + return Vec::new(); + } + } + + return CONFIG_CHANGE_HISTORY.to_vec(); + } + + let index = CONFIG_CHANGE_HISTORY.iter().position(|&id| id == current_id).unwrap(); + + CONFIG_CHANGE_HISTORY + .iter() + .skip(index + 1) // Skip the current_id and IDs before it + .cloned() + .collect() +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/builder.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/builder.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/builder.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/builder.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,701 @@ +use super::*; +use crate::core::config::{Config, DryRun, TargetSelection}; +use crate::core::build_steps::doc::DocumentationFormat; +use std::thread; + +fn configure(cmd: &str, host: &[&str], target: &[&str]) -> Config { + configure_with_args(&[cmd.to_owned()], host, target) +} + +fn configure_with_args(cmd: &[String], host: &[&str], target: &[&str]) -> Config { + let mut config = Config::parse(cmd); + // don't save toolstates + config.save_toolstates = None; + config.dry_run = DryRun::SelfCheck; + + // Ignore most submodules, since we don't need them for a dry run. + // But make sure to check out the `doc` and `rust-analyzer` submodules, since some steps need them + // just to know which commands to run. + let submodule_build = Build::new(Config { + // don't include LLVM, so CI doesn't require ninja/cmake to be installed + rust_codegen_backends: vec![], + ..Config::parse(&["check".to_owned()]) + }); + submodule_build.update_submodule(Path::new("src/doc/book")); + config.submodules = Some(false); + + config.ninja_in_file = false; + // try to avoid spurious failures in dist where we create/delete each others file + // HACK: rather than pull in `tempdir`, use the one that cargo has conveniently created for us + let dir = Path::new(env!("OUT_DIR")) + .join("tmp-rustbuild-tests") + .join(&thread::current().name().unwrap_or("unknown").replace(":", "-")); + t!(fs::create_dir_all(&dir)); + config.out = dir; + config.build = TargetSelection::from_user("A"); + config.hosts = host.iter().map(|s| TargetSelection::from_user(s)).collect(); + config.targets = target.iter().map(|s| TargetSelection::from_user(s)).collect(); + config +} + +fn first(v: Vec<(A, B)>) -> Vec { + v.into_iter().map(|(a, _)| a).collect::>() +} + +fn run_build(paths: &[PathBuf], config: Config) -> Cache { + let kind = config.cmd.kind(); + let build = Build::new(config); + let builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(kind), paths); + builder.cache +} + +fn check_cli(paths: [&str; N]) { + run_build( + &paths.map(PathBuf::from), + configure_with_args(&paths.map(String::from), &["A"], &["A"]), + ); +} + +macro_rules! std { + ($host:ident => $target:ident, stage = $stage:literal) => { + compile::Std::new( + Compiler { host: TargetSelection::from_user(stringify!($host)), stage: $stage }, + TargetSelection::from_user(stringify!($target)), + ) + }; +} + +macro_rules! doc_std { + ($host:ident => $target:ident, stage = $stage:literal) => {{ + let config = configure("doc", &["A"], &["A"]); + let build = Build::new(config); + let builder = Builder::new(&build); + doc::Std::new( + $stage, + TargetSelection::from_user(stringify!($target)), + &builder, + DocumentationFormat::HTML, + ) + }}; +} + +macro_rules! 
rustc { + ($host:ident => $target:ident, stage = $stage:literal) => { + compile::Rustc::new( + Compiler { host: TargetSelection::from_user(stringify!($host)), stage: $stage }, + TargetSelection::from_user(stringify!($target)), + ) + }; +} + +#[test] +fn test_valid() { + // make sure multi suite paths are accepted + check_cli(["test", "tests/ui/attr-start.rs", "tests/ui/attr-shebang.rs"]); +} + +#[test] +#[should_panic] +fn test_invalid() { + // make sure that invalid paths are caught, even when combined with valid paths + check_cli(["test", "library/std", "x"]); +} + +#[test] +fn test_intersection() { + let set = |paths: &[&str]| { + PathSet::Set(paths.into_iter().map(|p| TaskPath { path: p.into(), kind: None }).collect()) + }; + let library_set = set(&["library/core", "library/alloc", "library/std"]); + let mut command_paths = + vec![Path::new("library/core"), Path::new("library/alloc"), Path::new("library/stdarch")]; + let subset = library_set.intersection_removing_matches(&mut command_paths, Kind::Build); + assert_eq!(subset, set(&["library/core", "library/alloc"]),); + assert_eq!(command_paths, vec![Path::new("library/stdarch")]); +} + +#[test] +fn test_exclude() { + let mut config = configure("test", &["A"], &["A"]); + config.skip = vec!["src/tools/tidy".into()]; + let cache = run_build(&[], config); + + // Ensure we have really excluded tidy + assert!(!cache.contains::()); + + // Ensure other tests are not affected. + assert!(cache.contains::()); +} + +#[test] +fn test_exclude_kind() { + let path = PathBuf::from("compiler/rustc_data_structures"); + + let mut config = configure("test", &["A"], &["A"]); + // Ensure our test is valid, and `test::Rustc` would be run without the exclude. + assert!(run_build(&[], config.clone()).contains::()); + // Ensure tests for rustc are not skipped. + config.skip = vec![path.clone()]; + assert!(run_build(&[], config.clone()).contains::()); + // Ensure builds for rustc are not skipped. + assert!(run_build(&[], config).contains::()); +} + +/// Ensure that if someone passes both a single crate and `library`, all library crates get built. 
+#[test] +fn alias_and_path_for_library() { + let mut cache = + run_build(&["library".into(), "core".into()], configure("build", &["A"], &["A"])); + assert_eq!( + first(cache.all::()), + &[std!(A => A, stage = 0), std!(A => A, stage = 1)] + ); + + let mut cache = run_build(&["library".into(), "core".into()], configure("doc", &["A"], &["A"])); + assert_eq!(first(cache.all::()), &[doc_std!(A => A, stage = 0)]); +} + +#[test] +fn test_beta_rev_parsing() { + use crate::utils::helpers::extract_beta_rev; + + // single digit revision + assert_eq!(extract_beta_rev("1.99.9-beta.7 (xxxxxx)"), Some("7".to_string())); + // multiple digits + assert_eq!(extract_beta_rev("1.99.9-beta.777 (xxxxxx)"), Some("777".to_string())); + // nightly channel (no beta revision) + assert_eq!(extract_beta_rev("1.99.9-nightly (xxxxxx)"), None); + // stable channel (no beta revision) + assert_eq!(extract_beta_rev("1.99.9 (xxxxxxx)"), None); + // invalid string + assert_eq!(extract_beta_rev("invalid"), None); +} + +mod defaults { + use super::{configure, first, run_build}; + use crate::core::builder::*; + use crate::Config; + use pretty_assertions::assert_eq; + + #[test] + fn build_default() { + let mut cache = run_build(&[], configure("build", &["A"], &["A"])); + + let a = TargetSelection::from_user("A"); + assert_eq!( + first(cache.all::()), + &[std!(A => A, stage = 0), std!(A => A, stage = 1),] + ); + assert!(!cache.all::().is_empty()); + // Make sure rustdoc is only built once. + assert_eq!( + first(cache.all::()), + // Recall that rustdoc stages are off-by-one + // - this is the compiler it's _linked_ to, not built with. + &[tool::Rustdoc { compiler: Compiler { host: a, stage: 1 } }], + ); + assert_eq!(first(cache.all::()), &[rustc!(A => A, stage = 0)],); + } + + #[test] + fn build_stage_0() { + let config = Config { stage: 0, ..configure("build", &["A"], &["A"]) }; + let mut cache = run_build(&[], config); + + let a = TargetSelection::from_user("A"); + assert_eq!(first(cache.all::()), &[std!(A => A, stage = 0)]); + assert!(!cache.all::().is_empty()); + assert_eq!( + first(cache.all::()), + // This is the beta rustdoc. + // Add an assert here to make sure this is the only rustdoc built. + &[tool::Rustdoc { compiler: Compiler { host: a, stage: 0 } }], + ); + assert!(cache.all::().is_empty()); + } + + #[test] + fn build_cross_compile() { + let config = Config { stage: 1, ..configure("build", &["A", "B"], &["A", "B"]) }; + let mut cache = run_build(&[], config); + + let a = TargetSelection::from_user("A"); + let b = TargetSelection::from_user("B"); + + // Ideally, this build wouldn't actually have `target: a` + // rustdoc/rustcc/std here (the user only requested a host=B build, so + // there's not really a need for us to build for target A in this case + // (since we're producing stage 1 libraries/binaries). But currently + // rustbuild is just a bit buggy here; this should be fixed though. 
+ assert_eq!( + first(cache.all::()), + &[ + std!(A => A, stage = 0), + std!(A => A, stage = 1), + std!(A => B, stage = 0), + std!(A => B, stage = 1), + ] + ); + assert_eq!( + first(cache.all::()), + &[ + compile::Assemble { target_compiler: Compiler { host: a, stage: 0 } }, + compile::Assemble { target_compiler: Compiler { host: a, stage: 1 } }, + compile::Assemble { target_compiler: Compiler { host: b, stage: 1 } }, + ] + ); + assert_eq!( + first(cache.all::()), + &[ + tool::Rustdoc { compiler: Compiler { host: a, stage: 1 } }, + tool::Rustdoc { compiler: Compiler { host: b, stage: 1 } }, + ], + ); + assert_eq!( + first(cache.all::()), + &[rustc!(A => A, stage = 0), rustc!(A => B, stage = 0),] + ); + } + + #[test] + fn doc_default() { + let mut config = configure("doc", &["A"], &["A"]); + config.compiler_docs = true; + config.cmd = Subcommand::Doc { open: false, json: false }; + let mut cache = run_build(&[], config); + let a = TargetSelection::from_user("A"); + + // error_index_generator uses stage 0 to share rustdoc artifacts with the + // rustdoc tool. + assert_eq!(first(cache.all::()), &[doc::ErrorIndex { target: a },]); + assert_eq!( + first(cache.all::()), + &[tool::ErrorIndex { compiler: Compiler { host: a, stage: 0 } }] + ); + // docs should be built with the beta compiler, not with the stage0 artifacts. + // recall that rustdoc is off-by-one: `stage` is the compiler rustdoc is _linked_ to, + // not the one it was built by. + assert_eq!( + first(cache.all::()), + &[tool::Rustdoc { compiler: Compiler { host: a, stage: 0 } },] + ); + } +} + +mod dist { + use super::{first, run_build, Config}; + use crate::core::builder::*; + use pretty_assertions::assert_eq; + + fn configure(host: &[&str], target: &[&str]) -> Config { + Config { stage: 2, ..super::configure("dist", host, target) } + } + + #[test] + fn dist_baseline() { + let mut cache = run_build(&[], configure(&["A"], &["A"])); + + let a = TargetSelection::from_user("A"); + + assert_eq!(first(cache.all::()), &[dist::Docs { host: a },]); + assert_eq!(first(cache.all::()), &[dist::Mingw { host: a },]); + assert_eq!( + first(cache.all::()), + &[dist::Rustc { compiler: Compiler { host: a, stage: 2 } },] + ); + assert_eq!( + first(cache.all::()), + &[dist::Std { compiler: Compiler { host: a, stage: 1 }, target: a },] + ); + assert_eq!(first(cache.all::()), &[dist::Src]); + // Make sure rustdoc is only built once. 
+ assert_eq!( + first(cache.all::()), + &[tool::Rustdoc { compiler: Compiler { host: a, stage: 2 } },] + ); + } + + #[test] + fn dist_with_targets() { + let mut cache = run_build(&[], configure(&["A"], &["A", "B"])); + + let a = TargetSelection::from_user("A"); + let b = TargetSelection::from_user("B"); + + assert_eq!( + first(cache.all::()), + &[dist::Docs { host: a }, dist::Docs { host: b },] + ); + assert_eq!( + first(cache.all::()), + &[dist::Mingw { host: a }, dist::Mingw { host: b },] + ); + assert_eq!( + first(cache.all::()), + &[dist::Rustc { compiler: Compiler { host: a, stage: 2 } },] + ); + assert_eq!( + first(cache.all::()), + &[ + dist::Std { compiler: Compiler { host: a, stage: 1 }, target: a }, + dist::Std { compiler: Compiler { host: a, stage: 2 }, target: b }, + ] + ); + assert_eq!(first(cache.all::()), &[dist::Src]); + } + + #[test] + fn dist_with_hosts() { + let mut cache = run_build(&[], configure(&["A", "B"], &["A", "B"])); + + let a = TargetSelection::from_user("A"); + let b = TargetSelection::from_user("B"); + + assert_eq!( + first(cache.all::()), + &[dist::Docs { host: a }, dist::Docs { host: b },] + ); + assert_eq!( + first(cache.all::()), + &[dist::Mingw { host: a }, dist::Mingw { host: b },] + ); + assert_eq!( + first(cache.all::()), + &[ + dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, + dist::Rustc { compiler: Compiler { host: b, stage: 2 } }, + ] + ); + assert_eq!( + first(cache.all::()), + &[ + dist::Std { compiler: Compiler { host: a, stage: 1 }, target: a }, + dist::Std { compiler: Compiler { host: a, stage: 1 }, target: b }, + ] + ); + assert_eq!( + first(cache.all::()), + &[ + std!(A => A, stage = 0), + std!(A => A, stage = 1), + std!(A => A, stage = 2), + std!(A => B, stage = 1), + std!(A => B, stage = 2), + ], + ); + assert_eq!(first(cache.all::()), &[dist::Src]); + } + + #[test] + fn dist_only_cross_host() { + let b = TargetSelection::from_user("B"); + let mut config = configure(&["A", "B"], &["A", "B"]); + config.docs = false; + config.extended = true; + config.hosts = vec![b]; + let mut cache = run_build(&[], config); + + assert_eq!( + first(cache.all::()), + &[dist::Rustc { compiler: Compiler { host: b, stage: 2 } },] + ); + assert_eq!( + first(cache.all::()), + &[rustc!(A => A, stage = 0), rustc!(A => B, stage = 1),] + ); + } + + #[test] + fn dist_with_targets_and_hosts() { + let mut cache = run_build(&[], configure(&["A", "B"], &["A", "B", "C"])); + + let a = TargetSelection::from_user("A"); + let b = TargetSelection::from_user("B"); + let c = TargetSelection::from_user("C"); + + assert_eq!( + first(cache.all::()), + &[dist::Docs { host: a }, dist::Docs { host: b }, dist::Docs { host: c },] + ); + assert_eq!( + first(cache.all::()), + &[dist::Mingw { host: a }, dist::Mingw { host: b }, dist::Mingw { host: c },] + ); + assert_eq!( + first(cache.all::()), + &[ + dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, + dist::Rustc { compiler: Compiler { host: b, stage: 2 } }, + ] + ); + assert_eq!( + first(cache.all::()), + &[ + dist::Std { compiler: Compiler { host: a, stage: 1 }, target: a }, + dist::Std { compiler: Compiler { host: a, stage: 1 }, target: b }, + dist::Std { compiler: Compiler { host: a, stage: 2 }, target: c }, + ] + ); + assert_eq!(first(cache.all::()), &[dist::Src]); + } + + #[test] + fn dist_with_empty_host() { + let config = configure(&[], &["C"]); + let mut cache = run_build(&[], config); + + let a = TargetSelection::from_user("A"); + let c = TargetSelection::from_user("C"); + + assert_eq!(first(cache.all::()), 
&[dist::Docs { host: c },]); + assert_eq!(first(cache.all::()), &[dist::Mingw { host: c },]); + assert_eq!( + first(cache.all::()), + &[dist::Std { compiler: Compiler { host: a, stage: 2 }, target: c },] + ); + } + + #[test] + fn dist_with_same_targets_and_hosts() { + let mut cache = run_build(&[], configure(&["A", "B"], &["A", "B"])); + + let a = TargetSelection::from_user("A"); + let b = TargetSelection::from_user("B"); + + assert_eq!( + first(cache.all::()), + &[dist::Docs { host: a }, dist::Docs { host: b },] + ); + assert_eq!( + first(cache.all::()), + &[dist::Mingw { host: a }, dist::Mingw { host: b },] + ); + assert_eq!( + first(cache.all::()), + &[ + dist::Rustc { compiler: Compiler { host: a, stage: 2 } }, + dist::Rustc { compiler: Compiler { host: b, stage: 2 } }, + ] + ); + assert_eq!( + first(cache.all::()), + &[ + dist::Std { compiler: Compiler { host: a, stage: 1 }, target: a }, + dist::Std { compiler: Compiler { host: a, stage: 1 }, target: b }, + ] + ); + assert_eq!(first(cache.all::()), &[dist::Src]); + assert_eq!( + first(cache.all::()), + &[ + std!(A => A, stage = 0), + std!(A => A, stage = 1), + std!(A => A, stage = 2), + std!(A => B, stage = 1), + std!(A => B, stage = 2), + ] + ); + assert_eq!( + first(cache.all::()), + &[ + compile::Assemble { target_compiler: Compiler { host: a, stage: 0 } }, + compile::Assemble { target_compiler: Compiler { host: a, stage: 1 } }, + compile::Assemble { target_compiler: Compiler { host: a, stage: 2 } }, + compile::Assemble { target_compiler: Compiler { host: b, stage: 2 } }, + ] + ); + } + + #[test] + fn build_all() { + let build = Build::new(configure(&["A", "B"], &["A", "B", "C"])); + let mut builder = Builder::new(&build); + builder.run_step_descriptions( + &Builder::get_step_descriptions(Kind::Build), + &["compiler/rustc".into(), "library".into()], + ); + + assert_eq!( + first(builder.cache.all::()), + &[ + std!(A => A, stage = 0), + std!(A => A, stage = 1), + std!(A => A, stage = 2), + std!(A => B, stage = 1), + std!(A => B, stage = 2), + std!(A => C, stage = 2), + ] + ); + assert_eq!(builder.cache.all::().len(), 5); + assert_eq!( + first(builder.cache.all::()), + &[ + rustc!(A => A, stage = 0), + rustc!(A => A, stage = 1), + rustc!(A => A, stage = 2), + rustc!(A => B, stage = 1), + rustc!(A => B, stage = 2), + ] + ); + } + + #[test] + fn build_with_empty_host() { + let config = configure(&[], &["C"]); + let build = Build::new(config); + let mut builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]); + + let a = TargetSelection::from_user("A"); + + assert_eq!( + first(builder.cache.all::()), + &[std!(A => A, stage = 0), std!(A => A, stage = 1), std!(A => C, stage = 2),] + ); + assert_eq!( + first(builder.cache.all::()), + &[ + compile::Assemble { target_compiler: Compiler { host: a, stage: 0 } }, + compile::Assemble { target_compiler: Compiler { host: a, stage: 1 } }, + compile::Assemble { target_compiler: Compiler { host: a, stage: 2 } }, + ] + ); + assert_eq!( + first(builder.cache.all::()), + &[rustc!(A => A, stage = 0), rustc!(A => A, stage = 1),] + ); + } + + #[test] + fn test_with_no_doc_stage0() { + let mut config = configure(&["A"], &["A"]); + config.stage = 0; + config.paths = vec!["library/std".into()]; + config.cmd = Subcommand::Test { + test_args: vec![], + rustc_args: vec![], + no_fail_fast: false, + no_doc: true, + doc: false, + bless: false, + force_rerun: false, + compare_mode: None, + rustfix_coverage: false, + pass: None, + run: None, + only_modified: 
false, + skip: vec![], + extra_checks: None, + }; + + let build = Build::new(config); + let mut builder = Builder::new(&build); + + let host = TargetSelection::from_user("A"); + + builder.run_step_descriptions( + &[StepDescription::from::(Kind::Test)], + &["library/std".into()], + ); + + // Ensure we don't build any compiler artifacts. + assert!(!builder.cache.contains::()); + assert_eq!( + first(builder.cache.all::()), + &[test::Crate { + compiler: Compiler { host, stage: 0 }, + target: host, + mode: Mode::Std, + crates: vec![INTERNER.intern_str("std")], + },] + ); + } + + #[test] + fn doc_ci() { + let mut config = configure(&["A"], &["A"]); + config.compiler_docs = true; + config.cmd = Subcommand::Doc { open: false, json: false }; + let build = Build::new(config); + let mut builder = Builder::new(&build); + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), &[]); + let a = TargetSelection::from_user("A"); + + // error_index_generator uses stage 1 to share rustdoc artifacts with the + // rustdoc tool. + assert_eq!( + first(builder.cache.all::()), + &[doc::ErrorIndex { target: a },] + ); + assert_eq!( + first(builder.cache.all::()), + &[tool::ErrorIndex { compiler: Compiler { host: a, stage: 1 } }] + ); + // This is actually stage 1, but Rustdoc::run swaps out the compiler with + // stage minus 1 if --stage is not 0. Very confusing! + assert_eq!( + first(builder.cache.all::()), + &[tool::Rustdoc { compiler: Compiler { host: a, stage: 2 } },] + ); + } + + #[test] + fn test_docs() { + // Behavior of `x.py test` doing various documentation tests. + let mut config = configure(&["A"], &["A"]); + config.cmd = Subcommand::Test { + test_args: vec![], + rustc_args: vec![], + no_fail_fast: false, + doc: true, + no_doc: false, + skip: vec![], + bless: false, + force_rerun: false, + compare_mode: None, + rustfix_coverage: false, + pass: None, + run: None, + only_modified: false, + extra_checks: None, + }; + // Make sure rustfmt binary not being found isn't an error. + config.channel = "beta".to_string(); + let build = Build::new(config); + let mut builder = Builder::new(&build); + + builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Test), &[]); + let a = TargetSelection::from_user("A"); + + // error_index_generator uses stage 1 to share rustdoc artifacts with the + // rustdoc tool. + assert_eq!( + first(builder.cache.all::()), + &[doc::ErrorIndex { target: a },] + ); + assert_eq!( + first(builder.cache.all::()), + &[tool::ErrorIndex { compiler: Compiler { host: a, stage: 1 } }] + ); + // Unfortunately rustdoc is built twice. Once from stage1 for compiletest + // (and other things), and once from stage0 for std crates. Ideally it + // would only be built once. If someone wants to fix this, it might be + // worth investigating if it would be possible to test std from stage1. + // Note that the stages here are +1 than what they actually are because + // Rustdoc::run swaps out the compiler with stage minus 1 if --stage is + // not 0. + // + // The stage 0 copy is the one downloaded for bootstrapping. It is + // (currently) needed to run "cargo test" on the linkchecker, and + // should be relatively "free". 
+ assert_eq!( + first(builder.cache.all::()), + &[ + tool::Rustdoc { compiler: Compiler { host: a, stage: 0 } }, + tool::Rustdoc { compiler: Compiler { host: a, stage: 1 } }, + tool::Rustdoc { compiler: Compiler { host: a, stage: 2 } }, + ] + ); + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/config.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/config.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/config.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/config.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,219 @@ +use crate::core::config::TomlConfig; +use super::{Config, Flags}; + +use clap::CommandFactory; +use serde::Deserialize; +use std::{ + env, + fs::{remove_file, File}, + io::Write, + path::Path, +}; + +fn parse(config: &str) -> Config { + Config::parse_inner(&["check".to_owned(), "--config=/does/not/exist".to_owned()], |&_| { + toml::from_str(config).unwrap() + }) +} + +#[test] +fn download_ci_llvm() { + if crate::core::build_steps::llvm::is_ci_llvm_modified(&parse("")) { + eprintln!("Detected LLVM as non-available: running in CI and modified LLVM in this change"); + return; + } + + let parse_llvm = |s| parse(s).llvm_from_ci; + let if_available = parse_llvm("llvm.download-ci-llvm = \"if-available\""); + + assert!(parse_llvm("llvm.download-ci-llvm = true")); + assert!(!parse_llvm("llvm.download-ci-llvm = false")); + assert_eq!(parse_llvm(""), if_available); + assert_eq!(parse_llvm("rust.channel = \"dev\""), if_available); + assert!(!parse_llvm("rust.channel = \"stable\"")); + assert!(parse_llvm("build.build = \"x86_64-unknown-linux-gnu\"")); + assert!(parse_llvm( + "llvm.assertions = true \r\n build.build = \"x86_64-unknown-linux-gnu\" \r\n llvm.download-ci-llvm = \"if-available\"" + )); + assert!(!parse_llvm( + "llvm.assertions = true \r\n build.build = \"aarch64-apple-darwin\" \r\n llvm.download-ci-llvm = \"if-available\"" + )); +} + +// FIXME(onur-ozkan): extend scope of the test +// refs: +// - https://github.com/rust-lang/rust/issues/109120 +// - https://github.com/rust-lang/rust/pull/109162#issuecomment-1496782487 +#[test] +fn detect_src_and_out() { + fn test(cfg: Config, build_dir: Option<&str>) { + // This will bring absolute form of `src/bootstrap` path + let current_dir = std::env::current_dir().unwrap(); + + // get `src` by moving into project root path + let expected_src = current_dir.ancestors().nth(2).unwrap(); + assert_eq!(&cfg.src, expected_src); + + // Sanity check for `src` + let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); + let expected_src = manifest_dir.ancestors().nth(2).unwrap(); + assert_eq!(&cfg.src, expected_src); + + // test if build-dir was manually given in config.toml + if let Some(custom_build_dir) = build_dir { + assert_eq!(&cfg.out, Path::new(custom_build_dir)); + } + // test the native bootstrap way + else { + // This should bring output path of bootstrap in absolute form + let cargo_target_dir = env::var_os("CARGO_TARGET_DIR").expect( + "CARGO_TARGET_DIR must been provided for the test environment from bootstrap", + ); + + // Move to `build` from `build/bootstrap` + let expected_out = Path::new(&cargo_target_dir).parent().unwrap(); + assert_eq!(&cfg.out, expected_out); + + let args: Vec = env::args().collect(); + + // Another test for `out` as a sanity check + // + // This will bring something similar to: + // `{build-dir}/bootstrap/debug/deps/bootstrap-c7ee91d5661e2804` + // `{build-dir}` can be anywhere, not just in the rust project 
directory. + let dep = Path::new(args.first().unwrap()); + let expected_out = dep.ancestors().nth(4).unwrap(); + + assert_eq!(&cfg.out, expected_out); + } + } + + test(parse(""), None); + + { + let build_dir = if cfg!(windows) { Some("C:\\tmp") } else { Some("/tmp") }; + test(parse("build.build-dir = \"/tmp\""), build_dir); + } +} + +#[test] +fn clap_verify() { + Flags::command().debug_assert(); +} + +#[test] +fn override_toml() { + let config = Config::parse_inner( + &[ + "check".to_owned(), + "--config=/does/not/exist".to_owned(), + "--set=change-id=1".to_owned(), + "--set=rust.lto=fat".to_owned(), + "--set=rust.deny-warnings=false".to_owned(), + "--set=build.gdb=\"bar\"".to_owned(), + "--set=build.tools=[\"cargo\"]".to_owned(), + "--set=llvm.build-config={\"foo\" = \"bar\"}".to_owned(), + ], + |&_| { + toml::from_str( + r#" +change-id = 0 +[rust] +lto = "off" +deny-warnings = true + +[build] +gdb = "foo" +tools = [] + +[llvm] +download-ci-llvm = false +build-config = {} + "#, + ) + .unwrap() + }, + ); + assert_eq!(config.change_id, Some(1), "setting top-level value"); + assert_eq!( + config.rust_lto, + crate::core::config::RustcLto::Fat, + "setting string value without quotes" + ); + assert_eq!(config.gdb, Some("bar".into()), "setting string value with quotes"); + assert!(!config.deny_warnings, "setting boolean value"); + assert_eq!( + config.tools, + Some(["cargo".to_string()].into_iter().collect()), + "setting list value" + ); + assert_eq!( + config.llvm_build_config, + [("foo".to_string(), "bar".to_string())].into_iter().collect(), + "setting dictionary value" + ); +} + +#[test] +#[should_panic] +fn override_toml_duplicate() { + Config::parse_inner( + &[ + "check".to_owned(), + "--config=/does/not/exist".to_owned(), + "--set=change-id=1".to_owned(), + "--set=change-id=2".to_owned(), + ], + |&_| toml::from_str("change-id = 0").unwrap(), + ); +} + +#[test] +fn profile_user_dist() { + fn get_toml(file: &Path) -> TomlConfig { + let contents = if file.ends_with("config.toml") { + "profile = \"user\"".to_owned() + } else { + assert!(file.ends_with("config.dist.toml")); + std::fs::read_to_string(file).unwrap() + }; + toml::from_str(&contents) + .and_then(|table: toml::Value| TomlConfig::deserialize(table)) + .unwrap() + } + Config::parse_inner(&["check".to_owned()], get_toml); +} + +#[test] +fn rust_optimize() { + assert!(parse("").rust_optimize.is_release()); + assert!(!parse("rust.optimize = false").rust_optimize.is_release()); + assert!(parse("rust.optimize = true").rust_optimize.is_release()); + assert!(!parse("rust.optimize = 0").rust_optimize.is_release()); + assert!(parse("rust.optimize = 1").rust_optimize.is_release()); + assert!(parse("rust.optimize = \"s\"").rust_optimize.is_release()); + assert_eq!(parse("rust.optimize = 1").rust_optimize.get_opt_level(), Some("1".to_string())); + assert_eq!(parse("rust.optimize = \"s\"").rust_optimize.get_opt_level(), Some("s".to_string())); +} + +#[test] +#[should_panic] +fn invalid_rust_optimize() { + parse("rust.optimize = \"a\""); +} + +#[test] +fn verify_file_integrity() { + let config = parse(""); + + let tempfile = config.tempdir().join(".tmp-test-file"); + File::create(&tempfile).unwrap().write_all(b"dummy value").unwrap(); + assert!(tempfile.exists()); + + assert!( + config + .verify(&tempfile, "7e255dd9542648a8779268a0f268b891a198e9828e860ed23f826440e786eae5") + ); + + remove_file(tempfile).unwrap(); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/setup.rs 
rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/setup.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/setup.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/tests/setup.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,14 @@ +use super::{RUST_ANALYZER_SETTINGS, SETTINGS_HASHES}; +use sha2::Digest; + +#[test] +fn check_matching_settings_hash() { + let mut hasher = sha2::Sha256::new(); + hasher.update(&RUST_ANALYZER_SETTINGS); + let hash = hex::encode(hasher.finalize().as_slice()); + assert_eq!( + &hash, + SETTINGS_HASHES.last().unwrap(), + "Update `SETTINGS_HASHES` with the new hash of `src/etc/rust_analyzer_settings.json`" + ); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/bin_helpers.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/bin_helpers.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/bin_helpers.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/bin_helpers.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,28 @@ +//! This file is meant to be included directly from bootstrap shims to avoid a +//! dependency on the bootstrap library. This reduces the binary size and +//! improves compilation time by reducing the linking time. + +/// Parses the value of the "RUSTC_VERBOSE" environment variable and returns it as a `usize`. +/// If it was not defined, returns 0 by default. +/// +/// Panics if "RUSTC_VERBOSE" is defined with the value that is not an unsigned integer. +pub(crate) fn parse_rustc_verbose() -> usize { + use std::str::FromStr; + + match std::env::var("RUSTC_VERBOSE") { + Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"), + Err(_) => 0, + } +} + +/// Parses the value of the "RUSTC_STAGE" environment variable and returns it as a `String`. +/// +/// If "RUSTC_STAGE" was not set, the program will be terminated with 101. +pub(crate) fn parse_rustc_stage() -> String { + std::env::var("RUSTC_STAGE").unwrap_or_else(|_| { + // Don't panic here; it's reasonable to try and run these shims directly. Give a helpful error instead. 
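// In other words, running a shim by hand without bootstrap's environment,
// e.g. with RUSTC_VERBOSE=1 but RUSTC_STAGE unset (a hypothetical invocation),
// prints the two hints below and exits with status 101 rather than panicking.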
+ eprintln!("rustc shim: FATAL: RUSTC_STAGE was not set"); + eprintln!("rustc shim: NOTE: use `x.py build -vvv` to see all environment variables set by bootstrap"); + std::process::exit(101); + }) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/cache.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/cache.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/cache.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/cache.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,272 @@ +use std::any::{Any, TypeId}; +use std::borrow::Borrow; +use std::cell::RefCell; +use std::cmp::Ordering; +use std::collections::HashMap; +use std::fmt; +use std::hash::{Hash, Hasher}; +use std::marker::PhantomData; +use std::mem; +use std::ops::Deref; +use std::path::PathBuf; +use std::sync::Mutex; + +// FIXME: replace with std::lazy after it gets stabilized and reaches beta +use once_cell::sync::Lazy; + +use crate::core::builder::Step; + +pub struct Interned(usize, PhantomData<*const T>); + +impl Default for Interned { + fn default() -> Self { + T::default().intern() + } +} + +impl Copy for Interned {} +impl Clone for Interned { + fn clone(&self) -> Interned { + *self + } +} + +impl PartialEq for Interned { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + } +} +impl Eq for Interned {} + +impl PartialEq for Interned { + fn eq(&self, other: &str) -> bool { + *self == other + } +} +impl<'a> PartialEq<&'a str> for Interned { + fn eq(&self, other: &&str) -> bool { + **self == **other + } +} +impl<'a, T> PartialEq<&'a Interned> for Interned { + fn eq(&self, other: &&Self) -> bool { + self.0 == other.0 + } +} +impl<'a, T> PartialEq> for &'a Interned { + fn eq(&self, other: &Interned) -> bool { + self.0 == other.0 + } +} + +unsafe impl Send for Interned {} +unsafe impl Sync for Interned {} + +impl fmt::Display for Interned { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let s: &str = &*self; + f.write_str(s) + } +} + +impl fmt::Debug for Interned +where + Self: Deref, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let s: &U = &*self; + f.write_fmt(format_args!("{s:?}")) + } +} + +impl Hash for Interned { + fn hash(&self, state: &mut H) { + let l = T::intern_cache().lock().unwrap(); + l.get(*self).hash(state) + } +} + +impl Deref for Interned { + type Target = T::Target; + fn deref(&self) -> &Self::Target { + let l = T::intern_cache().lock().unwrap(); + unsafe { mem::transmute::<&Self::Target, &Self::Target>(l.get(*self)) } + } +} + +impl, U: ?Sized> AsRef for Interned { + fn as_ref(&self) -> &U { + let l = T::intern_cache().lock().unwrap(); + unsafe { mem::transmute::<&U, &U>(l.get(*self).as_ref()) } + } +} + +impl PartialOrd for Interned { + fn partial_cmp(&self, other: &Self) -> Option { + let l = T::intern_cache().lock().unwrap(); + l.get(*self).partial_cmp(l.get(*other)) + } +} + +impl Ord for Interned { + fn cmp(&self, other: &Self) -> Ordering { + let l = T::intern_cache().lock().unwrap(); + l.get(*self).cmp(l.get(*other)) + } +} + +struct TyIntern { + items: Vec, + set: HashMap>, +} + +impl Default for TyIntern { + fn default() -> Self { + TyIntern { items: Vec::new(), set: Default::default() } + } +} + +impl TyIntern { + fn intern_borrow(&mut self, item: &B) -> Interned + where + B: Eq + Hash + ToOwned + ?Sized, + T: Borrow, + { + if let Some(i) = self.set.get(&item) { + return *i; + } + let item = item.to_owned(); + let interned = Interned(self.items.len(), PhantomData::<*const T>); 
+ self.set.insert(item.clone(), interned); + self.items.push(item); + interned + } + + fn intern(&mut self, item: T) -> Interned { + if let Some(i) = self.set.get(&item) { + return *i; + } + let interned = Interned(self.items.len(), PhantomData::<*const T>); + self.set.insert(item.clone(), interned); + self.items.push(item); + interned + } + + fn get(&self, i: Interned) -> &T { + &self.items[i.0] + } +} + +#[derive(Default)] +pub struct Interner { + strs: Mutex>, + paths: Mutex>, + lists: Mutex>>, +} + +trait Internable: Clone + Eq + Hash + 'static { + fn intern_cache() -> &'static Mutex>; + + fn intern(self) -> Interned { + Self::intern_cache().lock().unwrap().intern(self) + } +} + +impl Internable for String { + fn intern_cache() -> &'static Mutex> { + &INTERNER.strs + } +} + +impl Internable for PathBuf { + fn intern_cache() -> &'static Mutex> { + &INTERNER.paths + } +} + +impl Internable for Vec { + fn intern_cache() -> &'static Mutex> { + &INTERNER.lists + } +} + +impl Interner { + pub fn intern_str(&self, s: &str) -> Interned { + self.strs.lock().unwrap().intern_borrow(s) + } + pub fn intern_string(&self, s: String) -> Interned { + self.strs.lock().unwrap().intern(s) + } + + pub fn intern_path(&self, s: PathBuf) -> Interned { + self.paths.lock().unwrap().intern(s) + } + + pub fn intern_list(&self, v: Vec) -> Interned> { + self.lists.lock().unwrap().intern(v) + } +} + +pub static INTERNER: Lazy = Lazy::new(Interner::default); + +/// This is essentially a `HashMap` which allows storing any type in its input and +/// any type in its output. It is a write-once cache; values are never evicted, +/// which means that references to the value can safely be returned from the +/// `get()` method. +#[derive(Debug)] +pub struct Cache( + RefCell< + HashMap< + TypeId, + Box, // actually a HashMap> + >, + >, +); + +impl Cache { + pub fn new() -> Cache { + Cache(RefCell::new(HashMap::new())) + } + + pub fn put(&self, step: S, value: S::Output) { + let mut cache = self.0.borrow_mut(); + let type_id = TypeId::of::(); + let stepcache = cache + .entry(type_id) + .or_insert_with(|| Box::new(HashMap::::new())) + .downcast_mut::>() + .expect("invalid type mapped"); + assert!(!stepcache.contains_key(&step), "processing {step:?} a second time"); + stepcache.insert(step, value); + } + + pub fn get(&self, step: &S) -> Option { + let mut cache = self.0.borrow_mut(); + let type_id = TypeId::of::(); + let stepcache = cache + .entry(type_id) + .or_insert_with(|| Box::new(HashMap::::new())) + .downcast_mut::>() + .expect("invalid type mapped"); + stepcache.get(step).cloned() + } +} + +#[cfg(test)] +impl Cache { + pub fn all(&mut self) -> Vec<(S, S::Output)> { + let cache = self.0.get_mut(); + let type_id = TypeId::of::(); + let mut v = cache + .remove(&type_id) + .map(|b| b.downcast::>().expect("correct type")) + .map(|m| m.into_iter().collect::>()) + .unwrap_or_default(); + v.sort_by_key(|(s, _)| s.clone()); + v + } + + pub fn contains(&self) -> bool { + self.0.borrow().contains_key(&TypeId::of::()) + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/cc_detect.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/cc_detect.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/cc_detect.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/cc_detect.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,288 @@ +//! C-compiler probing and detection. +//! +//! This module will fill out the `cc` and `cxx` maps of `Build` by looking for +//! 
C and C++ compilers for each target configured. A compiler is found through +//! a number of vectors (in order of precedence) +//! +//! 1. Configuration via `target.$target.cc` in `config.toml`. +//! 2. Configuration via `target.$target.android-ndk` in `config.toml`, if +//! applicable +//! 3. Special logic to probe on OpenBSD +//! 4. The `CC_$target` environment variable. +//! 5. The `CC` environment variable. +//! 6. "cc" +//! +//! Some of this logic is implemented here, but much of it is farmed out to the +//! `cc` crate itself, so we end up having the same fallbacks as there. +//! Similar logic is then used to find a C++ compiler, just some s/cc/c++/ is +//! used. +//! +//! It is intended that after this module has run no C/C++ compiler will +//! ever be probed for. Instead the compilers found here will be used for +//! everything. + +use std::collections::HashSet; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::{env, iter}; + +use crate::core::config::TargetSelection; +use crate::utils::helpers::output; +use crate::{Build, CLang, GitRepo}; + +// The `cc` crate doesn't provide a way to obtain a path to the detected archiver, +// so use some simplified logic here. First we respect the environment variable `AR`, then +// try to infer the archiver path from the C compiler path. +// In the future this logic should be replaced by calling into the `cc` crate. +fn cc2ar(cc: &Path, target: TargetSelection) -> Option { + if let Some(ar) = env::var_os(format!("AR_{}", target.triple.replace("-", "_"))) { + Some(PathBuf::from(ar)) + } else if let Some(ar) = env::var_os("AR") { + Some(PathBuf::from(ar)) + } else if target.contains("msvc") { + None + } else if target.contains("musl") { + Some(PathBuf::from("ar")) + } else if target.contains("openbsd") { + Some(PathBuf::from("ar")) + } else if target.contains("vxworks") { + Some(PathBuf::from("wr-ar")) + } else if target.contains("android") { + Some(cc.parent().unwrap().join(PathBuf::from("llvm-ar"))) + } else { + let parent = cc.parent().unwrap(); + let file = cc.file_name().unwrap().to_str().unwrap(); + for suffix in &["gcc", "cc", "clang"] { + if let Some(idx) = file.rfind(suffix) { + let mut file = file[..idx].to_owned(); + file.push_str("ar"); + return Some(parent.join(&file)); + } + } + Some(parent.join(file)) + } +} + +fn new_cc_build(build: &Build, target: TargetSelection) -> cc::Build { + let mut cfg = cc::Build::new(); + cfg.cargo_metadata(false) + .opt_level(2) + .warnings(false) + .debug(false) + // Compress debuginfo + .flag_if_supported("-gz") + .target(&target.triple) + .host(&build.build.triple); + match build.crt_static(target) { + Some(a) => { + cfg.static_crt(a); + } + None => { + if target.contains("msvc") { + cfg.static_crt(true); + } + if target.contains("musl") { + cfg.static_flag(true); + } + } + } + cfg +} + +pub fn find(build: &Build) { + // For all targets we're going to need a C compiler for building some shims + // and such as well as for being a linker for Rust code. 
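
When neither `config.toml` nor the Android NDK settings name a compiler, the probing falls through to environment variables and finally to plain `cc`, as the module comment above lists. A rough sketch of that tail of the precedence chain, using only the standard library (illustrative; in the real build most of this is delegated to the `cc` crate):

    use std::env;
    use std::path::PathBuf;

    // Prefer CC_<triple> (dashes mapped to underscores, as done for AR above),
    // then the generic CC variable, then fall back to "cc" on PATH.
    fn fallback_cc(triple: &str) -> PathBuf {
        env::var_os(format!("CC_{}", triple.replace('-', "_")))
            .or_else(|| env::var_os("CC"))
            .map(PathBuf::from)
            .unwrap_or_else(|| PathBuf::from("cc"))
    }
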
+ let targets = build + .targets + .iter() + .chain(&build.hosts) + .cloned() + .chain(iter::once(build.build)) + .collect::>(); + for target in targets.into_iter() { + find_target(build, target); + } +} + +pub fn find_target(build: &Build, target: TargetSelection) { + let mut cfg = new_cc_build(build, target); + let config = build.config.target_config.get(&target); + if let Some(cc) = config + .and_then(|c| c.cc.clone()) + .or_else(|| default_compiler(&mut cfg, Language::C, target, build)) + { + cfg.compiler(cc); + } + + let compiler = cfg.get_compiler(); + let ar = if let ar @ Some(..) = config.and_then(|c| c.ar.clone()) { + ar + } else { + cc2ar(compiler.path(), target) + }; + + build.cc.borrow_mut().insert(target, compiler.clone()); + let cflags = build.cflags(target, GitRepo::Rustc, CLang::C); + + // If we use llvm-libunwind, we will need a C++ compiler as well for all targets + // We'll need one anyways if the target triple is also a host triple + let mut cfg = new_cc_build(build, target); + cfg.cpp(true); + let cxx_configured = if let Some(cxx) = config + .and_then(|c| c.cxx.clone()) + .or_else(|| default_compiler(&mut cfg, Language::CPlusPlus, target, build)) + { + cfg.compiler(cxx); + true + } else { + // Use an auto-detected compiler (or one configured via `CXX_target_triple` env vars). + cfg.try_get_compiler().is_ok() + }; + + // for VxWorks, record CXX compiler which will be used in lib.rs:linker() + if cxx_configured || target.contains("vxworks") { + let compiler = cfg.get_compiler(); + build.cxx.borrow_mut().insert(target, compiler); + } + + build.verbose(&format!("CC_{} = {:?}", &target.triple, build.cc(target))); + build.verbose(&format!("CFLAGS_{} = {:?}", &target.triple, cflags)); + if let Ok(cxx) = build.cxx(target) { + let cxxflags = build.cflags(target, GitRepo::Rustc, CLang::Cxx); + build.verbose(&format!("CXX_{} = {:?}", &target.triple, cxx)); + build.verbose(&format!("CXXFLAGS_{} = {:?}", &target.triple, cxxflags)); + } + if let Some(ar) = ar { + build.verbose(&format!("AR_{} = {:?}", &target.triple, ar)); + build.ar.borrow_mut().insert(target, ar); + } + + if let Some(ranlib) = config.and_then(|c| c.ranlib.clone()) { + build.ranlib.borrow_mut().insert(target, ranlib); + } +} + +fn default_compiler( + cfg: &mut cc::Build, + compiler: Language, + target: TargetSelection, + build: &Build, +) -> Option { + match &*target.triple { + // When compiling for android we may have the NDK configured in the + // config.toml in which case we look there. Otherwise the default + // compiler already takes into account the triple in question. + t if t.contains("android") => build + .config + .android_ndk + .as_ref() + .map(|ndk| ndk_compiler(compiler, &*target.triple, ndk)), + + // The default gcc version from OpenBSD may be too old, try using egcc, + // which is a gcc version from ports, if this is the case. 
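
For cross toolchains the `cc2ar` helper earlier in this file guesses the archiver by rewriting the compiler's file name, replacing a trailing `gcc`, `cc` or `clang` with `ar`. A small worked example of that rewrite:

    // A prefixed GCC is expected to ship a matching prefixed binutils archiver.
    let cc = "aarch64-linux-gnu-gcc";
    let ar = cc.rfind("gcc").map(|i| format!("{}ar", &cc[..i]));
    assert_eq!(ar.as_deref(), Some("aarch64-linux-gnu-ar"));
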
+ t if t.contains("openbsd") => { + let c = cfg.get_compiler(); + let gnu_compiler = compiler.gcc(); + if !c.path().ends_with(gnu_compiler) { + return None; + } + + let output = output(c.to_command().arg("--version")); + let i = output.find(" 4.")?; + match output[i + 3..].chars().next().unwrap() { + '0'..='6' => {} + _ => return None, + } + let alternative = format!("e{gnu_compiler}"); + if Command::new(&alternative).output().is_ok() { + Some(PathBuf::from(alternative)) + } else { + None + } + } + + "mips-unknown-linux-musl" if compiler == Language::C => { + if cfg.get_compiler().path().to_str() == Some("gcc") { + Some(PathBuf::from("mips-linux-musl-gcc")) + } else { + None + } + } + "mipsel-unknown-linux-musl" if compiler == Language::C => { + if cfg.get_compiler().path().to_str() == Some("gcc") { + Some(PathBuf::from("mipsel-linux-musl-gcc")) + } else { + None + } + } + + t if t.contains("musl") && compiler == Language::C => { + if let Some(root) = build.musl_root(target) { + let guess = root.join("bin/musl-gcc"); + if guess.exists() { Some(guess) } else { None } + } else { + None + } + } + + _ => None, + } +} + +pub(crate) fn ndk_compiler(compiler: Language, triple: &str, ndk: &Path) -> PathBuf { + let mut triple_iter = triple.split("-"); + let triple_translated = if let Some(arch) = triple_iter.next() { + let arch_new = match arch { + "arm" | "armv7" | "armv7neon" | "thumbv7" | "thumbv7neon" => "armv7a", + other => other, + }; + std::iter::once(arch_new).chain(triple_iter).collect::>().join("-") + } else { + triple.to_string() + }; + + // API 19 is the earliest API level supported by NDK r25b but AArch64 and x86_64 support + // begins at API level 21. + let api_level = + if triple.contains("aarch64") || triple.contains("x86_64") { "21" } else { "19" }; + let compiler = format!("{}{}-{}", triple_translated, api_level, compiler.clang()); + let host_tag = if cfg!(target_os = "macos") { + // The NDK uses universal binaries, so this is correct even on ARM. + "darwin-x86_64" + } else if cfg!(target_os = "windows") { + "windows-x86_64" + } else { + // NDK r25b only has official releases for macOS, Windows and Linux. + // Try the Linux directory everywhere else, on the assumption that the OS has an + // emulation layer that can cope (e.g. BSDs). + "linux-x86_64" + }; + ndk.join("toolchains").join("llvm").join("prebuilt").join(host_tag).join("bin").join(compiler) +} + +/// The target programming language for a native compiler. +#[derive(PartialEq)] +pub(crate) enum Language { + /// The compiler is targeting C. + C, + /// The compiler is targeting C++. + CPlusPlus, +} + +impl Language { + /// Obtains the name of a compiler in the GCC collection. + fn gcc(self) -> &'static str { + match self { + Language::C => "gcc", + Language::CPlusPlus => "g++", + } + } + + /// Obtains the name of a compiler in the clang suite. + fn clang(self) -> &'static str { + match self { + Language::C => "clang", + Language::CPlusPlus => "clang++", + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/channel.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/channel.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/channel.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/channel.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,159 @@ +//! Build configuration for Rust's release channels. +//! +//! Implements the stable/beta/nightly channel distinctions by setting various +//! 
flags like the `unstable_features`, calculating variables like `release` and +//! `package_vers`, and otherwise indicating to the compiler what it should +//! print out as part of its version information. + +use std::fs; +use std::path::Path; +use std::process::Command; + +use crate::utils::helpers::{output, t}; +use crate::Build; + +#[derive(Clone, Default)] +pub enum GitInfo { + /// This is not a git repository. + #[default] + Absent, + /// This is a git repository. + /// If the info should be used (`omit_git_hash` is false), this will be + /// `Some`, otherwise it will be `None`. + Present(Option), + /// This is not a git repository, but the info can be fetched from the + /// `git-commit-info` file. + RecordedForTarball(Info), +} + +#[derive(Clone)] +pub struct Info { + pub commit_date: String, + pub sha: String, + pub short_sha: String, +} + +impl GitInfo { + pub fn new(omit_git_hash: bool, dir: &Path) -> GitInfo { + // See if this even begins to look like a git dir + if !dir.join(".git").exists() { + match read_commit_info_file(dir) { + Some(info) => return GitInfo::RecordedForTarball(info), + None => return GitInfo::Absent, + } + } + + // Make sure git commands work + match Command::new("git").arg("rev-parse").current_dir(dir).output() { + Ok(ref out) if out.status.success() => {} + _ => return GitInfo::Absent, + } + + // If we're ignoring the git info, we don't actually need to collect it, just make sure this + // was a git repo in the first place. + if omit_git_hash { + return GitInfo::Present(None); + } + + // Ok, let's scrape some info + let ver_date = output( + Command::new("git") + .current_dir(dir) + .arg("log") + .arg("-1") + .arg("--date=short") + .arg("--pretty=format:%cd"), + ); + let ver_hash = output(Command::new("git").current_dir(dir).arg("rev-parse").arg("HEAD")); + let short_ver_hash = output( + Command::new("git").current_dir(dir).arg("rev-parse").arg("--short=9").arg("HEAD"), + ); + GitInfo::Present(Some(Info { + commit_date: ver_date.trim().to_string(), + sha: ver_hash.trim().to_string(), + short_sha: short_ver_hash.trim().to_string(), + })) + } + + pub fn info(&self) -> Option<&Info> { + match self { + GitInfo::Absent => None, + GitInfo::Present(info) => info.as_ref(), + GitInfo::RecordedForTarball(info) => Some(info), + } + } + + pub fn sha(&self) -> Option<&str> { + self.info().map(|s| &s.sha[..]) + } + + pub fn sha_short(&self) -> Option<&str> { + self.info().map(|s| &s.short_sha[..]) + } + + pub fn commit_date(&self) -> Option<&str> { + self.info().map(|s| &s.commit_date[..]) + } + + pub fn version(&self, build: &Build, num: &str) -> String { + let mut version = build.release(num); + if let Some(ref inner) = self.info() { + version.push_str(" ("); + version.push_str(&inner.short_sha); + version.push(' '); + version.push_str(&inner.commit_date); + version.push(')'); + } + version + } + + /// Returns whether this directory has a `.git` directory which should be managed by bootstrap. + pub fn is_managed_git_subrepository(&self) -> bool { + match self { + GitInfo::Absent | GitInfo::RecordedForTarball(_) => false, + GitInfo::Present(_) => true, + } + } + + /// Returns whether this is being built from a tarball. + pub fn is_from_tarball(&self) -> bool { + match self { + GitInfo::Absent | GitInfo::Present(_) => false, + GitInfo::RecordedForTarball(_) => true, + } + } +} + +/// Read the commit information from the `git-commit-info` file given the +/// project root. 
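
The file read here is the one produced by `write_commit_info_file` below: three lines holding the full commit hash, the 9-character short hash, and the commit date. A hypothetical example of its contents and how the fields line up:

    // Hypothetical src/git-commit-info contents for a tarball build.
    let contents = "0b8a61c2a59dc32eb4c5c5e2d23f1799b0e5d7f6\n0b8a61c2a\n2023-12-21\n";
    let mut lines = contents.lines();
    let (sha, short_sha, commit_date) = (lines.next(), lines.next(), lines.next());
    assert_eq!(sha.map(str::len), Some(40));
    assert_eq!(short_sha, Some("0b8a61c2a"));
    assert_eq!(commit_date, Some("2023-12-21"));
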
+pub fn read_commit_info_file(root: &Path) -> Option { + if let Ok(contents) = fs::read_to_string(root.join("git-commit-info")) { + let mut lines = contents.lines(); + let sha = lines.next(); + let short_sha = lines.next(); + let commit_date = lines.next(); + let info = match (commit_date, sha, short_sha) { + (Some(commit_date), Some(sha), Some(short_sha)) => Info { + commit_date: commit_date.to_owned(), + sha: sha.to_owned(), + short_sha: short_sha.to_owned(), + }, + _ => panic!("the `git-commit-info` file is malformed"), + }; + Some(info) + } else { + None + } +} + +/// Write the commit information to the `git-commit-info` file given the project +/// root. +pub fn write_commit_info_file(root: &Path, info: &Info) { + let commit_info = format!("{}\n{}\n{}\n", info.sha, info.short_sha, info.commit_date); + t!(fs::write(root.join("git-commit-info"), &commit_info)); +} + +/// Write the commit hash to the `git-commit-hash` file given the project root. +pub fn write_commit_hash_file(root: &Path, sha: &str) { + t!(fs::write(root.join("git-commit-hash"), sha)); +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/dylib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/dylib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/dylib.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/dylib.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,27 @@ +//! Various utilities for working with dylib paths. + +/// Returns the environment variable which the dynamic library lookup path +/// resides in for this platform. +pub fn dylib_path_var() -> &'static str { + if cfg!(target_os = "windows") { + "PATH" + } else if cfg!(target_os = "macos") { + "DYLD_LIBRARY_PATH" + } else if cfg!(target_os = "haiku") { + "LIBRARY_PATH" + } else if cfg!(target_os = "aix") { + "LIBPATH" + } else { + "LD_LIBRARY_PATH" + } +} + +/// Parses the `dylib_path_var()` environment variable, returning a list of +/// paths that are members of this lookup path. +pub fn dylib_path() -> Vec { + let var = match std::env::var_os(dylib_path_var()) { + Some(v) => v, + None => return vec![], + }; + std::env::split_paths(&var).collect() +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/exec.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/exec.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/exec.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/exec.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,60 @@ +use std::process::Command; + +/// What should be done when the command fails. +#[derive(Debug, Copy, Clone)] +pub enum BehaviorOnFailure { + /// Immediately stop bootstrap. + Exit, + /// Delay failure until the end of bootstrap invocation. + DelayFail, + /// Ignore the failure, the command can fail in an expected way. + Ignore, +} + +/// How should the output of the command be handled. +#[derive(Debug, Copy, Clone)] +pub enum OutputMode { + /// Print both the output (by inheriting stdout/stderr) and also the command itself, if it + /// fails. + PrintAll, + /// Print the output (by inheriting stdout/stderr). + PrintOutput, + /// Suppress the output if the command succeeds, otherwise print the output. + SuppressOnSuccess, +} + +/// Wrapper around `std::process::Command`. 
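
`dylib.rs` above only answers two questions: which environment variable holds the dynamic-library search path on this platform, and what it currently contains. Callers then prepend their own directories and hand the result to a child process, roughly like this (sketch with std only; the real helper is `add_dylib_path` in helpers.rs):

    use std::env;
    use std::path::PathBuf;
    use std::process::Command;

    // Prepend an extra library directory to whatever search path is already
    // set, then pass the joined value to the child via the platform variable.
    fn with_extra_libdir(var: &str, extra: PathBuf, mut cmd: Command) -> Command {
        let mut paths: Vec<PathBuf> = env::var_os(var)
            .map(|v| env::split_paths(&v).collect())
            .unwrap_or_default();
        paths.insert(0, extra);
        cmd.env(var, env::join_paths(paths).expect("a path contained the separator"));
        cmd
    }
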
+#[derive(Debug)] +pub struct BootstrapCommand<'a> { + pub command: &'a mut Command, + pub failure_behavior: BehaviorOnFailure, + pub output_mode: OutputMode, +} + +impl<'a> BootstrapCommand<'a> { + pub fn delay_failure(self) -> Self { + Self { failure_behavior: BehaviorOnFailure::DelayFail, ..self } + } + + pub fn fail_fast(self) -> Self { + Self { failure_behavior: BehaviorOnFailure::Exit, ..self } + } + + pub fn allow_failure(self) -> Self { + Self { failure_behavior: BehaviorOnFailure::Ignore, ..self } + } + + pub fn output_mode(self, output_mode: OutputMode) -> Self { + Self { output_mode, ..self } + } +} + +impl<'a> From<&'a mut Command> for BootstrapCommand<'a> { + fn from(command: &'a mut Command) -> Self { + Self { + command, + failure_behavior: BehaviorOnFailure::Exit, + output_mode: OutputMode::PrintAll, + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/helpers.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/helpers.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/helpers.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/helpers.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,472 @@ +//! Various utility functions used throughout rustbuild. +//! +//! Simple things like testing the various filesystem operations here and there, +//! not a lot of interesting happenings here unfortunately. + +use build_helper::util::fail; +use std::env; +use std::fs; +use std::io; +use std::path::{Path, PathBuf}; +use std::process::{Command, Stdio}; +use std::str; +use std::time::{Instant, SystemTime, UNIX_EPOCH}; + +use crate::core::builder::Builder; +use crate::core::config::{Config, TargetSelection}; +use crate::OnceCell; + +pub use crate::utils::dylib::{dylib_path, dylib_path_var}; + +/// A helper macro to `unwrap` a result except also print out details like: +/// +/// * The file/line of the panic +/// * The expression that failed +/// * The error itself +/// +/// This is currently used judiciously throughout the build system rather than +/// using a `Result` with `try!`, but this may change one day... +#[macro_export] +macro_rules! t { + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + } + }; + // it can show extra info in the second parameter + ($e:expr, $extra:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {} ({:?})", stringify!($e), e, $extra), + } + }; +} +pub use t; + +/// Given an executable called `name`, return the filename for the +/// executable for a particular target. +pub fn exe(name: &str, target: TargetSelection) -> String { + if target.contains("windows") { + format!("{name}.exe") + } else if target.contains("uefi") { + format!("{name}.efi") + } else { + name.to_string() + } +} + +/// Returns `true` if the file name given looks like a dynamic library. +pub fn is_dylib(name: &str) -> bool { + name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll") +} + +/// Returns `true` if the file name given looks like a debug info file +pub fn is_debug_info(name: &str) -> bool { + // FIXME: consider split debug info on other platforms (e.g., Linux, macOS) + name.ends_with(".pdb") +} + +/// Returns the corresponding relative library directory that the compiler's +/// dylibs will be found in. +pub fn libdir(target: TargetSelection) -> &'static str { + if target.contains("windows") { "bin" } else { "lib" } +} + +/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path. 
+/// If the dylib_path_var is already set for this cmd, the old value will be overwritten! +pub fn add_dylib_path(path: Vec, cmd: &mut Command) { + let mut list = dylib_path(); + for path in path { + list.insert(0, path); + } + cmd.env(dylib_path_var(), t!(env::join_paths(list))); +} + +/// Adds a list of lookup paths to `cmd`'s link library lookup path. +pub fn add_link_lib_path(path: Vec, cmd: &mut Command) { + let mut list = link_lib_path(); + for path in path { + list.insert(0, path); + } + cmd.env(link_lib_path_var(), t!(env::join_paths(list))); +} + +/// Returns the environment variable which the link library lookup path +/// resides in for this platform. +fn link_lib_path_var() -> &'static str { + if cfg!(target_env = "msvc") { "LIB" } else { "LIBRARY_PATH" } +} + +/// Parses the `link_lib_path_var()` environment variable, returning a list of +/// paths that are members of this lookup path. +fn link_lib_path() -> Vec { + let var = match env::var_os(link_lib_path_var()) { + Some(v) => v, + None => return vec![], + }; + env::split_paths(&var).collect() +} + +pub struct TimeIt(bool, Instant); + +/// Returns an RAII structure that prints out how long it took to drop. +pub fn timeit(builder: &Builder<'_>) -> TimeIt { + TimeIt(builder.config.dry_run(), Instant::now()) +} + +impl Drop for TimeIt { + fn drop(&mut self) { + let time = self.1.elapsed(); + if !self.0 { + println!("\tfinished in {}.{:03} seconds", time.as_secs(), time.subsec_millis()); + } + } +} + +/// Used for download caching +pub(crate) fn program_out_of_date(stamp: &Path, key: &str) -> bool { + if !stamp.exists() { + return true; + } + t!(fs::read_to_string(stamp)) != key +} + +/// Symlinks two directories, using junctions on Windows and normal symlinks on +/// Unix. +pub fn symlink_dir(config: &Config, original: &Path, link: &Path) -> io::Result<()> { + if config.dry_run() { + return Ok(()); + } + let _ = fs::remove_dir(link); + return symlink_dir_inner(original, link); + + #[cfg(not(windows))] + fn symlink_dir_inner(original: &Path, link: &Path) -> io::Result<()> { + use std::os::unix::fs; + fs::symlink(original, link) + } + + #[cfg(windows)] + fn symlink_dir_inner(target: &Path, junction: &Path) -> io::Result<()> { + junction::create(&target, &junction) + } +} + +pub fn forcing_clang_based_tests() -> bool { + if let Some(var) = env::var_os("RUSTBUILD_FORCE_CLANG_BASED_TESTS") { + match &var.to_string_lossy().to_lowercase()[..] 
{ + "1" | "yes" | "on" => true, + "0" | "no" | "off" => false, + other => { + // Let's make sure typos don't go unnoticed + panic!( + "Unrecognized option '{other}' set in \ + RUSTBUILD_FORCE_CLANG_BASED_TESTS" + ) + } + } + } else { + false + } +} + +pub fn use_host_linker(target: TargetSelection) -> bool { + // FIXME: this information should be gotten by checking the linker flavor + // of the rustc target + !(target.contains("emscripten") + || target.contains("wasm32") + || target.contains("nvptx") + || target.contains("fortanix") + || target.contains("fuchsia") + || target.contains("bpf") + || target.contains("switch")) +} + +pub fn target_supports_cranelift_backend(target: TargetSelection) -> bool { + if target.contains("linux") { + target.contains("x86_64") + || target.contains("aarch64") + || target.contains("s390x") + || target.contains("riscv64gc") + } else if target.contains("darwin") || target.contains("windows") { + target.contains("x86_64") + } else { + false + } +} + +pub fn is_valid_test_suite_arg<'a, P: AsRef>( + path: &'a Path, + suite_path: P, + builder: &Builder<'_>, +) -> Option<&'a str> { + let suite_path = suite_path.as_ref(); + let path = match path.strip_prefix(".") { + Ok(p) => p, + Err(_) => path, + }; + if !path.starts_with(suite_path) { + return None; + } + let abs_path = builder.src.join(path); + let exists = abs_path.is_dir() || abs_path.is_file(); + if !exists { + panic!( + "Invalid test suite filter \"{}\": file or directory does not exist", + abs_path.display() + ); + } + // Since test suite paths are themselves directories, if we don't + // specify a directory or file, we'll get an empty string here + // (the result of the test suite directory without its suite prefix). + // Therefore, we need to filter these out, as only the first --test-args + // flag is respected, so providing an empty --test-args conflicts with + // any following it. + match path.strip_prefix(suite_path).ok().and_then(|p| p.to_str()) { + Some(s) if !s.is_empty() => Some(s), + _ => None, + } +} + +pub fn check_run(cmd: &mut Command, print_cmd_on_fail: bool) -> bool { + let status = match cmd.status() { + Ok(status) => status, + Err(e) => { + println!("failed to execute command: {cmd:?}\nERROR: {e}"); + return false; + } + }; + if !status.success() && print_cmd_on_fail { + println!( + "\n\ncommand did not execute successfully: {cmd:?}\n\ + expected success, got: {status}\n\n" + ); + } + status.success() +} + +pub fn make(host: &str) -> PathBuf { + if host.contains("dragonfly") + || host.contains("freebsd") + || host.contains("netbsd") + || host.contains("openbsd") + { + PathBuf::from("gmake") + } else { + PathBuf::from("make") + } +} + +#[track_caller] +pub fn output(cmd: &mut Command) -> String { + let output = match cmd.stderr(Stdio::inherit()).output() { + Ok(status) => status, + Err(e) => fail(&format!("failed to execute command: {cmd:?}\nERROR: {e}")), + }; + if !output.status.success() { + panic!( + "command did not execute successfully: {:?}\n\ + expected success, got: {}", + cmd, output.status + ); + } + String::from_utf8(output.stdout).unwrap() +} + +/// Returns the last-modified time for `path`, or zero if it doesn't exist. +pub fn mtime(path: &Path) -> SystemTime { + fs::metadata(path).and_then(|f| f.modified()).unwrap_or(UNIX_EPOCH) +} + +/// Returns `true` if `dst` is up to date given that the file or files in `src` +/// are used to generate it. +/// +/// Uses last-modified time checks to verify this. 
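
A typical caller of the freshness check documented above (sketch only) regenerates an artifact when any of its inputs has a newer modification time:

    use std::path::Path;

    // up_to_date() compares mtimes and walks directories recursively, so a
    // whole source tree can serve as the input side of the check.
    let src = Path::new("src/doc/rustc");
    let out = Path::new("build/doc/rustc.stamp");
    if !up_to_date(src, out) {
        // ... regenerate `out` from `src`, then touch the stamp file ...
    }
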
+pub fn up_to_date(src: &Path, dst: &Path) -> bool { + if !dst.exists() { + return false; + } + let threshold = mtime(dst); + let meta = match fs::metadata(src) { + Ok(meta) => meta, + Err(e) => panic!("source {src:?} failed to get metadata: {e}"), + }; + if meta.is_dir() { + dir_up_to_date(src, threshold) + } else { + meta.modified().unwrap_or(UNIX_EPOCH) <= threshold + } +} + +fn dir_up_to_date(src: &Path, threshold: SystemTime) -> bool { + t!(fs::read_dir(src)).map(|e| t!(e)).all(|e| { + let meta = t!(e.metadata()); + if meta.is_dir() { + dir_up_to_date(&e.path(), threshold) + } else { + meta.modified().unwrap_or(UNIX_EPOCH) < threshold + } + }) +} + +/// Copied from `std::path::absolute` until it stabilizes. +/// +/// FIXME: this shouldn't exist. +pub(crate) fn absolute(path: &Path) -> PathBuf { + if path.as_os_str().is_empty() { + panic!("can't make empty path absolute"); + } + #[cfg(unix)] + { + t!(absolute_unix(path), format!("could not make path absolute: {}", path.display())) + } + #[cfg(windows)] + { + t!(absolute_windows(path), format!("could not make path absolute: {}", path.display())) + } + #[cfg(not(any(unix, windows)))] + { + println!("WARNING: bootstrap is not supported on non-unix platforms"); + t!(std::fs::canonicalize(t!(std::env::current_dir()))).join(path) + } +} + +#[cfg(unix)] +/// Make a POSIX path absolute without changing its semantics. +fn absolute_unix(path: &Path) -> io::Result { + // This is mostly a wrapper around collecting `Path::components`, with + // exceptions made where this conflicts with the POSIX specification. + // See 4.13 Pathname Resolution, IEEE Std 1003.1-2017 + // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap04.html#tag_04_13 + + use std::os::unix::prelude::OsStrExt; + let mut components = path.components(); + let path_os = path.as_os_str().as_bytes(); + + let mut normalized = if path.is_absolute() { + // "If a pathname begins with two successive characters, the + // first component following the leading characters may be + // interpreted in an implementation-defined manner, although more than + // two leading characters shall be treated as a single + // character." + if path_os.starts_with(b"//") && !path_os.starts_with(b"///") { + components.next(); + PathBuf::from("//") + } else { + PathBuf::new() + } + } else { + env::current_dir()? + }; + normalized.extend(components); + + // "Interfaces using pathname resolution may specify additional constraints + // when a pathname that does not name an existing directory contains at + // least one non- character and contains one or more trailing + // characters". + // A trailing is also meaningful if "a symbolic link is + // encountered during pathname resolution". + + if path_os.ends_with(b"/") { + normalized.push(""); + } + + Ok(normalized) +} + +#[cfg(windows)] +fn absolute_windows(path: &std::path::Path) -> std::io::Result { + use std::ffi::OsString; + use std::io::Error; + use std::os::windows::ffi::{OsStrExt, OsStringExt}; + use std::ptr::null_mut; + #[link(name = "kernel32")] + extern "system" { + fn GetFullPathNameW( + lpFileName: *const u16, + nBufferLength: u32, + lpBuffer: *mut u16, + lpFilePart: *mut *const u16, + ) -> u32; + } + + unsafe { + // encode the path as UTF-16 + let path: Vec = path.as_os_str().encode_wide().chain([0]).collect(); + let mut buffer = Vec::new(); + // Loop until either success or failure. 
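
The Win32 call below follows the usual two-step contract: when the buffer is too small, the return value is the required length, so the buffer is grown and the call repeated. The same idiom in safe Rust, with a hypothetical `fill` closure standing in for `GetFullPathNameW`:

    // `fill` is assumed to return the required length when the buffer is too
    // small, the number of elements written on success, and 0 on failure.
    fn grow_and_call(mut fill: impl FnMut(&mut [u16]) -> usize) -> Option<Vec<u16>> {
        let mut buf: Vec<u16> = Vec::new();
        loop {
            match fill(&mut buf) {
                0 => return None,                       // hard failure
                n if n > buf.len() => buf.resize(n, 0), // too small: grow and retry
                n => {
                    buf.truncate(n);
                    return Some(buf);
                }
            }
        }
    }
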
+ loop { + // Try to get the absolute path + let len = GetFullPathNameW( + path.as_ptr(), + buffer.len().try_into().unwrap(), + buffer.as_mut_ptr(), + null_mut(), + ); + match len as usize { + // Failure + 0 => return Err(Error::last_os_error()), + // Buffer is too small, resize. + len if len > buffer.len() => buffer.resize(len, 0), + // Success! + len => { + buffer.truncate(len); + return Ok(OsString::from_wide(&buffer).into()); + } + } + } + } +} + +/// Adapted from +/// +/// When `clang-cl` is used with instrumentation, we need to add clang's runtime library resource +/// directory to the linker flags, otherwise there will be linker errors about the profiler runtime +/// missing. This function returns the path to that directory. +pub fn get_clang_cl_resource_dir(clang_cl_path: &str) -> PathBuf { + // Similar to how LLVM does it, to find clang's library runtime directory: + // - we ask `clang-cl` to locate the `clang_rt.builtins` lib. + let mut builtins_locator = Command::new(clang_cl_path); + builtins_locator.args(&["/clang:-print-libgcc-file-name", "/clang:--rtlib=compiler-rt"]); + + let clang_rt_builtins = output(&mut builtins_locator); + let clang_rt_builtins = Path::new(clang_rt_builtins.trim()); + assert!( + clang_rt_builtins.exists(), + "`clang-cl` must correctly locate the library runtime directory" + ); + + // - the profiler runtime will be located in the same directory as the builtins lib, like + // `$LLVM_DISTRO_ROOT/lib/clang/$LLVM_VERSION/lib/windows`. + let clang_rt_dir = clang_rt_builtins.parent().expect("The clang lib folder should exist"); + clang_rt_dir.to_path_buf() +} + +pub fn lld_flag_no_threads(is_windows: bool) -> &'static str { + static LLD_NO_THREADS: OnceCell<(&'static str, &'static str)> = OnceCell::new(); + let (windows, other) = LLD_NO_THREADS.get_or_init(|| { + let out = output(Command::new("lld").arg("-flavor").arg("ld").arg("--version")); + let newer = match (out.find(char::is_numeric), out.find('.')) { + (Some(b), Some(e)) => out.as_str()[b..e].parse::().ok().unwrap_or(14) > 10, + _ => true, + }; + if newer { ("/threads:1", "--threads=1") } else { ("/no-threads", "--no-threads") } + }); + if is_windows { windows } else { other } +} + +pub fn dir_is_empty(dir: &Path) -> bool { + t!(std::fs::read_dir(dir)).next().is_none() +} + +/// Extract the beta revision from the full version string. +/// +/// The full version string looks like "a.b.c-beta.y". And we need to extract +/// the "y" part from the string. +pub fn extract_beta_rev(version: &str) -> Option { + let parts = version.splitn(2, "-beta.").collect::>(); + let count = parts.get(1).and_then(|s| s.find(' ').map(|p| (&s[..p]).to_string())); + + count +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/job.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/job.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/job.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/job.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,159 @@ +#[cfg(windows)] +pub use for_windows::*; + +#[cfg(any(target_os = "haiku", target_os = "hermit", not(any(unix, windows))))] +pub unsafe fn setup(_build: &mut crate::Build) {} + +#[cfg(all(unix, not(target_os = "haiku")))] +pub unsafe fn setup(build: &mut crate::Build) { + if build.config.low_priority { + libc::setpriority(libc::PRIO_PGRP as _, 0, 10); + } +} + +#[cfg(windows)] +mod for_windows { + //! Job management on Windows for bootstrapping + //! + //! 
Most of the time when you're running a build system (e.g., make) you expect + //! Ctrl-C or abnormal termination to actually terminate the entire tree of + //! process in play, not just the one at the top. This currently works "by + //! default" on Unix platforms because Ctrl-C actually sends a signal to the + //! *process group* rather than the parent process, so everything will get torn + //! down. On Windows, however, this does not happen and Ctrl-C just kills the + //! parent process. + //! + //! To achieve the same semantics on Windows we use Job Objects to ensure that + //! all processes die at the same time. Job objects have a mode of operation + //! where when all handles to the object are closed it causes all child + //! processes associated with the object to be terminated immediately. + //! Conveniently whenever a process in the job object spawns a new process the + //! child will be associated with the job object as well. This means if we add + //! ourselves to the job object we create then everything will get torn down! + //! + //! Unfortunately most of the time the build system is actually called from a + //! python wrapper (which manages things like building the build system) so this + //! all doesn't quite cut it so far. To go the last mile we duplicate the job + //! object handle into our parent process (a python process probably) and then + //! close our own handle. This means that the only handle to the job object + //! resides in the parent python process, so when python dies the whole build + //! system dies (as one would probably expect!). + //! + //! Note that this module has a #[cfg(windows)] above it as none of this logic + //! is required on Unix. + + use crate::Build; + use std::env; + use std::ffi::c_void; + use std::io; + use std::mem; + + use windows::{ + core::PCWSTR, + Win32::Foundation::{CloseHandle, DuplicateHandle, DUPLICATE_SAME_ACCESS, HANDLE}, + Win32::System::Diagnostics::Debug::{ + SetErrorMode, SEM_NOGPFAULTERRORBOX, THREAD_ERROR_MODE, + }, + Win32::System::JobObjects::{ + AssignProcessToJobObject, CreateJobObjectW, JobObjectExtendedLimitInformation, + SetInformationJobObject, JOBOBJECT_EXTENDED_LIMIT_INFORMATION, + JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE, JOB_OBJECT_LIMIT_PRIORITY_CLASS, + }, + Win32::System::Threading::{ + GetCurrentProcess, OpenProcess, BELOW_NORMAL_PRIORITY_CLASS, PROCESS_DUP_HANDLE, + }, + }; + + pub unsafe fn setup(build: &mut Build) { + // Enable the Windows Error Reporting dialog which msys disables, + // so we can JIT debug rustc + let mode = SetErrorMode(THREAD_ERROR_MODE::default()); + let mode = THREAD_ERROR_MODE(mode); + SetErrorMode(mode & !SEM_NOGPFAULTERRORBOX); + + // Create a new job object for us to use + let job = CreateJobObjectW(None, PCWSTR::null()).unwrap(); + + // Indicate that when all handles to the job object are gone that all + // process in the object should be killed. Note that this includes our + // entire process tree by default because we've added ourselves and our + // children will reside in the job by default. 
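
As the module comment above explains, the wrapper that launched bootstrap (normally the python script) is expected to advertise its own pid so that ownership of the job object can be handed back to it. A hypothetical parent-side counterpart of the `BOOTSTRAP_PARENT_ID` lookup further down:

    use std::process::Command;

    // The parent exports its pid; the child duplicates the job-object handle
    // into that process and closes its own, as implemented below.
    let status = Command::new("bootstrap.exe") // hypothetical child binary
        .env("BOOTSTRAP_PARENT_ID", std::process::id().to_string())
        .status()
        .expect("failed to spawn bootstrap");
    assert!(status.success());
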
+ let mut info = JOBOBJECT_EXTENDED_LIMIT_INFORMATION::default(); + info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE; + if build.config.low_priority { + info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_PRIORITY_CLASS; + info.BasicLimitInformation.PriorityClass = BELOW_NORMAL_PRIORITY_CLASS.0; + } + let r = SetInformationJobObject( + job, + JobObjectExtendedLimitInformation, + &info as *const _ as *const c_void, + mem::size_of_val(&info) as u32, + ); + assert!(r.is_ok(), "{}", io::Error::last_os_error()); + + // Assign our process to this job object. Note that if this fails, one very + // likely reason is that we are ourselves already in a job object! This can + // happen on the build bots that we've got for Windows, or if just anyone + // else is instrumenting the build. In this case we just bail out + // immediately and assume that they take care of it. + // + // Also note that nested jobs (why this might fail) are supported in recent + // versions of Windows, but the version of Windows that our bots are running + // at least don't support nested job objects. + let r = AssignProcessToJobObject(job, GetCurrentProcess()); + if r.is_err() { + CloseHandle(job).ok(); + return; + } + + // If we've got a parent process (e.g., the python script that called us) + // then move ownership of this job object up to them. That way if the python + // script is killed (e.g., via ctrl-c) then we'll all be torn down. + // + // If we don't have a parent (e.g., this was run directly) then we + // intentionally leak the job object handle. When our process exits + // (normally or abnormally) it will close the handle implicitly, causing all + // processes in the job to be cleaned up. + let pid = match env::var("BOOTSTRAP_PARENT_ID") { + Ok(s) => s, + Err(..) => return, + }; + + let parent = match OpenProcess(PROCESS_DUP_HANDLE, false, pid.parse().unwrap()).ok() { + Some(parent) => parent, + _ => { + // If we get a null parent pointer here, it is possible that either + // we have an invalid pid or the parent process has been closed. + // Since the first case rarely happens + // (only when wrongly setting the environmental variable), + // it might be better to improve the experience of the second case + // when users have interrupted the parent process and we haven't finish + // duplicating the handle yet. We just need close the job object if that occurs. + CloseHandle(job).ok(); + return; + } + }; + + let mut parent_handle = HANDLE::default(); + let r = DuplicateHandle( + GetCurrentProcess(), + job, + parent, + &mut parent_handle, + 0, + false, + DUPLICATE_SAME_ACCESS, + ); + + // If this failed, well at least we tried! An example of DuplicateHandle + // failing in the past has been when the wrong python2 package spawned this + // build system (e.g., the `python2` package in MSYS instead of + // `mingw-w64-x86_64-python2`). Not sure why it failed, but the "failure + // mode" here is that we only clean everything up when the build system + // dies, not when the python parent does, so not too bad. + if r.is_err() { + CloseHandle(job).ok(); + } + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/metrics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/metrics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/metrics.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/metrics.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,258 @@ +//! 
This module is responsible for collecting metrics profiling information for the current build +//! and dumping it to disk as JSON, to aid investigations on build and CI performance. +//! +//! As this module requires additional dependencies not present during local builds, it's cfg'd +//! away whenever the `build.metrics` config option is not set to `true`. + +use crate::core::builder::{Builder, Step}; +use crate::utils::helpers::t; +use crate::Build; +use build_helper::metrics::{ + JsonInvocation, JsonInvocationSystemStats, JsonNode, JsonRoot, JsonStepSystemStats, Test, + TestOutcome, TestSuite, TestSuiteMetadata, +}; +use std::cell::RefCell; +use std::fs::File; +use std::io::BufWriter; +use std::time::{Duration, Instant, SystemTime}; +use sysinfo::{CpuExt, System, SystemExt}; + +// Update this number whenever a breaking change is made to the build metrics. +// +// The output format is versioned for two reasons: +// +// - The metadata is intended to be consumed by external tooling, and exposing a format version +// helps the tools determine whether they're compatible with a metrics file. +// +// - If a developer enables build metrics in their local checkout, making a breaking change to the +// metrics format would result in a hard-to-diagnose error message when an existing metrics file +// is not compatible with the new changes. With a format version number, bootstrap can discard +// incompatible metrics files instead of appending metrics to them. +// +// Version changelog: +// +// - v0: initial version +// - v1: replaced JsonNode::Test with JsonNode::TestSuite +// +const CURRENT_FORMAT_VERSION: usize = 1; + +pub(crate) struct BuildMetrics { + state: RefCell, +} + +/// NOTE: this isn't really cloning anything, but `x suggest` doesn't need metrics so this is probably ok. +impl Clone for BuildMetrics { + fn clone(&self) -> Self { + Self::init() + } +} + +impl BuildMetrics { + pub(crate) fn init() -> Self { + let state = RefCell::new(MetricsState { + finished_steps: Vec::new(), + running_steps: Vec::new(), + + system_info: System::new(), + timer_start: None, + invocation_timer_start: Instant::now(), + invocation_start: SystemTime::now(), + }); + + BuildMetrics { state } + } + + pub(crate) fn enter_step(&self, step: &S, builder: &Builder<'_>) { + // Do not record dry runs, as they'd be duplicates of the actual steps. + if builder.config.dry_run() { + return; + } + + let mut state = self.state.borrow_mut(); + + // Consider all the stats gathered so far as the parent's. + if !state.running_steps.is_empty() { + self.collect_stats(&mut *state); + } + + state.system_info.refresh_cpu(); + state.timer_start = Some(Instant::now()); + + state.running_steps.push(StepMetrics { + type_: std::any::type_name::().into(), + debug_repr: format!("{step:?}"), + + cpu_usage_time_sec: 0.0, + duration_excluding_children_sec: Duration::ZERO, + + children: Vec::new(), + test_suites: Vec::new(), + }); + } + + pub(crate) fn exit_step(&self, builder: &Builder<'_>) { + // Do not record dry runs, as they'd be duplicates of the actual steps. + if builder.config.dry_run() { + return; + } + + let mut state = self.state.borrow_mut(); + + self.collect_stats(&mut *state); + + let step = state.running_steps.pop().unwrap(); + if state.running_steps.is_empty() { + state.finished_steps.push(step); + state.timer_start = None; + } else { + state.running_steps.last_mut().unwrap().children.push(step); + + // Start collecting again for the parent step. 
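
The version gate described above is enforced in `persist()` further down: before trusting an existing metrics.json it deserializes only the `format_version` field (see the `OnlyFormatVersion` helper at the bottom of this file) and starts afresh if the number does not match. A minimal sketch of that pattern, assuming the usual serde_derive/serde_json dependencies:

    #[derive(serde_derive::Deserialize)]
    struct OnlyVersion {
        #[serde(default)] // absent in the pre-versioning format, so treat as 0
        format_version: usize,
    }

    fn main() {
        let raw = br#"{"format_version": 1, "invocations": []}"#;
        let peek: OnlyVersion = serde_json::from_slice(raw).unwrap();
        assert_eq!(peek.format_version, 1);
    }

Unknown fields are ignored during this first pass, so the check succeeds even when the rest of the file no longer matches the current schema.
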
+ state.system_info.refresh_cpu(); + state.timer_start = Some(Instant::now()); + } + } + + pub(crate) fn begin_test_suite(&self, metadata: TestSuiteMetadata, builder: &Builder<'_>) { + // Do not record dry runs, as they'd be duplicates of the actual steps. + if builder.config.dry_run() { + return; + } + + let mut state = self.state.borrow_mut(); + let step = state.running_steps.last_mut().unwrap(); + step.test_suites.push(TestSuite { metadata, tests: Vec::new() }); + } + + pub(crate) fn record_test(&self, name: &str, outcome: TestOutcome, builder: &Builder<'_>) { + // Do not record dry runs, as they'd be duplicates of the actual steps. + if builder.config.dry_run() { + return; + } + + let mut state = self.state.borrow_mut(); + let step = state.running_steps.last_mut().unwrap(); + + if let Some(test_suite) = step.test_suites.last_mut() { + test_suite.tests.push(Test { name: name.to_string(), outcome }); + } else { + panic!("metrics.record_test() called without calling metrics.begin_test_suite() first"); + } + } + + fn collect_stats(&self, state: &mut MetricsState) { + let step = state.running_steps.last_mut().unwrap(); + + let elapsed = state.timer_start.unwrap().elapsed(); + step.duration_excluding_children_sec += elapsed; + + state.system_info.refresh_cpu(); + let cpu = state.system_info.cpus().iter().map(|p| p.cpu_usage()).sum::(); + step.cpu_usage_time_sec += cpu as f64 / 100.0 * elapsed.as_secs_f64(); + } + + pub(crate) fn persist(&self, build: &Build) { + let mut state = self.state.borrow_mut(); + assert!(state.running_steps.is_empty(), "steps are still executing"); + + let dest = build.out.join("metrics.json"); + + let mut system = System::new(); + system.refresh_cpu(); + system.refresh_memory(); + + let system_stats = JsonInvocationSystemStats { + cpu_threads_count: system.cpus().len(), + cpu_model: system.cpus()[0].brand().into(), + + memory_total_bytes: system.total_memory(), + }; + let steps = std::mem::take(&mut state.finished_steps); + + // Some of our CI builds consist of multiple independent CI invocations. Ensure all the + // previous invocations are still present in the resulting file. + let mut invocations = match std::fs::read(&dest) { + Ok(contents) => { + // We first parse just the format_version field to have the check succeed even if + // the rest of the contents are not valid anymore. + let version: OnlyFormatVersion = t!(serde_json::from_slice(&contents)); + if version.format_version == CURRENT_FORMAT_VERSION { + t!(serde_json::from_slice::(&contents)).invocations + } else { + println!( + "WARNING: overriding existing build/metrics.json, as it's not \ + compatible with build metrics format version {CURRENT_FORMAT_VERSION}." 
+ ); + Vec::new() + } + } + Err(err) => { + if err.kind() != std::io::ErrorKind::NotFound { + panic!("failed to open existing metrics file at {}: {err}", dest.display()); + } + Vec::new() + } + }; + invocations.push(JsonInvocation { + start_time: state + .invocation_start + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap() + .as_secs(), + duration_including_children_sec: state.invocation_timer_start.elapsed().as_secs_f64(), + children: steps.into_iter().map(|step| self.prepare_json_step(step)).collect(), + }); + + let json = JsonRoot { format_version: CURRENT_FORMAT_VERSION, system_stats, invocations }; + + t!(std::fs::create_dir_all(dest.parent().unwrap())); + let mut file = BufWriter::new(t!(File::create(&dest))); + t!(serde_json::to_writer(&mut file, &json)); + } + + fn prepare_json_step(&self, step: StepMetrics) -> JsonNode { + let mut children = Vec::new(); + children.extend(step.children.into_iter().map(|child| self.prepare_json_step(child))); + children.extend(step.test_suites.into_iter().map(JsonNode::TestSuite)); + + JsonNode::RustbuildStep { + type_: step.type_, + debug_repr: step.debug_repr, + + duration_excluding_children_sec: step.duration_excluding_children_sec.as_secs_f64(), + system_stats: JsonStepSystemStats { + cpu_utilization_percent: step.cpu_usage_time_sec * 100.0 + / step.duration_excluding_children_sec.as_secs_f64(), + }, + + children, + } + } +} + +struct MetricsState { + finished_steps: Vec, + running_steps: Vec, + + system_info: System, + timer_start: Option, + invocation_timer_start: Instant, + invocation_start: SystemTime, +} + +struct StepMetrics { + type_: String, + debug_repr: String, + + cpu_usage_time_sec: f64, + duration_excluding_children_sec: Duration, + + children: Vec, + test_suites: Vec, +} + +#[derive(serde_derive::Deserialize)] +struct OnlyFormatVersion { + #[serde(default)] // For version 0 the field was not present. + format_version: usize, +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,15 @@ +//! This module contains integral components of the build and configuration process, providing +//! support for a wide range of tasks and operations such as caching, tarballs, release +//! channels, job management, etc. + +pub(crate) mod cache; +pub(crate) mod cc_detect; +pub(crate) mod channel; +pub(crate) mod dylib; +pub(crate) mod exec; +pub(crate) mod helpers; +pub(crate) mod job; +#[cfg(feature = "build-metrics")] +pub(crate) mod metrics; +pub(crate) mod render_tests; +pub(crate) mod tarball; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/render_tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/render_tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/render_tests.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/render_tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,400 @@ +//! This module renders the JSON output of libtest into a human-readable form, trying to be as +//! similar to libtest's native output as possible. +//! +//! This is needed because we need to use libtest in JSON mode to extract granular information +//! about the executed tests. Doing so suppresses the human-readable output, and (compared to Cargo +//! 
and rustc) libtest doesn't include the rendered human-readable output as a JSON field. We had +//! to reimplement all the rendering logic in this module because of that. + +use crate::core::builder::Builder; +use std::io::{BufRead, BufReader, Read, Write}; +use std::process::{ChildStdout, Command, Stdio}; +use std::time::Duration; +use termcolor::{Color, ColorSpec, WriteColor}; + +const TERSE_TESTS_PER_LINE: usize = 88; + +pub(crate) fn add_flags_and_try_run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool { + if cmd.get_args().position(|arg| arg == "--").is_none() { + cmd.arg("--"); + } + cmd.args(&["-Z", "unstable-options", "--format", "json"]); + + try_run_tests(builder, cmd, false) +} + +pub(crate) fn try_run_tests(builder: &Builder<'_>, cmd: &mut Command, stream: bool) -> bool { + if builder.config.dry_run() { + return true; + } + + if !run_tests(builder, cmd, stream) { + if builder.fail_fast { + crate::exit!(1); + } else { + let mut failures = builder.delayed_failures.borrow_mut(); + failures.push(format!("{cmd:?}")); + false + } + } else { + true + } +} + +fn run_tests(builder: &Builder<'_>, cmd: &mut Command, stream: bool) -> bool { + cmd.stdout(Stdio::piped()); + + builder.verbose(&format!("running: {cmd:?}")); + + let mut process = cmd.spawn().unwrap(); + + // This runs until the stdout of the child is closed, which means the child exited. We don't + // run this on another thread since the builder is not Sync. + let renderer = Renderer::new(process.stdout.take().unwrap(), builder); + if stream { + renderer.stream_all(); + } else { + renderer.render_all(); + } + + let result = process.wait_with_output().unwrap(); + if !result.status.success() && builder.is_verbose() { + println!( + "\n\ncommand did not execute successfully: {cmd:?}\n\ + expected success, got: {}", + result.status + ); + } + + result.status.success() +} + +struct Renderer<'a> { + stdout: BufReader, + failures: Vec, + benches: Vec, + builder: &'a Builder<'a>, + tests_count: Option, + executed_tests: usize, + terse_tests_in_line: usize, +} + +impl<'a> Renderer<'a> { + fn new(stdout: ChildStdout, builder: &'a Builder<'a>) -> Self { + Self { + stdout: BufReader::new(stdout), + benches: Vec::new(), + failures: Vec::new(), + builder, + tests_count: None, + executed_tests: 0, + terse_tests_in_line: 0, + } + } + + fn render_all(mut self) { + let mut line = Vec::new(); + loop { + line.clear(); + match self.stdout.read_until(b'\n', &mut line) { + Ok(_) => {} + Err(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => break, + Err(err) => panic!("failed to read output of test runner: {err}"), + } + if line.is_empty() { + break; + } + + match serde_json::from_slice(&line) { + Ok(parsed) => self.render_message(parsed), + Err(_err) => { + // Handle non-JSON output, for example when --nocapture is passed. 
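
The lines consumed here are libtest's machine-readable events, one JSON object per line, and the `Message`/`SuiteMessage`/`TestMessage` enums at the bottom of this file mirror their `type`/`event` tags. A few representative lines (field values illustrative):

    // What a test binary prints under `--format json -Z unstable-options`;
    // render_message() turns these back into libtest-style human output.
    let events = [
        r#"{ "type": "suite", "event": "started", "test_count": 2 }"#,
        r#"{ "type": "test", "event": "started", "name": "helpers::exe_suffix" }"#,
        r#"{ "type": "test", "event": "ok", "name": "helpers::exe_suffix" }"#,
        r#"{ "type": "suite", "event": "ok", "passed": 2, "failed": 0, "ignored": 0, "measured": 0, "filtered_out": 0, "exec_time": 0.01 }"#,
    ];
    let _ = events;
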
+ let mut stdout = std::io::stdout(); + stdout.write_all(&line).unwrap(); + let _ = stdout.flush(); + } + } + } + } + + /// Renders the stdout characters one by one + fn stream_all(mut self) { + let mut buffer = [0; 1]; + loop { + match self.stdout.read(&mut buffer) { + Ok(0) => break, + Ok(_) => { + let mut stdout = std::io::stdout(); + stdout.write_all(&buffer).unwrap(); + let _ = stdout.flush(); + } + Err(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => break, + Err(err) => panic!("failed to read output of test runner: {err}"), + } + } + } + + fn render_test_outcome(&mut self, outcome: Outcome<'_>, test: &TestOutcome) { + self.executed_tests += 1; + + #[cfg(feature = "build-metrics")] + self.builder.metrics.record_test( + &test.name, + match outcome { + Outcome::Ok | Outcome::BenchOk => build_helper::metrics::TestOutcome::Passed, + Outcome::Failed => build_helper::metrics::TestOutcome::Failed, + Outcome::Ignored { reason } => build_helper::metrics::TestOutcome::Ignored { + ignore_reason: reason.map(|s| s.to_string()), + }, + }, + self.builder, + ); + + if self.builder.config.verbose_tests { + self.render_test_outcome_verbose(outcome, test); + } else { + self.render_test_outcome_terse(outcome, test); + } + } + + fn render_test_outcome_verbose(&self, outcome: Outcome<'_>, test: &TestOutcome) { + print!("test {} ... ", test.name); + self.builder.colored_stdout(|stdout| outcome.write_long(stdout)).unwrap(); + if let Some(exec_time) = test.exec_time { + print!(" ({exec_time:.2?})"); + } + println!(); + } + + fn render_test_outcome_terse(&mut self, outcome: Outcome<'_>, _: &TestOutcome) { + if self.terse_tests_in_line != 0 && self.terse_tests_in_line % TERSE_TESTS_PER_LINE == 0 { + if let Some(total) = self.tests_count { + let total = total.to_string(); + let executed = format!("{:>width$}", self.executed_tests - 1, width = total.len()); + print!(" {executed}/{total}"); + } + println!(); + self.terse_tests_in_line = 0; + } + + self.terse_tests_in_line += 1; + self.builder.colored_stdout(|stdout| outcome.write_short(stdout)).unwrap(); + let _ = std::io::stdout().flush(); + } + + fn render_suite_outcome(&self, outcome: Outcome<'_>, suite: &SuiteOutcome) { + // The terse output doesn't end with a newline, so we need to add it ourselves. + if !self.builder.config.verbose_tests { + println!(); + } + + if !self.failures.is_empty() { + println!("\nfailures:\n"); + for failure in &self.failures { + if failure.stdout.is_some() || failure.message.is_some() { + println!("---- {} stdout ----", failure.name); + if let Some(stdout) = &failure.stdout { + println!("{stdout}"); + } + if let Some(message) = &failure.message { + println!("NOTE: {message}"); + } + } + } + + println!("\nfailures:"); + for failure in &self.failures { + println!(" {}", failure.name); + } + } + + if !self.benches.is_empty() { + println!("\nbenchmarks:"); + + let mut rows = Vec::new(); + for bench in &self.benches { + rows.push(( + &bench.name, + format!("{:.2?}/iter", Duration::from_nanos(bench.median)), + format!("+/- {:.2?}", Duration::from_nanos(bench.deviation)), + )); + } + + let max_0 = rows.iter().map(|r| r.0.len()).max().unwrap_or(0); + let max_1 = rows.iter().map(|r| r.1.len()).max().unwrap_or(0); + let max_2 = rows.iter().map(|r| r.2.len()).max().unwrap_or(0); + for row in &rows { + println!(" {:max_1$} {:>max_2$}", row.0, row.1, row.2); + } + } + + print!("\ntest result: "); + self.builder.colored_stdout(|stdout| outcome.write_long(stdout)).unwrap(); + println!( + ". 
{} passed; {} failed; {} ignored; {} measured; {} filtered out; \ + finished in {:.2?}\n", + suite.passed, + suite.failed, + suite.ignored, + suite.measured, + suite.filtered_out, + Duration::from_secs_f64(suite.exec_time) + ); + } + + fn render_message(&mut self, message: Message) { + match message { + Message::Suite(SuiteMessage::Started { test_count }) => { + println!("\nrunning {test_count} tests"); + self.executed_tests = 0; + self.terse_tests_in_line = 0; + self.tests_count = Some(test_count); + } + Message::Suite(SuiteMessage::Ok(outcome)) => { + self.render_suite_outcome(Outcome::Ok, &outcome); + } + Message::Suite(SuiteMessage::Failed(outcome)) => { + self.render_suite_outcome(Outcome::Failed, &outcome); + } + Message::Bench(outcome) => { + // The formatting for benchmarks doesn't replicate 1:1 the formatting libtest + // outputs, mostly because libtest's formatting is broken in terse mode, which is + // the default used by our monorepo. We use a different formatting instead: + // successful benchmarks are just showed as "benchmarked"/"b", and the details are + // outputted at the bottom like failures. + let fake_test_outcome = TestOutcome { + name: outcome.name.clone(), + exec_time: None, + stdout: None, + message: None, + }; + self.render_test_outcome(Outcome::BenchOk, &fake_test_outcome); + self.benches.push(outcome); + } + Message::Test(TestMessage::Ok(outcome)) => { + self.render_test_outcome(Outcome::Ok, &outcome); + } + Message::Test(TestMessage::Ignored(outcome)) => { + self.render_test_outcome( + Outcome::Ignored { reason: outcome.message.as_deref() }, + &outcome, + ); + } + Message::Test(TestMessage::Failed(outcome)) => { + self.render_test_outcome(Outcome::Failed, &outcome); + self.failures.push(outcome); + } + Message::Test(TestMessage::Timeout { name }) => { + println!("test {name} has been running for a long time"); + } + Message::Test(TestMessage::Started) => {} // Not useful + } + } +} + +enum Outcome<'a> { + Ok, + BenchOk, + Failed, + Ignored { reason: Option<&'a str> }, +} + +impl Outcome<'_> { + fn write_short(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> { + match self { + Outcome::Ok => { + writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?; + write!(writer, ".")?; + } + Outcome::BenchOk => { + writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?; + write!(writer, "b")?; + } + Outcome::Failed => { + writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?; + write!(writer, "F")?; + } + Outcome::Ignored { .. 
} => { + writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?; + write!(writer, "i")?; + } + } + writer.reset() + } + + fn write_long(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> { + match self { + Outcome::Ok => { + writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?; + write!(writer, "ok")?; + } + Outcome::BenchOk => { + writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?; + write!(writer, "benchmarked")?; + } + Outcome::Failed => { + writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?; + write!(writer, "FAILED")?; + } + Outcome::Ignored { reason } => { + writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?; + write!(writer, "ignored")?; + if let Some(reason) = reason { + write!(writer, ", {reason}")?; + } + } + } + writer.reset() + } +} + +#[derive(serde_derive::Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +enum Message { + Suite(SuiteMessage), + Test(TestMessage), + Bench(BenchOutcome), +} + +#[derive(serde_derive::Deserialize)] +#[serde(tag = "event", rename_all = "snake_case")] +enum SuiteMessage { + Ok(SuiteOutcome), + Failed(SuiteOutcome), + Started { test_count: usize }, +} + +#[derive(serde_derive::Deserialize)] +struct SuiteOutcome { + passed: usize, + failed: usize, + ignored: usize, + measured: usize, + filtered_out: usize, + exec_time: f64, +} + +#[derive(serde_derive::Deserialize)] +#[serde(tag = "event", rename_all = "snake_case")] +enum TestMessage { + Ok(TestOutcome), + Failed(TestOutcome), + Ignored(TestOutcome), + Timeout { name: String }, + Started, +} + +#[derive(serde_derive::Deserialize)] +struct BenchOutcome { + name: String, + median: u64, + deviation: u64, +} + +#[derive(serde_derive::Deserialize)] +struct TestOutcome { + name: String, + exec_time: Option<f64>, + stdout: Option<String>, + message: Option<String>, +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/tarball.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/tarball.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/tarball.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/src/utils/tarball.rs 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,381 @@ +use std::{ + path::{Path, PathBuf}, + process::Command, +}; + +use crate::core::build_steps::dist::distdir; +use crate::core::builder::Builder; +use crate::utils::channel; +use crate::utils::helpers::t; + +#[derive(Copy, Clone)] +pub(crate) enum OverlayKind { + Rust, + LLVM, + Cargo, + Clippy, + Miri, + Rustfmt, + RustDemangler, + RLS, + RustAnalyzer, + RustcCodegenCranelift, +} + +impl OverlayKind { + fn legal_and_readme(&self) -> &[&str] { + match self { + OverlayKind::Rust => &["COPYRIGHT", "LICENSE-APACHE", "LICENSE-MIT", "README.md"], + OverlayKind::LLVM => { + &["src/llvm-project/llvm/LICENSE.TXT", "src/llvm-project/llvm/README.txt"] + } + OverlayKind::Cargo => &[ + "src/tools/cargo/README.md", + "src/tools/cargo/LICENSE-MIT", + "src/tools/cargo/LICENSE-APACHE", + "src/tools/cargo/LICENSE-THIRD-PARTY", + ], + OverlayKind::Clippy => &[ + "src/tools/clippy/README.md", + "src/tools/clippy/LICENSE-APACHE", + "src/tools/clippy/LICENSE-MIT", + ], + OverlayKind::Miri => &[ + "src/tools/miri/README.md", + "src/tools/miri/LICENSE-APACHE", + "src/tools/miri/LICENSE-MIT", + ], + OverlayKind::Rustfmt => &[ + "src/tools/rustfmt/README.md", + "src/tools/rustfmt/LICENSE-APACHE", + "src/tools/rustfmt/LICENSE-MIT", + ], + OverlayKind::RustDemangler => { + &["src/tools/rust-demangler/README.md", "LICENSE-APACHE", 
"LICENSE-MIT"] + } + OverlayKind::RLS => &["src/tools/rls/README.md", "LICENSE-APACHE", "LICENSE-MIT"], + OverlayKind::RustAnalyzer => &[ + "src/tools/rust-analyzer/README.md", + "src/tools/rust-analyzer/LICENSE-APACHE", + "src/tools/rust-analyzer/LICENSE-MIT", + ], + OverlayKind::RustcCodegenCranelift => &[ + "compiler/rustc_codegen_cranelift/Readme.md", + "compiler/rustc_codegen_cranelift/LICENSE-APACHE", + "compiler/rustc_codegen_cranelift/LICENSE-MIT", + ], + } + } + + fn version(&self, builder: &Builder<'_>) -> String { + match self { + OverlayKind::Rust => builder.rust_version(), + OverlayKind::LLVM => builder.rust_version(), + OverlayKind::RustDemangler => builder.release_num("rust-demangler"), + OverlayKind::Cargo => { + builder.cargo_info.version(builder, &builder.release_num("cargo")) + } + OverlayKind::Clippy => { + builder.clippy_info.version(builder, &builder.release_num("clippy")) + } + OverlayKind::Miri => builder.miri_info.version(builder, &builder.release_num("miri")), + OverlayKind::Rustfmt => { + builder.rustfmt_info.version(builder, &builder.release_num("rustfmt")) + } + OverlayKind::RLS => builder.release(&builder.release_num("rls")), + OverlayKind::RustAnalyzer => builder + .rust_analyzer_info + .version(builder, &builder.release_num("rust-analyzer/crates/rust-analyzer")), + OverlayKind::RustcCodegenCranelift => builder.rust_version(), + } + } +} + +pub(crate) struct Tarball<'a> { + builder: &'a Builder<'a>, + + pkgname: String, + component: String, + target: Option<String>, + product_name: String, + overlay: OverlayKind, + + temp_dir: PathBuf, + image_dir: PathBuf, + overlay_dir: PathBuf, + bulk_dirs: Vec<PathBuf>, + + include_target_in_component_name: bool, + is_preview: bool, + permit_symlinks: bool, +} + +impl<'a> Tarball<'a> { + pub(crate) fn new(builder: &'a Builder<'a>, component: &str, target: &str) -> Self { + Self::new_inner(builder, component, Some(target.into())) + } + + pub(crate) fn new_targetless(builder: &'a Builder<'a>, component: &str) -> Self { + Self::new_inner(builder, component, None) + } + + fn new_inner(builder: &'a Builder<'a>, component: &str, target: Option<String>) -> Self { + let pkgname = crate::core::build_steps::dist::pkgname(builder, component); + + let mut temp_dir = builder.out.join("tmp").join("tarball").join(component); + if let Some(target) = &target { + temp_dir = temp_dir.join(target); + } + let _ = std::fs::remove_dir_all(&temp_dir); + + let image_dir = temp_dir.join("image"); + let overlay_dir = temp_dir.join("overlay"); + + Self { + builder, + + pkgname, + component: component.into(), + target, + product_name: "Rust".into(), + overlay: OverlayKind::Rust, + + temp_dir, + image_dir, + overlay_dir, + bulk_dirs: Vec::new(), + + include_target_in_component_name: false, + is_preview: false, + permit_symlinks: false, + } + } + + pub(crate) fn set_overlay(&mut self, overlay: OverlayKind) { + self.overlay = overlay; + } + + pub(crate) fn set_product_name(&mut self, name: &str) { + self.product_name = name.into(); + } + + pub(crate) fn include_target_in_component_name(&mut self, include: bool) { + self.include_target_in_component_name = include; + } + + pub(crate) fn is_preview(&mut self, is: bool) { + self.is_preview = is; + } + + pub(crate) fn permit_symlinks(&mut self, flag: bool) { + self.permit_symlinks = flag; + } + + pub(crate) fn image_dir(&self) -> &Path { + t!(std::fs::create_dir_all(&self.image_dir)); + &self.image_dir + } + + pub(crate) fn add_file(&self, src: impl AsRef<Path>, destdir: impl AsRef<Path>, perms: u32) { + // create_dir_all fails to create 
`foo/bar/.`, so when the destination is "." this simply + // uses the base directory as the destination directory. + let destdir = if destdir.as_ref() == Path::new(".") { + self.image_dir.clone() + } else { + self.image_dir.join(destdir.as_ref()) + }; + + t!(std::fs::create_dir_all(&destdir)); + self.builder.install(src.as_ref(), &destdir, perms); + } + + pub(crate) fn add_renamed_file( + &self, + src: impl AsRef<Path>, + destdir: impl AsRef<Path>, + new_name: &str, + ) { + let destdir = self.image_dir.join(destdir.as_ref()); + t!(std::fs::create_dir_all(&destdir)); + self.builder.copy(src.as_ref(), &destdir.join(new_name)); + } + + pub(crate) fn add_legal_and_readme_to(&self, destdir: impl AsRef<Path>) { + for file in self.overlay.legal_and_readme() { + self.add_file(self.builder.src.join(file), destdir.as_ref(), 0o644); + } + } + + pub(crate) fn add_dir(&self, src: impl AsRef<Path>, dest: impl AsRef<Path>) { + let dest = self.image_dir.join(dest.as_ref()); + + t!(std::fs::create_dir_all(&dest)); + self.builder.cp_r(src.as_ref(), &dest); + } + + pub(crate) fn add_bulk_dir(&mut self, src: impl AsRef<Path>, dest: impl AsRef<Path>) { + self.bulk_dirs.push(dest.as_ref().to_path_buf()); + self.add_dir(src, dest); + } + + pub(crate) fn generate(self) -> GeneratedTarball { + let mut component_name = self.component.clone(); + if self.is_preview { + component_name.push_str("-preview"); + } + if self.include_target_in_component_name { + component_name.push('-'); + component_name.push_str( + &self + .target + .as_ref() + .expect("include_target_in_component_name used in a targetless tarball"), + ); + } + + self.run(|this, cmd| { + cmd.arg("generate") + .arg("--image-dir") + .arg(&this.image_dir) + .arg(format!("--component-name={}", &component_name)); + + if let Some((dir, dirs)) = this.bulk_dirs.split_first() { + let mut arg = dir.as_os_str().to_os_string(); + for dir in dirs { + arg.push(","); + arg.push(dir); + } + cmd.arg("--bulk-dirs").arg(&arg); + } + + this.non_bare_args(cmd); + }) + } + + pub(crate) fn combine(self, tarballs: &[GeneratedTarball]) -> GeneratedTarball { + let mut input_tarballs = tarballs[0].path.as_os_str().to_os_string(); + for tarball in &tarballs[1..] { + input_tarballs.push(","); + input_tarballs.push(&tarball.path); + } + + self.run(|this, cmd| { + cmd.arg("combine").arg("--input-tarballs").arg(input_tarballs); + this.non_bare_args(cmd); + }) + } + + pub(crate) fn bare(self) -> GeneratedTarball { + // Bare tarballs should have the top level directory match the package + // name, not "image". We rename the image directory just before passing + // into rust-installer. 
+ let dest = self.temp_dir.join(self.package_name()); + t!(std::fs::rename(&self.image_dir, &dest)); + + self.run(|this, cmd| { + let distdir = distdir(this.builder); + t!(std::fs::create_dir_all(&distdir)); + cmd.arg("tarball") + .arg("--input") + .arg(&dest) + .arg("--output") + .arg(distdir.join(this.package_name())); + }) + } + + fn package_name(&self) -> String { + if let Some(target) = &self.target { + format!("{}-{}", self.pkgname, target) + } else { + self.pkgname.clone() + } + } + + fn non_bare_args(&self, cmd: &mut Command) { + cmd.arg("--rel-manifest-dir=rustlib") + .arg("--legacy-manifest-dirs=rustlib,cargo") + .arg(format!("--product-name={}", self.product_name)) + .arg(format!("--success-message={} installed.", self.component)) + .arg(format!("--package-name={}", self.package_name())) + .arg("--non-installed-overlay") + .arg(&self.overlay_dir) + .arg("--output-dir") + .arg(distdir(self.builder)); + } + + fn run(self, build_cli: impl FnOnce(&Tarball<'a>, &mut Command)) -> GeneratedTarball { + t!(std::fs::create_dir_all(&self.overlay_dir)); + self.builder.create(&self.overlay_dir.join("version"), &self.overlay.version(self.builder)); + if let Some(info) = self.builder.rust_info().info() { + channel::write_commit_hash_file(&self.overlay_dir, &info.sha); + channel::write_commit_info_file(&self.overlay_dir, info); + } + for file in self.overlay.legal_and_readme() { + self.builder.install(&self.builder.src.join(file), &self.overlay_dir, 0o644); + } + + let mut cmd = self.builder.tool_cmd(crate::core::build_steps::tool::Tool::RustInstaller); + + let package_name = self.package_name(); + self.builder.info(&format!("Dist {package_name}")); + let _time = crate::utils::helpers::timeit(self.builder); + + build_cli(&self, &mut cmd); + cmd.arg("--work-dir").arg(&self.temp_dir); + if let Some(formats) = &self.builder.config.dist_compression_formats { + assert!(!formats.is_empty(), "dist.compression-formats can't be empty"); + cmd.arg("--compression-formats").arg(formats.join(",")); + } + cmd.args(&["--compression-profile", &self.builder.config.dist_compression_profile]); + self.builder.run(&mut cmd); + + // Ensure there are no symbolic links in the tarball. In particular, + // rustup-toolchain-install-master and most versions of Windows can't handle symbolic links. + let decompressed_output = self.temp_dir.join(&package_name); + if !self.builder.config.dry_run() && !self.permit_symlinks { + for entry in walkdir::WalkDir::new(&decompressed_output) { + let entry = t!(entry); + if entry.path_is_symlink() { + panic!("generated a symlink in a tarball: {}", entry.path().display()); + } + } + } + + // Use either the first compression format defined, or "gz" as the default. 
+ let ext = self + .builder + .config + .dist_compression_formats + .as_ref() + .and_then(|formats| formats.get(0)) + .map(|s| s.as_str()) + .unwrap_or("gz"); + + GeneratedTarball { + path: distdir(self.builder).join(format!("{package_name}.tar.{ext}")), + decompressed_output, + work: self.temp_dir, + } + } +} + +#[derive(Debug, Clone)] +pub struct GeneratedTarball { + path: PathBuf, + decompressed_output: PathBuf, + work: PathBuf, +} + +impl GeneratedTarball { + pub(crate) fn tarball(&self) -> &Path { + &self.path + } + + pub(crate) fn decompressed_output(&self) -> &Path { + &self.decompressed_output + } + + pub(crate) fn work_dir(&self) -> &Path { + &self.work + } +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/suggest.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/suggest.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/suggest.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/suggest.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,74 +0,0 @@ -#![cfg_attr(feature = "build-metrics", allow(unused))] - -use std::str::FromStr; - -use std::path::PathBuf; - -use clap::Parser; - -use crate::{builder::Builder, tool::Tool}; - -/// Suggests a list of possible `x.py` commands to run based on modified files in branch. -pub fn suggest(builder: &Builder<'_>, run: bool) { - let suggestions = - builder.tool_cmd(Tool::SuggestTests).output().expect("failed to run `suggest-tests` tool"); - - if !suggestions.status.success() { - println!("failed to run `suggest-tests` tool ({})", suggestions.status); - println!( - "`suggest_tests` stdout:\n{}`suggest_tests` stderr:\n{}", - String::from_utf8(suggestions.stdout).unwrap(), - String::from_utf8(suggestions.stderr).unwrap() - ); - panic!("failed to run `suggest-tests`"); - } - - let suggestions = String::from_utf8(suggestions.stdout).unwrap(); - let suggestions = suggestions - .lines() - .map(|line| { - let mut sections = line.split_ascii_whitespace(); - - // this code expects one suggestion per line in the following format: - // {some number of flags} [optional stage number] - let cmd = sections.next().unwrap(); - let stage = sections.next_back().map(|s| str::parse(s).ok()).flatten(); - let paths: Vec = sections.map(|p| PathBuf::from_str(p).unwrap()).collect(); - - (cmd, stage, paths) - }) - .collect::>(); - - if !suggestions.is_empty() { - println!("==== SUGGESTIONS ===="); - for sug in &suggestions { - print!("x {} ", sug.0); - if let Some(stage) = sug.1 { - print!("--stage {stage} "); - } - - for path in &sug.2 { - print!("{} ", path.display()); - } - println!(); - } - println!("====================="); - } else { - println!("No suggestions found!"); - return; - } - - if run { - for sug in suggestions { - let mut build: crate::Build = builder.build.clone(); - build.config.paths = sug.2; - build.config.cmd = crate::flags::Flags::parse_from(["x.py", sug.0]).cmd; - if let Some(stage) = sug.1 { - build.config.stage = stage; - } - build.build(); - } - } else { - println!("help: consider using the `--run` flag to automatically run suggested tests"); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/synthetic_targets.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/synthetic_targets.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/synthetic_targets.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/synthetic_targets.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,82 +0,0 @@ -//! 
In some cases, parts of bootstrap need to change part of a target spec just for one or a few -//! steps. Adding these targets to rustc proper would "leak" this implementation detail of -//! bootstrap, and would make it more complex to apply additional changes if the need arises. -//! -//! To address that problem, this module implements support for "synthetic targets". Synthetic -//! targets are custom target specs generated using builtin target specs as their base. You can use -//! one of the target specs already defined in this module, or create new ones by adding a new step -//! that calls create_synthetic_target. - -use crate::builder::{Builder, ShouldRun, Step}; -use crate::config::TargetSelection; -use crate::Compiler; -use std::process::{Command, Stdio}; - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub(crate) struct MirOptPanicAbortSyntheticTarget { - pub(crate) compiler: Compiler, - pub(crate) base: TargetSelection, -} - -impl Step for MirOptPanicAbortSyntheticTarget { - type Output = TargetSelection; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = false; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() - } - - fn run(self, builder: &Builder<'_>) -> Self::Output { - create_synthetic_target(builder, self.compiler, "miropt-abort", self.base, |spec| { - spec.insert("panic-strategy".into(), "abort".into()); - }) - } -} - -fn create_synthetic_target( - builder: &Builder<'_>, - compiler: Compiler, - suffix: &str, - base: TargetSelection, - customize: impl FnOnce(&mut serde_json::Map), -) -> TargetSelection { - if base.contains("synthetic") { - // This check is not strictly needed, but nothing currently needs recursive synthetic - // targets. If the need arises, removing this in the future *SHOULD* be safe. - panic!("cannot create synthetic targets with other synthetic targets as their base"); - } - - let name = format!("{base}-synthetic-{suffix}"); - let path = builder.out.join("synthetic-target-specs").join(format!("{name}.json")); - std::fs::create_dir_all(path.parent().unwrap()).unwrap(); - - if builder.config.dry_run() { - std::fs::write(&path, b"dry run\n").unwrap(); - return TargetSelection::create_synthetic(&name, path.to_str().unwrap()); - } - - let mut cmd = Command::new(builder.rustc(compiler)); - cmd.arg("--target").arg(base.rustc_target_arg()); - cmd.args(["-Zunstable-options", "--print", "target-spec-json"]); - cmd.stdout(Stdio::piped()); - - let output = cmd.spawn().unwrap().wait_with_output().unwrap(); - if !output.status.success() { - panic!("failed to gather the target spec for {base}"); - } - - let mut spec: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap(); - let spec_map = spec.as_object_mut().unwrap(); - - // The `is-builtin` attribute of a spec needs to be removed, otherwise rustc will complain. 
- spec_map.remove("is-builtin"); - - customize(spec_map); - - std::fs::write(&path, &serde_json::to_vec_pretty(&spec).unwrap()).unwrap(); - let target = TargetSelection::create_synthetic(&name, path.to_str().unwrap()); - crate::cc_detect::find_target(builder, target); - - target -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/tarball.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/tarball.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/tarball.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/tarball.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,373 +0,0 @@ -use std::{ - path::{Path, PathBuf}, - process::Command, -}; - -use crate::builder::Builder; -use crate::channel; -use crate::util::t; - -#[derive(Copy, Clone)] -pub(crate) enum OverlayKind { - Rust, - LLVM, - Cargo, - Clippy, - Miri, - Rustfmt, - RustDemangler, - RLS, - RustAnalyzer, -} - -impl OverlayKind { - fn legal_and_readme(&self) -> &[&str] { - match self { - OverlayKind::Rust => &["COPYRIGHT", "LICENSE-APACHE", "LICENSE-MIT", "README.md"], - OverlayKind::LLVM => { - &["src/llvm-project/llvm/LICENSE.TXT", "src/llvm-project/llvm/README.txt"] - } - OverlayKind::Cargo => &[ - "src/tools/cargo/README.md", - "src/tools/cargo/LICENSE-MIT", - "src/tools/cargo/LICENSE-APACHE", - "src/tools/cargo/LICENSE-THIRD-PARTY", - ], - OverlayKind::Clippy => &[ - "src/tools/clippy/README.md", - "src/tools/clippy/LICENSE-APACHE", - "src/tools/clippy/LICENSE-MIT", - ], - OverlayKind::Miri => &[ - "src/tools/miri/README.md", - "src/tools/miri/LICENSE-APACHE", - "src/tools/miri/LICENSE-MIT", - ], - OverlayKind::Rustfmt => &[ - "src/tools/rustfmt/README.md", - "src/tools/rustfmt/LICENSE-APACHE", - "src/tools/rustfmt/LICENSE-MIT", - ], - OverlayKind::RustDemangler => { - &["src/tools/rust-demangler/README.md", "LICENSE-APACHE", "LICENSE-MIT"] - } - OverlayKind::RLS => &["src/tools/rls/README.md", "LICENSE-APACHE", "LICENSE-MIT"], - OverlayKind::RustAnalyzer => &[ - "src/tools/rust-analyzer/README.md", - "src/tools/rust-analyzer/LICENSE-APACHE", - "src/tools/rust-analyzer/LICENSE-MIT", - ], - } - } - - fn version(&self, builder: &Builder<'_>) -> String { - match self { - OverlayKind::Rust => builder.rust_version(), - OverlayKind::LLVM => builder.rust_version(), - OverlayKind::RustDemangler => builder.release_num("rust-demangler"), - OverlayKind::Cargo => { - builder.cargo_info.version(builder, &builder.release_num("cargo")) - } - OverlayKind::Clippy => { - builder.clippy_info.version(builder, &builder.release_num("clippy")) - } - OverlayKind::Miri => builder.miri_info.version(builder, &builder.release_num("miri")), - OverlayKind::Rustfmt => { - builder.rustfmt_info.version(builder, &builder.release_num("rustfmt")) - } - OverlayKind::RLS => builder.release(&builder.release_num("rls")), - OverlayKind::RustAnalyzer => builder - .rust_analyzer_info - .version(builder, &builder.release_num("rust-analyzer/crates/rust-analyzer")), - } - } -} - -pub(crate) struct Tarball<'a> { - builder: &'a Builder<'a>, - - pkgname: String, - component: String, - target: Option, - product_name: String, - overlay: OverlayKind, - - temp_dir: PathBuf, - image_dir: PathBuf, - overlay_dir: PathBuf, - bulk_dirs: Vec, - - include_target_in_component_name: bool, - is_preview: bool, - permit_symlinks: bool, -} - -impl<'a> Tarball<'a> { - pub(crate) fn new(builder: &'a Builder<'a>, component: &str, target: &str) -> Self { - Self::new_inner(builder, component, Some(target.into())) - } - - pub(crate) fn new_targetless(builder: &'a 
Builder<'a>, component: &str) -> Self { - Self::new_inner(builder, component, None) - } - - fn new_inner(builder: &'a Builder<'a>, component: &str, target: Option) -> Self { - let pkgname = crate::dist::pkgname(builder, component); - - let mut temp_dir = builder.out.join("tmp").join("tarball").join(component); - if let Some(target) = &target { - temp_dir = temp_dir.join(target); - } - let _ = std::fs::remove_dir_all(&temp_dir); - - let image_dir = temp_dir.join("image"); - let overlay_dir = temp_dir.join("overlay"); - - Self { - builder, - - pkgname, - component: component.into(), - target, - product_name: "Rust".into(), - overlay: OverlayKind::Rust, - - temp_dir, - image_dir, - overlay_dir, - bulk_dirs: Vec::new(), - - include_target_in_component_name: false, - is_preview: false, - permit_symlinks: false, - } - } - - pub(crate) fn set_overlay(&mut self, overlay: OverlayKind) { - self.overlay = overlay; - } - - pub(crate) fn set_product_name(&mut self, name: &str) { - self.product_name = name.into(); - } - - pub(crate) fn include_target_in_component_name(&mut self, include: bool) { - self.include_target_in_component_name = include; - } - - pub(crate) fn is_preview(&mut self, is: bool) { - self.is_preview = is; - } - - pub(crate) fn permit_symlinks(&mut self, flag: bool) { - self.permit_symlinks = flag; - } - - pub(crate) fn image_dir(&self) -> &Path { - t!(std::fs::create_dir_all(&self.image_dir)); - &self.image_dir - } - - pub(crate) fn add_file(&self, src: impl AsRef, destdir: impl AsRef, perms: u32) { - // create_dir_all fails to create `foo/bar/.`, so when the destination is "." this simply - // uses the base directory as the destination directory. - let destdir = if destdir.as_ref() == Path::new(".") { - self.image_dir.clone() - } else { - self.image_dir.join(destdir.as_ref()) - }; - - t!(std::fs::create_dir_all(&destdir)); - self.builder.install(src.as_ref(), &destdir, perms); - } - - pub(crate) fn add_renamed_file( - &self, - src: impl AsRef, - destdir: impl AsRef, - new_name: &str, - ) { - let destdir = self.image_dir.join(destdir.as_ref()); - t!(std::fs::create_dir_all(&destdir)); - self.builder.copy(src.as_ref(), &destdir.join(new_name)); - } - - pub(crate) fn add_legal_and_readme_to(&self, destdir: impl AsRef) { - for file in self.overlay.legal_and_readme() { - self.add_file(self.builder.src.join(file), destdir.as_ref(), 0o644); - } - } - - pub(crate) fn add_dir(&self, src: impl AsRef, dest: impl AsRef) { - let dest = self.image_dir.join(dest.as_ref()); - - t!(std::fs::create_dir_all(&dest)); - self.builder.cp_r(src.as_ref(), &dest); - } - - pub(crate) fn add_bulk_dir(&mut self, src: impl AsRef, dest: impl AsRef) { - self.bulk_dirs.push(dest.as_ref().to_path_buf()); - self.add_dir(src, dest); - } - - pub(crate) fn generate(self) -> GeneratedTarball { - let mut component_name = self.component.clone(); - if self.is_preview { - component_name.push_str("-preview"); - } - if self.include_target_in_component_name { - component_name.push('-'); - component_name.push_str( - &self - .target - .as_ref() - .expect("include_target_in_component_name used in a targetless tarball"), - ); - } - - self.run(|this, cmd| { - cmd.arg("generate") - .arg("--image-dir") - .arg(&this.image_dir) - .arg(format!("--component-name={}", &component_name)); - - if let Some((dir, dirs)) = this.bulk_dirs.split_first() { - let mut arg = dir.as_os_str().to_os_string(); - for dir in dirs { - arg.push(","); - arg.push(dir); - } - cmd.arg("--bulk-dirs").arg(&arg); - } - - this.non_bare_args(cmd); - }) - } - - 
pub(crate) fn combine(self, tarballs: &[GeneratedTarball]) -> GeneratedTarball { - let mut input_tarballs = tarballs[0].path.as_os_str().to_os_string(); - for tarball in &tarballs[1..] { - input_tarballs.push(","); - input_tarballs.push(&tarball.path); - } - - self.run(|this, cmd| { - cmd.arg("combine").arg("--input-tarballs").arg(input_tarballs); - this.non_bare_args(cmd); - }) - } - - pub(crate) fn bare(self) -> GeneratedTarball { - // Bare tarballs should have the top level directory match the package - // name, not "image". We rename the image directory just before passing - // into rust-installer. - let dest = self.temp_dir.join(self.package_name()); - t!(std::fs::rename(&self.image_dir, &dest)); - - self.run(|this, cmd| { - let distdir = crate::dist::distdir(this.builder); - t!(std::fs::create_dir_all(&distdir)); - cmd.arg("tarball") - .arg("--input") - .arg(&dest) - .arg("--output") - .arg(distdir.join(this.package_name())); - }) - } - - fn package_name(&self) -> String { - if let Some(target) = &self.target { - format!("{}-{}", self.pkgname, target) - } else { - self.pkgname.clone() - } - } - - fn non_bare_args(&self, cmd: &mut Command) { - cmd.arg("--rel-manifest-dir=rustlib") - .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg(format!("--product-name={}", self.product_name)) - .arg(format!("--success-message={} installed.", self.component)) - .arg(format!("--package-name={}", self.package_name())) - .arg("--non-installed-overlay") - .arg(&self.overlay_dir) - .arg("--output-dir") - .arg(crate::dist::distdir(self.builder)); - } - - fn run(self, build_cli: impl FnOnce(&Tarball<'a>, &mut Command)) -> GeneratedTarball { - t!(std::fs::create_dir_all(&self.overlay_dir)); - self.builder.create(&self.overlay_dir.join("version"), &self.overlay.version(self.builder)); - if let Some(info) = self.builder.rust_info().info() { - channel::write_commit_hash_file(&self.overlay_dir, &info.sha); - channel::write_commit_info_file(&self.overlay_dir, info); - } - for file in self.overlay.legal_and_readme() { - self.builder.install(&self.builder.src.join(file), &self.overlay_dir, 0o644); - } - - let mut cmd = self.builder.tool_cmd(crate::tool::Tool::RustInstaller); - - let package_name = self.package_name(); - self.builder.info(&format!("Dist {package_name}")); - let _time = crate::util::timeit(self.builder); - - build_cli(&self, &mut cmd); - cmd.arg("--work-dir").arg(&self.temp_dir); - if let Some(formats) = &self.builder.config.dist_compression_formats { - assert!(!formats.is_empty(), "dist.compression-formats can't be empty"); - cmd.arg("--compression-formats").arg(formats.join(",")); - } - cmd.args(&["--compression-profile", &self.builder.config.dist_compression_profile]); - self.builder.run(&mut cmd); - - // Ensure there are no symbolic links in the tarball. In particular, - // rustup-toolchain-install-master and most versions of Windows can't handle symbolic links. - let decompressed_output = self.temp_dir.join(&package_name); - if !self.builder.config.dry_run() && !self.permit_symlinks { - for entry in walkdir::WalkDir::new(&decompressed_output) { - let entry = t!(entry); - if entry.path_is_symlink() { - panic!("generated a symlink in a tarball: {}", entry.path().display()); - } - } - } - - // Use either the first compression format defined, or "gz" as the default. 
- let ext = self - .builder - .config - .dist_compression_formats - .as_ref() - .and_then(|formats| formats.get(0)) - .map(|s| s.as_str()) - .unwrap_or("gz"); - - GeneratedTarball { - path: crate::dist::distdir(self.builder).join(format!("{package_name}.tar.{ext}")), - decompressed_output, - work: self.temp_dir, - } - } -} - -#[derive(Debug, Clone)] -pub struct GeneratedTarball { - path: PathBuf, - decompressed_output: PathBuf, - work: PathBuf, -} - -impl GeneratedTarball { - pub(crate) fn tarball(&self) -> &Path { - &self.path - } - - pub(crate) fn decompressed_output(&self) -> &Path { - &self.decompressed_output - } - - pub(crate) fn work_dir(&self) -> &Path { - &self.work - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/test.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/test.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/test.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/test.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3093 +0,0 @@ -//! Implementation of the test-related targets of the build system. -//! -//! This file implements the various regression test suites that we execute on -//! our CI. - -use std::env; -use std::ffi::OsStr; -use std::ffi::OsString; -use std::fs; -use std::iter; -use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio}; - -use clap_complete::shells; - -use crate::builder::crate_description; -use crate::builder::{Builder, Compiler, Kind, RunConfig, ShouldRun, Step}; -use crate::cache::Interned; -use crate::cache::INTERNER; -use crate::compile; -use crate::config::TargetSelection; -use crate::dist; -use crate::doc::DocumentationFormat; -use crate::flags::Subcommand; -use crate::llvm; -use crate::render_tests::add_flags_and_try_run_tests; -use crate::synthetic_targets::MirOptPanicAbortSyntheticTarget; -use crate::tool::{self, SourceType, Tool}; -use crate::toolstate::ToolState; -use crate::util::{self, add_link_lib_path, dylib_path, dylib_path_var, output, t, up_to_date}; -use crate::{envify, CLang, DocTests, GitRepo, Mode}; - -const ADB_TEST_DIR: &str = "/data/local/tmp/work"; - -// mir-opt tests have different variants depending on whether a target is 32bit or 64bit, and -// blessing them requires blessing with each target. To aid developers, when blessing the mir-opt -// test suite the corresponding target of the opposite pointer size is also blessed. -// -// This array serves as the known mappings between 32bit and 64bit targets. If you're developing on -// a target where a target with the opposite pointer size exists, feel free to add it here. -const MIR_OPT_BLESS_TARGET_MAPPING: &[(&str, &str)] = &[ - // (32bit, 64bit) - ("i686-unknown-linux-gnu", "x86_64-unknown-linux-gnu"), - ("i686-unknown-linux-musl", "x86_64-unknown-linux-musl"), - ("i686-pc-windows-msvc", "x86_64-pc-windows-msvc"), - ("i686-pc-windows-gnu", "x86_64-pc-windows-gnu"), - ("i686-apple-darwin", "x86_64-apple-darwin"), - // ARM Macs don't have a corresponding 32-bit target that they can (easily) - // build for, so there is no entry for "aarch64-apple-darwin" here. 
-]; - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct CrateBootstrap { - path: Interned, - host: TargetSelection, -} - -impl Step for CrateBootstrap { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/jsondoclint") - .path("src/tools/suggest-tests") - .path("src/tools/replace-version-placeholder") - .alias("tidyselftest") - } - - fn make_run(run: RunConfig<'_>) { - for path in run.paths { - let path = INTERNER.intern_path(path.assert_single_path().path.clone()); - run.builder.ensure(CrateBootstrap { host: run.target, path }); - } - } - - fn run(self, builder: &Builder<'_>) { - let bootstrap_host = builder.config.build; - let compiler = builder.compiler(0, bootstrap_host); - let mut path = self.path.to_str().unwrap(); - if path == "tidyselftest" { - path = "src/tools/tidy"; - } - - let cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolBootstrap, - bootstrap_host, - "test", - path, - SourceType::InTree, - &[], - ); - let crate_name = path.rsplit_once('/').unwrap().1; - run_cargo_test(cargo, &[], &[], crate_name, crate_name, compiler, bootstrap_host, builder); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Linkcheck { - host: TargetSelection, -} - -impl Step for Linkcheck { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = true; - - /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler. - /// - /// This tool in `src/tools` will verify the validity of all our links in the - /// documentation to ensure we don't have a bunch of dead ones. - fn run(self, builder: &Builder<'_>) { - let host = self.host; - let hosts = &builder.hosts; - let targets = &builder.targets; - - // if we have different hosts and targets, some things may be built for - // the host (e.g. rustc) and others for the target (e.g. std). The - // documentation built for each will contain broken links to - // docs built for the other platform (e.g. rustc linking to cargo) - if (hosts != targets) && !hosts.is_empty() && !targets.is_empty() { - panic!( - "Linkcheck currently does not support builds with different hosts and targets. -You can skip linkcheck with --skip src/tools/linkchecker" - ); - } - - builder.info(&format!("Linkcheck ({host})")); - - // Test the linkchecker itself. - let bootstrap_host = builder.config.build; - let compiler = builder.compiler(0, bootstrap_host); - - let cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolBootstrap, - bootstrap_host, - "test", - "src/tools/linkchecker", - SourceType::InTree, - &[], - ); - run_cargo_test( - cargo, - &[], - &[], - "linkchecker", - "linkchecker self tests", - compiler, - bootstrap_host, - builder, - ); - - if builder.doc_tests == DocTests::No { - return; - } - - // Build all the default documentation. - builder.default_doc(&[]); - - // Build the linkchecker before calling `msg`, since GHA doesn't support nested groups. - let mut linkchecker = builder.tool_cmd(Tool::Linkchecker); - - // Run the linkchecker. 
- let _guard = - builder.msg(Kind::Test, compiler.stage, "Linkcheck", bootstrap_host, bootstrap_host); - let _time = util::timeit(&builder); - builder.run_delaying_failure(linkchecker.arg(builder.out.join(host.triple).join("doc"))); - } - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - let run = run.path("src/tools/linkchecker"); - run.default_condition(builder.config.docs) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Linkcheck { host: run.target }); - } -} - -fn check_if_tidy_is_installed() -> bool { - Command::new("tidy") - .arg("--version") - .stdout(Stdio::null()) - .status() - .map_or(false, |status| status.success()) -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct HtmlCheck { - target: TargetSelection, -} - -impl Step for HtmlCheck { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let run = run.path("src/tools/html-checker"); - run.lazy_default_condition(Box::new(check_if_tidy_is_installed)) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(HtmlCheck { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - if !check_if_tidy_is_installed() { - eprintln!("not running HTML-check tool because `tidy` is missing"); - eprintln!( - "Note that `tidy` is not the in-tree `src/tools/tidy` but needs to be installed" - ); - panic!("Cannot run html-check tests"); - } - // Ensure that a few different kinds of documentation are available. - builder.default_doc(&[]); - builder.ensure(crate::doc::Rustc::new(builder.top_stage, self.target, builder)); - - builder.run_delaying_failure( - builder.tool_cmd(Tool::HtmlChecker).arg(builder.doc_out(self.target)), - ); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Cargotest { - stage: u32, - host: TargetSelection, -} - -impl Step for Cargotest { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/cargotest") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Cargotest { stage: run.builder.top_stage, host: run.target }); - } - - /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler. - /// - /// This tool in `src/tools` will check out a few Rust projects and run `cargo - /// test` to ensure that we don't regress the test suites there. - fn run(self, builder: &Builder<'_>) { - let compiler = builder.compiler(self.stage, self.host); - builder.ensure(compile::Rustc::new(compiler, compiler.host)); - let cargo = builder.ensure(tool::Cargo { compiler, target: compiler.host }); - - // Note that this is a short, cryptic, and not scoped directory name. This - // is currently to minimize the length of path on Windows where we otherwise - // quickly run into path name limit constraints. 
- let out_dir = builder.out.join("ct"); - t!(fs::create_dir_all(&out_dir)); - - let _time = util::timeit(&builder); - let mut cmd = builder.tool_cmd(Tool::CargoTest); - builder.run_delaying_failure( - cmd.arg(&cargo) - .arg(&out_dir) - .args(builder.config.test_args()) - .env("RUSTC", builder.rustc(compiler)) - .env("RUSTDOC", builder.rustdoc(compiler)), - ); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Cargo { - stage: u32, - host: TargetSelection, -} - -impl Step for Cargo { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/cargo") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Cargo { stage: run.builder.top_stage, host: run.target }); - } - - /// Runs `cargo test` for `cargo` packaged with Rust. - fn run(self, builder: &Builder<'_>) { - let compiler = builder.compiler(self.stage, self.host); - - builder.ensure(tool::Cargo { compiler, target: self.host }); - let cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - self.host, - "test", - "src/tools/cargo", - SourceType::Submodule, - &[], - ); - - // NOTE: can't use `run_cargo_test` because we need to overwrite `PATH` - let mut cargo = prepare_cargo_test(cargo, &[], &[], "cargo", compiler, self.host, builder); - - // Don't run cross-compile tests, we may not have cross-compiled libstd libs - // available. - cargo.env("CFG_DISABLE_CROSS_TESTS", "1"); - // Forcibly disable tests using nightly features since any changes to - // those features won't be able to land. - cargo.env("CARGO_TEST_DISABLE_NIGHTLY", "1"); - cargo.env("PATH", &path_for_cargo(builder, compiler)); - - #[cfg(feature = "build-metrics")] - builder.metrics.begin_test_suite( - build_helper::metrics::TestSuiteMetadata::CargoPackage { - crates: vec!["cargo".into()], - target: self.host.triple.to_string(), - host: self.host.triple.to_string(), - stage: self.stage, - }, - builder, - ); - - let _time = util::timeit(&builder); - add_flags_and_try_run_tests(builder, &mut cargo); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct RustAnalyzer { - stage: u32, - host: TargetSelection, -} - -impl Step for RustAnalyzer { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/rust-analyzer") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Self { stage: run.builder.top_stage, host: run.target }); - } - - /// Runs `cargo test` for rust-analyzer - fn run(self, builder: &Builder<'_>) { - let stage = self.stage; - let host = self.host; - let compiler = builder.compiler(stage, host); - - // We don't need to build the whole Rust Analyzer for the proc-macro-srv test suite, - // but we do need the standard library to be present. - builder.ensure(compile::Std::new(compiler, host)); - - let workspace_path = "src/tools/rust-analyzer"; - // until the whole RA test suite runs on `i686`, we only run - // `proc-macro-srv` tests - let crate_path = "src/tools/rust-analyzer/crates/proc-macro-srv"; - let mut cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolStd, - host, - "test", - crate_path, - SourceType::InTree, - &["sysroot-abi".to_owned()], - ); - cargo.allow_features(tool::RustAnalyzer::ALLOW_FEATURES); - - let dir = builder.src.join(workspace_path); - // needed by rust-analyzer to find its own text fixtures, cf. 
- // https://github.com/rust-analyzer/expect-test/issues/33 - cargo.env("CARGO_WORKSPACE_DIR", &dir); - - // RA's test suite tries to write to the source directory, that can't - // work in Rust CI - cargo.env("SKIP_SLOW_TESTS", "1"); - - cargo.add_rustc_lib_path(builder, compiler); - run_cargo_test(cargo, &[], &[], "rust-analyzer", "rust-analyzer", compiler, host, builder); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Rustfmt { - stage: u32, - host: TargetSelection, -} - -impl Step for Rustfmt { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/rustfmt") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Rustfmt { stage: run.builder.top_stage, host: run.target }); - } - - /// Runs `cargo test` for rustfmt. - fn run(self, builder: &Builder<'_>) { - let stage = self.stage; - let host = self.host; - let compiler = builder.compiler(stage, host); - - builder - .ensure(tool::Rustfmt { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); - - let mut cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - host, - "test", - "src/tools/rustfmt", - SourceType::InTree, - &[], - ); - - let dir = testdir(builder, compiler.host); - t!(fs::create_dir_all(&dir)); - cargo.env("RUSTFMT_TEST_DIR", dir); - - cargo.add_rustc_lib_path(builder, compiler); - - run_cargo_test(cargo, &[], &[], "rustfmt", "rustfmt", compiler, host, builder); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct RustDemangler { - stage: u32, - host: TargetSelection, -} - -impl Step for RustDemangler { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/rust-demangler") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustDemangler { stage: run.builder.top_stage, host: run.target }); - } - - /// Runs `cargo test` for rust-demangler. - fn run(self, builder: &Builder<'_>) { - let stage = self.stage; - let host = self.host; - let compiler = builder.compiler(stage, host); - - let rust_demangler = builder - .ensure(tool::RustDemangler { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); - let mut cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - host, - "test", - "src/tools/rust-demangler", - SourceType::InTree, - &[], - ); - - let dir = testdir(builder, compiler.host); - t!(fs::create_dir_all(&dir)); - - cargo.env("RUST_DEMANGLER_DRIVER_PATH", rust_demangler); - cargo.add_rustc_lib_path(builder, compiler); - - run_cargo_test( - cargo, - &[], - &[], - "rust-demangler", - "rust-demangler", - compiler, - host, - builder, - ); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Miri { - stage: u32, - host: TargetSelection, - target: TargetSelection, -} - -impl Miri { - /// Run `cargo miri setup` for the given target, return where the Miri sysroot was put. 
- pub fn build_miri_sysroot( - builder: &Builder<'_>, - compiler: Compiler, - miri: &Path, - target: TargetSelection, - ) -> String { - let miri_sysroot = builder.out.join(compiler.host.triple).join("miri-sysroot"); - let mut cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - compiler.host, - "run", - "src/tools/miri/cargo-miri", - SourceType::InTree, - &[], - ); - cargo.add_rustc_lib_path(builder, compiler); - cargo.arg("--").arg("miri").arg("setup"); - cargo.arg("--target").arg(target.rustc_target_arg()); - - // Tell `cargo miri setup` where to find the sources. - cargo.env("MIRI_LIB_SRC", builder.src.join("library")); - // Tell it where to find Miri. - cargo.env("MIRI", &miri); - // Tell it where to put the sysroot. - cargo.env("MIRI_SYSROOT", &miri_sysroot); - // Debug things. - cargo.env("RUST_BACKTRACE", "1"); - - let mut cargo = Command::from(cargo); - let _guard = builder.msg( - Kind::Build, - compiler.stage + 1, - "miri sysroot", - compiler.host, - compiler.host, - ); - builder.run(&mut cargo); - - // # Determine where Miri put its sysroot. - // To this end, we run `cargo miri setup --print-sysroot` and capture the output. - // (We do this separately from the above so that when the setup actually - // happens we get some output.) - // We re-use the `cargo` from above. - cargo.arg("--print-sysroot"); - - // FIXME: Is there a way in which we can re-use the usual `run` helpers? - if builder.config.dry_run() { - String::new() - } else { - builder.verbose(&format!("running: {cargo:?}")); - let out = - cargo.output().expect("We already ran `cargo miri setup` before and that worked"); - assert!(out.status.success(), "`cargo miri setup` returned with non-0 exit code"); - // Output is "\n". - let stdout = String::from_utf8(out.stdout) - .expect("`cargo miri setup` stdout is not valid UTF-8"); - let sysroot = stdout.trim_end(); - builder.verbose(&format!("`cargo miri setup --print-sysroot` said: {sysroot:?}")); - sysroot.to_owned() - } - } -} - -impl Step for Miri { - type Output = (); - const ONLY_HOSTS: bool = false; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/miri") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Miri { - stage: run.builder.top_stage, - host: run.build_triple(), - target: run.target, - }); - } - - /// Runs `cargo test` for miri. - fn run(self, builder: &Builder<'_>) { - let stage = self.stage; - let host = self.host; - let target = self.target; - let compiler = builder.compiler(stage, host); - // We need the stdlib for the *next* stage, as it was built with this compiler that also built Miri. - // Except if we are at stage 2, the bootstrap loop is complete and we can stick with our current stage. - let compiler_std = builder.compiler(if stage < 2 { stage + 1 } else { stage }, host); - - let miri = builder - .ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); - let _cargo_miri = builder - .ensure(tool::CargoMiri { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); - // The stdlib we need might be at a different stage. And just asking for the - // sysroot does not seem to populate it, so we do that first. - builder.ensure(compile::Std::new(compiler_std, host)); - let sysroot = builder.sysroot(compiler_std); - // We also need a Miri sysroot. - let miri_sysroot = Miri::build_miri_sysroot(builder, compiler, &miri, target); - - // # Run `cargo test`. 
- let mut cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - host, - "test", - "src/tools/miri", - SourceType::InTree, - &[], - ); - let _guard = builder.msg_sysroot_tool(Kind::Test, compiler.stage, "miri", host, host); - - cargo.add_rustc_lib_path(builder, compiler); - - // miri tests need to know about the stage sysroot - cargo.env("MIRI_SYSROOT", &miri_sysroot); - cargo.env("MIRI_HOST_SYSROOT", sysroot); - cargo.env("MIRI", &miri); - if builder.config.locked_deps { - // enforce lockfiles - cargo.env("CARGO_EXTRA_FLAGS", "--locked"); - } - - // Set the target. - cargo.env("MIRI_TEST_TARGET", target.rustc_target_arg()); - - // This can NOT be `run_cargo_test` since the Miri test runner - // does not understand the flags added by `add_flags_and_try_run_test`. - let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder); - { - let _time = util::timeit(&builder); - builder.run(&mut cargo); - } - - // Run it again for mir-opt-level 4 to catch some miscompilations. - if builder.config.test_args().is_empty() { - cargo.env("MIRIFLAGS", "-O -Zmir-opt-level=4 -Cdebug-assertions=yes"); - // Optimizations can change backtraces - cargo.env("MIRI_SKIP_UI_CHECKS", "1"); - // `MIRI_SKIP_UI_CHECKS` and `RUSTC_BLESS` are incompatible - cargo.env_remove("RUSTC_BLESS"); - // Optimizations can change error locations and remove UB so don't run `fail` tests. - cargo.args(&["tests/pass", "tests/panic"]); - - let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder); - { - let _time = util::timeit(&builder); - builder.run(&mut cargo); - } - } - - // # Run `cargo miri test`. - // This is just a smoke test (Miri's own CI invokes this in a bunch of different ways and ensures - // that we get the desired output), but that is sufficient to make sure that the libtest harness - // itself executes properly under Miri. - let mut cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - host, - "run", - "src/tools/miri/cargo-miri", - SourceType::Submodule, - &[], - ); - cargo.add_rustc_lib_path(builder, compiler); - cargo.arg("--").arg("miri").arg("test"); - if builder.config.locked_deps { - cargo.arg("--locked"); - } - cargo - .arg("--manifest-path") - .arg(builder.src.join("src/tools/miri/test-cargo-miri/Cargo.toml")); - cargo.arg("--target").arg(target.rustc_target_arg()); - cargo.arg("--tests"); // don't run doctests, they are too confused by the staging - cargo.arg("--").args(builder.config.test_args()); - - // Tell `cargo miri` where to find things. - cargo.env("MIRI_SYSROOT", &miri_sysroot); - cargo.env("MIRI_HOST_SYSROOT", sysroot); - cargo.env("MIRI", &miri); - // Debug things. - cargo.env("RUST_BACKTRACE", "1"); - - let mut cargo = Command::from(cargo); - { - let _time = util::timeit(&builder); - builder.run(&mut cargo); - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct CompiletestTest { - host: TargetSelection, -} - -impl Step for CompiletestTest { - type Output = (); - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/compiletest") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(CompiletestTest { host: run.target }); - } - - /// Runs `cargo test` for compiletest. - fn run(self, builder: &Builder<'_>) { - let host = self.host; - let compiler = builder.compiler(builder.top_stage, host); - - // We need `ToolStd` for the locally-built sysroot because - // compiletest uses unstable features of the `test` crate. 
- builder.ensure(compile::Std::new(compiler, host)); - let mut cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolStd, - host, - "test", - "src/tools/compiletest", - SourceType::InTree, - &[], - ); - cargo.allow_features("test"); - run_cargo_test( - cargo, - &[], - &[], - "compiletest", - "compiletest self test", - compiler, - host, - builder, - ); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Clippy { - stage: u32, - host: TargetSelection, -} - -impl Step for Clippy { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = false; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/clippy") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Clippy { stage: run.builder.top_stage, host: run.target }); - } - - /// Runs `cargo test` for clippy. - fn run(self, builder: &Builder<'_>) { - let stage = self.stage; - let host = self.host; - let compiler = builder.compiler(stage, host); - - builder - .ensure(tool::Clippy { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); - let mut cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - host, - "test", - "src/tools/clippy", - SourceType::InTree, - &[], - ); - - cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler)); - cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler)); - let host_libs = builder.stage_out(compiler, Mode::ToolRustc).join(builder.cargo_dir()); - cargo.env("HOST_LIBS", host_libs); - - cargo.add_rustc_lib_path(builder, compiler); - let mut cargo = prepare_cargo_test(cargo, &[], &[], "clippy", compiler, host, builder); - - let _guard = builder.msg_sysroot_tool(Kind::Test, compiler.stage, "clippy", host, host); - - #[allow(deprecated)] // Clippy reports errors if it blessed the outputs - if builder.config.try_run(&mut cargo).is_ok() { - // The tests succeeded; nothing to do. - return; - } - - if !builder.config.cmd.bless() { - crate::exit!(1); - } - } -} - -fn path_for_cargo(builder: &Builder<'_>, compiler: Compiler) -> OsString { - // Configure PATH to find the right rustc. NB. we have to use PATH - // and not RUSTC because the Cargo test suite has tests that will - // fail if rustc is not spelled `rustc`. 
- let path = builder.sysroot(compiler).join("bin"); - let old_path = env::var_os("PATH").unwrap_or_default(); - env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("") -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustdocTheme { - pub compiler: Compiler, -} - -impl Step for RustdocTheme { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/rustdoc-themes") - } - - fn make_run(run: RunConfig<'_>) { - let compiler = run.builder.compiler(run.builder.top_stage, run.target); - - run.builder.ensure(RustdocTheme { compiler }); - } - - fn run(self, builder: &Builder<'_>) { - let rustdoc = builder.bootstrap_out.join("rustdoc"); - let mut cmd = builder.tool_cmd(Tool::RustdocTheme); - cmd.arg(rustdoc.to_str().unwrap()) - .arg(builder.src.join("src/librustdoc/html/static/css/rustdoc.css").to_str().unwrap()) - .env("RUSTC_STAGE", self.compiler.stage.to_string()) - .env("RUSTC_SYSROOT", builder.sysroot(self.compiler)) - .env("RUSTDOC_LIBDIR", builder.sysroot_libdir(self.compiler, self.compiler.host)) - .env("CFG_RELEASE_CHANNEL", &builder.config.channel) - .env("RUSTDOC_REAL", builder.rustdoc(self.compiler)) - .env("RUSTC_BOOTSTRAP", "1"); - if let Some(linker) = builder.linker(self.compiler.host) { - cmd.env("RUSTDOC_LINKER", linker); - } - if builder.is_fuse_ld_lld(self.compiler.host) { - cmd.env( - "RUSTDOC_LLD_NO_THREADS", - util::lld_flag_no_threads(self.compiler.host.contains("windows")), - ); - } - builder.run_delaying_failure(&mut cmd); - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustdocJSStd { - pub target: TargetSelection, -} - -impl Step for RustdocJSStd { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = run.builder.config.nodejs.is_some(); - run.suite_path("tests/rustdoc-js-std").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustdocJSStd { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - let nodejs = - builder.config.nodejs.as_ref().expect("need nodejs to run rustdoc-js-std tests"); - let mut command = Command::new(nodejs); - command - .arg(builder.src.join("src/tools/rustdoc-js/tester.js")) - .arg("--crate-name") - .arg("std") - .arg("--resource-suffix") - .arg(&builder.version) - .arg("--doc-folder") - .arg(builder.doc_out(self.target)) - .arg("--test-folder") - .arg(builder.src.join("tests/rustdoc-js-std")); - for path in &builder.paths { - if let Some(p) = util::is_valid_test_suite_arg(path, "tests/rustdoc-js-std", builder) { - if !p.ends_with(".js") { - eprintln!("A non-js file was given: `{}`", path.display()); - panic!("Cannot run rustdoc-js-std tests"); - } - command.arg("--test-file").arg(path); - } - } - builder.ensure(crate::doc::Std::new( - builder.top_stage, - self.target, - builder, - DocumentationFormat::HTML, - )); - let _guard = builder.msg( - Kind::Test, - builder.top_stage, - "rustdoc-js-std", - builder.config.build, - self.target, - ); - builder.run(&mut command); - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustdocJSNotStd { - pub target: TargetSelection, - pub compiler: Compiler, -} - -impl Step for RustdocJSNotStd { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let default = 
run.builder.config.nodejs.is_some(); - run.suite_path("tests/rustdoc-js").default_condition(default) - } - - fn make_run(run: RunConfig<'_>) { - let compiler = run.builder.compiler(run.builder.top_stage, run.build_triple()); - run.builder.ensure(RustdocJSNotStd { target: run.target, compiler }); - } - - fn run(self, builder: &Builder<'_>) { - builder.ensure(Compiletest { - compiler: self.compiler, - target: self.target, - mode: "js-doc-test", - suite: "rustdoc-js", - path: "tests/rustdoc-js", - compare_mode: None, - }); - } -} - -fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option { - let mut command = Command::new(&npm); - command.arg("list").arg("--parseable").arg("--long").arg("--depth=0"); - if global { - command.arg("--global"); - } - let lines = command - .output() - .map(|output| String::from_utf8_lossy(&output.stdout).into_owned()) - .unwrap_or(String::new()); - lines - .lines() - .find_map(|l| l.split(':').nth(1)?.strip_prefix("browser-ui-test@")) - .map(|v| v.to_owned()) -} - -fn get_browser_ui_test_version(npm: &Path) -> Option { - get_browser_ui_test_version_inner(npm, false) - .or_else(|| get_browser_ui_test_version_inner(npm, true)) -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustdocGUI { - pub target: TargetSelection, - pub compiler: Compiler, -} - -impl Step for RustdocGUI { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - let run = run.suite_path("tests/rustdoc-gui"); - run.lazy_default_condition(Box::new(move || { - builder.config.nodejs.is_some() - && builder - .config - .npm - .as_ref() - .map(|p| get_browser_ui_test_version(p).is_some()) - .unwrap_or(false) - })) - } - - fn make_run(run: RunConfig<'_>) { - let compiler = run.builder.compiler(run.builder.top_stage, run.build_triple()); - run.builder.ensure(RustdocGUI { target: run.target, compiler }); - } - - fn run(self, builder: &Builder<'_>) { - builder.ensure(compile::Std::new(self.compiler, self.target)); - - let mut cmd = builder.tool_cmd(Tool::RustdocGUITest); - - let out_dir = builder.test_out(self.target).join("rustdoc-gui"); - builder.clear_if_dirty(&out_dir, &builder.rustdoc(self.compiler)); - - if let Some(src) = builder.config.src.to_str() { - cmd.arg("--rust-src").arg(src); - } - - if let Some(out_dir) = out_dir.to_str() { - cmd.arg("--out-dir").arg(out_dir); - } - - if let Some(initial_cargo) = builder.config.initial_cargo.to_str() { - cmd.arg("--initial-cargo").arg(initial_cargo); - } - - cmd.arg("--jobs").arg(builder.jobs().to_string()); - - cmd.env("RUSTDOC", builder.rustdoc(self.compiler)) - .env("RUSTC", builder.rustc(self.compiler)); - - for path in &builder.paths { - if let Some(p) = util::is_valid_test_suite_arg(path, "tests/rustdoc-gui", builder) { - if !p.ends_with(".goml") { - eprintln!("A non-goml file was given: `{}`", path.display()); - panic!("Cannot run rustdoc-gui tests"); - } - if let Some(name) = path.file_name().and_then(|f| f.to_str()) { - cmd.arg("--goml-file").arg(name); - } - } - } - - for test_arg in builder.config.test_args() { - cmd.arg("--test-arg").arg(test_arg); - } - - if let Some(ref nodejs) = builder.config.nodejs { - cmd.arg("--nodejs").arg(nodejs); - } - - if let Some(ref npm) = builder.config.npm { - cmd.arg("--npm").arg(npm); - } - - let _time = util::timeit(&builder); - let _guard = builder.msg_sysroot_tool( - Kind::Test, - self.compiler.stage, - "rustdoc-gui", - self.compiler.host, - self.target, - ); 
-        crate::render_tests::try_run_tests(builder, &mut cmd, true);
-    }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Tidy;
-
-impl Step for Tidy {
-    type Output = ();
-    const DEFAULT: bool = true;
-    const ONLY_HOSTS: bool = true;
-
-    /// Runs the `tidy` tool.
-    ///
-    /// This tool in `src/tools` checks up on various bits and pieces of style and
-    /// otherwise just implements a few lint-like checks that are specific to the
-    /// compiler itself.
-    ///
-    /// Once tidy passes, this step also runs `fmt --check` if tests are being run
-    /// for the `dev` or `nightly` channels.
-    fn run(self, builder: &Builder<'_>) {
-        let mut cmd = builder.tool_cmd(Tool::Tidy);
-        cmd.arg(&builder.src);
-        cmd.arg(&builder.initial_cargo);
-        cmd.arg(&builder.out);
-        // Tidy is heavily IO constrained. Still respect `-j`, but use a higher limit if `jobs` hasn't been configured.
-        let jobs = builder.config.jobs.unwrap_or_else(|| {
-            8 * std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32
-        });
-        cmd.arg(jobs.to_string());
-        if builder.is_verbose() {
-            cmd.arg("--verbose");
-        }
-        if builder.config.cmd.bless() {
-            cmd.arg("--bless");
-        }
-        if let Some(s) = builder.config.cmd.extra_checks() {
-            cmd.arg(format!("--extra-checks={s}"));
-        }
-        let mut args = std::env::args_os();
-        if let Some(_) = args.find(|arg| arg == OsStr::new("--")) {
-            cmd.arg("--");
-            cmd.args(args);
-        }
-
-        if builder.config.channel == "dev" || builder.config.channel == "nightly" {
-            builder.info("fmt check");
-            if builder.initial_rustfmt().is_none() {
-                let inferred_rustfmt_dir = builder.initial_rustc.parent().unwrap();
-                eprintln!(
-                    "\
-error: no `rustfmt` binary found in {PATH}
-info: `rust.channel` is currently set to \"{CHAN}\"
-help: if you are testing a beta branch, set `rust.channel` to \"beta\" in the `config.toml` file
-help: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to `x.py test`",
-                    PATH = inferred_rustfmt_dir.display(),
-                    CHAN = builder.config.channel,
-                );
-                crate::exit!(1);
-            }
-            crate::format::format(&builder, !builder.config.cmd.bless(), &[]);
-        }
-
-        builder.info("tidy check");
-        builder.run_delaying_failure(&mut cmd);
-
-        builder.ensure(ExpandYamlAnchors);
-
-        builder.info("x.py completions check");
-        let [bash, fish, powershell] = ["x.py.sh", "x.py.fish", "x.py.ps1"]
-            .map(|filename| builder.src.join("src/etc/completions").join(filename));
-        if builder.config.cmd.bless() {
-            builder.ensure(crate::run::GenerateCompletions);
-        } else if crate::flags::get_completion(shells::Bash, &bash).is_some()
-            || crate::flags::get_completion(shells::Fish, &fish).is_some()
-            || crate::flags::get_completion(shells::PowerShell, &powershell).is_some()
-        {
-            eprintln!(
-                "x.py completions were changed; run `x.py run generate-completions` to update them"
-            );
-            crate::exit!(1);
-        }
-    }
-
-    fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
-        run.path("src/tools/tidy")
-    }
-
-    fn make_run(run: RunConfig<'_>) {
-        run.builder.ensure(Tidy);
-    }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct ExpandYamlAnchors;
-
-impl Step for ExpandYamlAnchors {
-    type Output = ();
-    const ONLY_HOSTS: bool = true;
-
-    /// Ensure the `generate-ci-config` tool was run locally.
-    ///
-    /// The tool in `src/tools` reads the CI definition in `src/ci/builders.yml` and generates the
-    /// appropriate configuration for all our CI providers. This step ensures the tool was called
-    /// by the user before committing CI changes.
- fn run(self, builder: &Builder<'_>) { - // Note: `.github/` is not included in dist-src tarballs - if !builder.src.join(".github/workflows/ci.yml").exists() { - builder.info("Skipping YAML anchors check: GitHub Actions config not found"); - return; - } - builder.info("Ensuring the YAML anchors in the GitHub Actions config were expanded"); - builder.run_delaying_failure( - &mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src), - ); - } - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/expand-yaml-anchors") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(ExpandYamlAnchors); - } -} - -fn testdir(builder: &Builder<'_>, host: TargetSelection) -> PathBuf { - builder.out.join(host.triple).join("test") -} - -macro_rules! default_test { - ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => { - test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: false }); - }; -} - -macro_rules! default_test_with_compare_mode { - ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, - compare_mode: $compare_mode:expr }) => { - test_with_compare_mode!($name { - path: $path, - mode: $mode, - suite: $suite, - default: true, - host: false, - compare_mode: $compare_mode - }); - }; -} - -macro_rules! host_test { - ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => { - test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: true }); - }; -} - -macro_rules! test { - ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, default: $default:expr, - host: $host:expr }) => { - test_definitions!($name { - path: $path, - mode: $mode, - suite: $suite, - default: $default, - host: $host, - compare_mode: None - }); - }; -} - -macro_rules! test_with_compare_mode { - ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, default: $default:expr, - host: $host:expr, compare_mode: $compare_mode:expr }) => { - test_definitions!($name { - path: $path, - mode: $mode, - suite: $suite, - default: $default, - host: $host, - compare_mode: Some($compare_mode) - }); - }; -} - -macro_rules! 
test_definitions { - ($name:ident { - path: $path:expr, - mode: $mode:expr, - suite: $suite:expr, - default: $default:expr, - host: $host:expr, - compare_mode: $compare_mode:expr - }) => { - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] - pub struct $name { - pub compiler: Compiler, - pub target: TargetSelection, - } - - impl Step for $name { - type Output = (); - const DEFAULT: bool = $default; - const ONLY_HOSTS: bool = $host; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.suite_path($path) - } - - fn make_run(run: RunConfig<'_>) { - let compiler = run.builder.compiler(run.builder.top_stage, run.build_triple()); - - run.builder.ensure($name { compiler, target: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - builder.ensure(Compiletest { - compiler: self.compiler, - target: self.target, - mode: $mode, - suite: $suite, - path: $path, - compare_mode: $compare_mode, - }) - } - } - }; -} - -default_test!(Ui { path: "tests/ui", mode: "ui", suite: "ui" }); - -default_test!(RunPassValgrind { - path: "tests/run-pass-valgrind", - mode: "run-pass-valgrind", - suite: "run-pass-valgrind" -}); - -default_test!(Codegen { path: "tests/codegen", mode: "codegen", suite: "codegen" }); - -default_test!(CodegenUnits { - path: "tests/codegen-units", - mode: "codegen-units", - suite: "codegen-units" -}); - -default_test!(Incremental { path: "tests/incremental", mode: "incremental", suite: "incremental" }); - -default_test_with_compare_mode!(Debuginfo { - path: "tests/debuginfo", - mode: "debuginfo", - suite: "debuginfo", - compare_mode: "split-dwarf" -}); - -host_test!(UiFullDeps { path: "tests/ui-fulldeps", mode: "ui", suite: "ui-fulldeps" }); - -host_test!(Rustdoc { path: "tests/rustdoc", mode: "rustdoc", suite: "rustdoc" }); -host_test!(RustdocUi { path: "tests/rustdoc-ui", mode: "ui", suite: "rustdoc-ui" }); - -host_test!(RustdocJson { path: "tests/rustdoc-json", mode: "rustdoc-json", suite: "rustdoc-json" }); - -host_test!(Pretty { path: "tests/pretty", mode: "pretty", suite: "pretty" }); - -default_test!(RunMake { path: "tests/run-make", mode: "run-make", suite: "run-make" }); - -host_test!(RunMakeFullDeps { - path: "tests/run-make-fulldeps", - mode: "run-make", - suite: "run-make-fulldeps" -}); - -default_test!(Assembly { path: "tests/assembly", mode: "assembly", suite: "assembly" }); - -default_test!(CoverageMap { - path: "tests/coverage-map", - mode: "coverage-map", - suite: "coverage-map" -}); - -host_test!(RunCoverage { path: "tests/run-coverage", mode: "run-coverage", suite: "run-coverage" }); -host_test!(RunCoverageRustdoc { - path: "tests/run-coverage-rustdoc", - mode: "run-coverage", - suite: "run-coverage-rustdoc" -}); - -// For the mir-opt suite we do not use macros, as we need custom behavior when blessing. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub struct MirOpt {
-    pub compiler: Compiler,
-    pub target: TargetSelection,
-}
-
-impl Step for MirOpt {
-    type Output = ();
-    const DEFAULT: bool = true;
-    const ONLY_HOSTS: bool = false;
-
-    fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
-        run.suite_path("tests/mir-opt")
-    }
-
-    fn make_run(run: RunConfig<'_>) {
-        let compiler = run.builder.compiler(run.builder.top_stage, run.build_triple());
-        run.builder.ensure(MirOpt { compiler, target: run.target });
-    }
-
-    fn run(self, builder: &Builder<'_>) {
-        let run = |target| {
-            builder.ensure(Compiletest {
-                compiler: self.compiler,
-                target,
-                mode: "mir-opt",
-                suite: "mir-opt",
-                path: "tests/mir-opt",
-                compare_mode: None,
-            })
-        };
-
-        // We use custom logic to bless the mir-opt suite: mir-opt tests have multiple variants
-        // (32bit vs 64bit, and panic=abort vs panic=unwind), and all of them needs to be blessed.
-        // When blessing, we try best-effort to also bless the other variants, to aid developers.
-        if builder.config.cmd.bless() {
-            let targets = MIR_OPT_BLESS_TARGET_MAPPING
-                .iter()
-                .filter(|(target_32bit, target_64bit)| {
-                    *target_32bit == &*self.target.triple || *target_64bit == &*self.target.triple
-                })
-                .next()
-                .map(|(target_32bit, target_64bit)| {
-                    let target_32bit = TargetSelection::from_user(target_32bit);
-                    let target_64bit = TargetSelection::from_user(target_64bit);
-
-                    // Running compiletest requires a C compiler to be available, but it might not
-                    // have been detected by bootstrap if the target we're testing wasn't in the
-                    // --target flags.
-                    if !builder.cc.borrow().contains_key(&target_32bit) {
-                        crate::cc_detect::find_target(builder, target_32bit);
-                    }
-                    if !builder.cc.borrow().contains_key(&target_64bit) {
-                        crate::cc_detect::find_target(builder, target_64bit);
-                    }
-
-                    vec![target_32bit, target_64bit]
-                })
-                .unwrap_or_else(|| {
-                    eprintln!(
-                        "\
-Note that not all variants of mir-opt tests are going to be blessed, as no mapping between
-a 32bit and a 64bit target was found for {target}.
-You can add that mapping by changing MIR_OPT_BLESS_TARGET_MAPPING in src/bootstrap/test.rs",
-                        target = self.target,
-                    );
-                    vec![self.target]
-                });
-
-            for target in targets {
-                run(target);
-
-                let panic_abort_target = builder.ensure(MirOptPanicAbortSyntheticTarget {
-                    compiler: self.compiler,
-                    base: target,
-                });
-                run(panic_abort_target);
-            }
-        } else {
-            run(self.target);
-        }
-    }
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-struct Compiletest {
-    compiler: Compiler,
-    target: TargetSelection,
-    mode: &'static str,
-    suite: &'static str,
-    path: &'static str,
-    compare_mode: Option<&'static str>,
-}
-
-impl Step for Compiletest {
-    type Output = ();
-
-    fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
-        run.never()
-    }
-
-    /// Executes the `compiletest` tool to run a suite of tests.
-    ///
-    /// Compiles all tests with `compiler` for `target` with the specified
-    /// compiletest `mode` and `suite` arguments. For example `mode` can be
-    /// "run-pass" or `suite` can be something like `debuginfo`.
-    fn run(self, builder: &Builder<'_>) {
-        if builder.top_stage == 0 && env::var("COMPILETEST_FORCE_STAGE0").is_err() {
-            eprintln!("\
-error: `--stage 0` runs compiletest on the beta compiler, not your local changes, and will almost always cause tests to fail
-help: to test the compiler, use `--stage 1` instead
-help: to test the standard library, use `--stage 0 library/std` instead
-note: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `COMPILETEST_FORCE_STAGE0=1`."
-            );
-            crate::exit!(1);
-        }
-
-        let mut compiler = self.compiler;
-        let target = self.target;
-        let mode = self.mode;
-        let suite = self.suite;
-
-        // Path for test suite
-        let suite_path = self.path;
-
-        // Skip codegen tests if they aren't enabled in configuration.
-        if !builder.config.codegen_tests && suite == "codegen" {
-            return;
-        }
-
-        // Support stage 1 ui-fulldeps. This is somewhat complicated: ui-fulldeps tests for the most
-        // part test the *API* of the compiler, not how it compiles a given file. As a result, we
-        // can run them against the stage 1 sources as long as we build them with the stage 0
-        // bootstrap compiler.
-        // NOTE: Only stage 1 is special cased because we need the rustc_private artifacts to match the
-        // running compiler in stage 2 when plugins run.
-        let stage_id = if suite == "ui-fulldeps" && compiler.stage == 1 {
-            compiler = builder.compiler(compiler.stage - 1, target);
-            format!("stage{}-{}", compiler.stage + 1, target)
-        } else {
-            format!("stage{}-{}", compiler.stage, target)
-        };
-
-        if suite.ends_with("fulldeps") {
-            builder.ensure(compile::Rustc::new(compiler, target));
-        }
-
-        if suite == "debuginfo" {
-            builder
-                .ensure(dist::DebuggerScripts { sysroot: builder.sysroot(compiler), host: target });
-        }
-
-        builder.ensure(compile::Std::new(compiler, target));
-        // ensure that `libproc_macro` is available on the host.
-        builder.ensure(compile::Std::new(compiler, compiler.host));
-
-        // Also provide `rust_test_helpers` for the host.
-        builder.ensure(TestHelpers { target: compiler.host });
-
-        // As well as the target, except for plain wasm32, which can't build it
-        if !target.contains("wasm") || target.contains("emscripten") {
-            builder.ensure(TestHelpers { target });
-        }
-
-        builder.ensure(RemoteCopyLibs { compiler, target });
-
-        let mut cmd = builder.tool_cmd(Tool::Compiletest);
-
-        // compiletest currently has... a lot of arguments, so let's just pass all
-        // of them!
-
-        cmd.arg("--compile-lib-path").arg(builder.rustc_libdir(compiler));
-        cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target));
-        cmd.arg("--rustc-path").arg(builder.rustc(compiler));
-
-        let is_rustdoc = suite.ends_with("rustdoc-ui") || suite.ends_with("rustdoc-js");
-
-        // Avoid depending on rustdoc when we don't need it.
- if mode == "rustdoc" - || mode == "run-make" - || (mode == "ui" && is_rustdoc) - || mode == "js-doc-test" - || mode == "rustdoc-json" - || suite == "run-coverage-rustdoc" - { - cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler)); - } - - if mode == "rustdoc-json" { - // Use the beta compiler for jsondocck - let json_compiler = compiler.with_stage(0); - cmd.arg("--jsondocck-path") - .arg(builder.ensure(tool::JsonDocCk { compiler: json_compiler, target })); - cmd.arg("--jsondoclint-path") - .arg(builder.ensure(tool::JsonDocLint { compiler: json_compiler, target })); - } - - if mode == "coverage-map" { - let coverage_dump = builder.ensure(tool::CoverageDump { - compiler: compiler.with_stage(0), - target: compiler.host, - }); - cmd.arg("--coverage-dump-path").arg(coverage_dump); - } - - if mode == "run-make" || mode == "run-coverage" { - let rust_demangler = builder - .ensure(tool::RustDemangler { - compiler, - target: compiler.host, - extra_features: Vec::new(), - }) - .expect("in-tree tool"); - cmd.arg("--rust-demangler-path").arg(rust_demangler); - } - - cmd.arg("--src-base").arg(builder.src.join("tests").join(suite)); - cmd.arg("--build-base").arg(testdir(builder, compiler.host).join(suite)); - - // When top stage is 0, that means that we're testing an externally provided compiler. - // In that case we need to use its specific sysroot for tests to pass. - let sysroot = if builder.top_stage == 0 { - builder.initial_sysroot.clone() - } else { - builder.sysroot(compiler).to_path_buf() - }; - cmd.arg("--sysroot-base").arg(sysroot); - cmd.arg("--stage-id").arg(stage_id); - cmd.arg("--suite").arg(suite); - cmd.arg("--mode").arg(mode); - cmd.arg("--target").arg(target.rustc_target_arg()); - cmd.arg("--host").arg(&*compiler.host.triple); - cmd.arg("--llvm-filecheck").arg(builder.llvm_filecheck(builder.config.build)); - - if builder.config.cmd.bless() { - cmd.arg("--bless"); - } - - if builder.config.cmd.force_rerun() { - cmd.arg("--force-rerun"); - } - - let compare_mode = - builder.config.cmd.compare_mode().or_else(|| { - if builder.config.test_compare_mode { self.compare_mode } else { None } - }); - - if let Some(ref pass) = builder.config.cmd.pass() { - cmd.arg("--pass"); - cmd.arg(pass); - } - - if let Some(ref run) = builder.config.cmd.run() { - cmd.arg("--run"); - cmd.arg(run); - } - - if let Some(ref nodejs) = builder.config.nodejs { - cmd.arg("--nodejs").arg(nodejs); - } else if mode == "js-doc-test" { - panic!("need nodejs to run js-doc-test suite"); - } - if let Some(ref npm) = builder.config.npm { - cmd.arg("--npm").arg(npm); - } - if builder.config.rust_optimize_tests { - cmd.arg("--optimize-tests"); - } - if builder.config.cmd.only_modified() { - cmd.arg("--only-modified"); - } - - let mut flags = if is_rustdoc { Vec::new() } else { vec!["-Crpath".to_string()] }; - flags.push(format!("-Cdebuginfo={}", builder.config.rust_debuginfo_level_tests)); - flags.extend(builder.config.cmd.rustc_args().iter().map(|s| s.to_string())); - - if let Some(linker) = builder.linker(target) { - cmd.arg("--target-linker").arg(linker); - } - if let Some(linker) = builder.linker(compiler.host) { - cmd.arg("--host-linker").arg(linker); - } - - let mut hostflags = flags.clone(); - hostflags.push(format!("-Lnative={}", builder.test_helpers_out(compiler.host).display())); - hostflags.extend(builder.lld_flags(compiler.host)); - for flag in hostflags { - cmd.arg("--host-rustcflags").arg(flag); - } - - let mut targetflags = flags; - targetflags.push(format!("-Lnative={}", 
builder.test_helpers_out(target).display())); - targetflags.extend(builder.lld_flags(target)); - for flag in targetflags { - cmd.arg("--target-rustcflags").arg(flag); - } - - cmd.arg("--python").arg(builder.python()); - - if let Some(ref gdb) = builder.config.gdb { - cmd.arg("--gdb").arg(gdb); - } - - let run = |cmd: &mut Command| { - cmd.output().map(|output| { - String::from_utf8_lossy(&output.stdout) - .lines() - .next() - .unwrap_or_else(|| panic!("{:?} failed {:?}", cmd, output)) - .to_string() - }) - }; - let lldb_exe = "lldb"; - let lldb_version = Command::new(lldb_exe) - .arg("--version") - .output() - .map(|output| String::from_utf8_lossy(&output.stdout).to_string()) - .ok(); - if let Some(ref vers) = lldb_version { - cmd.arg("--lldb-version").arg(vers); - let lldb_python_dir = run(Command::new(lldb_exe).arg("-P")).ok(); - if let Some(ref dir) = lldb_python_dir { - cmd.arg("--lldb-python-dir").arg(dir); - } - } - - if util::forcing_clang_based_tests() { - let clang_exe = builder.llvm_out(target).join("bin").join("clang"); - cmd.arg("--run-clang-based-tests-with").arg(clang_exe); - } - - for exclude in &builder.config.skip { - cmd.arg("--skip"); - cmd.arg(&exclude); - } - - // Get paths from cmd args - let paths = match &builder.config.cmd { - Subcommand::Test { .. } => &builder.config.paths[..], - _ => &[], - }; - - // Get test-args by striping suite path - let mut test_args: Vec<&str> = paths - .iter() - .filter_map(|p| util::is_valid_test_suite_arg(p, suite_path, builder)) - .collect(); - - test_args.append(&mut builder.config.test_args()); - - // On Windows, replace forward slashes in test-args by backslashes - // so the correct filters are passed to libtest - if cfg!(windows) { - let test_args_win: Vec = - test_args.iter().map(|s| s.replace("/", "\\")).collect(); - cmd.args(&test_args_win); - } else { - cmd.args(&test_args); - } - - if builder.is_verbose() { - cmd.arg("--verbose"); - } - - cmd.arg("--json"); - - let mut llvm_components_passed = false; - let mut copts_passed = false; - if builder.config.llvm_enabled() { - let llvm::LlvmResult { llvm_config, .. } = - builder.ensure(llvm::Llvm { target: builder.config.build }); - if !builder.config.dry_run() { - let llvm_version = output(Command::new(&llvm_config).arg("--version")); - let llvm_components = output(Command::new(&llvm_config).arg("--components")); - // Remove trailing newline from llvm-config output. - cmd.arg("--llvm-version") - .arg(llvm_version.trim()) - .arg("--llvm-components") - .arg(llvm_components.trim()); - llvm_components_passed = true; - } - if !builder.is_rust_llvm(target) { - cmd.arg("--system-llvm"); - } - - // Tests that use compiler libraries may inherit the `-lLLVM` link - // requirement, but the `-L` library path is not propagated across - // separate compilations. We can add LLVM's library path to the - // platform-specific environment variable as a workaround. - if !builder.config.dry_run() && suite.ends_with("fulldeps") { - let llvm_libdir = output(Command::new(&llvm_config).arg("--libdir")); - add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cmd); - } - - if !builder.config.dry_run() - && (matches!(suite, "run-make" | "run-make-fulldeps") || mode == "run-coverage") - { - // The llvm/bin directory contains many useful cross-platform - // tools. Pass the path to run-make tests so they can use them. - // (The run-coverage tests also need these tools to process - // coverage reports.) 
-                let llvm_bin_path = llvm_config
-                    .parent()
-                    .expect("Expected llvm-config to be contained in directory");
-                assert!(llvm_bin_path.is_dir());
-                cmd.arg("--llvm-bin-dir").arg(llvm_bin_path);
-            }
-
-            if !builder.config.dry_run() && matches!(suite, "run-make" | "run-make-fulldeps") {
-                // If LLD is available, add it to the PATH
-                if builder.config.lld_enabled {
-                    let lld_install_root =
-                        builder.ensure(llvm::Lld { target: builder.config.build });
-
-                    let lld_bin_path = lld_install_root.join("bin");
-
-                    let old_path = env::var_os("PATH").unwrap_or_default();
-                    let new_path = env::join_paths(
-                        std::iter::once(lld_bin_path).chain(env::split_paths(&old_path)),
-                    )
-                    .expect("Could not add LLD bin path to PATH");
-                    cmd.env("PATH", new_path);
-                }
-            }
-        }
-
-        // Only pass correct values for these flags for the `run-make` suite as it
-        // requires that a C++ compiler was configured which isn't always the case.
-        if !builder.config.dry_run() && matches!(suite, "run-make" | "run-make-fulldeps") {
-            cmd.arg("--cc")
-                .arg(builder.cc(target))
-                .arg("--cxx")
-                .arg(builder.cxx(target).unwrap())
-                .arg("--cflags")
-                .arg(builder.cflags(target, GitRepo::Rustc, CLang::C).join(" "))
-                .arg("--cxxflags")
-                .arg(builder.cflags(target, GitRepo::Rustc, CLang::Cxx).join(" "));
-            copts_passed = true;
-            if let Some(ar) = builder.ar(target) {
-                cmd.arg("--ar").arg(ar);
-            }
-        }
-
-        if !llvm_components_passed {
-            cmd.arg("--llvm-components").arg("");
-        }
-        if !copts_passed {
-            cmd.arg("--cc")
-                .arg("")
-                .arg("--cxx")
-                .arg("")
-                .arg("--cflags")
-                .arg("")
-                .arg("--cxxflags")
-                .arg("");
-        }
-
-        if builder.remote_tested(target) {
-            cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient));
-        }
-
-        // Running a C compiler on MSVC requires a few env vars to be set, to be
-        // sure to set them here.
-        //
-        // Note that if we encounter `PATH` we make sure to append to our own `PATH`
-        // rather than stomp over it.
-        if !builder.config.dry_run() && target.contains("msvc") {
-            for &(ref k, ref v) in builder.cc.borrow()[&target].env() {
-                if k != "PATH" {
-                    cmd.env(k, v);
-                }
-            }
-        }
-        cmd.env("RUSTC_BOOTSTRAP", "1");
-        // Override the rustc version used in symbol hashes to reduce the amount of normalization
-        // needed when diffing test output.
- cmd.env("RUSTC_FORCE_RUSTC_VERSION", "compiletest"); - cmd.env("DOC_RUST_LANG_ORG_CHANNEL", builder.doc_rust_lang_org_channel()); - builder.add_rust_test_threads(&mut cmd); - - if builder.config.sanitizers_enabled(target) { - cmd.env("RUSTC_SANITIZER_SUPPORT", "1"); - } - - if builder.config.profiler_enabled(target) { - cmd.env("RUSTC_PROFILER_SUPPORT", "1"); - } - - cmd.env("RUST_TEST_TMPDIR", builder.tempdir()); - - cmd.arg("--adb-path").arg("adb"); - cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR); - if target.contains("android") && !builder.config.dry_run() { - // Assume that cc for this target comes from the android sysroot - cmd.arg("--android-cross-path") - .arg(builder.cc(target).parent().unwrap().parent().unwrap()); - } else { - cmd.arg("--android-cross-path").arg(""); - } - - if builder.config.cmd.rustfix_coverage() { - cmd.arg("--rustfix-coverage"); - } - - cmd.env("BOOTSTRAP_CARGO", &builder.initial_cargo); - - cmd.arg("--channel").arg(&builder.config.channel); - - if !builder.config.omit_git_hash { - cmd.arg("--git-hash"); - } - - builder.ci_env.force_coloring_in_ci(&mut cmd); - - #[cfg(feature = "build-metrics")] - builder.metrics.begin_test_suite( - build_helper::metrics::TestSuiteMetadata::Compiletest { - suite: suite.into(), - mode: mode.into(), - compare_mode: None, - target: self.target.triple.to_string(), - host: self.compiler.host.triple.to_string(), - stage: self.compiler.stage, - }, - builder, - ); - - let _group = builder.msg( - Kind::Test, - compiler.stage, - &format!("compiletest suite={suite} mode={mode}"), - compiler.host, - target, - ); - crate::render_tests::try_run_tests(builder, &mut cmd, false); - - if let Some(compare_mode) = compare_mode { - cmd.arg("--compare-mode").arg(compare_mode); - - #[cfg(feature = "build-metrics")] - builder.metrics.begin_test_suite( - build_helper::metrics::TestSuiteMetadata::Compiletest { - suite: suite.into(), - mode: mode.into(), - compare_mode: Some(compare_mode.into()), - target: self.target.triple.to_string(), - host: self.compiler.host.triple.to_string(), - stage: self.compiler.stage, - }, - builder, - ); - - builder.info(&format!( - "Check compiletest suite={} mode={} compare_mode={} ({} -> {})", - suite, mode, compare_mode, &compiler.host, target - )); - let _time = util::timeit(&builder); - crate::render_tests::try_run_tests(builder, &mut cmd, false); - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -struct BookTest { - compiler: Compiler, - path: PathBuf, - name: &'static str, - is_ext_doc: bool, -} - -impl Step for BookTest { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() - } - - /// Runs the documentation tests for a book in `src/doc`. - /// - /// This uses the `rustdoc` that sits next to `compiler`. - fn run(self, builder: &Builder<'_>) { - // External docs are different from local because: - // - Some books need pre-processing by mdbook before being tested. - // - They need to save their state to toolstate. - // - They are only tested on the "checktools" builders. - // - // The local docs are tested by default, and we don't want to pay the - // cost of building mdbook, so they use `rustdoc --test` directly. - // Also, the unstable book is special because SUMMARY.md is generated, - // so it is easier to just run `rustdoc` on its files. 
- if self.is_ext_doc { - self.run_ext_doc(builder); - } else { - self.run_local_doc(builder); - } - } -} - -impl BookTest { - /// This runs the equivalent of `mdbook test` (via the rustbook wrapper) - /// which in turn runs `rustdoc --test` on each file in the book. - fn run_ext_doc(self, builder: &Builder<'_>) { - let compiler = self.compiler; - - builder.ensure(compile::Std::new(compiler, compiler.host)); - - // mdbook just executes a binary named "rustdoc", so we need to update - // PATH so that it points to our rustdoc. - let mut rustdoc_path = builder.rustdoc(compiler); - rustdoc_path.pop(); - let old_path = env::var_os("PATH").unwrap_or_default(); - let new_path = env::join_paths(iter::once(rustdoc_path).chain(env::split_paths(&old_path))) - .expect("could not add rustdoc to PATH"); - - let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook); - let path = builder.src.join(&self.path); - // Books often have feature-gated example text. - rustbook_cmd.env("RUSTC_BOOTSTRAP", "1"); - rustbook_cmd.env("PATH", new_path).arg("test").arg(path); - builder.add_rust_test_threads(&mut rustbook_cmd); - let _guard = builder.msg( - Kind::Test, - compiler.stage, - format_args!("mdbook {}", self.path.display()), - compiler.host, - compiler.host, - ); - let _time = util::timeit(&builder); - let toolstate = if builder.run_delaying_failure(&mut rustbook_cmd) { - ToolState::TestPass - } else { - ToolState::TestFail - }; - builder.save_toolstate(self.name, toolstate); - } - - /// This runs `rustdoc --test` on all `.md` files in the path. - fn run_local_doc(self, builder: &Builder<'_>) { - let compiler = self.compiler; - let host = self.compiler.host; - - builder.ensure(compile::Std::new(compiler, host)); - - let _guard = - builder.msg(Kind::Test, compiler.stage, &format!("book {}", self.name), host, host); - - // Do a breadth-first traversal of the `src/doc` directory and just run - // tests for all files that end in `*.md` - let mut stack = vec![builder.src.join(self.path)]; - let _time = util::timeit(&builder); - let mut files = Vec::new(); - while let Some(p) = stack.pop() { - if p.is_dir() { - stack.extend(t!(p.read_dir()).map(|p| t!(p).path())); - continue; - } - - if p.extension().and_then(|s| s.to_str()) != Some("md") { - continue; - } - - files.push(p); - } - - files.sort(); - - for file in files { - markdown_test(builder, compiler, &file); - } - } -} - -macro_rules! 
test_book { - ($($name:ident, $path:expr, $book_name:expr, default=$default:expr;)+) => { - $( - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] - pub struct $name { - compiler: Compiler, - } - - impl Step for $name { - type Output = (); - const DEFAULT: bool = $default; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path($path) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure($name { - compiler: run.builder.compiler(run.builder.top_stage, run.target), - }); - } - - fn run(self, builder: &Builder<'_>) { - builder.ensure(BookTest { - compiler: self.compiler, - path: PathBuf::from($path), - name: $book_name, - is_ext_doc: !$default, - }); - } - } - )+ - } -} - -test_book!( - Nomicon, "src/doc/nomicon", "nomicon", default=false; - Reference, "src/doc/reference", "reference", default=false; - RustdocBook, "src/doc/rustdoc", "rustdoc", default=true; - RustcBook, "src/doc/rustc", "rustc", default=true; - RustByExample, "src/doc/rust-by-example", "rust-by-example", default=false; - EmbeddedBook, "src/doc/embedded-book", "embedded-book", default=false; - TheBook, "src/doc/book", "book", default=false; - UnstableBook, "src/doc/unstable-book", "unstable-book", default=true; - EditionGuide, "src/doc/edition-guide", "edition-guide", default=false; -); - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ErrorIndex { - compiler: Compiler, -} - -impl Step for ErrorIndex { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/error_index_generator") - } - - fn make_run(run: RunConfig<'_>) { - // error_index_generator depends on librustdoc. Use the compiler that - // is normally used to build rustdoc for other tests (like compiletest - // tests in tests/rustdoc) so that it shares the same artifacts. - let compiler = run.builder.compiler(run.builder.top_stage, run.builder.config.build); - run.builder.ensure(ErrorIndex { compiler }); - } - - /// Runs the error index generator tool to execute the tests located in the error - /// index. - /// - /// The `error_index_generator` tool lives in `src/tools` and is used to - /// generate a markdown file from the error indexes of the code base which is - /// then passed to `rustdoc --test`. - fn run(self, builder: &Builder<'_>) { - let compiler = self.compiler; - - let dir = testdir(builder, compiler.host); - t!(fs::create_dir_all(&dir)); - let output = dir.join("error-index.md"); - - let mut tool = tool::ErrorIndex::command(builder); - tool.arg("markdown").arg(&output); - - let guard = - builder.msg(Kind::Test, compiler.stage, "error-index", compiler.host, compiler.host); - let _time = util::timeit(&builder); - builder.run_quiet(&mut tool); - drop(guard); - // The tests themselves need to link to std, so make sure it is - // available. 
- builder.ensure(compile::Std::new(compiler, compiler.host)); - markdown_test(builder, compiler, &output); - } -} - -fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) -> bool { - if let Ok(contents) = fs::read_to_string(markdown) { - if !contents.contains("```") { - return true; - } - } - - builder.verbose(&format!("doc tests for: {}", markdown.display())); - let mut cmd = builder.rustdoc_cmd(compiler); - builder.add_rust_test_threads(&mut cmd); - // allow for unstable options such as new editions - cmd.arg("-Z"); - cmd.arg("unstable-options"); - cmd.arg("--test"); - cmd.arg(markdown); - cmd.env("RUSTC_BOOTSTRAP", "1"); - - let test_args = builder.config.test_args().join(" "); - cmd.arg("--test-args").arg(test_args); - - if builder.config.verbose_tests { - builder.run_delaying_failure(&mut cmd) - } else { - builder.run_quiet_delaying_failure(&mut cmd) - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct RustcGuide; - -impl Step for RustcGuide { - type Output = (); - const DEFAULT: bool = false; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/doc/rustc-dev-guide") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustcGuide); - } - - fn run(self, builder: &Builder<'_>) { - let relative_path = Path::new("src").join("doc").join("rustc-dev-guide"); - builder.update_submodule(&relative_path); - - let src = builder.src.join(relative_path); - let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook); - let toolstate = if builder.run_delaying_failure(rustbook_cmd.arg("linkcheck").arg(&src)) { - ToolState::TestPass - } else { - ToolState::TestFail - }; - builder.save_toolstate("rustc-dev-guide", toolstate); - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct CrateLibrustc { - compiler: Compiler, - target: TargetSelection, - crates: Vec>, -} - -impl Step for CrateLibrustc { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.crate_or_deps("rustc-main") - } - - fn make_run(run: RunConfig<'_>) { - let builder = run.builder; - let host = run.build_triple(); - let compiler = builder.compiler_for(builder.top_stage, host, host); - let crates = run - .paths - .iter() - .map(|p| builder.crate_paths[&p.assert_single_path().path].clone()) - .collect(); - - builder.ensure(CrateLibrustc { compiler, target: run.target, crates }); - } - - fn run(self, builder: &Builder<'_>) { - builder.ensure(Crate { - compiler: self.compiler, - target: self.target, - mode: Mode::Rustc, - crates: self.crates, - }); - } -} - -/// Given a `cargo test` subcommand, add the appropriate flags and run it. -/// -/// Returns whether the test succeeded. 
-fn run_cargo_test<'a>( - cargo: impl Into, - libtest_args: &[&str], - crates: &[Interned], - primary_crate: &str, - description: impl Into>, - compiler: Compiler, - target: TargetSelection, - builder: &Builder<'_>, -) -> bool { - let mut cargo = - prepare_cargo_test(cargo, libtest_args, crates, primary_crate, compiler, target, builder); - let _time = util::timeit(&builder); - let _group = description.into().and_then(|what| { - builder.msg_sysroot_tool(Kind::Test, compiler.stage, what, compiler.host, target) - }); - - #[cfg(feature = "build-metrics")] - builder.metrics.begin_test_suite( - build_helper::metrics::TestSuiteMetadata::CargoPackage { - crates: crates.iter().map(|c| c.to_string()).collect(), - target: target.triple.to_string(), - host: compiler.host.triple.to_string(), - stage: compiler.stage, - }, - builder, - ); - add_flags_and_try_run_tests(builder, &mut cargo) -} - -/// Given a `cargo test` subcommand, pass it the appropriate test flags given a `builder`. -fn prepare_cargo_test( - cargo: impl Into, - libtest_args: &[&str], - crates: &[Interned], - primary_crate: &str, - compiler: Compiler, - target: TargetSelection, - builder: &Builder<'_>, -) -> Command { - let mut cargo = cargo.into(); - - // Propegate `--bless` if it has not already been set/unset - // Any tools that want to use this should bless if `RUSTC_BLESS` is set to - // anything other than `0`. - if builder.config.cmd.bless() && !cargo.get_envs().any(|v| v.0 == "RUSTC_BLESS") { - cargo.env("RUSTC_BLESS", "Gesundheit"); - } - - // Pass in some standard flags then iterate over the graph we've discovered - // in `cargo metadata` with the maps above and figure out what `-p` - // arguments need to get passed. - if builder.kind == Kind::Test && !builder.fail_fast { - cargo.arg("--no-fail-fast"); - } - match builder.doc_tests { - DocTests::Only => { - cargo.arg("--doc"); - } - DocTests::No => { - let krate = &builder - .crates - .get(&INTERNER.intern_str(primary_crate)) - .unwrap_or_else(|| panic!("missing crate {primary_crate}")); - if krate.has_lib { - cargo.arg("--lib"); - } - cargo.args(&["--bins", "--examples", "--tests", "--benches"]); - } - DocTests::Yes => {} - } - - for &krate in crates { - cargo.arg("-p").arg(krate); - } - - cargo.arg("--").args(&builder.config.test_args()).args(libtest_args); - if !builder.config.verbose_tests { - cargo.arg("--quiet"); - } - - // The tests are going to run with the *target* libraries, so we need to - // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent. - // - // Note that to run the compiler we need to run with the *host* libraries, - // but our wrapper scripts arrange for that to be the case anyway. 
- let mut dylib_path = dylib_path(); - dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target))); - cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); - - if target.contains("emscripten") { - cargo.env( - format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)), - builder.config.nodejs.as_ref().expect("nodejs not configured"), - ); - } else if target.starts_with("wasm32") { - let node = builder.config.nodejs.as_ref().expect("nodejs not configured"); - let runner = format!("{} {}/src/etc/wasm32-shim.js", node.display(), builder.src.display()); - cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)), &runner); - } else if builder.remote_tested(target) { - cargo.env( - format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)), - format!("{} run 0", builder.tool_exe(Tool::RemoteTestClient).display()), - ); - } - - cargo -} - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Crate { - pub compiler: Compiler, - pub target: TargetSelection, - pub mode: Mode, - pub crates: Vec>, -} - -impl Step for Crate { - type Output = (); - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.crate_or_deps("sysroot") - } - - fn make_run(run: RunConfig<'_>) { - let builder = run.builder; - let host = run.build_triple(); - let compiler = builder.compiler_for(builder.top_stage, host, host); - let crates = run - .paths - .iter() - .map(|p| builder.crate_paths[&p.assert_single_path().path].clone()) - .collect(); - - builder.ensure(Crate { compiler, target: run.target, mode: Mode::Std, crates }); - } - - /// Runs all unit tests plus documentation tests for a given crate defined - /// by a `Cargo.toml` (single manifest) - /// - /// This is what runs tests for crates like the standard library, compiler, etc. - /// It essentially is the driver for running `cargo test`. - /// - /// Currently this runs all tests for a DAG by passing a bunch of `-p foo` - /// arguments, and those arguments are discovered from `cargo metadata`. - fn run(self, builder: &Builder<'_>) { - let compiler = self.compiler; - let target = self.target; - let mode = self.mode; - - // See [field@compile::Std::force_recompile]. - builder.ensure(compile::Std::force_recompile(compiler, target)); - builder.ensure(RemoteCopyLibs { compiler, target }); - - // If we're not doing a full bootstrap but we're testing a stage2 - // version of libstd, then what we're actually testing is the libstd - // produced in stage1. Reflect that here by updating the compiler that - // we're working with automatically. - let compiler = builder.compiler_for(compiler.stage, compiler.host, target); - - let mut cargo = - builder.cargo(compiler, mode, SourceType::InTree, target, builder.kind.as_str()); - match mode { - Mode::Std => { - compile::std_cargo(builder, target, compiler.stage, &mut cargo); - // `std_cargo` actually does the wrong thing: it passes `--sysroot build/host/stage2`, - // but we want to use the force-recompile std we just built in `build/host/stage2-test-sysroot`. - // Override it. 
- if builder.download_rustc() && compiler.stage > 0 { - let sysroot = builder - .out - .join(compiler.host.triple) - .join(format!("stage{}-test-sysroot", compiler.stage)); - cargo.env("RUSTC_SYSROOT", sysroot); - } - } - Mode::Rustc => { - compile::rustc_cargo(builder, &mut cargo, target, compiler.stage); - } - _ => panic!("can only test libraries"), - }; - - run_cargo_test( - cargo, - &[], - &self.crates, - &self.crates[0], - &*crate_description(&self.crates), - compiler, - target, - builder, - ); - } -} - -/// Rustdoc is special in various ways, which is why this step is different from `Crate`. -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct CrateRustdoc { - host: TargetSelection, -} - -impl Step for CrateRustdoc { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.paths(&["src/librustdoc", "src/tools/rustdoc"]) - } - - fn make_run(run: RunConfig<'_>) { - let builder = run.builder; - - builder.ensure(CrateRustdoc { host: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - let target = self.host; - - let compiler = if builder.download_rustc() { - builder.compiler(builder.top_stage, target) - } else { - // Use the previous stage compiler to reuse the artifacts that are - // created when running compiletest for tests/rustdoc. If this used - // `compiler`, then it would cause rustdoc to be built *again*, which - // isn't really necessary. - builder.compiler_for(builder.top_stage, target, target) - }; - // NOTE: normally `ensure(Rustc)` automatically runs `ensure(Std)` for us. However, when - // using `download-rustc`, the rustc_private artifacts may be in a *different sysroot* from - // the target rustdoc (`ci-rustc-sysroot` vs `stage2`). In that case, we need to ensure this - // explicitly to make sure it ends up in the stage2 sysroot. - builder.ensure(compile::Std::new(compiler, target)); - builder.ensure(compile::Rustc::new(compiler, target)); - - let mut cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - target, - builder.kind.as_str(), - "src/tools/rustdoc", - SourceType::InTree, - &[], - ); - if self.host.contains("musl") { - cargo.arg("'-Ctarget-feature=-crt-static'"); - } - - // This is needed for running doctests on librustdoc. This is a bit of - // an unfortunate interaction with how bootstrap works and how cargo - // sets up the dylib path, and the fact that the doctest (in - // html/markdown.rs) links to rustc-private libs. For stage1, the - // compiler host dylibs (in stage1/lib) are not the same as the target - // dylibs (in stage1/lib/rustlib/...). This is different from a normal - // rust distribution where they are the same. - // - // On the cargo side, normal tests use `target_process` which handles - // setting up the dylib for a *target* (stage1/lib/rustlib/... in this - // case). However, for doctests it uses `rustdoc_process` which only - // sets up the dylib path for the *host* (stage1/lib), which is the - // wrong directory. - // - // Recall that we special-cased `compiler_for(top_stage)` above, so we always use stage1. - // - // It should be considered to just stop running doctests on - // librustdoc. There is only one test, and it doesn't look too - // important. There might be other ways to avoid this, but it seems - // pretty convoluted. - // - // See also https://github.com/rust-lang/rust/issues/13983 where the - // host vs target dylibs for rustdoc are consistently tricky to deal - // with. 
- // - // Note that this set the host libdir for `download_rustc`, which uses a normal rust distribution. - let libdir = if builder.download_rustc() { - builder.rustc_libdir(compiler) - } else { - builder.sysroot_libdir(compiler, target).to_path_buf() - }; - let mut dylib_path = dylib_path(); - dylib_path.insert(0, PathBuf::from(&*libdir)); - cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); - - run_cargo_test( - cargo, - &[], - &[INTERNER.intern_str("rustdoc:0.0.0")], - "rustdoc", - "rustdoc", - compiler, - target, - builder, - ); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct CrateRustdocJsonTypes { - host: TargetSelection, -} - -impl Step for CrateRustdocJsonTypes { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/rustdoc-json-types") - } - - fn make_run(run: RunConfig<'_>) { - let builder = run.builder; - - builder.ensure(CrateRustdocJsonTypes { host: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - let target = self.host; - - // Use the previous stage compiler to reuse the artifacts that are - // created when running compiletest for tests/rustdoc. If this used - // `compiler`, then it would cause rustdoc to be built *again*, which - // isn't really necessary. - let compiler = builder.compiler_for(builder.top_stage, target, target); - builder.ensure(compile::Rustc::new(compiler, target)); - - let cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolRustc, - target, - builder.kind.as_str(), - "src/rustdoc-json-types", - SourceType::InTree, - &[], - ); - - // FIXME: this looks very wrong, libtest doesn't accept `-C` arguments and the quotes are fishy. - let libtest_args = if self.host.contains("musl") { - ["'-Ctarget-feature=-crt-static'"].as_slice() - } else { - &[] - }; - - run_cargo_test( - cargo, - libtest_args, - &[INTERNER.intern_str("rustdoc-json-types")], - "rustdoc-json-types", - "rustdoc-json-types", - compiler, - target, - builder, - ); - } -} - -/// Some test suites are run inside emulators or on remote devices, and most -/// of our test binaries are linked dynamically which means we need to ship -/// the standard library and such to the emulator ahead of time. This step -/// represents this and is a dependency of all test suites. -/// -/// Most of the time this is a no-op. For some steps such as shipping data to -/// QEMU we have to build our own tools so we've got conditional dependencies -/// on those programs as well. Note that the remote test client is built for -/// the build target (us) and the server is built for the target. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct RemoteCopyLibs { - compiler: Compiler, - target: TargetSelection, -} - -impl Step for RemoteCopyLibs { - type Output = (); - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() - } - - fn run(self, builder: &Builder<'_>) { - let compiler = self.compiler; - let target = self.target; - if !builder.remote_tested(target) { - return; - } - - builder.ensure(compile::Std::new(compiler, target)); - - builder.info(&format!("REMOTE copy libs to emulator ({target})")); - - let server = builder.ensure(tool::RemoteTestServer { compiler, target }); - - // Spawn the emulator and wait for it to come online - let tool = builder.tool_exe(Tool::RemoteTestClient); - let mut cmd = Command::new(&tool); - cmd.arg("spawn-emulator").arg(target.triple).arg(&server).arg(builder.tempdir()); - if let Some(rootfs) = builder.qemu_rootfs(target) { - cmd.arg(rootfs); - } - builder.run(&mut cmd); - - // Push all our dylibs to the emulator - for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) { - let f = t!(f); - let name = f.file_name().into_string().unwrap(); - if util::is_dylib(&name) { - builder.run(Command::new(&tool).arg("push").arg(f.path())); - } - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Distcheck; - -impl Step for Distcheck { - type Output = (); - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("distcheck") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Distcheck); - } - - /// Runs "distcheck", a 'make check' from a tarball - fn run(self, builder: &Builder<'_>) { - builder.info("Distcheck"); - let dir = builder.tempdir().join("distcheck"); - let _ = fs::remove_dir_all(&dir); - t!(fs::create_dir_all(&dir)); - - // Guarantee that these are built before we begin running. - builder.ensure(dist::PlainSourceTarball); - builder.ensure(dist::Src); - - let mut cmd = Command::new("tar"); - cmd.arg("-xf") - .arg(builder.ensure(dist::PlainSourceTarball).tarball()) - .arg("--strip-components=1") - .current_dir(&dir); - builder.run(&mut cmd); - builder.run( - Command::new("./configure") - .args(&builder.config.configure_args) - .arg("--enable-vendor") - .current_dir(&dir), - ); - builder.run( - Command::new(util::make(&builder.config.build.triple)).arg("check").current_dir(&dir), - ); - - // Now make sure that rust-src has all of libstd's dependencies - builder.info("Distcheck rust-src"); - let dir = builder.tempdir().join("distcheck-src"); - let _ = fs::remove_dir_all(&dir); - t!(fs::create_dir_all(&dir)); - - let mut cmd = Command::new("tar"); - cmd.arg("-xf") - .arg(builder.ensure(dist::Src).tarball()) - .arg("--strip-components=1") - .current_dir(&dir); - builder.run(&mut cmd); - - let toml = dir.join("rust-src/lib/rustlib/src/rust/library/std/Cargo.toml"); - builder.run( - Command::new(&builder.initial_cargo) - // Will read the libstd Cargo.toml - // which uses the unstable `public-dependency` feature. - .env("RUSTC_BOOTSTRAP", "1") - .arg("generate-lockfile") - .arg("--manifest-path") - .arg(&toml) - .current_dir(&dir), - ); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Bootstrap; - -impl Step for Bootstrap { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - /// Tests the build system itself. 
- fn run(self, builder: &Builder<'_>) { - let host = builder.config.build; - let compiler = builder.compiler(0, host); - let _guard = builder.msg(Kind::Test, 0, "bootstrap", host, host); - - let mut check_bootstrap = Command::new(&builder.python()); - check_bootstrap - .args(["-m", "unittest", "bootstrap_test.py"]) - .env("BUILD_DIR", &builder.out) - .env("BUILD_PLATFORM", &builder.build.build.triple) - .current_dir(builder.src.join("src/bootstrap/")); - // NOTE: we intentionally don't pass test_args here because the args for unittest and cargo test are mutually incompatible. - // Use `python -m unittest` manually if you want to pass arguments. - builder.run_delaying_failure(&mut check_bootstrap); - - let mut cmd = Command::new(&builder.initial_cargo); - cmd.arg("test") - .current_dir(builder.src.join("src/bootstrap")) - .env("RUSTFLAGS", "-Cdebuginfo=2") - .env("CARGO_TARGET_DIR", builder.out.join("bootstrap")) - .env("RUSTC_BOOTSTRAP", "1") - .env("RUSTDOC", builder.rustdoc(compiler)) - .env("RUSTC", &builder.initial_rustc); - if let Some(flags) = option_env!("RUSTFLAGS") { - // Use the same rustc flags for testing as for "normal" compilation, - // so that Cargo doesn’t recompile the entire dependency graph every time: - // https://github.com/rust-lang/rust/issues/49215 - cmd.env("RUSTFLAGS", flags); - } - // rustbuild tests are racy on directory creation so just run them one at a time. - // Since there's not many this shouldn't be a problem. - run_cargo_test(cmd, &["--test-threads=1"], &[], "bootstrap", None, compiler, host, builder); - } - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/bootstrap") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Bootstrap); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct TierCheck { - pub compiler: Compiler, -} - -impl Step for TierCheck { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/tier-check") - } - - fn make_run(run: RunConfig<'_>) { - let compiler = - run.builder.compiler_for(run.builder.top_stage, run.builder.build.build, run.target); - run.builder.ensure(TierCheck { compiler }); - } - - /// Tests the Platform Support page in the rustc book. 
- fn run(self, builder: &Builder<'_>) { - builder.ensure(compile::Std::new(self.compiler, self.compiler.host)); - let mut cargo = tool::prepare_tool_cargo( - builder, - self.compiler, - Mode::ToolStd, - self.compiler.host, - "run", - "src/tools/tier-check", - SourceType::InTree, - &[], - ); - cargo.arg(builder.src.join("src/doc/rustc/src/platform-support.md")); - cargo.arg(&builder.rustc(self.compiler)); - if builder.is_verbose() { - cargo.arg("--verbose"); - } - - let _guard = builder.msg( - Kind::Test, - self.compiler.stage, - "platform support check", - self.compiler.host, - self.compiler.host, - ); - builder.run_delaying_failure(&mut cargo.into()); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct LintDocs { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for LintDocs { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/lint-docs") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(LintDocs { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), - target: run.target, - }); - } - - /// Tests that the lint examples in the rustc book generate the correct - /// lints and have the expected format. - fn run(self, builder: &Builder<'_>) { - builder.ensure(crate::doc::RustcBook { - compiler: self.compiler, - target: self.target, - validate: true, - }); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct RustInstaller; - -impl Step for RustInstaller { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = true; - - /// Ensure the version placeholder replacement tool builds - fn run(self, builder: &Builder<'_>) { - let bootstrap_host = builder.config.build; - let compiler = builder.compiler(0, bootstrap_host); - let cargo = tool::prepare_tool_cargo( - builder, - compiler, - Mode::ToolBootstrap, - bootstrap_host, - "test", - "src/tools/rust-installer", - SourceType::InTree, - &[], - ); - - let _guard = builder.msg( - Kind::Test, - compiler.stage, - "rust-installer", - bootstrap_host, - bootstrap_host, - ); - run_cargo_test(cargo, &[], &[], "installer", None, compiler, bootstrap_host, builder); - - // We currently don't support running the test.sh script outside linux(?) environments. - // Eventually this should likely migrate to #[test]s in rust-installer proper rather than a - // set of scripts, which will likely allow dropping this if. 
- if bootstrap_host != "x86_64-unknown-linux-gnu" { - return; - } - - let mut cmd = - std::process::Command::new(builder.src.join("src/tools/rust-installer/test.sh")); - let tmpdir = testdir(builder, compiler.host).join("rust-installer"); - let _ = std::fs::remove_dir_all(&tmpdir); - let _ = std::fs::create_dir_all(&tmpdir); - cmd.current_dir(&tmpdir); - cmd.env("CARGO_TARGET_DIR", tmpdir.join("cargo-target")); - cmd.env("CARGO", &builder.initial_cargo); - cmd.env("RUSTC", &builder.initial_rustc); - cmd.env("TMP_DIR", &tmpdir); - builder.run_delaying_failure(&mut cmd); - } - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/rust-installer") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Self); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct TestHelpers { - pub target: TargetSelection, -} - -impl Step for TestHelpers { - type Output = (); - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("tests/auxiliary/rust_test_helpers.c") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(TestHelpers { target: run.target }) - } - - /// Compiles the `rust_test_helpers.c` library which we used in various - /// `run-pass` tests for ABI testing. - fn run(self, builder: &Builder<'_>) { - if builder.config.dry_run() { - return; - } - // The x86_64-fortanix-unknown-sgx target doesn't have a working C - // toolchain. However, some x86_64 ELF objects can be linked - // without issues. Use this hack to compile the test helpers. - let target = if self.target == "x86_64-fortanix-unknown-sgx" { - TargetSelection::from_user("x86_64-unknown-linux-gnu") - } else { - self.target - }; - let dst = builder.test_helpers_out(target); - let src = builder.src.join("tests/auxiliary/rust_test_helpers.c"); - if up_to_date(&src, &dst.join("librust_test_helpers.a")) { - return; - } - - let _guard = builder.msg_unstaged(Kind::Build, "test helpers", target); - t!(fs::create_dir_all(&dst)); - let mut cfg = cc::Build::new(); - // FIXME: Workaround for https://github.com/emscripten-core/emscripten/issues/9013 - if target.contains("emscripten") { - cfg.pic(false); - } - - // We may have found various cross-compilers a little differently due to our - // extra configuration, so inform cc of these compilers. Note, though, that - // on MSVC we still need cc's detection of env vars (ugh). 
- if !target.contains("msvc") { - if let Some(ar) = builder.ar(target) { - cfg.archiver(ar); - } - cfg.compiler(builder.cc(target)); - } - cfg.cargo_metadata(false) - .out_dir(&dst) - .target(&target.triple) - .host(&builder.config.build.triple) - .opt_level(0) - .warnings(false) - .debug(false) - .file(builder.src.join("tests/auxiliary/rust_test_helpers.c")) - .compile("rust_test_helpers"); - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct CodegenCranelift { - compiler: Compiler, - target: TargetSelection, -} - -impl Step for CodegenCranelift { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.paths(&["compiler/rustc_codegen_cranelift"]) - } - - fn make_run(run: RunConfig<'_>) { - let builder = run.builder; - let host = run.build_triple(); - let compiler = run.builder.compiler_for(run.builder.top_stage, host, host); - - if builder.doc_tests == DocTests::Only { - return; - } - - let triple = run.target.triple; - let target_supported = if triple.contains("linux") { - triple.contains("x86_64") || triple.contains("aarch64") || triple.contains("s390x") - } else if triple.contains("darwin") || triple.contains("windows") { - triple.contains("x86_64") - } else { - false - }; - if !target_supported { - builder.info("target not supported by rustc_codegen_cranelift. skipping"); - return; - } - - if builder.remote_tested(run.target) { - builder.info("remote testing is not supported by rustc_codegen_cranelift. skipping"); - return; - } - - if !builder.config.rust_codegen_backends.contains(&INTERNER.intern_str("cranelift")) { - builder.info("cranelift not in rust.codegen-backends. skipping"); - return; - } - - builder.ensure(CodegenCranelift { compiler, target: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - let compiler = self.compiler; - let target = self.target; - - builder.ensure(compile::Std::new(compiler, target)); - - // If we're not doing a full bootstrap but we're testing a stage2 - // version of libstd, then what we're actually testing is the libstd - // produced in stage1. Reflect that here by updating the compiler that - // we're working with automatically. 
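The TestHelpers step above drives the `cc` crate in-process to turn tests/auxiliary/rust_test_helpers.c into a static library. For readers who only know `cc` from build scripts, a minimal sketch of the same idea is shown below; it assumes `cc` is declared as a build-dependency and the C file path is invented, so treat it as an illustration rather than the bootstrap code itself.

    // build.rs (illustrative): compile one C file into a static library with the
    // `cc` crate, roughly what the TestHelpers step does without a build script.
    fn main() {
        cc::Build::new()
            .file("src/rust_test_helpers.c") // hypothetical path to the C source
            .opt_level(0)                    // the bootstrap step also builds unoptimized
            .warnings(false)
            .compile("rust_test_helpers");   // emits librust_test_helpers.a into OUT_DIR
    }

Run from a build script, `compile` also prints the cargo link directives; the bootstrap step suppresses that output via `cargo_metadata(false)` since it is not running inside a Cargo build script.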
- let compiler = builder.compiler_for(compiler.stage, compiler.host, target); - - let build_cargo = || { - let mut cargo = builder.cargo( - compiler, - Mode::Codegen, // Must be codegen to ensure dlopen on compiled dylibs works - SourceType::InTree, - target, - "run", - ); - cargo.current_dir(&builder.src.join("compiler/rustc_codegen_cranelift")); - cargo - .arg("--manifest-path") - .arg(builder.src.join("compiler/rustc_codegen_cranelift/build_system/Cargo.toml")); - compile::rustc_cargo_env(builder, &mut cargo, target, compiler.stage); - - // Avoid incremental cache issues when changing rustc - cargo.env("CARGO_BUILD_INCREMENTAL", "false"); - - cargo - }; - - builder.info(&format!( - "{} cranelift stage{} ({} -> {})", - Kind::Test.description(), - compiler.stage, - &compiler.host, - target - )); - let _time = util::timeit(&builder); - - // FIXME handle vendoring for source tarballs before removing the --skip-test below - let download_dir = builder.out.join("cg_clif_download"); - - /* - let mut prepare_cargo = build_cargo(); - prepare_cargo.arg("--").arg("prepare").arg("--download-dir").arg(&download_dir); - #[allow(deprecated)] - builder.config.try_run(&mut prepare_cargo.into()).unwrap(); - */ - - let mut cargo = build_cargo(); - cargo - .arg("--") - .arg("test") - .arg("--download-dir") - .arg(&download_dir) - .arg("--out-dir") - .arg(builder.stage_out(compiler, Mode::ToolRustc).join("cg_clif")) - .arg("--no-unstable-features") - .arg("--use-backend") - .arg("cranelift") - // Avoid having to vendor the standard library dependencies - .arg("--sysroot") - .arg("llvm") - // These tests depend on crates that are not yet vendored - // FIXME remove once vendoring is handled - .arg("--skip-test") - .arg("testsuite.extended_sysroot"); - cargo.args(builder.config.test_args()); - - #[allow(deprecated)] - builder.config.try_run(&mut cargo.into()).unwrap(); - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/tool.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/tool.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/tool.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/tool.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,848 +0,0 @@ -use std::env; -use std::fs; -use std::path::PathBuf; -use std::process::Command; - -use crate::builder::{Builder, Cargo as CargoCommand, RunConfig, ShouldRun, Step}; -use crate::channel::GitInfo; -use crate::compile; -use crate::config::TargetSelection; -use crate::toolstate::ToolState; -use crate::util::{add_dylib_path, exe, t}; -use crate::Compiler; -use crate::Mode; -use crate::{gha, Kind}; - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub enum SourceType { - InTree, - Submodule, -} - -#[derive(Debug, Clone, Hash, PartialEq, Eq)] -struct ToolBuild { - compiler: Compiler, - target: TargetSelection, - tool: &'static str, - path: &'static str, - mode: Mode, - is_optional_tool: bool, - source_type: SourceType, - extra_features: Vec, - /// Nightly-only features that are allowed (comma-separated list). 
- allow_features: &'static str, -} - -impl Builder<'_> { - #[track_caller] - fn msg_tool( - &self, - mode: Mode, - tool: &str, - build_stage: u32, - host: &TargetSelection, - target: &TargetSelection, - ) -> Option { - match mode { - // depends on compiler stage, different to host compiler - Mode::ToolRustc => self.msg_sysroot_tool( - Kind::Build, - build_stage, - format_args!("tool {tool}"), - *host, - *target, - ), - // doesn't depend on compiler, same as host compiler - _ => self.msg(Kind::Build, build_stage, format_args!("tool {tool}"), *host, *target), - } - } -} - -impl Step for ToolBuild { - type Output = Option; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() - } - - /// Builds a tool in `src/tools` - /// - /// This will build the specified tool with the specified `host` compiler in - /// `stage` into the normal cargo output directory. - fn run(self, builder: &Builder<'_>) -> Option { - let compiler = self.compiler; - let target = self.target; - let mut tool = self.tool; - let path = self.path; - let is_optional_tool = self.is_optional_tool; - - match self.mode { - Mode::ToolRustc => { - builder.ensure(compile::Std::new(compiler, compiler.host)); - builder.ensure(compile::Rustc::new(compiler, target)); - } - Mode::ToolStd => builder.ensure(compile::Std::new(compiler, target)), - Mode::ToolBootstrap => {} // uses downloaded stage0 compiler libs - _ => panic!("unexpected Mode for tool build"), - } - - let mut cargo = prepare_tool_cargo( - builder, - compiler, - self.mode, - target, - "build", - path, - self.source_type, - &self.extra_features, - ); - if !self.allow_features.is_empty() { - cargo.allow_features(self.allow_features); - } - let _guard = builder.msg_tool( - self.mode, - self.tool, - self.compiler.stage, - &self.compiler.host, - &self.target, - ); - - let mut cargo = Command::from(cargo); - #[allow(deprecated)] // we check this in `is_optional_tool` in a second - let is_expected = builder.config.try_run(&mut cargo).is_ok(); - - builder.save_toolstate( - tool, - if is_expected { ToolState::TestFail } else { ToolState::BuildFail }, - ); - - if !is_expected { - if !is_optional_tool { - crate::exit!(1); - } else { - None - } - } else { - // HACK(#82501): on Windows, the tools directory gets added to PATH when running tests, and - // compiletest confuses HTML tidy with the in-tree tidy. Name the in-tree tidy something - // different so the problem doesn't come up. - if tool == "tidy" { - tool = "rust-tidy"; - } - let cargo_out = builder.cargo_out(compiler, self.mode, target).join(exe(tool, target)); - let bin = builder.tools_dir(compiler).join(exe(tool, target)); - builder.copy(&cargo_out, &bin); - Some(bin) - } - } -} - -pub fn prepare_tool_cargo( - builder: &Builder<'_>, - compiler: Compiler, - mode: Mode, - target: TargetSelection, - command: &'static str, - path: &str, - source_type: SourceType, - extra_features: &[String], -) -> CargoCommand { - let mut cargo = builder.cargo(compiler, mode, source_type, target, command); - let dir = builder.src.join(path); - cargo.arg("--manifest-path").arg(dir.join("Cargo.toml")); - - let mut features = extra_features.to_vec(); - if builder.build.config.cargo_native_static { - if path.ends_with("cargo") - || path.ends_with("rls") - || path.ends_with("clippy") - || path.ends_with("miri") - || path.ends_with("rustfmt") - { - cargo.env("LIBZ_SYS_STATIC", "1"); - } - if path.ends_with("cargo") { - features.push("all-static".to_string()); - } - } - - // clippy tests need to know about the stage sysroot. 
Set them consistently while building to - // avoid rebuilding when running tests. - cargo.env("SYSROOT", builder.sysroot(compiler)); - - // if tools are using lzma we want to force the build script to build its - // own copy - cargo.env("LZMA_API_STATIC", "1"); - - // CFG_RELEASE is needed by rustfmt (and possibly other tools) which - // import rustc-ap-rustc_attr which requires this to be set for the - // `#[cfg(version(...))]` attribute. - cargo.env("CFG_RELEASE", builder.rust_release()); - cargo.env("CFG_RELEASE_CHANNEL", &builder.config.channel); - cargo.env("CFG_VERSION", builder.rust_version()); - cargo.env("CFG_RELEASE_NUM", &builder.version); - cargo.env("DOC_RUST_LANG_ORG_CHANNEL", builder.doc_rust_lang_org_channel()); - if let Some(ref ver_date) = builder.rust_info().commit_date() { - cargo.env("CFG_VER_DATE", ver_date); - } - if let Some(ref ver_hash) = builder.rust_info().sha() { - cargo.env("CFG_VER_HASH", ver_hash); - } - - let info = GitInfo::new(builder.config.omit_git_hash, &dir); - if let Some(sha) = info.sha() { - cargo.env("CFG_COMMIT_HASH", sha); - } - if let Some(sha_short) = info.sha_short() { - cargo.env("CFG_SHORT_COMMIT_HASH", sha_short); - } - if let Some(date) = info.commit_date() { - cargo.env("CFG_COMMIT_DATE", date); - } - if !features.is_empty() { - cargo.arg("--features").arg(&features.join(", ")); - } - cargo -} - -macro_rules! bootstrap_tool { - ($( - $name:ident, $path:expr, $tool_name:expr - $(,is_external_tool = $external:expr)* - $(,is_unstable_tool = $unstable:expr)* - $(,allow_features = $allow_features:expr)? - ; - )+) => { - #[derive(Copy, PartialEq, Eq, Clone)] - pub enum Tool { - $( - $name, - )+ - } - - impl<'a> Builder<'a> { - pub fn tool_exe(&self, tool: Tool) -> PathBuf { - match tool { - $(Tool::$name => - self.ensure($name { - compiler: self.compiler(0, self.config.build), - target: self.config.build, - }), - )+ - } - } - } - - $( - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] - pub struct $name { - pub compiler: Compiler, - pub target: TargetSelection, - } - - impl Step for $name { - type Output = PathBuf; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path($path) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure($name { - // snapshot compiler - compiler: run.builder.compiler(0, run.builder.config.build), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> PathBuf { - builder.ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: $tool_name, - mode: if false $(|| $unstable)* { - // use in-tree libraries for unstable features - Mode::ToolStd - } else { - Mode::ToolBootstrap - }, - path: $path, - is_optional_tool: false, - source_type: if false $(|| $external)* { - SourceType::Submodule - } else { - SourceType::InTree - }, - extra_features: vec![], - allow_features: concat!($($allow_features)*), - }).expect("expected to build -- essential tool") - } - } - )+ - } -} - -bootstrap_tool!( - Rustbook, "src/tools/rustbook", "rustbook"; - UnstableBookGen, "src/tools/unstable-book-gen", "unstable-book-gen"; - Tidy, "src/tools/tidy", "tidy"; - Linkchecker, "src/tools/linkchecker", "linkchecker"; - CargoTest, "src/tools/cargotest", "cargotest"; - Compiletest, "src/tools/compiletest", "compiletest", is_unstable_tool = true, allow_features = "test"; - BuildManifest, "src/tools/build-manifest", "build-manifest"; - RemoteTestClient, "src/tools/remote-test-client", "remote-test-client"; - RustInstaller, "src/tools/rust-installer", "rust-installer"; - RustdocTheme, 
"src/tools/rustdoc-themes", "rustdoc-themes"; - ExpandYamlAnchors, "src/tools/expand-yaml-anchors", "expand-yaml-anchors"; - LintDocs, "src/tools/lint-docs", "lint-docs"; - JsonDocCk, "src/tools/jsondocck", "jsondocck"; - JsonDocLint, "src/tools/jsondoclint", "jsondoclint"; - HtmlChecker, "src/tools/html-checker", "html-checker"; - BumpStage0, "src/tools/bump-stage0", "bump-stage0"; - ReplaceVersionPlaceholder, "src/tools/replace-version-placeholder", "replace-version-placeholder"; - CollectLicenseMetadata, "src/tools/collect-license-metadata", "collect-license-metadata"; - GenerateCopyright, "src/tools/generate-copyright", "generate-copyright"; - SuggestTests, "src/tools/suggest-tests", "suggest-tests"; - GenerateWindowsSys, "src/tools/generate-windows-sys", "generate-windows-sys"; - RustdocGUITest, "src/tools/rustdoc-gui-test", "rustdoc-gui-test", is_unstable_tool = true, allow_features = "test"; - OptimizedDist, "src/tools/opt-dist", "opt-dist"; - CoverageDump, "src/tools/coverage-dump", "coverage-dump"; -); - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] -pub struct ErrorIndex { - pub compiler: Compiler, -} - -impl ErrorIndex { - pub fn command(builder: &Builder<'_>) -> Command { - // Error-index-generator links with the rustdoc library, so we need to add `rustc_lib_paths` - // for rustc_private and libLLVM.so, and `sysroot_lib` for libstd, etc. - let host = builder.config.build; - let compiler = builder.compiler_for(builder.top_stage, host, host); - let mut cmd = Command::new(builder.ensure(ErrorIndex { compiler })); - let mut dylib_paths = builder.rustc_lib_paths(compiler); - dylib_paths.push(PathBuf::from(&builder.sysroot_libdir(compiler, compiler.host))); - add_dylib_path(dylib_paths, &mut cmd); - cmd - } -} - -impl Step for ErrorIndex { - type Output = PathBuf; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/error_index_generator") - } - - fn make_run(run: RunConfig<'_>) { - // Compile the error-index in the same stage as rustdoc to avoid - // recompiling rustdoc twice if we can. - // - // NOTE: This `make_run` isn't used in normal situations, only if you - // manually build the tool with `x.py build - // src/tools/error-index-generator` which almost nobody does. - // Normally, `x.py test` or `x.py doc` will use the - // `ErrorIndex::command` function instead. 
- let compiler = - run.builder.compiler(run.builder.top_stage.saturating_sub(1), run.builder.config.build); - run.builder.ensure(ErrorIndex { compiler }); - } - - fn run(self, builder: &Builder<'_>) -> PathBuf { - builder - .ensure(ToolBuild { - compiler: self.compiler, - target: self.compiler.host, - tool: "error_index_generator", - mode: Mode::ToolRustc, - path: "src/tools/error_index_generator", - is_optional_tool: false, - source_type: SourceType::InTree, - extra_features: Vec::new(), - allow_features: "", - }) - .expect("expected to build -- essential tool") - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RemoteTestServer { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for RemoteTestServer { - type Output = PathBuf; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/remote-test-server") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RemoteTestServer { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> PathBuf { - builder - .ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "remote-test-server", - mode: Mode::ToolStd, - path: "src/tools/remote-test-server", - is_optional_tool: false, - source_type: SourceType::InTree, - extra_features: Vec::new(), - allow_features: "", - }) - .expect("expected to build -- essential tool") - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] -pub struct Rustdoc { - /// This should only ever be 0 or 2. - /// We sometimes want to reference the "bootstrap" rustdoc, which is why this option is here. - pub compiler: Compiler, -} - -impl Step for Rustdoc { - type Output = PathBuf; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/rustdoc").path("src/librustdoc") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Rustdoc { - // Note: this is somewhat unique in that we actually want a *target* - // compiler here, because rustdoc *is* a compiler. We won't be using - // this as the compiler to build with, but rather this is "what - // compiler are we producing"? - compiler: run.builder.compiler(run.builder.top_stage, run.target), - }); - } - - fn run(self, builder: &Builder<'_>) -> PathBuf { - let target_compiler = self.compiler; - if target_compiler.stage == 0 { - if !target_compiler.is_snapshot(builder) { - panic!("rustdoc in stage 0 must be snapshot rustdoc"); - } - return builder.initial_rustc.with_file_name(exe("rustdoc", target_compiler.host)); - } - let target = target_compiler.host; - // Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise - // we'd have stageN/bin/rustc and stageN/bin/rustdoc be effectively different stage - // compilers, which isn't what we want. Rustdoc should be linked in the same way as the - // rustc compiler it's paired with, so it must be built with the previous stage compiler. - let build_compiler = builder.compiler(target_compiler.stage - 1, builder.config.build); - - // When using `download-rustc` and a stage0 build_compiler, copying rustc doesn't actually - // build stage0 libstd (because the libstd in sysroot has the wrong ABI). Explicitly build - // it. 
- builder.ensure(compile::Std::new(build_compiler, target_compiler.host)); - builder.ensure(compile::Rustc::new(build_compiler, target_compiler.host)); - // NOTE: this implies that `download-rustc` is pretty useless when compiling with the stage0 - // compiler, since you do just as much work. - if !builder.config.dry_run() && builder.download_rustc() && build_compiler.stage == 0 { - println!( - "warning: `download-rustc` does nothing when building stage1 tools; consider using `--stage 2` instead" - ); - } - - // The presence of `target_compiler` ensures that the necessary libraries (codegen backends, - // compiler libraries, ...) are built. Rustdoc does not require the presence of any - // libraries within sysroot_libdir (i.e., rustlib), though doctests may want it (since - // they'll be linked to those libraries). As such, don't explicitly `ensure` any additional - // libraries here. The intuition here is that If we've built a compiler, we should be able - // to build rustdoc. - // - let mut features = Vec::new(); - if builder.config.jemalloc { - features.push("jemalloc".to_string()); - } - - let mut cargo = prepare_tool_cargo( - builder, - build_compiler, - Mode::ToolRustc, - target, - "build", - "src/tools/rustdoc", - SourceType::InTree, - features.as_slice(), - ); - - if builder.config.rustc_parallel { - cargo.rustflag("--cfg=parallel_compiler"); - } - - let _guard = builder.msg_tool( - Mode::ToolRustc, - "rustdoc", - build_compiler.stage, - &self.compiler.host, - &target, - ); - builder.run(&mut cargo.into()); - - // Cargo adds a number of paths to the dylib search path on windows, which results in - // the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the "tool" - // rustdoc a different name. - let tool_rustdoc = builder - .cargo_out(build_compiler, Mode::ToolRustc, target) - .join(exe("rustdoc_tool_binary", target_compiler.host)); - - // don't create a stage0-sysroot/bin directory. - if target_compiler.stage > 0 { - let sysroot = builder.sysroot(target_compiler); - let bindir = sysroot.join("bin"); - t!(fs::create_dir_all(&bindir)); - let bin_rustdoc = bindir.join(exe("rustdoc", target_compiler.host)); - let _ = fs::remove_file(&bin_rustdoc); - builder.copy(&tool_rustdoc, &bin_rustdoc); - bin_rustdoc - } else { - tool_rustdoc - } - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct Cargo { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for Cargo { - type Output = PathBuf; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path("src/tools/cargo").default_condition( - builder.config.extended - && builder.config.tools.as_ref().map_or( - true, - // If `tools` is set, search list for this tool. 
- |tools| tools.iter().any(|tool| tool == "cargo"), - ), - ) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Cargo { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> PathBuf { - let cargo_bin_path = builder - .ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "cargo", - mode: Mode::ToolRustc, - path: "src/tools/cargo", - is_optional_tool: false, - source_type: SourceType::Submodule, - extra_features: Vec::new(), - allow_features: "", - }) - .expect("expected to build -- essential tool"); - cargo_bin_path - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct LldWrapper { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for LldWrapper { - type Output = PathBuf; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() - } - - fn run(self, builder: &Builder<'_>) -> PathBuf { - let src_exe = builder - .ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "lld-wrapper", - mode: Mode::ToolStd, - path: "src/tools/lld-wrapper", - is_optional_tool: false, - source_type: SourceType::InTree, - extra_features: Vec::new(), - allow_features: "", - }) - .expect("expected to build -- essential tool"); - - src_exe - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustAnalyzer { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl RustAnalyzer { - pub const ALLOW_FEATURES: &'static str = - "proc_macro_internals,proc_macro_diagnostic,proc_macro_span,proc_macro_span_shrink"; -} - -impl Step for RustAnalyzer { - type Output = Option; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path("src/tools/rust-analyzer").default_condition( - builder.config.extended - && builder - .config - .tools - .as_ref() - .map_or(true, |tools| tools.iter().any(|tool| tool == "rust-analyzer")), - ) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustAnalyzer { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> Option { - builder.ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "rust-analyzer", - mode: Mode::ToolStd, - path: "src/tools/rust-analyzer", - extra_features: vec!["rust-analyzer/in-rust-tree".to_owned()], - is_optional_tool: false, - source_type: SourceType::InTree, - allow_features: RustAnalyzer::ALLOW_FEATURES, - }) - } -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustAnalyzerProcMacroSrv { - pub compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for RustAnalyzerProcMacroSrv { - type Output = Option; - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - // Allow building `rust-analyzer-proc-macro-srv` both as part of the `rust-analyzer` and as a stand-alone tool. 
- run.path("src/tools/rust-analyzer") - .path("src/tools/rust-analyzer/crates/proc-macro-srv-cli") - .default_condition(builder.config.tools.as_ref().map_or(true, |tools| { - tools - .iter() - .any(|tool| tool == "rust-analyzer" || tool == "rust-analyzer-proc-macro-srv") - })) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(RustAnalyzerProcMacroSrv { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), - target: run.target, - }); - } - - fn run(self, builder: &Builder<'_>) -> Option { - let path = builder.ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "rust-analyzer-proc-macro-srv", - mode: Mode::ToolStd, - path: "src/tools/rust-analyzer/crates/proc-macro-srv-cli", - extra_features: vec!["sysroot-abi".to_owned()], - is_optional_tool: false, - source_type: SourceType::InTree, - allow_features: RustAnalyzer::ALLOW_FEATURES, - })?; - - // Copy `rust-analyzer-proc-macro-srv` to `/libexec/` - // so that r-a can use it. - let libexec_path = builder.sysroot(self.compiler).join("libexec"); - t!(fs::create_dir_all(&libexec_path)); - builder.copy(&path, &libexec_path.join("rust-analyzer-proc-macro-srv")); - - Some(path) - } -} - -macro_rules! tool_extended { - (($sel:ident, $builder:ident), - $($name:ident, - $path:expr, - $tool_name:expr, - stable = $stable:expr - $(,tool_std = $tool_std:literal)? - $(,allow_features = $allow_features:expr)? - $(,add_bins_to_sysroot = $add_bins_to_sysroot:expr)? - ;)+) => { - $( - #[derive(Debug, Clone, Hash, PartialEq, Eq)] - pub struct $name { - pub compiler: Compiler, - pub target: TargetSelection, - pub extra_features: Vec, - } - - impl Step for $name { - type Output = Option; - const DEFAULT: bool = true; // Overwritten below - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path($path).default_condition( - builder.config.extended - && builder.config.tools.as_ref().map_or( - // By default, on nightly/dev enable all tools, else only - // build stable tools. - $stable || builder.build.unstable_features(), - // If `tools` is set, search list for this tool. - |tools| { - tools.iter().any(|tool| match tool.as_ref() { - "clippy" => $tool_name == "clippy-driver", - x => $tool_name == x, - }) - }), - ) - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure($name { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build), - target: run.target, - extra_features: Vec::new(), - }); - } - - #[allow(unused_mut)] - fn run(mut $sel, $builder: &Builder<'_>) -> Option { - let tool = $builder.ensure(ToolBuild { - compiler: $sel.compiler, - target: $sel.target, - tool: $tool_name, - mode: if false $(|| $tool_std)? { Mode::ToolStd } else { Mode::ToolRustc }, - path: $path, - extra_features: $sel.extra_features, - is_optional_tool: true, - source_type: SourceType::InTree, - allow_features: concat!($($allow_features)*), - })?; - - if (false $(|| !$add_bins_to_sysroot.is_empty())?) && $sel.compiler.stage > 0 { - let bindir = $builder.sysroot($sel.compiler).join("bin"); - t!(fs::create_dir_all(&bindir)); - - #[allow(unused_variables)] - let tools_out = $builder - .cargo_out($sel.compiler, Mode::ToolRustc, $sel.target); - - $(for add_bin in $add_bins_to_sysroot { - let bin_source = tools_out.join(exe(add_bin, $sel.target)); - let bin_destination = bindir.join(exe(add_bin, $sel.compiler.host)); - $builder.copy(&bin_source, &bin_destination); - })? 
- - let tool = bindir.join(exe($tool_name, $sel.compiler.host)); - Some(tool) - } else { - Some(tool) - } - } - } - )+ - } -} - -// Note: tools need to be also added to `Builder::get_step_descriptions` in `builder.rs` -// to make `./x.py build ` work. -// Note: Most submodule updates for tools are handled by bootstrap.py, since they're needed just to -// invoke Cargo to build bootstrap. See the comment there for more details. -tool_extended!((self, builder), - Cargofmt, "src/tools/rustfmt", "cargo-fmt", stable=true; - CargoClippy, "src/tools/clippy", "cargo-clippy", stable=true; - Clippy, "src/tools/clippy", "clippy-driver", stable=true, add_bins_to_sysroot = ["clippy-driver", "cargo-clippy"]; - Miri, "src/tools/miri", "miri", stable=false, add_bins_to_sysroot = ["miri"]; - CargoMiri, "src/tools/miri/cargo-miri", "cargo-miri", stable=true, add_bins_to_sysroot = ["cargo-miri"]; - // FIXME: tool_std is not quite right, we shouldn't allow nightly features. - // But `builder.cargo` doesn't know how to handle ToolBootstrap in stages other than 0, - // and this is close enough for now. - Rls, "src/tools/rls", "rls", stable=true, tool_std=true; - RustDemangler, "src/tools/rust-demangler", "rust-demangler", stable=false, tool_std=true; - Rustfmt, "src/tools/rustfmt", "rustfmt", stable=true, add_bins_to_sysroot = ["rustfmt", "cargo-fmt"]; -); - -impl<'a> Builder<'a> { - /// Gets a `Command` which is ready to run `tool` in `stage` built for - /// `host`. - pub fn tool_cmd(&self, tool: Tool) -> Command { - let mut cmd = Command::new(self.tool_exe(tool)); - let compiler = self.compiler(0, self.config.build); - let host = &compiler.host; - // Prepares the `cmd` provided to be able to run the `compiler` provided. - // - // Notably this munges the dynamic library lookup path to point to the - // right location to run `compiler`. - let mut lib_paths: Vec = vec![ - self.build.rustc_snapshot_libdir(), - self.cargo_out(compiler, Mode::ToolBootstrap, *host).join("deps"), - ]; - - // On MSVC a tool may invoke a C compiler (e.g., compiletest in run-make - // mode) and that C compiler may need some extra PATH modification. Do - // so here. - if compiler.host.contains("msvc") { - let curpaths = env::var_os("PATH").unwrap_or_default(); - let curpaths = env::split_paths(&curpaths).collect::>(); - for &(ref k, ref v) in self.cc.borrow()[&compiler.host].env() { - if k != "PATH" { - continue; - } - for path in env::split_paths(v) { - if !curpaths.contains(&path) { - lib_paths.push(path); - } - } - } - } - - add_dylib_path(lib_paths, &mut cmd); - - // Provide a RUSTC for this command to use. - cmd.env("RUSTC", &self.initial_rustc); - - cmd - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/toolstate.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/toolstate.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/toolstate.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/toolstate.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,478 +0,0 @@ -use crate::builder::{Builder, RunConfig, ShouldRun, Step}; -use crate::util::t; -use serde_derive::{Deserialize, Serialize}; -use std::collections::HashMap; -use std::env; -use std::fmt; -use std::fs; -use std::io::{Seek, SeekFrom}; -use std::path::{Path, PathBuf}; -use std::process::Command; -use std::time; - -// Each cycle is 42 days long (6 weeks); the last week is 35..=42 then. 
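The `tool_cmd` helper at the end of tool.rs above works by prepending directories to the platform's dynamic-library lookup variable before a tool is spawned. A simplified, self-contained sketch of that prepend-and-set pattern follows; the variable name, tool name, and directory are placeholders, and the real helper (`add_dylib_path` in util.rs, later in this patch) chooses the variable per platform (typically PATH on Windows, DYLD_LIBRARY_PATH on macOS, LD_LIBRARY_PATH on other Unixes).

    use std::env;
    use std::path::PathBuf;
    use std::process::Command;

    // Sketch of the add_dylib_path idea: put extra directories at the front of a
    // PATH-like environment variable on a Command that has not been spawned yet.
    fn prepend_dylib_dirs(cmd: &mut Command, var: &str, extra: Vec<PathBuf>) {
        let mut paths = extra;
        if let Some(old) = env::var_os(var) {
            paths.extend(env::split_paths(&old));
        }
        cmd.env(var, env::join_paths(paths).expect("paths must not contain the separator"));
    }

    fn main() {
        // Hypothetical: let a freshly built rustdoc find the compiler's shared libraries.
        let mut cmd = Command::new("rustdoc");
        prepend_dylib_dirs(&mut cmd, "LD_LIBRARY_PATH", vec![PathBuf::from("build/stage1/lib")]);
        // `cmd` is only configured here, not spawned.
    }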
-const BETA_WEEK_START: u64 = 35; - -#[cfg(target_os = "linux")] -const OS: Option<&str> = Some("linux"); - -#[cfg(windows)] -const OS: Option<&str> = Some("windows"); - -#[cfg(all(not(target_os = "linux"), not(windows)))] -const OS: Option<&str> = None; - -type ToolstateData = HashMap, ToolState>; - -#[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, PartialOrd)] -#[serde(rename_all = "kebab-case")] -/// Whether a tool can be compiled, tested or neither -pub enum ToolState { - /// The tool compiles successfully, but the test suite fails - TestFail = 1, - /// The tool compiles successfully and its test suite passes - TestPass = 2, - /// The tool can't even be compiled - BuildFail = 0, -} - -impl fmt::Display for ToolState { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "{}", - match self { - ToolState::TestFail => "test-fail", - ToolState::TestPass => "test-pass", - ToolState::BuildFail => "build-fail", - } - ) - } -} - -/// Number of days after the last promotion of beta. -/// Its value is 41 on the Tuesday where "Promote master to beta (T-2)" happens. -/// The Wednesday after this has value 0. -/// We track this value to prevent regressing tools in the last week of the 6-week cycle. -fn days_since_beta_promotion() -> u64 { - let since_epoch = t!(time::SystemTime::UNIX_EPOCH.elapsed()); - (since_epoch.as_secs() / 86400 - 20) % 42 -} - -// These tools must test-pass on the beta/stable channels. -// -// On the nightly channel, their build step must be attempted, but they may not -// be able to build successfully. -static STABLE_TOOLS: &[(&str, &str)] = &[ - ("book", "src/doc/book"), - ("nomicon", "src/doc/nomicon"), - ("reference", "src/doc/reference"), - ("rust-by-example", "src/doc/rust-by-example"), - ("edition-guide", "src/doc/edition-guide"), -]; - -// These tools are permitted to not build on the beta/stable channels. -// -// We do require that we checked whether they build or not on the tools builder, -// though, as otherwise we will be unable to file an issue if they start -// failing. -static NIGHTLY_TOOLS: &[(&str, &str)] = &[ - ("embedded-book", "src/doc/embedded-book"), - // ("rustc-dev-guide", "src/doc/rustc-dev-guide"), -]; - -fn print_error(tool: &str, submodule: &str) { - eprintln!(); - eprintln!("We detected that this PR updated '{tool}', but its tests failed."); - eprintln!(); - eprintln!("If you do intend to update '{tool}', please check the error messages above and"); - eprintln!("commit another update."); - eprintln!(); - eprintln!("If you do NOT intend to update '{tool}', please ensure you did not accidentally"); - eprintln!("change the submodule at '{submodule}'. 
You may ask your reviewer for the"); - eprintln!("proper steps."); - crate::exit!(3); -} - -fn check_changed_files(toolstates: &HashMap, ToolState>) { - // Changed files - let output = std::process::Command::new("git") - .arg("diff") - .arg("--name-status") - .arg("HEAD") - .arg("HEAD^") - .output(); - let output = match output { - Ok(o) => o, - Err(e) => { - eprintln!("Failed to get changed files: {e:?}"); - crate::exit!(1); - } - }; - - let output = t!(String::from_utf8(output.stdout)); - - for (tool, submodule) in STABLE_TOOLS.iter().chain(NIGHTLY_TOOLS.iter()) { - let changed = output.lines().any(|l| l.starts_with('M') && l.ends_with(submodule)); - eprintln!("Verifying status of {tool}..."); - if !changed { - continue; - } - - eprintln!("This PR updated '{submodule}', verifying if status is 'test-pass'..."); - if toolstates[*tool] != ToolState::TestPass { - print_error(tool, submodule); - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ToolStateCheck; - -impl Step for ToolStateCheck { - type Output = (); - - /// Checks tool state status. - /// - /// This is intended to be used in the `checktools.sh` script. To use - /// this, set `save-toolstates` in `config.toml` so that tool status will - /// be saved to a JSON file. Then, run `x.py test --no-fail-fast` for all - /// of the tools to populate the JSON file. After that is done, this - /// command can be run to check for any status failures, and exits with an - /// error if there are any. - /// - /// This also handles publishing the results to the `history` directory of - /// the toolstate repo - /// if the env var `TOOLSTATE_PUBLISH` is set. Note that there is a - /// *separate* step of updating the `latest.json` file and creating GitHub - /// issues and comments in `src/ci/publish_toolstate.sh`, which is only - /// performed on master. (The shell/python code is intended to be migrated - /// here eventually.) - /// - /// The rules for failure are: - /// * If the PR modifies a tool, the status must be test-pass. - /// NOTE: There is intent to change this, see - /// . - /// * All "stable" tools must be test-pass on the stable or beta branches. - /// * During beta promotion week, a PR is not allowed to "regress" a - /// stable tool. That is, the status is not allowed to get worse - /// (test-pass to test-fail or build-fail). 
- fn run(self, builder: &Builder<'_>) { - if builder.config.dry_run() { - return; - } - - let days_since_beta_promotion = days_since_beta_promotion(); - let in_beta_week = days_since_beta_promotion >= BETA_WEEK_START; - let is_nightly = !(builder.config.channel == "beta" || builder.config.channel == "stable"); - let toolstates = builder.toolstates(); - - let mut did_error = false; - - for (tool, _) in STABLE_TOOLS.iter().chain(NIGHTLY_TOOLS.iter()) { - if !toolstates.contains_key(*tool) { - did_error = true; - eprintln!("error: Tool `{tool}` was not recorded in tool state."); - } - } - - if did_error { - crate::exit!(1); - } - - check_changed_files(&toolstates); - checkout_toolstate_repo(); - let old_toolstate = read_old_toolstate(); - - for (tool, _) in STABLE_TOOLS.iter() { - let state = toolstates[*tool]; - - if state != ToolState::TestPass { - if !is_nightly { - did_error = true; - eprintln!("error: Tool `{tool}` should be test-pass but is {state}"); - } else if in_beta_week { - let old_state = old_toolstate - .iter() - .find(|ts| ts.tool == *tool) - .expect("latest.json missing tool") - .state(); - if state < old_state { - did_error = true; - eprintln!( - "error: Tool `{tool}` has regressed from {old_state} to {state} during beta week." - ); - } else { - // This warning only appears in the logs, which most - // people won't read. It's mostly here for testing and - // debugging. - eprintln!( - "warning: Tool `{tool}` is not test-pass (is `{state}`), \ - this should be fixed before beta is branched." - ); - } - } - // `publish_toolstate.py` is responsible for updating - // `latest.json` and creating comments/issues warning people - // if there is a regression. That all happens in a separate CI - // job on the master branch once the PR has passed all tests - // on the `auto` branch. - } - } - - if did_error { - crate::exit!(1); - } - - if builder.config.channel == "nightly" && env::var_os("TOOLSTATE_PUBLISH").is_some() { - commit_toolstate_change(&toolstates); - } - } - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.alias("check-tools") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(ToolStateCheck); - } -} - -impl Builder<'_> { - fn toolstates(&self) -> HashMap, ToolState> { - if let Some(ref path) = self.config.save_toolstates { - if let Some(parent) = path.parent() { - // Ensure the parent directory always exists - t!(std::fs::create_dir_all(parent)); - } - let mut file = - t!(fs::OpenOptions::new().create(true).write(true).read(true).open(path)); - - serde_json::from_reader(&mut file).unwrap_or_default() - } else { - Default::default() - } - } - - /// Updates the actual toolstate of a tool. - /// - /// The toolstates are saved to the file specified by the key - /// `rust.save-toolstates` in `config.toml`. If unspecified, nothing will be - /// done. The file is updated immediately after this function completes. - pub fn save_toolstate(&self, tool: &str, state: ToolState) { - use std::io::Write; - - // If we're in a dry run setting we don't want to save toolstates as - // that means if we e.g. panic down the line it'll look like we tested - // everything (but we actually haven't). - if self.config.dry_run() { - return; - } - // Toolstate isn't tracked for clippy or rustfmt, but since most tools do, we avoid checking - // in all the places we could save toolstate and just do so here. 
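`Builder::toolstates` above (and `save_toolstate`, which follows) treat the `rust.save-toolstates` file as a flat JSON object mapping a tool name to its state, with the kebab-case spellings coming from the `#[serde(rename_all = "kebab-case")]` attribute on `ToolState`. The sketch below round-trips a made-up file of that shape; the tool names and states are invented, the local enum is a trimmed copy of the real one, and it assumes `serde` (with the derive feature) and `serde_json` as dependencies.

    use std::collections::HashMap;
    use serde::Deserialize;

    // Trimmed local copy of ToolState, only so this example is self-contained.
    #[derive(Debug, Deserialize, PartialEq)]
    #[serde(rename_all = "kebab-case")]
    enum ToolState {
        BuildFail,
        TestFail,
        TestPass,
    }

    fn main() {
        // Invented contents for a save-toolstates file; the real keys are whatever
        // tool names bootstrap passed to save_toolstate().
        let raw = r#"{"book":"test-pass","miri":"test-fail","embedded-book":"build-fail"}"#;
        let states: HashMap<String, ToolState> = serde_json::from_str(raw).unwrap();
        assert_eq!(states["miri"], ToolState::TestFail);
    }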
- if tool == "clippy-driver" || tool == "rustfmt" { - return; - } - if let Some(ref path) = self.config.save_toolstates { - if let Some(parent) = path.parent() { - // Ensure the parent directory always exists - t!(std::fs::create_dir_all(parent)); - } - let mut file = - t!(fs::OpenOptions::new().create(true).read(true).write(true).open(path)); - - let mut current_toolstates: HashMap, ToolState> = - serde_json::from_reader(&mut file).unwrap_or_default(); - current_toolstates.insert(tool.into(), state); - t!(file.seek(SeekFrom::Start(0))); - t!(file.set_len(0)); - t!(serde_json::to_writer(&file, ¤t_toolstates)); - t!(writeln!(file)); // make sure this ends in a newline - } - } -} - -fn toolstate_repo() -> String { - env::var("TOOLSTATE_REPO") - .unwrap_or_else(|_| "https://github.com/rust-lang-nursery/rust-toolstate.git".to_string()) -} - -/// Directory where the toolstate repo is checked out. -const TOOLSTATE_DIR: &str = "rust-toolstate"; - -/// Checks out the toolstate repo into `TOOLSTATE_DIR`. -fn checkout_toolstate_repo() { - if let Ok(token) = env::var("TOOLSTATE_REPO_ACCESS_TOKEN") { - prepare_toolstate_config(&token); - } - if Path::new(TOOLSTATE_DIR).exists() { - eprintln!("Cleaning old toolstate directory..."); - t!(fs::remove_dir_all(TOOLSTATE_DIR)); - } - - let status = Command::new("git") - .arg("clone") - .arg("--depth=1") - .arg(toolstate_repo()) - .arg(TOOLSTATE_DIR) - .status(); - let success = match status { - Ok(s) => s.success(), - Err(_) => false, - }; - if !success { - panic!("git clone unsuccessful (status: {status:?})"); - } -} - -/// Sets up config and authentication for modifying the toolstate repo. -fn prepare_toolstate_config(token: &str) { - fn git_config(key: &str, value: &str) { - let status = Command::new("git").arg("config").arg("--global").arg(key).arg(value).status(); - let success = match status { - Ok(s) => s.success(), - Err(_) => false, - }; - if !success { - panic!("git config key={key} value={value} failed (status: {status:?})"); - } - } - - // If changing anything here, then please check that `src/ci/publish_toolstate.sh` is up to date - // as well. - git_config("user.email", "7378925+rust-toolstate-update@users.noreply.github.com"); - git_config("user.name", "Rust Toolstate Update"); - git_config("credential.helper", "store"); - - let credential = format!("https://{token}:x-oauth-basic@github.com\n",); - let git_credential_path = PathBuf::from(t!(env::var("HOME"))).join(".git-credentials"); - t!(fs::write(&git_credential_path, credential)); -} - -/// Reads the latest toolstate from the toolstate repo. -fn read_old_toolstate() -> Vec { - let latest_path = Path::new(TOOLSTATE_DIR).join("_data").join("latest.json"); - let old_toolstate = t!(fs::read(latest_path)); - t!(serde_json::from_slice(&old_toolstate)) -} - -/// This function `commit_toolstate_change` provides functionality for pushing a change -/// to the `rust-toolstate` repository. -/// -/// The function relies on a GitHub bot user, which should have a Personal access -/// token defined in the environment variable $TOOLSTATE_REPO_ACCESS_TOKEN. If for -/// some reason you need to change the token, please update the Azure Pipelines -/// variable group. -/// -/// 1. Generate a new Personal access token: -/// -/// * Login to the bot account, and go to Settings -> Developer settings -> -/// Personal access tokens -/// * Click "Generate new token" -/// * Enable the "public_repo" permission, then click "Generate token" -/// * Copy the generated token (should be a 40-digit hexadecimal number). 
-/// Save it somewhere secure, as the token would be gone once you leave -/// the page. -/// -/// 2. Update the variable group in Azure Pipelines -/// -/// * Ping a member of the infrastructure team to do this. -/// -/// 4. Replace the email address below if the bot account identity is changed -/// -/// * See -/// if a private email by GitHub is wanted. -fn commit_toolstate_change(current_toolstate: &ToolstateData) { - let message = format!("({} CI update)", OS.expect("linux/windows only")); - let mut success = false; - for _ in 1..=5 { - // Upload the test results (the new commit-to-toolstate mapping) to the toolstate repo. - // This does *not* change the "current toolstate"; that only happens post-landing - // via `src/ci/docker/publish_toolstate.sh`. - publish_test_results(¤t_toolstate); - - // `git commit` failing means nothing to commit. - let status = t!(Command::new("git") - .current_dir(TOOLSTATE_DIR) - .arg("commit") - .arg("-a") - .arg("-m") - .arg(&message) - .status()); - if !status.success() { - success = true; - break; - } - - let status = t!(Command::new("git") - .current_dir(TOOLSTATE_DIR) - .arg("push") - .arg("origin") - .arg("master") - .status()); - // If we successfully push, exit. - if status.success() { - success = true; - break; - } - eprintln!("Sleeping for 3 seconds before retrying push"); - std::thread::sleep(std::time::Duration::from_secs(3)); - let status = t!(Command::new("git") - .current_dir(TOOLSTATE_DIR) - .arg("fetch") - .arg("origin") - .arg("master") - .status()); - assert!(status.success()); - let status = t!(Command::new("git") - .current_dir(TOOLSTATE_DIR) - .arg("reset") - .arg("--hard") - .arg("origin/master") - .status()); - assert!(status.success()); - } - - if !success { - panic!("Failed to update toolstate repository with new data"); - } -} - -/// Updates the "history" files with the latest results. -/// -/// These results will later be promoted to `latest.json` by the -/// `publish_toolstate.py` script if the PR passes all tests and is merged to -/// master. -fn publish_test_results(current_toolstate: &ToolstateData) { - let commit = t!(std::process::Command::new("git").arg("rev-parse").arg("HEAD").output()); - let commit = t!(String::from_utf8(commit.stdout)); - - let toolstate_serialized = t!(serde_json::to_string(¤t_toolstate)); - - let history_path = Path::new(TOOLSTATE_DIR) - .join("history") - .join(format!("{}.tsv", OS.expect("linux/windows only"))); - let mut file = t!(fs::read_to_string(&history_path)); - let end_of_first_line = file.find('\n').unwrap(); - file.insert_str(end_of_first_line, &format!("\n{}\t{}", commit.trim(), toolstate_serialized)); - t!(fs::write(&history_path, file)); -} - -#[derive(Debug, Deserialize)] -struct RepoState { - tool: String, - windows: ToolState, - linux: ToolState, -} - -impl RepoState { - fn state(&self) -> ToolState { - if cfg!(target_os = "linux") { - self.linux - } else if cfg!(windows) { - self.windows - } else { - unimplemented!() - } - } -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/util.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/util.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/bootstrap/util.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/bootstrap/util.rs 1970-01-01 00:00:00.000000000 +0000 @@ -1,497 +0,0 @@ -//! Various utility functions used throughout rustbuild. -//! -//! Simple things like testing the various filesystem operations here and there, -//! not a lot of interesting happenings here unfortunately. 
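The regression checks in toolstate.rs hinge on `days_since_beta_promotion()`, whose `(secs / 86400 - 20) % 42` expression is easier to follow with concrete numbers. The sketch below runs the same arithmetic on an arbitrary timestamp; it only illustrates the formula and the `BETA_WEEK_START` threshold, not any real release date.

    // Worked instance of the days_since_beta_promotion() arithmetic from toolstate.rs.
    fn main() {
        let since_epoch_secs: u64 = 1_700_000_000; // arbitrary example timestamp
        let days = since_epoch_secs / 86_400;      // 19_675 whole days since the epoch
        let in_cycle = (days - 20) % 42;           // (19_675 - 20) % 42 == 41
        assert_eq!(in_cycle, 41);
        // 41 is at least BETA_WEEK_START (35), so a PR landing at this point in the
        // 6-week cycle would be subject to the "no regressions during beta week" rule.
        assert!(in_cycle >= 35);
    }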
- -use build_helper::util::{fail, try_run}; -use std::env; -use std::fs; -use std::io; -use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio}; -use std::str; -use std::time::{Instant, SystemTime, UNIX_EPOCH}; - -use crate::builder::Builder; -use crate::config::{Config, TargetSelection}; -use crate::OnceCell; - -/// A helper macro to `unwrap` a result except also print out details like: -/// -/// * The file/line of the panic -/// * The expression that failed -/// * The error itself -/// -/// This is currently used judiciously throughout the build system rather than -/// using a `Result` with `try!`, but this may change one day... -#[macro_export] -macro_rules! t { - ($e:expr) => { - match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {}", stringify!($e), e), - } - }; - // it can show extra info in the second parameter - ($e:expr, $extra:expr) => { - match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {} ({:?})", stringify!($e), e, $extra), - } - }; -} -pub use t; - -/// Given an executable called `name`, return the filename for the -/// executable for a particular target. -pub fn exe(name: &str, target: TargetSelection) -> String { - if target.contains("windows") { - format!("{name}.exe") - } else if target.contains("uefi") { - format!("{name}.efi") - } else { - name.to_string() - } -} - -/// Returns `true` if the file name given looks like a dynamic library. -pub fn is_dylib(name: &str) -> bool { - name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll") -} - -/// Returns `true` if the file name given looks like a debug info file -pub fn is_debug_info(name: &str) -> bool { - // FIXME: consider split debug info on other platforms (e.g., Linux, macOS) - name.ends_with(".pdb") -} - -/// Returns the corresponding relative library directory that the compiler's -/// dylibs will be found in. -pub fn libdir(target: TargetSelection) -> &'static str { - if target.contains("windows") { "bin" } else { "lib" } -} - -/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path. -/// If the dylib_path_var is already set for this cmd, the old value will be overwritten! -pub fn add_dylib_path(path: Vec, cmd: &mut Command) { - let mut list = dylib_path(); - for path in path { - list.insert(0, path); - } - cmd.env(dylib_path_var(), t!(env::join_paths(list))); -} - -include!("dylib_util.rs"); - -/// Adds a list of lookup paths to `cmd`'s link library lookup path. -pub fn add_link_lib_path(path: Vec, cmd: &mut Command) { - let mut list = link_lib_path(); - for path in path { - list.insert(0, path); - } - cmd.env(link_lib_path_var(), t!(env::join_paths(list))); -} - -/// Returns the environment variable which the link library lookup path -/// resides in for this platform. -fn link_lib_path_var() -> &'static str { - if cfg!(target_env = "msvc") { "LIB" } else { "LIBRARY_PATH" } -} - -/// Parses the `link_lib_path_var()` environment variable, returning a list of -/// paths that are members of this lookup path. -fn link_lib_path() -> Vec { - let var = match env::var_os(link_lib_path_var()) { - Some(v) => v, - None => return vec![], - }; - env::split_paths(&var).collect() -} - -pub struct TimeIt(bool, Instant); - -/// Returns an RAII structure that prints out how long it took to drop. 
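Two of the helpers introduced above, the `t!` macro and `exe`, appear constantly throughout bootstrap. The sketch below uses simplified stand-ins (the real `exe` takes a `TargetSelection` rather than a `&str`) purely to show how they read at a call site; it is an illustration, not the bootstrap definitions.

    // Simplified stand-ins for t! and exe() from util.rs, for illustration only.
    macro_rules! t {
        ($e:expr) => {
            match $e {
                Ok(v) => v,
                Err(e) => panic!("{} failed with {}", stringify!($e), e),
            }
        };
    }

    fn exe(name: &str, target: &str) -> String {
        if target.contains("windows") { format!("{name}.exe") } else { name.to_string() }
    }

    fn main() {
        // t! unwraps a Result but keeps the failing expression in the panic message.
        let cwd = t!(std::env::current_dir());
        assert!(cwd.is_absolute());
        // exe() appends the platform-specific executable suffix.
        assert_eq!(exe("rustdoc", "x86_64-pc-windows-msvc"), "rustdoc.exe");
        assert_eq!(exe("rustdoc", "x86_64-unknown-linux-gnu"), "rustdoc");
    }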
-pub fn timeit(builder: &Builder<'_>) -> TimeIt { - TimeIt(builder.config.dry_run(), Instant::now()) -} - -impl Drop for TimeIt { - fn drop(&mut self) { - let time = self.1.elapsed(); - if !self.0 { - println!("\tfinished in {}.{:03} seconds", time.as_secs(), time.subsec_millis()); - } - } -} - -/// Used for download caching -pub(crate) fn program_out_of_date(stamp: &Path, key: &str) -> bool { - if !stamp.exists() { - return true; - } - t!(fs::read_to_string(stamp)) != key -} - -/// Symlinks two directories, using junctions on Windows and normal symlinks on -/// Unix. -pub fn symlink_dir(config: &Config, original: &Path, link: &Path) -> io::Result<()> { - if config.dry_run() { - return Ok(()); - } - let _ = fs::remove_dir(link); - return symlink_dir_inner(original, link); - - #[cfg(not(windows))] - fn symlink_dir_inner(original: &Path, link: &Path) -> io::Result<()> { - use std::os::unix::fs; - fs::symlink(original, link) - } - - #[cfg(windows)] - fn symlink_dir_inner(target: &Path, junction: &Path) -> io::Result<()> { - junction::create(&target, &junction) - } -} - -pub fn forcing_clang_based_tests() -> bool { - if let Some(var) = env::var_os("RUSTBUILD_FORCE_CLANG_BASED_TESTS") { - match &var.to_string_lossy().to_lowercase()[..] { - "1" | "yes" | "on" => true, - "0" | "no" | "off" => false, - other => { - // Let's make sure typos don't go unnoticed - panic!( - "Unrecognized option '{other}' set in \ - RUSTBUILD_FORCE_CLANG_BASED_TESTS" - ) - } - } - } else { - false - } -} - -pub fn use_host_linker(target: TargetSelection) -> bool { - // FIXME: this information should be gotten by checking the linker flavor - // of the rustc target - !(target.contains("emscripten") - || target.contains("wasm32") - || target.contains("nvptx") - || target.contains("fortanix") - || target.contains("fuchsia") - || target.contains("bpf") - || target.contains("switch")) -} - -pub fn is_valid_test_suite_arg<'a, P: AsRef>( - path: &'a Path, - suite_path: P, - builder: &Builder<'_>, -) -> Option<&'a str> { - let suite_path = suite_path.as_ref(); - let path = match path.strip_prefix(".") { - Ok(p) => p, - Err(_) => path, - }; - if !path.starts_with(suite_path) { - return None; - } - let abs_path = builder.src.join(path); - let exists = abs_path.is_dir() || abs_path.is_file(); - if !exists { - panic!( - "Invalid test suite filter \"{}\": file or directory does not exist", - abs_path.display() - ); - } - // Since test suite paths are themselves directories, if we don't - // specify a directory or file, we'll get an empty string here - // (the result of the test suite directory without its suite prefix). - // Therefore, we need to filter these out, as only the first --test-args - // flag is respected, so providing an empty --test-args conflicts with - // any following it. 
- match path.strip_prefix(suite_path).ok().and_then(|p| p.to_str()) { - Some(s) if !s.is_empty() => Some(s), - _ => None, - } -} - -pub fn run(cmd: &mut Command, print_cmd_on_fail: bool) { - if try_run(cmd, print_cmd_on_fail).is_err() { - crate::exit!(1); - } -} - -pub fn check_run(cmd: &mut Command, print_cmd_on_fail: bool) -> bool { - let status = match cmd.status() { - Ok(status) => status, - Err(e) => { - println!("failed to execute command: {cmd:?}\nerror: {e}"); - return false; - } - }; - if !status.success() && print_cmd_on_fail { - println!( - "\n\ncommand did not execute successfully: {cmd:?}\n\ - expected success, got: {status}\n\n" - ); - } - status.success() -} - -pub fn run_suppressed(cmd: &mut Command) { - if !try_run_suppressed(cmd) { - crate::exit!(1); - } -} - -pub fn try_run_suppressed(cmd: &mut Command) -> bool { - let output = match cmd.output() { - Ok(status) => status, - Err(e) => fail(&format!("failed to execute command: {cmd:?}\nerror: {e}")), - }; - if !output.status.success() { - println!( - "\n\ncommand did not execute successfully: {:?}\n\ - expected success, got: {}\n\n\ - stdout ----\n{}\n\ - stderr ----\n{}\n\n", - cmd, - output.status, - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr) - ); - } - output.status.success() -} - -pub fn make(host: &str) -> PathBuf { - if host.contains("dragonfly") - || host.contains("freebsd") - || host.contains("netbsd") - || host.contains("openbsd") - { - PathBuf::from("gmake") - } else { - PathBuf::from("make") - } -} - -#[track_caller] -pub fn output(cmd: &mut Command) -> String { - let output = match cmd.stderr(Stdio::inherit()).output() { - Ok(status) => status, - Err(e) => fail(&format!("failed to execute command: {cmd:?}\nerror: {e}")), - }; - if !output.status.success() { - panic!( - "command did not execute successfully: {:?}\n\ - expected success, got: {}", - cmd, output.status - ); - } - String::from_utf8(output.stdout).unwrap() -} - -pub fn output_result(cmd: &mut Command) -> Result { - let output = match cmd.stderr(Stdio::inherit()).output() { - Ok(status) => status, - Err(e) => return Err(format!("failed to run command: {cmd:?}: {e}")), - }; - if !output.status.success() { - return Err(format!( - "command did not execute successfully: {:?}\n\ - expected success, got: {}\n{}", - cmd, - output.status, - String::from_utf8(output.stderr).map_err(|err| format!("{err:?}"))? - )); - } - Ok(String::from_utf8(output.stdout).map_err(|err| format!("{err:?}"))?) -} - -/// Returns the last-modified time for `path`, or zero if it doesn't exist. -pub fn mtime(path: &Path) -> SystemTime { - fs::metadata(path).and_then(|f| f.modified()).unwrap_or(UNIX_EPOCH) -} - -/// Returns `true` if `dst` is up to date given that the file or files in `src` -/// are used to generate it. -/// -/// Uses last-modified time checks to verify this. 
-pub fn up_to_date(src: &Path, dst: &Path) -> bool { - if !dst.exists() { - return false; - } - let threshold = mtime(dst); - let meta = match fs::metadata(src) { - Ok(meta) => meta, - Err(e) => panic!("source {src:?} failed to get metadata: {e}"), - }; - if meta.is_dir() { - dir_up_to_date(src, threshold) - } else { - meta.modified().unwrap_or(UNIX_EPOCH) <= threshold - } -} - -fn dir_up_to_date(src: &Path, threshold: SystemTime) -> bool { - t!(fs::read_dir(src)).map(|e| t!(e)).all(|e| { - let meta = t!(e.metadata()); - if meta.is_dir() { - dir_up_to_date(&e.path(), threshold) - } else { - meta.modified().unwrap_or(UNIX_EPOCH) < threshold - } - }) -} - -/// Copied from `std::path::absolute` until it stabilizes. -/// -/// FIXME: this shouldn't exist. -pub(crate) fn absolute(path: &Path) -> PathBuf { - if path.as_os_str().is_empty() { - panic!("can't make empty path absolute"); - } - #[cfg(unix)] - { - t!(absolute_unix(path), format!("could not make path absolute: {}", path.display())) - } - #[cfg(windows)] - { - t!(absolute_windows(path), format!("could not make path absolute: {}", path.display())) - } - #[cfg(not(any(unix, windows)))] - { - println!("warning: bootstrap is not supported on non-unix platforms"); - t!(std::fs::canonicalize(t!(std::env::current_dir()))).join(path) - } -} - -#[cfg(unix)] -/// Make a POSIX path absolute without changing its semantics. -fn absolute_unix(path: &Path) -> io::Result<PathBuf> { - // This is mostly a wrapper around collecting `Path::components`, with - // exceptions made where this conflicts with the POSIX specification. - // See 4.13 Pathname Resolution, IEEE Std 1003.1-2017 - // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap04.html#tag_04_13 - - use std::os::unix::prelude::OsStrExt; - let mut components = path.components(); - let path_os = path.as_os_str().as_bytes(); - - let mut normalized = if path.is_absolute() { - // "If a pathname begins with two successive <slash> characters, the - // first component following the leading <slash> characters may be - // interpreted in an implementation-defined manner, although more than - // two leading <slash> characters shall be treated as a single <slash> - // character." - if path_os.starts_with(b"//") && !path_os.starts_with(b"///") { - components.next(); - PathBuf::from("//") - } else { - PathBuf::new() - } - } else { - env::current_dir()? - }; - normalized.extend(components); - - // "Interfaces using pathname resolution may specify additional constraints - // when a pathname that does not name an existing directory contains at - // least one non-<slash> character and contains one or more trailing - // <slash> characters". - // A trailing <slash> is also meaningful if "a symbolic link is - // encountered during pathname resolution". - - if path_os.ends_with(b"/") { - normalized.push(""); - } - - Ok(normalized) -} - -#[cfg(windows)] -fn absolute_windows(path: &std::path::Path) -> std::io::Result<std::path::PathBuf> { - use std::ffi::OsString; - use std::io::Error; - use std::os::windows::ffi::{OsStrExt, OsStringExt}; - use std::ptr::null_mut; - #[link(name = "kernel32")] - extern "system" { - fn GetFullPathNameW( - lpFileName: *const u16, - nBufferLength: u32, - lpBuffer: *mut u16, - lpFilePart: *mut *const u16, - ) -> u32; - } - - unsafe { - // encode the path as UTF-16 - let path: Vec<u16> = path.as_os_str().encode_wide().chain([0]).collect(); - let mut buffer = Vec::new(); - // Loop until either success or failure.
- loop { - // Try to get the absolute path - let len = GetFullPathNameW( - path.as_ptr(), - buffer.len().try_into().unwrap(), - buffer.as_mut_ptr(), - null_mut(), - ); - match len as usize { - // Failure - 0 => return Err(Error::last_os_error()), - // Buffer is too small, resize. - len if len > buffer.len() => buffer.resize(len, 0), - // Success! - len => { - buffer.truncate(len); - return Ok(OsString::from_wide(&buffer).into()); - } - } - } - } -} - -/// Adapted from -/// -/// When `clang-cl` is used with instrumentation, we need to add clang's runtime library resource -/// directory to the linker flags, otherwise there will be linker errors about the profiler runtime -/// missing. This function returns the path to that directory. -pub fn get_clang_cl_resource_dir(clang_cl_path: &str) -> PathBuf { - // Similar to how LLVM does it, to find clang's library runtime directory: - // - we ask `clang-cl` to locate the `clang_rt.builtins` lib. - let mut builtins_locator = Command::new(clang_cl_path); - builtins_locator.args(&["/clang:-print-libgcc-file-name", "/clang:--rtlib=compiler-rt"]); - - let clang_rt_builtins = output(&mut builtins_locator); - let clang_rt_builtins = Path::new(clang_rt_builtins.trim()); - assert!( - clang_rt_builtins.exists(), - "`clang-cl` must correctly locate the library runtime directory" - ); - - // - the profiler runtime will be located in the same directory as the builtins lib, like - // `$LLVM_DISTRO_ROOT/lib/clang/$LLVM_VERSION/lib/windows`. - let clang_rt_dir = clang_rt_builtins.parent().expect("The clang lib folder should exist"); - clang_rt_dir.to_path_buf() -} - -pub fn lld_flag_no_threads(is_windows: bool) -> &'static str { - static LLD_NO_THREADS: OnceCell<(&'static str, &'static str)> = OnceCell::new(); - let (windows, other) = LLD_NO_THREADS.get_or_init(|| { - let out = output(Command::new("lld").arg("-flavor").arg("ld").arg("--version")); - let newer = match (out.find(char::is_numeric), out.find('.')) { - (Some(b), Some(e)) => out.as_str()[b..e].parse::().ok().unwrap_or(14) > 10, - _ => true, - }; - if newer { ("/threads:1", "--threads=1") } else { ("/no-threads", "--no-threads") } - }); - if is_windows { windows } else { other } -} - -pub fn dir_is_empty(dir: &Path) -> bool { - t!(std::fs::read_dir(dir)).next().is_none() -} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/arm-android/Dockerfile rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/arm-android/Dockerfile --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/arm-android/Dockerfile 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/arm-android/Dockerfile 2023-12-21 16:55:28.000000000 +0000 @@ -30,7 +30,7 @@ ENV TARGETS=arm-linux-androideabi -ENV RUST_CONFIGURE_ARGS --arm-linux-androideabi-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ +ENV RUST_CONFIGURE_ARGS --android-ndk=/android/ndk/ ENV SCRIPT python3 ../x.py --stage 2 test --host='' --target $TARGETS diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-android/Dockerfile rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-android/Dockerfile --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-android/Dockerfile 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-android/Dockerfile 2023-12-21 16:55:28.000000000 +0000 @@ -19,12 +19,7 @@ ENV RUST_CONFIGURE_ARGS \ --enable-extended \ --enable-profiler \ - 
--arm-linux-androideabi-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \ - --armv7-linux-androideabi-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \ - --thumbv7neon-linux-androideabi-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \ - --i686-linux-android-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \ - --aarch64-linux-android-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \ - --x86_64-linux-android-ndk=/android/ndk/toolchains/llvm/prebuilt/linux-x86_64/ \ + --android-ndk=/android/ndk/ \ --disable-docs ENV SCRIPT python3 ../x.py dist --host='' --target $TARGETS diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/Dockerfile rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/Dockerfile --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/Dockerfile 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/Dockerfile 2023-12-21 16:55:28.000000000 +0000 @@ -29,8 +29,6 @@ g++-arm-linux-gnueabi \ g++-arm-linux-gnueabihf \ g++-aarch64-linux-gnu \ - g++-mips64-linux-gnuabi64 \ - g++-mips64el-linux-gnuabi64 \ gcc-arm-none-eabi \ gcc-sparc64-linux-gnu \ libc6-dev-sparc64-cross \ @@ -48,12 +46,6 @@ COPY host-x86_64/dist-various-1/install-x86_64-redox.sh /build RUN ./install-x86_64-redox.sh -COPY host-x86_64/dist-various-1/install-mips-musl.sh /build -RUN ./install-mips-musl.sh - -COPY host-x86_64/dist-various-1/install-mipsel-musl.sh /build -RUN ./install-mipsel-musl.sh - COPY host-x86_64/dist-various-1/install-aarch64-none-elf.sh /build RUN ./install-aarch64-none-elf.sh @@ -76,32 +68,7 @@ env \ CC=arm-linux-gnueabihf-gcc CFLAGS="-march=armv7-a+fp" \ CXX=arm-linux-gnueabihf-g++ CXXFLAGS="-march=armv7-a+fp" \ - bash musl.sh armv7hf && \ - env \ - CC=mips-openwrt-linux-gcc \ - CXX=mips-openwrt-linux-g++ \ - bash musl.sh mips && \ - env \ - CC=mipsel-openwrt-linux-gcc \ - CXX=mipsel-openwrt-linux-g++ \ - bash musl.sh mipsel && \ - env \ - CC=mips64-linux-gnuabi64-gcc \ - CXX=mips64-linux-gnuabi64-g++ \ - bash musl.sh mips64 && \ - env \ - CC=mips64el-linux-gnuabi64-gcc \ - CXX=mips64el-linux-gnuabi64-g++ \ - bash musl.sh mips64el && \ - rm -rf /build/* - -# FIXME(mozilla/sccache#235) this shouldn't be necessary but is currently -# necessary to disambiguate the mips compiler with the mipsel compiler. We want -# to give these two wrapper scripts (currently identical ones) different hashes -# to ensure that sccache understands that they're different compilers. 
-RUN \ - echo "# a" >> /usr/local/mips-linux-musl/bin/mips-openwrt-linux-musl-wrapper.sh && \ - echo "# b" >> /usr/local/mipsel-linux-musl/bin/mipsel-openwrt-linux-musl-wrapper.sh + bash musl.sh armv7hf ENV RUN_MAKE_TARGETS=thumbv6m-none-eabi ENV RUN_MAKE_TARGETS=$RUN_MAKE_TARGETS,thumbv7m-none-eabi @@ -110,10 +77,6 @@ ENV TARGETS=asmjs-unknown-emscripten ENV TARGETS=$TARGETS,wasm32-unknown-emscripten -ENV TARGETS=$TARGETS,mips-unknown-linux-musl -ENV TARGETS=$TARGETS,mipsel-unknown-linux-musl -ENV TARGETS=$TARGETS,mips64-unknown-linux-muslabi64 -ENV TARGETS=$TARGETS,mips64el-unknown-linux-muslabi64 ENV TARGETS=$TARGETS,arm-unknown-linux-musleabi ENV TARGETS=$TARGETS,arm-unknown-linux-musleabihf ENV TARGETS=$TARGETS,armv5te-unknown-linux-gnueabi @@ -149,10 +112,6 @@ CFLAGS_arm_unknown_linux_musleabi="-march=armv6 -marm" \ CFLAGS_arm_unknown_linux_musleabihf="-march=armv6 -marm -mfpu=vfp" \ CFLAGS_armv7_unknown_linux_musleabihf="-march=armv7-a+fp" \ - CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \ - CC_mips_unknown_linux_musl=mips-openwrt-linux-gcc \ - CC_mips64el_unknown_linux_muslabi64=mips64el-linux-gnuabi64-gcc \ - CC_mips64_unknown_linux_muslabi64=mips64-linux-gnuabi64-gcc \ CC_sparc64_unknown_linux_gnu=sparc64-linux-gnu-gcc \ CC_x86_64_unknown_redox=x86_64-unknown-redox-gcc \ CC_thumbv7neon_unknown_linux_gnueabihf=arm-linux-gnueabihf-gcc \ @@ -177,10 +136,6 @@ --musl-root-arm=/musl-arm \ --musl-root-armhf=/musl-armhf \ --musl-root-armv7hf=/musl-armv7hf \ - --musl-root-mips=/musl-mips \ - --musl-root-mipsel=/musl-mipsel \ - --musl-root-mips64=/musl-mips64 \ - --musl-root-mips64el=/musl-mips64el \ --disable-docs ENV SCRIPT \ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/install-mips-musl.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/install-mips-musl.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/install-mips-musl.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/install-mips-musl.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -#!/bin/sh -set -ex - -mkdir /usr/local/mips-linux-musl - -# originally from -# https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/ -# OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2 -URL="https://ci-mirrors.rust-lang.org/rustc" -FILE="OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2" -curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mips-linux-musl --strip-components=2 - -for file in /usr/local/mips-linux-musl/bin/mips-openwrt-linux-*; do - ln -s $file /usr/local/bin/`basename $file` -done diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/install-mipsel-musl.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/install-mipsel-musl.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/install-mipsel-musl.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-various-1/install-mipsel-musl.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -#!/bin/sh -set -ex - -mkdir /usr/local/mipsel-linux-musl - -# Note that this originally came from: -# https://downloads.openwrt.org/snapshots/trunk/malta/generic/ -# OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 -URL="https://ci-mirrors.rust-lang.org/rustc" 
-FILE="OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2" -curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mipsel-linux-musl --strip-components=2 - -for file in /usr/local/mipsel-linux-musl/bin/mipsel-openwrt-linux-*; do - ln -s $file /usr/local/bin/`basename $file` -done diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile 2023-12-21 16:55:28.000000000 +0000 @@ -57,9 +57,9 @@ RUN ./build-clang.sh ENV CC=clang CXX=clang++ -# rustc-perf version from 2023-05-30 +# rustc-perf version from 2023-10-22 # Should also be changed in the opt-dist tool for other environments. -ENV PERF_COMMIT 8b2ac3042e1ff2c0074455a0a3618adef97156b1 +ENV PERF_COMMIT 4f313add609f43e928e98132358e8426ed3969ae RUN curl -LS -o perf.zip https://ci-mirrors.rust-lang.org/rustc/rustc-perf-$PERF_COMMIT.zip && \ unzip perf.zip && \ mv rustc-perf-$PERF_COMMIT rustc-perf && \ @@ -84,7 +84,8 @@ --set llvm.ninja=false \ --set rust.jemalloc \ --set rust.use-lld=true \ - --set rust.lto=thin + --set rust.lto=thin \ + --set rust.codegen-units=1 ENV SCRIPT python3 ../x.py build --set rust.debug=true opt-dist && \ ./build/$HOSTS/stage0-tools-bin/opt-dist linux-ci -- python3 ../x.py dist \ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh 2023-12-21 16:55:28.000000000 +0000 @@ -4,7 +4,7 @@ source shared.sh -LLVM=llvmorg-17.0.0-rc3 +LLVM=llvmorg-17.0.4 mkdir llvm-project cd llvm-project diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh 2023-12-21 16:55:28.000000000 +0000 @@ -3,7 +3,8 @@ source shared.sh -GCC=8.5.0 +# Note: in the future when bumping to version 10.1.0, also take care of the sed block below. +GCC=9.5.0 curl https://ftp.gnu.org/gnu/gcc/gcc-$GCC/gcc-$GCC.tar.xz | xzcat | tar xf - cd gcc-$GCC @@ -22,15 +23,25 @@ # latter host is presented to `wget`! Therefore, we choose to download from the insecure HTTP server # instead here. # +# Note: in version 10.1.0, the URL used in `download_prerequisites` has changed from using FTP to +# using HTTP. When bumping to that gcc version, we can likely remove the sed replacement below, or +# the expression will need to be updated. 
That new URL is available at: +# https://github.com/gcc-mirror/gcc/blob/6e6e3f144a33ae504149dc992453b4f6dea12fdb/contrib/download_prerequisites#L35 +# sed -i'' 's|ftp://gcc\.gnu\.org/|https://gcc.gnu.org/|g' ./contrib/download_prerequisites ./contrib/download_prerequisites mkdir ../gcc-build cd ../gcc-build + +# '-fno-reorder-blocks-and-partition' is required to +# enable BOLT optimization of the C++ standard library, +# which is included in librustc_driver.so hide_output ../gcc-$GCC/configure \ --prefix=/rustroot \ --enable-languages=c,c++ \ - --disable-gnu-unique-object + --disable-gnu-unique-object \ + --enable-cxx-flags='-fno-reorder-blocks-and-partition' hide_output make -j$(nproc) hide_output make install ln -s gcc /rustroot/bin/cc diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,16 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "r-efi" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "575fc2d9b3da54adbdfaddf6eca48fec256d977c8630a1750b8991347d1ac911" + +[[package]] +name = "uefi_qemu_test" +version = "0.0.0" +dependencies = [ + "r-efi", +] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/wasm32/Dockerfile rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/wasm32/Dockerfile --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/wasm32/Dockerfile 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/wasm32/Dockerfile 1970-01-01 00:00:00.000000000 +0000 @@ -1,63 +0,0 @@ -FROM ubuntu:22.04 - -ARG DEBIAN_FRONTEND=noninteractive -RUN apt-get update && apt-get install -y --no-install-recommends \ - g++ \ - make \ - ninja-build \ - file \ - curl \ - ca-certificates \ - python3 \ - git \ - cmake \ - sudo \ - gdb \ - xz-utils \ - libssl-dev \ - bzip2 \ - && rm -rf /var/lib/apt/lists/* - -COPY scripts/emscripten.sh /scripts/ -RUN bash /scripts/emscripten.sh - -COPY scripts/sccache.sh /scripts/ -RUN sh /scripts/sccache.sh - -# emcc seems to need python to specifically be "python" and not "python3" -RUN ln `which python3` /usr/bin/python - -ENV PATH=$PATH:/emsdk-portable -ENV PATH=$PATH:/emsdk-portable/upstream/emscripten/ - -# Rust's build system requires NodeJS to be in the path, but the directory in -# which emsdk stores it contains the version number. This caused breakages in -# the past when emsdk bumped the node version causing the path to point to a -# missing directory. -# -# To avoid the problem this symlinks the latest NodeJs version available to -# "latest", and adds that to the path. -RUN ln -s /emsdk-portable/node/$(ls /emsdk-portable/node | sort -V | tail -n 1) \ - /emsdk-portable/node/latest -ENV PATH=$PATH:/emsdk-portable/node/latest/bin/ - -ENV BINARYEN_ROOT=/emsdk-portable/upstream/ -ENV EMSDK=/emsdk-portable -ENV EM_CONFIG=/emsdk-portable/.emscripten -ENV EM_CACHE=/emsdk-portable/upstream/emscripten/cache - -ENV TARGETS=wasm32-unknown-emscripten - -# Use -O1 optimizations in the link step to reduce time spent optimizing. 
-ENV EMCC_CFLAGS=-O1 - -COPY static/gitconfig /etc/gitconfig - -# Emscripten installation is user-specific -ENV NO_CHANGE_USER=1 -RUN chown 10719 -R /emsdk-portable/ - -# Exclude library/alloc due to OOM in benches. -# FIXME: Fix std tests -ENV SCRIPT python3 ../x.py test --stage 2 --host='' --target $TARGETS \ - --skip library/alloc --skip library/std diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/Dockerfile 2023-12-21 16:55:28.000000000 +0000 @@ -24,6 +24,7 @@ xz-utils \ nodejs \ mingw-w64 \ + libgccjit-12-dev \ && rm -rf /var/lib/apt/lists/* # Install powershell (universal package) so we can test x.ps1 on Linux @@ -34,6 +35,9 @@ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh +# Make `libgccjit.so` accessible to the linker. +RUN ln -s /usr/lib/gcc/x86_64-linux-gnu/12/libgccjit.so /usr/lib/x86_64-linux-gnu/libgccjit.so + # We are disabling CI LLVM since this builder is intentionally using a host # LLVM, rather than the typical src/llvm-project LLVM. ENV NO_DOWNLOAD_CI_LLVM 1 @@ -47,6 +51,7 @@ --build=x86_64-unknown-linux-gnu \ --llvm-root=/usr/lib/llvm-15 \ --enable-llvm-link-shared \ + $USE_NEW_MANGLING \ --set rust.thin-lto-import-instr-limit=10 COPY host-x86_64/x86_64-gnu-llvm-15/script.sh /tmp/ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/script.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/script.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/script.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-15/script.sh 2023-12-21 16:55:28.000000000 +0000 @@ -4,34 +4,46 @@ # Only run the stage 1 tests on merges, not on PR CI jobs. if [[ -z "${PR_CI_JOB}" ]]; then - ../x.py --stage 1 test --skip src/tools/tidy && \ - # Run the `mir-opt` tests again but this time for a 32-bit target. - # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have - # both 32-bit and 64-bit outputs updated by the PR author, before - # the PR is approved and tested for merging. - # It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`, - # despite having different output on 32-bit vs 64-bit targets. - ../x.py --stage 1 test tests/mir-opt \ - --host='' --target=i686-unknown-linux-gnu && \ - # Run `ui-fulldeps` in `--stage=1`, which actually uses the stage0 - # compiler, and is sensitive to the addition of new flags. - ../x.py --stage 1 test tests/ui-fulldeps + # When running gcc backend tests, we need to install `libgccjit` and to not run llvm codegen + # tests as it will fail them. + if [[ "${ENABLE_GCC_CODEGEN}" == "1" ]]; then + ../x.py --stage 1 test --skip src/tools/tidy --skip tests/codegen + else + ../x.py --stage 1 test --skip src/tools/tidy + fi + + # Run the `mir-opt` tests again but this time for a 32-bit target. + # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have + # both 32-bit and 64-bit outputs updated by the PR author, before + # the PR is approved and tested for merging. + # It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`, + # despite having different output on 32-bit vs 64-bit targets. 
+ ../x.py --stage 1 test tests/mir-opt --host='' --target=i686-unknown-linux-gnu + + # Run `ui-fulldeps` in `--stage=1`, which actually uses the stage0 + # compiler, and is sensitive to the addition of new flags. + ../x.py --stage 1 test tests/ui-fulldeps fi +# When running gcc backend tests, we need to install `libgccjit` and to not run llvm codegen +# tests as it will fail them. # NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux. -../x.py --stage 2 test --skip src/tools/tidy && \ - # Run the `mir-opt` tests again but this time for a 32-bit target. - # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have - # both 32-bit and 64-bit outputs updated by the PR author, before - # the PR is approved and tested for merging. - # It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`, - # despite having different output on 32-bit vs 64-bit targets. - ../x --stage 2 test tests/mir-opt \ - --host='' --target=i686-unknown-linux-gnu && \ - # Run the UI test suite again, but in `--pass=check` mode - # - # This is intended to make sure that both `--pass=check` continues to - # work. - # - ../x.ps1 --stage 2 test tests/ui --pass=check \ - --host='' --target=i686-unknown-linux-gnu +if [[ "${ENABLE_GCC_CODEGEN}" == "1" ]]; then + ../x.py --stage 2 test --skip src/tools/tidy --skip tests/codegen +else + ../x.py --stage 2 test --skip src/tools/tidy +fi + +# Run the `mir-opt` tests again but this time for a 32-bit target. +# This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have +# both 32-bit and 64-bit outputs updated by the PR author, before +# the PR is approved and tested for merging. +# It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`, +# despite having different output on 32-bit vs 64-bit targets. +../x --stage 2 test tests/mir-opt --host='' --target=i686-unknown-linux-gnu + +# Run the UI test suite again, but in `--pass=check` mode +# +# This is intended to make sure that both `--pass=check` continues to +# work. +../x.ps1 --stage 2 test tests/ui --pass=check --host='' --target=i686-unknown-linux-gnu diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-16/Dockerfile rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-16/Dockerfile --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-16/Dockerfile 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-16/Dockerfile 2023-12-21 16:55:28.000000000 +0000 @@ -38,6 +38,10 @@ # LLVM, rather than the typical src/llvm-project LLVM. ENV NO_DOWNLOAD_CI_LLVM 1 +# This is not the latest LLVM version, so some components required by tests may +# be missing. 
+ENV IS_NOT_LATEST_LLVM 1 + # Using llvm-link-shared due to libffi issues -- see #34486 ENV RUST_CONFIGURE_ARGS \ --build=x86_64-unknown-linux-gnu \ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-17/Dockerfile rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-17/Dockerfile --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-17/Dockerfile 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-llvm-17/Dockerfile 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,50 @@ +FROM ubuntu:23.10 + +ARG DEBIAN_FRONTEND=noninteractive + +RUN apt-get update && apt-get install -y --no-install-recommends \ + g++ \ + gcc-multilib \ + make \ + ninja-build \ + file \ + curl \ + ca-certificates \ + python3 \ + git \ + cmake \ + sudo \ + gdb \ + llvm-17-tools \ + llvm-17-dev \ + libedit-dev \ + libssl-dev \ + pkg-config \ + zlib1g-dev \ + xz-utils \ + nodejs \ + mingw-w64 \ + && rm -rf /var/lib/apt/lists/* + +# Install powershell (universal package) so we can test x.ps1 on Linux +RUN curl -sL "https://github.com/PowerShell/PowerShell/releases/download/v7.3.1/powershell_7.3.1-1.deb_amd64.deb" > powershell.deb && \ + dpkg -i powershell.deb && \ + rm -f powershell.deb + +COPY scripts/sccache.sh /scripts/ +RUN sh /scripts/sccache.sh + +# We are disabling CI LLVM since this builder is intentionally using a host +# LLVM, rather than the typical src/llvm-project LLVM. +ENV NO_DOWNLOAD_CI_LLVM 1 + +# Using llvm-link-shared due to libffi issues -- see #34486 +ENV RUST_CONFIGURE_ARGS \ + --build=x86_64-unknown-linux-gnu \ + --llvm-root=/usr/lib/llvm-17 \ + --enable-llvm-link-shared \ + --set rust.thin-lto-import-instr-limit=10 + +COPY host-x86_64/x86_64-gnu-llvm-15/script.sh /tmp/ + +ENV SCRIPT /tmp/script.sh diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile 2023-12-21 16:55:28.000000000 +0000 @@ -15,6 +15,7 @@ sudo \ xz-utils \ tidy \ + libgccjit-12-dev \ \ # Install dependencies for chromium browser gconf-service \ @@ -61,6 +62,11 @@ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh +# Make `libgccjit.so` accessible. +RUN ln -s /usr/lib/gcc/x86_64-linux-gnu/12/libgccjit.so /usr/lib/x86_64-linux-gnu/libgccjit.so +# Fix rustc_codegen_gcc lto issues. 
+ENV GCC_EXEC_PREFIX="/usr/lib/gcc/" + COPY host-x86_64/x86_64-gnu-tools/checktools.sh /tmp/ RUN curl -sL https://nodejs.org/dist/v14.20.0/node-v14.20.0-linux-x64.tar.xz | tar -xJ @@ -81,7 +87,10 @@ ENV RUST_CONFIGURE_ARGS \ --build=x86_64-unknown-linux-gnu \ - --save-toolstates=/tmp/toolstate/toolstates.json + --save-toolstates=/tmp/toolstate/toolstates.json \ + --enable-new-symbol-mangling + +ENV HOST_TARGET x86_64-unknown-linux-gnu ENV SCRIPT /tmp/checktools.sh ../x.py && \ NODE_PATH=`npm root -g` python3 ../x.py test tests/rustdoc-gui --stage 2 \ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,5 @@ #!/bin/sh +# ignore-tidy-linelength set -eu @@ -26,8 +27,30 @@ python3 "$X_PY" test --stage 2 src/tools/rustfmt python3 "$X_PY" test --stage 2 src/tools/miri # We natively run this script on x86_64-unknown-linux-gnu and x86_64-pc-windows-msvc. -# Also cover some other targets (on both of these hosts) via cross-testing. +# Also cover some other targets via cross-testing, in particular all tier 1 targets. export BOOTSTRAP_SKIP_TARGET_SANITY=1 # we don't need `cc` for these targets -python3 "$X_PY" test --stage 2 src/tools/miri --target i686-pc-windows-msvc -python3 "$X_PY" test --stage 2 src/tools/miri --target aarch64-apple-darwin +case $HOST_TARGET in + x86_64-unknown-linux-gnu) + # Only this branch runs in PR CI. + # Fully test all main OSes, including a 32bit target. + python3 "$X_PY" test --stage 2 src/tools/miri --target x86_64-apple-darwin + python3 "$X_PY" test --stage 2 src/tools/miri --target i686-pc-windows-msvc + # Only run "pass" tests for the remaining targets, which is quite a bit faster. + python3 "$X_PY" test --stage 2 src/tools/miri --target x86_64-pc-windows-gnu --test-args pass + python3 "$X_PY" test --stage 2 src/tools/miri --target i686-unknown-linux-gnu --test-args pass + python3 "$X_PY" test --stage 2 src/tools/miri --target aarch64-unknown-linux-gnu --test-args pass + python3 "$X_PY" test --stage 2 src/tools/miri --target s390x-unknown-linux-gnu --test-args pass + ;; + x86_64-pc-windows-msvc) + # Strangely, Linux targets do not work here. cargo always says + # "error: cannot produce cdylib for ... as the target ... does not support these crate types". + # Only run "pass" tests, which is quite a bit faster. 
+ python3 "$X_PY" test --stage 2 src/tools/miri --target aarch64-apple-darwin --test-args pass + python3 "$X_PY" test --stage 2 src/tools/miri --target i686-pc-windows-gnu --test-args pass + ;; + *) + echo "FATAL: unexpected host $HOST_TARGET" + exit 1 + ;; +esac unset BOOTSTRAP_SKIP_TARGET_SANITY diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/run.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/run.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/run.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/run.sh 2023-12-21 16:55:28.000000000 +0000 @@ -235,7 +235,7 @@ args="$args --volume /tmp/toolstate:/tmp/toolstate" id=$(id -u) - if [[ "$id" != 0 && "$(docker -v)" =~ ^podman ]]; then + if [[ "$id" != 0 && "$(docker version)" =~ Podman ]]; then # Rootless podman creates a separate user namespace, where an inner # LOCAL_USER_ID will map to a different subuid range on the host. # The "keep-id" mode maps the current UID directly into the container. @@ -264,10 +264,27 @@ BASE_COMMIT="" fi +SUMMARY_FILE=github-summary.md +touch $objdir/${SUMMARY_FILE} + +extra_env="" +if [ "$ENABLE_GCC_CODEGEN" = "1" ]; then + extra_env="$EXTRA_ENV --env ENABLE_GCC_CODEGEN=1" + # If `ENABLE_GCC_CODEGEN` is set and not empty, we add the `--enable-new-symbol-mangling` + # argument to `RUST_CONFIGURE_ARGS` and set the `GCC_EXEC_PREFIX` environment variable. + # `cg_gcc` doesn't support the legacy mangling so we need to enforce the new one + # if we run `cg_gcc` tests. + extra_env="$EXTRA_ENV --env USE_NEW_MANGLING=--enable-new-symbol-mangling" + # Fix rustc_codegen_gcc lto issues. + extra_env="$EXTRA_ENV --env GCC_EXEC_PREFIX=/usr/lib/gcc/" + echo "Setting extra environment values for docker: $extra_env" +fi + docker \ run \ --workdir /checkout/obj \ --env SRC=/checkout \ + $extra_env \ $args \ --env CARGO_HOME=/cargo \ --env DEPLOY \ @@ -275,6 +292,7 @@ --env CI \ --env GITHUB_ACTIONS \ --env GITHUB_REF \ + --env GITHUB_STEP_SUMMARY="/checkout/obj/${SUMMARY_FILE}" \ --env TOOLSTATE_REPO_ACCESS_TOKEN \ --env TOOLSTATE_REPO \ --env TOOLSTATE_PUBLISH \ @@ -284,11 +302,14 @@ --env DIST_TRY_BUILD \ --env PR_CI_JOB \ --env OBJDIR_ON_HOST="$objdir" \ + --env CODEGEN_BACKENDS \ --init \ --rm \ rust-ci \ $command +cat $objdir/${SUMMARY_FILE} >> "${GITHUB_STEP_SUMMARY}" + if [ -f /.dockerenv ]; then rm -rf $objdir docker cp checkout:/checkout/obj $objdir diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/scripts/fuchsia-test-runner.py rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/scripts/fuchsia-test-runner.py --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/docker/scripts/fuchsia-test-runner.py 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/docker/scripts/fuchsia-test-runner.py 2023-12-21 16:55:28.000000000 +0000 @@ -12,6 +12,7 @@ import fcntl import glob import hashlib +import io import json import os import platform @@ -276,27 +277,60 @@ stderr=self.subprocess_output(), ) - # Start emulator - self.log_info("Starting emulator...") - product_bundle = "terminal.qemu-" + self.triple_to_arch(self.target) + # Look up the product bundle transfer manifest. + self.log_info("Looking up the product bundle transfer manifest...") + product_name = "minimal." + self.triple_to_arch(self.target) + fuchsia_version = "14.20230811.2.1" + + # FIXME: We should be able to replace this with the machine parsable + # `ffx --machine json product lookup ...` once F15 is released. 
+ out = subprocess.check_output( + [ + ffx_path, + "product", + "lookup", + product_name, + fuchsia_version, + "--base-url", + "gs://fuchsia/development/" + fuchsia_version, + ], + env=ffx_env, + stderr=self.subprocess_output(), + ) + + self.log_debug(out) + + for line in io.BytesIO(out): + if line.startswith(b"gs://"): + transfer_manifest_url = line.rstrip() + break + else: + raise Exception("Unable to parse transfer manifest") + + # Download the product bundle. + product_bundle_dir = os.path.join(self.tmp_dir(), 'product-bundle') subprocess.check_call( [ ffx_path, - "product-bundle", - "get", - product_bundle, + "product", + "download", + transfer_manifest_url, + product_bundle_dir, + "--force", ], env=ffx_env, stdout=self.subprocess_output(), stderr=self.subprocess_output(), ) + + # Start emulator # FIXME: condition --accel hyper on target arch matching host arch subprocess.check_call( [ ffx_path, "emu", "start", - product_bundle, + product_bundle_dir, "--headless", "--log", self.emulator_log_path(), diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/github-actions/ci.yml rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/github-actions/ci.yml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/github-actions/ci.yml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/github-actions/ci.yml 2023-12-21 16:55:28.000000000 +0000 @@ -91,6 +91,10 @@ os: macos-13 # We use the standard runner for now <<: *base-job + - &job-macos-m1 + os: macos-13-xlarge + <<: *base-job + - &job-windows-8c os: windows-2019-8core-32gb <<: *base-job @@ -153,6 +157,10 @@ run: src/ci/scripts/dump-environment.sh <<: *step + - name: install awscli + run: src/ci/scripts/install-awscli.sh + <<: *step + - name: install sccache run: src/ci/scripts/install-sccache.sh <<: *step @@ -165,6 +173,10 @@ run: src/ci/scripts/install-clang.sh <<: *step + - name: install tidy + run: src/ci/scripts/install-tidy.sh + <<: *step + - name: install WIX run: src/ci/scripts/install-wix.sh <<: *step @@ -281,6 +293,7 @@ - auto - try - try-perf + - automation/bors/try - master pull_request: branches: @@ -322,6 +335,8 @@ <<: *job-linux-4c - name: x86_64-gnu-llvm-15 + env: + ENABLE_GCC_CODEGEN: "1" <<: *job-linux-16c - name: x86_64-gnu-tools @@ -350,6 +365,8 @@ <<: *job-linux-8c - name: dist-aarch64-linux + env: + CODEGEN_BACKENDS: llvm,cranelift <<: *job-linux-8c - name: dist-android @@ -402,14 +419,19 @@ - &dist-x86_64-linux name: dist-x86_64-linux + env: + CODEGEN_BACKENDS: llvm,cranelift <<: *job-linux-16c - name: dist-x86_64-linux-alt env: IMAGE: dist-x86_64-linux + CODEGEN_BACKENDS: llvm,cranelift <<: *job-linux-16c - name: dist-x86_64-musl + env: + CODEGEN_BACKENDS: llvm,cranelift <<: *job-linux-8c - name: dist-x86_64-netbsd @@ -427,20 +449,6 @@ - name: test-various <<: *job-linux-8c - - name: wasm32 - env: - # Running emscripten tests currently requires that we are - # building a nightly toolchain. Otherwise, we cannot pass - # -Zunstable-options to libtest. Normally we workaround this by - # setting RUSTC_BOOTSTRAP in the environment, but that doesn't - # work for emscripten as environment variables are not threaded - # into the compiled code. 
- # - # For more details see: - # https://emscripten.org/docs/porting/connecting_cpp_and_javascript/Interacting-with-code.html#environment-variables - RUST_CI_OVERRIDE_RELEASE_CHANNEL: nightly - <<: *job-linux-8c - - name: x86_64-gnu <<: *job-linux-4c @@ -468,6 +476,11 @@ - name: x86_64-gnu-distcheck <<: *job-linux-8c + - name: x86_64-gnu-llvm-17 + env: + RUST_BACKTRACE: 1 + <<: *job-linux-8c + - name: x86_64-gnu-llvm-16 env: RUST_BACKTRACE: 1 @@ -501,6 +514,7 @@ NO_DEBUG_ASSERTIONS: 1 NO_OVERFLOW_CHECKS: 1 DIST_REQUIRE_ALL_TOOLS: 1 + CODEGEN_BACKENDS: llvm,cranelift <<: *job-macos-xl - name: dist-apple-various @@ -536,17 +550,14 @@ # This target only needs to support 11.0 and up as nothing else supports the hardware - name: dist-aarch64-apple env: - SCRIPT: ./x.py dist bootstrap --include-default-paths --stage 2 + SCRIPT: ./x.py dist bootstrap --include-default-paths --host=aarch64-apple-darwin --target=aarch64-apple-darwin RUST_CONFIGURE_ARGS: >- - --build=x86_64-apple-darwin - --host=aarch64-apple-darwin - --target=aarch64-apple-darwin --enable-full-tools --enable-sanitizers --enable-profiler - --disable-docs --set rust.jemalloc --set llvm.ninja=false + --set rust.lto=thin RUSTC_RETRY_LINKER_ON_SEGFAULT: 1 SELECT_XCODE: /Applications/Xcode_13.4.1.app USE_XCODE_CLANG: 1 @@ -556,15 +567,26 @@ NO_DEBUG_ASSERTIONS: 1 NO_OVERFLOW_CHECKS: 1 DIST_REQUIRE_ALL_TOOLS: 1 - # Corresponds to 16K page size - # - # Shouldn't be needed if jemalloc-sys is updated to - # handle this platform like iOS or if we build on - # aarch64-apple-darwin itself. - # - # https://github.com/gnzlbg/jemallocator/blob/c27a859e98e3cb790dc269773d9da71a1e918458/jemalloc-sys/build.rs#L237 - JEMALLOC_SYS_WITH_LG_PAGE: 14 - <<: *job-macos-xl + <<: *job-macos-m1 + + # This target only needs to support 11.0 and up as nothing else supports the hardware + - name: aarch64-apple + env: + SCRIPT: ./x.py --stage 2 test --host=aarch64-apple-darwin --target=aarch64-apple-darwin + RUST_CONFIGURE_ARGS: >- + --enable-sanitizers + --enable-profiler + --set rust.jemalloc + --set llvm.ninja=false + RUSTC_RETRY_LINKER_ON_SEGFAULT: 1 + SELECT_XCODE: /Applications/Xcode_13.4.1.app + USE_XCODE_CLANG: 1 + MACOSX_DEPLOYMENT_TARGET: 11.0 + MACOSX_STD_DEPLOYMENT_TARGET: 11.0 + NO_LLVM_ASSERTIONS: 1 + NO_DEBUG_ASSERTIONS: 1 + NO_OVERFLOW_CHECKS: 1 + <<: *job-macos-m1 ###################### # Windows Builders # @@ -585,6 +607,7 @@ - name: x86_64-msvc-ext env: SCRIPT: python x.py --stage 2 test src/tools/cargotest src/tools/cargo && src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstate/toolstates.json windows + HOST_TARGET: x86_64-pc-windows-msvc RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-lld --save-toolstates=/tmp/toolstate/toolstates.json DEPLOY_TOOLSTATES_JSON: toolstates-windows.json <<: *job-windows-8c @@ -702,12 +725,14 @@ env: DIST_TRY_BUILD: 1 <<: [*shared-ci-variables, *prod-variables] - if: github.event_name == 'push' && (github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust' + if: github.event_name == 'push' && (((github.ref == 'refs/heads/try' || github.ref == 'refs/heads/try-perf') && github.repository == 'rust-lang-ci/rust') || ((github.ref == 'refs/heads/automation/bors/try') && github.repository == 'rust-lang/rust')) strategy: matrix: include: - &dist-x86_64-linux name: dist-x86_64-linux + env: + CODEGEN_BACKENDS: llvm,cranelift <<: *job-linux-16c master: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/run.sh 
rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/run.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/run.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/run.sh 2023-12-21 16:55:28.000000000 +0000 @@ -47,7 +47,8 @@ export CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse -if ! isCI || isCiBranch auto || isCiBranch beta || isCiBranch try || isCiBranch try-perf; then +if ! isCI || isCiBranch auto || isCiBranch beta || isCiBranch try || isCiBranch try-perf || \ + isCiBranch automation/bors/try; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set build.print-step-timings --enable-verbose-tests" RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set build.metrics" HAS_METRICS=1 @@ -97,12 +98,14 @@ if [ "$NO_LLVM_ASSERTIONS" = "1" ]; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-llvm-assertions" elif [ "$DEPLOY_ALT" != "" ]; then - if [ "$NO_PARALLEL_COMPILER" = "" ]; then - RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.parallel-compiler" + if [ "$ALT_PARALLEL_COMPILER" = "" ]; then + RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.parallel-compiler=false" fi RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-llvm-assertions" RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.verify-llvm-ir" fi + + RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.codegen-backends=${CODEGEN_BACKENDS:-llvm}" else # We almost always want debug assertions enabled, but sometimes this takes too # long for too little benefit, so we just turn them off. @@ -123,8 +126,15 @@ RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.verify-llvm-ir" - # Test the Cranelift backend in on CI, but don't ship it. - RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.codegen-backends=llvm,cranelift" + # When running gcc backend tests, we need to install `libgccjit` and to not run llvm codegen + # tests as it will fail them. + if [[ "${ENABLE_GCC_CODEGEN}" == "1" ]]; then + # Test the Cranelift and GCC backends in CI. Bootstrap knows which targets to run tests on. + RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.codegen-backends=llvm,cranelift,gcc" + else + # Test the Cranelift backend in CI. Bootstrap knows which targets to run tests on. + RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.codegen-backends=llvm,cranelift" + fi # We enable this for non-dist builders, since those aren't trying to produce # fresh binaries. We currently don't entirely support distributing a fresh diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/scripts/install-awscli.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/scripts/install-awscli.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/scripts/install-awscli.sh 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/scripts/install-awscli.sh 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,29 @@ +#!/bin/bash +# This script downloads and installs the awscli binaries directly from +# Amazon. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +AWS_VERSION="2.13.25" + +# Only the macOS arm64/aarch64 GitHub Actions runner needs to have AWS +# installed; other platforms have it preinstalled. 
+ +if isMacOS; then + platform=$(uname -m) + case $platform in + x86_64) + ;; + arm64) + file="https://awscli.amazonaws.com/AWSCLIV2-${AWS_VERSION}.pkg" + retry curl -f "${file}" -o "AWSCLIV2.pkg" + sudo installer -pkg "AWSCLIV2.pkg" -target / + ;; + *) + echo "unsupported architecture: ${platform}" + exit 1 + esac +fi diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/scripts/install-tidy.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/scripts/install-tidy.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/scripts/install-tidy.sh 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/scripts/install-tidy.sh 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +#!/bin/bash +# This script downloads and installs the tidy binary from Homebrew. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +# Only the macOS arm64/aarch64 GitHub Actions runner needs to have tidy +# installed; other platforms have it preinstalled. + +if isMacOS; then + platform=$(uname -m) + case $platform in + x86_64) + ;; + arm64) + brew install tidy-html5 + ;; + *) + echo "unsupported architecture: ${platform}" + exit 1 + esac +fi diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/scripts/verify-channel.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/scripts/verify-channel.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/ci/scripts/verify-channel.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/ci/scripts/verify-channel.sh 2023-12-21 16:55:28.000000000 +0000 @@ -8,7 +8,7 @@ source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" -if isCiBranch auto || isCiBranch try || isCiBranch try-perf; then +if isCiBranch auto || isCiBranch try || isCiBranch try-perf || isCiBranch automation/bors/try; then echo "channel verification is only executed on PR builds" exit fi diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/book/redirects/compiler-plugins.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/book/redirects/compiler-plugins.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/book/redirects/compiler-plugins.md 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/book/redirects/compiler-plugins.md 2023-12-21 16:55:33.000000000 +0000 @@ -2,12 +2,5 @@ There is a new edition of the book and this is an old link. -> Compiler plugins are user-provided libraries that extend the compiler's behavior with new syntax extensions, lint checks, etc. - ---- - -This particular chapter has moved to [the Unstable Book][2]. - -* **[In the Unstable Rust Book: `plugin`][2]** - -[2]: ../unstable-book/language-features/plugin.html +> Compiler plugins were user-provided libraries that extended the compiler's behavior in certain ways. +> Support for them has been removed. 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/book/src/ch02-00-guessing-game-tutorial.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/book/src/ch02-00-guessing-game-tutorial.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/book/src/ch02-00-guessing-game-tutorial.md 2023-12-04 19:48:37.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/book/src/ch02-00-guessing-game-tutorial.md 2023-12-21 16:55:33.000000000 +0000 @@ -930,8 +930,8 @@ [randcrate]: https://crates.io/crates/rand [semver]: http://semver.org [cratesio]: https://crates.io/ -[doccargo]: http://doc.crates.io -[doccratesio]: http://doc.crates.io/crates-io.html +[doccargo]: https://doc.rust-lang.org/cargo/ +[doccratesio]: https://doc.rust-lang.org/cargo/reference/publishing.html [match]: ch06-02-match.html [shadowing]: ch03-01-variables-and-mutability.html#shadowing [parse]: ../std/primitive.str.html#method.parse diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/embedded-book/src/start/hardware.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/embedded-book/src/start/hardware.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/embedded-book/src/start/hardware.md 2023-12-04 19:48:38.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/embedded-book/src/start/hardware.md 2023-12-21 16:55:35.000000000 +0000 @@ -64,6 +64,9 @@ ``` We'll use `thumbv7em-none-eabihf` as that covers the Cortex-M4F core. +> **NOTE**: As you may remember from the previous chapter, we have to install +> all targets and this is a new one. So don't forget to run the installation +> process `rustup target add thumbv7em-none-eabihf` for this target. The second step is to enter the memory region information into the `memory.x` file. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/guide-plugins.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/guide-plugins.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/guide-plugins.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/guide-plugins.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,3 @@ % The (old) Rust Compiler Plugins Guide -This content has moved into -[the Unstable Book](unstable-book/language-features/plugin.html). +Support for plugins has been removed. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/nomicon/src/exception-safety.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/nomicon/src/exception-safety.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/nomicon/src/exception-safety.md 2023-12-04 19:48:38.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/nomicon/src/exception-safety.md 2023-12-21 16:55:36.000000000 +0000 @@ -172,7 +172,7 @@ fn removed(&self) -> &T { self.elt.as_ref().unwrap() } - unsafe fn get(&self, index: usize) -> &T { &self.data[index] } + fn get(&self, index: usize) -> &T { &self.data[index] } unsafe fn move_to(&mut self, index: usize) { let index_ptr: *const _ = &self.data[index]; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/attributes/codegen.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/attributes/codegen.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/attributes/codegen.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/attributes/codegen.md 2023-12-21 16:55:38.000000000 +0000 @@ -204,6 +204,66 @@ `tme` | | FEAT_TME - Transactional Memory Extension `vh` | | FEAT_VHE - Virtualization Host Extensions +#### `riscv32` or `riscv64` + +This platform requires that `#[target_feature]` is only applied to [`unsafe` +functions][unsafe function]. 
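As a quick editor-added illustration of the rule stated above (not part of the reference text in the diff), this is roughly what `#[target_feature]` looks like on a RISC-V target, where the attribute is only accepted on `unsafe fn`. The function names are invented for the example, and it assumes the `zbb` feature listed in the table below is accepted by the compiler on that target:

```rust
// Sketch only: on riscv64, `#[target_feature]` functions must be `unsafe fn`.
#[cfg(target_arch = "riscv64")]
#[target_feature(enable = "zbb")]
unsafe fn popcount_zbb(x: u64) -> u32 {
    // With Zbb enabled the compiler is free to lower this to a single `cpop`.
    x.count_ones()
}

#[cfg(target_arch = "riscv64")]
fn popcount(x: u64) -> u32 {
    // The caller promises that the running CPU actually implements Zbb
    // (e.g. the binary is only deployed on such hardware); that promise is
    // what makes this `unsafe` call sound.
    unsafe { popcount_zbb(x) }
}

#[cfg(not(target_arch = "riscv64"))]
fn popcount(x: u64) -> u32 {
    x.count_ones()
}

fn main() {
    assert_eq!(popcount(0b1011), 3);
}
```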
+ +Further documentation on these features can be found in their respective +specification. Many specifications are described in the [RISC-V ISA Manual] or +in another manual hosted on the [RISC-V GitHub Account]. + +[RISC-V ISA Manual]: https://github.com/riscv/riscv-isa-manual +[RISC-V GitHub Account]: https://github.com/riscv + +Feature | Implicitly Enables | Description +------------|---------------------|------------------- +`a` | | [A][rv-a] — Atomic instructions +`c` | | [C][rv-c] — Compressed instructions +`m` | | [M][rv-m] — Integer Multiplication and Division instructions +`zb` | `zba`, `zbc`, `zbs` | [Zb][rv-zb] — Bit Manipulation instructions +`zba` | | [Zba][rv-zb-zba] — Address Generation instructions +`zbb` | | [Zbb][rv-zb-zbb] — Basic bit-manipulation +`zbc` | | [Zbc][rv-zb-zbc] — Carry-less multiplication +`zbkb` | | [Zbkb][rv-zb-zbkb] — Bit Manipulation Instructions for Cryptography +`zbkc` | | [Zbkc][rv-zb-zbc] — Carry-less multiplication for Cryptography +`zbkx` | | [Zbkx][rv-zb-zbkx] — Crossbar permutations +`zbs` | | [Zbs][rv-zb-zbs] — Single-bit instructions +`zk` | `zkn`, `zkr`, `zks`, `zkt`, `zbkb`, `zbkc`, `zkbx` | [Zk][rv-zk] — Scalar Cryptography +`zkn` | `zknd`, `zkne`, `zknh`, `zbkb`, `zbkc`, `zkbx` | [Zkn][rv-zkn] — NIST Algorithm suite extension +`zknd` | | [Zknd][rv-zknd] — NIST Suite: AES Decryption +`zkne` | | [Zkne][rv-zkne] — NIST Suite: AES Encryption +`zknh` | | [Zknh][rv-zknh] — NIST Suite: Hash Function Instructions +`zkr` | | [Zkr][rv-zkr] — Entropy Source Extension +`zks` | `zksed`, `zksh`, `zbkb`, `zbkc`, `zkbx` | [Zks][rv-zks] — ShangMi Algorithm Suite +`zksed` | | [Zksed][rv-zksed] — ShangMi Suite: SM4 Block Cipher Instructions +`zksh` | | [Zksh][rv-zksh] — ShangMi Suite: SM3 Hash Function Instructions +`zkt` | | [Zkt][rv-zkt] — Data Independent Execution Latency Subset + + + +[rv-a]: https://github.com/riscv/riscv-isa-manual/blob/de46343a245c6ee1f7b1a40c92fe1a86bd4f4978/src/a-st-ext.adoc +[rv-c]: https://github.com/riscv/riscv-isa-manual/blob/de46343a245c6ee1f7b1a40c92fe1a86bd4f4978/src/c-st-ext.adoc +[rv-m]: https://github.com/riscv/riscv-isa-manual/blob/de46343a245c6ee1f7b1a40c92fe1a86bd4f4978/src/m-st-ext.adoc +[rv-zb]: https://github.com/riscv/riscv-bitmanip +[rv-zb-zba]: https://github.com/riscv/riscv-bitmanip/blob/main/bitmanip/zba.adoc +[rv-zb-zbb]: https://github.com/riscv/riscv-bitmanip/blob/main/bitmanip/zbb.adoc +[rv-zb-zbc]: https://github.com/riscv/riscv-bitmanip/blob/main/bitmanip/zbc.adoc +[rv-zb-zbkb]: https://github.com/riscv/riscv-bitmanip/blob/main/bitmanip/zbkb.adoc +[rv-zb-zbkc]: https://github.com/riscv/riscv-bitmanip/blob/main/bitmanip/zbkc.adoc +[rv-zb-zbkx]: https://github.com/riscv/riscv-bitmanip/blob/main/bitmanip/zbkx.adoc +[rv-zb-zbs]: https://github.com/riscv/riscv-bitmanip/blob/main/bitmanip/zbs.adoc +[rv-zk]: https://github.com/riscv/riscv-crypto/blob/e2dd7d98b7f34d477e38cb5fd7a3af4379525189/doc/scalar/riscv-crypto-scalar-zk.adoc +[rv-zkn]: https://github.com/riscv/riscv-crypto/blob/e2dd7d98b7f34d477e38cb5fd7a3af4379525189/doc/scalar/riscv-crypto-scalar-zkn.adoc +[rv-zkne]: https://github.com/riscv/riscv-crypto/blob/e2dd7d98b7f34d477e38cb5fd7a3af4379525189/doc/scalar/riscv-crypto-scalar-zkne.adoc +[rv-zknd]: https://github.com/riscv/riscv-crypto/blob/e2dd7d98b7f34d477e38cb5fd7a3af4379525189/doc/scalar/riscv-crypto-scalar-zknd.adoc +[rv-zknh]: https://github.com/riscv/riscv-crypto/blob/e2dd7d98b7f34d477e38cb5fd7a3af4379525189/doc/scalar/riscv-crypto-scalar-zknh.adoc +[rv-zkr]: 
https://github.com/riscv/riscv-crypto/blob/e2dd7d98b7f34d477e38cb5fd7a3af4379525189/doc/scalar/riscv-crypto-scalar-zkr.adoc +[rv-zks]: https://github.com/riscv/riscv-crypto/blob/e2dd7d98b7f34d477e38cb5fd7a3af4379525189/doc/scalar/riscv-crypto-scalar-zks.adoc +[rv-zksed]: https://github.com/riscv/riscv-crypto/blob/e2dd7d98b7f34d477e38cb5fd7a3af4379525189/doc/scalar/riscv-crypto-scalar-zksed.adoc +[rv-zksh]: https://github.com/riscv/riscv-crypto/blob/e2dd7d98b7f34d477e38cb5fd7a3af4379525189/doc/scalar/riscv-crypto-scalar-zksh.adoc +[rv-zkt]: https://github.com/riscv/riscv-crypto/blob/e2dd7d98b7f34d477e38cb5fd7a3af4379525189/doc/scalar/riscv-crypto-scalar-zkr.adoc + #### `wasm32` or `wasm64` `#[target_feature]` may be used with both safe and diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/attributes.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/attributes.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/attributes.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/attributes.md 2023-12-21 16:55:38.000000000 +0000 @@ -275,8 +275,8 @@ - [`debugger_visualizer`] — Embeds a file that specifies debugger output for a type. [Doc comments]: comments.md#doc-comments -[ECMA-334]: https://www.ecma-international.org/publications/standards/Ecma-334.htm -[ECMA-335]: https://www.ecma-international.org/publications/standards/Ecma-335.htm +[ECMA-334]: https://www.ecma-international.org/publications-and-standards/standards/ecma-334/ +[ECMA-335]: https://www.ecma-international.org/publications-and-standards/standards/ecma-335/ [Expression Attributes]: expressions.md#expression-attributes [IDENTIFIER]: identifiers.md [RAW_STRING_LITERAL]: tokens.md#raw-string-literals diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/behavior-considered-undefined.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/behavior-considered-undefined.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/behavior-considered-undefined.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/behavior-considered-undefined.md 2023-12-21 16:55:38.000000000 +0000 @@ -27,9 +27,12 @@ * Data races. -* Evaluating a [dereference expression] (`*expr`) on a raw pointer that is - [dangling] or unaligned, even in [place expression context] - (e.g. `addr_of!(*expr)`). +* Accessing (loading from or storing to) a place that is [dangling] or [based on + a misaligned pointer]. +* Performing a place projection that violates the requirements of [in-bounds + pointer arithmetic][offset]. A place projection is a [field + expression][project-field], a [tuple index expression][project-tuple], or an + [array/slice index expression][project-slice]. * Breaking the [pointer aliasing rules]. `Box`, `&mut T` and `&T` follow LLVM’s scoped [noalias] model, except if the `&T` contains an [`UnsafeCell`]. References and boxes must not be [dangling] while they are @@ -68,7 +71,7 @@ * A `!` (all values are invalid for this type). * An integer (`i*`/`u*`), floating point value (`f*`), or raw pointer obtained from [uninitialized memory][undef], or uninitialized memory in a `str`. - * A reference or `Box` that is [dangling], unaligned, or points to an invalid value. + * A reference or `Box` that is [dangling], misaligned, or points to an invalid value. 
* Invalid metadata in a wide reference, `Box`, or raw pointer:
* `dyn Trait` metadata is invalid if it is not a pointer to a vtable for `Trait` that matches the actual dynamic trait the pointer or reference points to.
@@ -102,6 +105,36 @@
The span of bytes a pointer or reference "points to" is determined by the pointer value and the size of the pointee type (using `size_of_val`).
+### Places based on misaligned pointers
+[based on a misaligned pointer]: #places-based-on-misaligned-pointers
+
+A place is said to be "based on a misaligned pointer" if the last `*` projection
+during place computation was performed on a pointer that was not aligned for its
+type. (If there is no `*` projection in the place expression, then this is
+accessing the field of a local and rustc will guarantee proper alignment. If
+there are multiple `*` projections, then each of them incurs a load of the
+pointer-to-be-dereferenced itself from memory, and each of these loads is
+subject to the alignment constraint. Note that some `*` projections can be
+omitted in surface Rust syntax due to automatic dereferencing; we are
+considering the fully expanded place expression here.)
+
+For instance, if `ptr` has type `*const S` where `S` has an alignment of 8, then
+`ptr` must be 8-aligned or else `(*ptr).f` is "based on a misaligned pointer".
+This is true even if the type of the field `f` is `u8` (i.e., a type with
+alignment 1). In other words, the alignment requirement derives from the type of
+the pointer that was dereferenced, *not* the type of the field that is being
+accessed.
+
+Note that a place based on a misaligned pointer only leads to Undefined Behavior
+when it is loaded from or stored to. `addr_of!`/`addr_of_mut!` on such a place
+is allowed. `&`/`&mut` on a place requires the alignment of the field type (or
+else the program would be "producing an invalid value"), which generally is a
+less restrictive requirement than being based on an aligned pointer. Taking a
+reference will lead to a compiler error in cases where the field type might be
+more aligned than the type that contains it, i.e., `repr(packed)`. This means
+that being based on an aligned pointer is always sufficient to ensure that the
+new reference is aligned, but it is not always necessary.
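For illustration, a minimal sketch of this rule using a hypothetical `repr(packed)` struct: computing the address of the under-aligned field with `addr_of!` is allowed, but loading through that raw pointer with a plain `*` dereference would be Undefined Behavior, so `read_unaligned` is used instead.

```rust
use std::ptr::addr_of;

#[repr(packed)]
struct Packed {
    a: u8,
    b: u64, // usually stored at a misaligned offset because of `packed`
}

fn main() {
    let p = Packed { a: 1, b: 2 };
    // `addr_of!` only computes an address, so a place based on a
    // (potentially) misaligned pointer is allowed here; no reference is made.
    let b_ptr = addr_of!(p.b);
    // A plain `*b_ptr` load would be UB, because `b_ptr` is generally not
    // 8-aligned; `read_unaligned` performs the access correctly instead.
    let b = unsafe { b_ptr.read_unaligned() };
    assert_eq!(b, 2);
}
```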
+ ### Dangling pointers [dangling]: #dangling-pointers @@ -128,8 +161,11 @@ [Rustonomicon]: ../nomicon/index.html [`NonNull`]: ../core/ptr/struct.NonNull.html [`NonZero*`]: ../core/num/index.html -[dereference expression]: expressions/operator-expr.md#the-dereference-operator [place expression context]: expressions.md#place-expressions-and-value-expressions [rules]: inline-assembly.md#rules-for-inline-assembly [points to]: #pointed-to-bytes [pointed to]: #pointed-to-bytes +[offset]: ../std/primitive.pointer.html#method.offset +[project-field]: expressions/field-expr.md +[project-tuple]: expressions/tuple-expr.md#tuple-indexing-expressions +[project-slice]: expressions/array-expr.md#array-and-slice-indexing-expressions diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/destructors.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/destructors.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/destructors.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/destructors.md 2023-12-21 16:55:38.000000000 +0000 @@ -156,7 +156,7 @@ Apart from lifetime extension, the temporary scope of an expression is the smallest scope that contains the expression and is one of the following: -* The entire function body. +* The entire function. * A statement. * The body of an [`if`], [`while`] or [`loop`] expression. * The `else` block of an `if` expression. @@ -168,8 +168,8 @@ > **Notes**: > > Temporaries that are created in the final expression of a function -> body are dropped *after* any named variables bound in the function body, as -> there is no smaller enclosing temporary scope. +> body are dropped *after* any named variables bound in the function body. +> Their drop scope is the entire function, as there is no smaller enclosing temporary scope. > > The [scrutinee] of a `match` expression is not a temporary scope, so > temporaries in the scrutinee can be dropped after the `match` expression. For diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/expressions/operator-expr.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/expressions/operator-expr.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/expressions/operator-expr.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/expressions/operator-expr.md 2023-12-21 16:55:38.000000000 +0000 @@ -478,6 +478,16 @@ assert_eq!(values[1], 3); ``` +#### Slice DST pointer to pointer cast + +For slice types like `[T]` and `[U]`, the raw pointer types `*const [T]`, `*mut [T]`, +`*const [U]`, and `*mut [U]` encode the number of elements in this slice. Casts between +these raw pointer types preserve the number of elements. Note that, as a consequence, +such casts do *not* necessarily preserve the size of the pointer's referent (e.g., +casting `*const [u16]` to `*const [u8]` will result in a raw pointer which refers to an +object of half the size of the original). The same holds for `str` and any compound type +whose unsized tail is a slice type, such as struct `Foo(i32, [u8])` or `(u64, Foo)`. 
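A minimal sketch of the cast described above: the element count (3) is preserved, so the pointed-to span shrinks from six bytes to three.

```rust
fn main() {
    let data: [u16; 3] = [1, 2, 3];
    let wide: *const [u16] = &data[..];
    // The cast keeps the element count (3); it does not keep the byte size.
    let narrow = wide as *const [u8];
    // SAFETY: `narrow` points at the first 3 initialized bytes of `data`,
    // and `u8` has alignment 1.
    let bytes: &[u8] = unsafe { &*narrow };
    assert_eq!(bytes.len(), 3);
    assert_eq!(std::mem::size_of_val(bytes), 3);
}
```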
+ ## Assignment expressions > **Syntax**\ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/inline-assembly.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/inline-assembly.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/inline-assembly.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/inline-assembly.md 2023-12-21 16:55:38.000000000 +0000 @@ -414,10 +414,13 @@ Currently the following options are defined: - `pure`: The `asm!` block has no side effects, and its outputs depend only on its direct inputs (i.e. the values themselves, not what they point to) or values read from memory (unless the `nomem` options is also set). This allows the compiler to execute the `asm!` block fewer times than specified in the program (e.g. by hoisting it out of a loop) or even eliminate it entirely if the outputs are not used. + The `pure` option must be combined with either the `nomem` or `readonly` options, otherwise a compile-time error is emitted. - `nomem`: The `asm!` blocks does not read or write to any memory. This allows the compiler to cache the values of modified global variables in registers across the `asm!` block since it knows that they are not read or written to by the `asm!`. + The compiler also assumes that this `asm!` block does not perform any kind of synchronization with other threads, e.g. via fences. - `readonly`: The `asm!` block does not write to any memory. This allows the compiler to cache the values of unmodified global variables in registers across the `asm!` block since it knows that they are not written to by the `asm!`. + The compiler also assumes that this `asm!` block does not perform any kind of synchronization with other threads, e.g. via fences. - `preserves_flags`: The `asm!` block does not modify the flags register (defined in the rules below). This allows the compiler to avoid recomputing the condition flags after the `asm!` block. - `noreturn`: The `asm!` block never returns, and its return type is defined as `!` (never). @@ -432,7 +435,6 @@ The compiler performs some additional checks on options: - The `nomem` and `readonly` options are mutually exclusive: it is a compile-time error to specify both. -- The `pure` option must be combined with either the `nomem` or `readonly` options, otherwise a compile-time error is emitted. - It is a compile-time error to specify `pure` on an asm block with no outputs or only discarded outputs (`_`). - It is a compile-time error to specify `noreturn` on an asm block with outputs. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/items/traits.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/items/traits.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/items/traits.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/items/traits.md 2023-12-21 16:55:38.000000000 +0000 @@ -43,7 +43,7 @@ } ``` -Trait functions are not allowed to be [`async`] or [`const`]. +Trait functions are not allowed to be [`const`]. 
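Since `async` no longer appears in that list, a trait function may now be written as an `async fn`. A minimal sketch (the future is only constructed here; actually running it would require an executor or manual polling):

```rust
trait Greeter {
    // Desugars to a method returning an `impl Future<Output = String>`.
    async fn greet(&self, name: &str) -> String;
}

struct English;

impl Greeter for English {
    async fn greet(&self, name: &str) -> String {
        format!("Hello, {name}!")
    }
}

fn main() {
    // Constructing the future is synchronous; an executor would poll it.
    let _future = English.greet("world");
}
```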
## Trait bounds diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/types/impl-trait.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/types/impl-trait.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/types/impl-trait.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/types/impl-trait.md 2023-12-21 16:55:38.000000000 +0000 @@ -88,6 +88,12 @@ Similarly, the concrete types of iterators could become very complex, incorporating the types of all previous iterators in a chain. Returning `impl Iterator` means that a function only exposes the `Iterator` trait as a bound on its return type, instead of explicitly specifying all of the other iterator types involved. +## Return-position `impl Trait` in traits and trait implementations + +Functions in traits may also use `impl Trait` as a syntax for an anonymous associated type. + +Every `impl Trait` in the return type of an associated function in a trait is desugared to an anonymous associated type. The return type that appears in the implementation's function signature is used to determine the value of the associated type. + ### Differences between generics and `impl Trait` in return position In argument position, `impl Trait` is very similar in semantics to a generic type parameter. @@ -121,8 +127,8 @@ ## Limitations -`impl Trait` can only appear as a parameter or return type of a free or inherent function. -It cannot appear inside implementations of traits, nor can it be the type of a let binding or appear inside a type alias. +`impl Trait` can only appear as a parameter or return type of a non-`extern` function. +It cannot be the type of a `let` binding, field type, or appear inside a type alias. [closures]: closure.md [_GenericArgs_]: ../paths.md#paths-in-expressions diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/types/textual.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/types/textual.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/reference/src/types/textual.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/reference/src/types/textual.md 2023-12-21 16:55:38.000000000 +0000 @@ -17,7 +17,9 @@ Since `str` is a [dynamically sized type], it can only be instantiated through a pointer type, such as `&str`. -## Bit validity +## Layout and bit validity + +`char` is guaranteed to have the same size and alignment as `u32` on all platforms. 
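The new layout guarantee can be checked directly; a minimal sketch:

```rust
use std::mem::{align_of, size_of};

fn main() {
    // `char` is guaranteed to have the same size and alignment as `u32`.
    assert_eq!(size_of::<char>(), size_of::<u32>());
    assert_eq!(align_of::<char>(), align_of::<u32>());
}
```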
Every byte of a `char` is guaranteed to be initialized (in other words, `transmute::<char, [u8; size_of::<char>()]>(...)` is always sound -- but since
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/SUMMARY.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/SUMMARY.md
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/SUMMARY.md 2023-12-04 19:48:39.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/SUMMARY.md 2023-12-21 16:55:39.000000000 +0000
@@ -161,7 +161,7 @@
- [Unpacking options with `?`](error/option_unwrap/question_mark.md)
- [Combinators: `map`](error/option_unwrap/map.md)
- [Combinators: `and_then`](error/option_unwrap/and_then.md)
- - [Defaults: `or`, `or_else`, `get_or_insert`, 'get_or_insert_with`](error/option_unwrap/defaults.md)
+ - [Defaults: `or`, `or_else`, `get_or_insert`, `get_or_insert_with`](error/option_unwrap/defaults.md)
- [`Result`](error/result.md)
- [`map` for `Result`](error/result/result_map.md)
- [aliases for `Result`](error/result/result_alias.md)
@@ -197,7 +197,7 @@
- [File I/O](std_misc/file.md)
- [`open`](std_misc/file/open.md)
- [`create`](std_misc/file/create.md)
- - [`read lines`](std_misc/file/read_lines.md)
+ - [`read_lines`](std_misc/file/read_lines.md)
- [Child processes](std_misc/process.md)
- [Pipes](std_misc/process/pipe.md)
- [Wait](std_misc/process/wait.md)
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/attribute.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/attribute.md
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/attribute.md 2023-12-04 19:48:39.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/attribute.md 2023-12-21 16:55:39.000000000 +0000
@@ -14,9 +14,34 @@
* mark functions that will be part of a benchmark
* [attribute like macros][macros]
-When attributes apply to a whole crate, their syntax is `#![crate_attribute]`,
-and when they apply to a module or item, the syntax is `#[item_attribute]`
-(notice the missing bang `!`).
+Attributes look like `#[outer_attribute]` or `#![inner_attribute]`,
+with the difference between them being where they apply.
+
+- `#[outer_attribute]` applies to the [item][item] immediately
+  following it. Some examples of items are: a function, a module
+  declaration, a constant, a structure, an enum. Here is an example
+  where attribute `#[derive(Debug)]` applies to the struct
+  `Rectangle`:
+  ```rust
+  #[derive(Debug)]
+  struct Rectangle {
+      width: u32,
+      height: u32,
+  }
+  ```
+
+- `#![inner_attribute]` applies to the enclosing [item][item] (typically a
+  module or a crate). In other words, this attribute is interpreted as
+  applying to the entire scope in which it's placed. Here is an example
+  where `#![allow(unused_variables)]` applies to the whole crate (if
+  placed in `main.rs`):
+  ```rust
+  #![allow(unused_variables)]
+
+  fn main() {
+      let x = 3; // This would normally warn about an unused variable.
+ } + ``` Attributes can take arguments with different syntaxes: @@ -36,5 +61,6 @@ [cfg]: attribute/cfg.md [crate]: attribute/crate.md +[item]: https://doc.rust-lang.org/stable/reference/items.html [lint]: https://en.wikipedia.org/wiki/Lint_%28software%29 [macros]: https://doc.rust-lang.org/book/ch19-06-macros.html#attribute-like-macros diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/custom_types/constants.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/custom_types/constants.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/custom_types/constants.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/custom_types/constants.md 2023-12-21 16:55:39.000000000 +0000 @@ -4,7 +4,7 @@ including global. Both require explicit type annotation: * `const`: An unchangeable value (the common case). -* `static`: A possibly `mut`able variable with [`'static`][static] lifetime. +* `static`: A possibly mutable variable with [`'static`][static] lifetime. The static lifetime is inferred and does not have to be specified. Accessing or modifying a mutable static variable is [`unsafe`][unsafe]. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/error/option_unwrap/question_mark.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/error/option_unwrap/question_mark.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/error/option_unwrap/question_mark.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/error/option_unwrap/question_mark.md 2023-12-21 16:55:39.000000000 +0000 @@ -8,7 +8,8 @@ ```rust,editable fn next_birthday(current_age: Option) -> Option { // If `current_age` is `None`, this returns `None`. - // If `current_age` is `Some`, the inner `u8` gets assigned to `next_age` + // If `current_age` is `Some`, the inner `u8` value + 1 + // gets assigned to `next_age` let next_age: u8 = current_age? + 1; Some(format!("Next year I will be {}", next_age)) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/flow_control/while_let.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/flow_control/while_let.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/flow_control/while_let.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/flow_control/while_let.md 2023-12-21 16:55:39.000000000 +0000 @@ -31,26 +31,24 @@ Using `while let` makes this sequence much nicer: ```rust,editable -fn main() { - // Make `optional` of type `Option` - let mut optional = Some(0); +// Make `optional` of type `Option` +let mut optional = Some(0); - // This reads: "while `let` destructures `optional` into - // `Some(i)`, evaluate the block (`{}`). Else `break`. - while let Some(i) = optional { - if i > 9 { - println!("Greater than 9, quit!"); - optional = None; - } else { - println!("`i` is `{:?}`. Try again.", i); - optional = Some(i + 1); - } - // ^ Less rightward drift and doesn't require - // explicitly handling the failing case. +// This reads: "while `let` destructures `optional` into +// `Some(i)`, evaluate the block (`{}`). Else `break`. +while let Some(i) = optional { + if i > 9 { + println!("Greater than 9, quit!"); + optional = None; + } else { + println!("`i` is `{:?}`. Try again.", i); + optional = Some(i + 1); } - // ^ `if let` had additional optional `else`/`else if` - // clauses. `while let` does not have these. 
+ // ^ Less rightward drift and doesn't require + // explicitly handling the failing case. } +// ^ `if let` had additional optional `else`/`else if` +// clauses. `while let` does not have these. ``` ### See also: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/fn/closures.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/fn/closures.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/fn/closures.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/fn/closures.md 2023-12-21 16:55:39.000000000 +0000 @@ -14,7 +14,7 @@ Other characteristics of closures include: * using `||` instead of `()` around input variables. -* optional body delimination (`{}`) for a single expression (mandatory otherwise). +* optional body delimitation (`{}`) for a single expression (mandatory otherwise). * the ability to capture the outer environment variables. ```rust,editable @@ -26,7 +26,7 @@ // TODO: uncomment the line above and see the compiler error. The compiler // suggests that we define a closure instead. - // Closures are anonymous, here we are binding them to references + // Closures are anonymous, here we are binding them to references. // Annotation is identical to function annotation but is optional // as are the `{}` wrapping the body. These nameless functions // are assigned to appropriately named variables. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/fn/hof.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/fn/hof.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/fn/hof.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/fn/hof.md 2023-12-21 16:55:39.000000000 +0000 @@ -10,7 +10,7 @@ } fn main() { - println!("Find the sum of all the squared odd numbers under 1000"); + println!("Find the sum of all the numbers with odd squares under 1000"); let upper = 1000; // Imperative approach diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/meta/doc.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/meta/doc.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/meta/doc.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/meta/doc.md 2023-12-21 16:55:39.000000000 +0000 @@ -2,7 +2,8 @@ Use `cargo doc` to build documentation in `target/doc`. -Use `cargo test` to run all tests (including documentation tests), and `cargo test --doc` to only run documentation tests. +Use `cargo test` to run all tests (including documentation tests), and `cargo +test --doc` to only run documentation tests. These commands will appropriately invoke `rustdoc` (and `rustc`) as required. @@ -67,7 +68,8 @@ ## Doc attributes -Below are a few examples of the most common `#[doc]` attributes used with `rustdoc`. +Below are a few examples of the most common `#[doc]` attributes used with +`rustdoc`. ### `inline` @@ -104,7 +106,8 @@ pub use self::async_await::*; ``` -For documentation, `rustdoc` is widely used by the community. It's what is used to generate the [std library docs](https://doc.rust-lang.org/std/). +For documentation, `rustdoc` is widely used by the community. It's what is used +to generate the [std library docs](https://doc.rust-lang.org/std/). 
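For example, a minimal sketch of a documentation test that `cargo test --doc` would run (the crate name `my_crate` is hypothetical):

````rust
/// Adds one to the given number.
///
/// ```
/// assert_eq!(my_crate::add_one(41), 42);
/// ```
pub fn add_one(x: i32) -> i32 {
    x + 1
}
````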
### See also: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/meta/playground.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/meta/playground.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/meta/playground.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/meta/playground.md 2023-12-21 16:55:39.000000000 +0000 @@ -1,6 +1,7 @@ # Playground -The [Rust Playground](https://play.rust-lang.org/) is a way to experiment with Rust code through a web interface. +The [Rust Playground](https://play.rust-lang.org/) is a way to experiment with +Rust code through a web interface. ## Using it with `mdbook` @@ -12,7 +13,9 @@ } ``` -This allows the reader to both run your code sample, but also modify and tweak it. The key here is the adding the word `editable` to your codefence block separated by a comma. +This allows the reader to both run your code sample, but also modify and tweak +it. The key here is the adding the word `editable` to your codefence block +separated by a comma. ````markdown ```rust,editable @@ -20,7 +23,8 @@ ``` ```` -Additionally, you can add `ignore` if you want `mdbook` to skip your code when it builds and tests. +Additionally, you can add `ignore` if you want `mdbook` to skip your code when +it builds and tests. ````markdown ```rust,editable,ignore @@ -30,7 +34,10 @@ ## Using it with docs -You may have noticed in some of the [official Rust docs][official-rust-docs] a button that says "Run", which opens the code sample up in a new tab in Rust Playground. This feature is enabled if you use the #[doc] attribute called [`html_playground_url`][html-playground-url]. +You may have noticed in some of the [official Rust docs][official-rust-docs] a +button that says "Run", which opens the code sample up in a new tab in Rust +Playground. This feature is enabled if you use the `#[doc]` attribute called +[`html_playground_url`][html-playground-url]. ### See also: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/primitives/array.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/primitives/array.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/primitives/array.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/primitives/array.md 2023-12-21 16:55:39.000000000 +0000 @@ -6,7 +6,7 @@ Slices are similar to arrays, but their length is not known at compile time. Instead, a slice is a two-word object; the first word is a pointer to the data, -the second word the length of the slice. The word size is the same as usize, +the second word is the length of the slice. The word size is the same as usize, determined by the processor architecture, e.g. 64 bits on an x86-64. Slices can be used to borrow a section of an array and have the type signature `&[T]`. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/scope/lifetime.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/scope/lifetime.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/scope/lifetime.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/scope/lifetime.md 2023-12-21 16:55:39.000000000 +0000 @@ -1,6 +1,6 @@ # Lifetimes -A *lifetime* is a construct of the compiler (or more specifically, its *borrow +A *lifetime* is a construct the compiler (or more specifically, its *borrow checker*) uses to ensure all borrows are valid. 
Specifically, a variable's lifetime begins when it is created and ends when it is destroyed. While lifetimes and scopes are often referred to together, they are not the same. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/scope/move.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/scope/move.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/scope/move.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/scope/move.md 2023-12-21 16:55:39.000000000 +0000 @@ -1,7 +1,7 @@ # Ownership and moves Because variables are in charge of freeing their own resources, -**resources can only have one owner**. This also prevents resources +**resources can only have one owner**. This prevents resources from being freed more than once. Note that not all variables own resources (e.g. [references]). diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std/hash/alt_key_types.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std/hash/alt_key_types.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std/hash/alt_key_types.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std/hash/alt_key_types.md 2023-12-21 16:55:39.000000000 +0000 @@ -3,7 +3,7 @@ Any type that implements the `Eq` and `Hash` traits can be a key in `HashMap`. This includes: -* `bool` (though not very useful since there is only two possible keys) +* `bool` (though not very useful since there are only two possible keys) * `int`, `uint`, and all variations thereof * `String` and `&str` (protip: you can have a `HashMap` keyed by `String` and call `.get()` with an `&str`) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/arg/matching.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/arg/matching.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/arg/matching.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/arg/matching.md 2023-12-21 16:55:39.000000000 +0000 @@ -70,6 +70,9 @@ } ``` +If you named your program `match_args.rs` and compile it like this `rustc +match_args.rs`, you can execute it as follows: + ```shell $ ./match_args Rust This is not the answer. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/file/read_lines.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/file/read_lines.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/file/read_lines.md 2023-12-04 19:48:39.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/file/read_lines.md 2023-12-21 16:55:39.000000000 +0000 @@ -56,15 +56,13 @@ // File hosts.txt must exist in the current path if let Ok(lines) = read_lines("./hosts.txt") { // Consumes the iterator, returns an (Optional) String - for line in lines { - if let Ok(ip) = line { - println!("{}", ip); - } + for line in lines.flatten() { + println!("{}", line); } } } -// The output is wrapped in a Result to allow matching on errors +// The output is wrapped in a Result to allow matching on errors. // Returns an Iterator to the Reader of the lines of the file. fn read_lines

<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where P: AsRef<Path>, {
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/threads/testcase_mapreduce.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/threads/testcase_mapreduce.md
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/threads/testcase_mapreduce.md 2023-12-04 19:48:39.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rust-by-example/src/std_misc/threads/testcase_mapreduce.md 2023-12-21 16:55:39.000000000 +0000
@@ -30,7 +30,7 @@
fn main() {
// This is our data to process.
- // We will calculate the sum of all digits via a threaded map-reduce algorithm.
+ // We will calculate the sum of all digits via a threaded map-reduce algorithm.
// Each whitespace separated chunk will be handled in a different thread.
//
// TODO: see what happens to the output if you insert spaces!
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/SUMMARY.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/SUMMARY.md
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/SUMMARY.md 2023-12-04 19:48:34.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/SUMMARY.md 2023-12-21 16:55:28.000000000 +0000
@@ -33,7 +33,7 @@
- [\*-esp-espidf](platform-support/esp-idf.md)
- [\*-unknown-fuchsia](platform-support/fuchsia.md)
- [\*-kmc-solid_\*](platform-support/kmc-solid.md)
- - [csky-unknown-linux-gnuabiv2](platform-support/csky-unknown-linux-gnuabiv2.md)
+ - [csky-unknown-linux-gnuabiv2\*](platform-support/csky-unknown-linux-gnuabiv2.md)
- [loongarch\*-unknown-linux-\*](platform-support/loongarch-linux.md)
- [loongarch\*-unknown-none\*](platform-support/loongarch-none.md)
- [m68k-unknown-linux-gnu](platform-support/m68k-unknown-linux-gnu.md)
@@ -41,6 +41,7 @@
- [mipsel-sony-psx](platform-support/mipsel-sony-psx.md)
- [mipsisa\*r6\*-unknown-linux-gnu\*](platform-support/mips-release-6.md)
- [nvptx64-nvidia-cuda](platform-support/nvptx64-nvidia-cuda.md)
+ - [powerpc64-ibm-aix](platform-support/aix.md)
- [riscv32imac-unknown-xous-elf](platform-support/riscv32imac-unknown-xous-elf.md)
- [sparc-unknown-none-elf](./platform-support/sparc-unknown-none-elf.md)
- [*-pc-windows-gnullvm](platform-support/pc-windows-gnullvm.md)
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/codegen-options/index.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/codegen-options/index.md
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/codegen-options/index.md 2023-12-04 19:48:34.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/codegen-options/index.md 2023-12-21 16:55:28.000000000 +0000
@@ -249,11 +249,9 @@
* `gcc`: use the `cc` executable, which is typically gcc or clang on many systems.
* `ld`: use the `ld` executable.
* `msvc`: use the `link.exe` executable from Microsoft Visual Studio MSVC.
-* `ptx-linker`: use
- [`rust-ptx-linker`](https://github.com/denzp/rust-ptx-linker) for Nvidia
- NVPTX GPGPU support.
-* `bpf-linker`: use
- [`bpf-linker`](https://github.com/alessandrod/bpf-linker) for eBPF support.
+* `ptx`: use [`rust-ptx-linker`](https://github.com/denzp/rust-ptx-linker)
+ for Nvidia NVPTX GPGPU support.
+* `bpf`: use [`bpf-linker`](https://github.com/alessandrod/bpf-linker) for eBPF support.
* `wasm-ld`: use the [`wasm-ld`](https://lld.llvm.org/WebAssembly.html) executable, a port of LLVM `lld` for WebAssembly.
* `ld64.lld`: use the LLVM `lld` executable with the [`-flavor darwin` diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/exploit-mitigations.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/exploit-mitigations.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/exploit-mitigations.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/exploit-mitigations.md 2023-12-21 16:55:28.000000000 +0000 @@ -1,12 +1,12 @@ # Exploit Mitigations -This chapter documents the exploit mitigations supported by the Rust -compiler, and is by no means an extensive survey of the Rust programming -language’s security features. +This chapter documents the exploit mitigations supported by the Rust compiler, +and is by no means an extensive survey of the Rust programming language’s +security features. This chapter is for software engineers working with the Rust programming -language, and assumes prior knowledge of the Rust programming language and -its toolchain. +language, and assumes prior knowledge of the Rust programming language and its +toolchain. ## Introduction @@ -14,8 +14,8 @@ The Rust programming language provides memory[1] and thread[2] safety guarantees via its ownership[3], references and borrowing[4], and slice types[5] features. However, Unsafe Rust[6] introduces unsafe blocks, unsafe -functions and methods, unsafe traits, and new types that are not subject to -the borrowing rules. +functions and methods, unsafe traits, and new types that are not subject to the +borrowing rules. Parts of the Rust standard library are implemented as safe abstractions over unsafe code (and historically have been vulnerable to memory corruption[7]). @@ -23,33 +23,32 @@ abstractions over unsafe code. This can cause a false sense of security if unsafe code is not properly reviewed and tested. -Unsafe Rust introduces features that do not provide the same memory and -thread safety guarantees. This causes programs or libraries to be -susceptible to memory corruption (CWE-119)[8] and concurrency issues -(CWE-557)[9]. Modern C and C++ compilers provide exploit mitigations to -increase the difficulty to exploit vulnerabilities resulting from these -issues. Therefore, the Rust compiler must also support these exploit -mitigations in order to mitigate vulnerabilities resulting from the use of -Unsafe Rust. This chapter documents these exploit mitigations and how they -apply to Rust. - -This chapter does not discuss the effectiveness of these exploit mitigations -as they vary greatly depending on several factors besides their design and -implementation, but rather describe what they do, so their effectiveness can -be understood within a given context. +Unsafe Rust introduces features that do not provide the same memory and thread +safety guarantees. This causes programs or libraries to be susceptible to +memory corruption (CWE-119)[8] and concurrency issues (CWE-557)[9]. Modern C +and C++ compilers provide exploit mitigations to increase the difficulty to +exploit vulnerabilities resulting from these issues. Therefore, the Rust +compiler must also support these exploit mitigations in order to mitigate +vulnerabilities resulting from the use of Unsafe Rust. This chapter documents +these exploit mitigations and how they apply to Rust. 
+ +This chapter does not discuss the effectiveness of these exploit mitigations as +they vary greatly depending on several factors besides their design and +implementation, but rather describe what they do, so their effectiveness can be +understood within a given context. ## Exploit mitigations -This section documents the exploit mitigations applicable to the Rust -compiler when building programs for the Linux operating system on the AMD64 -architecture and equivalent.1 - -The Rust Programming Language currently has no specification. The Rust -compiler (i.e., rustc) is the language reference implementation. All -references to “the Rust compiler” in this chapter refer to the language -reference implementation. +This section documents the exploit mitigations applicable to the Rust compiler +when building programs for the Linux operating system on the AMD64 architecture +and equivalent.1 All examples in this section were built using +nightly builds of the Rust compiler on Debian testing. + +The Rust Programming Language currently has no specification. The Rust compiler +(i.e., rustc) is the language reference implementation. All references to “the +Rust compiler” in this chapter refer to the language reference implementation. Table I \ Summary of exploit mitigations supported by the Rust compiler when building @@ -83,8 +82,8 @@ random load address, thus also benefiting from address-space layout randomization (ASLR). This is also referred to as “full ASLR”. -The Rust compiler supports position-independent executable, and enables it -by default since version 0.12.0 (2014-10-09)[10]–[13]. +The Rust compiler supports position-independent executable, and enables it by +default since version 0.12.0 (2014-10-09)[10]–[13]. ```text $ readelf -h target/release/hello-rust | grep Type: @@ -93,8 +92,7 @@ Fig. 1. Checking if an executable is a position-independent executable. An executable with an object type of `ET_DYN` (i.e., shared object) and not -`ET_EXEC` (i.e., executable) is a position-independent executable (see Fig. -1). +`ET_EXEC` (i.e., executable) is a position-independent executable (see Fig. 1). ### Integer overflow checks @@ -104,8 +102,11 @@ and unsigned integer computations that cannot be represented in their type, resulting in an overflow or wraparound. -The Rust compiler supports integer overflow checks, and enables it when -debug assertions are enabled since version 1.1.0 (2015-06-25)[14]–[20]. +The Rust compiler supports integer overflow checks, and enables it when debug +assertions are enabled since version 1.0.0 (2015-05-15)[14]–[17], but support +for it was not completed until version 1.1.0 (2015-06-25)[16]. An option to +control integer overflow checks was later stabilized in version 1.17.0 +(2017-04-27)[18]–[20]. ```compile_fail fn main() { @@ -136,21 +137,21 @@ Fig. 4. Build and execution of hello-rust-integer with debug assertions disabled. -Integer overflow checks are enabled when debug assertions are enabled (see -Fig. 3), and disabled when debug assertions are disabled (see Fig. 4). To -enable integer overflow checks independently, use the option to control -integer overflow checks, scoped attributes, or explicit checking methods -such as `checked_add`2. -It is recommended that explicit wrapping methods such as `wrapping_add` be -used when wrapping semantics are intended, and that explicit checking and -wrapping methods always be used when using Unsafe Rust. - -2\. 
See [the `u32` docs](../std/primitive.u32.html) -for more information on the checked, overflowing, saturating, and wrapping -methods (using u32 as an example). +It is recommended that explicit wrapping methods such as `wrapping_add` be used +when wrapping semantics are intended, and that explicit checking and wrapping +methods always be used when using Unsafe Rust. + +2\. See [the `u32` docs](../std/primitive.u32.html) for more +information on the checked, overflowing, saturating, and wrapping methods +(using u32 as an example). ### Non-executable memory regions @@ -158,17 +159,16 @@ Non-executable memory regions increase the difficulty of exploitation by limiting the memory regions that can be used to execute arbitrary code. Most modern processors provide support for the operating system to mark memory -regions as non executable, but it was previously emulated by software, such -as in grsecurity/PaX's -[PAGEEXEC](https://pax.grsecurity.net/docs/pageexec.txt) and -[SEGMEXEC](https://pax.grsecurity.net/docs/segmexec.txt), on processors that -did not provide support for it. This is also known as “No Execute (NX) Bit”, -“Execute Disable (XD) Bit”, “Execute Never (XN) Bit”, and others. +regions as non executable, but it was previously emulated by software, such as +in grsecurity/PaX’s [PAGEEXEC](https://pax.grsecurity.net/docs/pageexec.txt) +and [SEGMEXEC](https://pax.grsecurity.net/docs/segmexec.txt), on processors +that did not provide support for it. This is also known as “No Execute (NX) +Bit”, “Execute Disable (XD) Bit”, “Execute Never (XN) Bit”, and others. The Rust compiler supports non-executable memory regions, and enables it by -default since its initial release, version 0.1 (2012-01-20)[21], [22], but -has regressed since then[23]–[25], and enforced by default since version -1.8.0 (2016-04-14)[25]. +default since its initial release, version 0.1 (2012-01-20)[21], [22], but has +regressed since then[23]–[25], and enforced by default since version 1.8.0 +(2016-04-14)[25]. ```text $ readelf -l target/release/hello-rust | grep -A 1 GNU_STACK @@ -178,9 +178,9 @@ Fig. 5. Checking if non-executable memory regions are enabled for a given binary. -The presence of an element of type `PT_GNU_STACK` in the program header -table with the `PF_X` (i.e., executable) flag unset indicates non-executable -memory regions3 are enabled for a given binary (see Fig. 5). Conversely, the presence of an element of type `PT_GNU_STACK` in the program header table with the `PF_X` flag set or the absence of an element of type @@ -196,38 +196,40 @@ Stack clashing protection protects the stack from overlapping with another memory region—allowing arbitrary data in both to be overwritten using each -other—by reading from the stack pages as the stack grows to cause a page -fault when attempting to read from the guard page/region. This is also -referred to as “stack probes” or “stack probing”. +other—by reading from the stack pages as the stack grows to cause a page fault +when attempting to read from the guard page/region. This is also referred to as +“stack probes” or “stack probing”. The Rust compiler supports stack clashing protection via stack probing, and enables it by default since version 1.20.0 (2017-08-31)[26]–[29]. -![Screenshot of IDA Pro listing cross references to __rust_probestack in hello-rust.](images/image1.png "Cross references to __rust_probestack in hello-rust.") -Fig. 6. IDA Pro listing cross references to `__rust_probestack` in -hello-rust. 
- ```rust -fn hello() { - println!("Hello, world!"); +fn main() { + let v: [u8; 16384] = [1; 16384]; + let first = &v[0]; + println!("The first element is: {first}"); } +``` +Fig. 6. hello-rust-stack-probe-1 program. + +![Screenshot of IDA Pro listing the "unrolled loop" stack probe variant in modified hello-rust.](images/image1.png "The \"unrolled loop\" stack probe variant in modified hello-rust.") +Fig. 7. The "unrolled loop" stack probe variant in modified hello-rust. +```rust fn main() { - let _: [u64; 1024] = [0; 1024]; - hello(); + let v: [u8; 65536] = [1; 65536]; + let first = &v[0]; + println!("The first element is: {first}"); } ``` -Fig 7. Modified hello-rust. +Fig. 8. hello-rust-stack-probe-2 program. + +![Screenshot of IDA Pro listing the "standard loop" stack probe variant in modified hello-rust.](images/image2.png "The \"standard loop\" stack probe variant in modified hello-rust.") +Fig. 9. The "standard loop" stack probe variant in modified hello-rust. -![Screenshot of IDA Pro listing cross references to __rust_probestack in modified hello-rust.](images/image2.png "Cross references to __rust_probestack in modified hello-rust.") -Fig. 8. IDA Pro listing cross references to `__rust_probestack` in modified -hello-rust. - -To check if stack clashing protection is enabled for a given binary, search -for cross references to `__rust_probestack`. The `__rust_probestack` is -called in the prologue of functions whose stack size is larger than a page -size (see Fig. 6), and can be forced for illustration purposes by modifying -the hello-rust example as seen in Fig. 7 and Fig. 8. +To check if stack clashing protection is enabled for a given binary, look for +any of the two stack probe variants in the prologue of functions whose stack +size is larger than a page size (see Figs. 6–9). ### Read-only relocations and immediate binding @@ -246,21 +248,20 @@ ``` Fig. 9. Checking if read-only relocations is enabled for a given binary. -The presence of an element of type `PT_GNU_RELRO` in the program header -table indicates read-only relocations are enabled for a given binary (see -Fig. 9). Conversely, the absence of an element of type `PT_GNU_RELRO` in the -program header table indicates read-only relocations are not enabled for a -given binary. +The presence of an element of type `PT_GNU_RELRO` in the program header table +indicates read-only relocations are enabled for a given binary (see Fig. 9). +Conversely, the absence of an element of type `PT_GNU_RELRO` in the program +header table indicates read-only relocations are not enabled for a given +binary. **Immediate binding** protects additional segments containing relocations -(i.e., `.got.plt`) from being overwritten by instructing the dynamic linker -to perform all relocations before transferring control to the program during -startup, so all segments containing relocations can be marked read only -(when combined with read-only relocations). This is also referred to as -“full RELRO”. +(i.e., `.got.plt`) from being overwritten by instructing the dynamic linker to +perform all relocations before transferring control to the program during +startup, so all segments containing relocations can be marked read only (when +combined with read-only relocations). This is also referred to as “full RELRO”. -The Rust compiler supports immediate binding, and enables it by default -since version 1.21.0 (2017-10-12)[30], [31]. +The Rust compiler supports immediate binding, and enables it by default since +version 1.21.0 (2017-10-12)[30], [31]. 
```text $ readelf -d target/release/hello-rust | grep BIND_NOW @@ -270,16 +271,15 @@ The presence of an element with the `DT_BIND_NOW` tag and the `DF_BIND_NOW` flag4 in the dynamic section indicates immediate -binding is enabled for a given binary (see Fig. 10). Conversely, the absence -of an element with the `DT_BIND_NOW` tag and the `DF_BIND_NOW` flag in the -dynamic section indicates immediate binding is not enabled for a given -binary. +class="footnote">4 in the dynamic section indicates immediate binding +is enabled for a given binary (see Fig. 10). Conversely, the absence of an +element with the `DT_BIND_NOW` tag and the `DF_BIND_NOW` flag in the dynamic +section indicates immediate binding is not enabled for a given binary. The presence of both an element of type `PT_GNU_RELRO` in the program header -table and of an element with the `DT_BIND_NOW` tag and the `DF_BIND_NOW` -flag in the dynamic section indicates full RELRO is enabled for a given -binary (see Fig. 9 and Fig. 10). +table and of an element with the `DT_BIND_NOW` tag and the `DF_BIND_NOW` flag +in the dynamic section indicates full RELRO is enabled for a given binary (see +Figs. 9–10). 4\. And the `DF_1_NOW` flag for some link editors. @@ -287,26 +287,24 @@ ### Heap corruption protection -Heap corruption protection protects memory allocated dynamically by -performing several checks, such as checks for corrupted links between list -elements, invalid pointers, invalid sizes, double/multiple “frees” of the -same memory allocated, and many corner cases of these. These checks are -implementation specific, and vary per allocator. +Heap corruption protection protects memory allocated dynamically by performing +several checks, such as checks for corrupted links between list elements, +invalid pointers, invalid sizes, double/multiple “frees” of the same memory +allocated, and many corner cases of these. These checks are implementation +specific, and vary per allocator. [ARM Memory Tagging Extension (MTE)](https://community.arm.com/developer/ip-products/processors/b/processors-ip-blog/posts/enhancing-memory-safety), -when available, will provide hardware assistance for a probabilistic -mitigation to detect memory safety violations by tagging memory allocations, -and automatically checking that the correct tag is used on every memory -access. +when available, will provide hardware assistance for a probabilistic mitigation +to detect memory safety violations by tagging memory allocations, and +automatically checking that the correct tag is used on every memory access. Rust’s default allocator has historically been -[jemalloc](http://jemalloc.net/), and it has long been the cause of issues -and the subject of much discussion[32]–[38]. Consequently, it has been -removed as the default allocator in favor of the operating system’s standard -C library default allocator5 since version 1.32.0 -(2019-01-17)[39]. +[jemalloc](http://jemalloc.net/), and it has long been the cause of issues and +the subject of much discussion[32]–[38]. Consequently, it has been removed as +the default allocator in favor of the operating system’s standard C library +default allocator5 since version 1.32.0 (2019-01-17)[39]. ```rust,no_run fn main() { @@ -330,8 +328,7 @@ free(): invalid next size (normal) Aborted ``` -Fig. 12. Build and execution of hello-rust-heap with debug assertions -enabled. +Fig. 12. Build and execution of hello-rust-heap with debug assertions enabled. 
```text $ cargo run --release @@ -341,47 +338,41 @@ free(): invalid next size (normal) Aborted ``` -Fig. 13. Build and execution of hello-rust-heap with debug assertions -disabled. +Fig. 13. Build and execution of hello-rust-heap with debug assertions disabled. -Heap corruption checks are being performed when using the default allocator -(i.e., the GNU Allocator) as seen in Fig. 12 and Fig. 13. +Heap corruption checks are performed when using the default allocator (i.e., +the GNU Allocator) (see Figs. 12–13). 5\. Linux's standard C library default allocator is the GNU -Allocator, which is derived from ptmalloc (pthreads malloc) by Wolfram -Gloger, which in turn is derived from dlmalloc (Doug Lea malloc) by Doug -Lea. +Allocator, which is derived from ptmalloc (pthreads malloc) by Wolfram Gloger, +which in turn is derived from dlmalloc (Doug Lea malloc) by Doug Lea. ### Stack smashing protection -Stack smashing protection protects programs from stack-based buffer -overflows by inserting a random guard value between local variables and the -saved return instruction pointer, and checking if this value has changed -when returning from a function. This is also known as “Stack Protector” or -“Stack Smashing Protector (SSP)”. +Stack smashing protection protects programs from stack-based buffer overflows +by inserting a random guard value between local variables and the saved return +instruction pointer, and checking if this value has changed when returning from +a function. This is also known as “Stack Protector” or “Stack Smashing +Protector (SSP)”. -The Rust compiler supports stack smashing protection on nightly builds[42]. +The Rust compiler supports stack smashing protection on nightly builds[40]. ![Screenshot of IDA Pro listing cross references to __stack_chk_fail in hello-rust.](images/image3.png "Cross references to __stack_chk_fail in hello-rust.") -Fig. 14. IDA Pro listing cross references to `__stack_chk_fail` in -hello-rust. +Fig. 14. IDA Pro listing cross references to `__stack_chk_fail` in hello-rust. -To check if stack smashing protection is enabled for a given binary, search -for cross references to `__stack_chk_fail`. The presence of these -cross-references in Rust-compiled code (e.g., `hello_rust::main`) indicates -that the stack smashing protection is enabled (see Fig. 14). +To check if stack smashing protection is enabled for a given binary, search for +cross references to `__stack_chk_fail` (see Fig. 14). ### Forward-edge control flow protection -Forward-edge control flow protection protects programs from having its -control flow changed/hijacked by performing checks to ensure that -destinations of indirect branches are one of their valid destinations in the -control flow graph. The comprehensiveness of these checks vary per -implementation. This is also known as “forward-edge control flow integrity -(CFI)”. +Forward-edge control flow protection protects programs from having its control +flow changed/hijacked by performing checks to ensure that destinations of +indirect branches are one of their valid destinations in the control flow +graph. The comprehensiveness of these checks vary per implementation. This is +also known as “forward-edge control flow integrity (CFI)”. Newer processors provide hardware assistance for forward-edge control flow protection, such as ARM Branch Target Identification (BTI), ARM Pointer @@ -394,22 +385,19 @@ (RAP)](https://grsecurity.net/rap_faq). The Rust compiler supports forward-edge control flow protection on nightly -builds[40]-[41] 6. 
```text -$ readelf -s -W target/debug/rust-cfi | grep "\.cfi" - 12: 0000000000005170 46 FUNC LOCAL DEFAULT 14 _RNvCsjaOHoaNjor6_8rust_cfi7add_one.cfi - 15: 00000000000051a0 16 FUNC LOCAL DEFAULT 14 _RNvCsjaOHoaNjor6_8rust_cfi7add_two.cfi - 17: 0000000000005270 396 FUNC LOCAL DEFAULT 14 _RNvCsjaOHoaNjor6_8rust_cfi4main.cfi -... +$ readelf -s -W target/release/hello-rust | grep "\.cfi" + 5: 0000000000006480 657 FUNC LOCAL DEFAULT 15 _ZN10hello_rust4main17h4e359f1dcd627c83E.cfi ``` -Fig. 15. Checking if LLVM CFI is enabled for a given binary[41]. +Fig. 15. Checking if LLVM CFI is enabled for a given binary. The presence of symbols suffixed with ".cfi" or the `__cfi_init` symbol (and -references to `__cfi_check`) indicates that LLVM CFI (i.e., forward-edge control -flow protection) is enabled for a given binary. Conversely, the absence of -symbols suffixed with ".cfi" or the `__cfi_init` symbol (and references to +references to `__cfi_check`) indicates that LLVM CFI (i.e., forward-edge +control flow protection) is enabled for a given binary. Conversely, the absence +of symbols suffixed with ".cfi" or the `__cfi_init` symbol (and references to `__cfi_check`) indicates that LLVM CFI is not enabled for a given binary (see Fig. 15). @@ -421,48 +409,47 @@ ### Backward-edge control flow protection **Shadow stack** protects saved return instruction pointers from being -overwritten by storing a copy of them on a separate (shadow) stack, and -using these copies as authoritative values when returning from functions. -This is also known as “ShadowCallStack” and “Return Flow Guard”, and is -considered an implementation of backward-edge control flow protection (or -“backward-edge CFI”). +overwritten by storing a copy of them on a separate (shadow) stack, and using +these copies as authoritative values when returning from functions. This is +also known as “ShadowCallStack” and “Return Flow Guard”, and is considered an +implementation of backward-edge control flow protection (or “backward-edge +CFI”). **Safe stack** protects not only the saved return instruction pointers, but -also register spills and some local variables from being overwritten by -storing unsafe variables, such as large arrays, on a separate (unsafe) -stack, and using these unsafe variables on the separate stack instead. This -is also known as “SafeStack”, and is also considered an implementation of -backward-edge control flow protection. - -Both shadow and safe stack are intended to be a more comprehensive -alternatives to stack smashing protection as they protect the saved return -instruction pointers (and other data in the case of safe stack) from -arbitrary writes and non-linear out-of-bounds writes. +also register spills and some local variables from being overwritten by storing +unsafe variables, such as large arrays, on a separate (unsafe) stack, and using +these unsafe variables on the separate stack instead. This is also known as +“SafeStack”, and is also considered an implementation of backward-edge control +flow protection. + +Both shadow and safe stack are intended to be a more comprehensive alternatives +to stack smashing protection as they protect the saved return instruction +pointers (and other data in the case of safe stack) from arbitrary writes and +non-linear out-of-bounds writes. Newer processors provide hardware assistance for backward-edge control flow -protection, such as ARM Pointer Authentication, and Intel Shadow Stack as -part of Intel CET. 
+protection, such as ARM Pointer Authentication, and Intel Shadow Stack as part +of Intel CET. -The Rust compiler supports shadow stack for aarch64 only -7 -on nightly Rust compilers [43]-[44]. Safe stack is available on nightly -Rust compilers [45]-[46]. +The Rust compiler supports shadow stack for the AArch64 architecture7on +nightly builds[43]-[44], and also supports safe stack on nightly +builds[45]-[46]. ```text $ readelf -s target/release/hello-rust | grep __safestack_init - 1177: 00000000000057b0 444 FUNC GLOBAL DEFAULT 9 __safestack_init + 678: 0000000000008c80 426 FUNC GLOBAL DEFAULT 15 __safestack_init ``` Fig. 16. Checking if LLVM SafeStack is enabled for a given binary. -The presence of the `__safestack_init` symbol indicates that LLVM SafeStack -is enabled for a given binary (see Fig. 16). Conversely, the absence of the -`__safestack_init` symbol indicates that LLVM SafeStack is not enabled for a -given binary. - -7\. The shadow stack implementation for the AMD64 -architecture and equivalent in LLVM was removed due to performance and -security issues. +The presence of the `__safestack_init` symbol indicates that LLVM SafeStack is +enabled for a given binary. Conversely, the absence of the `__safestack_init` +symbol indicates that LLVM SafeStack is not enabled for a given binary (see +Fig. 16). + +7\. The shadow stack implementation for the AMD64 architecture +and equivalent in LLVM was removed due to performance and security issues. ## Appendix @@ -470,29 +457,28 @@ As of the latest version of the [Linux Standard Base (LSB) Core Specification](https://refspecs.linuxfoundation.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/progheader.html), the `PT_GNU_STACK` program header indicates whether the stack should be -executable, and the absence of this header indicates that the stack should -be executable. However, the Linux kernel currently sets the -`READ_IMPLIES_EXEC` personality upon loading any executable with the -`PT_GNU_STACK` program header and the `PF_X `flag set or with the absence of -this header, resulting in not only the stack, but also all readable virtual -memory mappings being executable. +executable, and the absence of this header indicates that the stack should be +executable. However, the Linux kernel currently sets the `READ_IMPLIES_EXEC` +personality upon loading any executable with the `PT_GNU_STACK` program header +and the `PF_X` flag set or with the absence of this header, resulting in not +only the stack, but also all readable virtual memory mappings being executable. An attempt to fix this [was made in 2012](https://lore.kernel.org/lkml/f298f914-2239-44e4-8aa1-a51282e7fac0@zmail15.collab.prod.int.phx2.redhat.com/), and another [was made in 2020](https://lore.kernel.org/kernel-hardening/20200327064820.12602-1-keescook@chromium.org/). The former never landed, and the latter partially fixed it, but introduced -other issues—the absence of the `PT_GNU_STACK` program header still causes -not only the stack, but also all readable virtual memory mappings to be -executable in some architectures, such as IA-32 and equivalent (or causes -the stack to be non-executable in some architectures, such as AMD64 and -equivalent, contradicting the LSB). 
- -The `READ_IMPLIES_EXEC` personality needs to be completely separated from -the `PT_GNU_STACK` program header by having a separate option for it (or -setarch -X could just be used whenever `READ_IMPLIES_EXEC` is needed), and -the absence of the `PT_GNU_STACK` program header needs to have more secure -defaults (unrelated to `READ_IMPLIES_EXEC`). +other issues—the absence of the `PT_GNU_STACK` program header still causes not +only the stack, but also all readable virtual memory mappings to be executable +in some architectures, such as IA-32 and equivalent (or causes the stack to be +non-executable in some architectures, such as AMD64 and equivalent, +contradicting the LSB). + +The `READ_IMPLIES_EXEC` personality needs to be completely separated from the +`PT_GNU_STACK` program header by having a separate option for it (or setarch -X +could just be used whenever `READ_IMPLIES_EXEC` is needed), and the absence of +the `PT_GNU_STACK` program header needs to have more secure defaults (unrelated +to `READ_IMPLIES_EXEC`). ## References @@ -576,19 +562,19 @@ 25. A. Clark. “Explicitly disable stack execution on linux and bsd #30859.” GitHub. . -26. “Replace stack overflow checking with stack probes #16012.” GitHub. +26. Zoxc. “Replace stack overflow checking with stack probes #16012.” GitHub. . -27. B. Striegel. “Extend stack probe support to non-tier-1 platforms, and - clarify policy for mitigating LLVM-dependent unsafety #43241.” GitHub. - . - -28. A. Crichton. “rustc: Implement stack probes for x86 #42816.” GitHub. +27. A. Crichton. “rustc: Implement stack probes for x86 #42816.” GitHub. . -29. A. Crichton. “Add \_\_rust\_probestack intrinsic #175.” GitHub. +28. A. Crichton. “Add \_\_rust\_probestack intrinsic #175.” GitHub. . +29. S. Guelton, S. Ledru, J. Stone. “Bringing Stack Clash Protection to Clang / + X86 — the Open Source Way.” The LLVM Project Blog. + . + 30. B. Anderson. “Consider applying -Wl,-z,relro or -Wl,-z,relro,-z,now by default #29877.” GitHub. . @@ -621,16 +607,16 @@ 39. A. Crichton. “Remove the alloc\_jemalloc crate #55238.” GitHub. . -40. R. de C Valle. “Tracking Issue for LLVM Control Flow Integrity (CFI) Support +40. bbjornse. “Add codegen option for using LLVM stack smash protection #84197.” + GitHub. + +41. R. de C. Valle. “Tracking Issue for LLVM Control Flow Integrity (CFI) Support for Rust #89653.” GitHub. . -41. “ControlFlowIntegrity.” The Rust Unstable Book. +42. “ControlFlowIntegrity.” The Rust Unstable Book. [https://doc.rust-lang.org/unstable-book/compiler-flags/sanitizer.html#controlflowintegrity](../unstable-book/compiler-flags/sanitizer.html#controlflowintegrity). -42. bbjornse. “add codegen option for using LLVM stack smash protection #84197.” - GitHub. - -43. ivanloz. “Add support for LLVM ShadowCallStack. #98208.” GitHub. +43. I. Lozano. “Add support for LLVM ShadowCallStack #98208.” GitHub. . 44. “ShadowCallStack.” The Rust Unstable Book. 
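
As a concrete way to exercise the backward-edge mitigations described above, the unstable sanitizer flags documented in the Rust Unstable Book can be passed via `RUSTFLAGS`. The invocations below are a minimal sketch only: the flags are nightly-only, SafeStack support varies by target, ShadowCallStack is limited to AArch64 targets (the `aarch64-linux-android` target and the availability of a suitable linker are assumptions for illustration).

```sh
# Build with LLVM SafeStack, then check for the __safestack_init symbol (cf. Fig. 16).
RUSTFLAGS="-Zsanitizer=safestack" cargo +nightly build --release
readelf -s target/release/hello-rust | grep __safestack_init

# Build with LLVM ShadowCallStack (AArch64 only; target chosen for illustration).
RUSTFLAGS="-Zsanitizer=shadow-call-stack" cargo +nightly build --target aarch64-linux-android --release
```
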
Binary files /tmp/tmpy9t6d7cw/96PADUEFHn/rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/images/image1.png and /tmp/tmpy9t6d7cw/aYN7PiltfC/rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/images/image1.png differ Binary files /tmp/tmpy9t6d7cw/96PADUEFHn/rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/images/image2.png and /tmp/tmpy9t6d7cw/aYN7PiltfC/rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/images/image2.png differ Binary files /tmp/tmpy9t6d7cw/96PADUEFHn/rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/images/image3.png and /tmp/tmpy9t6d7cw/aYN7PiltfC/rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/images/image3.png differ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/aarch64-unknown-teeos.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/aarch64-unknown-teeos.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/aarch64-unknown-teeos.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/aarch64-unknown-teeos.md 2023-12-21 16:55:28.000000000 +0000 @@ -58,7 +58,7 @@ ```toml profile = "compiler" -changelog-seen = 2 +change-id = 115898 [build] sanitizers = true diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/aix.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/aix.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/aix.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/aix.md 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,26 @@ +# `powerpc64-ibm-aix` + +**Tier: 3** + +Rust for AIX operating system, currently only 64-bit PowerPC is supported. + +## Target maintainers + +- QIU Chaofan `qiucofan@cn.ibm.com`, https://github.com/ecnelises +- Kai LUO, `lkail@cn.ibm.com`, https://github.com/bzEq + +## Requirements + +This target supports host tools, std and alloc. This target cannot be cross-compiled as for now, mainly because of the unavailability of system linker on other platforms. + +Binary built for this target is expected to run on Power7 or newer CPU, and AIX 7.2 or newer version. + +Binary format of this platform is [XCOFF](https://www.ibm.com/docs/en/aix/7.2?topic=formats-xcoff-object-file-format). Archive file format is ['AIX big format'](https://www.ibm.com/docs/en/aix/7.2?topic=formats-ar-file-format-big). + +## Testing + +This target supports running test suites natively, but it's not available to cross-compile and execute in emulator. + +## Interoperability with C code + +This target supports C code. C code compiled by XL, Open XL and Clang are compatible with Rust. Typical triple of AIX on 64-bit PowerPC of these compilers are also `powerpc64-ibm-aix`. 
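
Since this Tier 3 target supports host tools but cannot be cross-compiled, a native toolchain build on AIX is the expected workflow. The following `config.toml` fragment is a sketch in the style of the other platform-support pages, not part of the upstream AIX documentation; the profile and `change-id` values are assumptions.

```toml
profile = "compiler"
change-id = 115898

[build]
target = ["powerpc64-ibm-aix"]
```
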
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/android.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/android.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/android.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/android.md 2023-12-21 16:55:28.000000000 +0000 @@ -45,3 +45,19 @@ A list of all supported targets can be found [here](../platform-support.html) + +## Architecture Notes + +### riscv64-linux-android + +Currently the `riscv64-linux-android` target requires the following architecture features/extensions: + +* `a` (atomics) +* `d` (double-precision floating-point) +* `c` (compressed instruction set) +* `f` (single-precision floating-point) +* `m` (multiplication and division) +* `v` (vector) +* `Zba` (address calculation instructions) +* `Zbb` (base instructions) +* `Zbs` (single-bit instructions) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/apple-tvos.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/apple-tvos.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/apple-tvos.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/apple-tvos.md 2023-12-21 16:55:28.000000000 +0000 @@ -52,7 +52,7 @@ ```toml [build] build-stage = 1 -target = ["aarch64-apple-tvos", "x86_64-apple-tvos"] +target = ["aarch64-apple-tvos", "x86_64-apple-tvos", "aarch64-apple-tvos-sim"] ``` It's possible that cargo under `-Zbuild-std` may also be used to target them. @@ -67,6 +67,8 @@ $ rustc --target aarch64-apple-tvos your-code.rs ... $ rustc --target x86_64-apple-tvos your-code.rs +... +$ rustc --target aarch64-apple-tvos-sim your-code.rs ``` ## Testing diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/armv7-sony-vita-newlibeabihf.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/armv7-sony-vita-newlibeabihf.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/armv7-sony-vita-newlibeabihf.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/armv7-sony-vita-newlibeabihf.md 2023-12-21 16:55:28.000000000 +0000 @@ -2,15 +2,16 @@ **Tier: 3** -This tier supports the ARM Cortex A9 processor running on a PlayStation Vita console. `armv7-vita-newlibeabihf` aims to have support for `std` crate using `newlib` as a bridge. +This tier supports the ARM Cortex A9 processor running on a PlayStation Vita console. Rust support for this target is not affiliated with Sony, and is not derived from nor used with any official Sony SDK. ## Target maintainers -* [@amg98](https://github.com/amg98) * [@nikarh](https://github.com/nikarh) +* [@pheki](https://github.com/pheki) +* [@ZetaNumbers](https://github.com/ZetaNumbers) ## Requirements @@ -20,18 +21,16 @@ `alloc`, and `panic_abort`. `std` is partially supported, but mostly works. Some APIs are unimplemented -and will simply return an error, such as `std::process`. An allocator is provided -by default. +and will simply return an error, such as `std::process`. -In order to support some APIs, binaries must be linked against `libc` written -for the target, using a linker for the target. These are provided by the -VITASDK toolchain. +This target generates binaries in the ELF format with thumb ISA by default. + +Binaries are linked with `arm-vita-eabi-gcc` provided by VITASDK toolchain. 
-This target generates binaries in the ELF format with thumb ISA. ## Building the target -Rust does not ship pre-compiled artifacts for this target. You can use `build-std` flag to build binaries with `std`: +Rust does not ship pre-compiled artifacts for this target. You can use `build-std` flag to build ELF binaries with `std`: ```sh cargo build -Z build-std=std,panic_abort --target=armv7-sony-vita-newlibeabihf --release @@ -39,113 +38,45 @@ ## Building Rust programs -To test your developed rust programs on PlayStation Vita, first you must correctly package your elf. These steps can be preformed using tools available in VITASDK, and can be automated using a tool like `cargo-make`. +The recommended way to build artifacts that can be installed and run on PlayStation Vita is by using the [cargo-vita](https://github.com/vita-rust/cargo-vita) tool. This tool uses `build-std` and VITASDK toolchain to build artifacts runnable on Vita. + +To install the tool run: + +```sh +cargo install cargo-vita +``` -First, set up environment variables for `VITASDK`, and it's binaries: +[VITASDK](https://vitasdk.org/) toolchain must be installed, and the `VITASDK` environment variable must be set to its location, e.g.: ```sh export VITASDK=/opt/vitasdk -export PATH=$PATH:$VITASDK/bin ``` -Use the example below as a template for your project: +Add the following section to your project's `Cargo.toml`: + ```toml -[env] -TITLE = "Rust Hello World" -TITLEID = "RUST00001" - -# At least a "sce_sys" folder should be place there for app metadata (title, icons, description...) -# You can find sample assets for that on $VITASDK/share/gcc-arm-vita-eabi/samples/hello_world/sce_sys/ -STATIC_DIR = "static" # Folder where static assets should be placed (sce_sys folder is at $STATIC_DIR/sce_sys) -CARGO_TARGET_DIR = { script = ["echo ${CARGO_TARGET_DIR:=target}"] } -CARGO_OUT_DIR = "${CARGO_TARGET_DIR}/${RUST_TARGET}/release" - -[tasks.build] -description = "Build the project using `cargo`." -command = "cargo" -args = ["build", "-Z", "build-std=std,panic_abort", "--target=armv7-sony-vita-newlibeabihf", "--release"] - -[tasks.strip] -description = "Strip the produced ELF executable." -dependencies = ["build"] -command = "arm-vita-eabi-strip" -args = ["-g", '${CARGO_OUT_DIR}/${CARGO_MAKE_CRATE_FS_NAME}.elf'] - -[tasks.velf] -description = "Build an VELF executable from the obtained ELF file." -dependencies = ["strip"] -command = "vita-elf-create" -args = ['${CARGO_OUT_DIR}/${CARGO_MAKE_CRATE_NAME}.elf', '${CARGO_OUT_DIR}/${CARGO_MAKE_CRATE_NAME}.velf'] - -[tasks.eboot-bin] -description = "Build an `eboot.bin` file from the obtained VELF file." -dependencies = ["velf"] -command = "vita-make-fself" -args = ["-s", '${CARGO_OUT_DIR}/${CARGO_MAKE_CRATE_NAME}.velf', '${CARGO_OUT_DIR}/eboot.bin'] - -[tasks.param-sfo] -description = "Build the `param.sfo` manifest using with given TITLE and TITLEID." -command = "vita-mksfoex" -args = ["-s", 'TITLE_ID=${TITLEID}', '${TITLE}', '${CARGO_OUT_DIR}/param.sfo'] - -[tasks.manifest] -description = "List all static resources into a manifest file." -script = [ - 'mkdir -p "${CARGO_OUT_DIR}"', - ''' - if [ -d "${STATIC_DIR}" ]; then - find "${STATIC_DIR}" -type f > "${CARGO_OUT_DIR}/MANIFEST" - else - touch "${CARGO_OUT_DIR}/MANIFEST" - fi - ''' -] - -[tasks.vpk] -description = "Build a VPK distribution of the project executable and resources." 
-dependencies = ["eboot-bin", "param-sfo", "manifest"] -script_runner = "@rust" -script = [ - ''' - use std::io::BufRead; - use std::fs::File; - - fn main() { - - let crate_name = env!("CARGO_MAKE_CRATE_NAME"); - let static_dir = env!("STATIC_DIR"); - let out_dir = std::path::PathBuf::from(env!("CARGO_OUT_DIR")); - - let mut cmd = ::std::process::Command::new("vita-pack-vpk"); - cmd.arg("-s").arg(out_dir.join("param.sfo")); - cmd.arg("-b").arg(out_dir.join("eboot.bin")); - - // Add files from MANIFEST - if let Ok(file) = File::open(out_dir.join("MANIFEST")) { - let mut reader = ::std::io::BufReader::new(file); - let mut lines = reader.lines(); - while let Some(Ok(line)) = lines.next() { - let p1 = ::std::path::PathBuf::from(line); // path on FS - let p2 = p1.strip_prefix(static_dir).unwrap(); // path in VPK - cmd.arg("--add").arg(format!("{}={}", p1.display(), p2.display())); - } - } - - cmd.arg(out_dir.join(format!("{}.vpk", crate_name))) - .output() - .expect("command failed."); - } - ''' -] +[package.metadata.vita] +# A unique 9 character alphanumeric identifier of the app. +title_id = "RUSTAPP01" +# A title that will be used for the app. Optional, name will be used if not defined +title_name = "My application" ``` -After running the above script, you should be able to get a *.vpk file in the same folder your *.elf executable resides. Now you can pick it and install it on your own PlayStation Vita using, or you can use an [Vita3K](https://vita3k.org/) emulator. +To build a VPK with ELF in the release profile, run: + +```sh +cargo vita build vpk --release +``` + +After building a *.vpk file it can be uploaded to a PlayStation Vita and installed, or used with a [Vita3K](https://vita3k.org/) emulator. ## Testing -Currently there is no support to run the rustc test suite for this target. +The default Rust test runner is supported, and tests can be compiled to an elf and packed to a *.vpk file using `cargo-vita` tool. Filtering tests is not currently supported since passing command-line arguments to the executable is not supported on Vita, so the runner will always execute all tests. + +The Rust test suite for `library/std` is not yet supported. ## Cross-compilation -This target can be cross-compiled from `x86_64` on either Windows, MacOS or Linux systems. Other hosts are not supported for cross-compilation. +This target can be cross-compiled from `x86_64` on Windows, MacOS or Linux systems. Other hosts are not supported for cross-compilation. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/csky-unknown-linux-gnuabiv2.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/csky-unknown-linux-gnuabiv2.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/csky-unknown-linux-gnuabiv2.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/csky-unknown-linux-gnuabiv2.md 2023-12-21 16:55:28.000000000 +0000 @@ -4,8 +4,21 @@ This target supports [C-SKY](https://github.com/c-sky) CPUs with `abi` v2 and `glibc`. 
-https://c-sky.github.io/ -https://gitlab.com/c-sky/ +target | std | host | notes +-------|:---:|:----:|------- +`csky-unknown-linux-gnuabiv2` | ✓ | | C-SKY abiv2 Linux (little endian) +`csky-unknown-linux-gnuabiv2hf` | ✓ | | C-SKY abiv2 Linux, hardfloat (little endian) + +Reference: + +- [CSKY ABI Manual](https://occ-oss-prod.oss-cn-hangzhou.aliyuncs.com/resource//1695027452256/T-HEAD_800_Series_ABI_Standards_Manual.pdf) +- [csky-linux-gnuabiv2-toolchain](https://occ-oss-prod.oss-cn-hangzhou.aliyuncs.com/resource/1356021/1619528643136/csky-linux-gnuabiv2-tools-x86_64-glibc-linux-4.9.56-20210423.tar.gz) +- [csky-linux-gnuabiv2-qemu](https://occ-oss-prod.oss-cn-hangzhou.aliyuncs.com/resource//1689324918932/xuantie-qemu-x86_64-Ubuntu-18.04-20230714-0202.tar.gz) + +other links: + +- https://c-sky.github.io/ +- https://gitlab.com/c-sky/ ## Target maintainers @@ -13,7 +26,6 @@ ## Requirements - ## Building the target ### Get a C toolchain @@ -28,13 +40,17 @@ ```toml [build] -target = ["x86_64-unknown-linux-gnu", "csky-unknown-linux-gnuabiv2"] +target = ["x86_64-unknown-linux-gnu", "csky-unknown-linux-gnuabiv2", "csky-unknown-linux-gnuabiv2hf"] stage = 2 [target.csky-unknown-linux-gnuabiv2] # ADJUST THIS PATH TO POINT AT YOUR TOOLCHAIN cc = "${TOOLCHAIN_PATH}/bin/csky-linux-gnuabiv2-gcc" +[target.csky-unknown-linux-gnuabiv2hf] +# ADJUST THIS PATH TO POINT AT YOUR TOOLCHAIN +cc = "${TOOLCHAIN_PATH}/bin/csky-linux-gnuabiv2-gcc" + ### Build ```sh diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/fuchsia.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/fuchsia.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/fuchsia.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/fuchsia.md 2023-12-21 16:55:28.000000000 +0000 @@ -692,10 +692,12 @@ test environment with: ```sh -src/ci/docker/scripts/fuchsia-test-runner.py start \ +( \ + src/ci/docker/scripts/fuchsia-test-runner.py start \ --rust-build ${RUST_SRC_PATH}/build \ --sdk ${SDK_PATH} \ --target {x86_64-unknown-fuchsia|aarch64-unknown-fuchsia} \ +) ``` Where `${RUST_SRC_PATH}/build` is the `build-dir` set in `config.toml` and diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/mips-release-6.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/mips-release-6.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/mips-release-6.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/mips-release-6.md 2023-12-21 16:55:28.000000000 +0000 @@ -67,7 +67,7 @@ ### Prerequisite: Disable debuginfo -A LLVM bug makes rustc crash if debug or debug info generation is enabled. You need to edit `config.toml` to disable this: +An LLVM bug makes rustc crash if debug or debug info generation is enabled. You need to edit `config.toml` to disable this: ```toml [rust] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/nto-qnx.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/nto-qnx.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/nto-qnx.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/nto-qnx.md 2023-12-21 16:55:28.000000000 +0000 @@ -98,7 +98,7 @@ ```toml profile = "compiler" -changelog-seen = 2 +change-id = 115898 ``` 2. 
Compile the Rust toolchain for an `x86_64-unknown-linux-gnu` host (for both `aarch64` and `x86_64` targets) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/openharmony.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/openharmony.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/openharmony.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/openharmony.md 2023-12-21 16:55:28.000000000 +0000 @@ -101,7 +101,7 @@ ```toml profile = "compiler" -changelog-seen = 2 +change-id = 115898 [build] sanitizers = true diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/unknown-uefi.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/unknown-uefi.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/unknown-uefi.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support/unknown-uefi.md 2023-12-21 16:55:28.000000000 +0000 @@ -265,9 +265,14 @@ #### os_str - While the strings in UEFI should be valid UCS-2, in practice, many implementations just do not care and use UTF-16 strings. - Thus, the current implementation supports full UTF-16 strings. +#### stdio +- Uses `Simple Text Input Protocol` and `Simple Text Output Protocol`. +- Note: UEFI uses CRLF for new line. This means Enter key is registered as CR instead of LF. +#### args +- Uses `EFI_LOADED_IMAGE_PROTOCOL->LoadOptions` ## Example: Hello World With std -The following code features a valid UEFI application, including stdio and `alloc` (`OsString` and `Vec`): +The following code features a valid UEFI application, including `stdio` and `alloc` (`OsString` and `Vec`): This example can be compiled as binary crate via `cargo` using the toolchain compiled from the above source (named custom): @@ -286,6 +291,9 @@ }; pub fn main() { + println!("Starting Rust Application..."); + + // Use System Table Directly let st = env::system_table().as_ptr() as *mut efi::SystemTable; let mut s: Vec = OsString::from("Hello World!\n").encode_wide().collect(); s.push(0); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/platform-support.md 2023-12-21 16:55:28.000000000 +0000 @@ -33,9 +33,9 @@ target | notes -------|------- `aarch64-unknown-linux-gnu` | ARM64 Linux (kernel 4.1, glibc 2.17+) [^missing-stack-probes] -`i686-pc-windows-gnu` | 32-bit MinGW (Windows 7+) [^windows-support] -`i686-pc-windows-msvc` | 32-bit MSVC (Windows 7+) [^windows-support] -`i686-unknown-linux-gnu` | 32-bit Linux (kernel 3.2+, glibc 2.17+) +`i686-pc-windows-gnu` | 32-bit MinGW (Windows 7+) [^windows-support] [^x86_32-floats-return-ABI] +`i686-pc-windows-msvc` | 32-bit MSVC (Windows 7+) [^windows-support] [^x86_32-floats-return-ABI] +`i686-unknown-linux-gnu` | 32-bit Linux (kernel 3.2+, glibc 2.17+) [^x86_32-floats-return-ABI] `x86_64-apple-darwin` | 64-bit macOS (10.12+, Sierra+) `x86_64-pc-windows-gnu` | 64-bit MinGW (Windows 7+) [^windows-support] `x86_64-pc-windows-msvc` | 64-bit MSVC (Windows 7+) [^windows-support] @@ -47,7 +47,10 @@ [^windows-support]: Only Windows 10 currently undergoes automated testing. Earlier versions of Windows rely on testing and support from the community. 
+[^x86_32-floats-return-ABI]: Due to limitations of the C ABI, floating-point support on `i686` targets is non-compliant: floating-point return values are passed via an x87 register, so NaN payload bits can be lost. See [issue #114479][x86-32-float-issue]. + [77071]: https://github.com/rust-lang/rust/issues/77071 +[x86-32-float-issue]: https://github.com/rust-lang/rust/issues/114479 ## Tier 1 @@ -90,10 +93,6 @@ `arm-unknown-linux-gnueabihf` | ARMv6 Linux, hardfloat (kernel 3.2, glibc 2.17) `armv7-unknown-linux-gnueabihf` | ARMv7-A Linux, hardfloat (kernel 3.2, glibc 2.17) [`loongarch64-unknown-linux-gnu`](platform-support/loongarch-linux.md) | LoongArch64 Linux, LP64D ABI (kernel 5.19, glibc 2.36) -`mips-unknown-linux-gnu` | MIPS Linux (kernel 4.4, glibc 2.23) -`mips64-unknown-linux-gnuabi64` | MIPS64 Linux, n64 ABI (kernel 4.4, glibc 2.23) -`mips64el-unknown-linux-gnuabi64` | MIPS64 (LE) Linux, n64 ABI (kernel 4.4, glibc 2.23) -`mipsel-unknown-linux-gnu` | MIPS (LE) Linux (kernel 4.4, glibc 2.23) `powerpc-unknown-linux-gnu` | PowerPC Linux (kernel 3.2, glibc 2.17) `powerpc64-unknown-linux-gnu` | PPC64 Linux (kernel 3.2, glibc 2.17) `powerpc64le-unknown-linux-gnu` | PPC64LE Linux (kernel 3.10, glibc 2.17) @@ -150,19 +149,16 @@ `armv7r-none-eabi` | * | Bare ARMv7-R `armv7r-none-eabihf` | * | Bare ARMv7-R, hardfloat `asmjs-unknown-emscripten` | ✓ | asm.js via Emscripten -`i586-pc-windows-msvc` | * | 32-bit Windows w/o SSE -`i586-unknown-linux-gnu` | ✓ | 32-bit Linux w/o SSE (kernel 3.2, glibc 2.17) -`i586-unknown-linux-musl` | ✓ | 32-bit Linux w/o SSE, MUSL -[`i686-linux-android`](platform-support/android.md) | ✓ | 32-bit x86 Android -`i686-unknown-freebsd` | ✓ | 32-bit FreeBSD -`i686-unknown-linux-musl` | ✓ | 32-bit Linux with MUSL +`i586-pc-windows-msvc` | * | 32-bit Windows w/o SSE [^x86_32-floats-x87] +`i586-unknown-linux-gnu` | ✓ | 32-bit Linux w/o SSE (kernel 3.2, glibc 2.17) [^x86_32-floats-x87] +`i586-unknown-linux-musl` | ✓ | 32-bit Linux w/o SSE, MUSL [^x86_32-floats-x87] +[`i586-unknown-netbsd`](platform-support/netbsd.md) | ✓ | 32-bit x86, restricted to Pentium +[`i686-linux-android`](platform-support/android.md) | ✓ | 32-bit x86 Android [^x86_32-floats-return-ABI] +`i686-unknown-freebsd` | ✓ | 32-bit FreeBSD [^x86_32-floats-return-ABI] +`i686-unknown-linux-musl` | ✓ | 32-bit Linux with MUSL [^x86_32-floats-return-ABI] [`i686-unknown-uefi`](platform-support/unknown-uefi.md) | * | 32-bit UEFI [`loongarch64-unknown-none`](platform-support/loongarch-none.md) | * | | LoongArch64 Bare-metal (LP64D ABI) [`loongarch64-unknown-none-softfloat`](platform-support/loongarch-none.md) | * | | LoongArch64 Bare-metal (LP64S ABI) -`mips-unknown-linux-musl` | ✓ | MIPS Linux with MUSL -`mips64-unknown-linux-muslabi64` | ✓ | MIPS64 Linux, n64 ABI, MUSL -`mips64el-unknown-linux-muslabi64` | ✓ | MIPS64 (LE) Linux, n64 ABI, MUSL -`mipsel-unknown-linux-musl` | ✓ | MIPS (LE) Linux with MUSL [`nvptx64-nvidia-cuda`](platform-support/nvptx64-nvidia-cuda.md) | * | --emit=asm generates PTX code that [runs on NVIDIA GPUs] `riscv32i-unknown-none-elf` | * | Bare RISC-V (RV32I ISA) `riscv32imac-unknown-none-elf` | * | Bare RISC-V (RV32IMAC ISA) @@ -195,6 +191,8 @@ `x86_64-unknown-redox` | ✓ | Redox OS [`x86_64-unknown-uefi`](platform-support/unknown-uefi.md) | * | 64-bit UEFI +[^x86_32-floats-x87]: Floating-point support on `i586` targets is non-compliant: the `x87` registers and instructions used for these targets do not provide IEEE-754-compliant behavior, in particular when it comes to rounding and NaN payload 
bits. See [issue #114479][x86-32-float-issue]. + [Fortanix ABI]: https://edp.fortanix.com/ ## Tier 3 @@ -220,6 +218,7 @@ -------|:---:|:----:|------- `aarch64-apple-ios-macabi` | ? | | Apple Catalyst on ARM64 [`aarch64-apple-tvos`](platform-support/apple-tvos.md) | ? | | ARM64 tvOS +[`aarch64-apple-tvos-sim`](platform-support/apple-tvos.md) | ? | | ARM64 tvOS Simulator [`aarch64-apple-watchos-sim`](platform-support/apple-watchos.md) | ✓ | | ARM64 Apple WatchOS Simulator [`aarch64-kmc-solid_asp3`](platform-support/kmc-solid.md) | ✓ | | ARM64 SOLID with TOPPERS/ASP3 [`aarch64-nintendo-switch-freestanding`](platform-support/aarch64-nintendo-switch-freestanding.md) | * | | ARM64 Nintendo Switch, Horizon @@ -247,7 +246,7 @@ `armv6-unknown-freebsd` | ✓ | ✓ | ARMv6 FreeBSD [`armv6-unknown-netbsd-eabihf`](platform-support/netbsd.md) | ✓ | ✓ | ARMv6 NetBSD w/hard-float [`armv6k-nintendo-3ds`](platform-support/armv6k-nintendo-3ds.md) | ? | | ARMv6K Nintendo 3DS, Horizon (Requires devkitARM toolchain) -[`armv7-sony-vita-newlibeabihf`](platform-support/armv7-sony-vita-newlibeabihf.md) | ? | | ARMv7-A Cortex-A9 Sony PlayStation Vita (requires VITASDK toolchain) +[`armv7-sony-vita-newlibeabihf`](platform-support/armv7-sony-vita-newlibeabihf.md) | ✓ | | ARMv7-A Cortex-A9 Sony PlayStation Vita (requires VITASDK toolchain) [`armv7-unknown-linux-ohos`](platform-support/openharmony.md) | ✓ | | ARMv7-A OpenHarmony | [`armv7-unknown-linux-uclibceabi`](platform-support/armv7-unknown-linux-uclibceabi.md) | ✓ | ✓ | ARMv7-A Linux with uClibc, softfloat [`armv7-unknown-linux-uclibceabihf`](platform-support/armv7-unknown-linux-uclibceabihf.md) | ✓ | ? | ARMv7-A Linux with uClibc, hardfloat @@ -262,23 +261,33 @@ `avr-unknown-gnu-atmega328` | * | | AVR. Requires `-Z build-std=core` `bpfeb-unknown-none` | * | | BPF (big endian) `bpfel-unknown-none` | * | | BPF (little endian) -`csky-unknown-linux-gnuabiv2` | ✓ | | C-SKY abiv2 Linux(little endian) +`csky-unknown-linux-gnuabiv2` | ✓ | | C-SKY abiv2 Linux (little endian) +`csky-unknown-linux-gnuabiv2hf` | ✓ | | C-SKY abiv2 Linux, hardfloat (little endian) `hexagon-unknown-linux-musl` | ? | | -`i386-apple-ios` | ✓ | | 32-bit x86 iOS -[`i586-pc-nto-qnx700`](platform-support/nto-qnx.md) | * | | 32-bit x86 QNX Neutrino 7.0 RTOS | -`i686-apple-darwin` | ✓ | ✓ | 32-bit macOS (10.12+, Sierra+) -`i686-pc-windows-msvc` | * | | 32-bit Windows XP support -[`i686-pc-windows-gnullvm`](platform-support/pc-windows-gnullvm.md) | ✓ | ✓ | -`i686-unknown-haiku` | ✓ | ✓ | 32-bit Haiku -[`i686-unknown-hurd-gnu`](platform-support/hurd.md) | ✓ | ✓ | 32-bit GNU/Hurd -[`i686-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | NetBSD/i386 with SSE2 -[`i686-unknown-openbsd`](platform-support/openbsd.md) | ✓ | ✓ | 32-bit OpenBSD -`i686-uwp-windows-gnu` | ? | | -`i686-uwp-windows-msvc` | ? | | -`i686-wrs-vxworks` | ? 
| | +`i386-apple-ios` | ✓ | | 32-bit x86 iOS [^x86_32-floats-return-ABI] +[`i586-pc-nto-qnx700`](platform-support/nto-qnx.md) | * | | 32-bit x86 QNX Neutrino 7.0 RTOS [^x86_32-floats-return-ABI] +`i686-apple-darwin` | ✓ | ✓ | 32-bit macOS (10.12+, Sierra+) [^x86_32-floats-return-ABI] +`i686-pc-windows-msvc` | * | | 32-bit Windows XP support [^x86_32-floats-return-ABI] +[`i686-pc-windows-gnullvm`](platform-support/pc-windows-gnullvm.md) | ✓ | ✓ | [^x86_32-floats-return-ABI] +`i686-unknown-haiku` | ✓ | ✓ | 32-bit Haiku [^x86_32-floats-return-ABI] +[`i686-unknown-hurd-gnu`](platform-support/hurd.md) | ✓ | ✓ | 32-bit GNU/Hurd [^x86_32-floats-return-ABI] +[`i686-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | NetBSD/i386 with SSE2 [^x86_32-floats-return-ABI] +[`i686-unknown-openbsd`](platform-support/openbsd.md) | ✓ | ✓ | 32-bit OpenBSD [^x86_32-floats-return-ABI] +`i686-uwp-windows-gnu` | ? | | [^x86_32-floats-return-ABI] +`i686-uwp-windows-msvc` | ? | | [^x86_32-floats-return-ABI] +`i686-wrs-vxworks` | ? | | [^x86_32-floats-return-ABI] [`m68k-unknown-linux-gnu`](platform-support/m68k-unknown-linux-gnu.md) | ? | | Motorola 680x0 Linux +`mips-unknown-linux-gnu` | ✓ | ✓ | MIPS Linux (kernel 4.4, glibc 2.23) +`mips-unknown-linux-musl` | ✓ | | MIPS Linux with musl libc `mips-unknown-linux-uclibc` | ✓ | | MIPS Linux with uClibc [`mips64-openwrt-linux-musl`](platform-support/mips64-openwrt-linux-musl.md) | ? | | MIPS64 for OpenWrt Linux MUSL +`mips64-unknown-linux-gnuabi64` | ✓ | ✓ | MIPS64 Linux, N64 ABI (kernel 4.4, glibc 2.23) +`mips64-unknown-linux-muslabi64` | ✓ | | MIPS64 Linux, N64 ABI, musl libc +`mips64el-unknown-linux-gnuabi64` | ✓ | ✓ | MIPS64 (little endian) Linux, N64 ABI (kernel 4.4, glibc 2.23) +`mips64el-unknown-linux-muslabi64` | ✓ | | MIPS64 (little endian) Linux, N64 ABI, musl libc +`mipsel-unknown-linux-gnu` | ✓ | ✓ | MIPS (little endian) Linux (kernel 4.4, glibc 2.23) +`mipsel-unknown-linux-musl` | ✓ | | MIPS (little endian) Linux with musl libc +[`mipsel-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | 32-bit MIPS (LE), requires mips32 cpu support `mipsel-sony-psp` | * | | MIPS (LE) Sony PlayStation Portable (PSP) [`mipsel-sony-psx`](platform-support/mipsel-sony-psx.md) | * | | MIPS (LE) Sony PlayStation 1 (PSX) `mipsel-unknown-linux-uclibc` | ✓ | | MIPS (LE) Linux with uClibc @@ -301,7 +310,7 @@ `powerpc64-wrs-vxworks` | ? | | `powerpc64le-unknown-linux-musl` | ? | | [`powerpc64-unknown-openbsd`](platform-support/openbsd.md) | ✓ | ✓ | OpenBSD/powerpc64 -`powerpc64-ibm-aix` | ? | | 64-bit AIX (7.2 and newer) +[`powerpc64-ibm-aix`](platform-support/aix.md) | ? | | 64-bit AIX (7.2 and newer) `riscv32gc-unknown-linux-gnu` | | | RISC-V Linux (kernel 5.4, glibc 2.33) `riscv32gc-unknown-linux-musl` | | | RISC-V Linux (kernel 5.4, musl + RISCV32 support patches) `riscv32im-unknown-none-elf` | * | | Bare RISC-V (RV32IM ISA) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/profile-guided-optimization.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/profile-guided-optimization.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc/src/profile-guided-optimization.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc/src/profile-guided-optimization.md 2023-12-21 16:55:28.000000000 +0000 @@ -145,3 +145,26 @@ to use PGO with Rust. 
[clang-pgo]: https://clang.llvm.org/docs/UsersManual.html#profile-guided-optimization + +## Community Maintained Tools + +As an alternative to directly using the compiler for Profile-Guided Optimization, +you may choose to go with `cargo-pgo`, which has an intuitive command-line API +and saves you the trouble of doing all the manual work. You can read more about +it in their repository accessible from this link: https://github.com/Kobzol/cargo-pgo + +For the sake of completeness, here are the corresponding steps using `cargo-pgo`: + +```bash +# Install if you haven't already +cargo install cargo-pgo + +cargo pgo build +cargo pgo optimize +``` + +These steps will do the following just as before: + +1. Build an instrumented binary from the source code. +2. Run the instrumented binary to gather PGO profiles. +3. Use the gathered PGO profiles from the last step to build an optimized binary. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-example.rs 2023-12-21 16:55:40.000000000 +0000 @@ -63,6 +63,8 @@ // Registry of diagnostics codes. registry: registry::Registry::new(&rustc_error_codes::DIAGNOSTICS), make_codegen_backend: None, + expanded_args: Vec::new(), + ice_file: None, }; rustc_interface::run_compiler(config, |compiler| { compiler.enter(|queries| { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-getting-diagnostics.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-getting-diagnostics.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-getting-diagnostics.rs 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-getting-diagnostics.rs 2023-12-21 16:55:40.000000000 +0000 @@ -73,6 +73,8 @@ override_queries: None, registry: registry::Registry::new(&rustc_error_codes::DIAGNOSTICS), make_codegen_backend: None, + expanded_args: Vec::new(), + ice_file: None, }; rustc_interface::run_compiler(config, |compiler| { compiler.enter(|queries| { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/examples/rustc-driver-interacting-with-the-ast.rs 2023-12-21 16:55:40.000000000 +0000 @@ -51,6 +51,8 @@ override_queries: None, make_codegen_backend: None, registry: registry::Registry::new(&rustc_error_codes::DIAGNOSTICS), + expanded_args: Vec::new(), + ice_file: None, }; rustc_interface::run_compiler(config, |compiler| { compiler.enter(|queries| { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/SUMMARY.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/SUMMARY.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/SUMMARY.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/SUMMARY.md 2023-12-21 16:55:40.000000000 +0000 @@ -124,6 +124,7 
@@ - [Goals and clauses](./traits/goals-and-clauses.md) - [Canonical queries](./traits/canonical-queries.md) - [Next-gen trait solving](./solve/trait-solving.md) + - [Invariants of the type system](./solve/invariants.md) - [The solver](./solve/the-solver.md) - [Canonicalization](./solve/canonicalization.md) - [Coinduction](./solve/coinduction.md) @@ -135,6 +136,7 @@ - [Opaque Types](./opaque-types-type-alias-impl-trait.md) - [Inference details](./opaque-types-impl-trait-inference.md) - [Return Position Impl Trait In Trait](./return-position-impl-trait-in-trait.md) +- [Effect checking](./effects.md) - [Pattern and Exhaustiveness Checking](./pat-exhaustive-checking.md) - [MIR dataflow](./mir/dataflow.md) - [Drop elaboration](./mir/drop-elaboration.md) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/appendix/bibliography.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/appendix/bibliography.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/appendix/bibliography.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/appendix/bibliography.md 2023-12-21 16:55:40.000000000 +0000 @@ -79,7 +79,7 @@ Rust](https://munksgaard.me/papers/laumann-munksgaard-larsen.pdf). Philip Munksgaard's master's thesis. Research for Servo. * [Ownership is Theft: Experiences Building an Embedded OS in Rust - Amit Levy, et. al.](https://amitlevy.com/papers/tock-plos2015.pdf) -* [You can't spell trust without Rust](https://raw.githubusercontent.com/Gankro/thesis/master/thesis.pdf). Alexis Beingessner's master's thesis. +* [You can't spell trust without Rust](https://faultlore.com/blah/papers/thesis.pdf). Aria Beingessner's master's thesis. * [Rust-Bio: a fast and safe bioinformatics library](https://academic.oup.com/bioinformatics/article/32/3/444/1743419). Johannes Köster * [Safe, Correct, and Fast Low-Level Networking](https://octarineparrot.com/assets/msci_paper.pdf). Robert Clipsham's master's thesis. * [Formalizing Rust traits](https://open.library.ubc.ca/cIRcle/collections/ubctheses/24/items/1.0220521). Jonatan Milewski's master's thesis. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/appendix/glossary.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/appendix/glossary.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/appendix/glossary.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/appendix/glossary.md 2023-12-21 16:55:40.000000000 +0000 @@ -25,6 +25,7 @@ drop glue   | (internal) compiler-generated instructions that handle calling the destructors (`Drop`) for data types. DST   | Short for Dynamically-Sized Type, this is a type for which the compiler cannot statically know the size in memory (e.g. `str` or `[u8]`). Such types don't implement `Sized` and cannot be allocated on the stack. They can only occur as the last field in a struct. They can only be used behind a pointer (e.g. `&str` or `&[u8]`). early-bound lifetime   | A lifetime region that is substituted at its definition site. Bound in an item's `Generics` and substituted using a `GenericArgs`. Contrast with **late-bound lifetime**. ([see more](https://doc.rust-lang.org/nightly/nightly-rustc/rustc_type_ir/sty/enum.RegionKind.html#bound-regions)) +effects   | Right now only means const traits and `~const` bounds. ([see more](../effects.md)) empty type   | see "uninhabited type". 
fat pointer   | A two word value carrying the address of some value, along with some further information necessary to put the value to use. Rust includes two kinds of "fat pointers": references to slices, and trait objects. A reference to a slice carries the starting address of the slice and its length. A trait object carries a value's address and a pointer to the trait's implementation appropriate to that value. "Fat pointers" are also known as "wide pointers", and "double pointers". free variable   | A "free variable" is one that is not bound within an expression or term; see [the background chapter for more](./background.md#free-vs-bound) @@ -93,6 +94,7 @@ upvar   | A variable captured by a closure from outside the closure. variance   | Determines how changes to a generic type/lifetime parameter affect subtyping; for example, if `T` is a subtype of `U`, then `Vec` is a subtype `Vec` because `Vec` is *covariant* in its generic parameter. See [the background chapter](./background.md#variance) for a more general explanation. See the [variance chapter](../variance.md) for an explanation of how type checking handles variance. variant index   | In an enum, identifies a variant by assigning them indices starting at 0. This is purely internal and not to be confused with the ["discriminant"](#discriminant) which can be overwritten by the user (e.g. `enum Bool { True = 42, False = 0 }`). +Well-formedness   | Semantically:An expression that evaluates to meaningful result. In Type Systems: A type related construct which follows rules of the type system. wide pointer   | A pointer with additional metadata. See "fat pointer" for more. ZST   | Zero-Sized Type. A type whose values have size 0 bytes. Since `2^0 = 1`, such types can have exactly one value. For example, `()` (unit) is a ZST. `struct Foo;` is also a ZST. The compiler can do some nice optimizations around ZSTs. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/building/suggested.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/building/suggested.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/building/suggested.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/building/suggested.md 2023-12-21 16:55:40.000000000 +0000 @@ -277,7 +277,6 @@ # `config.toml.example`) from `1bd30ce2aac40c7698aa4a1b9520aa649ff2d1c5` config = pkgs.writeText "rustc-config" '' profile = "compiler" # you may want to choose a different profile, like `library` or `tools` - changelog-seen = 2 [build] patch-binaries-for-nix = true diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/effects.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/effects.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/effects.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/effects.md 2023-12-21 16:55:40.000000000 +0000 @@ -0,0 +1,66 @@ +# Effects and effect checking + +Note: all of this describes the implementation of the unstable `effects` and +`const_trait_impl` features. None of this implementation is usable or visible from +stable Rust. + +The implementation of const traits and `~const` bounds is a limited effect system. +It is used to allow trait bounds on `const fn` to be used within the `const fn` for +method calls. Within the function, in order to know whether a method on a trait +bound is `const`, we need to know whether there is a `~const` bound for the trait. 
+In order to know whether we can instantiate a `~const` bound on a `const fn`, we +need to know whether there is a `const_trait` impl for the type and trait being +used (or whether the `const fn` is used at runtime, then any type implementing the +trait is ok, just like with other bounds). + +We perform these checks via a const generic boolean that gets attached to all +`const fn` and `const trait`. The following sections will explain the desugarings +and the way we perform the checks at call sites. + +The const generic boolean is inverted to the meaning of `const`. In the compiler +it is called `host`, because it enables "host APIs" like `static` items, network +access, disk access, random numbers and everything else that isn't available in +`const` contexts. So `false` means "const", `true` means "not const" and if it's +a generic parameter, it means "maybe const" (meaning we're in a const fn or const +trait). + +## `const fn` + +All `const fn` have a `#[rustc_host] const host: bool` generic parameter that is +hidden from users. Any `~const Trait` bounds in the generics list or `where` bounds +of a `const fn` get converted to `Trait + Trait` bounds. The `Trait` +exists so that associated types of the generic param can be used from projections +like `::Assoc`, because there are no `` projections for now. + +## `#[const_trait] trait`s + +The `#[const_trait]` attribute gives the marked trait a `#[rustc_host] const host: bool` +generic parameter. All functions of the trait "inherit" this generic parameter, just like +they have all the regular generic parameters of the trait. Any `~const Trait` super-trait +bounds get desugared to `Trait + Trait` in order to allow using associated +types and consts of the super traits in the trait declaration. This is necessary, because +`::Assoc` is always `>::Assoc` as there is +no `` syntax. + +## `typeck` performing method and function call checks. + +When generic parameters are instantiated for any items, the `host` generic parameter +is always instantiated as an inference variable. This is a special kind of inference var +that is not part of the type or const inference variables, similar to how we have +special inference variables for type variables that we know to be an integer, but not +yet which one. These separate inference variables fall back to `true` at +the end of typeck (in `fallback_effects`) to ensure that `let _ = some_fn_item_name;` +will keep compiling. + +All actually used (in function calls, casts, or anywhere else) function items, will +have the `enforce_context_effects` method invoked. +It trivially returns if the function being called has no `host` generic parameter. + +In order to error if a non-const function is called in a const context, we have not +yet disabled the const-check logic that happens on MIR, because +`enforce_context_effects` does not yet perform this check. + +The function call's `host` parameter is then equated to the context's `host` value, +which almost always trivially succeeds, as it was an inference var. If the inference +var has already been bound (since the function item is invoked twice), the second +invocation checks it against the first. 
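
To make the desugaring above concrete, here is a small nightly-only sketch of the surface syntax this machinery supports. The feature is unstable and its syntax has shifted between versions, so treat the exact feature gates and spellings as illustrative rather than authoritative.

```rust
#![feature(const_trait_impl)]

#[const_trait]
trait Increment {
    fn increment(self) -> Self;
}

impl const Increment for u32 {
    fn increment(self) -> Self {
        self + 1
    }
}

// The `~const` bound means "const when called in a const context, an ordinary
// bound at runtime"; internally this is tracked through the hidden `host`
// effect parameter described above.
const fn bump<T: ~const Increment>(x: T) -> T {
    x.increment()
}

const BUMPED: u32 = bump(41);

fn main() {
    assert_eq!(BUMPED, 42);
    assert_eq!(bump(1u32), 2); // also usable at runtime
}
```
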
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/feature-gates.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/feature-gates.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/feature-gates.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/feature-gates.md 2023-12-21 16:55:40.000000000 +0000 @@ -20,12 +20,12 @@ To remove a feature gate, follow these steps: -1. Remove the feature gate declaration in `rustc_feature/src/active.rs`. +1. Remove the feature gate declaration in `rustc_feature/src/unstable.rs`. It will look like this: ```rust,ignore /// description of feature - (active, $feature_name, "$version", Some($tracking_issue_number), $edition) + (unstable, $feature_name, "$version", Some($tracking_issue_number), $edition) ``` 2. Add a modified version of the feature gate declaration that you just @@ -45,12 +45,12 @@ To rename a feature gate, follow these steps (the first two are the same steps to follow when [removing a feature gate][removing]): -1. Remove the old feature gate declaration in `rustc_feature/src/active.rs`. +1. Remove the old feature gate declaration in `rustc_feature/src/unstable.rs`. It will look like this: ```rust,ignore /// description of feature - (active, $old_feature_name, "$version", Some($tracking_issue_number), $edition) + (unstable, $old_feature_name, "$version", Some($tracking_issue_number), $edition) ``` 2. Add a modified version of the old feature gate declaration that you just @@ -64,12 +64,12 @@ ``` 3. Add a feature gate declaration with the new name to - `rustc_feature/src/active.rs`. It should look very similar to the old + `rustc_feature/src/unstable.rs`. It should look very similar to the old declaration: ```rust,ignore /// description of feature - (active, $new_feature_name, "$version", Some($tracking_issue_number), $edition) + (unstable, $new_feature_name, "$version", Some($tracking_issue_number), $edition) ``` diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/hir-debugging.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/hir-debugging.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/hir-debugging.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/hir-debugging.md 2023-12-21 16:55:40.000000000 +0000 @@ -1,6 +1,13 @@ # HIR Debugging -The `-Z unpretty=hir-tree` flag will dump out the HIR. +Use the `-Z unpretty=hir` flag to produce a human-readable representation of the HIR. +For cargo projects this can be done with `cargo rustc -- -Z unpretty=hir`. +This output is useful when you need to see at a glance how your code was desugared and transformed +during AST lowering. + +For a full `Debug` dump of the data in the HIR, use the `-Z unpretty=hir-tree` flag. +This may be useful when you need to see the full structure of the HIR from the perspective of the +compiler. If you are trying to correlate `NodeId`s or `DefId`s with source code, the `-Z unpretty=expanded,identified` flag may be useful. 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/implementing_new_features.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/implementing_new_features.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/implementing_new_features.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/implementing_new_features.md 2023-12-21 16:55:40.000000000 +0000 @@ -123,12 +123,12 @@ 1. Add the feature name to `rustc_span/src/symbol.rs` in the `Symbols {...}` block. -1. Add a feature gate declaration to `rustc_feature/src/active.rs` in the active +1. Add a feature gate declaration to `rustc_feature/src/unstable.rs` in the unstable `declare_features` block. ```rust ignore /// description of feature - (active, $feature_name, "CURRENT_RUSTC_VERSION", Some($tracking_issue_number), $edition) + (unstable, $feature_name, "CURRENT_RUSTC_VERSION", Some($tracking_issue_number), $edition) ``` where `$edition` has the type `Option`, and is typically just `None`. If you haven't yet @@ -140,7 +140,7 @@ ```rust ignore /// Allows defining identifiers beyond ASCII. - (active, non_ascii_idents, "CURRENT_RUSTC_VERSION", Some(55467), None), + (unstable, non_ascii_idents, "CURRENT_RUSTC_VERSION", Some(55467), None), ``` Features can be marked as incomplete, and trigger the warn-by-default [`incomplete_features` diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md 2023-12-21 16:55:40.000000000 +0000 @@ -73,21 +73,21 @@ Coverage instrumentation is performed on the MIR with a [MIR pass][mir-passes] called [`InstrumentCoverage`][mir-instrument-coverage]. This MIR pass analyzes the control flow graph (CFG)--represented by MIR `BasicBlock`s--to identify -code branches, and injects additional [`Coverage`][coverage-statement] -statements into the `BasicBlock`s. +code branches, attaches [`FunctionCoverageInfo`] to the function's body, +and injects additional [`Coverage`][coverage-statement] statements into the +`BasicBlock`s. A MIR `Coverage` statement is a virtual instruction that indicates a counter should be incremented when its adjacent statements are executed, to count a span of code ([`CodeRegion`][code-region]). It counts the number of times a -branch is executed, and also specifies the exact location of that code span in -the Rust source code. +branch is executed, and is referred to by coverage mappings in the function's +coverage-info struct. -Note that many of these `Coverage` statements will _not_ be converted into +Note that many coverage counters will _not_ be converted into physical counters (or any other executable instructions) in the final binary. -Some of them will be (see [`CoverageKind::Counter`]), +Some of them will be (see [`CoverageKind::CounterIncrement`]), but other counters can be computed on the fly, when generating a coverage -report, by mapping a `CodeRegion` to a -[`CoverageKind::Expression`]. +report, by mapping a `CodeRegion` to a coverage-counter _expression_. As an example: @@ -121,8 +121,8 @@ `match` with a `break`). 
In MIR, this is typically lowered to a `SwitchInt`, with one branch to stay in the loop, and another branch to break out of the loop. The branch that breaks out will almost always execute less often, -so `InstrumentCoverage` chooses to add a `Counter` to that branch, and an -`Expression(continue) = Counter(loop) - Counter(break)` to the branch that +so `InstrumentCoverage` chooses to add a `CounterIncrement` to that branch, and +uses an expression (`Counter(loop) - Counter(break)`) for the branch that continues. The `InstrumentCoverage` MIR pass is documented in @@ -130,9 +130,9 @@ [mir-passes]: mir/passes.md [mir-instrument-coverage]: https://github.com/rust-lang/rust/tree/master/compiler/rustc_mir_transform/src/coverage +[`FunctionCoverageInfo`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/coverage/struct.FunctionCoverageInfo.html [code-region]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/coverage/struct.CodeRegion.html -[`CoverageKind::Counter`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/coverage/enum.CoverageKind.html#variant.Counter -[`CoverageKind::Expression`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/coverage/enum.CoverageKind.html#variant.Expression +[`CoverageKind::CounterIncrement`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/coverage/enum.CoverageKind.html#variant.CounterIncrement [coverage-statement]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/mir/enum.StatementKind.html#variant.Coverage [instrument-coverage-pass-details]: #implementation-details-of-the-instrumentcoverage-mir-pass @@ -150,40 +150,38 @@ match statement.kind { ... mir::StatementKind::Coverage(box ref coverage) => { - self.codegen_coverage(&mut bx, coverage.clone(), statement.source_info.scope); - bx + self.codegen_coverage(bx, coverage, statement.source_info.scope); } ``` -`codegen_coverage()` handles each `CoverageKind` as follows: +`codegen_coverage()` handles inlined statements and then forwards the coverage +statement to [`Builder::add_coverage`], which handles each `CoverageKind` as +follows: -- For all `CoverageKind`s, Coverage data (counter ID, expression equation - and ID, and code regions) are passed to the backend's `Builder`, to - populate data structures that will be used to generate the crate's - "Coverage Map". (See the [`FunctionCoverage`][function-coverage] `struct`.) -- For `CoverageKind::Counter`s, an instruction is injected in the backend + +- For both `CounterIncrement` and `ExpressionUsed`, the underlying counter or + expression ID is passed through to the corresponding [`FunctionCoverage`] + struct to indicate that the corresponding regions of code were not removed + by MIR optimizations. +- For `CoverageKind::CounterIncrement`s, an instruction is injected in the backend IR to increment the physical counter, by calling the `BuilderMethod` [`instrprof_increment()`][instrprof-increment]. ```rust - pub fn codegen_coverage(&self, bx: &mut Bx, coverage: Coverage, scope: SourceScope) { + fn add_coverage(&mut self, instance: Instance<'tcx>, coverage: &Coverage) { ... - let instance = ... // the scoped instance (current or inlined function) - let Coverage { kind, code_region } = coverage; - match kind { - CoverageKind::Counter { function_source_hash, id } => { - ... - bx.add_coverage_counter(instance, id, code_region); + let Coverage { kind } = coverage; + match *kind { + CoverageKind::CounterIncrement { id } => { + func_coverage.mark_counter_id_seen(id); ... 
bx.instrprof_increment(fn_name, hash, num_counters, index); } - CoverageKind::Expression { id, lhs, op, rhs } => { - bx.add_coverage_counter_expression(instance, id, lhs, op, rhs, code_region); + CoverageKind::ExpressionUsed { id } => { + func_coverage.mark_expression_id_seen(id); } - CoverageKind::Unreachable => { - bx.add_coverage_unreachable( - instance, - code_region.expect(... + } + } ``` > The function name `instrprof_increment()` is taken from the LLVM intrinsic @@ -199,7 +197,8 @@ [backend-lowering-mir]: backend/lowering-mir.md [codegen-statement]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_codegen_ssa/mir/struct.FunctionCx.html#method.codegen_statement [codegen-coverage]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_codegen_ssa/mir/struct.FunctionCx.html#method.codegen_coverage -[function-coverage]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_codegen_llvm/coverageinfo/map_data/struct.FunctionCoverage.html +[`Builder::add_coverage`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_codegen_llvm/builder/struct.Builder.html#method.add_coverage +[`FunctionCoverage`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_codegen_llvm/coverageinfo/map_data/struct.FunctionCoverage.html [instrprof-increment]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_codegen_ssa/traits/trait.BuilderMethods.html#tymethod.instrprof_increment ### Coverage Map Generation @@ -327,9 +326,10 @@ The `CoverageGraph` is a coverage-specific simplification of the MIR control flow graph (CFG). Its nodes are [`BasicCoverageBlock`s][bcb], which encompass one or more sequentially-executed MIR `BasicBlock`s -(with no internal branching), plus a `CoverageKind` counter (to -be added, via coverage analysis), and an optional set of additional counters -to count incoming edges (if there are more than one). +(with no internal branching). + +Nodes and edges in the graph can have associated [`BcbCounter`]s, which are +stored in [`CoverageCounters`]. The `Instrumentor`'s `inject_counters()` uses the `CoverageGraph` to compute the best places to inject coverage counters, as MIR `Statement`s, @@ -338,16 +338,15 @@ 1. [`generate_coverage_spans()`][generate-coverage-spans] computes the minimum set of distinct, non-branching code regions, from the MIR. These `CoverageSpan`s represent a span of code that must be counted. -2. [`make_bcb_counters()`][make-bcb-counters] generates `CoverageKind::Counter`s and - `CoverageKind::Expression`s for each `CoverageSpan`, plus additional - `intermediate_expressions`[^intermediate-expressions], not associated with any `CodeRegion`, but +2. [`make_bcb_counters()`][make-bcb-counters] generates `BcbCounter::Counter`s and + `BcbCounter::Expression`s for each `CoverageSpan`, plus additional + _intermediate expressions_[^intermediate-expressions] that are not associated + with any `CodeRegion`, but are required to compute a final `Expression` value for a `CodeRegion`. 3. Inject the new counters into the MIR, as new `StatementKind::Coverage` - statements. This is done by three distinct functions: - - `inject_coverage_span_counters()` - - `inject_indirect_counters()` - - `inject_intermediate_expression()`, called for each intermediate expression - returned from `make_bcb_counters()` + statements. +4. Attach all other necessary coverage information to the function's body as + [`FunctionCoverageInfo`]. [^intermediate-expressions]: Intermediate expressions are sometimes required because `Expression`s are limited to binary additions or subtractions. 
For @@ -359,7 +358,8 @@ [coverage-graph]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/graph/struct.CoverageGraph.html [inject-counters]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/struct.Instrumentor.html#method.inject_counters [bcb]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/graph/struct.BasicCoverageBlock.html -[debug]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/debug +[`BcbCounter`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/counters/enum.BcbCounter.html +[`CoverageCounters`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/counters/struct.CoverageCounters.html [generate-coverage-spans]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/spans/struct.CoverageSpans.html#method.generate_coverage_spans [make-bcb-counters]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/counters/struct.BcbCounters.html#method.make_bcb_counters @@ -505,34 +505,3 @@ [bcb-counters]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/counters/struct.BcbCounters.html [traverse-coverage-graph-with-loops]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/graph/struct.TraverseCoverageGraphWithLoops.html - -### Injecting counters into a MIR `BasicBlock` - -With the refined `CoverageSpan`s, and after all `Counter`s and `Expression`s are -created, the final step is to inject the `StatementKind::Coverage` statements -into the MIR. There are three distinct sources, handled by the following -functions: - -- [`inject_coverage_span_counters()`][inject-coverage-span-counters] injects the - counter from each `CoverageSpan`'s BCB. -- [`inject_indirect_counters()`][inject-indirect-counters] injects counters - for any BCB not assigned to a `CoverageSpan`, and for all edge counters. - These counters don't have `CoverageSpan`s. -- [`inject_intermediate_expression()`][inject-intermediate-expression] injects - the intermediate expressions returned from `make_bcb_counters()`. These - counters aren't associated with any BCB, edge, or `CoverageSpan`. - -These three functions inject the `Coverage` statements into the MIR. -`Counter`s and `Expression`s with `CoverageSpan`s add `Coverage` statements -to a corresponding `BasicBlock`, with a `CodeRegion` computed from the -refined `Span` and current `SourceMap`. - -All other `Coverage` statements have a `CodeRegion` of `None`, but they -still must be injected because they contribute to other `Expression`s. - -Finally, edge's with a `CoverageKind::Counter` require a new `BasicBlock`, -so the counter is only incremented when traversing the branch edge. 
- -[inject-coverage-span-counters]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/struct.Instrumentor.html#method.inject_coverage_span_counters -[inject-indirect-counters]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/struct.Instrumentor.html#method.inject_indirect_counters -[inject-intermediate-expression]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_mir_transform/coverage/fn.inject_intermediate_expression.html diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/return-position-impl-trait-in-trait.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/return-position-impl-trait-in-trait.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/return-position-impl-trait-in-trait.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/return-position-impl-trait-in-trait.md 2023-12-21 16:55:40.000000000 +0000 @@ -298,6 +298,24 @@ come after the `=` in `type Assoc = ...` for each RPITIT. +##### Implied bounds in RPITIT hidden type inference + +Since `collect_return_position_impl_trait_in_trait_tys` does fulfillment and +region resolution, we must provide it `assumed_wf_types` so that we can prove +region obligations with the same expected implied bounds as +`compare_method_predicate_entailment` does. + +Since the return type of a method is understood to be one of the assumed WF +types, and we eagerly fold the return type with inference variables to do +opaque type inference, after opaque type inference, the return type will +resolve to contain the hidden types of the RPITITs. this would mean that the +hidden types of the RPITITs would be assumed to be well-formed without having +independently proven that they are. This resulted in a +[subtle unsoundness bug](https://github.com/rust-lang/rust/pull/116072). In +order to prevent this cyclic reasoning, we instead replace the hidden types of +the RPITITs in the return type of the method with *placeholders*, which lead +to no implied well-formedness bounds. + #### Default trait body Type-checking a default trait body, like: diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/rustc-driver-getting-diagnostics.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/rustc-driver-getting-diagnostics.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/rustc-driver-getting-diagnostics.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/rustc-driver-getting-diagnostics.md 2023-12-21 16:55:40.000000000 +0000 @@ -7,7 +7,7 @@ To get diagnostics from the compiler, configure `rustc_interface::Config` to output diagnostic to a buffer, and run `TyCtxt.analysis`. The following was tested -with `nightly-2023-03-27`: +with `nightly-2023-10-03`: ```rust {{#include ../examples/rustc-driver-getting-diagnostics.rs}} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/rustc-driver-interacting-with-the-ast.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/rustc-driver-interacting-with-the-ast.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/rustc-driver-interacting-with-the-ast.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/rustc-driver-interacting-with-the-ast.md 2023-12-21 16:55:40.000000000 +0000 @@ -5,7 +5,7 @@ ## Getting the type of an expression To get the type of an expression, use the `global_ctxt` to get a `TyCtxt`. 
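As a hedged sketch only (assuming `tcx: TyCtxt<'_>`, the `LocalDefId` of the enclosing body owner as `def_id`, and a `&rustc_hir::Expr<'_>` as `expr` are already in scope from the driver callbacks; the full runnable example is pulled in below via `{{#include}}`), the heart of the lookup is the typeck results:

```rust
// Hedged fragment, not the guide's exact code: the `typeck` query returns the
// body's `TypeckResults`, and `expr_ty` reads the inferred type of the
// expression out of them.
let ty = tcx.typeck(def_id).expr_ty(expr);
println!("expression type: {ty:?}");
```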
-The following was tested with `nightly-2023-03-27`: +The following was tested with `nightly-2023-10-03`: ```rust {{#include ../examples/rustc-driver-interacting-with-the-ast.rs}} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/invariants.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/invariants.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/invariants.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/invariants.md 2023-12-21 16:55:40.000000000 +0000 @@ -0,0 +1,154 @@ +# Invariants of the type system + +FIXME: This file talks about invariants of the type system as a whole, not only the solver + +There are a lot of invariants - things the type system guarantees to be true at all times - +which are desirable or expected from other languages and type systems. Unfortunately, quite +a few of them do not hold in Rust right now. This is either fundamental to its design or +caused by bugs, and may change in the future. + +It is important to know about the things you can assume while working on - and with - the +type system, so here's an incomplete and unofficial list of invariants of +the core type system: + +- ✅: this invariant mostly holds, with some weird exceptions; you can rely on it outside +of these cases +- ❌: this invariant does not hold, either due to bugs or by design; you must not rely on +it for soundness, or you have to be incredibly careful when doing so + +### `wf(X)` implies `wf(normalize(X))` ✅ + +If a type containing aliases is well-formed, it should also be +well-formed after normalizing said aliases. We rely on this as +otherwise we would have to re-check for well-formedness for these +types. + +This is unfortunately broken for `>::Output` due to implied bounds, +resulting in [#114936]. + +### Structural equality modulo regions implies semantic equality ✅ + +If you have some type and equate it to itself after replacing any regions with unique +inference variables in both the lhs and rhs, the now potentially structurally different +types should still be equal to each other. + +This is needed to prevent goals from succeeding in HIR typeck and then failing in MIR borrowck. +If this invariant is broken, MIR typeck ends up failing with an ICE. + +### Applying inference results from a goal does not change its result ❌ + +TODO: this invariant is formulated in a weird way and needs to be elaborated. +Pretty much: I would like this check to only fail if there's a solver bug: +https://github.com/rust-lang/rust/blob/2ffeb4636b4ae376f716dc4378a7efb37632dc2d/compiler/rustc_trait_selection/src/solve/eval_ctxt.rs#L391-L407 + +If we prove some goal/equate types/whatever, apply the resulting inference constraints, +and then redo the original action, the result should be the same. + +This unfortunately does not hold - at least in the new solver - due to a few annoying reasons. + +### The trait solver has to be *locally sound* ✅ + +This means that we must never return *success* for goals for which no `impl` exists. That would +mean we assume a trait is implemented even though it is not, which is very likely to result in +actual unsoundness. When using `where`-bounds to prove a goal, the `impl` will be provided by the +user of the item. + +This invariant only holds if we check region constraints. As we do not check region constraints +during the implicit negative overlap check in coherence, this invariant is broken there.
As this check +relies on *completeness* of the trait solver, it is not able to use the current region constraints +check - `InferCtxt::resolve_regions` - as its handling of type outlives goals is incomplete. + +### Normalization of semantically equal aliases in empty environments results in a unique type ✅ + +Normalization for alias types/consts has to have a unique result. Otherwise we can easily +implement transmute in safe code. Given the following function, we have to make sure that +the input and output types always get normalized to the same concrete type. + +```rust +fn foo<T: Trait>( + x: <T as Trait>::Assoc +) -> <T as Trait>::Assoc { + x +} +``` + +Many of the currently known unsound issues end up relying on this invariant being broken. +It is however very difficult to imagine a sound type system without this invariant, so +the issue is that the invariant is broken, not that we incorrectly rely on it. + +### Generic goals and their instantiations have the same result ✅ + +Pretty much: if we successfully typecheck a generic function, concrete instantiations +of that function should also typeck. We should not get errors post-monomorphization. +We can however get overflow errors at that point. + +TODO: example for overflow error post-monomorphization + +This invariant is relied on to allow the normalization of generic aliases. Breaking +it can easily result in unsoundness, e.g. [#57893](https://github.com/rust-lang/rust/issues/57893) + +### Trait goals in empty environments are proven by a unique impl ✅ + +If a trait goal holds with an empty environment, there should be a unique `impl`, +either user-defined or builtin, which is used to prove that goal. This is +necessary to select a unique method. + +We do however break this invariant in a few cases, some of which are due to bugs, +some by design: +- *marker traits* are allowed to overlap as they do not have associated items +- *specialization* allows specializing impls to overlap with their parent +- the builtin trait object trait implementation can overlap with a user-defined impl: +[#57893] + +### The type system is complete ❌ + +The type system is not complete: it often adds unnecessary inference constraints, and errors +even though the goal could hold. + +- method selection +- opaque type inference +- handling type outlives constraints +- preferring `ParamEnv` candidates over `Impl` candidates during candidate selection +in the trait solver + +#### The type system is complete during the implicit negative overlap check in coherence ✅ + +During the implicit negative overlap check in coherence we must never return *error* for +goals which can be proven. This would allow for overlapping impls with potentially different +associated items, breaking a bunch of other invariants. + +This invariant is currently broken in many different ways while actually being something we rely on. +We have to be careful as it is quite easy to break: +- generalization of aliases +- generalization during subtyping binders (luckily not exploitable in coherence) + +### Trait solving must be (free) lifetime agnostic ✅ + +Trait solving during codegen should have the same result as during typeck. As we erase +all free regions during codegen we must not rely on them during typeck. A noteworthy example +is special behavior for `'static`. + +We also have to be careful with relying on equality of regions in the trait solver. +This is fine for codegen, as we treat all erased regions as equal. We can however +lose equality information from HIR to MIR typeck.
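As a small, self-contained illustration of the kind of region-equality reliance meant here (the `Same` trait below is made up for this sketch; it is not something in rustc or in the guide), an impl that only applies when its two region parameters are equal is exactly the shape that the uniquification described next is meant to keep the solver from leaning on:

```rust
// Illustrative only: `Same` is a made-up trait for this sketch.
trait Same<'a, 'b> {}
// This impl only applies when the two regions are equal.
impl<'a> Same<'a, 'a> for u32 {}

// Callers must prove `u32: Same<'a, 'b>`, i.e. that `'a` and `'b` can be
// equated; this mirrors the `u32: Trait<'x, 'x>` shape discussed below.
fn requires_same<'a, 'b>(_: &'a u32, _: &'b u32)
where
    u32: Same<'a, 'b>,
{
}

fn main() {
    let (x, y) = (1_u32, 2_u32);
    // Both borrows can shrink to a single region, so the bound is provable here.
    requires_same(&x, &y);
}
```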
+ +The new solver "uniquifies regions" during canonicalization, canonicalizing `u32: Trait<'x, 'x>` +as `exists<'0, '1> u32: Trait<'0, '1>`, to make it harder to rely on this property. + +### Removing ambiguity makes strictly more things compile ❌ + +Ideally we *should* not rely on ambiguity for things to compile, +as doing so causes future improvements to inference to be breaking changes. + +Due to *incompleteness* this is not the case, and improving inference can result in inference +changes, breaking existing projects. + +### Semantic equality implies structural equality ✅ + +Two types being equal in the type system must mean that they have the +same `TypeId` after instantiating their generic parameters with concrete +arguments. This currently does not hold: [#97156]. + +[#57893]: https://github.com/rust-lang/rust/issues/57893 +[#97156]: https://github.com/rust-lang/rust/issues/97156 +[#114936]: https://github.com/rust-lang/rust/issues/114936 \ No newline at end of file diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/the-solver.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/the-solver.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/the-solver.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/the-solver.md 2023-12-21 16:55:40.000000000 +0000 @@ -6,12 +6,71 @@ [chalk]: https://rust-lang.github.io/chalk/book/recursive.html -The basic structure of the solver is a pure function -`fn evaluate_goal(goal: Goal<'tcx>) -> Response`. -While the actual solver is not fully pure to deal with overflow and cycles, we are -going to defer that for now. +## A rough walkthrough -To deal with inference variables and to improve caching, we use -[canonicalization](./canonicalization.md). +The entry point of the solver is `InferCtxtEvalExt::evaluate_root_goal`. This +function sets up the root `EvalCtxt` and then calls `EvalCtxt::evaluate_goal`, +to actually enter the trait solver. -TODO: write the remaining code for this as well. +`EvalCtxt::evaluate_goal` handles [canonicalization](./canonicalization.md), caching, +overflow, and solver cycles. Once that is done, it creates a nested `EvalCtxt` with a +separate local `InferCtxt` and calls `EvalCtxt::compute_goal`, which is responsible for the +'actual solver behavior'. We match on the `PredicateKind`, delegating to a separate function +for each one. + +For trait goals, such as `Vec<T>: Clone`, `EvalCtxt::compute_trait_goal` has +to collect all the possible ways this goal can be proven via +`EvalCtxt::assemble_and_evaluate_candidates`. Each candidate is handled in +a separate "probe", to not leak inference constraints to the other candidates. +We then try to merge the assembled candidates via `EvalCtxt::merge_candidates`. + + +## Important concepts and design patterns + +### `EvalCtxt::add_goal` + +To prove nested goals, we don't directly call `EvalCtxt::compute_goal`, but instead +add the goal to the `EvalCtxt` with `EvalCtxt::add_goal`. We then prove all nested +goals together in either `EvalCtxt::try_evaluate_added_goals` or +`EvalCtxt::evaluate_added_goals_and_make_canonical_response`. This allows us to handle +inference constraints from later goals. + +E.g. if we have both `?x: Debug` and `(): ConstrainToU8<?x>` as nested goals, +then proving `?x: Debug` is initially ambiguous, but after proving `(): ConstrainToU8<?x>` +we constrain `?x` to `u8` and proving `u8: Debug` succeeds.
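The same "later goals constrain earlier ones" effect shows up in ordinary user code, which may help as an analogy; this snippet is a hedged illustration, not something taken from the guide:

```rust
fn main() {
    // The target type of `parse` starts out as an inference variable `?T`, so
    // the pending `?T: FromStr` obligation is ambiguous on its own...
    let x = "3".parse().unwrap();
    // ...this later use constrains `?T` to `u32`, and only then can the
    // deferred `u32: FromStr` (and `ParseIntError: Debug`, needed by `unwrap`)
    // obligations be discharged.
    let y: u32 = x;
    println!("{y}");
}
```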
+ +### Matching on `TyKind` + +We lazily normalize types in the solver, so we always have to assume that any types +and constants are potentially unnormalized. This means that matching on `TyKind` can easily +be incorrect. + +We handle normalization in two different ways. When proving `Trait` goals and when normalizing +associated types, we separately assemble candidates depending on whether they structurally +match the self type. Candidates which match on the self type are handled in +`EvalCtxt::assemble_candidates_via_self_ty`, which recurses via +`EvalCtxt::assemble_candidates_after_normalizing_self_ty`, which normalizes the self type +by one level. In all other cases where we have to match on a `TyKind`, we first use +`EvalCtxt::try_normalize_ty` to normalize the type as much as possible. + +### Higher ranked goals + +In case the goal is higher-ranked, e.g. `for<'a> F: FnOnce(&'a ())`, `EvalCtxt::compute_goal` +eagerly instantiates `'a` with a placeholder and then recursively proves +`F: FnOnce(&'!a ())` as a nested goal. + +### Dealing with choice + +Some goals can be proven in multiple ways. In these cases we try each option in +a separate "probe" and then attempt to merge the resulting responses by using +`EvalCtxt::try_merge_responses`. If merging the responses fails, we use +`EvalCtxt::flounder` instead, returning ambiguity. For some goals, we incompletely +prefer some choices over others in case `EvalCtxt::try_merge_responses` +fails. + +## Learning more + +The solver should be fairly self-contained. I hope that the above information provides a +good foundation when looking at the code itself. Please reach out on zulip if you get stuck +while doing so, or if there are some quirks and design decisions which were unclear and deserve +better comments or should be mentioned here. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/trait-solving.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/trait-solving.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/trait-solving.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/solve/trait-solving.md 2023-12-21 16:55:40.000000000 +0000 @@ -39,77 +39,6 @@ The trait solver can either return success, ambiguity or an error as a [`CanonicalResponse`]. For success and ambiguity it also returns inference constraints and region constraints. -## Requirements - -Before we dive into the new solver lets first take the time to go through all of our requirements -on the trait system. We can then use these to guide our design later on. - -TODO: elaborate on these rules and get more precise about their meaning. -Also add issues where each of these rules have been broken in the past -(or still are). - -### 1. The trait solver has to be *sound* - -This means that we must never return *success* for goals for which no `impl` exists. That would -simply be unsound by assuming a trait is implemented even though it is not. When using predicates -from the `where`-bounds, the `impl` will be proved by the user of the item. - -### 2. If type checker solves generic goal concrete instantiations of that goal have the same result - -Pretty much: If we successfully typecheck a generic function concrete instantiations -of that function should also typeck. We should not get errors post-monomorphization. -We can however get overflow as in the following snippet: - -```rust -fn foo(x: ) -``` - -### 3.
Trait goals in empty environments are proven by a unique impl - -If a trait goal holds with an empty environment, there is a unique `impl`, -either user-defined or builtin, which is used to prove that goal. - -This is necessary for codegen to select a unique method. -An exception here are *marker traits* which are allowed to overlap. - -### 4. Normalization in empty environments results in a unique type - -Normalization for alias types/consts has a unique result. Otherwise we can easily implement -transmute in safe code. Given the following function, we have to make sure that the input and -output types always get normalized to the same concrete type. -```rust -fn foo( - x: ::Assoc -) -> ::Assoc { - x -} -``` - -### 5. During coherence trait solving has to be complete - -During coherence we never return *error* for goals which can be proven. This allows overlapping -impls which would break rule 3. - -### 6. Trait solving must be (free) lifetime agnostic - -Trait solving during codegen should have the same result as during typeck. As we erase -all free regions during codegen we must not rely on them during typeck. A noteworthy example -is special behavior for `'static`. - -We also have to be careful with relying on equality of regions in the trait solver. -This is fine for codegen, as we treat all erased regions as equal. We can however -lose equality information from HIR to MIR typeck. - -### 7. Removing ambiguity makes strictly more things compile - -We *should* not rely on ambiguity for things to compile. -Not doing that will cause future improvements to be breaking changes. - -### 8. semantic equality implies structural equality - -Two types being equal in the type system must mean that they have the same `TypeId`. - - [solve]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/solve/index.html [`Goal`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_trait_selection/traits/solve/struct.Goal.html [`Predicate`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.Predicate.html diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/stabilization_guide.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/stabilization_guide.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/stabilization_guide.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/stabilization_guide.md 2023-12-21 16:55:40.000000000 +0000 @@ -109,7 +109,7 @@ ```rust,ignore // pub(restricted) visibilities (RFC 1422) -(active, pub_restricted, "CURRENT_RUSTC_VERSION", Some(32409)), +(unstable, pub_restricted, "CURRENT_RUSTC_VERSION", Some(32409)), ``` The above line should be moved down to the area for "accepted" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/tests/headers.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/tests/headers.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/tests/headers.md 2023-12-04 19:48:40.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/tests/headers.md 2023-12-21 16:55:40.000000000 +0000 @@ -190,7 +190,7 @@ test suites. * `compile-flags` passes extra command-line args to the compiler, - e.g. `compile-flags -g` which forces debuginfo to be enabled. + e.g. `// compile-flags: -g` which forces debuginfo to be enabled. * `run-flags` passes extra args to the test if the test is to be executed. * `edition` controls the edition the test should be compiled with (defaults to 2015). 
Example usage: `// edition:2018`. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/traits/unsize.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/traits/unsize.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/traits/unsize.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustc-dev-guide/src/traits/unsize.md 2023-12-21 16:55:40.000000000 +0000 @@ -0,0 +1,84 @@ +# [`CoerceUnsized`](https://doc.rust-lang.org/std/ops/trait.CoerceUnsized.html) + +`CoerceUnsized` is primarily concerned with data containers. When a struct +(typically, a smart pointer) implements `CoerceUnsized`, that means that the +data it points to is being unsized. + +Some implementors of `CoerceUnsized` include: +* `&T` +* `Arc<T>` +* `Box<T>` + +This trait is (eventually) intended to be implemented by user-written smart +pointers, and there are rules about when a type is allowed to implement +`CoerceUnsized` that are explained in the trait's documentation. + +# [`Unsize`](https://doc.rust-lang.org/std/marker/trait.Unsize.html) + +To contrast, the `Unsize` trait is concerned with the actual types that are allowed +to be unsized. + +This is not intended to be implemented by users ever, since `Unsize` does not +instruct the compiler (namely codegen) *how* to unsize a type, just whether it +is allowed to be unsized. This is paired somewhat intimately with codegen +which must understand how types are represented and unsized. + +## Primitive unsizing implementations + +Built-in implementations are provided for: +* `T` -> `dyn Trait + 'a` when `T: Trait` (and `T: Sized + 'a`, and `Trait` + is object safe). +* `[T; N]` -> `[T]` + +## Structural implementations + +There are two implementations of `Unsize` which can be thought of as +structural: +* `(A1, A2, .., An): Unsize<(A1, A2, .., U)>` given `An: Unsize<U>`, which + allows the tail field of a tuple to be unsized. This is gated behind the + [`unsized_tuple_coercion`] feature. +* `Struct<.., Pi, .., Pj, ..>: Unsize<Struct<.., Ui, .., Uj, ..>>` given + `TailField<Pi, .., Pj>: Unsize<TailField<Ui, .., Uj>>`, which allows the tail field of a + struct to be unsized if it is the only field that mentions generic parameters + `Pi`, .., `Pj` (which don't need to be contiguous). + +The rules for the latter implementation are slightly complicated, since they +may allow more than one parameter to be changed (not necessarily unsized) and +are best stated in terms of the tail field of the struct. + +[`unsized_tuple_coercion`]: https://doc.rust-lang.org/beta/unstable-book/language-features/unsized-tuple-coercion.html + +## Upcasting implementations + +Two things are called "upcasting" internally: +1. True upcasting `dyn SubTrait` -> `dyn SuperTrait` (this also allows + dropping auto traits and adjusting lifetimes, as below). +2. Dropping auto traits and adjusting the lifetimes of dyn trait + *without changing the principal[^1]*: + `dyn Trait + AutoTraits... + 'a` -> `dyn Trait + NewAutoTraits... + 'b` + when `AutoTraits` ⊇ `NewAutoTraits`, and `'a: 'b`. + +These may seem like different operations, since (1.) includes adjusting the +vtable of a dyn trait, while (2.) is a no-op. However, to the type system, +these are handled with much the same code. + +This built-in implementation of `Unsize` is the most involved, particularly +after [it was reworked](https://github.com/rust-lang/rust/pull/114036) to +support the complexities of associated types. + +Specifically, the upcasting algorithm involves: For each supertrait of the +source dyn trait's principal (including itself)... +1.
Unify the super trait ref with the principal of the target (making sure + we only ever upcast to a true supertrait, and never [via an impl]). +2. For every auto trait in the source, check that it's present in the principal + (allowing us to drop auto traits, but never gain new ones). +3. For every projection in the source, check that it unifies with a single + projection in the target (since there may be more than one given + `trait Sub: Sup<.., A = i32> + Sup<.., A = u32>`). + +[via an impl]: https://github.com/rust-lang/rust/blob/f3457dbf84cd86d284454d12705861398ece76c3/tests/ui/traits/trait-upcasting/illegal-upcast-from-impl.rs#L19 + +Specifically, (3.) prevents a choice of projection bound to guide inference +unnecessarily, though it may guide inference when it is unambiguous. + +[^1]: The principal is the one non-auto trait of a `dyn Trait`. \ No newline at end of file diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/advanced-features.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/advanced-features.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/advanced-features.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/advanced-features.md 2023-12-21 16:55:28.000000000 +0000 @@ -110,3 +110,23 @@ This URL adds the `go_to_first=true` query parameter which can be appended to any `rustdoc` search URL to automatically go to the first result. + +## `#[repr(transparent)]`: Documenting the transparent representation + +You can read more about `#[repr(transparent)]` itself in the [Rust Reference][repr-trans-ref] and +in the [Rustonomicon][repr-trans-nomicon]. + +Since this representation is only considered part of the public ABI if the single field with non-trivial +size or alignment is public and if the documentation does not state otherwise, Rustdoc helpfully displays +the attribute if and only if the non-1-ZST field is public or at least one field is public in case all +fields are 1-ZST fields. The term *1-ZST* refers to types that are one-aligned and zero-sized. + +It would seem that one can manually hide the attribute with `#[cfg_attr(not(doc), repr(transparent))]` +if one wishes to declare the representation as private even if the non-1-ZST field is public. +However, due to [current limitations][cross-crate-cfg-doc], this method is not always guaranteed to work. +Therefore, if you would like to do so, you should always write it down in prose independently of whether +you use `cfg_attr` or not. + +[repr-trans-ref]: https://doc.rust-lang.org/reference/type-layout.html#the-transparent-representation +[repr-trans-nomicon]: https://doc.rust-lang.org/nomicon/other-reprs.html#reprtransparent +[cross-crate-cfg-doc]: https://github.com/rust-lang/rust/issues/114952 diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/unstable-features.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/unstable-features.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/unstable-features.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/unstable-features.md 2023-12-21 16:55:28.000000000 +0000 @@ -207,6 +207,21 @@ mod empty_mod {} ``` +### Use the Rust logo as the crate logo + +This is for official Rust project use only. + +Internal Rustdoc pages like settings.html and scrape-examples-help.html show the Rust logo. +This logo is tracked as a static resource. The attribute `#![doc(rust_logo)]` makes this same +built-in resource act as the main logo. 
+ +```rust +#![feature(rustdoc_internals)] +#![allow(internal_features)] +#![doc(rust_logo)] +//! This crate has the Rust(tm) branding on it. +``` + ## Effects of other nightly features These nightly-only features are not primarily related to Rustdoc, @@ -613,10 +628,10 @@ ```bash $ rustdoc src/lib.rs -Z unstable-options \ - --check-cfg='names()' --check-cfg='values(feature, "foo", "bar")' + --check-cfg='cfg(feature, values("foo", "bar"))' ``` -The example above check every well known names (`target_os`, `doc`, `test`, ... via `names()`) +The example above check every well known names and values (`target_os`, `doc`, `test`, ...) and check the values of `feature`: `foo` and `bar`. ### `--generate-link-to-definition`: Generate links on types in source code diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/write-documentation/what-to-include.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/write-documentation/what-to-include.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/write-documentation/what-to-include.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/rustdoc/src/write-documentation/what-to-include.md 2023-12-21 16:55:28.000000000 +0000 @@ -73,7 +73,7 @@ ``````text /// Example /// ```rust -/// # main() -> Result<(), std::num::ParseIntError> { +/// # fn main() -> Result<(), std::num::ParseIntError> { /// let fortytwo = "42".parse::()?; /// println!("{} + 10 = {}", fortytwo, fortytwo+10); /// # Ok(()) @@ -117,7 +117,7 @@ Here is an example of a new theme, [Ayu]. -[Ayu]: https://github.com/rust-lang/rust/blob/master/src/librustdoc/html/static/css/themes/ayu.css +[Ayu]: https://github.com/rust-lang/rust/blob/master/src/librustdoc/html/static/css/rustdoc.css#L2384-L2574 [API Guidelines]: https://rust-lang.github.io/api-guidelines/documentation.html#rustdoc-does-not-show-unhelpful-implementation-details-c-hidden [Documentation tests]: documentation-tests.md [on this blog]: https://blog.guillaume-gomez.fr/articles/2016-09-16+Generating+doc+with+rustdoc+and+a+custom+theme diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/check-cfg.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/check-cfg.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/check-cfg.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/check-cfg.md 2023-12-21 16:55:28.000000000 +0000 @@ -10,97 +10,80 @@ check them. The `--check-cfg` option takes a value, called the _check cfg specification_. The check cfg specification is parsed using the Rust metadata syntax, just as the `--cfg` option is. -`--check-cfg` option can take one of two forms: +`--check-cfg` option take one form: -1. `--check-cfg names(...)` enables checking condition names. -2. `--check-cfg values(...)` enables checking the values within list-valued conditions. - -These two options are independent. `names` checks only the namespace of condition names -while `values` checks only the namespace of the values of list-valued conditions. +1. `--check-cfg cfg(...)` enables checking the values within list-valued conditions. NOTE: No implicit expectation is added when using `--cfg` for both forms. Users are expected to -pass all expected names and values using `names(...)` and `values(...)`. +pass all expected names and values using `cfg(...)`. -## The `names(...)` form +## The `cfg(...)` form -The `names(...)` form enables checking the names. 
This form uses a named list: +The `cfg(...)` form enables checking the values within list-valued conditions. It has this +basic form: ```bash -rustc --check-cfg 'names(name1, name2, ... nameN)' +rustc --check-cfg 'cfg(name1, ..., nameN, values("value1", "value2", ... "valueN"))' ``` -where each `name` is a bare identifier (has no quotes). The order of the names is not significant. +where `name` is a bare identifier (has no quotes) and each `"value"` term is a quoted literal +string. `name` specifies the name of the condition, such as `feature` or `my_cfg`. -If `--check-cfg names(...)` is specified at least once, then `rustc` will check all references to -condition names. `rustc` will check every `#[cfg]` attribute, `#[cfg_attr]` attribute, `cfg` clause -inside `#[link]` attribute and `cfg!(...)` call against the provided list of expected condition -names. If a name is not present in this list, then `rustc` will report an `unexpected_cfgs` lint -diagnostic. The default diagnostic level for this lint is `Warn`. +When the `cfg(...)` option is specified, `rustc` will check every `#[cfg(name = "value")]` +attribute, `#[cfg_attr(name = "value")]` attribute, `#[link(name = "a", cfg(name = "value"))]` +and `cfg!(name = "value")` call. It will check that the `"value"` specified is present in the +list of expected values. If `"value"` is not in it, then `rustc` will report an `unexpected_cfgs` +lint diagnostic. The default diagnostic level for this lint is `Warn`. -If `--check-cfg names(...)` is not specified, then `rustc` will not check references to condition -names. +To enable checking of values, but to provide an empty set of expected values, use these forms: -`--check-cfg names(...)` may be specified more than once. The result is that the list of valid -condition names is merged across all options. It is legal for a condition name to be specified -more than once; redundantly specifying a condition name has no effect. +```bash +rustc --check-cfg 'cfg(name1, ..., nameN)' +rustc --check-cfg 'cfg(name1, ..., nameN, values())' +``` -To enable checking condition names with an empty set of valid condition names, use the following -form. The parentheses are required. +To enable checking of names but not values (i.e. unknown expected values), use this form: ```bash -rustc --check-cfg 'names()' +rustc --check-cfg 'cfg(name1, ..., nameN, values(any()))' ``` -Note that `--check-cfg 'names()'` is _not_ equivalent to omitting the option entirely. -The first form enables checking condition names, while specifying that there are no valid -condition names (outside of the set of well-known names defined by `rustc`). Omitting the -`--check-cfg 'names(...)'` option does not enable checking condition names. - -## The `values(...)` form +The `--check-cfg cfg(...)` option can be repeated, both for the same condition name and for +different names. If it is repeated for the same condition name, then the sets of values for that +condition are merged together (precedence is given to `any()`). -The `values(...)` form enables checking the values within list-valued conditions. It has this -form: +## Well known names and values -```bash -rustc --check-cfg `values(name, "value1", "value2", ... "valueN")' -``` +`rustc` has an internal list of well known names and their corresponding values. +Those well known names and values follow the same stability as what they refer to. -where `name` is a bare identifier (has no quotes) and each `"value"` term is a quoted literal -string.
`name` specifies the name of the condition, such as `feature` or `target_os`. +Well known values checking is always enabled as long as a `--check-cfg` argument is present. -When the `values(...)` option is specified, `rustc` will check every `#[cfg(name = "value")]` -attribute, `#[cfg_attr(name = "value")]` attribute, `#[link(name = "a", cfg(name = "value"))]` -and `cfg!(name = "value")` call. It will check that the `"value"` specified is present in the -list of expected values. If `"value"` is not in it, then `rustc` will report an `unexpected_cfgs` -lint diagnostic. The default diagnostic level for this lint is `Warn`. +Well known names checking is always enabled as long as a `--check-cfg` argument is present +**unless** any `cfg(any())` argument is passed. -To enable checking of values, but to provide an empty set of valid values, use this form: +To disable checking of well known names, use this form: ```bash -rustc --check-cfg `values(name)` +rustc --check-cfg 'cfg(any())' ``` -The `--check-cfg values(...)` option can be repeated, both for the same condition name and for -different names. If it is repeated for the same condition name, then the sets of values for that -condition are merged together. - -If `values()` is specified, then `rustc` will enable the checking of well-known values defined -by itself. Note that it's necessary to specify the `values()` form to enable the checking of -well known values, specifying the other forms doesn't implicitly enable it. +NOTE: If one wants to enable values and names checking without having any cfg to declare, one +can use an empty `cfg()` argument. ## Examples Consider this command line: ```bash -rustc --check-cfg 'names(feature)' \ - --check-cfg 'values(feature, "lion", "zebra")' \ +rustc --check-cfg 'cfg(feature, values("lion", "zebra"))' \ --cfg 'feature="lion"' -Z unstable-options \ example.rs ``` This command line indicates that this crate has two features: `lion` and `zebra`. The `lion` -feature is enabled, while the `zebra` feature is disabled. Consider compiling this code: +feature is enabled, while the `zebra` feature is disabled. Exhaustive checking of names and +values is enabled by default. Consider compiling this code: ```rust // This is expected, and tame_lion() will be compiled @@ -119,35 +102,36 @@ // and will cause a compiler warning (by default). #[cfg(feechure = "lion")] fn tame_lion() {} -``` -> Note: The `--check-cfg names(feature)` option is necessary only to enable checking the condition -> name, as in the last example. `feature` is a well-known (always-expected) condition name, and so -> it is not necessary to specify it in a `--check-cfg 'names(...)'` option. That option can be -> shortened to > `--check-cfg names()` in order to enable checking well-known condition names. +// This is UNEXPECTED, because 'windows' is a well known condition name, +// and because 'windows' doesn't take any values, +// and will cause a compiler warning (by default). +#[cfg(windows = "unix")] +fn tame_windows() {} +``` ### Example: Checking condition names, but not values ```bash # This turns on checking for condition names, but not values, such as 'feature' values.
-rustc --check-cfg 'names(is_embedded, has_feathers)' \ +rustc --check-cfg 'cfg(is_embedded, has_feathers, values(any()))' \ --cfg has_feathers -Z unstable-options ``` ```rust -#[cfg(is_embedded)] // This is expected as "is_embedded" was provided in names() -fn do_embedded() {} +#[cfg(is_embedded)] // This is expected as "is_embedded" was provided in cfg() +fn do_embedded() {} // and because names exhaustiveness was not disabled -#[cfg(has_feathers)] // This is expected as "has_feathers" was provided in names() -fn do_features() {} +#[cfg(has_feathers)] // This is expected as "has_feathers" was provided in cfg() +fn do_features() {} // and because names exhaustiveness was not disabled -#[cfg(has_feathers = "zapping")] // This is expected as "has_feathers" was provided in names() +#[cfg(has_feathers = "zapping")] // This is expected as "has_feathers" was provided in cfg() // and because no value checking was enable for "has_feathers" // no warning is emitted for the value "zapping" fn do_zapping() {} #[cfg(has_mumble_frotz)] // This is UNEXPECTED because names checking is enable and - // "has_mumble_frotz" was not provided in names() + // "has_mumble_frotz" was not provided in cfg() fn do_mumble_frotz() {} ``` @@ -155,25 +139,25 @@ ```bash # This turns on checking for feature values, but not for condition names. -rustc --check-cfg 'values(feature, "zapping", "lasers")' \ +rustc --check-cfg 'cfg(feature, values("zapping", "lasers"))' \ + --check-cfg 'cfg(any())' \ --cfg 'feature="zapping"' -Z unstable-options ``` ```rust -#[cfg(is_embedded)] // This is doesn't raise a warning, because names checking was not - // enable (ie not names()) +#[cfg(is_embedded)] // This is doesn't raise a warning, because names checking was + // disabled by 'cfg(any())' fn do_embedded() {} -#[cfg(has_feathers)] // Same as above, --check-cfg names(...) was never used so no name +#[cfg(has_feathers)] // Same as above, 'cfg(any())' was provided so no name // checking is performed fn do_features() {} - -#[cfg(feature = "lasers")] // This is expected, "lasers" is in the values(feature) list +#[cfg(feature = "lasers")] // This is expected, "lasers" is in the cfg(feature) list fn shoot_lasers() {} #[cfg(feature = "monkeys")] // This is UNEXPECTED, because "monkeys" is not in the - // --check-cfg values(feature) list + // cfg(feature) list fn write_shakespeare() {} ``` @@ -181,26 +165,92 @@ ```bash # This turns on checking for feature values and for condition names. -rustc --check-cfg 'names(is_embedded, has_feathers)' \ - --check-cfg 'values(feature, "zapping", "lasers")' \ +rustc --check-cfg 'cfg(is_embedded, has_feathers)' \ + --check-cfg 'cfg(feature, values("zapping", "lasers"))' \ --cfg has_feathers --cfg 'feature="zapping"' -Z unstable-options ``` ```rust -#[cfg(is_embedded)] // This is expected because "is_embedded" was provided in names() -fn do_embedded() {} +#[cfg(is_embedded)] // This is expected because "is_embedded" was provided in cfg() +fn do_embedded() {} // and doesn't take any value -#[cfg(has_feathers)] // This is expected because "has_feathers" was provided in names() -fn do_features() {} +#[cfg(has_feathers)] // This is expected because "has_feathers" was provided in cfg() +fn do_features() {} // and deosn't take any value -#[cfg(has_mumble_frotz)] // This is UNEXPECTED, because has_mumble_frotz is not in the - // --check-cfg names(...) 
list +#[cfg(has_mumble_frotz)] // This is UNEXPECTED, because "has_mumble_frotz" was never provided fn do_mumble_frotz() {} -#[cfg(feature = "lasers")] // This is expected, "lasers" is in the values(feature) list +#[cfg(feature = "lasers")] // This is expected, "lasers" is in the cfg(feature) list fn shoot_lasers() {} #[cfg(feature = "monkeys")] // This is UNEXPECTED, because "monkeys" is not in - // the values(feature) list + // the cfg(feature) list fn write_shakespeare() {} ``` + +## The deprecated `names(...)` form + +The `names(...)` form enables checking the names. This form uses a named list: + +```bash +rustc --check-cfg 'names(name1, name2, ... nameN)' +``` + +where each `name` is a bare identifier (has no quotes). The order of the names is not significant. + +If `--check-cfg names(...)` is specified at least once, then `rustc` will check all references to +condition names. `rustc` will check every `#[cfg]` attribute, `#[cfg_attr]` attribute, `cfg` clause +inside `#[link]` attribute and `cfg!(...)` call against the provided list of expected condition +names. If a name is not present in this list, then `rustc` will report an `unexpected_cfgs` lint +diagnostic. The default diagnostic level for this lint is `Warn`. + +If `--check-cfg names(...)` is not specified, then `rustc` will not check references to condition +names. + +`--check-cfg names(...)` may be specified more than once. The result is that the list of valid +condition names is merged across all options. It is legal for a condition name to be specified +more than once; redundantly specifying a condition name has no effect. + +To enable checking condition names with an empty set of valid condition names, use the following +form. The parentheses are required. + +```bash +rustc --check-cfg 'names()' +``` + +Note that `--check-cfg 'names()'` is _not_ equivalent to omitting the option entirely. +The first form enables checking condition names, while specifying that there are no valid +condition names (outside of the set of well-known names defined by `rustc`). Omitting the +`--check-cfg 'names(...)'` option does not enable checking condition names. + +## The deprecated `values(...)` form + +The `values(...)` form enables checking the values within list-valued conditions. It has this +form: + +```bash +rustc --check-cfg `values(name, "value1", "value2", ... "valueN")' +``` + +where `name` is a bare identifier (has no quotes) and each `"value"` term is a quoted literal +string. `name` specifies the name of the condition, such as `feature` or `target_os`. + +When the `values(...)` option is specified, `rustc` will check every `#[cfg(name = "value")]` +attribute, `#[cfg_attr(name = "value")]` attribute, `#[link(name = "a", cfg(name = "value"))]` +and `cfg!(name = "value")` call. It will check that the `"value"` specified is present in the +list of expected values. If `"value"` is not in it, then `rustc` will report an `unexpected_cfgs` +lint diagnostic. The default diagnostic level for this lint is `Warn`. + +To enable checking of values, but to provide an empty set of valid values, use this form: + +```bash +rustc --check-cfg `values(name)` +``` + +The `--check-cfg values(...)` option can be repeated, both for the same condition name and for +different names. If it is repeated for the same condition name, then the sets of values for that +condition are merged together. + +If `values()` is specified, then `rustc` will enable the checking of well-known values defined +by itself. 
Note that it's necessary to specify the `values()` form to enable the checking of +well known values, specifying the other forms doesn't implicitly enable it. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/no-jump-tables.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/no-jump-tables.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/no-jump-tables.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/no-jump-tables.md 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,19 @@ +# `no-jump-tables` + +The tracking issue for this feature is [#116592](https://github.com/rust-lang/rust/issues/116592) + +--- + +This option enables the `-fno-jump-tables` flag for LLVM, which makes the +codegen backend avoid generating jump tables when lowering switches. + +This option adds the LLVM `no-jump-tables=true` attribute to every function. + +The option can be used to help provide protection against +jump-oriented-programming (JOP) attacks, such as with the linux kernel's [IBT]. + +```sh +RUSTFLAGS="-Zno-jump-tables" cargo +nightly build -Z build-std +``` + +[IBT]: https://www.phoronix.com/news/Linux-IBT-By-Default-Tip diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/remap-path-scope.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/remap-path-scope.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/remap-path-scope.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/remap-path-scope.md 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,24 @@ +# `remap-path-scope` + +The tracking issue for this feature is: [#111540](https://github.com/rust-lang/rust/issues/111540). + +------------------------ + +When the `--remap-path-prefix` option is passed to rustc, source path prefixes in all output will be affected by default. +The `--remap-path-scope` argument can be used in conjunction with `--remap-path-prefix` to determine paths in which output context should be affected. +This flag accepts a comma-separated list of values and may be specified multiple times, in which case the scopes are aggregated together. The valid scopes are: + +- `macro` - apply remappings to the expansion of `std::file!()` macro. This is where paths in embedded panic messages come from +- `diagnostics` - apply remappings to printed compiler diagnostics +- `unsplit-debuginfo` - apply remappings to debug information only when they are written to compiled executables or libraries, but not when they are in split debuginfo files +- `split-debuginfo` - apply remappings to debug information only when they are written to split debug information files, but not in compiled executables or libraries +- `split-debuginfo-path` - apply remappings to the paths pointing to split debug information files. Does nothing when these files are not generated. +- `object` - an alias for `macro,unsplit-debuginfo,split-debuginfo-path`. This ensures all paths in compiled executables or libraries are remapped, but not elsewhere. +- `all` and `true` - an alias for all of the above, also equivalent to supplying only `--remap-path-prefix` without `--remap-path-scope`. + +## Example +```sh +# This would produce an absolute path to main.rs in build outputs of +# "./main.rs". 
+rustc --remap-path-prefix=$(PWD)=/remapped -Zremap-path-prefix=object main.rs +``` diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/sanitizer.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/sanitizer.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/sanitizer.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/compiler-flags/sanitizer.md 2023-12-21 16:55:28.000000000 +0000 @@ -197,22 +197,26 @@ # ControlFlowIntegrity -The LLVM Control Flow Integrity (CFI) support in the Rust compiler provides -forward-edge control flow protection for both Rust-compiled code only and for C -or C++ and Rust -compiled code mixed-language binaries, also known as “mixed -binaries” (i.e., for when C or C++ and Rust -compiled code share the same -virtual address space), by aggregating function pointers in groups identified by -their return and parameter types. - -LLVM CFI can be enabled with `-Zsanitizer=cfi` and requires LTO (i.e., `-Clto`). -Cross-language LLVM CFI can be enabled with `-Zsanitizer=cfi`, and requires the -`-Zsanitizer-cfi-normalize-integers` option to be used with Clang -`-fsanitize-cfi-icall-normalize-integers` for normalizing integer types, and -proper (i.e., non-rustc) LTO (i.e., `-Clinker-plugin-lto`). +The LLVM CFI support in the Rust compiler provides forward-edge control flow +protection for both Rust-compiled code only and for C or C++ and Rust -compiled +code mixed-language binaries, also known as “mixed binaries” (i.e., for when C +or C++ and Rust -compiled code share the same virtual address space), by +aggregating function pointers in groups identified by their return and parameter +types. + +LLVM CFI can be enabled with `-Zsanitizer=cfi` and requires LTO (i.e., +`-Clinker-plugin-lto` or `-Clto`). Cross-language LLVM CFI can be enabled with +`-Zsanitizer=cfi`, and requires the `-Zsanitizer-cfi-normalize-integers` option +to be used with Clang `-fsanitize-cfi-icall-experimental-normalize-integers` +option for cross-language LLVM CFI support, and proper (i.e., non-rustc) LTO +(i.e., `-Clinker-plugin-lto`). + +It is recommended to rebuild the standard library with CFI enabled by using the +Cargo build-std feature (i.e., `-Zbuild-std`) when enabling CFI. See the [Clang ControlFlowIntegrity documentation][clang-cfi] for more details. -## Example +## Example 1: Redirecting control flow using an indirect branch/call to an invalid destination ```rust,ignore (making doc tests pass cross-platform is hard) #![feature(naked_functions)] @@ -239,7 +243,7 @@ nop nop nop - lea eax, [edi+2] + lea eax, [rdi+2] ret ", options(noreturn) @@ -258,8 +262,9 @@ println!("With CFI enabled, you should not see the next answer"); let f: fn(i32) -> i32 = unsafe { - // Offsets 0-8 make it land in the landing pad/nop block, and offsets 1-8 are - // invalid branch/call destinations (i.e., within the body of the function). + // Offset 0 is a valid branch/call destination (i.e., the function entry + // point), but offsets 1-8 within the landing pad/nop block are invalid + // branch/call destinations (i.e., within the body of the function). mem::transmute::<*const u8, fn(i32) -> i32>((add_two as *const u8).offset(5)) }; let next_answer = do_twice(f, 5); @@ -267,38 +272,40 @@ println!("The next answer is: {}", next_answer); } ``` -Fig. 1. Modified example from the [Advanced Functions and -Closures][rust-book-ch19-05] chapter of the [The Rust Programming -Language][rust-book] book. 
+Fig. 1. Redirecting control flow using an indirect branch/call to an invalid +destination (i.e., within the body of the function). ```shell $ cargo run --release Compiling rust-cfi-1 v0.1.0 (/home/rcvalle/rust-cfi-1) - Finished release [optimized] target(s) in 0.76s + Finished release [optimized] target(s) in 0.42s Running `target/release/rust-cfi-1` The answer is: 12 With CFI enabled, you should not see the next answer The next answer is: 14 $ ``` -Fig. 2. Build and execution of the modified example with LLVM CFI disabled. +Fig. 2. Build and execution of Fig. 1 with LLVM CFI disabled. ```shell -$ RUSTFLAGS="-Zsanitizer=cfi -Cembed-bitcode=yes -Clto" cargo run --release +$ RUSTFLAGS="-Clinker-plugin-lto -Clinker=clang -Clink-arg=-fuse-ld=lld -Zsanitizer=cfi" cargo run -Zbuild-std -Zbuild-std-features --release --target x86_64-unknown-linux-gnu + ... Compiling rust-cfi-1 v0.1.0 (/home/rcvalle/rust-cfi-1) - Finished release [optimized] target(s) in 3.39s - Running `target/release/rust-cfi-1` + Finished release [optimized] target(s) in 1m 08s + Running `target/x86_64-unknown-linux-gnu/release/rust-cfi-1` The answer is: 12 With CFI enabled, you should not see the next answer Illegal instruction $ ``` -Fig. 3. Build and execution of the modified example with LLVM CFI enabled. +Fig. 3. Build and execution of Fig. 1 with LLVM CFI enabled. When LLVM CFI is enabled, if there are any attempts to change/hijack control flow using an indirect branch/call to an invalid destination, the execution is terminated (see Fig. 3). +## Example 2: Redirecting control flow using an indirect branch/call to a function with a different number of parameters + ```rust use std::mem; @@ -327,39 +334,42 @@ println!("The next answer is: {}", next_answer); } ``` -Fig. 4. Another modified example from the [Advanced Functions and -Closures][rust-book-ch19-05] chapter of the [The Rust Programming -Language][rust-book] book. +Fig. 4. Redirecting control flow using an indirect branch/call to a function +with a different number of parameters than arguments intended/passed in the +call/branch site. ```shell $ cargo run --release Compiling rust-cfi-2 v0.1.0 (/home/rcvalle/rust-cfi-2) - Finished release [optimized] target(s) in 0.76s + Finished release [optimized] target(s) in 0.43s Running `target/release/rust-cfi-2` The answer is: 12 With CFI enabled, you should not see the next answer The next answer is: 14 $ ``` -Fig. 5. Build and execution of the modified example with LLVM CFI disabled. +Fig. 5. Build and execution of Fig. 4 with LLVM CFI disabled. ```shell -$ RUSTFLAGS="-Cembed-bitcode=yes -Clto -Zsanitizer=cfi" cargo run --release +$ RUSTFLAGS="-Clinker-plugin-lto -Clinker=clang -Clink-arg=-fuse-ld=lld -Zsanitizer=cfi" cargo run -Zbuild-std -Zbuild-std-features --release --target x86_64-unknown-linux-gnu + ... Compiling rust-cfi-2 v0.1.0 (/home/rcvalle/rust-cfi-2) - Finished release [optimized] target(s) in 3.38s - Running `target/release/rust-cfi-2` + Finished release [optimized] target(s) in 1m 08s + Running `target/x86_64-unknown-linux-gnu/release/rust-cfi-2` The answer is: 12 With CFI enabled, you should not see the next answer Illegal instruction $ ``` -Fig. 6. Build and execution of the modified example with LLVM CFI enabled. +Fig. 6. Build and execution of Fig. 4 with LLVM CFI enabled. 
When LLVM CFI is enabled, if there are any attempts to change/hijack control flow using an indirect branch/call to a function with different number of parameters than arguments intended/passed in the call/branch site, the execution is also terminated (see Fig. 6). +## Example 3: Redirecting control flow using an indirect branch/call to a function with different return and parameter types + ```rust use std::mem; @@ -388,42 +398,46 @@ println!("The next answer is: {}", next_answer); } ``` -Fig. 7. Another modified example from the [Advanced Functions and -Closures][rust-book-ch19-05] chapter of the [The Rust Programming -Language][rust-book] book. +Fig. 7. Redirecting control flow using an indirect branch/call to a function +with different return and parameter types than the return type expected and +arguments intended/passed at the call/branch site. ```shell $ cargo run --release Compiling rust-cfi-3 v0.1.0 (/home/rcvalle/rust-cfi-3) - Finished release [optimized] target(s) in 0.74s + Finished release [optimized] target(s) in 0.44s Running `target/release/rust-cfi-3` The answer is: 12 With CFI enabled, you should not see the next answer The next answer is: 14 $ ``` -Fig. 8. Build and execution of the modified example with LLVM CFI disabled. +Fig. 8. Build and execution of Fig. 7 with LLVM CFI disabled. ```shell -$ RUSTFLAGS="-Cembed-bitcode=yes -Clto -Zsanitizer=cfi" cargo run --release +$ RUSTFLAGS="-Clinker-plugin-lto -Clinker=clang -Clink-arg=-fuse-ld=lld -Zsanitizer=cfi" cargo run -Zbuild-std -Zbuild-std-features --release --target x86_64-unknown-linux-gnu + ... Compiling rust-cfi-3 v0.1.0 (/home/rcvalle/rust-cfi-3) - Finished release [optimized] target(s) in 3.40s - Running `target/release/rust-cfi-3` + Finished release [optimized] target(s) in 1m 07s + Running `target/x86_64-unknown-linux-gnu/release/rust-cfi-3` The answer is: 12 With CFI enabled, you should not see the next answer Illegal instruction $ ``` -Fig. 9. Build and execution of the modified example with LLVM CFI enabled. +Fig. 9. Build and execution of Fig. 7 with LLVM CFI enabled. When LLVM CFI is enabled, if there are any attempts to change/hijack control flow using an indirect branch/call to a function with different return and parameter types than the return type expected and arguments intended/passed in the call/branch site, the execution is also terminated (see Fig. 9). +## Example 4: Redirecting control flow using an indirect branch/call to a function with different return and parameter types across the FFI boundary + ```ignore (cannot-test-this-because-uses-custom-build) int -do_twice(int (*fn)(int), int arg) { +do_twice(int (*fn)(int), int arg) +{ return fn(arg) + fn(arg); } ``` @@ -459,54 +473,49 @@ println!("The next answer is: {}", next_answer); } ``` -Fig. 11. Another modified example from the [Advanced Functions and -Closures][rust-book-ch19-05] chapter of the [The Rust Programming -Language][rust-book] book. +Fig. 11. Redirecting control flow using an indirect branch/call to a function +with different return and parameter types than the return type expected and +arguments intended/passed in the call/branch site, across the FFI boundary. ```shell $ make -mkdir -p target/debug -clang -I. 
-Isrc -Wall -flto -fvisibility=hidden -c -emit-llvm src/foo.c -o target/debug/libfoo.bc -llvm-ar rcs target/debug/libfoo.a target/debug/libfoo.bc -RUSTFLAGS="-L./target/debug -Clinker-plugin-lto -Clinker=clang -Clink-arg=-fuse-ld=lld" cargo build - Compiling main v0.1.0 (/home/rcvalle/rust-cross-cfi-1) - Finished dev [unoptimized + debuginfo] target(s) in 0.45s -$ ./target/debug/main +mkdir -p target/release +clang -I. -Isrc -Wall -c src/foo.c -o target/release/libfoo.o +llvm-ar rcs target/release/libfoo.a target/release/libfoo.o +RUSTFLAGS="-L./target/release -Clinker=clang -Clink-arg=-fuse-ld=lld" cargo build --release + Compiling rust-cfi-4 v0.1.0 (/home/rcvalle/rust-cfi-4) + Finished release [optimized] target(s) in 0.49s +$ ./target/release/rust-cfi-4 The answer is: 12 With CFI enabled, you should not see the next answer The next answer is: 14 $ ``` -Fig. 12. Build and execution of the modified example with LLVM CFI disabled. +Fig. 12. Build and execution of Figs. 10–11 with LLVM CFI disabled. ```shell $ make -mkdir -p target/debug -clang -I. -Isrc -Wall -flto -fvisibility=hidden -fsanitize=cfi -fsanitize-cfi-icall-normalize-integers -c -emit-llvm src/foo.c -o target/debug/libfoo.bc -llvm-ar rcs target/debug/libfoo.a target/debug/libfoo.bc -RUSTFLAGS="-L./target/debug -Clinker-plugin-lto -Clinker=clang -Clink-arg=-fuse-ld=lld -Zsanitizer=cfi -Zsanitizer-cfi-normalize-integers" cargo build - Compiling main v0.1.0 (/home/rcvalle/rust-cross-cfi-1) - Finished dev [unoptimized + debuginfo] target(s) in 0.45s -$ ./target/debug/main +mkdir -p target/release +clang -I. -Isrc -Wall -flto -fsanitize=cfi -fsanitize-cfi-icall-experimental-normalize-integers -fvisibility=hidden -c -emit-llvm src/foo.c -o target/release/libfoo.bc +llvm-ar rcs target/release/libfoo.a target/release/libfoo.bc +RUSTFLAGS="-L./target/release -Clinker-plugin-lto -Clinker=clang -Clink-arg=-fuse-ld=lld -Zsanitizer=cfi -Zsanitizer-cfi-normalize-integers" cargo build -Zbuild-std -Zbuild-std-features --release --target x86_64-unknown-linux-gnu + ... + Compiling rust-cfi-4 v0.1.0 (/home/rcvalle/rust-cfi-4) + Finished release [optimized] target(s) in 1m 06s +$ ./target/x86_64-unknown-linux-gnu/release/rust-cfi-4 The answer is: 12 With CFI enabled, you should not see the next answer Illegal instruction $ ``` -Fig. 13. Build and execution of the modified example with LLVM CFI enabled. - -When LLVM CFI is enabled, if there are any attempts to change/hijack control -flow using an indirect branch/call to a function with different return and -parameter types than the return type expected and arguments intended/passed in -the call/branch site, even across the FFI boundary and for extern "C" function -types indirectly called (i.e., callbacks/function pointers) across the FFI -boundary, in C or C++ and Rust -compiled code mixed-language binaries, also -known as “mixed binaries” (i.e., for when C or C++ and Rust -compiled code share -the same virtual address space), the execution is also terminated (see Fig. 13). - +Fig. 13. Build and execution of Figs. 10–11 with LLVM CFI enabled.
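The CFI-enabled builds in Figs. 3, 6 and 9 all pass the same Rust-side flags on the command line through `RUSTFLAGS`. As a convenience, equivalent settings can be kept in a `.cargo/config.toml`; the sketch below is an editorial illustration (not part of the upstream documentation) and assumes a nightly toolchain with `clang` and `lld` installed:

```toml
# .cargo/config.toml (illustrative sketch, not from the upstream docs)
[build]
target = "x86_64-unknown-linux-gnu"
rustflags = [
    "-Clinker-plugin-lto",
    "-Clinker=clang",
    "-Clink-arg=-fuse-ld=lld",
    "-Zsanitizer=cfi",
]

# Nightly-only Cargo feature: rebuild the standard library with the same
# flags, mirroring the -Zbuild-std flag used in the figures above.
[unstable]
build-std = ["std"]
```

With this in place, a plain `cargo run --release` should behave like the `RUSTFLAGS=... cargo run -Zbuild-std ...` invocations above; the cross-language build in Fig. 13 additionally needs the Clang-side steps and `-Zsanitizer-cfi-normalize-integers`.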
-[rust-book-ch19-05]: https://doc.rust-lang.org/book/ch19-05-advanced-functions-and-closures.html -[rust-book]: https://doc.rust-lang.org/book/title-page.html +When LLVM CFI is enabled, if there are any attempts to redirect control flow +using an indirect branch/call to a function with different return and parameter +types than the return type expected and arguments intended/passed in the +call/branch site, even across the FFI boundary and for extern "C" function types +indirectly called (i.e., callbacks/function pointers) across the FFI boundary, +the execution is also terminated (see Fig. 13). # HWAddressSanitizer diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/closure-track-caller.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/closure-track-caller.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/closure-track-caller.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/closure-track-caller.md 2023-12-21 16:55:28.000000000 +0000 @@ -6,7 +6,7 @@ ------------------------ -Allows using the `#[track_caller]` attribute on closures and generators. -Calls made to the closure or generator will have caller information +Allows using the `#[track_caller]` attribute on closures and coroutines. +Calls made to the closure or coroutine will have caller information available through `std::panic::Location::caller()`, just like using `#[track_caller]` on a function. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/coroutines.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/coroutines.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/coroutines.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/coroutines.md 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,246 @@ +# `coroutines` + +The tracking issue for this feature is: [#43122] + +[#43122]: https://github.com/rust-lang/rust/issues/43122 + +------------------------ + +The `coroutines` feature gate in Rust allows you to define coroutines, or +coroutine literals. A coroutine is a "resumable function" that syntactically +resembles a closure but compiles to much different semantics in the compiler +itself. The primary feature of a coroutine is that it can be suspended during +execution to be resumed at a later date. Coroutines use the `yield` keyword to +"return", and then the caller can `resume` a coroutine to resume execution just +after the `yield` keyword. + +Coroutines are an extra-unstable feature in the compiler right now. Added in +[RFC 2033] they're mostly intended right now as an information/constraint +gathering phase. The intent is that experimentation can happen on the nightly +compiler before actual stabilization. A further RFC will be required to +stabilize coroutines and will likely contain at least a few small +tweaks to the overall design.
+ +[RFC 2033]: https://github.com/rust-lang/rfcs/pull/2033 + +A syntactical example of a coroutine is: + +```rust +#![feature(coroutines, coroutine_trait)] + +use std::ops::{Coroutine, CoroutineState}; +use std::pin::Pin; + +fn main() { + let mut coroutine = || { + yield 1; + return "foo" + }; + + match Pin::new(&mut coroutine).resume(()) { + CoroutineState::Yielded(1) => {} + _ => panic!("unexpected value from resume"), + } + match Pin::new(&mut coroutine).resume(()) { + CoroutineState::Complete("foo") => {} + _ => panic!("unexpected value from resume"), + } +} +``` + +Coroutines are closure-like literals which can contain a `yield` statement. The +`yield` statement takes an optional expression of a value to yield out of the +coroutine. All coroutine literals implement the `Coroutine` trait in the +`std::ops` module. The `Coroutine` trait has one main method, `resume`, which +resumes execution of the coroutine at the previous suspension point. + +An example of the control flow of coroutines is that the following example +prints all numbers in order: + +```rust +#![feature(coroutines, coroutine_trait)] + +use std::ops::Coroutine; +use std::pin::Pin; + +fn main() { + let mut coroutine = || { + println!("2"); + yield; + println!("4"); + }; + + println!("1"); + Pin::new(&mut coroutine).resume(()); + println!("3"); + Pin::new(&mut coroutine).resume(()); + println!("5"); +} +``` + +At this time the main intended use case of coroutines is an implementation +primitive for async/await syntax, but coroutines will likely be extended to +ergonomic implementations of iterators and other primitives in the future. +Feedback on the design and usage is always appreciated! + +### The `Coroutine` trait + +The `Coroutine` trait in `std::ops` currently looks like: + +```rust +# #![feature(arbitrary_self_types, coroutine_trait)] +# use std::ops::CoroutineState; +# use std::pin::Pin; + +pub trait Coroutine<R = ()> { + type Yield; + type Return; + fn resume(self: Pin<&mut Self>, resume: R) -> CoroutineState<Self::Yield, Self::Return>; +} +``` + +The `Coroutine::Yield` type is the type of values that can be yielded with the +`yield` statement. The `Coroutine::Return` type is the returned type of the +coroutine. This is typically the last expression in a coroutine's definition or +any value passed to `return` in a coroutine. The `resume` function is the entry +point for executing the `Coroutine` itself. + +The return value of `resume`, `CoroutineState`, looks like: + +```rust +pub enum CoroutineState<Y, R> { + Yielded(Y), + Complete(R), +} +``` + +The `Yielded` variant indicates that the coroutine can later be resumed. This +corresponds to a `yield` point in a coroutine. The `Complete` variant indicates +that the coroutine is complete and cannot be resumed again. Calling `resume` +after a coroutine has returned `Complete` will likely result in a panic of the +program. + +### Closure-like semantics + +The closure-like syntax for coroutines alludes to the fact that they also have +closure-like semantics. Namely: + +* When created, a coroutine executes no code. A closure literal does not + actually execute any of the closure's code on construction, and similarly a + coroutine literal does not execute any code inside the coroutine when + constructed. + +* Coroutines can capture outer variables by reference or by move, and this can + be tweaked with the `move` keyword at the beginning of the closure. Like + closures all coroutines will have an implicit environment which is inferred by + the compiler.
Outer variables can be moved into a coroutine for use as the + coroutine progresses. + +* Coroutine literals produce a value with a unique type which implements the + `std::ops::Coroutine` trait. This allows actual execution of the coroutine + through the `Coroutine::resume` method as well as also naming it in return + types and such. + +* Traits like `Send` and `Sync` are automatically implemented for a `Coroutine` + depending on the captured variables of the environment. Unlike closures, + coroutines also depend on variables live across suspension points. This means + that although the ambient environment may be `Send` or `Sync`, the coroutine + itself may not be due to internal variables live across `yield` points being + not-`Send` or not-`Sync`. Note that coroutines do + not implement traits like `Copy` or `Clone` automatically. + +* Whenever a coroutine is dropped it will drop all captured environment + variables. + +### Coroutines as state machines + +In the compiler, coroutines are currently compiled as state machines. Each +`yield` expression will correspond to a different state that stores all live +variables over that suspension point. Resumption of a coroutine will dispatch on +the current state and then execute internally until a `yield` is reached, at +which point all state is saved off in the coroutine and a value is returned. + +Let's take a look at an example to see what's going on here: + +```rust +#![feature(coroutines, coroutine_trait)] + +use std::ops::Coroutine; +use std::pin::Pin; + +fn main() { + let ret = "foo"; + let mut coroutine = move || { + yield 1; + return ret + }; + + Pin::new(&mut coroutine).resume(()); + Pin::new(&mut coroutine).resume(()); +} +``` + +This coroutine literal will compile down to something similar to: + +```rust +#![feature(arbitrary_self_types, coroutines, coroutine_trait)] + +use std::ops::{Coroutine, CoroutineState}; +use std::pin::Pin; + +fn main() { + let ret = "foo"; + let mut coroutine = { + enum __Coroutine { + Start(&'static str), + Yield1(&'static str), + Done, + } + + impl Coroutine for __Coroutine { + type Yield = i32; + type Return = &'static str; + + fn resume(mut self: Pin<&mut Self>, resume: ()) -> CoroutineState<i32, &'static str> { + use std::mem; + match mem::replace(&mut *self, __Coroutine::Done) { + __Coroutine::Start(s) => { + *self = __Coroutine::Yield1(s); + CoroutineState::Yielded(1) + } + + __Coroutine::Yield1(s) => { + *self = __Coroutine::Done; + CoroutineState::Complete(s) + } + + __Coroutine::Done => { + panic!("coroutine resumed after completion") + } + } + } + } + + __Coroutine::Start(ret) + }; + + Pin::new(&mut coroutine).resume(()); + Pin::new(&mut coroutine).resume(()); +} +``` + +Notably here we can see that the compiler is generating a fresh type, +`__Coroutine` in this case. This type has a number of states (represented here +as an `enum`) corresponding to each of the conceptual states of the coroutine. +At the beginning we're closing over our outer variable `ret` and then that +variable is also live over the `yield` point, so it's stored in both states. + +When the coroutine starts it'll immediately yield 1, but it saves off its state +just before it does so indicating that it has reached the yield point. Upon +resuming again we'll execute the `return ret` which returns the `Complete` +state. + +Here we can also note that the `Done` state, if resumed, panics immediately as +it's invalid to resume a completed coroutine.
It's also worth noting that this +is just a rough desugaring, not a normative specification for what the compiler +does. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/diagnostic-namespace.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/diagnostic-namespace.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/diagnostic-namespace.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/diagnostic-namespace.md 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,84 @@ +# `diagnostic_namespace` + +The tracking issue for this feature is: [#111996] + +[#111996]: https://github.com/rust-lang/rust/issues/111996 + +------------------------ + +The `diagnostic_namespace` feature permits customization of compilation errors. + +## diagnostic::on_unimplemented + +With [#114452] support for `diagnostic::on_unimplemented` was added. + +When used on a trait declaration, the following options are available: + +* `message` to customize the primary error message +* `note` to add a customized note message to an error message +* `label` to customize the label part of the error message + +The attribute will hint to the compiler to use these in error messages: +```rust +// some library +#![feature(diagnostic_namespace)] + +#[diagnostic::on_unimplemented( + message = "cannot insert element", + label = "cannot be put into a table", + note = "see <link> for more information about the Table api" +)] +pub trait Element { + // ... +} +``` + +```rust,compile_fail,E0277 +# #![feature(diagnostic_namespace)] +# +# #[diagnostic::on_unimplemented( +# message = "cannot insert element", +# label = "cannot be put into a table", +# note = "see <link> for more information about the Table api" +# )] +# pub trait Element { +# // ... +# } +# struct Table; +# impl Table { +# fn insert<T: Element>(&self, element: T) { +# // .. +# } +# } +# fn main() { +# let table = Table; +# let element = (); +// user code table.insert(element); +# } +``` + +```text +error[E0277]: cannot insert element + --> src/main.rs:24:18 + | +24 | table.insert(element); + | ------ ^^^^^^^ cannot be put into a table + | | + | required by a bound introduced by this call + | + = help: the trait `Element` is not implemented for `` + = note: see <link> for more information about the Table api +note: required by a bound in `Table::insert` + --> src/main.rs:15:18 + | +15 | fn insert<T: Element>(&self, element: T) { + | ^^^^^^^ required by this bound in `Table::insert` + +For more information about this error, try `rustc --explain E0277`. +``` + +See [RFC 3368] for more information. + +[#114452]: https://github.com/rust-lang/rust/pull/114452 +[RFC 3368]: https://github.com/rust-lang/rfcs/blob/master/text/3368-diagnostic-attribute-namespace.md diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/generators.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/generators.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/generators.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/generators.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,246 +0,0 @@ -# `generators` - -The tracking issue for this feature is: [#43122] - -[#43122]: https://github.com/rust-lang/rust/issues/43122 - ------------------------- - -The `generators` feature gate in Rust allows you to define generator or -coroutine literals.
A generator is a "resumable function" that syntactically -resembles a closure but compiles to much different semantics in the compiler -itself. The primary feature of a generator is that it can be suspended during -execution to be resumed at a later date. Generators use the `yield` keyword to -"return", and then the caller can `resume` a generator to resume execution just -after the `yield` keyword. - -Generators are an extra-unstable feature in the compiler right now. Added in -[RFC 2033] they're mostly intended right now as a information/constraint -gathering phase. The intent is that experimentation can happen on the nightly -compiler before actual stabilization. A further RFC will be required to -stabilize generators/coroutines and will likely contain at least a few small -tweaks to the overall design. - -[RFC 2033]: https://github.com/rust-lang/rfcs/pull/2033 - -A syntactical example of a generator is: - -```rust -#![feature(generators, generator_trait)] - -use std::ops::{Generator, GeneratorState}; -use std::pin::Pin; - -fn main() { - let mut generator = || { - yield 1; - return "foo" - }; - - match Pin::new(&mut generator).resume(()) { - GeneratorState::Yielded(1) => {} - _ => panic!("unexpected value from resume"), - } - match Pin::new(&mut generator).resume(()) { - GeneratorState::Complete("foo") => {} - _ => panic!("unexpected value from resume"), - } -} -``` - -Generators are closure-like literals which can contain a `yield` statement. The -`yield` statement takes an optional expression of a value to yield out of the -generator. All generator literals implement the `Generator` trait in the -`std::ops` module. The `Generator` trait has one main method, `resume`, which -resumes execution of the generator at the previous suspension point. - -An example of the control flow of generators is that the following example -prints all numbers in order: - -```rust -#![feature(generators, generator_trait)] - -use std::ops::Generator; -use std::pin::Pin; - -fn main() { - let mut generator = || { - println!("2"); - yield; - println!("4"); - }; - - println!("1"); - Pin::new(&mut generator).resume(()); - println!("3"); - Pin::new(&mut generator).resume(()); - println!("5"); -} -``` - -At this time the main intended use case of generators is an implementation -primitive for async/await syntax, but generators will likely be extended to -ergonomic implementations of iterators and other primitives in the future. -Feedback on the design and usage is always appreciated! - -### The `Generator` trait - -The `Generator` trait in `std::ops` currently looks like: - -```rust -# #![feature(arbitrary_self_types, generator_trait)] -# use std::ops::GeneratorState; -# use std::pin::Pin; - -pub trait Generator { - type Yield; - type Return; - fn resume(self: Pin<&mut Self>, resume: R) -> GeneratorState; -} -``` - -The `Generator::Yield` type is the type of values that can be yielded with the -`yield` statement. The `Generator::Return` type is the returned type of the -generator. This is typically the last expression in a generator's definition or -any value passed to `return` in a generator. The `resume` function is the entry -point for executing the `Generator` itself. - -The return value of `resume`, `GeneratorState`, looks like: - -```rust -pub enum GeneratorState { - Yielded(Y), - Complete(R), -} -``` - -The `Yielded` variant indicates that the generator can later be resumed. This -corresponds to a `yield` point in a generator. 
The `Complete` variant indicates -that the generator is complete and cannot be resumed again. Calling `resume` -after a generator has returned `Complete` will likely result in a panic of the -program. - -### Closure-like semantics - -The closure-like syntax for generators alludes to the fact that they also have -closure-like semantics. Namely: - -* When created, a generator executes no code. A closure literal does not - actually execute any of the closure's code on construction, and similarly a - generator literal does not execute any code inside the generator when - constructed. - -* Generators can capture outer variables by reference or by move, and this can - be tweaked with the `move` keyword at the beginning of the closure. Like - closures all generators will have an implicit environment which is inferred by - the compiler. Outer variables can be moved into a generator for use as the - generator progresses. - -* Generator literals produce a value with a unique type which implements the - `std::ops::Generator` trait. This allows actual execution of the generator - through the `Generator::resume` method as well as also naming it in return - types and such. - -* Traits like `Send` and `Sync` are automatically implemented for a `Generator` - depending on the captured variables of the environment. Unlike closures, - generators also depend on variables live across suspension points. This means - that although the ambient environment may be `Send` or `Sync`, the generator - itself may not be due to internal variables live across `yield` points being - not-`Send` or not-`Sync`. Note that generators do - not implement traits like `Copy` or `Clone` automatically. - -* Whenever a generator is dropped it will drop all captured environment - variables. - -### Generators as state machines - -In the compiler, generators are currently compiled as state machines. Each -`yield` expression will correspond to a different state that stores all live -variables over that suspension point. Resumption of a generator will dispatch on -the current state and then execute internally until a `yield` is reached, at -which point all state is saved off in the generator and a value is returned. 
- -Let's take a look at an example to see what's going on here: - -```rust -#![feature(generators, generator_trait)] - -use std::ops::Generator; -use std::pin::Pin; - -fn main() { - let ret = "foo"; - let mut generator = move || { - yield 1; - return ret - }; - - Pin::new(&mut generator).resume(()); - Pin::new(&mut generator).resume(()); -} -``` - -This generator literal will compile down to something similar to: - -```rust -#![feature(arbitrary_self_types, generators, generator_trait)] - -use std::ops::{Generator, GeneratorState}; -use std::pin::Pin; - -fn main() { - let ret = "foo"; - let mut generator = { - enum __Generator { - Start(&'static str), - Yield1(&'static str), - Done, - } - - impl Generator for __Generator { - type Yield = i32; - type Return = &'static str; - - fn resume(mut self: Pin<&mut Self>, resume: ()) -> GeneratorState { - use std::mem; - match mem::replace(&mut *self, __Generator::Done) { - __Generator::Start(s) => { - *self = __Generator::Yield1(s); - GeneratorState::Yielded(1) - } - - __Generator::Yield1(s) => { - *self = __Generator::Done; - GeneratorState::Complete(s) - } - - __Generator::Done => { - panic!("generator resumed after completion") - } - } - } - } - - __Generator::Start(ret) - }; - - Pin::new(&mut generator).resume(()); - Pin::new(&mut generator).resume(()); -} -``` - -Notably here we can see that the compiler is generating a fresh type, -`__Generator` in this case. This type has a number of states (represented here -as an `enum`) corresponding to each of the conceptual states of the generator. -At the beginning we're closing over our outer variable `foo` and then that -variable is also live over the `yield` point, so it's stored in both states. - -When the generator starts it'll immediately yield 1, but it saves off its state -just before it does so indicating that it has reached the yield point. Upon -resuming again we'll execute the `return ret` which returns the `Complete` -state. - -Here we can also note that the `Done` state, if resumed, panics immediately as -it's invalid to resume a completed generator. It's also worth noting that this -is just a rough desugaring, not a normative specification for what the compiler -does. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/plugin.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/plugin.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/plugin.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/plugin.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,114 +0,0 @@ -# `plugin` - -The tracking issue for this feature is: [#29597] - -[#29597]: https://github.com/rust-lang/rust/issues/29597 - - -This feature is part of "compiler plugins." It will often be used with the -`rustc_private` feature. - ------------------------- - -`rustc` can load compiler plugins, which are user-provided libraries that -extend the compiler's behavior with new lint checks, etc. - -A plugin is a dynamic library crate with a designated *registrar* function that -registers extensions with `rustc`. Other crates can load these extensions using -the crate attribute `#![plugin(...)]`. See the -`rustc_driver::plugin` documentation for more about the -mechanics of defining and loading a plugin. - -In the vast majority of cases, a plugin should *only* be used through -`#![plugin]` and not through an `extern crate` item. 
Linking a plugin would -pull in all of librustc_ast and librustc as dependencies of your crate. This is -generally unwanted unless you are building another plugin. - -The usual practice is to put compiler plugins in their own crate, separate from -any `macro_rules!` macros or ordinary Rust code meant to be used by consumers -of a library. - -# Lint plugins - -Plugins can extend [Rust's lint -infrastructure](../../reference/attributes/diagnostics.md#lint-check-attributes) with -additional checks for code style, safety, etc. Now let's write a plugin -[`lint-plugin-test.rs`](https://github.com/rust-lang/rust/blob/master/tests/ui-fulldeps/auxiliary/lint-plugin-test.rs) -that warns about any item named `lintme`. - -```rust,ignore (requires-stage-2) -#![feature(rustc_private)] - -extern crate rustc_ast; - -// Load rustc as a plugin to get macros -extern crate rustc_driver; -extern crate rustc_lint; -#[macro_use] -extern crate rustc_session; - -use rustc_ast::ast; -use rustc_driver::plugin::Registry; -use rustc_lint::{EarlyContext, EarlyLintPass, LintContext}; - -declare_lint!(TEST_LINT, Warn, "Warn about items named 'lintme'"); - -declare_lint_pass!(Pass => [TEST_LINT]); - -impl EarlyLintPass for Pass { - fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) { - if it.ident.name.as_str() == "lintme" { - cx.lint(TEST_LINT, "item is named 'lintme'", |lint| lint.set_span(it.span)); - } - } -} - -#[no_mangle] -fn __rustc_plugin_registrar(reg: &mut Registry) { - reg.lint_store.register_lints(&[&TEST_LINT]); - reg.lint_store.register_early_pass(|| Box::new(Pass)); -} -``` - -Then code like - -```rust,ignore (requires-plugin) -#![feature(plugin)] -#![plugin(lint_plugin_test)] - -fn lintme() { } -``` - -will produce a compiler warning: - -```txt -foo.rs:4:1: 4:16 warning: item is named 'lintme', #[warn(test_lint)] on by default -foo.rs:4 fn lintme() { } - ^~~~~~~~~~~~~~~ -``` - -The components of a lint plugin are: - -* one or more `declare_lint!` invocations, which define static `Lint` structs; - -* a struct holding any state needed by the lint pass (here, none); - -* a `LintPass` - implementation defining how to check each syntax element. A single - `LintPass` may call `span_lint` for several different `Lint`s, but should - register them all through the `get_lints` method. - -Lint passes are syntax traversals, but they run at a late stage of compilation -where type information is available. `rustc`'s [built-in -lints](https://github.com/rust-lang/rust/blob/master/compiler/rustc_lint_defs/src/builtin.rs) -mostly use the same infrastructure as lint plugins, and provide examples of how -to access type information. - -Lints defined by plugins are controlled by the usual [attributes and compiler -flags](../../reference/attributes/diagnostics.md#lint-check-attributes), e.g. -`#[allow(test_lint)]` or `-A test-lint`. These identifiers are derived from the -first argument to `declare_lint!`, with appropriate case and punctuation -conversion. - -You can run `rustc -W help foo.rs` to see a list of lints known to `rustc`, -including those provided by plugins loaded by `foo.rs`. 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/string-deref-patterns.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/string-deref-patterns.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/string-deref-patterns.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/language-features/string-deref-patterns.md 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,45 @@ +# `string_deref_patterns` + +The tracking issue for this feature is: [#87121] + +[#87121]: https://github.com/rust-lang/rust/issues/87121 + +------------------------ + +This feature permits pattern matching `String` to `&str` through [its `Deref` implementation]. + +```rust +#![feature(string_deref_patterns)] + +pub enum Value { + String(String), + Number(u32), +} + +pub fn is_it_the_answer(value: Value) -> bool { + match value { + Value::String("42") => true, + Value::Number(42) => true, + _ => false, + } +} +``` + +Without this feature other constructs such as match guards have to be used. + +```rust +# pub enum Value { +# String(String), +# Number(u32), +# } +# +pub fn is_it_the_answer(value: Value) -> bool { + match value { + Value::String(s) if s == "42" => true, + Value::Number(42) => true, + _ => false, + } +} +``` + +[its `Deref` implementation]: https://doc.rust-lang.org/std/string/struct.String.html#impl-Deref-for-String diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/the-unstable-book.md rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/the-unstable-book.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/the-unstable-book.md 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/doc/unstable-book/src/the-unstable-book.md 2023-12-21 16:55:28.000000000 +0000 @@ -5,31 +5,31 @@ feature of Rust, you must use a flag, like this: ```rust -#![feature(generators, generator_trait)] +#![feature(coroutines, coroutine_trait)] -use std::ops::{Generator, GeneratorState}; +use std::ops::{Coroutine, CoroutineState}; use std::pin::Pin; fn main() { - let mut generator = || { + let mut coroutine = || { yield 1; return "foo" }; - match Pin::new(&mut generator).resume(()) { - GeneratorState::Yielded(1) => {} + match Pin::new(&mut coroutine).resume(()) { + CoroutineState::Yielded(1) => {} _ => panic!("unexpected value from resume"), } - match Pin::new(&mut generator).resume(()) { - GeneratorState::Complete("foo") => {} + match Pin::new(&mut coroutine).resume(()) { + CoroutineState::Complete("foo") => {} _ => panic!("unexpected value from resume"), } } ``` -The `generators` feature [has a chapter][generators] describing how to use it. +The `coroutines` feature [has a chapter][coroutines] describing how to use it. -[generators]: language-features/generators.md +[coroutines]: language-features/coroutines.md Because this documentation relates to unstable features, we make no guarantees that what is contained here is accurate or up to date. 
It's developed on a diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.fish rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.fish --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.fish 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.fish 2023-12-21 16:55:28.000000000 +0000 @@ -12,10 +12,10 @@ complete -c x.py -n "__fish_use_subcommand" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_use_subcommand" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_use_subcommand" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_use_subcommand" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_use_subcommand" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_use_subcommand" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_use_subcommand" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_use_subcommand" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_use_subcommand" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_use_subcommand" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_use_subcommand" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_use_subcommand" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_use_subcommand" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -27,6 +27,8 @@ complete -c x.py -n "__fish_use_subcommand" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_use_subcommand" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_use_subcommand" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_use_subcommand" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_use_subcommand" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_use_subcommand" -s h -l help -d 'Print help' complete -c x.py -n "__fish_use_subcommand" -f -a "build" -d 'Compile either the compiler or libraries' complete -c x.py -n "__fish_use_subcommand" -f -a "check" -d 'Compile either the compiler or libraries, using cargo check' @@ -56,10 +58,10 @@ complete -c x.py -n "__fish_seen_subcommand_from build" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from build" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n 
"__fish_seen_subcommand_from build" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from build" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from build" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from build" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from build" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from build" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from build" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from build" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from build" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from build" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from build" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -71,6 +73,8 @@ complete -c x.py -n "__fish_seen_subcommand_from build" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from build" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from build" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from build" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from build" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from build" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from check" -l config -d 'TOML configuration file for build' -r -F complete -c x.py -n "__fish_seen_subcommand_from check" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" @@ -86,10 +90,10 @@ complete -c x.py -n "__fish_seen_subcommand_from check" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from check" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from check" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from check" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from check" -l warnings -d 'if value is deny, will deny warnings if value is warn, 
will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from check" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from check" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from check" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from check" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from check" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from check" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from check" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from check" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -102,6 +106,8 @@ complete -c x.py -n "__fish_seen_subcommand_from check" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from check" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from check" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from check" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from check" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from check" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from clippy" -s A -d 'clippy lints to allow' -r complete -c x.py -n "__fish_seen_subcommand_from clippy" -s D -d 'clippy lints to deny' -r @@ -121,10 +127,10 @@ complete -c x.py -n "__fish_seen_subcommand_from clippy" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from clippy" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from clippy" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from clippy" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from clippy" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from clippy" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from clippy" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from clippy" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true 
,false }" +complete -c x.py -n "__fish_seen_subcommand_from clippy" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from clippy" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from clippy" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from clippy" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from clippy" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -137,6 +143,8 @@ complete -c x.py -n "__fish_seen_subcommand_from clippy" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from clippy" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from clippy" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from clippy" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from clippy" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from clippy" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from fix" -l config -d 'TOML configuration file for build' -r -F complete -c x.py -n "__fish_seen_subcommand_from fix" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" @@ -152,10 +160,10 @@ complete -c x.py -n "__fish_seen_subcommand_from fix" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from fix" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from fix" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from fix" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from fix" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from fix" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from fix" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from fix" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from fix" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from fix" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from fix" -l rust-profile-generate -d 'generate PGO 
profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from fix" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from fix" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -167,6 +175,8 @@ complete -c x.py -n "__fish_seen_subcommand_from fix" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from fix" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from fix" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from fix" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from fix" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from fix" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from fmt" -l config -d 'TOML configuration file for build' -r -F complete -c x.py -n "__fish_seen_subcommand_from fmt" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" @@ -182,10 +192,10 @@ complete -c x.py -n "__fish_seen_subcommand_from fmt" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from fmt" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from fmt" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from fmt" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from fmt" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from fmt" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from fmt" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from fmt" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from fmt" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from fmt" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from fmt" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from fmt" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from fmt" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -198,6 +208,8 @@ complete -c x.py -n "__fish_seen_subcommand_from fmt" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from fmt" -l json-output -d 'use message-format=json' complete 
-c x.py -n "__fish_seen_subcommand_from fmt" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from fmt" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from fmt" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from fmt" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from doc" -l config -d 'TOML configuration file for build' -r -F complete -c x.py -n "__fish_seen_subcommand_from doc" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" @@ -213,10 +225,10 @@ complete -c x.py -n "__fish_seen_subcommand_from doc" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from doc" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from doc" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from doc" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from doc" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from doc" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from doc" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from doc" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from doc" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from doc" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from doc" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from doc" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from doc" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -230,6 +242,8 @@ complete -c x.py -n "__fish_seen_subcommand_from doc" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from doc" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from doc" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from doc" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from doc" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from doc" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n 
"__fish_seen_subcommand_from test" -l skip -d 'skips tests matching SUBSTRING, if supported by test tool. May be passed multiple times' -r -F complete -c x.py -n "__fish_seen_subcommand_from test" -l test-args -d 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)' -r @@ -251,10 +265,10 @@ complete -c x.py -n "__fish_seen_subcommand_from test" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from test" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from test" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from test" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from test" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from test" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from test" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from test" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from test" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from test" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from test" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from test" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from test" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -273,6 +287,8 @@ complete -c x.py -n "__fish_seen_subcommand_from test" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from test" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from test" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from test" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from test" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from test" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from bench" -l test-args -r complete -c x.py -n "__fish_seen_subcommand_from bench" -l config -d 'TOML configuration file for build' -r -F @@ -289,10 +305,10 @@ complete -c x.py -n "__fish_seen_subcommand_from bench" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n 
"__fish_seen_subcommand_from bench" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from bench" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from bench" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from bench" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from bench" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from bench" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from bench" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from bench" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from bench" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from bench" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from bench" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from bench" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -304,6 +320,8 @@ complete -c x.py -n "__fish_seen_subcommand_from bench" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from bench" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from bench" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from bench" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from bench" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from bench" -s h -l help -d 'Print help' complete -c x.py -n "__fish_seen_subcommand_from clean" -l stage -d 'Clean a specific stage without touching other artifacts. 
By default, every stage is cleaned if this option is not used' -r complete -c x.py -n "__fish_seen_subcommand_from clean" -l config -d 'TOML configuration file for build' -r -F @@ -319,10 +337,10 @@ complete -c x.py -n "__fish_seen_subcommand_from clean" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from clean" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from clean" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from clean" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from clean" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from clean" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from clean" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from clean" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from clean" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from clean" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from clean" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from clean" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from clean" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -335,6 +353,8 @@ complete -c x.py -n "__fish_seen_subcommand_from clean" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from clean" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from clean" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from clean" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from clean" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from clean" -s h -l help -d 'Print help' complete -c x.py -n "__fish_seen_subcommand_from dist" -l config -d 'TOML configuration file for build' -r -F complete -c x.py -n "__fish_seen_subcommand_from dist" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" @@ -350,10 +370,10 @@ complete -c x.py -n "__fish_seen_subcommand_from dist" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from dist" -l src -d 'path to the 
root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from dist" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from dist" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from dist" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from dist" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from dist" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from dist" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from dist" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from dist" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from dist" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from dist" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from dist" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -365,6 +385,8 @@ complete -c x.py -n "__fish_seen_subcommand_from dist" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from dist" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from dist" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from dist" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from dist" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from dist" -s h -l help -d 'Print help' complete -c x.py -n "__fish_seen_subcommand_from install" -l config -d 'TOML configuration file for build' -r -F complete -c x.py -n "__fish_seen_subcommand_from install" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" @@ -380,10 +402,10 @@ complete -c x.py -n "__fish_seen_subcommand_from install" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from install" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from install" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from install" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from install" -l warnings 
-d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from install" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from install" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from install" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from install" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from install" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from install" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from install" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from install" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -395,6 +417,8 @@ complete -c x.py -n "__fish_seen_subcommand_from install" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from install" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from install" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from install" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from install" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from install" -s h -l help -d 'Print help' complete -c x.py -n "__fish_seen_subcommand_from run" -l args -d 'arguments for the tool' -r complete -c x.py -n "__fish_seen_subcommand_from run" -l config -d 'TOML configuration file for build' -r -F @@ -411,10 +435,10 @@ complete -c x.py -n "__fish_seen_subcommand_from run" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from run" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from run" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from run" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from run" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from run" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from run" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from run" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, 
overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from run" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from run" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from run" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from run" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from run" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -426,6 +450,8 @@ complete -c x.py -n "__fish_seen_subcommand_from run" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from run" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from run" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from run" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from run" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from run" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from setup" -l config -d 'TOML configuration file for build' -r -F complete -c x.py -n "__fish_seen_subcommand_from setup" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" @@ -441,10 +467,10 @@ complete -c x.py -n "__fish_seen_subcommand_from setup" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from setup" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from setup" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from setup" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from setup" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from setup" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from setup" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from setup" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from setup" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from setup" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from 
setup" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from setup" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from setup" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -456,6 +482,8 @@ complete -c x.py -n "__fish_seen_subcommand_from setup" -l dry-run -d 'dry run; don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from setup" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from setup" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from setup" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from setup" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from setup" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from suggest" -l config -d 'TOML configuration file for build' -r -F complete -c x.py -n "__fish_seen_subcommand_from suggest" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" @@ -471,10 +499,10 @@ complete -c x.py -n "__fish_seen_subcommand_from suggest" -l keep-stage-std -d 'stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)' -r -f complete -c x.py -n "__fish_seen_subcommand_from suggest" -l src -d 'path to the root of the rust checkout' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from suggest" -s j -l jobs -d 'number of jobs to run in parallel' -r -f -complete -c x.py -n "__fish_seen_subcommand_from suggest" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny ,warn ,default }" +complete -c x.py -n "__fish_seen_subcommand_from suggest" -l warnings -d 'if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour' -r -f -a "{deny '',warn '',default ''}" complete -c x.py -n "__fish_seen_subcommand_from suggest" -l error-format -d 'rustc error format' -r -f -complete -c x.py -n "__fish_seen_subcommand_from suggest" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always ,never ,auto }" -complete -c x.py -n "__fish_seen_subcommand_from suggest" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true ,false }" +complete -c x.py -n "__fish_seen_subcommand_from suggest" -l color -d 'whether to use color in cargo and rustc output' -r -f -a "{always '',never '',auto ''}" +complete -c x.py -n "__fish_seen_subcommand_from suggest" -l llvm-skip-rebuild -d 'whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml' -r -f -a "{true '',false ''}" complete -c x.py -n "__fish_seen_subcommand_from suggest" -l rust-profile-generate -d 'generate PGO profile with rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from suggest" -l rust-profile-use -d 'use PGO profile for rustc build' -r -F complete -c x.py -n "__fish_seen_subcommand_from suggest" -l llvm-profile-use -d 'use PGO profile for LLVM build' -r -F @@ -487,4 +515,6 @@ complete -c x.py -n "__fish_seen_subcommand_from suggest" -l dry-run -d 'dry run; 
don\'t build anything' complete -c x.py -n "__fish_seen_subcommand_from suggest" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from suggest" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' +complete -c x.py -n "__fish_seen_subcommand_from suggest" -l enable-bolt-settings -d 'Enable BOLT link flags' +complete -c x.py -n "__fish_seen_subcommand_from suggest" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from suggest" -s h -l help -d 'Print help (see more with \'--help\')' diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.ps1 rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.ps1 --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.ps1 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.ps1 2023-12-21 16:55:28.000000000 +0000 @@ -53,6 +53,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help') [CompletionResult]::new('build', 'build', [CompletionResultType]::ParameterValue, 'Compile either the compiler or libraries') @@ -104,6 +106,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') break @@ -142,15 +146,17 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + 
[CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') break } 'x.py;clippy' { - [CompletionResult]::new('-A', 'A', [CompletionResultType]::ParameterName, 'clippy lints to allow') - [CompletionResult]::new('-D', 'D', [CompletionResultType]::ParameterName, 'clippy lints to deny') - [CompletionResult]::new('-W', 'W', [CompletionResultType]::ParameterName, 'clippy lints to warn on') - [CompletionResult]::new('-F', 'F', [CompletionResultType]::ParameterName, 'clippy lints to forbid') + [CompletionResult]::new('-A', 'A ', [CompletionResultType]::ParameterName, 'clippy lints to allow') + [CompletionResult]::new('-D', 'D ', [CompletionResultType]::ParameterName, 'clippy lints to deny') + [CompletionResult]::new('-W', 'W ', [CompletionResultType]::ParameterName, 'clippy lints to warn on') + [CompletionResult]::new('-F', 'F ', [CompletionResultType]::ParameterName, 'clippy lints to forbid') [CompletionResult]::new('--config', 'config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') [CompletionResult]::new('--build-dir', 'build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') [CompletionResult]::new('--build', 'build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') @@ -184,6 +190,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') break @@ -221,6 +229,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') 
break @@ -259,6 +269,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') break @@ -298,6 +310,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') break @@ -348,6 +362,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') break @@ -386,6 +402,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', 
[CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help') break @@ -424,6 +442,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help') break @@ -461,6 +481,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help') break @@ -498,6 +520,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help') break @@ -536,6 +560,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT 
link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') break @@ -573,6 +599,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') break @@ -611,6 +639,8 @@ [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') + [CompletionResult]::new('--enable-bolt-settings', 'enable-bolt-settings', [CompletionResultType]::ParameterName, 'Enable BOLT link flags') + [CompletionResult]::new('--skip-stage0-validation', 'skip-stage0-validation', [CompletionResultType]::ParameterName, 'Skip stage0 compiler validation') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') break diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.sh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.sh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.sh 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.sh 2023-12-21 16:55:28.000000000 +0000 @@ -61,7 +61,7 @@ case "${cmd}" in x.py) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]... 
build check clippy fix fmt doc test bench clean dist install run setup suggest" + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]... build check clippy fix fmt doc test bench clean dist install run setup suggest" if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -171,7 +171,7 @@ return 0 ;; x.py__bench) - opts="-v -i -j -h --test-args --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --test-args --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -285,7 +285,7 @@ return 0 ;; x.py__build) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -395,7 +395,7 @@ return 0 ;; x.py__check) - opts="-v -i -j -h --all-targets --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
+ opts="-v -i -j -h --all-targets --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -505,7 +505,7 @@ return 0 ;; x.py__clean) - opts="-v -i -j -h --all --stage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --all --stage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -615,7 +615,7 @@ return 0 ;; x.py__clippy) - opts="-A -D -W -F -v -i -j -h --fix --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-A -D -W -F -v -i -j -h --fix --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -741,7 +741,7 @@ return 0 ;; x.py__dist) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
+ opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -851,7 +851,7 @@ return 0 ;; x.py__doc) - opts="-v -i -j -h --open --json --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --open --json --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -961,7 +961,7 @@ return 0 ;; x.py__fix) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1071,7 +1071,7 @@ return 0 ;; x.py__fmt) - opts="-v -i -j -h --check --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
+ opts="-v -i -j -h --check --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1181,7 +1181,7 @@ return 0 ;; x.py__install) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1291,7 +1291,7 @@ return 0 ;; x.py__run) - opts="-v -i -j -h --args --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --args --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1405,7 +1405,7 @@ return 0 ;; x.py__setup) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [|hook|vscode|link] [PATHS]... [ARGS]..." 
+ opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [|hook|vscode|link] [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1515,7 +1515,7 @@ return 0 ;; x.py__suggest) - opts="-v -i -j -h --run --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --run --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1625,7 +1625,7 @@ return 0 ;; x.py__test) - opts="-v -i -j -h --no-fail-fast --skip --test-args --rustc-args --no-doc --doc --bless --extra-checks --force-rerun --only-modified --compare-mode --pass --run --rustfix-coverage --verbose --incremental --config --build-dir --build --host --target --exclude --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --no-fail-fast --skip --test-args --rustc-args --no-doc --doc --bless --extra-checks --force-rerun --only-modified --compare-mode --pass --run --rustfix-coverage --verbose --incremental --config --build-dir --build --host --target --exclude --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1761,4 +1761,4 @@ esac } -complete -F _x.py -o bashdefault -o default x.py +complete -F _x.py -o nosort -o bashdefault -o default x.py diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.zsh rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.zsh --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.zsh 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/completions/x.py.zsh 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,766 @@ +#compdef x.py + +autoload -U is-at-least + +_x.py() { + typeset -A opt_args + typeset -a _arguments_options + local ret=1 + + if is-at-least 5.2; then + _arguments_options=(-s -S -C) + else + _arguments_options=(-s -C) + fi + + local context curcontext="$curcontext" state line + _arguments "${_arguments_options[@]}" \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help]' \ +'--help[Print help]' \ 
+'::paths -- paths for the subcommand:_files' \ +'::free_args -- arguments passed to subcommands:' \ +":: :_x.py_commands" \ +"*::: :->bootstrap" \ +&& ret=0 + case $state in + (bootstrap) + words=($line[3] "${words[@]}") + (( CURRENT += 1 )) + curcontext="${curcontext%:*:*}:x.py-command-$line[3]:" + case $line[3] in + (build) +_arguments "${_arguments_options[@]}" \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(check) +_arguments "${_arguments_options[@]}" \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ 
+'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'--all-targets[Check all targets]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(clippy) +_arguments "${_arguments_options[@]}" \ +'*-A+[clippy lints to allow]:LINT: ' \ +'*-D+[clippy lints to deny]:LINT: ' \ +'*-W+[clippy lints to warn on]:LINT: ' \ +'*-F+[clippy lints to forbid]:LINT: ' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., 
both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'--fix[]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(fix) +_arguments "${_arguments_options[@]}" \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ 
+'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(fmt) +_arguments "${_arguments_options[@]}" \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'--check[check formatting instead of applying]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ 
+'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(doc) +_arguments "${_arguments_options[@]}" \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'--open[open the docs in a browser]' \ +'--json[render the documentation in JSON format in addition to the usual HTML format]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(test) +_arguments "${_arguments_options[@]}" \ +'*--skip=[skips tests matching SUBSTRING, if supported by test tool. 
May be passed multiple times]:SUBSTRING:_files' \ +'*--test-args=[extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)]:ARGS: ' \ +'*--rustc-args=[extra options to pass the compiler when running tests]:ARGS: ' \ +'--extra-checks=[comma-separated list of other files types to check (accepts py, py\:lint, py\:fmt, shell)]:EXTRA_CHECKS: ' \ +'--compare-mode=[mode describing what file the actual ui output will be compared to]:COMPARE MODE: ' \ +'--pass=[force {check,build,run}-pass tests to this mode]:check | build | run: ' \ +'--run=[whether to execute run-* tests]:auto | always | never: ' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'--no-fail-fast[run all tests regardless of failure]' \ +'--no-doc[do not run doc tests]' \ +'--doc[only run doc tests]' \ +'--bless[whether to automatically update stderr/stdout files]' \ +'--force-rerun[rerun tests even if the inputs are unchanged]' \ +'--only-modified[only run tests that result has been changed]' \ +'--rustfix-coverage[enable this to generate a Rustfix coverage file, which is saved in \`//rustfix_missing_coverage.txt\`]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link 
flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(bench) +_arguments "${_arguments_options[@]}" \ +'*--test-args=[]:TEST_ARGS: ' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help]' \ +'--help[Print help]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(clean) +_arguments "${_arguments_options[@]}" \ +'--stage=[Clean a specific stage without touching other artifacts. 
By default, every stage is cleaned if this option is not used]:N: ' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'--all[Clean the entire build directory (not used by default)]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help]' \ +'--help[Print help]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(dist) +_arguments "${_arguments_options[@]}" \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) 
of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help]' \ +'--help[Print help]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(install) +_arguments "${_arguments_options[@]}" \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc 
build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help]' \ +'--help[Print help]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(run) +_arguments "${_arguments_options[@]}" \ +'*--args=[arguments for the tool]:ARGS: ' \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ 
+'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(setup) +_arguments "${_arguments_options[@]}" \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'::profile -- Either the profile for `config.toml` or another setup action. 
May be omitted to set up interactively:_files' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; +(suggest) +_arguments "${_arguments_options[@]}" \ +'--config=[TOML configuration file for build]:FILE:_files' \ +'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ +'--build=[build target of the stage0 compiler]:BUILD:( )' \ +'--host=[host targets to build]:HOST:( )' \ +'--target=[target targets to build]:TARGET:( )' \ +'*--exclude=[build paths to exclude]:PATH:_files' \ +'*--skip=[build paths to skip]:PATH:_files' \ +'--rustc-error-format=[]:RUSTC_ERROR_FORMAT:( )' \ +'--on-fail=[command to run on failure]:CMD:_cmdstring' \ +'--stage=[stage to build (indicates compiler to use/test, e.g., stage 0 uses the bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)]:N:( )' \ +'*--keep-stage=[stage(s) to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'*--keep-stage-std=[stage(s) of the standard library to keep without recompiling (pass multiple times to keep e.g., both stages 0 and 1)]:N:( )' \ +'--src=[path to the root of the rust checkout]:DIR:_files -/' \ +'-j+[number of jobs to run in parallel]:JOBS:( )' \ +'--jobs=[number of jobs to run in parallel]:JOBS:( )' \ +'--warnings=[if value is deny, will deny warnings if value is warn, will emit warnings otherwise, use the default configured behaviour]:deny|warn:(deny warn default)' \ +'--error-format=[rustc error format]:FORMAT:( )' \ +'--color=[whether to use color in cargo and rustc output]:STYLE:(always never auto)' \ +'--llvm-skip-rebuild=[whether rebuilding llvm should be skipped, overriding \`skip-rebuld\` in config.toml]:VALUE:(true false)' \ +'--rust-profile-generate=[generate PGO profile with rustc build]:PROFILE:_files' \ +'--rust-profile-use=[use PGO profile for rustc build]:PROFILE:_files' \ +'--llvm-profile-use=[use PGO profile for LLVM build]:PROFILE:_files' \ +'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \ +'*--set=[override options in config.toml]:section.option=value:( )' \ +'--run[run suggested tests]' \ +'*-v[use verbose output (-vv for very verbose)]' \ +'*--verbose[use verbose output (-vv for very verbose)]' \ +'-i[use incremental compilation]' \ +'--incremental[use incremental compilation]' \ +'--include-default-paths[include default paths in addition to the provided ones]' \ +'--dry-run[dry run; don'\''t build anything]' \ +'--json-output[use message-format=json]' \ +'--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ +'--enable-bolt-settings[Enable BOLT link flags]' \ +'--skip-stage0-validation[Skip stage0 compiler validation]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +'*::paths -- paths for the subcommand:_files' \ +&& ret=0 +;; + esac + ;; +esac +} + +(( $+functions[_x.py_commands] )) || +_x.py_commands() { + local commands; commands=( +'build:Compile either the compiler or libraries' \ +'check:Compile either the compiler or libraries, using cargo check' \ +'clippy:Run Clippy (uses rustup/cargo-installed clippy binary)' \ +'fix:Run cargo fix' \ +'fmt:Run rustfmt' \ +'doc:Build documentation' \ +'test:Build and run some test suites' \ +'bench:Build and run some benchmarks' \ +'clean:Clean out build directories' \ +'dist:Build distribution artifacts' \ +'install:Install distribution artifacts' \ +'run:Run tools contained in 
this repository' \ +'setup:Set up the environment for development' \ +'suggest:Suggest a subset of tests to run, based on modified files' \ + ) + _describe -t commands 'x.py commands' commands "$@" +} +(( $+functions[_x.py__bench_commands] )) || +_x.py__bench_commands() { + local commands; commands=() + _describe -t commands 'x.py bench commands' commands "$@" +} +(( $+functions[_x.py__build_commands] )) || +_x.py__build_commands() { + local commands; commands=() + _describe -t commands 'x.py build commands' commands "$@" +} +(( $+functions[_x.py__check_commands] )) || +_x.py__check_commands() { + local commands; commands=() + _describe -t commands 'x.py check commands' commands "$@" +} +(( $+functions[_x.py__clean_commands] )) || +_x.py__clean_commands() { + local commands; commands=() + _describe -t commands 'x.py clean commands' commands "$@" +} +(( $+functions[_x.py__clippy_commands] )) || +_x.py__clippy_commands() { + local commands; commands=() + _describe -t commands 'x.py clippy commands' commands "$@" +} +(( $+functions[_x.py__dist_commands] )) || +_x.py__dist_commands() { + local commands; commands=() + _describe -t commands 'x.py dist commands' commands "$@" +} +(( $+functions[_x.py__doc_commands] )) || +_x.py__doc_commands() { + local commands; commands=() + _describe -t commands 'x.py doc commands' commands "$@" +} +(( $+functions[_x.py__fix_commands] )) || +_x.py__fix_commands() { + local commands; commands=() + _describe -t commands 'x.py fix commands' commands "$@" +} +(( $+functions[_x.py__fmt_commands] )) || +_x.py__fmt_commands() { + local commands; commands=() + _describe -t commands 'x.py fmt commands' commands "$@" +} +(( $+functions[_x.py__install_commands] )) || +_x.py__install_commands() { + local commands; commands=() + _describe -t commands 'x.py install commands' commands "$@" +} +(( $+functions[_x.py__run_commands] )) || +_x.py__run_commands() { + local commands; commands=() + _describe -t commands 'x.py run commands' commands "$@" +} +(( $+functions[_x.py__setup_commands] )) || +_x.py__setup_commands() { + local commands; commands=() + _describe -t commands 'x.py setup commands' commands "$@" +} +(( $+functions[_x.py__suggest_commands] )) || +_x.py__suggest_commands() { + local commands; commands=() + _describe -t commands 'x.py suggest commands' commands "$@" +} +(( $+functions[_x.py__test_commands] )) || +_x.py__test_commands() { + local commands; commands=() + _describe -t commands 'x.py test commands' commands "$@" +} + +if [ "$funcstack[1]" = "_x.py" ]; then + _x.py "$@" +else + compdef _x.py x.py +fi diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/gdb_lookup.py rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/gdb_lookup.py --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/gdb_lookup.py 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/gdb_lookup.py 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,5 @@ import gdb +import gdb.printing import re from gdb_providers import * @@ -9,7 +10,7 @@ gdb_version = [int(num) for num in _gdb_version_matched.groups()] if _gdb_version_matched else [] def register_printers(objfile): - objfile.pretty_printers.append(lookup) + objfile.pretty_printers.append(printer) # BACKCOMPAT: rust 1.35 @@ -38,58 +39,80 @@ return True -def lookup(valobj): - rust_type = classify_rust_type(valobj.type) +# Helper for enum printing that checks the discriminant. Only used in +# older gdb. 
+def enum_provider(valobj): + if check_enum_discriminant(valobj): + return EnumProvider(valobj) + return None - if rust_type == RustType.ENUM: - # use enum provider only for GDB <7.12 - if gdb_version[0] < 7 or (gdb_version[0] == 7 and gdb_version[1] < 12): - if check_enum_discriminant(valobj): - return EnumProvider(valobj) - - if rust_type == RustType.STD_STRING: - return StdStringProvider(valobj) - if rust_type == RustType.STD_OS_STRING: - return StdOsStringProvider(valobj) - if rust_type == RustType.STD_STR: - return StdStrProvider(valobj) - if rust_type == RustType.STD_SLICE: - return StdSliceProvider(valobj) - if rust_type == RustType.STD_VEC: - return StdVecProvider(valobj) - if rust_type == RustType.STD_VEC_DEQUE: - return StdVecDequeProvider(valobj) - if rust_type == RustType.STD_BTREE_SET: - return StdBTreeSetProvider(valobj) - if rust_type == RustType.STD_BTREE_MAP: - return StdBTreeMapProvider(valobj) - if rust_type == RustType.STD_HASH_MAP: - if is_hashbrown_hashmap(valobj): - return StdHashMapProvider(valobj) - else: - return StdOldHashMapProvider(valobj) - if rust_type == RustType.STD_HASH_SET: - hash_map = valobj[valobj.type.fields()[0]] - if is_hashbrown_hashmap(hash_map): - return StdHashMapProvider(valobj, show_values=False) - else: - return StdOldHashMapProvider(hash_map, show_values=False) - - if rust_type == RustType.STD_RC: - return StdRcProvider(valobj) - if rust_type == RustType.STD_ARC: - return StdRcProvider(valobj, is_atomic=True) - - if rust_type == RustType.STD_CELL: - return StdCellProvider(valobj) - if rust_type == RustType.STD_REF: - return StdRefProvider(valobj) - if rust_type == RustType.STD_REF_MUT: - return StdRefProvider(valobj) - if rust_type == RustType.STD_REF_CELL: - return StdRefCellProvider(valobj) - if rust_type == RustType.STD_NONZERO_NUMBER: - return StdNonZeroNumberProvider(valobj) +# Helper to handle both old and new hash maps. +def hashmap_provider(valobj): + if is_hashbrown_hashmap(valobj): + return StdHashMapProvider(valobj) + else: + return StdOldHashMapProvider(valobj) + + +# Helper to handle both old and new hash sets. +def hashset_provider(valobj): + hash_map = valobj[valobj.type.fields()[0]] + if is_hashbrown_hashmap(hash_map): + return StdHashMapProvider(valobj, show_values=False) + else: + return StdOldHashMapProvider(hash_map, show_values=False) + + +class PrintByRustType(gdb.printing.SubPrettyPrinter): + def __init__(self, name, provider): + super(PrintByRustType, self).__init__(name) + self.provider = provider + + def __call__(self, val): + if self.enabled: + return self.provider(val) + return None + + +class RustPrettyPrinter(gdb.printing.PrettyPrinter): + def __init__(self, name): + super(RustPrettyPrinter, self).__init__(name, []) + self.type_map = {} + + def add(self, rust_type, provider): + # Just use the rust_type as the name. 
+ printer = PrintByRustType(rust_type, provider) + self.type_map[rust_type] = printer + self.subprinters.append(printer) + + def __call__(self, valobj): + rust_type = classify_rust_type(valobj.type) + if rust_type in self.type_map: + return self.type_map[rust_type](valobj) + return None + + +printer = RustPrettyPrinter("rust") +# use enum provider only for GDB <7.12 +if gdb_version[0] < 7 or (gdb_version[0] == 7 and gdb_version[1] < 12): + printer.add(RustType.ENUM, enum_provider) +printer.add(RustType.STD_STRING, StdStringProvider) +printer.add(RustType.STD_OS_STRING, StdOsStringProvider) +printer.add(RustType.STD_STR, StdStrProvider) +printer.add(RustType.STD_SLICE, StdSliceProvider) +printer.add(RustType.STD_VEC, StdVecProvider) +printer.add(RustType.STD_VEC_DEQUE, StdVecDequeProvider) +printer.add(RustType.STD_BTREE_SET, StdBTreeSetProvider) +printer.add(RustType.STD_BTREE_MAP, StdBTreeMapProvider) +printer.add(RustType.STD_HASH_MAP, hashmap_provider) +printer.add(RustType.STD_HASH_SET, hashset_provider) +printer.add(RustType.STD_RC, StdRcProvider) +printer.add(RustType.STD_ARC, lambda valobj: StdRcProvider(valobj, is_atomic=True)) + +printer.add(RustType.STD_CELL, StdCellProvider) +printer.add(RustType.STD_REF, StdRefProvider) +printer.add(RustType.STD_REF_MUT, StdRefProvider) +printer.add(RustType.STD_REF_CELL, StdRefCellProvider) - return None +printer.add(RustType.STD_NONZERO_NUMBER, StdNonZeroNumberProvider) diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/gdb_providers.py rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/gdb_providers.py --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/gdb_providers.py 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/gdb_providers.py 2023-12-21 16:55:28.000000000 +0000 @@ -18,70 +18,79 @@ return ptr if ptr.type.code == gdb.TYPE_CODE_PTR else ptr[ptr.type.fields()[0]] -class EnumProvider: +# GDB 14 has a tag class that indicates that extension methods are ok +# to call. Use of this tag only requires that printers hide local +# attributes and methods by prefixing them with "_". 
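
For reference, the pattern adopted by gdb_lookup.py above and gdb_providers.py below can be summarised in a small standalone sketch. Only gdb, gdb.printing and gdb.ValuePrinter are real GDB APIs; FooProvider, the type name "Foo", and all other names here are hypothetical, used purely for illustration, and are not part of this patch.

import gdb
import gdb.printing

# GDB 14+ provides gdb.ValuePrinter; older releases fall back to a plain
# object base, as the hunk below does. Printers built on ValuePrinter keep
# their own state in "_"-prefixed attributes.
printer_base = gdb.ValuePrinter if hasattr(gdb, "ValuePrinter") else object

class FooProvider(printer_base):
    # Hypothetical provider for a made-up type "Foo".
    def __init__(self, valobj):
        self._valobj = valobj

    def to_string(self):
        return "Foo({})".format(self._valobj.type)

    def children(self):
        # Yield (name, value) pairs; providers may also implement
        # display_hint() to influence how children are rendered.
        for field in self._valobj.type.fields():
            yield field.name, self._valobj[field.name]

class FooSubPrinter(gdb.printing.SubPrettyPrinter):
    # Wrapping each provider in a SubPrettyPrinter makes it visible to
    # "info pretty-printer" and individually toggleable.
    def __init__(self, name, provider):
        super(FooSubPrinter, self).__init__(name)
        self._provider = provider

    def __call__(self, val):
        return self._provider(val) if self.enabled else None

class FooPrettyPrinter(gdb.printing.PrettyPrinter):
    # Top-level printer that dispatches on the type name.
    def __init__(self):
        super(FooPrettyPrinter, self).__init__("foo", [])
        self._sub = FooSubPrinter("Foo", FooProvider)
        self.subprinters.append(self._sub)

    def __call__(self, valobj):
        return self._sub(valobj) if valobj.type.name == "Foo" else None

def register_printers(objfile):
    # gdb_lookup.py appends to objfile.pretty_printers directly; the
    # gdb.printing helper used here is an equivalent, slightly higher-level call.
    gdb.printing.register_pretty_printer(objfile, FooPrettyPrinter(), replace=True)

Splitting dispatch (the top-level printer) from per-type rendering (the providers) is what lets the hunk below migrate every provider onto the ValuePrinter base without touching the lookup logic again.
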
+if hasattr(gdb, 'ValuePrinter'): + printer_base = gdb.ValuePrinter +else: + printer_base = object + + +class EnumProvider(printer_base): def __init__(self, valobj): content = valobj[valobj.type.fields()[0]] fields = content.type.fields() - self.empty = len(fields) == 0 - if not self.empty: + self._empty = len(fields) == 0 + if not self._empty: if len(fields) == 1: discriminant = 0 else: discriminant = int(content[fields[0]]) + 1 - self.active_variant = content[fields[discriminant]] - self.name = fields[discriminant].name - self.full_name = "{}::{}".format(valobj.type.name, self.name) + self._active_variant = content[fields[discriminant]] + self._name = fields[discriminant].name + self._full_name = "{}::{}".format(valobj.type.name, self._name) else: - self.full_name = valobj.type.name + self._full_name = valobj.type.name def to_string(self): - return self.full_name + return self._full_name def children(self): - if not self.empty: - yield self.name, self.active_variant + if not self._empty: + yield self._name, self._active_variant -class StdStringProvider: +class StdStringProvider(printer_base): def __init__(self, valobj): - self.valobj = valobj + self._valobj = valobj vec = valobj["vec"] - self.length = int(vec["len"]) - self.data_ptr = unwrap_unique_or_non_null(vec["buf"]["ptr"]) + self._length = int(vec["len"]) + self._data_ptr = unwrap_unique_or_non_null(vec["buf"]["ptr"]) def to_string(self): - return self.data_ptr.lazy_string(encoding="utf-8", length=self.length) + return self._data_ptr.lazy_string(encoding="utf-8", length=self._length) @staticmethod def display_hint(): return "string" -class StdOsStringProvider: +class StdOsStringProvider(printer_base): def __init__(self, valobj): - self.valobj = valobj - buf = self.valobj["inner"]["inner"] + self._valobj = valobj + buf = self._valobj["inner"]["inner"] is_windows = "Wtf8Buf" in buf.type.name vec = buf[ZERO_FIELD] if is_windows else buf - self.length = int(vec["len"]) - self.data_ptr = unwrap_unique_or_non_null(vec["buf"]["ptr"]) + self._length = int(vec["len"]) + self._data_ptr = unwrap_unique_or_non_null(vec["buf"]["ptr"]) def to_string(self): - return self.data_ptr.lazy_string(encoding="utf-8", length=self.length) + return self._data_ptr.lazy_string(encoding="utf-8", length=self._length) def display_hint(self): return "string" -class StdStrProvider: +class StdStrProvider(printer_base): def __init__(self, valobj): - self.valobj = valobj - self.length = int(valobj["length"]) - self.data_ptr = valobj["data_ptr"] + self._valobj = valobj + self._length = int(valobj["length"]) + self._data_ptr = valobj["data_ptr"] def to_string(self): - return self.data_ptr.lazy_string(encoding="utf-8", length=self.length) + return self._data_ptr.lazy_string(encoding="utf-8", length=self._length) @staticmethod def display_hint(): @@ -103,36 +112,36 @@ yield key, element -class StdSliceProvider: +class StdSliceProvider(printer_base): def __init__(self, valobj): - self.valobj = valobj - self.length = int(valobj["length"]) - self.data_ptr = valobj["data_ptr"] + self._valobj = valobj + self._length = int(valobj["length"]) + self._data_ptr = valobj["data_ptr"] def to_string(self): - return "{}(size={})".format(self.valobj.type, self.length) + return "{}(size={})".format(self._valobj.type, self._length) def children(self): return _enumerate_array_elements( - self.data_ptr + index for index in xrange(self.length) + self._data_ptr + index for index in xrange(self._length) ) @staticmethod def display_hint(): return "array" -class StdVecProvider: +class 
StdVecProvider(printer_base): def __init__(self, valobj): - self.valobj = valobj - self.length = int(valobj["len"]) - self.data_ptr = unwrap_unique_or_non_null(valobj["buf"]["ptr"]) + self._valobj = valobj + self._length = int(valobj["len"]) + self._data_ptr = unwrap_unique_or_non_null(valobj["buf"]["ptr"]) def to_string(self): - return "Vec(size={})".format(self.length) + return "Vec(size={})".format(self._length) def children(self): return _enumerate_array_elements( - self.data_ptr + index for index in xrange(self.length) + self._data_ptr + index for index in xrange(self._length) ) @staticmethod @@ -140,20 +149,20 @@ return "array" -class StdVecDequeProvider: +class StdVecDequeProvider(printer_base): def __init__(self, valobj): - self.valobj = valobj - self.head = int(valobj["head"]) - self.size = int(valobj["len"]) - self.cap = int(valobj["buf"]["cap"]) - self.data_ptr = unwrap_unique_or_non_null(valobj["buf"]["ptr"]) + self._valobj = valobj + self._head = int(valobj["head"]) + self._size = int(valobj["len"]) + self._cap = int(valobj["buf"]["cap"]) + self._data_ptr = unwrap_unique_or_non_null(valobj["buf"]["ptr"]) def to_string(self): - return "VecDeque(size={})".format(self.size) + return "VecDeque(size={})".format(self._size) def children(self): return _enumerate_array_elements( - (self.data_ptr + ((self.head + index) % self.cap)) for index in xrange(self.size) + (self._data_ptr + ((self._head + index) % self._cap)) for index in xrange(self._size) ) @staticmethod @@ -161,81 +170,81 @@ return "array" -class StdRcProvider: +class StdRcProvider(printer_base): def __init__(self, valobj, is_atomic=False): - self.valobj = valobj - self.is_atomic = is_atomic - self.ptr = unwrap_unique_or_non_null(valobj["ptr"]) - self.value = self.ptr["data" if is_atomic else "value"] - self.strong = self.ptr["strong"]["v" if is_atomic else "value"]["value"] - self.weak = self.ptr["weak"]["v" if is_atomic else "value"]["value"] - 1 + self._valobj = valobj + self._is_atomic = is_atomic + self._ptr = unwrap_unique_or_non_null(valobj["ptr"]) + self._value = self._ptr["data" if is_atomic else "value"] + self._strong = self._ptr["strong"]["v" if is_atomic else "value"]["value"] + self._weak = self._ptr["weak"]["v" if is_atomic else "value"]["value"] - 1 def to_string(self): - if self.is_atomic: - return "Arc(strong={}, weak={})".format(int(self.strong), int(self.weak)) + if self._is_atomic: + return "Arc(strong={}, weak={})".format(int(self._strong), int(self._weak)) else: - return "Rc(strong={}, weak={})".format(int(self.strong), int(self.weak)) + return "Rc(strong={}, weak={})".format(int(self._strong), int(self._weak)) def children(self): - yield "value", self.value - yield "strong", self.strong - yield "weak", self.weak + yield "value", self._value + yield "strong", self._strong + yield "weak", self._weak -class StdCellProvider: +class StdCellProvider(printer_base): def __init__(self, valobj): - self.value = valobj["value"]["value"] + self._value = valobj["value"]["value"] def to_string(self): return "Cell" def children(self): - yield "value", self.value + yield "value", self._value -class StdRefProvider: +class StdRefProvider(printer_base): def __init__(self, valobj): - self.value = valobj["value"].dereference() - self.borrow = valobj["borrow"]["borrow"]["value"]["value"] + self._value = valobj["value"].dereference() + self._borrow = valobj["borrow"]["borrow"]["value"]["value"] def to_string(self): - borrow = int(self.borrow) + borrow = int(self._borrow) if borrow >= 0: return "Ref(borrow={})".format(borrow) 
else: return "Ref(borrow_mut={})".format(-borrow) def children(self): - yield "*value", self.value - yield "borrow", self.borrow + yield "*value", self._value + yield "borrow", self._borrow -class StdRefCellProvider: +class StdRefCellProvider(printer_base): def __init__(self, valobj): - self.value = valobj["value"]["value"] - self.borrow = valobj["borrow"]["value"]["value"] + self._value = valobj["value"]["value"] + self._borrow = valobj["borrow"]["value"]["value"] def to_string(self): - borrow = int(self.borrow) + borrow = int(self._borrow) if borrow >= 0: return "RefCell(borrow={})".format(borrow) else: return "RefCell(borrow_mut={})".format(-borrow) def children(self): - yield "value", self.value - yield "borrow", self.borrow + yield "value", self._value + yield "borrow", self._borrow -class StdNonZeroNumberProvider: +class StdNonZeroNumberProvider(printer_base): def __init__(self, valobj): fields = valobj.type.fields() assert len(fields) == 1 field = list(fields)[0] - self.value = str(valobj[field.name]) + self._value = str(valobj[field.name]) def to_string(self): - return self.value + return self._value # Yields children (in a provider's sense of the word) for a BTreeMap. @@ -280,15 +289,15 @@ yield child -class StdBTreeSetProvider: +class StdBTreeSetProvider(printer_base): def __init__(self, valobj): - self.valobj = valobj + self._valobj = valobj def to_string(self): - return "BTreeSet(size={})".format(self.valobj["map"]["length"]) + return "BTreeSet(size={})".format(self._valobj["map"]["length"]) def children(self): - inner_map = self.valobj["map"] + inner_map = self._valobj["map"] for i, (child, _) in enumerate(children_of_btree_map(inner_map)): yield "[{}]".format(i), child @@ -297,15 +306,15 @@ return "array" -class StdBTreeMapProvider: +class StdBTreeMapProvider(printer_base): def __init__(self, valobj): - self.valobj = valobj + self._valobj = valobj def to_string(self): - return "BTreeMap(size={})".format(self.valobj["length"]) + return "BTreeMap(size={})".format(self._valobj["length"]) def children(self): - for i, (key, val) in enumerate(children_of_btree_map(self.valobj)): + for i, (key, val) in enumerate(children_of_btree_map(self._valobj)): yield "key{}".format(i), key yield "val{}".format(i), val @@ -315,124 +324,124 @@ # BACKCOMPAT: rust 1.35 -class StdOldHashMapProvider: +class StdOldHashMapProvider(printer_base): def __init__(self, valobj, show_values=True): - self.valobj = valobj - self.show_values = show_values + self._valobj = valobj + self._show_values = show_values - self.table = self.valobj["table"] - self.size = int(self.table["size"]) - self.hashes = self.table["hashes"] - self.hash_uint_type = self.hashes.type - self.hash_uint_size = self.hashes.type.sizeof - self.modulo = 2 ** self.hash_uint_size - self.data_ptr = self.hashes[ZERO_FIELD]["pointer"] - - self.capacity_mask = int(self.table["capacity_mask"]) - self.capacity = (self.capacity_mask + 1) % self.modulo - - marker = self.table["marker"].type - self.pair_type = marker.template_argument(0) - self.pair_type_size = self.pair_type.sizeof - - self.valid_indices = [] - for idx in range(self.capacity): - data_ptr = self.data_ptr.cast(self.hash_uint_type.pointer()) + self._table = self._valobj["table"] + self._size = int(self._table["size"]) + self._hashes = self._table["hashes"] + self._hash_uint_type = self._hashes.type + self._hash_uint_size = self._hashes.type.sizeof + self._modulo = 2 ** self._hash_uint_size + self._data_ptr = self._hashes[ZERO_FIELD]["pointer"] + + self._capacity_mask = 
int(self._table["capacity_mask"]) + self._capacity = (self._capacity_mask + 1) % self._modulo + + marker = self._table["marker"].type + self._pair_type = marker.template_argument(0) + self._pair_type_size = self._pair_type.sizeof + + self._valid_indices = [] + for idx in range(self._capacity): + data_ptr = self._data_ptr.cast(self._hash_uint_type.pointer()) address = data_ptr + idx hash_uint = address.dereference() hash_ptr = hash_uint[ZERO_FIELD]["pointer"] if int(hash_ptr) != 0: - self.valid_indices.append(idx) + self._valid_indices.append(idx) def to_string(self): - if self.show_values: - return "HashMap(size={})".format(self.size) + if self._show_values: + return "HashMap(size={})".format(self._size) else: - return "HashSet(size={})".format(self.size) + return "HashSet(size={})".format(self._size) def children(self): - start = int(self.data_ptr) & ~1 + start = int(self._data_ptr) & ~1 - hashes = self.hash_uint_size * self.capacity - align = self.pair_type_size - len_rounded_up = (((((hashes + align) % self.modulo - 1) % self.modulo) & ~( - (align - 1) % self.modulo)) % self.modulo - hashes) % self.modulo + hashes = self._hash_uint_size * self._capacity + align = self._pair_type_size + len_rounded_up = (((((hashes + align) % self._modulo - 1) % self._modulo) & ~( + (align - 1) % self._modulo)) % self._modulo - hashes) % self._modulo pairs_offset = hashes + len_rounded_up - pairs_start = gdb.Value(start + pairs_offset).cast(self.pair_type.pointer()) + pairs_start = gdb.Value(start + pairs_offset).cast(self._pair_type.pointer()) - for index in range(self.size): - table_index = self.valid_indices[index] - idx = table_index & self.capacity_mask + for index in range(self._size): + table_index = self._valid_indices[index] + idx = table_index & self._capacity_mask element = (pairs_start + idx).dereference() - if self.show_values: + if self._show_values: yield "key{}".format(index), element[ZERO_FIELD] yield "val{}".format(index), element[FIRST_FIELD] else: yield "[{}]".format(index), element[ZERO_FIELD] def display_hint(self): - return "map" if self.show_values else "array" + return "map" if self._show_values else "array" -class StdHashMapProvider: +class StdHashMapProvider(printer_base): def __init__(self, valobj, show_values=True): - self.valobj = valobj - self.show_values = show_values + self._valobj = valobj + self._show_values = show_values - table = self.table() + table = self._table() table_inner = table["table"] capacity = int(table_inner["bucket_mask"]) + 1 ctrl = table_inner["ctrl"]["pointer"] - self.size = int(table_inner["items"]) - self.pair_type = table.type.template_argument(0).strip_typedefs() + self._size = int(table_inner["items"]) + self._pair_type = table.type.template_argument(0).strip_typedefs() - self.new_layout = not table_inner.type.has_key("data") - if self.new_layout: - self.data_ptr = ctrl.cast(self.pair_type.pointer()) + self._new_layout = not table_inner.type.has_key("data") + if self._new_layout: + self._data_ptr = ctrl.cast(self._pair_type.pointer()) else: - self.data_ptr = table_inner["data"]["pointer"] + self._data_ptr = table_inner["data"]["pointer"] - self.valid_indices = [] + self._valid_indices = [] for idx in range(capacity): address = ctrl + idx value = address.dereference() is_presented = value & 128 == 0 if is_presented: - self.valid_indices.append(idx) + self._valid_indices.append(idx) - def table(self): - if self.show_values: - hashbrown_hashmap = self.valobj["base"] - elif self.valobj.type.fields()[0].name == "map": + def _table(self): + if 
self._show_values: + hashbrown_hashmap = self._valobj["base"] + elif self._valobj.type.fields()[0].name == "map": # BACKCOMPAT: rust 1.47 # HashSet wraps std::collections::HashMap, which wraps hashbrown::HashMap - hashbrown_hashmap = self.valobj["map"]["base"] + hashbrown_hashmap = self._valobj["map"]["base"] else: # HashSet wraps hashbrown::HashSet, which wraps hashbrown::HashMap - hashbrown_hashmap = self.valobj["base"]["map"] + hashbrown_hashmap = self._valobj["base"]["map"] return hashbrown_hashmap["table"] def to_string(self): - if self.show_values: - return "HashMap(size={})".format(self.size) + if self._show_values: + return "HashMap(size={})".format(self._size) else: - return "HashSet(size={})".format(self.size) + return "HashSet(size={})".format(self._size) def children(self): - pairs_start = self.data_ptr + pairs_start = self._data_ptr - for index in range(self.size): - idx = self.valid_indices[index] - if self.new_layout: + for index in range(self._size): + idx = self._valid_indices[index] + if self._new_layout: idx = -(idx + 1) element = (pairs_start + idx).dereference() - if self.show_values: + if self._show_values: yield "key{}".format(index), element[ZERO_FIELD] yield "val{}".format(index), element[FIRST_FIELD] else: yield "[{}]".format(index), element[ZERO_FIELD] def display_hint(self): - return "map" if self.show_values else "array" + return "map" if self._show_values else "array" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/test-float-parse/Cargo.lock rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/test-float-parse/Cargo.lock --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/test-float-parse/Cargo.lock 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/test-float-parse/Cargo.lock 2023-12-21 16:55:28.000000000 +0000 @@ -0,0 +1,75 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "getrandom" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "libc" +version = "0.2.147" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "test-float-parse" +version = "0.1.0" +dependencies = [ + "rand", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/test-float-parse/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/test-float-parse/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/etc/test-float-parse/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/etc/test-float-parse/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -8,7 +8,7 @@ resolver = "1" [dependencies] -rand = "0.4" +rand = "0.8" [lib] name = "test_float_parse" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/Cargo.toml 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/Cargo.toml 2023-12-21 16:55:28.000000000 +0000 @@ -10,7 +10,8 @@ arrayvec = { version = "0.7", default-features = false } askama = { version = "0.12", default-features = false, features = ["config"] } itertools = "0.10.1" -minifier = "0.2.2" +indexmap = "2" +minifier = "0.3.0" once_cell = "1.10.0" regex = "1" rustdoc-json-types = { path = "../rustdoc-json-types" } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/auto_trait.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/auto_trait.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/auto_trait.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/auto_trait.rs 2023-12-21 16:55:28.000000000 +0000 @@ -551,8 +551,8 @@ WherePredicate::RegionPredicate { lifetime, bounds } => { lifetime_to_bounds.entry(lifetime).or_default().extend(bounds); } - 
WherePredicate::EqPredicate { lhs, rhs, bound_params } => { - match *lhs { + WherePredicate::EqPredicate { lhs, rhs } => { + match lhs { Type::QPath(box QPathData { ref assoc, ref self_type, @@ -590,14 +590,13 @@ GenericArgs::AngleBracketed { ref mut bindings, .. } => { bindings.push(TypeBinding { assoc: assoc.clone(), - kind: TypeBindingKind::Equality { term: *rhs }, + kind: TypeBindingKind::Equality { term: rhs }, }); } GenericArgs::Parenthesized { .. } => { existing_predicates.push(WherePredicate::EqPredicate { lhs: lhs.clone(), rhs, - bound_params, }); continue; // If something other than a Fn ends up // with parentheses, leave it alone diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/inline.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/inline.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/inline.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/inline.rs 2023-12-21 16:55:28.000000000 +0000 @@ -18,14 +18,16 @@ use rustc_span::symbol::{kw, sym, Symbol}; use crate::clean::{ - self, clean_fn_decl_from_did_and_sig, clean_generics, clean_impl_item, clean_middle_assoc_item, - clean_middle_field, clean_middle_ty, clean_trait_ref_with_bindings, clean_ty, - clean_ty_alias_inner_type, clean_ty_generics, clean_variant_def, utils, Attributes, + self, clean_bound_vars, clean_fn_decl_from_did_and_sig, clean_generics, clean_impl_item, + clean_middle_assoc_item, clean_middle_field, clean_middle_ty, clean_trait_ref_with_bindings, + clean_ty, clean_ty_alias_inner_type, clean_ty_generics, clean_variant_def, utils, Attributes, AttributesExt, ImplKind, ItemId, Type, }; use crate::core::DocContext; use crate::formats::item_type::ItemType; +use super::Item; + /// Attempt to inline a definition into this AST. /// /// This function will fetch the definition specified, and if it is @@ -83,7 +85,7 @@ Res::Def(DefKind::TyAlias, did) => { record_extern_fqn(cx, did, ItemType::TypeAlias); build_impls(cx, did, attrs_without_docs, &mut ret); - clean::TypeAliasItem(build_type_alias(cx, did)) + clean::TypeAliasItem(build_type_alias(cx, did, &mut ret)) } Res::Def(DefKind::Enum, did) => { record_extern_fqn(cx, did, ItemType::Enum); @@ -239,20 +241,13 @@ fn build_external_function<'tcx>(cx: &mut DocContext<'tcx>, did: DefId) -> Box { let sig = cx.tcx.fn_sig(did).instantiate_identity(); - - let late_bound_regions = sig.bound_vars().into_iter().filter_map(|var| match var { - ty::BoundVariableKind::Region(ty::BrNamed(_, name)) if name != kw::UnderscoreLifetime => { - Some(clean::GenericParamDef::lifetime(name)) - } - _ => None, - }); - let predicates = cx.tcx.explicit_predicates_of(did); + let (generics, decl) = clean::enter_impl_trait(cx, |cx| { // NOTE: generics need to be cleaned before the decl! 
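This hunk, and several later ones in clean/mod.rs, replaces the ad-hoc `late_bound_regions` filtering with a shared `clean_bound_vars` helper. As a minimal, self-contained illustration (ordinary user code, not rustdoc internals), the lifetime below is late-bound in the function signature, which is the kind of binder the helper turns into a rendered generic lifetime parameter:

// Late-bound lifetime in a plain function signature.
pub fn first_word<'a>(s: &'a str) -> &'a str {
    s.split_whitespace().next().unwrap_or("")
}

fn main() {
    assert_eq!(first_word("late bound"), "late");
}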
let mut generics = clean_ty_generics(cx, cx.tcx.generics_of(did), predicates); // FIXME: This does not place parameters in source order (late-bound ones come last) - generics.params.extend(late_bound_regions); + generics.params.extend(clean_bound_vars(sig.bound_vars())); let decl = clean_fn_decl_from_did_and_sig(cx, Some(did), sig); (generics, decl) }); @@ -288,11 +283,15 @@ clean::Union { generics, fields } } -fn build_type_alias(cx: &mut DocContext<'_>, did: DefId) -> Box { +fn build_type_alias( + cx: &mut DocContext<'_>, + did: DefId, + ret: &mut Vec, +) -> Box { let predicates = cx.tcx.explicit_predicates_of(did); let ty = cx.tcx.type_of(did).instantiate_identity(); let type_ = clean_middle_ty(ty::Binder::dummy(ty), cx, Some(did), None); - let inner_type = clean_ty_alias_inner_type(ty, cx); + let inner_type = clean_ty_alias_inner_type(ty, cx, ret); Box::new(clean::TypeAlias { type_, @@ -600,7 +599,7 @@ let prim_ty = clean::PrimitiveType::from(p); items.push(clean::Item { name: None, - attrs: Box::new(clean::Attributes::default()), + attrs: Box::default(), // We can use the item's `DefId` directly since the only information ever used // from it is `DefId.krate`. item_id: ItemId::DefId(did), @@ -648,13 +647,13 @@ clean::simplify::move_bounds_to_generic_parameters(&mut generics); clean::Constant { - type_: clean_middle_ty( + type_: Box::new(clean_middle_ty( ty::Binder::dummy(cx.tcx.type_of(def_id).instantiate_identity()), cx, Some(def_id), None, - ), - generics: Box::new(generics), + )), + generics, kind: clean::ConstantKind::Extern { def_id }, } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -232,20 +232,11 @@ poly_trait_ref: ty::PolyTraitRef<'tcx>, bindings: ThinVec, ) -> GenericBound { - // collect any late bound regions - let late_bound_regions: Vec<_> = cx - .tcx - .collect_referenced_late_bound_regions(&poly_trait_ref) - .into_iter() - .filter_map(|br| match br { - ty::BrNamed(_, name) if br.is_named() => Some(GenericParamDef::lifetime(name)), - _ => None, - }) - .collect(); - - let trait_ = clean_trait_ref_with_bindings(cx, poly_trait_ref, bindings); GenericBound::TraitBound( - PolyTrait { trait_, generic_params: late_bound_regions }, + PolyTrait { + trait_: clean_trait_ref_with_bindings(cx, poly_trait_ref, bindings), + generic_params: clean_bound_vars(poly_trait_ref.bound_vars()), + }, hir::TraitBoundModifier::None, ) } @@ -268,13 +259,13 @@ pub(crate) fn clean_const<'tcx>(constant: &hir::ConstArg, cx: &mut DocContext<'tcx>) -> Constant { let def_id = cx.tcx.hir().body_owner_def_id(constant.value.body).to_def_id(); Constant { - type_: clean_middle_ty( + type_: Box::new(clean_middle_ty( ty::Binder::dummy(cx.tcx.type_of(def_id).instantiate_identity()), cx, Some(def_id), None, - ), - generics: Box::new(Generics::default()), + )), + generics: Generics::default(), kind: ConstantKind::Anonymous { body: constant.value.body }, } } @@ -285,8 +276,8 @@ ) -> Constant { // FIXME: instead of storing the stringified expression, store `self` directly instead. 
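The new `ret` parameter threaded through `build_type_alias` and `clean_ty_alias_inner_type` lets rustdoc record a non-local ADT that sits behind a type alias and build its impls alongside the alias item. A hedged sketch of the user-facing situation, with an illustrative alias name:

// A public alias to a non-local ADT (std's Result). The aliased enum is now
// recorded and its impls are inlined next to the alias instead of the alias
// being documented in isolation.
pub type Outcome = Result<u32, String>;

fn main() {
    let value: Outcome = Ok(7);
    assert_eq!(value.unwrap(), 7);
}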
Constant { - type_: clean_middle_ty(constant.map_bound(|c| c.ty()), cx, None, None), - generics: Box::new(Generics::default()), + type_: Box::new(clean_middle_ty(constant.map_bound(|c| c.ty()), cx, None, None)), + generics: Generics::default(), kind: ConstantKind::TyConst { expr: constant.skip_binder().to_string().into() }, } } @@ -338,9 +329,8 @@ }, hir::WherePredicate::EqPredicate(ref wrp) => WherePredicate::EqPredicate { - lhs: Box::new(clean_ty(wrp.lhs_ty, cx)), - rhs: Box::new(clean_ty(wrp.rhs_ty, cx).into()), - bound_params: Vec::new(), + lhs: clean_ty(wrp.lhs_ty, cx), + rhs: clean_ty(wrp.rhs_ty, cx).into(), }, }) } @@ -436,20 +426,9 @@ pred: ty::Binder<'tcx, ty::ProjectionPredicate<'tcx>>, cx: &mut DocContext<'tcx>, ) -> WherePredicate { - let late_bound_regions = cx - .tcx - .collect_referenced_late_bound_regions(&pred) - .into_iter() - .filter_map(|br| match br { - ty::BrNamed(_, name) if br.is_named() => Some(GenericParamDef::lifetime(name)), - _ => None, - }) - .collect(); - WherePredicate::EqPredicate { - lhs: Box::new(clean_projection(pred.map_bound(|p| p.projection_ty), cx, None)), - rhs: Box::new(clean_middle_term(pred.map_bound(|p| p.term), cx)), - bound_params: late_bound_regions, + lhs: clean_projection(pred.map_bound(|p| p.projection_ty), cx, None), + rhs: clean_middle_term(pred.map_bound(|p| p.term), cx), } } @@ -496,8 +475,9 @@ ty: ty::Binder<'tcx, ty::AliasTy<'tcx>>, cx: &mut DocContext<'tcx>, ) -> PathSegment { - let item = cx.tcx.associated_item(ty.skip_binder().def_id); - let generics = cx.tcx.generics_of(ty.skip_binder().def_id); + let def_id = ty.skip_binder().def_id; + let item = cx.tcx.associated_item(def_id); + let generics = cx.tcx.generics_of(def_id); PathSegment { name: item.name, args: GenericArgs::AngleBracketed { @@ -505,7 +485,7 @@ cx, ty.map_bound(|ty| &ty.args[generics.parent_count..]), false, - None, + def_id, ) .into(), bindings: Default::default(), @@ -519,7 +499,7 @@ ) -> GenericParamDef { let (name, kind) = match def.kind { ty::GenericParamDefKind::Lifetime => { - (def.name, GenericParamDefKind::Lifetime { outlives: vec![] }) + (def.name, GenericParamDefKind::Lifetime { outlives: ThinVec::new() }) } ty::GenericParamDefKind::Type { has_default, synthetic, .. } => { let default = if has_default { @@ -536,7 +516,7 @@ def.name, GenericParamDefKind::Type { did: def.def_id, - bounds: vec![], // These are filled in from the where-clauses. + bounds: ThinVec::new(), // These are filled in from the where-clauses. default: default.map(Box::new), synthetic, }, @@ -588,7 +568,7 @@ }) .collect() } else { - Vec::new() + ThinVec::new() }; (param.name.ident().name, GenericParamDefKind::Lifetime { outlives }) } @@ -601,7 +581,7 @@ .filter_map(|x| clean_generic_bound(x, cx)) .collect() } else { - Vec::new() + ThinVec::new() }; ( param.name.ident().name, @@ -657,7 +637,7 @@ match param.kind { GenericParamDefKind::Lifetime { .. } => unreachable!(), GenericParamDefKind::Type { did, ref bounds, .. } => { - cx.impl_trait_bounds.insert(did.into(), bounds.clone()); + cx.impl_trait_bounds.insert(did.into(), bounds.to_vec()); } GenericParamDefKind::Const { .. 
} => unreachable!(), } @@ -705,8 +685,8 @@ } } } - WherePredicate::EqPredicate { lhs, rhs, bound_params } => { - eq_predicates.push(WherePredicate::EqPredicate { lhs, rhs, bound_params }); + WherePredicate::EqPredicate { lhs, rhs } => { + eq_predicates.push(WherePredicate::EqPredicate { lhs, rhs }); } } } @@ -800,11 +780,9 @@ }) .collect::>(); - // param index -> [(trait DefId, associated type name & generics, term, higher-ranked params)] - let mut impl_trait_proj = FxHashMap::< - u32, - Vec<(DefId, PathSegment, ty::Binder<'_, ty::Term<'_>>, Vec)>, - >::default(); + // param index -> [(trait DefId, associated type name & generics, term)] + let mut impl_trait_proj = + FxHashMap::>)>>::default(); let where_predicates = preds .predicates @@ -856,11 +834,6 @@ trait_did, name, proj.map_bound(|p| p.term), - pred.get_bound_params() - .into_iter() - .flatten() - .cloned() - .collect(), )); } @@ -896,9 +869,9 @@ let crate::core::ImplTraitParam::ParamIndex(idx) = param else { unreachable!() }; if let Some(proj) = impl_trait_proj.remove(&idx) { - for (trait_did, name, rhs, bound_params) in proj { + for (trait_did, name, rhs) in proj { let rhs = clean_middle_term(rhs, cx); - simplify::merge_bounds(cx, &mut bounds, bound_params, trait_did, name, &rhs); + simplify::merge_bounds(cx, &mut bounds, trait_did, name, &rhs); } } @@ -962,11 +935,16 @@ fn clean_ty_alias_inner_type<'tcx>( ty: Ty<'tcx>, cx: &mut DocContext<'tcx>, + ret: &mut Vec, ) -> Option { let ty::Adt(adt_def, args) = ty.kind() else { return None; }; + if !adt_def.did().is_local() { + inline::build_impls(cx, adt_def.did(), None, ret); + } + Some(if adt_def.is_enum() { let variants: rustc_index::IndexVec<_, _> = adt_def .variants() @@ -974,6 +952,10 @@ .map(|variant| clean_variant_def_with_args(variant, args, cx)) .collect(); + if !adt_def.did().is_local() { + inline::record_extern_fqn(cx, adt_def.did(), ItemType::Enum); + } + TypeAliasInnerType::Enum { variants, is_non_exhaustive: adt_def.is_variant_list_non_exhaustive(), @@ -989,8 +971,14 @@ clean_variant_def_with_args(variant, args, cx).kind.inner_items().cloned().collect(); if adt_def.is_struct() { + if !adt_def.did().is_local() { + inline::record_extern_fqn(cx, adt_def.did(), ItemType::Struct); + } TypeAliasInnerType::Struct { ctor_kind: variant.ctor_kind(), fields } } else { + if !adt_def.did().is_local() { + inline::record_extern_fqn(cx, adt_def.did(), ItemType::Union); + } TypeAliasInnerType::Union { fields } } }) @@ -1244,14 +1232,14 @@ hir::TraitItemKind::Const(ty, Some(default)) => { let generics = enter_impl_trait(cx, |cx| clean_generics(trait_item.generics, cx)); AssocConstItem( - Box::new(generics), - clean_ty(ty, cx), + generics, + Box::new(clean_ty(ty, cx)), ConstantKind::Local { def_id: local_did, body: default }, ) } hir::TraitItemKind::Const(ty, None) => { let generics = enter_impl_trait(cx, |cx| clean_generics(trait_item.generics, cx)); - TyAssocConstItem(Box::new(generics), clean_ty(ty, cx)) + TyAssocConstItem(generics, Box::new(clean_ty(ty, cx))) } hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Provided(body)) => { let m = clean_function(cx, sig, trait_item.generics, FunctionArgs::Body(body)); @@ -1300,7 +1288,7 @@ hir::ImplItemKind::Const(ty, expr) => { let generics = clean_generics(impl_.generics, cx); let default = ConstantKind::Local { def_id: local_did, body: expr }; - AssocConstItem(Box::new(generics), clean_ty(ty, cx), default) + AssocConstItem(generics, Box::new(clean_ty(ty, cx)), default) } hir::ImplItemKind::Fn(ref sig, body) => { let m = clean_function(cx, sig, 
impl_.generics, FunctionArgs::Body(body)); @@ -1339,18 +1327,18 @@ let tcx = cx.tcx; let kind = match assoc_item.kind { ty::AssocKind::Const => { - let ty = clean_middle_ty( + let ty = Box::new(clean_middle_ty( ty::Binder::dummy(tcx.type_of(assoc_item.def_id).instantiate_identity()), cx, Some(assoc_item.def_id), None, - ); + )); - let mut generics = Box::new(clean_ty_generics( + let mut generics = clean_ty_generics( cx, tcx.generics_of(assoc_item.def_id), tcx.explicit_predicates_of(assoc_item.def_id), - )); + ); simplify::move_bounds_to_generic_parameters(&mut generics); let provided = match assoc_item.container { @@ -1365,23 +1353,13 @@ } ty::AssocKind::Fn => { let sig = tcx.fn_sig(assoc_item.def_id).instantiate_identity(); - - let late_bound_regions = sig.bound_vars().into_iter().filter_map(|var| match var { - ty::BoundVariableKind::Region(ty::BrNamed(_, name)) - if name != kw::UnderscoreLifetime => - { - Some(GenericParamDef::lifetime(name)) - } - _ => None, - }); - let mut generics = clean_ty_generics( cx, tcx.generics_of(assoc_item.def_id), tcx.explicit_predicates_of(assoc_item.def_id), ); // FIXME: This does not place parameters in source order (late-bound ones come last) - generics.params.extend(late_bound_regions); + generics.params.extend(clean_bound_vars(sig.bound_vars())); let mut decl = clean_fn_decl_from_did_and_sig(cx, Some(assoc_item.def_id), sig); @@ -2117,9 +2095,11 @@ // FIXME: should we merge the outer and inner binders somehow? let sig = bound_ty.skip_binder().fn_sig(cx.tcx); let decl = clean_fn_decl_from_did_and_sig(cx, None, sig); + let generic_params = clean_bound_vars(sig.bound_vars()); + BareFunction(Box::new(BareFunctionDecl { unsafety: sig.unsafety(), - generic_params: Vec::new(), + generic_params, decl, abi: sig.abi(), })) @@ -2195,8 +2175,8 @@ let late_bound_regions: FxIndexSet<_> = obj .iter() - .flat_map(|pb| pb.bound_vars()) - .filter_map(|br| match br { + .flat_map(|pred| pred.bound_vars()) + .filter_map(|var| match var { ty::BoundVariableKind::Region(ty::BrNamed(_, name)) if name != kw::UnderscoreLifetime => { @@ -2221,18 +2201,19 @@ } ty::Alias(ty::Inherent, alias_ty) => { + let def_id = alias_ty.def_id; let alias_ty = bound_ty.rebind(alias_ty); let self_type = clean_middle_ty(alias_ty.map_bound(|ty| ty.self_ty()), cx, None, None); Type::QPath(Box::new(QPathData { assoc: PathSegment { - name: cx.tcx.associated_item(alias_ty.skip_binder().def_id).name, + name: cx.tcx.associated_item(def_id).name, args: GenericArgs::AngleBracketed { args: ty_args_to_args( cx, alias_ty.map_bound(|ty| ty.args.as_slice()), true, - None, + def_id, ) .into(), bindings: Default::default(), @@ -2270,6 +2251,11 @@ } } + ty::Bound(_, ref ty) => match ty.kind { + ty::BoundTyKind::Param(_, name) => Generic(name), + ty::BoundTyKind::Anon => panic!("unexpected anonymous bound type variable"), + }, + ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => { // If it's already in the same alias, don't get an infinite loop. if cx.current_type_aliases.contains_key(&def_id) { @@ -2297,10 +2283,9 @@ } ty::Closure(..) => panic!("Closure"), - ty::Generator(..) => panic!("Generator"), - ty::Bound(..) => panic!("Bound"), + ty::Coroutine(..) => panic!("Coroutine"), ty::Placeholder(..) => panic!("Placeholder"), - ty::GeneratorWitness(..) => panic!("GeneratorWitness"), + ty::CoroutineWitness(..) => panic!("CoroutineWitness"), ty::Infer(..) 
=> panic!("Infer"), ty::Error(_) => rustc_errors::FatalError.raise(), } @@ -2549,7 +2534,8 @@ } hir::GenericArg::Lifetime(_) => GenericArg::Lifetime(Lifetime::elided()), hir::GenericArg::Type(ty) => GenericArg::Type(clean_ty(ty, cx)), - // FIXME(effects): This will still emit `` for non-const impls of const traits + // Checking for `#[rustc_host]` on the `AnonConst` not only accounts for the case + // where the argument is `host` but for all possible cases (e.g., `true`, `false`). hir::GenericArg::Const(ct) if cx.tcx.has_attr(ct.value.def_id, sym::rustc_host) => { @@ -2750,8 +2736,8 @@ StaticItem(Static { type_: clean_ty(ty, cx), mutability, expr: Some(body_id) }) } ItemKind::Const(ty, generics, body_id) => ConstantItem(Constant { - type_: clean_ty(ty, cx), - generics: Box::new(clean_generics(generics, cx)), + type_: Box::new(clean_ty(ty, cx)), + generics: clean_generics(generics, cx), kind: ConstantKind::Local { body: body_id, def_id }, }), ItemKind::OpaqueTy(ref ty) => OpaqueTyItem(OpaqueTy { @@ -2776,14 +2762,24 @@ } let ty = cx.tcx.type_of(def_id).instantiate_identity(); - let inner_type = clean_ty_alias_inner_type(ty, cx); - TypeAliasItem(Box::new(TypeAlias { - generics, - inner_type, - type_: rustdoc_ty, - item_type: Some(type_), - })) + let mut ret = Vec::new(); + let inner_type = clean_ty_alias_inner_type(ty, cx, &mut ret); + + ret.push(generate_item_with_correct_attrs( + cx, + TypeAliasItem(Box::new(TypeAlias { + generics, + inner_type, + type_: rustdoc_ty, + item_type: Some(type_), + })), + item.owner_id.def_id.to_def_id(), + name, + import_id, + renamed, + )); + return ret; } ItemKind::Enum(ref def, generics) => EnumItem(Enum { variants: def.variants.iter().map(|v| clean_variant(v, cx)).collect(), @@ -3137,3 +3133,30 @@ }, } } + +fn clean_bound_vars<'tcx>( + bound_vars: &'tcx ty::List, +) -> Vec { + bound_vars + .into_iter() + .filter_map(|var| match var { + ty::BoundVariableKind::Region(ty::BrNamed(_, name)) + if name != kw::UnderscoreLifetime => + { + Some(GenericParamDef::lifetime(name)) + } + ty::BoundVariableKind::Ty(ty::BoundTyKind::Param(did, name)) => Some(GenericParamDef { + name, + kind: GenericParamDefKind::Type { + did, + bounds: ThinVec::new(), + default: None, + synthetic: false, + }, + }), + // FIXME(non_lifetime_binders): Support higher-ranked const parameters. + ty::BoundVariableKind::Const => None, + _ => None, + }) + .collect() +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/simplify.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/simplify.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/simplify.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/simplify.rs 2023-12-21 16:55:28.000000000 +0000 @@ -40,18 +40,18 @@ WP::RegionPredicate { lifetime, bounds } => { lifetimes.push((lifetime, bounds)); } - WP::EqPredicate { lhs, rhs, bound_params } => equalities.push((lhs, rhs, bound_params)), + WP::EqPredicate { lhs, rhs } => equalities.push((lhs, rhs)), } } // Look for equality predicates on associated types that can be merged into // general bound predicates. 
- equalities.retain(|(lhs, rhs, bound_params)| { + equalities.retain(|(lhs, rhs)| { let Some((ty, trait_did, name)) = lhs.projection() else { return true; }; let Some((bounds, _)) = tybounds.get_mut(ty) else { return true }; - merge_bounds(cx, bounds, bound_params.clone(), trait_did, name, rhs) + merge_bounds(cx, bounds, trait_did, name, rhs) }); // And finally, let's reassemble everything @@ -64,18 +64,13 @@ bounds, bound_params, })); - clauses.extend(equalities.into_iter().map(|(lhs, rhs, bound_params)| WP::EqPredicate { - lhs, - rhs, - bound_params, - })); + clauses.extend(equalities.into_iter().map(|(lhs, rhs)| WP::EqPredicate { lhs, rhs })); clauses } pub(crate) fn merge_bounds( cx: &clean::DocContext<'_>, bounds: &mut Vec, - mut bound_params: Vec, trait_did: DefId, assoc: clean::PathSegment, rhs: &clean::Term, @@ -93,12 +88,6 @@ } let last = trait_ref.trait_.segments.last_mut().expect("segments were empty"); - trait_ref.generic_params.append(&mut bound_params); - // Sort parameters (likely) originating from a hashset alphabetically to - // produce predictable output (and to allow for full deduplication). - trait_ref.generic_params.sort_unstable_by(|p, q| p.name.as_str().cmp(q.name.as_str())); - trait_ref.generic_params.dedup_by_key(|p| p.name); - match last.args { PP::AngleBracketed { ref mut bindings, .. } => { bindings.push(clean::TypeBinding { @@ -156,7 +145,7 @@ .. }) = generics.params.iter_mut().find(|param| ¶m.name == arg) { - param_bounds.append(bounds); + param_bounds.extend(bounds.drain(..)); } else if let WherePredicate::RegionPredicate { lifetime: Lifetime(arg), bounds } = &mut pred && let Some(GenericParamDef { kind: GenericParamDefKind::Lifetime { outlives: param_bounds }, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/types/tests.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/types/tests.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/types/tests.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/types/tests.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,9 +1,8 @@ use super::*; use rustc_resolve::rustdoc::{unindent_doc_fragments, DocFragment, DocFragmentKind}; -use rustc_span::create_default_session_globals_then; -use rustc_span::source_map::DUMMY_SP; use rustc_span::symbol::Symbol; +use rustc_span::{create_default_session_globals_then, DUMMY_SP}; fn create_doc_fragment(s: &str) -> Vec { vec![DocFragment { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/types.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/types.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/types.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/types.rs 2023-12-21 16:55:28.000000000 +0000 @@ -12,7 +12,7 @@ use rustc_ast as ast; use rustc_ast_pretty::pprust; -use rustc_attr::{ConstStability, Deprecation, Stability, StabilityLevel}; +use rustc_attr::{ConstStability, Deprecation, Stability, StabilityLevel, StableSince}; use rustc_const_eval::const_eval::is_unstable_const_fn; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_hir as hir; @@ -585,14 +585,14 @@ }) } - pub(crate) fn stable_since(&self, tcx: TyCtxt<'_>) -> Option { + pub(crate) fn stable_since(&self, tcx: TyCtxt<'_>) -> Option { match self.stability(tcx)?.level { StabilityLevel::Stable { since, .. } => Some(since), StabilityLevel::Unstable { .. 
} => None, } } - pub(crate) fn const_stable_since(&self, tcx: TyCtxt<'_>) -> Option { + pub(crate) fn const_stable_since(&self, tcx: TyCtxt<'_>) -> Option { match self.const_stability(tcx)?.level { StabilityLevel::Stable { since, .. } => Some(since), StabilityLevel::Unstable { .. } => None, @@ -713,12 +713,16 @@ Some(tcx.visibility(def_id)) } - pub(crate) fn attributes(&self, tcx: TyCtxt<'_>, keep_as_is: bool) -> Vec { + pub(crate) fn attributes( + &self, + tcx: TyCtxt<'_>, + cache: &Cache, + keep_as_is: bool, + ) -> Vec { const ALLOWED_ATTRIBUTES: &[Symbol] = - &[sym::export_name, sym::link_section, sym::no_mangle, sym::repr, sym::non_exhaustive]; + &[sym::export_name, sym::link_section, sym::no_mangle, sym::non_exhaustive]; use rustc_abi::IntegerType; - use rustc_middle::ty::ReprFlags; let mut attrs: Vec = self .attrs @@ -739,20 +743,38 @@ } }) .collect(); - if let Some(def_id) = self.def_id() && - !def_id.is_local() && - // This check is needed because `adt_def` will panic if not a compatible type otherwise... - matches!(self.type_(), ItemType::Struct | ItemType::Enum | ItemType::Union) + if !keep_as_is + && let Some(def_id) = self.def_id() + && let ItemType::Struct | ItemType::Enum | ItemType::Union = self.type_() { - let repr = tcx.adt_def(def_id).repr(); + let adt = tcx.adt_def(def_id); + let repr = adt.repr(); let mut out = Vec::new(); - if repr.flags.contains(ReprFlags::IS_C) { + if repr.c() { out.push("C"); } - if repr.flags.contains(ReprFlags::IS_TRANSPARENT) { - out.push("transparent"); + if repr.transparent() { + // Render `repr(transparent)` iff the non-1-ZST field is public or at least one + // field is public in case all fields are 1-ZST fields. + let render_transparent = cache.document_private + || adt + .all_fields() + .find(|field| { + let ty = + field.ty(tcx, ty::GenericArgs::identity_for_item(tcx, field.did)); + tcx.layout_of(tcx.param_env(field.did).and(ty)) + .is_ok_and(|layout| !layout.is_1zst()) + }) + .map_or_else( + || adt.all_fields().any(|field| field.vis.is_public()), + |field| field.vis.is_public(), + ); + + if render_transparent { + out.push("transparent"); + } } - if repr.flags.contains(ReprFlags::IS_SIMD) { + if repr.simd() { out.push("simd"); } let pack_s; @@ -777,10 +799,9 @@ }; out.push(&int_s); } - if out.is_empty() { - return Vec::new(); + if !out.is_empty() { + attrs.push(format!("#[repr({})]", out.join(", "))); } - attrs.push(format!("#[repr({})]", out.join(", "))); } attrs } @@ -831,9 +852,9 @@ ProcMacroItem(ProcMacro), PrimitiveItem(PrimitiveType), /// A required associated constant in a trait declaration. - TyAssocConstItem(Box, Type), + TyAssocConstItem(Generics, Box), /// An associated constant in a trait impl or a provided one in a trait declaration. - AssocConstItem(Box, Type, ConstantKind), + AssocConstItem(Generics, Box, ConstantKind), /// A required associated type in a trait declaration. /// /// The bounds may be non-empty if there is a `where` clause. @@ -1289,7 +1310,7 @@ pub(crate) enum WherePredicate { BoundPredicate { ty: Type, bounds: Vec, bound_params: Vec }, RegionPredicate { lifetime: Lifetime, bounds: Vec }, - EqPredicate { lhs: Box, rhs: Box, bound_params: Vec }, + EqPredicate { lhs: Type, rhs: Term }, } impl WherePredicate { @@ -1300,21 +1321,13 @@ _ => None, } } - - pub(crate) fn get_bound_params(&self) -> Option<&[GenericParamDef]> { - match self { - Self::BoundPredicate { bound_params, .. } | Self::EqPredicate { bound_params, .. 
} => { - Some(bound_params) - } - _ => None, - } - } } #[derive(Clone, PartialEq, Eq, Debug, Hash)] pub(crate) enum GenericParamDefKind { - Lifetime { outlives: Vec }, - Type { did: DefId, bounds: Vec, default: Option>, synthetic: bool }, + Lifetime { outlives: ThinVec }, + Type { did: DefId, bounds: ThinVec, default: Option>, synthetic: bool }, + // Option> makes this type smaller than `Option` would. Const { ty: Box, default: Option>, is_host_effect: bool }, } @@ -1332,7 +1345,7 @@ impl GenericParamDef { pub(crate) fn lifetime(name: Symbol) -> Self { - Self { name, kind: GenericParamDefKind::Lifetime { outlives: Vec::new() } } + Self { name, kind: GenericParamDefKind::Lifetime { outlives: ThinVec::new() } } } pub(crate) fn is_synthetic_param(&self) -> bool { @@ -1443,6 +1456,9 @@ pub(crate) fn unsafety(&self, tcx: TyCtxt<'_>) -> hir::Unsafety { tcx.trait_def(self.def_id).unsafety } + pub(crate) fn is_object_safe(&self, tcx: TyCtxt<'_>) -> bool { + tcx.check_is_object_safe(self.def_id) + } } #[derive(Clone, Debug)] @@ -2094,9 +2110,8 @@ pub(crate) fn expr(&self, tcx: TyCtxt<'_>) -> Option { self.expr.map(|body| rendered_const(tcx, body)) } - /// Will always be a machine readable number, without underscores or suffixes. - pub(crate) fn value(&self, tcx: TyCtxt<'_>) -> String { - print_evaluated_const(tcx, self.value, false).unwrap() + pub(crate) fn value(&self, tcx: TyCtxt<'_>, with_underscores: bool) -> String { + print_evaluated_const(tcx, self.value, with_underscores, false).unwrap() } } @@ -2271,8 +2286,8 @@ #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub(crate) struct Constant { - pub(crate) type_: Type, - pub(crate) generics: Box, + pub(crate) type_: Box, + pub(crate) generics: Generics, pub(crate) kind: ConstantKind, } @@ -2341,7 +2356,7 @@ match *self { ConstantKind::TyConst { .. } | ConstantKind::Anonymous { .. } => None, ConstantKind::Extern { def_id } | ConstantKind::Local { def_id, .. } => { - print_evaluated_const(tcx, def_id, true) + print_evaluated_const(tcx, def_id, true, true) } } } @@ -2510,11 +2525,10 @@ static_assert_size!(DocFragment, 32); static_assert_size!(GenericArg, 32); static_assert_size!(GenericArgs, 32); - static_assert_size!(GenericParamDef, 56); + static_assert_size!(GenericParamDef, 40); static_assert_size!(Generics, 16); static_assert_size!(Item, 56); - // FIXME(generic_const_items): Further reduce the size. 
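The rewritten repr handling in `Item::attributes` (previous hunk) only renders `#[repr(transparent)]` when the single non-1-ZST field is public (or when private items are documented), since otherwise the layout guarantee is not part of the type's documented interface. A standalone example of the two cases, using illustrative type names:

#[repr(transparent)]
pub struct Meters(pub f64); // field is public: the attribute is shown in the docs

#[repr(transparent)]
pub struct Opaque(f64); // field is private: the attribute is now hidden

fn main() {
    assert_eq!(std::mem::size_of::<Meters>(), std::mem::size_of::<f64>());
    // Private fields remain accessible within the defining module.
    assert_eq!(Opaque(2.0).0, 2.0);
}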
- static_assert_size!(ItemKind, 72); + static_assert_size!(ItemKind, 56); static_assert_size!(PathSegment, 40); static_assert_size!(Type, 32); // tidy-alphabetical-end diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/utils.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/utils.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/clean/utils.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/clean/utils.rs 2023-12-21 16:55:28.000000000 +0000 @@ -17,6 +17,7 @@ use rustc_metadata::rendered_const; use rustc_middle::mir; use rustc_middle::ty::{self, GenericArgKind, GenericArgsRef, TyCtxt}; +use rustc_middle::ty::{TypeVisitable, TypeVisitableExt}; use rustc_span::symbol::{kw, sym, Symbol}; use std::fmt::Write as _; use std::mem; @@ -76,44 +77,119 @@ pub(crate) fn ty_args_to_args<'tcx>( cx: &mut DocContext<'tcx>, - args: ty::Binder<'tcx, &'tcx [ty::GenericArg<'tcx>]>, + ty_args: ty::Binder<'tcx, &'tcx [ty::GenericArg<'tcx>]>, has_self: bool, - container: Option, + owner: DefId, ) -> Vec { - let mut skip_first = has_self; - let mut ret_val = - Vec::with_capacity(args.skip_binder().len().saturating_sub(if skip_first { 1 } else { 0 })); - - ret_val.extend(args.iter().enumerate().filter_map(|(index, kind)| { - match kind.skip_binder().unpack() { - GenericArgKind::Lifetime(lt) => { - Some(GenericArg::Lifetime(clean_middle_region(lt).unwrap_or(Lifetime::elided()))) - } - GenericArgKind::Type(_) if skip_first => { - skip_first = false; - None + if ty_args.skip_binder().is_empty() { + // Fast path which avoids executing the query `generics_of`. + return Vec::new(); + } + + let params = &cx.tcx.generics_of(owner).params; + let mut elision_has_failed_once_before = false; + + let offset = if has_self { 1 } else { 0 }; + let mut args = Vec::with_capacity(ty_args.skip_binder().len().saturating_sub(offset)); + + let ty_arg_to_arg = |(index, arg): (usize, &ty::GenericArg<'tcx>)| match arg.unpack() { + GenericArgKind::Lifetime(lt) => { + Some(GenericArg::Lifetime(clean_middle_region(lt).unwrap_or(Lifetime::elided()))) + } + GenericArgKind::Type(_) if has_self && index == 0 => None, + GenericArgKind::Type(ty) => { + if !elision_has_failed_once_before + && let Some(default) = params[index].default_value(cx.tcx) + { + let default = + ty_args.map_bound(|args| default.instantiate(cx.tcx, args).expect_ty()); + + if can_elide_generic_arg(ty_args.rebind(ty), default) { + return None; + } + + elision_has_failed_once_before = true; } - GenericArgKind::Type(ty) => Some(GenericArg::Type(clean_middle_ty( - kind.rebind(ty), + + Some(GenericArg::Type(clean_middle_ty( + ty_args.rebind(ty), cx, None, - container.map(|container| crate::clean::ContainerTy::Regular { - ty: container, - args, + Some(crate::clean::ContainerTy::Regular { + ty: owner, + args: ty_args, has_self, arg: index, }), - ))), - // FIXME(effects): this relies on the host effect being called `host`, which users could also name - // their const generics. - // FIXME(effects): this causes `host = true` and `host = false` generics to also be emitted. - GenericArgKind::Const(ct) if let ty::ConstKind::Param(p) = ct.kind() && p.name == sym::host => None, - GenericArgKind::Const(ct) => { - Some(GenericArg::Const(Box::new(clean_middle_const(kind.rebind(ct), cx)))) + ))) + } + GenericArgKind::Const(ct) => { + if let ty::GenericParamDefKind::Const { is_host_effect: true, .. 
} = params[index].kind + { + return None; + } + + if !elision_has_failed_once_before + && let Some(default) = params[index].default_value(cx.tcx) + { + let default = + ty_args.map_bound(|args| default.instantiate(cx.tcx, args).expect_const()); + + if can_elide_generic_arg(ty_args.rebind(ct), default) { + return None; + } + + elision_has_failed_once_before = true; } + + Some(GenericArg::Const(Box::new(clean_middle_const(ty_args.rebind(ct), cx)))) } - })); - ret_val + }; + + args.extend(ty_args.skip_binder().iter().enumerate().rev().filter_map(ty_arg_to_arg)); + args.reverse(); + args +} + +/// Check if the generic argument `actual` coincides with the `default` and can therefore be elided. +/// +/// This uses a very conservative approach for performance and correctness reasons, meaning for +/// several classes of terms it claims that they cannot be elided even if they theoretically could. +/// This is absolutely fine since it mostly concerns edge cases. +fn can_elide_generic_arg<'tcx, Term>( + actual: ty::Binder<'tcx, Term>, + default: ty::Binder<'tcx, Term>, +) -> bool +where + Term: Eq + TypeVisitable>, +{ + // In practice, we shouldn't have any inference variables at this point. + // However to be safe, we bail out if we do happen to stumble upon them. + if actual.has_infer() || default.has_infer() { + return false; + } + + // Since we don't properly keep track of bound variables in rustdoc (yet), we don't attempt to + // make any sense out of escaping bound variables. We simply don't have enough context and it + // would be incorrect to try to do so anyway. + if actual.has_escaping_bound_vars() || default.has_escaping_bound_vars() { + return false; + } + + // Theoretically we could now check if either term contains (non-escaping) late-bound regions or + // projections, relate the two using an `InferCtxt` and check if the resulting obligations hold. + // Having projections means that the terms can potentially be further normalized thereby possibly + // revealing that they are equal after all. Regarding late-bound regions, they could to be + // liberated allowing us to consider more types to be equal by ignoring the names of binders + // (e.g., `for<'a> TYPE<'a>` and `for<'b> TYPE<'b>`). + // + // However, we are mostly interested in “reeliding” generic args, i.e., eliding generic args that + // were originally elided by the user and later filled in by the compiler contrary to eliding + // arbitrary generic arguments if they happen to semantically coincide with the default (of course, + // we cannot possibly distinguish these two cases). Therefore and for performance reasons, it + // suffices to only perform a syntactic / structural check by comparing the memory addresses of + // the interned arguments. 
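`ty_args_to_args` now walks the arguments from the end and, via `can_elide_generic_arg`, drops trailing arguments that are structurally identical to the parameter's declared default. The user-visible effect, shown with hypothetical types (not from this patch):

// `B` defaults to `A`, so an instantiation `Pair<u8, u8>` can be re-elided
// and rendered as `Pair<u8>`, matching what the author originally wrote.
pub struct Pair<A, B = A>(pub A, pub B);

pub fn zeros() -> Pair<u8> {
    Pair(0, 0)
}

fn main() {
    let p = zeros();
    assert_eq!((p.0, p.1), (0, 0));
}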
+ actual.skip_binder() == default.skip_binder() } fn external_generic_args<'tcx>( @@ -123,7 +199,7 @@ bindings: ThinVec, ty_args: ty::Binder<'tcx, GenericArgsRef<'tcx>>, ) -> GenericArgs { - let args = ty_args_to_args(cx, ty_args.map_bound(|args| &args[..]), has_self, Some(did)); + let args = ty_args_to_args(cx, ty_args.map_bound(|args| &args[..]), has_self, did); if cx.tcx.fn_trait_kind_from_def_id(did).is_some() { let ty = ty_args @@ -279,7 +355,8 @@ pub(crate) fn print_evaluated_const( tcx: TyCtxt<'_>, def_id: DefId, - underscores_and_type: bool, + with_underscores: bool, + with_type: bool, ) -> Option { tcx.const_eval_poly(def_id).ok().and_then(|val| { let ty = tcx.type_of(def_id).instantiate_identity(); @@ -288,7 +365,7 @@ (mir::ConstValue::Scalar(_), &ty::Adt(_, _)) => None, (mir::ConstValue::Scalar(_), _) => { let const_ = mir::Const::from_value(val, ty); - Some(print_const_with_custom_print_scalar(tcx, const_, underscores_and_type)) + Some(print_const_with_custom_print_scalar(tcx, const_, with_underscores, with_type)) } _ => None, } @@ -324,32 +401,37 @@ fn print_const_with_custom_print_scalar<'tcx>( tcx: TyCtxt<'tcx>, ct: mir::Const<'tcx>, - underscores_and_type: bool, + with_underscores: bool, + with_type: bool, ) -> String { // Use a slightly different format for integer types which always shows the actual value. // For all other types, fallback to the original `pretty_print_const`. match (ct, ct.ty().kind()) { (mir::Const::Val(mir::ConstValue::Scalar(int), _), ty::Uint(ui)) => { - if underscores_and_type { - format!("{}{}", format_integer_with_underscore_sep(&int.to_string()), ui.name_str()) + let mut output = if with_underscores { + format_integer_with_underscore_sep(&int.to_string()) } else { int.to_string() + }; + if with_type { + output += ui.name_str(); } + output } (mir::Const::Val(mir::ConstValue::Scalar(int), _), ty::Int(i)) => { let ty = ct.ty(); let size = tcx.layout_of(ty::ParamEnv::empty().and(ty)).unwrap().size; let data = int.assert_bits(size); let sign_extended_data = size.sign_extend(data) as i128; - if underscores_and_type { - format!( - "{}{}", - format_integer_with_underscore_sep(&sign_extended_data.to_string()), - i.name_str() - ) + let mut output = if with_underscores { + format_integer_with_underscore_sep(&sign_extended_data.to_string()) } else { sign_extended_data.to_string() + }; + if with_type { + output += i.name_str(); } + output } _ => ct.to_string(), } @@ -502,7 +584,7 @@ /// Set by `bootstrap::Builder::doc_rust_lang_org_channel` in order to keep tests passing on beta/stable. pub(crate) const DOC_RUST_LANG_ORG_CHANNEL: &str = env!("DOC_RUST_LANG_ORG_CHANNEL"); pub(crate) static DOC_CHANNEL: Lazy<&'static str> = - Lazy::new(|| DOC_RUST_LANG_ORG_CHANNEL.rsplit("/").filter(|c| !c.is_empty()).next().unwrap()); + Lazy::new(|| DOC_RUST_LANG_ORG_CHANNEL.rsplit('/').filter(|c| !c.is_empty()).next().unwrap()); /// Render a sequence of macro arms in a format suitable for displaying to the user /// as part of an item declaration. 
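`print_evaluated_const` now takes separate `with_underscores` and `with_type` switches instead of one combined flag. The digit-grouping idea behind `with_underscores` can be sketched in plain Rust; this helper is illustrative only, not rustdoc's actual `format_integer_with_underscore_sep`:

// Insert an underscore before every remaining group of three digits.
fn with_underscore_sep(n: u128) -> String {
    let digits = n.to_string();
    let mut out = String::new();
    for (i, ch) in digits.chars().enumerate() {
        if i > 0 && (digits.len() - i) % 3 == 0 {
            out.push('_');
        }
        out.push(ch);
    }
    out
}

fn main() {
    assert_eq!(with_underscore_sep(1_000_000), "1_000_000");
    assert_eq!(with_underscore_sep(65_535), "65_535");
}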
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/core.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/core.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/core.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/core.rs 2023-12-21 16:55:28.000000000 +0000 @@ -14,8 +14,8 @@ use rustc_middle::hir::nested_filter; use rustc_middle::ty::{ParamEnv, Ty, TyCtxt}; use rustc_session::config::{self, CrateType, ErrorOutputType, ResolveDocLinks}; +use rustc_session::lint; use rustc_session::Session; -use rustc_session::{lint, EarlyErrorHandler}; use rustc_span::symbol::sym; use rustc_span::{source_map, Span}; @@ -23,6 +23,7 @@ use std::mem; use std::rc::Rc; use std::sync::LazyLock; +use std::sync::{atomic::AtomicBool, Arc}; use crate::clean::inline::build_external_trait; use crate::clean::{self, ItemId}; @@ -174,7 +175,6 @@ /// Parse, resolve, and typecheck the given crate. pub(crate) fn create_config( - handler: &EarlyErrorHandler, RustdocOptions { input, crate_name, @@ -198,6 +198,7 @@ .. }: RustdocOptions, RenderOptions { document_private, .. }: &RenderOptions, + using_internal_features: Arc, ) -> rustc_interface::Config { // Add the doc cfg into the doc build. cfgs.push("doc".to_string()); @@ -253,8 +254,8 @@ interface::Config { opts: sessopts, - crate_cfg: interface::parse_cfgspecs(handler, cfgs), - crate_check_cfg: interface::parse_check_cfg(handler, check_cfgs), + crate_cfg: cfgs, + crate_check_cfg: check_cfgs, input, output_file: None, output_dir: None, @@ -262,6 +263,7 @@ locale_resources: rustc_driver::DEFAULT_LOCALE_RESOURCES, lint_caps, parse_sess_created: None, + hash_untracked_state: None, register_lints: Some(Box::new(crate::lint::register_lints)), override_queries: Some(|_sess, providers| { // We do not register late module lints, so this only runs `MissingDoc`. @@ -292,6 +294,7 @@ make_codegen_backend: None, registry: rustc_driver::diagnostics_registry(), ice_file: None, + using_internal_features, expanded_args, } } @@ -316,10 +319,14 @@ tcx.hir().for_each_module(|module| tcx.ensure().collect_mod_item_types(module)) }); - // NOTE: This is copy/pasted from typeck/lib.rs and should be kept in sync with those changes. + // NOTE: These are copy/pasted from typeck/lib.rs and should be kept in sync with those changes. 
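In core.rs, `create_config` drops the `EarlyErrorHandler` parameter, passes the `--cfg`/`--check-cfg` strings through unparsed, and threads a `using_internal_features: Arc<AtomicBool>` into the compiler configuration (doctest.rs passes `Arc::default()`). A minimal sketch of that shared-flag shape, with illustrative names only, not rustc's actual types:

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

struct Config {
    using_internal_features: Arc<AtomicBool>,
}

fn main() {
    let flag = Arc::new(AtomicBool::new(false));
    let cfg = Config { using_internal_features: Arc::clone(&flag) };
    // Elsewhere (e.g. an ICE hook) the same flag can be flipped and observed:
    cfg.using_internal_features.store(true, Ordering::Relaxed);
    assert!(flag.load(Ordering::Relaxed));
}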
+ let _ = tcx.sess.time("wf_checking", || { + tcx.hir().try_par_for_each_module(|module| tcx.ensure().check_mod_type_wf(module)) + }); tcx.sess.time("item_types_checking", || { tcx.hir().for_each_module(|module| tcx.ensure().check_mod_item_types(module)) }); + tcx.sess.abort_if_errors(); tcx.sess.time("missing_docs", || rustc_lint::check_crate(tcx)); tcx.sess.time("check_mod_attrs", || { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/doctest.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/doctest.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/doctest.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/doctest.rs 2023-12-21 16:55:28.000000000 +0000 @@ -13,7 +13,7 @@ use rustc_resolve::rustdoc::span_of_fragments; use rustc_session::config::{self, CrateType, ErrorOutputType}; use rustc_session::parse::ParseSess; -use rustc_session::{lint, EarlyErrorHandler, Session}; +use rustc_session::{lint, Session}; use rustc_span::edition::Edition; use rustc_span::source_map::SourceMap; use rustc_span::symbol::sym; @@ -85,18 +85,13 @@ ..config::Options::default() }; - let early_error_handler = EarlyErrorHandler::new(ErrorOutputType::default()); - let mut cfgs = options.cfgs.clone(); cfgs.push("doc".to_owned()); cfgs.push("doctest".to_owned()); let config = interface::Config { opts: sessopts, - crate_cfg: interface::parse_cfgspecs(&early_error_handler, cfgs), - crate_check_cfg: interface::parse_check_cfg( - &early_error_handler, - options.check_cfgs.clone(), - ), + crate_cfg: cfgs, + crate_check_cfg: options.check_cfgs.clone(), input, output_file: None, output_dir: None, @@ -104,11 +99,13 @@ locale_resources: rustc_driver::DEFAULT_LOCALE_RESOURCES, lint_caps, parse_sess_created: None, + hash_untracked_state: None, register_lints: Some(Box::new(crate::lint::register_lints)), override_queries: None, make_codegen_backend: None, registry: rustc_driver::diagnostics_registry(), ice_file: None, + using_internal_features: Arc::default(), expanded_args: options.expanded_args.clone(), }; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/formats/cache.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/formats/cache.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/formats/cache.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/formats/cache.rs 2023-12-21 16:55:28.000000000 +0000 @@ -50,8 +50,8 @@ /// Unlike 'paths', this mapping ignores any renames that occur /// due to 'use' statements. /// - /// This map is used when writing out the special 'implementors' - /// javascript file. By using the exact path that the type + /// This map is used when writing out the `impl.trait` and `impl.type` + /// javascript files. By using the exact path that the type /// is declared with, we ensure that each path will be identical /// to the path used if the corresponding type is inlined. By /// doing this, we can detect duplicate impls on a trait page, and only display @@ -221,19 +221,25 @@ _ => self.cache.stripped_mod, }; + #[inline] + fn is_from_private_dep(tcx: TyCtxt<'_>, cache: &Cache, def_id: DefId) -> bool { + let krate = def_id.krate; + + cache.masked_crates.contains(&krate) || tcx.is_private_dep(krate) + } + // If the impl is from a masked crate or references something from a // masked crate then remove it completely. 
- if let clean::ImplItem(ref i) = *item.kind { - if self.cache.masked_crates.contains(&item.item_id.krate()) + if let clean::ImplItem(ref i) = *item.kind && + (self.cache.masked_crates.contains(&item.item_id.krate()) || i.trait_ .as_ref() - .map_or(false, |t| self.cache.masked_crates.contains(&t.def_id().krate)) + .map_or(false, |t| is_from_private_dep(self.tcx, self.cache, t.def_id())) || i.for_ .def_id(self.cache) - .map_or(false, |d| self.cache.masked_crates.contains(&d.krate)) - { - return None; - } + .map_or(false, |d| is_from_private_dep(self.tcx, self.cache, d))) + { + return None; } // Propagate a trait method's documentation to all implementors of the @@ -334,33 +340,37 @@ // A crate has a module at its root, containing all items, // which should not be indexed. The crate-item itself is // inserted later on when serializing the search-index. - if item.item_id.as_def_id().map_or(false, |idx| !idx.is_crate_root()) { + if item.item_id.as_def_id().map_or(false, |idx| !idx.is_crate_root()) + && let ty = item.type_() + && (ty != ItemType::StructField + || u16::from_str_radix(s.as_str(), 10).is_err()) + { let desc = short_markdown_summary(&item.doc_value(), &item.link_names(self.cache)); - let ty = item.type_(); - if ty != ItemType::StructField - || u16::from_str_radix(s.as_str(), 10).is_err() - { - // In case this is a field from a tuple struct, we don't add it into - // the search index because its name is something like "0", which is - // not useful for rustdoc search. - self.cache.search_index.push(IndexItem { - ty, - name: s, - path: join_with_double_colon(path), - desc, - parent, - parent_idx: None, - search_type: get_function_type_for_search( - &item, - self.tcx, - clean_impl_generics(self.cache.parent_stack.last()).as_ref(), - self.cache, - ), - aliases: item.attrs.get_doc_aliases(), - deprecation: item.deprecation(self.tcx), - }); - } + // In case this is a field from a tuple struct, we don't add it into + // the search index because its name is something like "0", which is + // not useful for rustdoc search. + self.cache.search_index.push(IndexItem { + ty, + name: s, + path: join_with_double_colon(path), + desc, + parent, + parent_idx: None, + impl_id: if let Some(ParentStackItem::Impl { item_id, .. }) = self.cache.parent_stack.last() { + item_id.as_def_id() + } else { + None + }, + search_type: get_function_type_for_search( + &item, + self.tcx, + clean_impl_generics(self.cache.parent_stack.last()).as_ref(), + self.cache, + ), + aliases: item.attrs.get_doc_aliases(), + deprecation: item.deprecation(self.tcx), + }); } } (Some(parent), None) if is_inherent_impl_item => { @@ -371,6 +381,13 @@ parent, item: item.clone(), impl_generics, + impl_id: if let Some(ParentStackItem::Impl { item_id, .. }) = + self.cache.parent_stack.last() + { + item_id.as_def_id() + } else { + None + }, }); } _ => {} @@ -541,6 +558,7 @@ pub(crate) struct OrphanImplItem { pub(crate) parent: DefId, + pub(crate) impl_id: Option, pub(crate) item: clean::Item, pub(crate) impl_generics: Option<(clean::Type, clean::Generics)>, } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/formats/item_type.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/formats/item_type.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/formats/item_type.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/formats/item_type.rs 2023-12-21 16:55:28.000000000 +0000 @@ -141,7 +141,7 @@ | DefKind::GlobalAsm | DefKind::Impl { .. 
} | DefKind::Closure - | DefKind::Generator => Self::ForeignType, + | DefKind::Coroutine => Self::ForeignType, } } } @@ -180,6 +180,9 @@ pub(crate) fn is_method(&self) -> bool { matches!(*self, ItemType::Method | ItemType::TyMethod) } + pub(crate) fn is_adt(&self) -> bool { + matches!(*self, ItemType::Struct | ItemType::Union | ItemType::Enum) + } } impl fmt::Display for ItemType { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/format.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/format.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/format.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/format.rs 2023-12-21 16:55:28.000000000 +0000 @@ -325,8 +325,7 @@ bounds_display.truncate(bounds_display.len() - " + ".len()); write!(f, "{}: {bounds_display}", lifetime.print()) } - // FIXME(fmease): Render bound params. - clean::WherePredicate::EqPredicate { lhs, rhs, bound_params: _ } => { + clean::WherePredicate::EqPredicate { lhs, rhs } => { if f.alternate() { write!(f, "{:#} == {:#}", lhs.print(cx), rhs.print(cx)) } else { @@ -848,7 +847,7 @@ fn primitive_link( f: &mut fmt::Formatter<'_>, prim: clean::PrimitiveType, - name: &str, + name: fmt::Arguments<'_>, cx: &Context<'_>, ) -> fmt::Result { primitive_link_fragment(f, prim, name, "", cx) @@ -857,7 +856,7 @@ fn primitive_link_fragment( f: &mut fmt::Formatter<'_>, prim: clean::PrimitiveType, - name: &str, + name: fmt::Arguments<'_>, fragment: &str, cx: &Context<'_>, ) -> fmt::Result { @@ -908,7 +907,7 @@ None => {} } } - f.write_str(name)?; + std::fmt::Display::fmt(&name, f)?; if needs_termination { write!(f, "")?; } @@ -978,9 +977,11 @@ } clean::Infer => write!(f, "_"), clean::Primitive(clean::PrimitiveType::Never) => { - primitive_link(f, PrimitiveType::Never, "!", cx) + primitive_link(f, PrimitiveType::Never, format_args!("!"), cx) + } + clean::Primitive(prim) => { + primitive_link(f, prim, format_args!("{}", prim.as_sym().as_str()), cx) } - clean::Primitive(prim) => primitive_link(f, prim, prim.as_sym().as_str(), cx), clean::BareFunction(ref decl) => { if f.alternate() { write!( @@ -999,16 +1000,16 @@ decl.unsafety.print_with_space(), print_abi_with_space(decl.abi) )?; - primitive_link(f, PrimitiveType::Fn, "fn", cx)?; + primitive_link(f, PrimitiveType::Fn, format_args!("fn"), cx)?; write!(f, "{}", decl.decl.print(cx)) } } clean::Tuple(ref typs) => { match &typs[..] { - &[] => primitive_link(f, PrimitiveType::Unit, "()", cx), + &[] => primitive_link(f, PrimitiveType::Unit, format_args!("()"), cx), [one] => { if let clean::Generic(name) = one { - primitive_link(f, PrimitiveType::Tuple, &format!("({name},)"), cx) + primitive_link(f, PrimitiveType::Tuple, format_args!("({name},)"), cx) } else { write!(f, "(")?; // Carry `f.alternate()` into this display w/o branching manually. 
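The `primitive_link`/`primitive_link_fragment` changes switch the link-text parameter from `&str` to `fmt::Arguments<'_>`, so callers pass `format_args!(…)` and the text is written straight into the formatter instead of being collected into an intermediate `String` first. A tiny stand-alone illustration of the pattern; `write_link` and its output shape are made up for the example:

```rust
use std::fmt::{self, Write};

// Accepting `fmt::Arguments` lets callers hand over `format_args!(...)`
// and avoids allocating a temporary String for the link text.
fn write_link(out: &mut String, href: &str, name: fmt::Arguments<'_>) -> fmt::Result {
    write!(out, "<a href=\"{href}\">{name}</a>")
}

fn main() -> fmt::Result {
    let mut out = String::new();
    // No intermediate `format!` allocation for the "[T; 4]" text.
    write_link(&mut out, "primitive.array.html", format_args!("[{}; {}]", "T", 4))?;
    assert_eq!(out, "<a href=\"primitive.array.html\">[T; 4]</a>");
    Ok(())
}
```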
@@ -1029,7 +1030,10 @@ primitive_link( f, PrimitiveType::Tuple, - &format!("({})", generic_names.iter().map(|s| s.as_str()).join(", ")), + format_args!( + "({})", + generic_names.iter().map(|s| s.as_str()).join(", ") + ), cx, ) } else { @@ -1048,7 +1052,7 @@ } clean::Slice(ref t) => match **t { clean::Generic(name) => { - primitive_link(f, PrimitiveType::Slice, &format!("[{name}]"), cx) + primitive_link(f, PrimitiveType::Slice, format_args!("[{name}]"), cx) } _ => { write!(f, "[")?; @@ -1060,7 +1064,7 @@ clean::Generic(name) if !f.alternate() => primitive_link( f, PrimitiveType::Array, - &format!("[{name}; {n}]", n = Escape(n)), + format_args!("[{name}; {n}]", n = Escape(n)), cx, ), _ => { @@ -1070,7 +1074,12 @@ write!(f, "; {n}")?; } else { write!(f, "; ")?; - primitive_link(f, PrimitiveType::Array, &format!("{n}", n = Escape(n)), cx)?; + primitive_link( + f, + PrimitiveType::Array, + format_args!("{n}", n = Escape(n)), + cx, + )?; } write!(f, "]") } @@ -1082,22 +1091,32 @@ }; if matches!(**t, clean::Generic(_)) || t.is_assoc_ty() { - let text = if f.alternate() { - format!("*{m} {ty:#}", ty = t.print(cx)) + let ty = t.print(cx); + if f.alternate() { + primitive_link( + f, + clean::PrimitiveType::RawPointer, + format_args!("*{m} {ty:#}"), + cx, + ) } else { - format!("*{m} {ty}", ty = t.print(cx)) - }; - primitive_link(f, clean::PrimitiveType::RawPointer, &text, cx) + primitive_link( + f, + clean::PrimitiveType::RawPointer, + format_args!("*{m} {ty}"), + cx, + ) + } } else { - primitive_link(f, clean::PrimitiveType::RawPointer, &format!("*{m} "), cx)?; + primitive_link(f, clean::PrimitiveType::RawPointer, format_args!("*{m} "), cx)?; fmt::Display::fmt(&t.print(cx), f) } } clean::BorrowedRef { lifetime: ref l, mutability, type_: ref ty } => { - let lt = match l { - Some(l) => format!("{} ", l.print()), - _ => String::new(), - }; + let lt = display_fn(|f| match l { + Some(l) => write!(f, "{} ", l.print()), + _ => Ok(()), + }); let m = mutability.print_with_space(); let amp = if f.alternate() { "&" } else { "&" }; @@ -1105,7 +1124,7 @@ return primitive_link( f, PrimitiveType::Reference, - &format!("{amp}{lt}{m}{name}"), + format_args!("{amp}{lt}{m}{name}"), cx, ); } @@ -1255,7 +1274,7 @@ { // Hardcoded anchor library/core/src/primitive_docs.rs // Link should match `# Trait implementations` - primitive_link_fragment(f, PrimitiveType::Tuple, &format!("({name}₁, {name}₂, …, {name}ₙ)"), "#trait-implementations-1", cx)?; + primitive_link_fragment(f, PrimitiveType::Tuple, format_args!("({name}₁, {name}₂, …, {name}ₙ)"), "#trait-implementations-1", cx)?; } else if let clean::BareFunction(bare_fn) = &self.for_ && let [clean::Argument { type_: clean::Type::Generic(name), .. }] = &bare_fn.decl.inputs.values[..] && (self.kind.is_fake_variadic() || self.kind.is_auto()) @@ -1282,7 +1301,7 @@ } else { "" }; - primitive_link_fragment(f, PrimitiveType::Tuple, &format!("fn ({name}₁, {name}₂, …, {name}ₙ{ellipsis})"), "#trait-implementations-1", cx)?; + primitive_link_fragment(f, PrimitiveType::Tuple, format_args!("fn ({name}₁, {name}₂, …, {name}ₙ{ellipsis})"), "#trait-implementations-1", cx)?; // Write output. 
if !bare_fn.decl.output.is_unit() { write!(f, " -> ")?; @@ -1666,7 +1685,12 @@ } let name = self.path.last(); if let hir::def::Res::PrimTy(p) = self.path.res { - primitive_link(f, PrimitiveType::from(p), name.as_str(), cx)?; + primitive_link( + f, + PrimitiveType::from(p), + format_args!("{}", name.as_str()), + cx, + )?; } else { f.write_str(name.as_str())?; } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/layout.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/layout.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/layout.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/layout.rs 2023-12-21 16:55:28.000000000 +0000 @@ -17,6 +17,7 @@ pub(crate) external_html: ExternalHtml, pub(crate) default_settings: FxHashMap, pub(crate) krate: String, + pub(crate) krate_version: String, /// The given user css file which allow to customize the generated /// documentation theme. pub(crate) css_file_extension: Option, @@ -31,6 +32,7 @@ pub(crate) static_root_path: Option<&'a str>, pub(crate) description: &'a str, pub(crate) resource_suffix: &'a str, + pub(crate) rust_logo: bool, } impl<'a> Page<'a> { @@ -54,9 +56,19 @@ themes: Vec, sidebar: String, content: String, - krate_with_trailing_slash: String, rust_channel: &'static str, pub(crate) rustdoc_version: &'a str, + // same as layout.krate, except on top-level pages like + // Settings, Help, All Crates, and About Scraped Examples, + // where these things instead give Rustdoc name and version. + // + // These are separate from the variables used for the search + // engine, because "Rustdoc" isn't necessarily a crate in + // the current workspace. + display_krate: &'a str, + display_krate_with_trailing_slash: String, + display_krate_version_number: &'a str, + display_krate_version_extra: &'a str, } pub(crate) fn render( @@ -66,12 +78,26 @@ t: T, style_files: &[StylePath], ) -> String { + let rustdoc_version = rustc_interface::util::version_str!().unwrap_or("unknown version"); + + let (display_krate, display_krate_version, display_krate_with_trailing_slash) = + if page.root_path == "./" { + // top level pages use Rust branding + ("Rustdoc", rustdoc_version, String::new()) + } else { + let display_krate_with_trailing_slash = + ensure_trailing_slash(&layout.krate).to_string(); + (&layout.krate[..], &layout.krate_version[..], display_krate_with_trailing_slash) + }; let static_root_path = page.get_static_root_path(); - let krate_with_trailing_slash = ensure_trailing_slash(&layout.krate).to_string(); + + // bootstrap passes in parts of the version separated by tabs, but other stuff might use spaces + let (display_krate_version_number, display_krate_version_extra) = + display_krate_version.split_once([' ', '\t']).unwrap_or((display_krate_version, "")); + let mut themes: Vec = style_files.iter().map(|s| s.basename().unwrap()).collect(); themes.sort(); - let rustdoc_version = rustc_interface::util::version_str!().unwrap_or("unknown version"); let content = Buffer::html().to_display(t); // Note: This must happen before making the sidebar. 
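In the `layout.rs` hunk, the crate version shown in the sidebar is split into a leading number and the remaining build detail with `split_once([' ', '\t'])`, because bootstrap separates the parts with tabs while other producers use spaces. The mechanism is plain `str::split_once`; the version strings below are example values only:

```rust
/// Split "1.75.0 (hash date)" into the version number and the extra detail.
/// Either a space or a tab is accepted as the separator.
fn split_version(version: &str) -> (&str, &str) {
    version.split_once([' ', '\t']).unwrap_or((version, ""))
}

fn main() {
    assert_eq!(
        split_version("1.75.0 (82e1608df 2023-12-21)"),
        ("1.75.0", "(82e1608df 2023-12-21)")
    );
    // No separator at all: everything is the version number.
    assert_eq!(split_version("1.75.0"), ("1.75.0", ""));
}
```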
let sidebar = Buffer::html().to_display(sidebar); PageLayout { @@ -82,7 +108,10 @@ themes, sidebar, content, - krate_with_trailing_slash, + display_krate, + display_krate_with_trailing_slash, + display_krate_version_number, + display_krate_version_extra, rust_channel: *crate::clean::utils::DOC_CHANNEL, rustdoc_version, } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/markdown.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/markdown.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/markdown.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/markdown.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1750,7 +1750,7 @@ } // do not actually include braces in the span let range = (open_brace + 1)..close_brace; - MarkdownLinkRange::Destination(range.clone()) + MarkdownLinkRange::Destination(range) }; let span_for_offset_forward = |span: Range, open: u8, close: u8| { @@ -1786,7 +1786,7 @@ } // do not actually include braces in the span let range = (open_brace + 1)..close_brace; - MarkdownLinkRange::Destination(range.clone()) + MarkdownLinkRange::Destination(range) }; let mut broken_link_callback = |link: BrokenLink<'md>| Some((link.reference, "".into())); @@ -2024,6 +2024,7 @@ map.insert("required-associated-consts".into(), 1); map.insert("required-methods".into(), 1); map.insert("provided-methods".into(), 1); + map.insert("object-safety".into(), 1); map.insert("implementors".into(), 1); map.insert("synthetic-implementors".into(), 1); map.insert("implementations-list".into(), 1); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/context.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/context.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/context.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/context.rs 2023-12-21 16:55:28.000000000 +0000 @@ -7,13 +7,10 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_hir::def_id::{DefIdMap, LOCAL_CRATE}; -use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams}; use rustc_middle::ty::TyCtxt; use rustc_session::Session; -use rustc_span::def_id::DefId; use rustc_span::edition::Edition; -use rustc_span::source_map::FileName; -use rustc_span::{sym, Symbol}; +use rustc_span::{sym, FileName, Symbol}; use super::print_item::{full_path, item_path, print_item}; use super::search_index::build_index; @@ -24,13 +21,14 @@ sidebar::{sidebar_module_like, Sidebar}, AllTypes, LinkFromSrc, StylePath, }; -use crate::clean::{self, types::ExternalLocation, ExternalCrate, TypeAliasItem}; +use crate::clean::utils::has_doc_flag; +use crate::clean::{self, types::ExternalLocation, ExternalCrate}; use crate::config::{ModuleSorting, RenderOptions}; use crate::docfs::{DocFS, PathError}; use crate::error::Error; use crate::formats::cache::Cache; use crate::formats::item_type::ItemType; -use crate::formats::{self, FormatRenderer}; +use crate::formats::FormatRenderer; use crate::html::escape::Escape; use crate::html::format::{join_with_double_colon, Buffer}; use crate::html::markdown::{self, plain_text_summary, ErrorCodes, IdMap}; @@ -149,53 +147,6 @@ pub(crate) fn edition(&self) -> Edition { self.tcx.sess.edition() } - - /// Returns a list of impls on the given type, and, if it's a type alias, - /// other types that it aliases. 
- pub(crate) fn all_impls_for_item<'a>( - &'a self, - it: &clean::Item, - did: DefId, - ) -> Vec<&'a formats::Impl> { - let tcx = self.tcx; - let cache = &self.cache; - let mut saw_impls = FxHashSet::default(); - let mut v: Vec<&formats::Impl> = cache - .impls - .get(&did) - .map(Vec::as_slice) - .unwrap_or(&[]) - .iter() - .filter(|i| saw_impls.insert(i.def_id())) - .collect(); - if let TypeAliasItem(ait) = &*it.kind && - let aliased_clean_type = ait.item_type.as_ref().unwrap_or(&ait.type_) && - let Some(aliased_type_defid) = aliased_clean_type.def_id(cache) && - let Some(av) = cache.impls.get(&aliased_type_defid) && - let Some(alias_def_id) = it.item_id.as_def_id() - { - // This branch of the compiler compares types structually, but does - // not check trait bounds. That's probably fine, since type aliases - // don't normally constrain on them anyway. - // https://github.com/rust-lang/rust/issues/21903 - // - // FIXME(lazy_type_alias): Once the feature is complete or stable, rewrite this to use type unification. - // Be aware of `tests/rustdoc/issue-112515-impl-ty-alias.rs` which might regress. - let aliased_ty = tcx.type_of(alias_def_id).skip_binder(); - let reject_cx = DeepRejectCtxt { - treat_obligation_params: TreatParams::AsCandidateKey, - }; - v.extend(av.iter().filter(|impl_| { - if let Some(impl_def_id) = impl_.impl_item.item_id.as_def_id() { - reject_cx.types_may_unify(aliased_ty, tcx.type_of(impl_def_id).skip_binder()) - && saw_impls.insert(impl_def_id) - } else { - false - } - })); - } - v - } } impl<'tcx> Context<'tcx> { @@ -277,6 +228,7 @@ title: &title, description: &desc, resource_suffix: &clone_shared.resource_suffix, + rust_logo: has_doc_flag(self.tcx(), LOCAL_CRATE.as_def_id(), sym::rust_logo), }; let mut page_buffer = Buffer::html(); print_item(self, it, &mut page_buffer, &page); @@ -528,12 +480,14 @@ if let Some(url) = playground_url { playground = Some(markdown::Playground { crate_name: Some(krate.name(tcx)), url }); } + let krate_version = cache.crate_version.as_deref().unwrap_or_default(); let mut layout = layout::Layout { logo: String::new(), favicon: String::new(), external_html, default_settings, krate: krate.name(tcx).to_string(), + krate_version: krate_version.to_string(), css_file_extension: extension_css, scrape_examples_extension: !call_locations.is_empty(), }; @@ -658,21 +612,22 @@ let shared = Rc::clone(&self.shared); let mut page = layout::Page { title: "List of all items in this crate", - css_class: "mod", + css_class: "mod sys", root_path: "../", static_root_path: shared.static_root_path.as_deref(), description: "List of all items in this crate", resource_suffix: &shared.resource_suffix, + rust_logo: has_doc_flag(self.tcx(), LOCAL_CRATE.as_def_id(), sym::rust_logo), }; let all = shared.all.replace(AllTypes::new()); let mut sidebar = Buffer::html(); let blocks = sidebar_module_like(all.item_sections()); let bar = Sidebar { - title_prefix: "Crate ", - title: crate_name.as_str(), + title_prefix: "", + title: "", is_crate: false, - version: "", + is_mod: false, blocks: vec![blocks], path: String::new(), }; @@ -689,9 +644,10 @@ shared.fs.write(final_file, v)?; // Generating settings page. - page.title = "Rustdoc settings"; + page.title = "Settings"; page.description = "Settings of Rustdoc"; page.root_path = "./"; + page.rust_logo = true; let sidebar = "

Settings

"; let v = layout::render( @@ -739,9 +695,10 @@ shared.fs.write(settings_file, v)?; // Generating help page. - page.title = "Rustdoc help"; + page.title = "Help"; page.description = "Documentation for Rustdoc"; page.root_path = "./"; + page.rust_logo = true; let sidebar = "

Help

"; let v = layout::render( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/mod.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/mod.rs 2023-12-21 16:55:28.000000000 +0000 @@ -48,13 +48,13 @@ use std::string::ToString; use askama::Template; -use rustc_attr::{ConstStability, Deprecation, StabilityLevel}; +use rustc_attr::{ConstStability, DeprecatedSince, Deprecation, StabilityLevel, StableSince}; use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_hir::def_id::{DefId, DefIdSet}; use rustc_hir::Mutability; -use rustc_middle::middle::stability; -use rustc_middle::ty::TyCtxt; +use rustc_middle::ty::{self, TyCtxt}; +use rustc_session::RustcVersion; use rustc_span::{ symbol::{sym, Symbol}, BytePos, FileName, RealFileName, @@ -102,6 +102,7 @@ pub(crate) desc: String, pub(crate) parent: Option, pub(crate) parent_idx: Option, + pub(crate) impl_id: Option, pub(crate) search_type: Option, pub(crate) aliases: Box<[Symbol]>, pub(crate) deprecation: Option, @@ -615,24 +616,22 @@ ) -> Vec { let mut extra_info = vec![]; - if let Some(depr @ Deprecation { note, since, is_since_rustc_version: _, suggestion: _ }) = - item.deprecation(cx.tcx()) - { + if let Some(depr @ Deprecation { note, since, suggestion: _ }) = item.deprecation(cx.tcx()) { // We display deprecation messages for #[deprecated], but only display // the future-deprecation messages for rustc versions. - let mut message = if let Some(since) = since { - let since = since.as_str(); - if !stability::deprecation_in_effect(&depr) { - if since == "TBD" { - String::from("Deprecating in a future Rust version") + let mut message = match since { + DeprecatedSince::RustcVersion(version) => { + if depr.is_in_effect() { + format!("Deprecated since {version}") } else { - format!("Deprecating in {}", Escape(since)) + format!("Deprecating in {version}") } - } else { - format!("Deprecated since {}", Escape(since)) } - } else { - String::from("Deprecated") + DeprecatedSince::Future => String::from("Deprecating in a future Rust version"), + DeprecatedSince::NonStandard(since) => { + format!("Deprecated since {}", Escape(since.as_str())) + } + DeprecatedSince::Unspecified | DeprecatedSince::Err => String::from("Deprecated"), }; if let Some(note) = note { @@ -867,10 +866,10 @@ let (indent, indent_str, end_newline) = if parent == ItemType::Trait { header_len += 4; let indent_str = " "; - write!(w, "{}", render_attributes_in_pre(meth, indent_str, tcx)); + write!(w, "{}", render_attributes_in_pre(meth, indent_str, cx)); (4, indent_str, Ending::NoNewline) } else { - render_attributes_in_code(w, meth, tcx); + render_attributes_in_code(w, meth, cx); (0, "", Ending::Newline) }; w.reserve(header_len + "{".len() + "".len()); @@ -910,13 +909,17 @@ /// consequence of the above rules. 
fn render_stability_since_raw_with_extra( w: &mut Buffer, - ver: Option, + ver: Option, const_stability: Option, - containing_ver: Option, - containing_const_ver: Option, + containing_ver: Option, + containing_const_ver: Option, extra_class: &str, ) -> bool { - let stable_version = ver.filter(|inner| !inner.is_empty() && Some(*inner) != containing_ver); + let stable_version = if ver != containing_ver && let Some(ver) = &ver { + since_to_string(ver) + } else { + None + }; let mut title = String::new(); let mut stability = String::new(); @@ -930,7 +933,8 @@ Some(ConstStability { level: StabilityLevel::Stable { since, .. }, .. }) if Some(since) != containing_const_ver => { - Some((format!("const since {since}"), format!("const: {since}"))) + since_to_string(&since) + .map(|since| (format!("const since {since}"), format!("const: {since}"))) } Some(ConstStability { level: StabilityLevel::Unstable { issue, .. }, feature, .. }) => { let unstable = if let Some(n) = issue { @@ -970,13 +974,21 @@ !stability.is_empty() } +fn since_to_string(since: &StableSince) -> Option { + match since { + StableSince::Version(since) => Some(since.to_string()), + StableSince::Current => Some(RustcVersion::CURRENT.to_string()), + StableSince::Err => None, + } +} + #[inline] fn render_stability_since_raw( w: &mut Buffer, - ver: Option, + ver: Option, const_stability: Option, - containing_ver: Option, - containing_const_ver: Option, + containing_ver: Option, + containing_const_ver: Option, ) -> bool { render_stability_since_raw_with_extra( w, @@ -1046,13 +1058,13 @@ // When an attribute is rendered inside a `
<pre>` tag, it is formatted using
 // a whitespace prefix and newline.
-fn render_attributes_in_pre<'a, 'b: 'a>(
+fn render_attributes_in_pre<'a, 'tcx: 'a>(
     it: &'a clean::Item,
     prefix: &'a str,
-    tcx: TyCtxt<'b>,
-) -> impl fmt::Display + Captures<'a> + Captures<'b> {
+    cx: &'a Context<'tcx>,
+) -> impl fmt::Display + Captures<'a> + Captures<'tcx> {
     crate::html::format::display_fn(move |f| {
-        for a in it.attributes(tcx, false) {
+        for a in it.attributes(cx.tcx(), cx.cache(), false) {
             writeln!(f, "{prefix}{a}")?;
         }
         Ok(())
@@ -1061,8 +1073,8 @@
 
 // When an attribute is rendered inside a <code> tag, it is formatted using
 // a div to produce a newline after it.
-fn render_attributes_in_code(w: &mut impl fmt::Write, it: &clean::Item, tcx: TyCtxt<'_>) {
-    for attr in it.attributes(tcx, false) {
+fn render_attributes_in_code(w: &mut impl fmt::Write, it: &clean::Item, cx: &Context<'_>) {
+    for attr in it.attributes(cx.tcx(), cx.cache(), false) {
         write!(w, "
{attr}
").unwrap(); } } @@ -1131,13 +1143,13 @@ fn render_assoc_items<'a, 'cx: 'a>( cx: &'a mut Context<'cx>, containing_item: &'a clean::Item, - did: DefId, + it: DefId, what: AssocItemRender<'a>, ) -> impl fmt::Display + 'a + Captures<'cx> { let mut derefs = DefIdSet::default(); - derefs.insert(did); + derefs.insert(it); display_fn(move |f| { - render_assoc_items_inner(f, cx, containing_item, did, what, &mut derefs); + render_assoc_items_inner(f, cx, containing_item, it, what, &mut derefs); Ok(()) }) } @@ -1146,17 +1158,15 @@ mut w: &mut dyn fmt::Write, cx: &mut Context<'_>, containing_item: &clean::Item, - did: DefId, + it: DefId, what: AssocItemRender<'_>, derefs: &mut DefIdSet, ) { info!("Documenting associated items of {:?}", containing_item.name); let shared = Rc::clone(&cx.shared); - let v = shared.all_impls_for_item(containing_item, did); - let v = v.as_slice(); - let (non_trait, traits): (Vec<&Impl>, _) = - v.iter().partition(|i| i.inner_impl().trait_.is_none()); - let mut saw_impls = FxHashSet::default(); + let cache = &shared.cache; + let Some(v) = cache.impls.get(&it) else { return }; + let (non_trait, traits): (Vec<_>, _) = v.iter().partition(|i| i.inner_impl().trait_.is_none()); if !non_trait.is_empty() { let mut tmp_buf = Buffer::html(); let (render_mode, id, class_html) = match what { @@ -1185,9 +1195,6 @@ }; let mut impls_buf = Buffer::html(); for i in &non_trait { - if !saw_impls.insert(i.def_id()) { - continue; - } render_impl( &mut impls_buf, cx, @@ -1233,10 +1240,8 @@ let (synthetic, concrete): (Vec<&Impl>, Vec<&Impl>) = traits.into_iter().partition(|t| t.inner_impl().kind.is_auto()); - let (blanket_impl, concrete): (Vec<&Impl>, _) = concrete - .into_iter() - .filter(|t| saw_impls.insert(t.def_id())) - .partition(|t| t.inner_impl().kind.is_blanket()); + let (blanket_impl, concrete): (Vec<&Impl>, _) = + concrete.into_iter().partition(|t| t.inner_impl().kind.is_blanket()); render_all_impls(w, cx, containing_item, &concrete, &synthetic, &blanket_impl); } @@ -1877,7 +1882,7 @@ aliases: &[String], ) { let inner_impl = i.inner_impl(); - let id = cx.derive_id(get_id_for_impl(&inner_impl.for_, inner_impl.trait_.as_ref(), cx)); + let id = cx.derive_id(get_id_for_impl(cx.tcx(), i.impl_item.item_id)); let aliases = if aliases.is_empty() { String::new() } else { @@ -1994,21 +1999,35 @@ } } -fn get_id_for_impl(for_: &clean::Type, trait_: Option<&clean::Path>, cx: &Context<'_>) -> String { - match trait_ { - Some(t) => small_url_encode(format!("impl-{:#}-for-{:#}", t.print(cx), for_.print(cx))), - None => small_url_encode(format!("impl-{:#}", for_.print(cx))), - } +fn get_id_for_impl<'tcx>(tcx: TyCtxt<'tcx>, impl_id: ItemId) -> String { + use rustc_middle::ty::print::with_forced_trimmed_paths; + let (type_, trait_) = match impl_id { + ItemId::Auto { trait_, for_ } => { + let ty = tcx.type_of(for_).skip_binder(); + (ty, Some(ty::TraitRef::new(tcx, trait_, [ty]))) + } + ItemId::Blanket { impl_id, .. 
} | ItemId::DefId(impl_id) => { + match tcx.impl_subject(impl_id).skip_binder() { + ty::ImplSubject::Trait(trait_ref) => { + (trait_ref.args[0].expect_ty(), Some(trait_ref)) + } + ty::ImplSubject::Inherent(ty) => (ty, None), + } + } + }; + with_forced_trimmed_paths!(small_url_encode(if let Some(trait_) = trait_ { + format!("impl-{trait_}-for-{type_}", trait_ = trait_.print_only_trait_path()) + } else { + format!("impl-{type_}") + })) } fn extract_for_impl_name(item: &clean::Item, cx: &Context<'_>) -> Option<(String, String)> { match *item.kind { - clean::ItemKind::ImplItem(ref i) => { - i.trait_.as_ref().map(|trait_| { - // Alternative format produces no URLs, - // so this parameter does nothing. - (format!("{:#}", i.for_.print(cx)), get_id_for_impl(&i.for_, Some(trait_), cx)) - }) + clean::ItemKind::ImplItem(ref i) if i.trait_.is_some() => { + // Alternative format produces no URLs, + // so this parameter does nothing. + Some((format!("{:#}", i.for_.print(cx)), get_id_for_impl(cx.tcx(), item.item_id))) } _ => None, } @@ -2079,6 +2098,7 @@ const ALL: &'static [Self] = { use ItemSection::*; // NOTE: The order here affects the order in the UI. + // Keep this synchronized with addSidebarItems in main.js &[ Reexports, PrimitiveTypes, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/print_item.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/print_item.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/print_item.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/print_item.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,10 +5,12 @@ use rustc_hir as hir; use rustc_hir::def::CtorKind; use rustc_hir::def_id::DefId; -use rustc_middle::middle::stability; +use rustc_index::IndexVec; +use rustc_middle::query::Key; use rustc_middle::ty::{self, TyCtxt}; use rustc_span::hygiene::MacroKind; use rustc_span::symbol::{kw, sym, Symbol}; +use rustc_target::abi::VariantIdx; use std::cell::{RefCell, RefMut}; use std::cmp::Ordering; use std::fmt; @@ -117,8 +119,7 @@ fn render_attributes_in_pre<'b>(&'b self) -> impl fmt::Display + Captures<'a> + 'b + Captures<'cx> { display_fn(move |f| { let (item, cx) = self.item_and_mut_cx(); - let tcx = cx.tcx(); - let v = render_attributes_in_pre(item, "", tcx); + let v = render_attributes_in_pre(item, "", &cx); write!(f, "{v}") }) } @@ -589,11 +590,7 @@ // The trailing space after each tag is to space it properly against the rest of the docs. 
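The `get_id_for_impl` rewrite earlier in this hunk derives the anchor from the compiler's `ImplSubject` rather than the clean AST, but the resulting ID keeps the `impl-Trait-for-Type` shape before URL encoding. A rough sketch of just the ID construction, with a simplistic stand-in for rustdoc's `small_url_encode`:

```rust
/// Stand-in for `small_url_encode`: make the ID URL- and anchor-friendly
/// by escaping the characters that commonly appear in type paths.
fn encode_id(raw: &str) -> String {
    raw.chars()
        .map(|c| match c {
            '<' => "%3C".to_string(),
            '>' => "%3E".to_string(),
            ',' => "%2C".to_string(),
            ' ' => "+".to_string(),
            c => c.to_string(),
        })
        .collect()
}

/// Build the anchor for an impl block: trait impls get "impl-Trait-for-Type",
/// inherent impls just "impl-Type".
fn impl_anchor(trait_: Option<&str>, self_ty: &str) -> String {
    match trait_ {
        Some(t) => encode_id(&format!("impl-{t}-for-{self_ty}")),
        None => encode_id(&format!("impl-{self_ty}")),
    }
}

fn main() {
    assert_eq!(impl_anchor(Some("Display"), "Foo"), "impl-Display-for-Foo");
    assert_eq!(impl_anchor(Some("From<u8>"), "Foo"), "impl-From%3Cu8%3E-for-Foo");
    assert_eq!(impl_anchor(None, "Foo"), "impl-Foo");
}
```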
if let Some(depr) = &item.deprecation(tcx) { - let message = if stability::deprecation_in_effect(depr) { - "Deprecated" - } else { - "Deprecation planned" - }; + let message = if depr.is_in_effect() { "Deprecated" } else { "Deprecation planned" }; write!(f, "{}", tag_html("deprecated", "", message))?; } @@ -656,7 +653,7 @@ w, "{attrs}{vis}{constness}{asyncness}{unsafety}{abi}fn \ {name}{generics}{decl}{notable_traits}{where_clause}", - attrs = render_attributes_in_pre(it, "", tcx), + attrs = render_attributes_in_pre(it, "", cx), vis = visibility, constness = constness, asyncness = asyncness, @@ -691,7 +688,7 @@ write!( w, "{attrs}{vis}{unsafety}{is_auto}trait {name}{generics}{bounds}", - attrs = render_attributes_in_pre(it, "", tcx), + attrs = render_attributes_in_pre(it, "", cx), vis = visibility_print_with_space(it.visibility(tcx), it.item_id, cx), unsafety = t.unsafety(tcx).print_with_space(), is_auto = if t.is_auto(tcx) { "auto " } else { "" }, @@ -957,6 +954,21 @@ let cloned_shared = Rc::clone(&cx.shared); let cache = &cloned_shared.cache; let mut extern_crates = FxHashSet::default(); + + if !t.is_object_safe(cx.tcx()) { + write_small_section_header( + w, + "object-safety", + "Object Safety", + &format!( + "
This trait is not \ + \ + object safe.
", + base = crate::clean::utils::DOC_RUST_LANG_ORG_CHANNEL + ), + ); + } + if let Some(implementors) = cache.implementors.get(&it.item_id.expect_def_id()) { // The DefId is for the first Type found with that name. The bool is // if any Types with the same name but different DefId have been found. @@ -979,7 +991,7 @@ } } - let (local, foreign) = + let (local, mut foreign) = implementors.iter().partition::, _>(|i| i.is_on_local_type(cx)); let (mut synthetic, mut concrete): (Vec<&&Impl>, Vec<&&Impl>) = @@ -987,6 +999,7 @@ synthetic.sort_by_cached_key(|i| ImplString::new(i, cx)); concrete.sort_by_cached_key(|i| ImplString::new(i, cx)); + foreign.sort_by_cached_key(|i| ImplString::new(i, cx)); if !foreign.is_empty() { write_small_section_header(w, "foreign-impls", "Implementations on Foreign Types", ""); @@ -1064,6 +1077,8 @@ } } + // [RUSTDOCIMPL] trait.impl + // // Include implementors in crates that depend on the current crate. // // This is complicated by the way rustdoc is invoked, which is basically @@ -1099,7 +1114,7 @@ // ``` // // Basically, we want `C::Baz` and `A::Foo` to show the same set of - // impls, which is easier if they both treat `/implementors/A/trait.Foo.js` + // impls, which is easier if they both treat `/trait.impl/A/trait.Foo.js` // as the Single Source of Truth. // // We also want the `impl Baz for Quux` to be written to @@ -1108,7 +1123,7 @@ // because that'll load faster, and it's better for SEO. And we don't want // the same impl to show up twice on the same page. // - // To make this work, the implementors JS file has a structure kinda + // To make this work, the trait.impl/A/trait.Foo.js JS file has a structure kinda // like this: // // ```js @@ -1125,7 +1140,7 @@ // So C's HTML will have something like this: // // ```html - // // ``` // @@ -1135,7 +1150,7 @@ // [JSONP]: https://en.wikipedia.org/wiki/JSONP let mut js_src_path: UrlPartsBuilder = std::iter::repeat("..") .take(cx.current.len()) - .chain(std::iter::once("implementors")) + .chain(std::iter::once("trait.impl")) .collect(); if let Some(did) = it.item_id.as_def_id() && let get_extern = { || cache.external_paths.get(&did).map(|s| &s.0) } && @@ -1170,7 +1185,7 @@ write!( w, "{attrs}trait {name}{generics}{where_b} = {bounds};", - attrs = render_attributes_in_pre(it, "", cx.tcx()), + attrs = render_attributes_in_pre(it, "", cx), name = it.name.unwrap(), generics = t.generics.print(cx), where_b = print_where_clause(&t.generics, cx, 0, Ending::Newline), @@ -1198,7 +1213,7 @@ write!( w, "{attrs}type {name}{generics}{where_clause} = impl {bounds};", - attrs = render_attributes_in_pre(it, "", cx.tcx()), + attrs = render_attributes_in_pre(it, "", cx), name = it.name.unwrap(), generics = t.generics.print(cx), where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline), @@ -1223,7 +1238,7 @@ write!( w, "{attrs}{vis}type {name}{generics}{where_clause} = {type_};", - attrs = render_attributes_in_pre(it, "", cx.tcx()), + attrs = render_attributes_in_pre(it, "", cx), vis = visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx), name = it.name.unwrap(), generics = t.generics.print(cx), @@ -1247,6 +1262,9 @@ match inner_type { clean::TypeAliasInnerType::Enum { variants, is_non_exhaustive } => { let variants_iter = || variants.iter().filter(|i| !i.is_stripped()); + let ty = cx.tcx().type_of(it.def_id().unwrap()).instantiate_identity(); + let enum_def_id = ty.ty_adt_id().unwrap(); + wrap_item(w, |w| { let variants_len = variants.len(); let variants_count = variants_iter().count(); @@ -1257,13 
+1275,14 @@ w, cx, Some(&t.generics), - variants_iter(), + &variants, variants_count, has_stripped_entries, *is_non_exhaustive, + enum_def_id, ) }); - item_variants(w, cx, it, variants_iter()); + item_variants(w, cx, it, &variants, enum_def_id); } clean::TypeAliasInnerType::Union { fields } => { wrap_item(w, |w| { @@ -1313,6 +1332,102 @@ // we need #14072 to make sense of the generics. write!(w, "{}", render_assoc_items(cx, it, def_id, AssocItemRender::All)); write!(w, "{}", document_type_layout(cx, def_id)); + + // [RUSTDOCIMPL] type.impl + // + // Include type definitions from the alias target type. + // + // Earlier versions of this code worked by having `render_assoc_items` + // include this data directly. That generates *O*`(types*impls)` of HTML + // text, and some real crates have a lot of types and impls. + // + // To create the same UX without generating half a gigabyte of HTML for a + // crate that only contains 20 megabytes of actual documentation[^115718], + // rustdoc stashes these type-alias-inlined docs in a [JSONP] + // "database-lite". The file itself is generated in `write_shared.rs`, + // and hooks into functions provided by `main.js`. + // + // The format of `trait.impl` and `type.impl` JS files are superficially + // similar. Each line, except the JSONP wrapper itself, belongs to a crate, + // and they are otherwise separate (rustdoc should be idempotent). The + // "meat" of the file is HTML strings, so the frontend code is very simple. + // Links are relative to the doc root, though, so the frontend needs to fix + // that up, and inlined docs can reuse these files. + // + // However, there are a few differences, caused by the sophisticated + // features that type aliases have. Consider this crate graph: + // + // ```text + // --------------------------------- + // | crate A: struct Foo | + // | type Bar = Foo | + // | impl X for Foo | + // | impl Y for Foo | + // --------------------------------- + // | + // ---------------------------------- + // | crate B: type Baz = A::Foo | + // | type Xyy = A::Foo | + // | impl Z for Xyy | + // ---------------------------------- + // ``` + // + // The type.impl/A/struct.Foo.js JS file has a structure kinda like this: + // + // ```js + // JSONP({ + // "A": [["impl Y for Foo", "Y", "A::Bar"]], + // "B": [["impl X for Foo", "X", "B::Baz", "B::Xyy"], ["impl Z for Xyy", "Z", "B::Baz"]], + // }); + // ``` + // + // When the type.impl file is loaded, only the current crate's docs are + // actually used. The main reason to bundle them together is that there's + // enough duplication in them for DEFLATE to remove the redundancy. + // + // The contents of a crate are a list of impl blocks, themselves + // represented as lists. The first item in the sublist is the HTML block, + // the second item is the name of the trait (which goes in the sidebar), + // and all others are the names of type aliases that successfully match. + // + // This way: + // + // - There's no need to generate these files for types that have no aliases + // in the current crate. If a dependent crate makes a type alias, it'll + // take care of generating its own docs. + // - There's no need to reimplement parts of the type checker in + // JavaScript. The Rust backend does the checking, and includes its + // results in the file. + // - Docs defined directly on the type alias are dropped directly in the + // HTML by `render_assoc_items`, and are accessible without JavaScript. 
+ // The JSONP file will not list impl items that are known to be part + // of the main HTML file already. + // + // [JSONP]: https://en.wikipedia.org/wiki/JSONP + // [^115718]: https://github.com/rust-lang/rust/issues/115718 + let cloned_shared = Rc::clone(&cx.shared); + let cache = &cloned_shared.cache; + if let Some(target_did) = t.type_.def_id(cache) && + let get_extern = { || cache.external_paths.get(&target_did) } && + let Some(&(ref target_fqp, target_type)) = cache.paths.get(&target_did).or_else(get_extern) && + target_type.is_adt() && // primitives cannot be inlined + let Some(self_did) = it.item_id.as_def_id() && + let get_local = { || cache.paths.get(&self_did).map(|(p, _)| p) } && + let Some(self_fqp) = cache.exact_paths.get(&self_did).or_else(get_local) + { + let mut js_src_path: UrlPartsBuilder = std::iter::repeat("..") + .take(cx.current.len()) + .chain(std::iter::once("type.impl")) + .collect(); + js_src_path.extend(target_fqp[..target_fqp.len() - 1].iter().copied()); + js_src_path.push_fmt(format_args!("{target_type}.{}.js", target_fqp.last().unwrap())); + let self_path = self_fqp.iter().map(Symbol::as_str).collect::>().join("::"); + write!( + w, + "", + src = js_src_path.finish(), + ); + } } fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Union) { @@ -1408,7 +1523,7 @@ let tcx = cx.tcx(); let count_variants = e.variants().count(); wrap_item(w, |w| { - render_attributes_in_code(w, it, tcx); + render_attributes_in_code(w, it, cx); write!( w, "{}enum {}{}", @@ -1416,36 +1531,90 @@ it.name.unwrap(), e.generics.print(cx), ); + render_enum_fields( w, cx, Some(&e.generics), - e.variants(), + &e.variants, count_variants, e.has_stripped_entries(), it.is_non_exhaustive(), + it.def_id().unwrap(), ); }); write!(w, "{}", document(cx, it, None, HeadingOffset::H2)); if count_variants != 0 { - item_variants(w, cx, it, e.variants()); + item_variants(w, cx, it, &e.variants, it.def_id().unwrap()); } let def_id = it.item_id.expect_def_id(); write!(w, "{}", render_assoc_items(cx, it, def_id, AssocItemRender::All)); write!(w, "{}", document_type_layout(cx, def_id)); } -fn render_enum_fields<'a>( +/// It'll return false if any variant is not a C-like variant. Otherwise it'll return true if at +/// least one of them has an explicit discriminant or if the enum has `#[repr(C)]` or an integer +/// `repr`. 
+fn should_show_enum_discriminant( + cx: &Context<'_>, + enum_def_id: DefId, + variants: &IndexVec, +) -> bool { + let mut has_variants_with_value = false; + for variant in variants { + if let clean::VariantItem(ref var) = *variant.kind && + matches!(var.kind, clean::VariantKind::CLike) + { + has_variants_with_value |= var.discriminant.is_some(); + } else { + return false; + } + } + if has_variants_with_value { + return true; + } + let repr = cx.tcx().adt_def(enum_def_id).repr(); + repr.c() || repr.int.is_some() +} + +fn display_c_like_variant( + w: &mut Buffer, + cx: &mut Context<'_>, + item: &clean::Item, + variant: &clean::Variant, + index: VariantIdx, + should_show_enum_discriminant: bool, + enum_def_id: DefId, +) { + let name = item.name.unwrap(); + if let Some(ref value) = variant.discriminant { + write!(w, "{} = {}", name.as_str(), value.value(cx.tcx(), true)); + } else if should_show_enum_discriminant { + let adt_def = cx.tcx().adt_def(enum_def_id); + let discr = adt_def.discriminant_for_variant(cx.tcx(), index); + if discr.ty.is_signed() { + write!(w, "{} = {}", name.as_str(), discr.val as i128); + } else { + write!(w, "{} = {}", name.as_str(), discr.val); + } + } else { + w.write_str(name.as_str()); + } +} + +fn render_enum_fields( mut w: &mut Buffer, cx: &mut Context<'_>, g: Option<&clean::Generics>, - variants: impl Iterator, + variants: &IndexVec, count_variants: usize, has_stripped_entries: bool, is_non_exhaustive: bool, + enum_def_id: DefId, ) { + let should_show_enum_discriminant = should_show_enum_discriminant(cx, enum_def_id, variants); if !g.is_some_and(|g| print_where_clause_and_check(w, g, cx)) { // If there wasn't a `where` clause, we add a whitespace. w.write_str(" "); @@ -1461,15 +1630,24 @@ toggle_open(&mut w, format_args!("{count_variants} variants")); } const TAB: &str = " "; - for v in variants { + for (index, v) in variants.iter_enumerated() { + if v.is_stripped() { + continue; + } w.write_str(TAB); - let name = v.name.unwrap(); match *v.kind { - // FIXME(#101337): Show discriminant clean::VariantItem(ref var) => match var.kind { - clean::VariantKind::CLike => w.write_str(name.as_str()), + clean::VariantKind::CLike => display_c_like_variant( + w, + cx, + v, + var, + index, + should_show_enum_discriminant, + enum_def_id, + ), clean::VariantKind::Tuple(ref s) => { - write!(w, "{name}({})", print_tuple_struct_fields(cx, s),); + write!(w, "{}({})", v.name.unwrap(), print_tuple_struct_fields(cx, s)); } clean::VariantKind::Struct(ref s) => { render_struct(w, v, None, None, &s.fields, TAB, false, cx); @@ -1490,11 +1668,12 @@ } } -fn item_variants<'a>( +fn item_variants( w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, - variants: impl Iterator, + variants: &IndexVec, + enum_def_id: DefId, ) { let tcx = cx.tcx(); write!( @@ -1507,7 +1686,11 @@ document_non_exhaustive_header(it), document_non_exhaustive(it) ); - for variant in variants { + let should_show_enum_discriminant = should_show_enum_discriminant(cx, enum_def_id, variants); + for (index, variant) in variants.iter_enumerated() { + if variant.is_stripped() { + continue; + } let id = cx.derive_id(format!("{}.{}", ItemType::Variant, variant.name.unwrap())); write!( w, @@ -1522,7 +1705,22 @@ it.const_stable_since(tcx), " rightside", ); - write!(w, "

{name}", name = variant.name.unwrap()); + w.write_str("

"); + if let clean::VariantItem(ref var) = *variant.kind && + let clean::VariantKind::CLike = var.kind + { + display_c_like_variant( + w, + cx, + variant, + var, + index, + should_show_enum_discriminant, + enum_def_id, + ); + } else { + w.write_str(variant.name.unwrap().as_str()); + } let clean::VariantItem(variant_data) = &*variant.kind else { unreachable!() }; @@ -1644,7 +1842,7 @@ fn item_constant(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, c: &clean::Constant) { wrap_item(w, |w| { let tcx = cx.tcx(); - render_attributes_in_code(w, it, tcx); + render_attributes_in_code(w, it, cx); write!( w, @@ -1693,7 +1891,7 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Struct) { wrap_item(w, |w| { - render_attributes_in_code(w, it, cx.tcx()); + render_attributes_in_code(w, it, cx); render_struct(w, it, Some(&s.generics), s.ctor_kind, &s.fields, "", true, cx); }); @@ -1753,7 +1951,7 @@ fn item_static(w: &mut impl fmt::Write, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Static) { wrap_item(w, |buffer| { - render_attributes_in_code(buffer, it, cx.tcx()); + render_attributes_in_code(buffer, it, cx); write!( buffer, "{vis}static {mutability}{name}: {typ}", @@ -1771,7 +1969,7 @@ fn item_foreign_type(w: &mut impl fmt::Write, cx: &mut Context<'_>, it: &clean::Item) { wrap_item(w, |buffer| { buffer.write_str("extern {\n").unwrap(); - render_attributes_in_code(buffer, it, cx.tcx()); + render_attributes_in_code(buffer, it, cx); write!( buffer, " {}type {};\n}}", diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/search_index.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/search_index.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/search_index.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/search_index.rs 2023-12-21 16:55:28.000000000 +0000 @@ -12,7 +12,7 @@ use crate::formats::item_type::ItemType; use crate::html::format::join_with_double_colon; use crate::html::markdown::short_markdown_summary; -use crate::html::render::{IndexItem, IndexItemFunctionType, RenderType, RenderTypeId}; +use crate::html::render::{self, IndexItem, IndexItemFunctionType, RenderType, RenderTypeId}; /// Builds the search index from the collected metadata pub(crate) fn build_index<'tcx>( @@ -26,7 +26,8 @@ // Attach all orphan items to the type's definition if the type // has since been learned. 
- for &OrphanImplItem { parent, ref item, ref impl_generics } in &cache.orphan_impl_items { + for &OrphanImplItem { impl_id, parent, ref item, ref impl_generics } in &cache.orphan_impl_items + { if let Some((fqp, _)) = cache.paths.get(&parent) { let desc = short_markdown_summary(&item.doc_value(), &item.link_names(cache)); cache.search_index.push(IndexItem { @@ -36,6 +37,7 @@ desc, parent: Some(parent), parent_idx: None, + impl_id, search_type: get_function_type_for_search(item, tcx, impl_generics.as_ref(), cache), aliases: item.attrs.get_doc_aliases(), deprecation: item.deprecation(tcx), @@ -222,6 +224,29 @@ }) .collect(); + // Find associated items that need disambiguators + let mut associated_item_duplicates = FxHashMap::<(isize, ItemType, Symbol), usize>::default(); + + for &item in &crate_items { + if item.impl_id.is_some() && let Some(parent_idx) = item.parent_idx { + let count = associated_item_duplicates + .entry((parent_idx, item.ty, item.name)) + .or_insert(0); + *count += 1; + } + } + + let associated_item_disambiguators = crate_items + .iter() + .enumerate() + .filter_map(|(index, item)| { + let impl_id = ItemId::DefId(item.impl_id?); + let parent_idx = item.parent_idx?; + let count = *associated_item_duplicates.get(&(parent_idx, item.ty, item.name))?; + if count > 1 { Some((index, render::get_id_for_impl(tcx, impl_id))) } else { None } + }) + .collect::>(); + struct CrateData<'a> { doc: String, items: Vec<&'a IndexItem>, @@ -230,6 +255,8 @@ // // To be noted: the `usize` elements are indexes to `items`. aliases: &'a BTreeMap>, + // Used when a type has more than one impl with an associated item with the same name. + associated_item_disambiguators: &'a Vec<(usize, String)>, } struct Paths { @@ -382,6 +409,7 @@ crate_data.serialize_field("f", &functions)?; crate_data.serialize_field("c", &deprecated)?; crate_data.serialize_field("p", &paths)?; + crate_data.serialize_field("b", &self.associated_item_disambiguators)?; if has_aliases { crate_data.serialize_field("a", &self.aliases)?; } @@ -398,6 +426,7 @@ items: crate_items, paths: crate_paths, aliases: &aliases, + associated_item_disambiguators: &associated_item_disambiguators, }) .expect("failed serde conversion") // All these `replace` calls are because we have to go through JS string for JSON content. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/sidebar.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/sidebar.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/sidebar.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/sidebar.rs 2023-12-21 16:55:28.000000000 +0000 @@ -19,7 +19,7 @@ pub(super) title_prefix: &'static str, pub(super) title: &'a str, pub(super) is_crate: bool, - pub(super) version: &'a str, + pub(super) is_mod: bool, pub(super) blocks: Vec>, pub(super) path: String, } @@ -38,18 +38,19 @@ /// as well as the link to it, e.g. `#implementations`. /// Will be rendered inside an `

` tag heading: Link<'a>, + class: &'static str, links: Vec>, /// Render the heading even if there are no links force_render: bool, } impl<'a> LinkBlock<'a> { - pub fn new(heading: Link<'a>, links: Vec>) -> Self { - Self { heading, links, force_render: false } + pub fn new(heading: Link<'a>, class: &'static str, links: Vec>) -> Self { + Self { heading, links, class, force_render: false } } - pub fn forced(heading: Link<'a>) -> Self { - Self { heading, links: vec![], force_render: true } + pub fn forced(heading: Link<'a>, class: &'static str) -> Self { + Self { heading, links: vec![], class, force_render: true } } pub fn should_render(&self) -> bool { @@ -99,12 +100,12 @@ || it.is_primitive() || it.is_union() || it.is_enum() - || it.is_mod() + // crate title is displayed as part of logo lockup + || (it.is_mod() && !it.is_crate()) || it.is_type_alias() { ( match *it.kind { - clean::ModuleItem(..) if it.is_crate() => "Crate ", clean::ModuleItem(..) => "Module ", _ => "", }, @@ -113,14 +114,22 @@ } else { ("", "") }; - let version = - if it.is_crate() { cx.cache().crate_version.as_deref().unwrap_or_default() } else { "" }; - let path: String = if !it.is_mod() { - cx.current.iter().map(|s| s.as_str()).intersperse("::").collect() + // need to show parent path header if: + // - it's a child module, instead of the crate root + // - there's a sidebar section for the item itself + // + // otherwise, the parent path header is redundant with the big crate + // branding area at the top of the sidebar + let sidebar_path = + if it.is_mod() { &cx.current[..cx.current.len() - 1] } else { &cx.current[..] }; + let path: String = if sidebar_path.len() > 1 || !title.is_empty() { + let path = sidebar_path.iter().map(|s| s.as_str()).intersperse("::").collect(); + if sidebar_path.len() == 1 { format!("crate {path}") } else { path } } else { "".into() }; - let sidebar = Sidebar { title_prefix, title, is_crate: it.is_crate(), version, blocks, path }; + let sidebar = + Sidebar { title_prefix, title, is_mod: it.is_mod(), is_crate: it.is_crate(), blocks, path }; sidebar.render_into(buffer).unwrap(); } @@ -149,7 +158,7 @@ }; let mut items = vec![]; if let Some(name) = field_name { - items.push(LinkBlock::new(Link::new("fields", name), fields)); + items.push(LinkBlock::new(Link::new("fields", name), "structfield", fields)); } sidebar_assoc_items(cx, it, &mut items); items @@ -206,12 +215,23 @@ ("foreign-impls", "Implementations on Foreign Types", foreign_impls), ] .into_iter() - .map(|(id, title, items)| LinkBlock::new(Link::new(id, title), items)) + .map(|(id, title, items)| LinkBlock::new(Link::new(id, title), "", items)) .collect(); sidebar_assoc_items(cx, it, &mut blocks); - blocks.push(LinkBlock::forced(Link::new("implementors", "Implementors"))); + + if !t.is_object_safe(cx.tcx()) { + blocks.push(LinkBlock::forced( + Link::new("object-safety", "Object Safety"), + "object-safety-note", + )); + } + + blocks.push(LinkBlock::forced(Link::new("implementors", "Implementors"), "impl")); if t.is_auto(cx.tcx()) { - blocks.push(LinkBlock::forced(Link::new("synthetic-implementors", "Auto Implementors"))); + blocks.push(LinkBlock::forced( + Link::new("synthetic-implementors", "Auto Implementors"), + "impl-auto", + )); } blocks } @@ -237,7 +257,7 @@ ) -> Vec> { let mut items = vec![]; if let Some(inner_type) = &t.inner_type { - items.push(LinkBlock::forced(Link::new("aliased-type", "Aliased type"))); + items.push(LinkBlock::forced(Link::new("aliased-type", "Aliased type"), "type")); match inner_type { 
clean::TypeAliasInnerType::Enum { variants, is_non_exhaustive: _ } => { let mut variants = variants @@ -248,12 +268,12 @@ .collect::>(); variants.sort_unstable(); - items.push(LinkBlock::new(Link::new("variants", "Variants"), variants)); + items.push(LinkBlock::new(Link::new("variants", "Variants"), "variant", variants)); } clean::TypeAliasInnerType::Union { fields } | clean::TypeAliasInnerType::Struct { ctor_kind: _, fields } => { let fields = get_struct_fields_name(fields); - items.push(LinkBlock::new(Link::new("fields", "Fields"), fields)); + items.push(LinkBlock::new(Link::new("fields", "Fields"), "field", fields)); } } } @@ -267,7 +287,7 @@ u: &'a clean::Union, ) -> Vec> { let fields = get_struct_fields_name(&u.fields); - let mut items = vec![LinkBlock::new(Link::new("fields", "Fields"), fields)]; + let mut items = vec![LinkBlock::new(Link::new("fields", "Fields"), "structfield", fields)]; sidebar_assoc_items(cx, it, &mut items); items } @@ -279,12 +299,11 @@ links: &mut Vec>, ) { let did = it.item_id.expect_def_id(); - let v = cx.shared.all_impls_for_item(it, it.item_id.expect_def_id()); - let v = v.as_slice(); + let cache = cx.cache(); let mut assoc_consts = Vec::new(); let mut methods = Vec::new(); - if !v.is_empty() { + if let Some(v) = cache.impls.get(&did) { let mut used_links = FxHashSet::default(); let mut id_map = IdMap::new(); @@ -320,7 +339,7 @@ cx, &mut deref_methods, impl_, - v.iter().copied(), + v, &mut derefs, &mut used_links, ); @@ -333,12 +352,16 @@ sidebar_render_assoc_items(cx, &mut id_map, concrete, synthetic, blanket_impl) } else { - std::array::from_fn(|_| LinkBlock::new(Link::empty(), vec![])) + std::array::from_fn(|_| LinkBlock::new(Link::empty(), "", vec![])) }; let mut blocks = vec![ - LinkBlock::new(Link::new("implementations", "Associated Constants"), assoc_consts), - LinkBlock::new(Link::new("implementations", "Methods"), methods), + LinkBlock::new( + Link::new("implementations", "Associated Constants"), + "associatedconstant", + assoc_consts, + ), + LinkBlock::new(Link::new("implementations", "Methods"), "method", methods), ]; blocks.append(&mut deref_methods); blocks.extend([concrete, synthetic, blanket]); @@ -350,7 +373,7 @@ cx: &'a Context<'_>, out: &mut Vec>, impl_: &Impl, - v: impl Iterator, + v: &[Impl], derefs: &mut DefIdSet, used_links: &mut FxHashSet, ) { @@ -375,7 +398,7 @@ // Avoid infinite cycles return; } - let deref_mut = { v }.any(|i| i.trait_did() == cx.tcx().lang_items().deref_mut_trait()); + let deref_mut = v.iter().any(|i| i.trait_did() == cx.tcx().lang_items().deref_mut_trait()); let inner_impl = target .def_id(c) .or_else(|| { @@ -407,7 +430,7 @@ ); // We want links' order to be reproducible so we don't use unstable sort. 
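Back in the `search_index.rs` hunk, an associated item only gets an impl-specific disambiguator anchor when more than one impl contributes an item with the same parent, kind and name; the count is an ordinary entry-API tally. A simplified version of that counting, with invented `Entry` fields and anchor strings:

```rust
use std::collections::HashMap;

/// Simplified search-index entry: which type it belongs to, its name,
/// and the anchor of the impl block that defines it.
struct Entry {
    parent: usize,
    name: &'static str,
    impl_anchor: &'static str,
}

/// Return (index, anchor) for every entry whose (parent, name) key is ambiguous.
fn disambiguators(entries: &[Entry]) -> Vec<(usize, &'static str)> {
    let mut counts: HashMap<(usize, &'static str), usize> = HashMap::new();
    for e in entries {
        *counts.entry((e.parent, e.name)).or_insert(0) += 1;
    }
    entries
        .iter()
        .enumerate()
        .filter(|(_, e)| counts[&(e.parent, e.name)] > 1)
        .map(|(i, e)| (i, e.impl_anchor))
        .collect()
}

fn main() {
    let entries = [
        Entry { parent: 0, name: "len", impl_anchor: "impl-Foo" },
        Entry { parent: 0, name: "len", impl_anchor: "impl-Deref-for-Foo" },
        Entry { parent: 0, name: "new", impl_anchor: "impl-Foo" },
    ];
    // Only the two `len` items need an anchor to tell them apart.
    assert_eq!(disambiguators(&entries).len(), 2);
}
```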
ret.sort(); - out.push(LinkBlock::new(Link::new(id, title), ret)); + out.push(LinkBlock::new(Link::new(id, title), "deref-methods", ret)); } } @@ -426,7 +449,7 @@ cx, out, target_deref_impl, - target_impls.iter(), + target_impls, derefs, used_links, ); @@ -446,7 +469,7 @@ .collect::>(); variants.sort_unstable(); - let mut items = vec![LinkBlock::new(Link::new("variants", "Variants"), variants)]; + let mut items = vec![LinkBlock::new(Link::new("variants", "Variants"), "variant", variants)]; sidebar_assoc_items(cx, it, &mut items); items } @@ -460,7 +483,7 @@ .filter(|sec| item_sections_in_use.contains(sec)) .map(|sec| Link::new(sec.id(), sec.name())) .collect(); - LinkBlock::new(Link::empty(), item_sections) + LinkBlock::new(Link::empty(), "", item_sections) } fn sidebar_module(items: &[clean::Item]) -> LinkBlock<'static> { @@ -503,8 +526,7 @@ .iter() .filter_map(|it| { let trait_ = it.inner_impl().trait_.as_ref()?; - let encoded = - id_map.derive(super::get_id_for_impl(&it.inner_impl().for_, Some(trait_), cx)); + let encoded = id_map.derive(super::get_id_for_impl(cx.tcx(), it.impl_item.item_id)); let prefix = match it.inner_impl().polarity { ty::ImplPolarity::Positive | ty::ImplPolarity::Reservation => "", @@ -522,12 +544,21 @@ let synthetic = format_impls(synthetic, id_map); let blanket = format_impls(blanket_impl, id_map); [ - LinkBlock::new(Link::new("trait-implementations", "Trait Implementations"), concrete), + LinkBlock::new( + Link::new("trait-implementations", "Trait Implementations"), + "trait-implementation", + concrete, + ), LinkBlock::new( Link::new("synthetic-implementations", "Auto Trait Implementations"), + "synthetic-implementation", synthetic, ), - LinkBlock::new(Link::new("blanket-implementations", "Blanket Implementations"), blanket), + LinkBlock::new( + Link::new("blanket-implementations", "Blanket Implementations"), + "blanket-implementation", + blanket, + ), ] } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/write_shared.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/write_shared.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/write_shared.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/render/write_shared.rs 2023-12-21 16:55:28.000000000 +0000 @@ -5,18 +5,28 @@ use std::path::{Component, Path}; use std::rc::{Rc, Weak}; +use indexmap::IndexMap; use itertools::Itertools; use rustc_data_structures::flock; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams}; +use rustc_span::def_id::DefId; +use rustc_span::Symbol; use serde::ser::SerializeSeq; use serde::{Serialize, Serializer}; use super::{collect_paths_for_type, ensure_trailing_slash, Context}; -use crate::clean::Crate; +use crate::clean::{Crate, Item, ItemId, ItemKind}; use crate::config::{EmitType, RenderOptions}; use crate::docfs::PathError; use crate::error::Error; +use crate::formats::cache::Cache; +use crate::formats::item_type::ItemType; +use crate::formats::{Impl, RenderMode}; +use crate::html::format::Buffer; +use crate::html::render::{AssocItemLink, ImplRenderingParameters}; use crate::html::{layout, static_files}; +use crate::visit::DocVisitor; use crate::{try_err, try_none}; /// Rustdoc writes out two kinds of shared files: @@ -336,33 +346,286 @@ let dst = cx.dst.join("index.html"); let page = layout::Page { title: "Index of crates", - css_class: "mod", + css_class: "mod sys", root_path: "./", static_root_path: 
shared.static_root_path.as_deref(), description: "List of crates", resource_suffix: &shared.resource_suffix, + rust_logo: true, }; let content = format!( "

List of all crates

    {}
", - krates - .iter() - .map(|s| { - format!( - "
  • {s}
  • ", - trailing_slash = ensure_trailing_slash(s), - ) - }) - .collect::() + krates.iter().format_with("", |k, f| { + f(&format_args!( + "
  • {k}
  • ", + trailing_slash = ensure_trailing_slash(k), + )) + }) ); let v = layout::render(&shared.layout, &page, "", content, &shared.style_files); shared.fs.write(dst, v)?; } } + let cloned_shared = Rc::clone(&cx.shared); + let cache = &cloned_shared.cache; + + // Collect the list of aliased types and their aliases. + // + // + // The clean AST has type aliases that point at their types, but + // this visitor works to reverse that: `aliased_types` is a map + // from target to the aliases that reference it, and each one + // will generate one file. + struct TypeImplCollector<'cx, 'cache> { + // Map from DefId-of-aliased-type to its data. + aliased_types: IndexMap>, + visited_aliases: FxHashSet, + cache: &'cache Cache, + cx: &'cache mut Context<'cx>, + } + // Data for an aliased type. + // + // In the final file, the format will be roughly: + // + // ```json + // // type.impl/CRATE/TYPENAME.js + // JSONP( + // "CRATE": [ + // ["IMPL1 HTML", "ALIAS1", "ALIAS2", ...], + // ["IMPL2 HTML", "ALIAS3", "ALIAS4", ...], + // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ struct AliasedType + // ... + // ] + // ) + // ``` + struct AliasedType<'cache> { + // This is used to generate the actual filename of this aliased type. + target_fqp: &'cache [Symbol], + target_type: ItemType, + // This is the data stored inside the file. + // ItemId is used to deduplicate impls. + impl_: IndexMap>, + } + // The `impl_` contains data that's used to figure out if an alias will work, + // and to generate the HTML at the end. + // + // The `type_aliases` list is built up with each type alias that matches. + struct AliasedTypeImpl<'cache> { + impl_: &'cache Impl, + type_aliases: Vec<(&'cache [Symbol], Item)>, + } + impl<'cx, 'cache> DocVisitor for TypeImplCollector<'cx, 'cache> { + fn visit_item(&mut self, it: &Item) { + self.visit_item_recur(it); + let cache = self.cache; + let ItemKind::TypeAliasItem(ref t) = *it.kind else { return }; + let Some(self_did) = it.item_id.as_def_id() else { return }; + if !self.visited_aliases.insert(self_did) { + return; + } + let Some(target_did) = t.type_.def_id(cache) else { return }; + let get_extern = { || cache.external_paths.get(&target_did) }; + let Some(&(ref target_fqp, target_type)) = + cache.paths.get(&target_did).or_else(get_extern) + else { + return; + }; + let aliased_type = self.aliased_types.entry(target_did).or_insert_with(|| { + let impl_ = cache + .impls + .get(&target_did) + .map(|v| &v[..]) + .unwrap_or_default() + .iter() + .map(|impl_| { + ( + impl_.impl_item.item_id, + AliasedTypeImpl { impl_, type_aliases: Vec::new() }, + ) + }) + .collect(); + AliasedType { target_fqp: &target_fqp[..], target_type, impl_ } + }); + let get_local = { || cache.paths.get(&self_did).map(|(p, _)| p) }; + let Some(self_fqp) = cache.exact_paths.get(&self_did).or_else(get_local) else { + return; + }; + let aliased_ty = self.cx.tcx().type_of(self_did).skip_binder(); + // Exclude impls that are directly on this type. They're already in the HTML. + // Some inlining scenarios can cause there to be two versions of the same + // impl: one on the type alias and one on the underlying target type. + let mut seen_impls: FxHashSet = cache + .impls + .get(&self_did) + .map(|s| &s[..]) + .unwrap_or_default() + .iter() + .map(|i| i.impl_item.item_id) + .collect(); + for (impl_item_id, aliased_type_impl) in &mut aliased_type.impl_ { + // Only include this impl if it actually unifies with this alias. + // Synthetic impls are not included; those are also included in the HTML. 
+ // + // FIXME(lazy_type_alias): Once the feature is complete or stable, rewrite this + // to use type unification. + // Be aware of `tests/rustdoc/type-alias/deeply-nested-112515.rs` which might regress. + let Some(impl_did) = impl_item_id.as_def_id() else { continue }; + let for_ty = self.cx.tcx().type_of(impl_did).skip_binder(); + let reject_cx = + DeepRejectCtxt { treat_obligation_params: TreatParams::AsCandidateKey }; + if !reject_cx.types_may_unify(aliased_ty, for_ty) { + continue; + } + // Avoid duplicates + if !seen_impls.insert(*impl_item_id) { + continue; + } + // This impl was not found in the set of rejected impls + aliased_type_impl.type_aliases.push((&self_fqp[..], it.clone())); + } + } + } + let mut type_impl_collector = TypeImplCollector { + aliased_types: IndexMap::default(), + visited_aliases: FxHashSet::default(), + cache, + cx, + }; + DocVisitor::visit_crate(&mut type_impl_collector, &krate); + // Final serialized form of the alias impl + struct AliasSerializableImpl { + text: String, + trait_: Option, + aliases: Vec, + } + impl Serialize for AliasSerializableImpl { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut seq = serializer.serialize_seq(None)?; + seq.serialize_element(&self.text)?; + if let Some(trait_) = &self.trait_ { + seq.serialize_element(trait_)?; + } else { + seq.serialize_element(&0)?; + } + for type_ in &self.aliases { + seq.serialize_element(type_)?; + } + seq.end() + } + } + let cx = type_impl_collector.cx; + let dst = cx.dst.join("type.impl"); + let aliased_types = type_impl_collector.aliased_types; + for aliased_type in aliased_types.values() { + let impls = aliased_type + .impl_ + .values() + .flat_map(|AliasedTypeImpl { impl_, type_aliases }| { + let mut ret = Vec::new(); + let trait_ = impl_ + .inner_impl() + .trait_ + .as_ref() + .map(|trait_| format!("{:#}", trait_.print(cx))); + // render_impl will filter out "impossible-to-call" methods + // to make that functionality work here, it needs to be called with + // each type alias, and if it gives a different result, split the impl + for &(type_alias_fqp, ref type_alias_item) in type_aliases { + let mut buf = Buffer::html(); + cx.id_map = Default::default(); + cx.deref_id_map = Default::default(); + let target_did = impl_ + .inner_impl() + .trait_ + .as_ref() + .map(|trait_| trait_.def_id()) + .or_else(|| impl_.inner_impl().for_.def_id(cache)); + let provided_methods; + let assoc_link = if let Some(target_did) = target_did { + provided_methods = impl_.inner_impl().provided_trait_methods(cx.tcx()); + AssocItemLink::GotoSource(ItemId::DefId(target_did), &provided_methods) + } else { + AssocItemLink::Anchor(None) + }; + super::render_impl( + &mut buf, + cx, + *impl_, + &type_alias_item, + assoc_link, + RenderMode::Normal, + None, + &[], + ImplRenderingParameters { + show_def_docs: true, + show_default_items: true, + show_non_assoc_items: true, + toggle_open_by_default: true, + }, + ); + let text = buf.into_inner(); + let type_alias_fqp = (*type_alias_fqp).iter().join("::"); + if Some(&text) == ret.last().map(|s: &AliasSerializableImpl| &s.text) { + ret.last_mut() + .expect("already established that ret.last() is Some()") + .aliases + .push(type_alias_fqp); + } else { + ret.push(AliasSerializableImpl { + text, + trait_: trait_.clone(), + aliases: vec![type_alias_fqp], + }) + } + } + ret + }) + .collect::>(); + let impls = format!( + r#""{}":{}"#, + krate.name(cx.tcx()), + serde_json::to_string(&impls).expect("failed serde conversion"), + ); + + let mut mydst = 
dst.clone(); + for part in &aliased_type.target_fqp[..aliased_type.target_fqp.len() - 1] { + mydst.push(part.to_string()); + } + cx.shared.ensure_dir(&mydst)?; + let aliased_item_type = aliased_type.target_type; + mydst.push(&format!( + "{aliased_item_type}.{}.js", + aliased_type.target_fqp[aliased_type.target_fqp.len() - 1] + )); + + let (mut all_impls, _) = try_err!(collect(&mydst, krate.name(cx.tcx()).as_str()), &mydst); + all_impls.push(impls); + // Sort the implementors by crate so the file will be generated + // identically even with rustdoc running in parallel. + all_impls.sort(); + + let mut v = String::from("(function() {var type_impls = {\n"); + v.push_str(&all_impls.join(",\n")); + v.push_str("\n};"); + v.push_str( + "if (window.register_type_impls) {\ + window.register_type_impls(type_impls);\ + } else {\ + window.pending_type_impls = type_impls;\ + }", + ); + v.push_str("})()"); + cx.shared.fs.write(mydst, v)?; + } + // Update the list of all implementors for traits - let dst = cx.dst.join("implementors"); - let cache = cx.cache(); + // + let dst = cx.dst.join("trait.impl"); for (&did, imps) in &cache.implementors { // Private modules can leak through to this phase of rustdoc, which // could contain implementations for otherwise private types. In some diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/sources.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/sources.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/sources.rs 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/sources.rs 2023-12-21 16:55:28.000000000 +0000 @@ -1,4 +1,5 @@ use crate::clean; +use crate::clean::utils::has_doc_flag; use crate::docfs::PathError; use crate::error::Error; use crate::html::format; @@ -12,7 +13,7 @@ use rustc_hir::def_id::LOCAL_CRATE; use rustc_middle::ty::TyCtxt; use rustc_session::Session; -use rustc_span::source_map::FileName; +use rustc_span::{sym, FileName}; use std::cell::RefCell; use std::ffi::OsStr; @@ -223,7 +224,8 @@ cur.push(&fname); let title = format!("{} - source", src_fname.to_string_lossy()); - let desc = format!("Source of the Rust file `{}`.", filename.prefer_remapped()); + let desc = + format!("Source of the Rust file `{}`.", filename.prefer_remapped_unconditionaly()); let page = layout::Page { title: &title, css_class: "src", @@ -231,6 +233,7 @@ static_root_path: shared.static_root_path.as_deref(), description: &desc, resource_suffix: &shared.resource_suffix, + rust_logo: has_doc_flag(self.cx.tcx(), LOCAL_CRATE.as_def_id(), sym::rust_logo), }; let v = layout::render( &shared.layout, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/css/rustdoc.css rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/css/rustdoc.css --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/css/rustdoc.css 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/css/rustdoc.css 2023-12-21 16:55:28.000000000 +0000 @@ -461,19 +461,9 @@ display: none !important; } -.sidebar .logo-container { - margin-top: 10px; - margin-bottom: 10px; - text-align: center; -} - -.version { - overflow-wrap: break-word; -} - .logo-container > img { - height: 100px; - width: 100px; + height: 48px; + width: 48px; } ul.block, .block li { @@ -502,6 +492,7 @@ } .sidebar-elems, +.sidebar > .version, .sidebar > h2 { padding-left: 24px; } @@ -510,6 +501,8 @@ color: var(--sidebar-link-color); } .sidebar .current, +.sidebar .current a, +.sidebar-crate a.logo-container:hover + h2 a, 
.sidebar a:hover:not(.logo-container) { background-color: var(--sidebar-current-link-background-color); } @@ -524,6 +517,75 @@ overflow: hidden; } +.sidebar-crate { + display: flex; + align-items: center; + justify-content: center; + /* there's a 10px padding at the top of <nav>
    , and a 4px margin at the + top of the search form. To line them up, add them. */ + margin: 14px 32px 1rem; + row-gap: 10px; + column-gap: 32px; + flex-wrap: wrap; +} + +.sidebar-crate h2 { + flex-grow: 1; + /* This setup with the margins and row-gap is designed to make flex-wrap + work the way we want. If they're in the side-by-side lockup, there + should be a 16px margin to the left of the logo (visually the same as + the 24px one on everything else, which are not giant circles) and 8px + between it and the crate's name and version. When they're line wrapped, + the logo needs to have the same margin on both sides of itself (to + center properly) and the crate name and version need 24px on their + left margin. */ + margin: 0 -8px; + /* To align this with the search bar, it should not be centered, even when + the logo is. */ + align-self: start; +} + +.sidebar-crate .logo-container { + /* The logo is expected to have 8px "slop" along its edges, so we can optically + center it. */ + margin: 0 -16px 0 -16px; + text-align: center; +} + +.sidebar-crate h2 a { + display: block; + margin: 0 calc(-24px + 0.25rem) 0 -0.5rem; + /* Align the sidebar crate link with the search bar, which have different + font sizes. + + | | font-size | line-height | total line-height | padding-y | total | + |:-------|----------:|------------:|------------------:|----------:|-------------:| + | crate | 1.375rem | 1.25 | 1.72rem | x | 2x+1.72rem | + | search | 1rem | 1.15 | 1.15rem | 8px | 1.15rem+16px | + + 2x + 1.72rem = 1.15rem + 16px + 2x = 1.15rem + 16px - 1.72rem + 2x = 16px - 0.57rem + x = ( 16px - 0.57rem ) / 2 + */ + padding: calc( ( 16px - 0.57rem ) / 2 ) 0.25rem; + padding-left: 0.5rem; +} + +.sidebar-crate h2 .version { + display: block; + font-weight: normal; + font-size: 1rem; + overflow-wrap: break-word; + /* opposite of the link padding, cut in half again */ + margin-top: calc( ( -16px + 0.57rem ) / 2 ); +} + +.sidebar-crate + .version { + margin-top: -1rem; + margin-bottom: 1rem; +} + .mobile-topbar { display: none; } @@ -1045,6 +1107,7 @@ white-space: pre-wrap; border-radius: 3px; display: inline; + vertical-align: baseline; } .stab.portability > code { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/js/main.js rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/js/main.js --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/js/main.js 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/js/main.js 2023-12-21 16:55:28.000000000 +0000 @@ -51,9 +51,14 @@ // but with the current code it's hard to get the right information in the right place. 
const mobileTopbar = document.querySelector(".mobile-topbar"); const locationTitle = document.querySelector(".sidebar h2.location"); - if (mobileTopbar && locationTitle) { + if (mobileTopbar) { const mobileTitle = document.createElement("h2"); - mobileTitle.innerHTML = locationTitle.innerHTML; + mobileTitle.className = "location"; + if (hasClass(document.body, "crate")) { + mobileTitle.innerText = `Crate ${window.currentCrate}`; + } else if (locationTitle) { + mobileTitle.innerHTML = locationTitle.innerHTML; + } mobileTopbar.appendChild(mobileTitle); } } @@ -354,6 +359,34 @@ expandSection(pageId); } } + if (savedHash.startsWith("impl-")) { + // impl-disambiguated links, used by the search engine + // format: impl-X[-for-Y]/method.WHATEVER + // turn this into method.WHATEVER[-NUMBER] + const splitAt = savedHash.indexOf("/"); + if (splitAt !== -1) { + const implId = savedHash.slice(0, splitAt); + const assocId = savedHash.slice(splitAt + 1); + const implElem = document.getElementById(implId); + if (implElem && implElem.parentElement.tagName === "SUMMARY" && + implElem.parentElement.parentElement.tagName === "DETAILS") { + onEachLazy(implElem.parentElement.parentElement.querySelectorAll( + `[id^="${assocId}"]`), + item => { + const numbered = /([^-]+)-([0-9]+)/.exec(item.id); + if (item.id === assocId || (numbered && numbered[1] === assocId)) { + openParentDetails(item); + item.scrollIntoView(); + // Let the section expand itself before trying to highlight + setTimeout(() => { + window.location.replace("#" + item.id); + }, 0); + } + } + ); + } + } + } } function onHashChange(ev) { @@ -452,22 +485,27 @@ return; } + const modpath = hasClass(document.body, "mod") ? "../" : ""; + const h3 = document.createElement("h3"); - h3.innerHTML = `${longty}`; + h3.innerHTML = `${longty}`; const ul = document.createElement("ul"); ul.className = "block " + shortty; for (const name of filtered) { let path; if (shortty === "mod") { - path = name + "/index.html"; + path = `${modpath}${name}/index.html`; } else { - path = shortty + "." 
+ name + ".html"; + path = `${modpath}${shortty}.${name}.html`; + } + let current_page = document.location.href.toString(); + if (current_page.endsWith("/")) { + current_page += "index.html"; } - const current_page = document.location.href.split("/").pop(); const link = document.createElement("a"); link.href = path; - if (path === current_page) { + if (link.href === current_page) { link.className = "current"; } link.textContent = name; @@ -480,23 +518,38 @@ } if (sidebar) { + // keep this synchronized with ItemSection::ALL in html/render/mod.rs + // Re-exports aren't shown here, because they don't have child pages + //block("reexport", "reexports", "Re-exports"); block("primitive", "primitives", "Primitive Types"); block("mod", "modules", "Modules"); block("macro", "macros", "Macros"); block("struct", "structs", "Structs"); block("enum", "enums", "Enums"); - block("union", "unions", "Unions"); block("constant", "constants", "Constants"); block("static", "static", "Statics"); block("trait", "traits", "Traits"); block("fn", "functions", "Functions"); block("type", "types", "Type Aliases"); + block("union", "unions", "Unions"); + // No point, because these items don't appear in modules + //block("impl", "impls", "Implementations"); + //block("tymethod", "tymethods", "Type Methods"); + //block("method", "methods", "Methods"); + //block("structfield", "fields", "Fields"); + //block("variant", "variants", "Variants"); + //block("associatedtype", "associated-types", "Associated Types"); + //block("associatedconstant", "associated-consts", "Associated Constants"); block("foreigntype", "foreign-types", "Foreign Types"); block("keyword", "keywords", "Keywords"); + block("opaque", "opaque-types", "Opaque Types"); + block("attr", "attributes", "Attribute Macros"); + block("derive", "derives", "Derive Macros"); block("traitalias", "trait-aliases", "Trait Aliases"); } } + // window.register_implementors = imp => { const implementors = document.getElementById("implementors-list"); const synthetic_implementors = document.getElementById("synthetic-implementors-list"); @@ -563,7 +616,7 @@ onEachLazy(code.getElementsByTagName("a"), elem => { const href = elem.getAttribute("href"); - if (href && !/^(?:[a-z+]+:)?\/\//.test(href)) { + if (href && !href.startsWith("#") && !/^(?:[a-z+]+:)?\/\//.test(href)) { elem.setAttribute("href", window.rootPath + href); } }); @@ -587,6 +640,216 @@ window.register_implementors(window.pending_implementors); } + /** + * + * + * [RUSTDOCIMPL] type.impl + * + * This code inlines implementations into the type alias docs at runtime. It's done at + * runtime because some crates have many type aliases and many methods, and we don't want + * to generate *O*`(types*methods)` HTML text. The data inside is mostly HTML fragments, + * wrapped in JSON. + * + * - It only includes docs generated for the current crate. This function accepts an + * object mapping crate names to the set of impls. + * + * - It filters down to the set of applicable impls. The Rust type checker is used to + * tag each HTML blob with the set of type aliases that can actually use it, so the + * JS only needs to consult the attached list of type aliases. + * + * - It renames the ID attributes, to avoid conflicting IDs in the resulting DOM. + * + * - It adds the necessary items to the sidebar. If it's an inherent impl, that means + * adding methods, associated types, and associated constants. If it's a trait impl, + * that means adding it to the trait impl sidebar list. + * + * - It adds the HTML block itself. 
If it's an inherent impl, it goes after the type + * alias's own inherent impls. If it's a trait impl, it goes in the Trait + * Implementations section. + * + * - After processing all of the impls, it sorts the sidebar items by name. + * + * @param {{[cratename: string]: Array>}} impl + */ + window.register_type_impls = imp => { + if (!imp || !imp[window.currentCrate]) { + return; + } + window.pending_type_impls = null; + const idMap = new Map(); + + let implementations = document.getElementById("implementations-list"); + let trait_implementations = document.getElementById("trait-implementations-list"); + let trait_implementations_header = document.getElementById("trait-implementations"); + + // We want to include the current type alias's impls, and no others. + const script = document.querySelector("script[data-self-path]"); + const selfPath = script ? script.getAttribute("data-self-path") : null; + + // These sidebar blocks need filled in, too. + const mainContent = document.querySelector("#main-content"); + const sidebarSection = document.querySelector(".sidebar section"); + let methods = document.querySelector(".sidebar .block.method"); + let associatedTypes = document.querySelector(".sidebar .block.associatedtype"); + let associatedConstants = document.querySelector(".sidebar .block.associatedconstant"); + let sidebarTraitList = document.querySelector(".sidebar .block.trait-implementation"); + + for (const impList of imp[window.currentCrate]) { + const types = impList.slice(2); + const text = impList[0]; + const isTrait = impList[1] !== 0; + const traitName = impList[1]; + if (types.indexOf(selfPath) === -1) { + continue; + } + let outputList = isTrait ? trait_implementations : implementations; + if (outputList === null) { + const outputListName = isTrait ? "Trait Implementations" : "Implementations"; + const outputListId = isTrait ? + "trait-implementations-list" : + "implementations-list"; + const outputListHeaderId = isTrait ? 
"trait-implementations" : "implementations"; + const outputListHeader = document.createElement("h2"); + outputListHeader.id = outputListHeaderId; + outputListHeader.innerText = outputListName; + outputList = document.createElement("div"); + outputList.id = outputListId; + if (isTrait) { + const link = document.createElement("a"); + link.href = `#${outputListHeaderId}`; + link.innerText = "Trait Implementations"; + const h = document.createElement("h3"); + h.appendChild(link); + trait_implementations = outputList; + trait_implementations_header = outputListHeader; + sidebarSection.appendChild(h); + sidebarTraitList = document.createElement("ul"); + sidebarTraitList.className = "block trait-implementation"; + sidebarSection.appendChild(sidebarTraitList); + mainContent.appendChild(outputListHeader); + mainContent.appendChild(outputList); + } else { + implementations = outputList; + if (trait_implementations) { + mainContent.insertBefore(outputListHeader, trait_implementations_header); + mainContent.insertBefore(outputList, trait_implementations_header); + } else { + const mainContent = document.querySelector("#main-content"); + mainContent.appendChild(outputListHeader); + mainContent.appendChild(outputList); + } + } + } + const template = document.createElement("template"); + template.innerHTML = text; + + onEachLazy(template.content.querySelectorAll("a"), elem => { + const href = elem.getAttribute("href"); + + if (href && !href.startsWith("#") && !/^(?:[a-z+]+:)?\/\//.test(href)) { + elem.setAttribute("href", window.rootPath + href); + } + }); + onEachLazy(template.content.querySelectorAll("[id]"), el => { + let i = 0; + if (idMap.has(el.id)) { + i = idMap.get(el.id); + } else if (document.getElementById(el.id)) { + i = 1; + while (document.getElementById(`${el.id}-${2 * i}`)) { + i = 2 * i; + } + while (document.getElementById(`${el.id}-${i}`)) { + i += 1; + } + } + if (i !== 0) { + const oldHref = `#${el.id}`; + const newHref = `#${el.id}-${i}`; + el.id = `${el.id}-${i}`; + onEachLazy(template.content.querySelectorAll("a[href]"), link => { + if (link.getAttribute("href") === oldHref) { + link.href = newHref; + } + }); + } + idMap.set(el.id, i + 1); + }); + const templateAssocItems = template.content.querySelectorAll("section.tymethod, " + + "section.method, section.associatedtype, section.associatedconstant"); + if (isTrait) { + const li = document.createElement("li"); + const a = document.createElement("a"); + a.href = `#${template.content.querySelector(".impl").id}`; + a.textContent = traitName; + li.appendChild(a); + sidebarTraitList.append(li); + } else { + onEachLazy(templateAssocItems, item => { + let block = hasClass(item, "associatedtype") ? associatedTypes : ( + hasClass(item, "associatedconstant") ? associatedConstants : ( + methods)); + if (!block) { + const blockTitle = hasClass(item, "associatedtype") ? "Associated Types" : ( + hasClass(item, "associatedconstant") ? "Associated Constants" : ( + "Methods")); + const blockClass = hasClass(item, "associatedtype") ? "associatedtype" : ( + hasClass(item, "associatedconstant") ? 
"associatedconstant" : ( + "method")); + const blockHeader = document.createElement("h3"); + const blockLink = document.createElement("a"); + blockLink.href = "#implementations"; + blockLink.innerText = blockTitle; + blockHeader.appendChild(blockLink); + block = document.createElement("ul"); + block.className = `block ${blockClass}`; + const insertionReference = methods || sidebarTraitList; + if (insertionReference) { + const insertionReferenceH = insertionReference.previousElementSibling; + sidebarSection.insertBefore(blockHeader, insertionReferenceH); + sidebarSection.insertBefore(block, insertionReferenceH); + } else { + sidebarSection.appendChild(blockHeader); + sidebarSection.appendChild(block); + } + if (hasClass(item, "associatedtype")) { + associatedTypes = block; + } else if (hasClass(item, "associatedconstant")) { + associatedConstants = block; + } else { + methods = block; + } + } + const li = document.createElement("li"); + const a = document.createElement("a"); + a.innerText = item.id.split("-")[0].split(".")[1]; + a.href = `#${item.id}`; + li.appendChild(a); + block.appendChild(li); + }); + } + outputList.appendChild(template.content); + } + + for (const list of [methods, associatedTypes, associatedConstants, sidebarTraitList]) { + if (!list) { + continue; + } + const newChildren = Array.prototype.slice.call(list.children); + newChildren.sort((a, b) => { + const aI = a.innerText; + const bI = b.innerText; + return aI < bI ? -1 : + aI > bI ? 1 : + 0; + }); + list.replaceChildren(...newChildren); + } + }; + if (window.pending_type_impls) { + window.register_type_impls(window.pending_type_impls); + } + function addSidebarCrates() { if (!window.ALL_CRATES) { return; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/js/search.js rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/js/search.js --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/js/search.js 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/static/js/search.js 2023-12-21 16:55:28.000000000 +0000 @@ -1555,7 +1555,7 @@ return false; } } - } else if (fnType.id !== null) { + } else { if (queryElem.id === typeNameIdOfArrayOrSlice && (fnType.id === typeNameIdOfSlice || fnType.id === typeNameIdOfArray) ) { @@ -1752,6 +1752,7 @@ type: item.type, is_alias: true, deprecated: item.deprecated, + implDisambiguator: item.implDisambiguator, }; } @@ -2218,7 +2219,7 @@ href = ROOT_PATH + name + "/index.html"; } else if (item.parent !== undefined) { const myparent = item.parent; - let anchor = "#" + type + "." + name; + let anchor = type + "." + name; const parentType = itemTypes[myparent.ty]; let pageType = parentType; let pageName = myparent.name; @@ -2232,16 +2233,19 @@ const enumName = item.path.substr(enumNameIdx + 2); path = item.path.substr(0, enumNameIdx); displayPath = path + "::" + enumName + "::" + myparent.name + "::"; - anchor = "#variant." + myparent.name + ".field." + name; + anchor = "variant." + myparent.name + ".field." + name; pageType = "enum"; pageName = enumName; } else { displayPath = path + "::" + myparent.name + "::"; } + if (item.implDisambiguator !== null) { + anchor = item.implDisambiguator + "/" + anchor; + } href = ROOT_PATH + path.replace(/::/g, "/") + "/" + pageType + "." 
+ pageName + - ".html" + anchor; + ".html#" + anchor; } else { displayPath = item.path + "::"; href = ROOT_PATH + item.path.replace(/::/g, "/") + @@ -2727,6 +2731,10 @@ * Types are also represented as arrays; the first item is an index into the `p` * array, while the second is a list of types representing any generic parameters. * + * b[i] contains an item's impl disambiguator. This is only present if an item + * is defined in an impl block and, the impl block's type has more than one associated + * item with the same name. + * * `a` defines aliases with an Array of pairs: [name, offset], where `offset` * points into the n/t/d/q/i/f arrays. * @@ -2746,6 +2754,7 @@ * i: Array, * f: Array, * p: Array, + * b: Array<[Number, String]>, * c: Array * }} */ @@ -2766,6 +2775,7 @@ id: id, normalizedName: crate.indexOf("_") === -1 ? crate : crate.replace(/_/g, ""), deprecated: null, + implDisambiguator: null, }; id += 1; searchIndex.push(crateRow); @@ -2789,6 +2799,8 @@ const itemFunctionSearchTypes = crateCorpus.f; // an array of (Number) indices for the deprecated items const deprecatedItems = new Set(crateCorpus.c); + // an array of (Number) indices for the deprecated items + const implDisambiguator = new Map(crateCorpus.b); // an array of [(Number) item type, // (String) name] const paths = crateCorpus.p; @@ -2849,6 +2861,7 @@ id: id, normalizedName: word.indexOf("_") === -1 ? word : word.replace(/_/g, ""), deprecated: deprecatedItems.has(i), + implDisambiguator: implDisambiguator.has(i) ? implDisambiguator.get(i) : null, }; id += 1; searchIndex.push(row); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/templates/page.html rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/templates/page.html --- rustc-1.74.1+dfsg0ubuntu1~bpo10/src/librustdoc/html/templates/page.html 2023-12-04 19:48:34.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/src/librustdoc/html/templates/page.html 2023-12-21 16:55:28.000000000 +0000 @@ -10,7 +10,6 @@ {# #} {# #} {# #} - {# #} {# #} {# #} @@ -42,6 +41,8 @@ {# #} {% else if !page.css_class.contains("mod") %} {# #} + {% else if !page.css_class.contains("sys") %} + {# #} {% endif %} {# #} {% if layout.scrape_examples_extension %} @@ -77,36 +78,51 @@ {% if page.css_class != "src" %} {% endif %} {# #}
    {# #} {% if page.css_class != "src" %}
    {% endif %}
    -#[derive(Clone, Debug, yoke::Yokeable, zerofrom::ZeroFrom)] +#[derive(Clone, Debug, PartialEq, yoke::Yokeable, zerofrom::ZeroFrom)] #[cfg_attr( feature = "datagen", - derive(PartialEq, serde::Serialize, databake::Bake), + derive(serde::Serialize, databake::Bake), databake(path = icu_list::provider), )] pub struct SpecialCasePattern<'data> { @@ -245,7 +268,7 @@ let string = (&*self.string).bake(env); let index_1 = self.index_1.bake(env); databake::quote! { - ::icu_list::provider::ListJoinerPattern::from_parts(#string, #index_1) + icu_list::provider::ListJoinerPattern::from_parts(#string, #index_1) } } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list/src/provider/serde_dfa.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list/src/provider/serde_dfa.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list/src/provider/serde_dfa.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list/src/provider/serde_dfa.rs 2023-12-21 18:27:08.000000000 +0000 @@ -22,7 +22,6 @@ pattern: Option>, } -#[cfg(feature = "datagen")] impl PartialEq for SerdeDFA<'_> { fn eq(&self, other: &Self) -> bool { self.dfa_bytes == other.dfa_bytes @@ -38,7 +37,7 @@ // Safe because of `to_bytes_little_endian`/`to_bytes_big_endian`'s invariant. databake::quote! { unsafe { - ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( + icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( if cfg!(target_endian = "little") { #le_bytes } else { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/.cargo-checksum.json rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/.cargo-checksum.json --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/.cargo-checksum.json 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/.cargo-checksum.json 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1 @@ +{"files":{"Cargo.toml":"0b627eaa59c6446fe690034b6345e6c7a993fd8c559a42dd95e9ed66ea848dd0","LICENSE":"853f87c96f3d249f200fec6db1114427bc8bdf4afddc93c576956d78152ce978","README.md":"35c04021ba634c15b9e076681f06ed75bef0ac8dff1213c9dfbc50418a0efa99","data/any.rs":"20e2615f32f50679782f5a6168dbf770cb3105b625016b2240c0520ffe3bd6e7","data/macros.rs":"5822478d40e10a4af411d595adedbe51765f8c50f2483f53bd86fd89dceb8aff","data/macros/list_and_v1.data.rs":"5f19a3fafc767da2a9396057f5b9e68e121543dcbe2511323e54046b4b25b155","data/macros/list_or_v1.data.rs":"7c28e55d4003ae809316407a11aa1b4cde5d608f20cb8c081e4c3a210807a94e","data/macros/list_unit_v1.data.rs":"97dae78b7344e2e95917d75de8c9511223b5987f9fc2870b35d989b661a8444d","data/mod.rs":"3c792b7d86acca81c30348af137943d2400dbc896d8b8b9e57c38b4c0c2473c9","src/lib.rs":"2561f5be89f545fb8ccbdd8214762a87ad24e4d505dc9010b422f2a5b6ee8595"},"package":"d3237583f0cb7feafabb567c4492fe9ef1d2d4113f6a8798a923273ea5de996d"} \ No newline at end of file diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/Cargo.toml 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,33 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. 
+# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +rust-version = "1.66" +name = "icu_list_data" +version = "1.3.2" +authors = ["The ICU4X Project Developers"] +include = [ + "data/**/*", + "src/**/*", + "examples/**/*", + "benches/**/*", + "tests/**/*", + "Cargo.toml", + "LICENSE", + "README.md", +] +description = "Data for the icu_list crate" +homepage = "https://icu4x.unicode.org" +readme = "README.md" +categories = ["internationalization"] +license-file = "LICENSE" +repository = "https://github.com/unicode-org/icu4x" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/LICENSE rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/LICENSE --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/LICENSE 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,44 @@ +UNICODE LICENSE V3 + +COPYRIGHT AND PERMISSION NOTICE + +Copyright © 2020-2023 Unicode, Inc. + +NOTICE TO USER: Carefully read the following legal agreement. BY +DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING DATA FILES, AND/OR +SOFTWARE, YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE +TERMS AND CONDITIONS OF THIS AGREEMENT. IF YOU DO NOT AGREE, DO NOT +DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE THE DATA FILES OR SOFTWARE. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of data files and any associated documentation (the "Data Files") or +software and any associated documentation (the "Software") to deal in the +Data Files or Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, and/or sell +copies of the Data Files or Software, and to permit persons to whom the +Data Files or Software are furnished to do so, provided that either (a) +this copyright and permission notice appear with all copies of the Data +Files or Software, or (b) this copyright and permission notice appear in +associated Documentation. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF +THIRD PARTY RIGHTS. + +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE +BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THE DATA +FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall +not be used in advertising or otherwise to promote the sale, use or other +dealings in these Data Files or Software without prior written +authorization of the copyright holder. + +— + +Portions of ICU4X may have been adapted from ICU4C and/or ICU4J. +ICU 1.8.1 to ICU 57.1 © 1995-2016 International Business Machines Corporation and others. 
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/README.md rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/README.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/README.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/README.md 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,11 @@ +# icu_list_data [![crates.io](https://img.shields.io/crates/v/icu_list_data)](https://crates.io/crates/icu_list_data) + + + +Data for the icu_list crate + + + +## More Information + +For more information on development, authorship, contributing etc. please visit [`ICU4X home page`](https://github.com/unicode-org/icu4x). diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/any.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/any.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/any.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/any.rs 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,2 @@ +// @generated +impl_any_provider!(BakedDataProvider); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_and_v1.data.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_and_v1.data.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_and_v1.data.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_and_v1.data.rs 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,140 @@ +// @generated +/// Implement `DataProvider` on the given struct using the data +/// hardcoded in this file. This allows the struct to be used with +/// `icu`'s `_unstable` constructors. +#[doc(hidden)] +#[macro_export] +macro_rules! 
__impl_list_and_v1 { + ($ provider : ty) => { + #[clippy::msrv = "1.66"] + const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; + #[clippy::msrv = "1.66"] + impl icu_provider::DataProvider for $provider { + fn load(&self, req: icu_provider::DataRequest) -> Result, icu_provider::DataError> { + static TH: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" และ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("และ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" และ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("และ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" และ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("และ", 9u8), special_case: None }]); + static MY: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a} ", 16u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a} ", 16u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a} ", 16u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a} ", 16u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a} ", 16u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a} ", 16u8), special_case: None }]); + static TO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo ", 4u8), special_case: None }]); + static AR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }]); + static DSB: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }]); + static SK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a\u{a0}", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a\u{a0}", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a\u{a0}", 4u8), special_case: None }]); + static CS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a\u{a0}", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a\u{a0}", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a\u{a0}", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a\u{a0}", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static GA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" agus ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" agus ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" agus ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" agus ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static GD: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" agus ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" agus ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ⁊ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ⁊ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static EN_001: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static EN_IN: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", and ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static YRL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" asuí ", 7u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" asuí ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" asuí ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" asuí ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static MS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dan ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dan ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dan ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dan ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static SQ: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dhe ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dhe ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dhe ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dhe ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dhe ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dhe ", 5u8), special_case: None }]); + static GL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }]); + static PT: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static NL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }]); + static AF: ::Yokeable = 
icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }]); + static FR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { 
default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static EU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eta ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eta ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eta ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eta ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static BR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ha ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ha ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }]); + static BS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }]); + static SL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }]); + static LT: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ir ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ir ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ir ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ir ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ir ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ir ", 4u8), special_case: None }]); + static SO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" iyo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" iyo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static FI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }]); + static ET: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static KGP: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" kar ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" kar ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" kar ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" kar ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static SW: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static SV: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" och ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" och ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" och ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" och ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static DA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }]); + static IS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static LV: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" un ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" un ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" un ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" un ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" un ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" un ", 4u8), special_case: None }]); + static DE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" und ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" und ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" und ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" und ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" und ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" und ", 5u8), special_case: None }]); + static UZ: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" va ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" va ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" va ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" va ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static TR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static VI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" và ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" và ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" và ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" và ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static AZ: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" və ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" və ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" və ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" və ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static TK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" we ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" we ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" we ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" we ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static AST: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }]); + static ES: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }]); + static HU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }]); + static RO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" și ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" și ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" și ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" și ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static EL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" και ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" και ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" και ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" και ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static KY: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" жана ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" жана ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" жана ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" жана ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" жана ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" жана ", 10u8), special_case: None }]); + static BS_CYRL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }]); + static BG: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static RU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static CV: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" тата ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" тата ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" тата ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" тата ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static BE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }]); + static UK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static TT: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }]); + static HY: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" և ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" և ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" և ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" և ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static HE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ו", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########y\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########\0\0\x01y\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F 
!!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" ו‑", 6u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ו", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########y\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########\0\0\x01y\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" ו‑", 6u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ו", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########y\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F 
!!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########\0\0\x01y\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" ו‑", 6u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ו", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########y\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########\0\0\x01y\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F 
!!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" ו‑", 6u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ו", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########y\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########\0\0\x01y\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" ו‑", 6u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ו", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########y\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F 
!!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x11\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x02\x03\x03\x04\x05\x05\x06\x06\x06\x06\x06\x06\x06\x06\x07\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\t\t\t\n\n\n\n\n\n\n\n\n\n\n\x0B\x0C\r\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x11\x12\x12\x12\x12\x12\x13\x14\x15\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !!!\"###########\0\0\x01y\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x0E\0\0\0\x17\x17\x18\x18\x19\x19\x1A\x1A\x1B\x1B\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"\0\0j\x01\0\0z\0\0\0\x89\0\0\0\x98\0\0\0z\0\0\0\xB3\0\0\0\xC2\0\0\0\xD1\0\0\0\xC2\0\0\0\xE0\0\0\0\x01\x01\0\0\x10\x01\0\0\x1F\x01\0\0\0\0\0\0\0\x02\0\x01\x15\0\0j\x01\0\0\0\0\0\0\0\x02\0\x01\x07\0\0j\x01\0\0\0\0\0\0\0\x04\0\x06\x06\x0B\x0E\x10\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\n\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0z\0\0\0\0\0\0\0\0\x02\0\x01\t\0\0z\0\0\0\0\0\0\0\0\x05\0\x01\x0B\x0C\x0C\r\r\x0E\x15\0\0z\0\0\0.\x01\0\0O\x01\0\0z\0\0\0\0\0\0\0\0\x02\0\x07\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x15\0\0\xC2\0\0\0\0\0\0\0\0\x02\0\x01\x06\0\0\xC2\0\0\0\0\0\0\0\0\x05\0\x01\x08\x11\x11\x13\x13\x15\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x04\0\x02\x02\x04\x04\x07\x15\0\0j\x01\0\0j\x01\0\0j\x01\0\0\0\0\0\0\0\x02\0\0#\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" ו‑", 6u8) }) }]); + static MR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" आणि ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" आणि ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { 
default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" आणि ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" आणि ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" आणि ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" आणि ", 11u8), special_case: None }]); + static MNI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }]); + static AS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" আৰ\u{9c1} ", 11u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" আৰ\u{9c1} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" আৰ\u{9c1} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" আৰ\u{9c1} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static BN: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" এবং ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" এবং ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" এবং ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" এবং ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static PA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ਅਤ\u{a47} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ਅਤ\u{a47} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ਅਤ\u{a47} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ਅਤ\u{a47} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static GU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" અન\u{ac7} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" અન\u{ac7} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" અન\u{ac7} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" અન\u{ac7} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static TA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", 
", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" மற\u{bcd}றும\u{bcd} ", 23u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" மற\u{bcd}றும\u{bcd} ", 23u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" மற\u{bcd}றும\u{bcd} ", 23u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" மற\u{bcd}றும\u{bcd} ", 23u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" மற\u{bcd}றும\u{bcd} ", 23u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" மற\u{bcd}றும\u{bcd} ", 23u8), special_case: None }]); + static TE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" మర\u{c3f}యు ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" మర\u{c3f}యు ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" మర\u{c3f}యు ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" మర\u{c3f}యు ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static KA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" და ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" და ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" და ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" და ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" და ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" და ", 8u8), special_case: None }]); + static KM: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ន\u{17b7}ង ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ន\u{17b7}ង\u{200b}", 13u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ន\u{17b7}ង ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ន\u{17b7}ង ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static KO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 및 ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 및 ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 및 ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 및 ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 및 ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 및 ", 5u8), special_case: None }]); + static ML: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", എന\u{d4d}നിവ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ക\u{d42}ട\u{d3e}തെ ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", എന\u{d4d}നിവ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ക\u{d42}ട\u{d3e}തെ ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), 
special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", എന\u{d4d}നിവ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", എന\u{d4d}നിവ", 2u8), special_case: None }]); + static KK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" және ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" және ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" және ", 10u8), special_case: None }]); + static LO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ແລະ ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ແລະ ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ແລະ ", 
11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ແລະ ", 11u8), special_case: None }]); + static UND: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static KOK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", & ", 4u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static CY: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", a(c) ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a(c) ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", a(c) ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a(c) ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static PCM: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", an ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" an ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", & ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ọ ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static EN: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", and ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", & ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static FIL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", at ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" at ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", at ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" at ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static HI_LATN: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", aur ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", aur ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }]); + static HA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", da ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" da ", 4u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", da ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" da ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", da ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" da ", 4u8), special_case: None }]); + static ID: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", dan ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dan ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", dan ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dan ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static JV: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", lan ", 6u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" lan ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", lan ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" lan ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static IG: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", na ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", na ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", na ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static ZU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ne-", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ne-", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ne-", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ne-", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static SU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", sareng ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sareng ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", sareng ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sareng ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", sareng ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sareng ", 8u8), special_case: None }]); + static CEB: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None 
}, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ug ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ug ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ug ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ug ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static BRX: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", आरो ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" आरो ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", आरो ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" आरो ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", आरो ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" आरो ", 11u8), special_case: None }]); + static HI: ::Yokeable = 
icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", और ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }]); + static MAI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", और ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", और ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", और ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern 
{ default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }]); + static DOI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", त\u{947} ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" त\u{947} ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", त\u{947} ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" त\u{947} ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", त\u{947} ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" त\u{947} ", 8u8), special_case: None }]); + static SA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", तथा ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" तथा ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", तथा ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" तथा ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", तथा ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" तथा ", 11u8), special_case: None }]); + static KS_DEVA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ति ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ति ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ति ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ति ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ति ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ति ", 8u8), special_case: None }]); + static OR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ଓ ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ଓ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ଓ ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ଓ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ଓ ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ଓ ", 5u8), special_case: None }]); + static KN: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ಮತ\u{ccd}ತು ", 18u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ಮತ\u{ccd}ತು ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ಮತ\u{ccd}ತು ", 18u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ಮತ\u{ccd}ತು ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static SI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", සහ ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" සහ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", සහ ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" සහ ", 8u8), 
special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", සහ ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" සහ ", 8u8), special_case: None }]); + static CHR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ᎠᎴ ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ᎠᎴ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", & ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", & ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static SD: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ۽ ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ۽ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ۽ ", 6u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ۽ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static NE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(",", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" र ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" र ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(",", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" र ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" र ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(",", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static PS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، او ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" او ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، او ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" او ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، او ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }]); + static UR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، اور ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" اور ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، اور ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" اور ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }]); + static KS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ت\u{655}ہ ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ت\u{655}ہ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ت\u{655}ہ ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ت\u{655}ہ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ت\u{655}ہ ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ت\u{655}ہ ", 8u8), special_case: None }]); + static FA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، و ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، و ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }]); + static AM: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", እና ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" እና ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", እና ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" እና ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", እና ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" እና ", 8u8), special_case: None }]); + static TI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("ን ን", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("ን ን", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("ን ን", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("ን ን", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("ን ን", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("ን ን", 4u8), special_case: None }]); + static FF_ADLM: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts("⹁ 𞤫 ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 𞤫 ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ & ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }]); + static JA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }]); + static ZH_HK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }]); + static YUE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("同", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("同", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("同", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("同", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("同", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("同", 3u8), special_case: None }]); + static ZH: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), 
special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }]); + static ZH_HANT: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }]); + static VALUES: [&::Yokeable; 123usize] = [&AF, &AM, &AR, &AS, &AST, &AZ, &BE, &BG, &BN, &BR, &BRX, &BS, &BS_CYRL, 
&BS, &CEB, &CHR, &CS, &CV, &CY, &DA, &DE, &DOI, &DSB, &EL, &EN, &EN_001, &EN_001, &EN_IN, &ES, &ET, &EU, &FA, &FF_ADLM, &FI, &FIL, &DA, &FR, &GA, &GD, &GL, &GU, &HA, &HE, &HI, &HI_LATN, &BS, &DSB, &HU, &HY, &GL, &ID, &IG, &IS, &GL, &JA, &JV, &KA, &BS, &KGP, &KK, &KM, &KN, &KO, &KOK, &KS, &KS_DEVA, &KY, &LO, &LT, &LV, &MAI, &BS_CYRL, &ML, &MNI, &MR, &MS, &MY, &NE, &NL, &DA, &OR, &PA, &PCM, &BS, &PS, &PT, &GL, &RO, &RU, &SA, &GL, &SD, &SI, &SK, &SL, &SO, &SQ, &BS_CYRL, &BS, &SU, &SV, &SW, &TA, &TE, &TH, &TI, &TK, &TO, &TR, &TT, &UK, &UND, &UR, &UZ, &VI, &YRL, &YUE, &YUE, &ZH, &ZH_HK, &ZH_HANT, &ZH_HK, &ZU]; + static KEYS: [&str; 123usize] = ["af", "am", "ar", "as", "ast", "az", "be", "bg", "bn", "br", "brx", "bs", "bs-Cyrl", "ca", "ceb", "chr", "cs", "cv", "cy", "da", "de", "doi", "dsb", "el", "en", "en-001", "en-CA", "en-IN", "es", "et", "eu", "fa", "ff-Adlm", "fi", "fil", "fo", "fr", "ga", "gd", "gl", "gu", "ha", "he", "hi", "hi-Latn", "hr", "hsb", "hu", "hy", "ia", "id", "ig", "is", "it", "ja", "jv", "ka", "kea", "kgp", "kk", "km", "kn", "ko", "kok", "ks", "ks-Deva", "ky", "lo", "lt", "lv", "mai", "mk", "ml", "mni", "mr", "ms", "my", "ne", "nl", "no", "or", "pa", "pcm", "pl", "ps", "pt", "rm", "ro", "ru", "sa", "sc", "sd", "si", "sk", "sl", "so", "sq", "sr", "sr-Latn", "su", "sv", "sw", "ta", "te", "th", "ti", "tk", "to", "tr", "tt", "uk", "und", "ur", "uz", "vi", "yrl", "yue", "yue-Hans", "zh", "zh-HK", "zh-Hant", "zh-MO", "zu"]; + let mut metadata = icu_provider::DataResponseMetadata::default(); + let payload = if let Ok(payload) = KEYS.binary_search_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).map(|i| *unsafe { VALUES.get_unchecked(i) }) { + payload + } else { + const FALLBACKER: icu::locid_transform::fallback::LocaleFallbackerWithConfig<'static> = icu::locid_transform::fallback::LocaleFallbacker::new().for_config(::KEY.fallback_config()); + let mut fallback_iterator = FALLBACKER.fallback_for(req.locale.clone()); + loop { + if let Ok(payload) = KEYS.binary_search_by(|k| fallback_iterator.get().strict_cmp(k.as_bytes()).reverse()).map(|i| *unsafe { VALUES.get_unchecked(i) }) { + metadata.locale = Some(fallback_iterator.take()); + break payload; + } + fallback_iterator.step(); + } + }; + Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(payload)), metadata }) + } + } + }; +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_or_v1.data.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_or_v1.data.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_or_v1.data.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_or_v1.data.rs 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,123 @@ +// @generated +/// Implement `DataProvider` on the given struct using the data +/// hardcoded in this file. This allows the struct to be used with +/// `icu`'s `_unstable` constructors. +#[doc(hidden)] +#[macro_export] +macro_rules!
__impl_list_or_v1 { + ($ provider : ty) => { + #[clippy::msrv = "1.66"] + const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; + #[clippy::msrv = "1.66"] + impl icu_provider::DataProvider for $provider { + fn load(&self, req: icu_provider::DataRequest) -> Result, icu_provider::DataError> { + static MY: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" သ\u{102d}\u{102f}\u{1037}မဟ\u{102f}တ\u{103a} ", 29u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" သ\u{102d}\u{102f}\u{1037}မဟ\u{102f}တ\u{103a} ", 29u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" သ\u{102d}\u{102f}\u{1037}မဟ\u{102f}တ\u{103a} ", 29u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" သ\u{102d}\u{102f}\u{1037}မဟ\u{102f}တ\u{103a} ", 29u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" - ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" သ\u{102d}\u{102f}\u{1037}မဟ\u{102f}တ\u{103a} ", 29u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" သ\u{102d}\u{102f}\u{1037}မဟ\u{102f}တ\u{103a} ", 29u8), special_case: None }]); + static YO_BJ: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pɛ\u{300}lú ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", tabi ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tàbí ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pɛ\u{300}lú ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", tabi ", 7u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tàbí ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pɛ\u{300}lú ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", tabi ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tàbí ", 8u8), special_case: None }]); + static YO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pẹ\u{300}lú ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", tabi ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tàbí ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pẹ\u{300}lú ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", tabi ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tàbí ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pẹ\u{300}lú ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", tabi ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tàbí ", 8u8), special_case: None }]); + static AR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" أو ", 6u8), special_case: None }]); + static DSB: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" abo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" abo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" abo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" abo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" abo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" abo ", 5u8), special_case: None }]); + static SK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" alebo ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" alebo ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" alebo ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" alebo ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" alebo ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" alebo ", 7u8), special_case: None }]); + static SL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ali ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ali ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ali ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ali ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ali ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ali ", 5u8), special_case: None }]); + static SO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ama ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" ama ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ama ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ama ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ama ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ama ", 5u8), special_case: None }]); + static LT: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ar ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ar ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ar ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ar ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ar ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ar ", 4u8), special_case: None }]); + static SW: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" au ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" au ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", au ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" au ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", au ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" au ", 4u8), special_case: None }]); + static EU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" edo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" edo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" edo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" edo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" edo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" edo ", 5u8), special_case: None }]); + static DA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eller ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eller ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" el. ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" el. ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" el. ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" el. ", 5u8), special_case: None }]); + static NO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eller ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eller ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eller ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eller ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eller ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eller ", 7u8), special_case: None }]); + static IS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { 
default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eða ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eða ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eða ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eða ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eða ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eða ", 6u8), special_case: None }]); + static VI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" hoặc ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" hoặc ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" hoặc ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" hoặc ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" hoặc ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" hoặc ", 8u8), 
special_case: None }]); + static BS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ili ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ili ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ili ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ili ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ili ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ili ", 5u8), special_case: None }]); + static KGP: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ketũmỹr ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ketũmỹr ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ketũmỹr ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ketũmỹr ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ketũmỹr 
", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ketũmỹr ", 12u8), special_case: None }]); + static HA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ko ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ko ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ko ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ko ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ko ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ko ", 4u8), special_case: None }]); + static PL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" lub ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" lub ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" lub ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" lub ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), 
special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" lub ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" lub ", 5u8), special_case: None }]); + static CS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nebo ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nebo ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nebo ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nebo ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nebo ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nebo ", 6u8), special_case: None }]); + static CY: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" neu ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" neu ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" neu ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" neu ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), 
special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" neu ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" neu ", 5u8), special_case: None }]); + static GD: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" no ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" no ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" no ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" no ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" no ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" no ", 4u8), special_case: None }]); + static GA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nó ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nó ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nó ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nó ", 5u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nó ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" nó ", 5u8), special_case: None }]); + static AST: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: None }]); + static ES: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !\"\"#$$$%&&&&&&&&&&&M\x02\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E !!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:\0\0\0" } else { b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F 
!\"\"#$$$%&&&&&&&&&&&\0\0\x02M\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E !!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !\"\"#$$$%&&&&&&&&&&&M\x02\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F 
!!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E !!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:\0\0\0" } else { b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !\"\"#$$$%&&&&&&&&&&&\0\0\x02M\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E 
!!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !\"\"#$$$%&&&&&&&&&&&M\x02\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E 
!!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:\0\0\0" } else { b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !\"\"#$$$%&&&&&&&&&&&\0\0\x02M\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E !!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !\"\"#$$$%&&&&&&&&&&&M\x02\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E !!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !\"\"#$$$%&&&&&&&&&&&\0\0\x02M\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E !!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !\"\"#$$$%&&&&&&&&&&&M\x02\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E !!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:\0\0\0" } else { b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F 
!\"\"#$$$%&&&&&&&&&&&\0\0\x02M\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E !!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !\"\"#$$$%&&&&&&&&&&&M\x02\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F 
!!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E !!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:\0\0\0" } else { b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x17\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x02\x03\x04\x05\x06\x07\x08\t\t\t\t\t\t\n\x0B\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x12\x12\x12\x12\x12\x12\x13\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x14\x15\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x16\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x17\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x18\x19\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1A\x1B\x1B\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1C\x1D\x1E\x1F !\"\"#$$$%&&&&&&&&&&&\0\0\x02M\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x02\x80\x07\x0B\0\0/\x02\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x07\0\x08\x08\n\n\r\r\x0F\x0F\x11\x11\x13\x13\0\0>\x02\0\0|\0\0\0g\0\0\0|\0\0\0g\0\0\0|\0\0\0\0\0\0\0\0\x03\0\x0F\x0F\x13\x13\0\0|\0\0\0|\0\0\0\0\0\0\0\0\x02\0\0&\0\0\x12\0\0\0\x12\0\0\0\0\x14\0\0\0\x01\x01\x02\x02\x03\x03\x04\x04\x05\x05\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E\x1E\x1F\x1F !!\"\"##$$%%\0\0|\0\0\0\x06\x01\0\0|\0\0\0!\x01\0\0|\0\0\0x\x01\0\0|\0\0\0\x87\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xC3\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x04\0\0\x06\x07\x0B\x0C&\0\0\x12\0\0\0#\0\0\0\x12\0\0\0\x12\0\0\0\0\x0E\0\0\x02\x04\x04\x06\x06\x07\x0B\x0C\x14\x1C\x1C\x1D\x1D\x1E 
!!\"\"##$$%%\0\0|\0\0\0|\0\0\0|\0\0\0!\x01\0\0|\0\0\0\x96\x01\0\0\xA5\x01\0\0\xB4\x01\0\0\xD8\x01\0\0\xB4\x01\0\0\xE7\x01\0\0\xF6\x01\0\0\x05\x02\0\0\x12\0\0\0\0\x02\0\x07\x0B\0\0\x87\x01\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0/\x02\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0|\0\0\0\0\0\0\0\0\x02\0\x18\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\x96\x01\0\0\0\0\0\0\0\x03\0\x15\x15\x16\x1A\0\0\x14\x02\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x15\x17\0\0\x96\x01\0\0\0\0\0\0\0\x02\0\x17\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x1A\0\0\xB4\x01\0\0\0\0\0\0\0\x02\0\x15\x16\0\0\xB4\x01\0\0\0\0\0\0\0\x04\0\x15\x18\x19\x19\x1A\x1A\0\0|\0\0\0\x06\x01\0\0|\0\0\0\0\0\0\0\0\x02\0\x07\x0B\0\0\x8B\0\0\0\0\0\0\0\0\x02\0\x08\x08\0\0\x8B\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0:\0\0\0:\0\0\0:\0\0\0:\0\0\0\0\0\0:\0\0\0\t\0\0\0\x12\0\0\0#\0\0\0\0\0\0\0\0\0\0\0:\0\0\0:" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8) }) }]); + static DE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" oder ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" oder ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" oder ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" oder ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" oder ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" oder ", 6u8), special_case: None }]); + static AF: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" of ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" of ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), 
special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" of ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" of ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" of ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" of ", 4u8), special_case: None }]); + static EN_001: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }]); + static SQ: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ose ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ose ", 5u8), special_case: None 
}, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ose ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ose ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ose ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ose ", 5u8), special_case: None }]); + static FR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ou ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ou ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ou ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ou ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ou ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ou ", 4u8), special_case: None }]); + static BR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pe ", 4u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pe ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pe ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pe ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pe ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pe ", 4u8), special_case: None }]); + static RO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sau ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sau ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sau ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sau ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sau ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sau ", 5u8), special_case: None }]); + static FI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tai ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tai ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tai ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tai ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tai ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" tai ", 5u8), special_case: None }]); + static RM: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" u ", 3u8), special_case: None }]); + static HU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vagy ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vagy ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vagy ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vagy ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vagy ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vagy ", 6u8), special_case: None }]); + static LV: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vai ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vai ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vai ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vai ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vai ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" vai ", 5u8), special_case: None }]); + static TR: ::Yokeable = 
icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" veya ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" veya ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" veya ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" veya ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" veya ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" veya ", 6u8), special_case: None }]); + static ET: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" või ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" või ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" või ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" või ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" või ", 6u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" või ", 6u8), special_case: None }]); + static HI_LATN: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yaa ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yaa ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yaa ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yaa ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yaa ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yaa ", 5u8), special_case: None }]); + static UZ: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yoki ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yoki ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yoki ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yoki ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yoki ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yoki ", 6u8), special_case: None }]); + static TK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ýa-da ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ýa-da ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ýa-da ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ýa-da ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ýa-da ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ýa-da ", 8u8), special_case: None }]); + static EL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ή ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ή ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ή ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ή ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ή ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ή ", 4u8), special_case: None }]); + static UK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" або ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" або ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" або ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" або ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" або ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" або ", 8u8), special_case: None }]); + static CV: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" е ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" е ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" е ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" е ", 4u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" е ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" е ", 4u8), special_case: None }]); + static KY: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" же ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" же ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" же ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" же ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" же ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" же ", 6u8), special_case: None }]); + static BG: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" или ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" или ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" или ", 8u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" или ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" или ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" или ", 8u8), special_case: None }]); + static BE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ці ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ці ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ці ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ці ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ці ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ці ", 6u8), special_case: None }]); + static HY: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" կամ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" կամ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" կամ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" կամ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" կամ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" կամ ", 8u8), special_case: None }]); + static HE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" או ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" או ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" או ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" או ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" או ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" או ", 6u8), special_case: None }]); + static HI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" या ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" या ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" या ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" या ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" या ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" या ", 8u8), special_case: None }]); + static AS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ব\u{9be} ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ব\u{9be} ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ব\u{9be} ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ব\u{9be} ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ব\u{9be} ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ব\u{9be} ", 8u8), special_case: None }]); + static PA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ਜਾ\u{a02} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ਜਾ\u{a02} ", 
11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ਜਾ\u{a02} ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ਜਾ\u{a02} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ਜਾ\u{a02} ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ਜਾ\u{a02} ", 11u8), special_case: None }]); + static OR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" କ\u{b3f}ମ\u{b4d}ବ\u{b3e} ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" କ\u{b3f}ମ\u{b4d}ବ\u{b3e} ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" କ\u{b3f}ମ\u{b4d}ବ\u{b3e} ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" କ\u{b3f}ମ\u{b4d}ବ\u{b3e} ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" କ\u{b3f}ମ\u{b4d}ବ\u{b3e} ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" କ\u{b3f}ମ\u{b4d}ବ\u{b3e} ", 20u8), special_case: None }]); + static TA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 
2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" அல\u{bcd}லது ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" அல\u{bcd}லது ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" அல\u{bcd}லது ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" அல\u{bcd}லது ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" அல\u{bcd}லது ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" அல\u{bcd}லது ", 20u8), special_case: None }]); + static TE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ల\u{c47}ద\u{c3e} ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ల\u{c47}ద\u{c3e} ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ల\u{c47}ద\u{c3e} ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ల\u{c47}ద\u{c3e} ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ల\u{c47}ద\u{c3e} ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ల\u{c47}ద\u{c3e} ", 14u8), special_case: None }]); + static TH: ::Yokeable = 
icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" หร\u{e37}อ ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" หร\u{e37}อ ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" หร\u{e37}อ ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("หร\u{e37}อ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" หร\u{e37}อ ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("หร\u{e37}อ", 12u8), special_case: None }]); + static LO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ຫ\u{ebc}\u{eb7} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ຫ\u{ebc}\u{eb7} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ຫ\u{ebc}\u{eb7} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ຫ\u{ebc}\u{eb7} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" ຫ\u{ebc}\u{eb7} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ຫ\u{ebc}\u{eb7} ", 11u8), special_case: None }]); + static KA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ან ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ან ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ან ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ან ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ან ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ან ", 8u8), special_case: None }]); + static KM: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ឬ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ឬ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ឬ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ឬ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ឬ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ឬ ", 5u8), special_case: None }]); + static PCM: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ọ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ọ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ọ ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ọ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ọ ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ọ ", 5u8), special_case: None }]); + static KO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 또는 ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 또는 ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 또는 ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 또는 ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 또는 ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 또는 ", 8u8), special_case: None }]); + static MN: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", зэргийн аль нэг", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" эсвэл ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", зэргийн аль нэг", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" эсвэл ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", зэргийн аль нэг", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" эсвэл ", 12u8), special_case: None }]); + static ID: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", atau ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" atau ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", atau ", 7u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" atau ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", atau ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" atau ", 6u8), special_case: None }]); + static FO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ella ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ella ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ella ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ella ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ella ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ella ", 6u8), special_case: None }]); + static CEB: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", o ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", o ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", o ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" o ", 3u8), special_case: None }]); + static UND: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", or ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", or ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", or ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }]); + static TO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", pē ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pē ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", pē ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pē ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", pē ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" pē ", 5u8), special_case: None }]); + static QU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", utaq ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" utaq ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", utaq ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" utaq ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", utaq ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" utaq ", 6u8), special_case: None }]); + static JV: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", utowo ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" utowo ", 7u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", utowo ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" utowo ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", utowo ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" utowo ", 7u8), special_case: None }]); + static AZ: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", yaxud ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" yaxud ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", yaxud ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", yaxud ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", yaxud ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", yaxud ", 8u8), special_case: None }]); + static KK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", не болмаса ", 22u8), 
special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" не ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", не болмаса ", 22u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" не ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", не болмаса ", 22u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" не ", 6u8), special_case: None }]); + static SD: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", يا ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" يا ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", يا ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" يا ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", يا ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" يا ", 6u8), special_case: None }]); + static PS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), 
special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", یا ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", یا ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", یا ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" or ", 4u8), special_case: None }]); + static MR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", कि\u{902}वा ", 18u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" कि\u{902}वा ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" कि\u{902}वा ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" कि\u{902}वा ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" कि\u{902}वा ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" कि\u{902}वा ", 17u8), special_case: None }]); + static KOK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", वा ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" वा ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", वा ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" वा ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", वा ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" वा ", 8u8), special_case: None }]); + static BN: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ব\u{9be} ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ব\u{9be} ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ব\u{9be} ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ব\u{9be} ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ব\u{9be} ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ব\u{9be} ", 
8u8), special_case: None }]); + static GU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", અથવા ", 15u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" અથવા ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" અથવા ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" અથવા ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" અથવા ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" અથવા ", 14u8), special_case: None }]); + static KN: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ಅಥವಾ ", 15u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ಅಥವಾ ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ಅಥವಾ ", 15u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ಅಥವಾ ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ಅಥವಾ ", 15u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ಅಥವಾ ", 14u8), special_case: None }]); + static ML: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", അല\u{d4d}ലെങ\u{d4d}കിൽ ", 33u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" അല\u{d4d}ലെങ\u{d4d}കിൽ ", 32u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", അല\u{d4d}ലെങ\u{d4d}കിൽ ", 33u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" അല\u{d4d}ലെങ\u{d4d}കിൽ ", 32u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", അല\u{d4d}ലെങ\u{d4d}കിൽ ", 33u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" അല\u{d4d}ലെങ\u{d4d}കിൽ ", 32u8), special_case: None }]); + static SI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", හෝ ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" හෝ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", හෝ ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" හෝ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", හෝ ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" හෝ ", 8u8), special_case: None }]); + static CHR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ᎠᎴᏱᎩ ", 15u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ᎠᎴᏱᎩ ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ᎠᎴᏱᎩ ", 15u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ᎠᎴᏱᎩ ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ᎠᎴᏱᎩ ", 15u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ᎠᎴᏱᎩ ", 14u8), special_case: None }]); + static UR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، یا ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" یا ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، یا ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" یا ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، یا ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" یا ", 6u8), special_case: None }]); + static FA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، یا ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" یا ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، یا ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" یا ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، یا ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" یا ", 6u8), special_case: None }]); + static TI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ወይ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ወይ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ወይ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ወይ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ወይ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ወይ ", 8u8), special_case: None }]); + static AM: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ወይም ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ወይም \u{feff}", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ወይም ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ወይም \u{feff}", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ወይም ", 14u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ወይም \u{feff}", 11u8), special_case: None }]); + static FF_ADLM: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ 𞤥𞤢\u{1e944}𞤯𞤵𞤲 ", 29u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 𞤥𞤢\u{1e944}𞤯𞤵𞤲 ", 26u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ 𞤥𞤢\u{1e944}𞤯𞤵𞤲 ", 29u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 𞤥𞤢\u{1e944}𞤯𞤵𞤲 ", 26u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ 𞤥𞤢\u{1e944}𞤯𞤵𞤲 ", 29u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 𞤥𞤢\u{1e944}𞤯𞤵𞤲 ", 26u8), special_case: None }]); + static YUE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 或 ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 或 ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 或 ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 或 ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 或 ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 或 ", 5u8), special_case: None }]); + static JA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、または", 12u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("または", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、または", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("または", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、または", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("または", 9u8), special_case: None }]);
+ static ZH: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("或", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("或", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("或", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("或", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("或", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("或", 3u8), special_case: None }]);
+ static VALUES: [&::Yokeable; 111usize] = [&AF, &AM, &AR, &AS, &AST, &AZ, &BE, &BG, &BN, &BR, &BS, &AST, &CEB, &CHR, &CS, &CV, &CY, &DA, &DE, &DSB, &EL, &EN_001, &EN_001, &ES, &ET, &EU, &FA, &FF_ADLM, &FI, &CEB, &FO, &FR, &GA, &GD, &FR, &GU, &HA, &HE, &HI, &HI_LATN, &BS, &DSB, &HU, &HY, &AST, &ID, &IS, &AST, &JA, &JV, &KA, &AST, &KGP, &KK, &KM, &KN, &KO, &KOK, &KY, &LO, &LT, &LV, &BG, &ML, &MN, &MR, &ID, &MY, &KOK,
&AF, &NO, &OR, &PA, &PCM, &PL, &PS, &FR, &QU, &RM, &RO, &BG, &AST, &SD, &SI, &SK, &SL, &SO, &SQ, &BG, &BS, &NO, &SW, &TA, &TE, &TH, &TI, &TK, &TO, &TR, &UK, &UND, &UR, &UZ, &VI, &YO, &YO_BJ, &RM, &YUE, &YUE, &ZH, &ZH];
+ static KEYS: [&str; 111usize] = ["af", "am", "ar", "as", "ast", "az", "be", "bg", "bn", "br", "bs", "ca", "ceb", "chr", "cs", "cv", "cy", "da", "de", "dsb", "el", "en-001", "en-CA", "es", "et", "eu", "fa", "ff-Adlm", "fi", "fil", "fo", "fr", "ga", "gd", "gl", "gu", "ha", "he", "hi", "hi-Latn", "hr", "hsb", "hu", "hy", "ia", "id", "is", "it", "ja", "jv", "ka", "kea", "kgp", "kk", "km", "kn", "ko", "kok", "ky", "lo", "lt", "lv", "mk", "ml", "mn", "mr", "ms", "my", "ne", "nl", "no", "or", "pa", "pcm", "pl", "ps", "pt", "qu", "rm", "ro", "ru", "sc", "sd", "si", "sk", "sl", "so", "sq", "sr", "sr-Latn", "sv", "sw", "ta", "te", "th", "ti", "tk", "to", "tr", "uk", "und", "ur", "uz", "vi", "yo", "yo-BJ", "yrl", "yue", "yue-Hans", "zh", "zh-Hant"];
+ let mut metadata = icu_provider::DataResponseMetadata::default();
+ let payload = if let Ok(payload) = KEYS.binary_search_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).map(|i| *unsafe { VALUES.get_unchecked(i) }) {
+ payload
+ } else {
+ const FALLBACKER: icu::locid_transform::fallback::LocaleFallbackerWithConfig<'static> = icu::locid_transform::fallback::LocaleFallbacker::new().for_config(::KEY.fallback_config());
+ let mut fallback_iterator = FALLBACKER.fallback_for(req.locale.clone());
+ loop {
+ if let Ok(payload) = KEYS.binary_search_by(|k| fallback_iterator.get().strict_cmp(k.as_bytes()).reverse()).map(|i| *unsafe { VALUES.get_unchecked(i) }) {
+ metadata.locale = Some(fallback_iterator.take());
+ break payload;
+ }
+ fallback_iterator.step();
+ }
+ };
+ Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(payload)), metadata })
+ }
+ }
+ };
+}
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_unit_v1.data.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_unit_v1.data.rs
--- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_unit_v1.data.rs 1970-01-01 00:00:00.000000000 +0000
+++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros/list_unit_v1.data.rs 2023-12-21 18:27:08.000000000 +0000
@@ -0,0 +1,106 @@
+// @generated
+/// Implement `DataProvider` on the given struct using the data
+/// hardcoded in this file. This allows the struct to be used with
+/// `icu`'s `_unstable` constructors.
+#[doc(hidden)]
+#[macro_export]
+macro_rules!
__impl_list_unit_v1 { + ($ provider : ty) => { + #[clippy::msrv = "1.66"] + const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO; + #[clippy::msrv = "1.66"] + impl icu_provider::DataProvider for $provider { + fn load(&self, req: icu_provider::DataRequest) -> Result, icu_provider::DataError> { + static BE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static JA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }]); + static LT: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ir ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ir ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static TH: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" และ ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" และ ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" และ ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 
", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static ZH: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }]); + static DSB: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static CS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a\u{a0}", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a\u{a0}", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" a\u{a0}", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static GD: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" agus ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" agus ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ’s ", 6u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ’s ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static GA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" agus ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" agus ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static YRL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" asuí ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" asuí ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" asuí ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" asuí ", 7u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static IT: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static PT_PT: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }]); + static GL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static AF: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }]); + static NL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" en ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static FR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static EU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eta ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eta ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eta ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eta ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eta ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" eta ", 5u8), special_case: None }]); + static HR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static BS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }]); + static KEA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" i ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static SL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" in ", 4u8), special_case: None }]); + static FI: ::Yokeable = 
icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ja ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static KGP: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" kar ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" kar ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" kar ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" kar ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static TO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo e ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo e ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo e ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo e ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo e ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" mo e ", 6u8), special_case: None }]); + static SW: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }]); + static IS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }]); + static DA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }]); + static NO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static FO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" og ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static LV: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" un ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" un ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" un ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" un ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static DE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" und ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" und ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" und ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static AST: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: None }]); + static ES_PY: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static ES_DO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static ES: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu::list::provider::SpecialCasePattern { condition: unsafe { icu::list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu::list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static HU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" és ", 5u8), special_case: None }]); + static BS_CYRL: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }]); + static BG: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }]); + static UK: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" і ", 4u8), special_case: None }]); + static TT: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" һәм ", 8u8), special_case: None }]); + static HY: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" և ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" և ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" և ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" և ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static HE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ו-", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static MNI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" অমস\u{9c1}ং ", 17u8), special_case: None }]); + static SO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" iyo ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static RO: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" și ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static PS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" او ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static GU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" અન\u{ac7} ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static ML: ::Yokeable = 
icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ക\u{d42}ട\u{d3e}തെ ", 20u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static MS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dan ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" dan ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { 
default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static AS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static ZU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static KN: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static UND: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static HI_LATN: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", aur ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]); + static IG: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", na ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", na ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", na ", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" na ", 4u8), special_case: None }]); + static SU: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", sareng ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sareng ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", sareng ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sareng ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", sareng ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" sareng ", 8u8), special_case: None }]); + static BRX: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", आरो ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" आरो ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", आरो ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern 
{ default: icu::list::provider::ListJoinerPattern::from_parts(" आरो ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", आरो ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" आरो ", 11u8), special_case: None }]); + static HI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", और ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static MAI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", और ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: 
icu::list::provider::ListJoinerPattern::from_parts(", और ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", और ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" और ", 8u8), special_case: None }]); + static DOI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", त\u{947} ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" त\u{947} ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", त\u{947} ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" त\u{947} ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", त\u{947} ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" त\u{947} ", 8u8), special_case: None }]); + static SA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", तथा ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" तथा ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", तथा ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" तथा ", 11u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", तथा ", 12u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" तथा ", 11u8), special_case: None }]); + static KS_DEVA: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ति ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ति ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ति ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ति ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ति ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ति ", 8u8), special_case: None }]); + static SI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", සහ ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" සහ ", 8u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", සහ ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" සහ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", සහ ", 9u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" සහ ", 8u8), special_case: None }]); + static UR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، اور ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، اور ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" اور ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، اور ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" اور ", 8u8), special_case: None }]); + static NE: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(",", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(",", 1u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(",", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(",", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(",", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("", 0u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static MY: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("- ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("- ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a} ", 16u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a}", 15u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("- ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("- ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a} ", 16u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a} ", 16u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a} ", 16u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("န\u{103e}င\u{1037}\u{103a} ", 16u8), special_case: None }]); + static KS: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern 
{ default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ت\u{655}ہ ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ت\u{655}ہ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ت\u{655}ہ ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ت\u{655}ہ ", 8u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، ت\u{655}ہ ", 10u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ت\u{655}ہ ", 8u8), special_case: None }]); + static AR: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، و", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، و", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، و", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، و", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، و", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، و", 5u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و", 3u8), special_case: None }]); + static FA: ::Yokeable = 
icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، و ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" و ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("، و ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("،\u{200f} ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static TI: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("ን ን", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("ን ን", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("ን ን", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static AM: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("፣ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static FF_ADLM: ::Yokeable = icu::list::provider::ListFormatterPatternsV1([icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 𞤫 ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 𞤫 ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts("⹁ ", 4u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" 𞤫 ", 6u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, 
icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }, icu::list::provider::ConditionalListJoinerPattern { default: icu::list::provider::ListJoinerPattern::from_parts(" ", 1u8), special_case: None }]); + static VALUES: [&::Yokeable; 114usize] = [&AF, &AM, &AR, &AS, &AST, &BE, &BG, &BRX, &BS, &BS_CYRL, &BS, &AS, &AS, &CS, &BE, &DA, &DE, &DOI, &DSB, &AS, &AS, &ES, &ES_DO, &ES_PY, &ES_PY, &AS, &EU, &FA, &FF_ADLM, &FI, &AS, &FO, &FR, &GA, &GD, &GL, &GU, &HE, &HI, &HI_LATN, &HR, &DSB, &HU, &HY, &AS, &IG, &IS, &IT, &JA, &AS, &KEA, &KGP, &BE, &BE, &KN, &BE, &AS, &KS, &KS_DEVA, &AS, &AS, <, &LV, &MAI, &BS_CYRL, &ML, &BE, &MNI, &AS, &MS, &MY, &NE, &NL, &AS, &NO, &AS, &AS, &AS, &BS, &PS, &IT, &PT_PT, &IT, &RO, &BE, &SA, &IT, &SI, &SL, &SO, &PT_PT, &BS_CYRL, &BS, &SU, &AS, &SW, &AS, &TH, &TI, &AS, &TO, &BE, &TT, &UK, &UND, &UR, &BE, &AS, &YRL, &JA, &JA, &ZH, &JA, &ZU]; + static KEYS: [&str; 114usize] = ["af", "am", "ar", "as", "ast", "be", "bg", "brx", "bs", "bs-Cyrl", "ca", "ceb", "chr", "cs", "cv", "da", "de", "doi", "dsb", "el", "en", "es", "es-DO", "es-PY", "es-US", "et", "eu", "fa", "ff-Adlm", "fi", "fil", "fo", "fr", "ga", "gd", "gl", "gu", "he", "hi", "hi-Latn", "hr", "hsb", "hu", "hy", "ia", "ig", "is", "it", "ja", "jv", "kea", "kgp", "kk", "km", "kn", "ko", "kok", "ks", "ks-Deva", "ky", "lo", "lt", "lv", "mai", "mk", "ml", "mn", "mni", "mr", "ms", "my", "ne", "nl", "nn", "no", "or", "pa", "pcm", "pl", "ps", "pt", "pt-PT", "rm", "ro", "ru", "sa", "sc", "si", "sl", "so", "sq", "sr", "sr-Latn", "su", "sv", "sw", "ta", "th", "ti", "tk", "to", "tr", "tt", "uk", "und", "ur", "uz", "vi", "yrl", "yue", "yue-Hans", "zh", "zh-Hant", "zu"]; + let mut metadata = icu_provider::DataResponseMetadata::default(); + let payload = if let Ok(payload) = KEYS.binary_search_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).map(|i| *unsafe { VALUES.get_unchecked(i) }) { + payload + } else { + const FALLBACKER: icu::locid_transform::fallback::LocaleFallbackerWithConfig<'static> = icu::locid_transform::fallback::LocaleFallbacker::new().for_config(::KEY.fallback_config()); + let mut fallback_iterator = FALLBACKER.fallback_for(req.locale.clone()); + loop { + if let Ok(payload) = KEYS.binary_search_by(|k| fallback_iterator.get().strict_cmp(k.as_bytes()).reverse()).map(|i| *unsafe { VALUES.get_unchecked(i) }) { + metadata.locale = Some(fallback_iterator.take()); + break payload; + } + fallback_iterator.step(); + } + }; + Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(payload)), metadata }) + } + } + }; +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/macros.rs 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,41 @@ +// @generated +/// Marks a type as a data provider. You can then use macros like +/// `impl_core_helloworld_v1` to add implementations. +/// +/// ```ignore +/// struct MyProvider; +/// const _: () = { +/// include!("path/to/generated/macros.rs"); +/// make_provider!(MyProvider); +/// impl_core_helloworld_v1!(MyProvider); +/// } +/// ``` +#[doc(hidden)] +#[macro_export] +macro_rules! 
__make_provider { + ($ name : ty) => { + #[clippy::msrv = "1.66"] + impl $name { + #[doc(hidden)] + #[allow(dead_code)] + pub const MUST_USE_MAKE_PROVIDER_MACRO: () = (); + } + }; +} +#[doc(inline)] +pub use __make_provider as make_provider; +#[macro_use] +#[path = "macros/list_and_v1.data.rs"] +mod list_and_v1; +#[doc(inline)] +pub use __impl_list_and_v1 as impl_list_and_v1; +#[macro_use] +#[path = "macros/list_or_v1.data.rs"] +mod list_or_v1; +#[doc(inline)] +pub use __impl_list_or_v1 as impl_list_or_v1; +#[macro_use] +#[path = "macros/list_unit_v1.data.rs"] +mod list_unit_v1; +#[doc(inline)] +pub use __impl_list_unit_v1 as impl_list_unit_v1; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/data/mod.rs 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,29 @@ +// @generated +include!("macros.rs"); +macro_rules! impl_data_provider { + ($ provider : ty) => { + make_provider!($provider); + impl_list_and_v1!($provider); + impl_list_or_v1!($provider); + impl_list_unit_v1!($provider); + }; +} +#[allow(unused_macros)] +macro_rules! impl_any_provider { + ($ provider : ty) => { + #[clippy::msrv = "1.66"] + impl icu_provider::AnyProvider for $provider { + fn load_any(&self, key: icu_provider::DataKey, req: icu_provider::DataRequest) -> Result { + match key.hashed() { + h if h == ::KEY.hashed() => icu_provider::DataProvider::::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response), + h if h == ::KEY.hashed() => icu_provider::DataProvider::::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response), + h if h == ::KEY.hashed() => icu_provider::DataProvider::::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response), + _ => Err(icu_provider::DataErrorKind::MissingDataKey.with_req(key, req)), + } + } + } + }; +} +#[clippy::msrv = "1.66"] +pub struct BakedDataProvider; +impl_data_provider!(BakedDataProvider); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_list_data/src/lib.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_list_data/src/lib.rs 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,12 @@ +// This file is part of ICU4X. For terms of use, please see the file +// called LICENSE at the top level of the ICU4X source tree +// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ). + +//! 
Data for the icu_list crate + +#![no_std] + +#[cfg(icu4x_custom_data)] +include!(concat!(core::env!("ICU4X_DATA_DIR"), "/macros.rs")); +#[cfg(not(icu4x_custom_data))] +include!("../data/macros.rs"); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/.cargo-checksum.json rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/.cargo-checksum.json --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/.cargo-checksum.json 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/.cargo-checksum.json 2023-12-21 18:27:08.000000000 +0000 @@ -1 +1 @@ -{"files":{"Cargo.lock":"1aa9f21333d579ccbdf71478538b586518faa9d7f73f7ba8049649f5d4e33b43","Cargo.toml":"d4d6c83d31deeb379ed08f9340dd71e9726ecaa8e89e55b01254fed5acdee859","LICENSE":"4ad7541d66a407234e2c84902124cef325c29f3e966353efdb800bedb8b8da21","README.md":"16472983782c836d9e97b4df4754baab7bb247d0a945d1a97cafb3210e951d8f","benches/fixtures/langid.json":"373c11527653c63c685c9e229a8de5ae2b557c25b686a9d891c59e1f603232d8","benches/fixtures/locale.json":"669b19db933094290a45bf856559920f4e92401072e364ac82c482119dc9233a","benches/fixtures/mod.rs":"9a9671eddcf38a6faa10cb814949f8abc15d89f5e70f3ad6f684f1bc3ffe72ea","benches/fixtures/subtags.json":"28be3a639e452d713e807d5779b6819e06277e2dbbf67801ef34964fb9b074b6","benches/helpers/macros.rs":"bba0945a826bc083156bc302507c48c0c99c4d965e2a84352644d768591b0339","benches/helpers/mod.rs":"c98167d866fdb7f66c8cab41e8d57b5aab9e9707dfc66c37ef136e088dac6fef","benches/iai_langid.rs":"7984d12b78a0e2ecfa1eac74ccf7310627285de821c13fab2fe000f0e961a136","benches/langid.rs":"4e3d307d48fd9071308a567a0ef927b229814978abd2ba29f57c65edd51f38e4","benches/locale.rs":"b8d5b1e3f8b5578c549a5149229656fb60de26b76a1bf66b6c1abce75042d674","benches/subtags.rs":"e7e80dabaf31bf031779456614f139cafcdadb805986e71b49133ac964928432","examples/filter_langids.rs":"e000b860432b1646c74709177e4e4df1cfdc9620f41a677d15a5016bd7eb9b29","examples/syntatically_canonicalize_locales.rs":"49184e6e52e2264c61a479c0df88c01e7f7079f3da991445faecca3844594079","src/databake.rs":"894d0f73836d99915c5726d8984e7833498e841e2985cedfd5893aeb2bdcc958","src/extensions/mod.rs":"684cb51f3edc0385f6f7703e81a83c3455bd035d0016bd8e9024b26546037cd1","src/extensions/other/mod.rs":"ee377c2eeaa6b622a2c80807bffdd307800030fe2ec8a99a9729bdde45452635","src/extensions/other/subtag.rs":"431d27a0a5adca7d56c7ea3a6de2a0412e1e14ad2dd8a8e09a548849984b84b6","src/extensions/private/mod.rs":"5d53d32adb79386416b6eb4a9de218423f3bee4000e96e4899b78462f609531c","src/extensions/private/other.rs":"586fd24398e78c5fda0afdb98de28a6467afd2d702683daf5dfab2a6c45af1e9","src/extensions/transform/fields.rs":"9221478ce7565738bb27951a6be25b3ebc5c11d63afb2ca744fd4c587d155e9b","src/extensions/transform/key.rs":"53e8c9ce13f00f678c2322855cc1d90afd91cd33a2af3758d098b7bbcc7090e5","src/extensions/transform/mod.rs":"111ebf59ad6cd9a09a8eb84367a0053ff03fff8329f07310131784a457d07b61","src/extensions/transform/value.rs":"577b642b32f7a74e98ba5bee8e30700021c8b0e6da63538398aaf95d13edfd65","src/extensions/unicode/attribute.rs":"d558a193b72f54cdb03afe8e023a145ac74832c8416ca55401cd417ebba2431c","src/extensions/unicode/attributes.rs":"ddc0361968151e28cc1e6a3d91056a0f71f2c42f22dacecd339aaa67dfdcf899","src/extensions/unicode/key.rs":"6c8694527079c5dd5f03f8e85f23ae6b5aa4b47899d1047036960e8400dca7de","src/extensions/unicode/keywords.rs":"782ce7de79b4151b072ea263d1c614e0a14e162043345eead6691d18b9de2092","src/extensions/unicode/mod.rs":"e066cbdabf567a40c777428d071e2e82389a043bd552bc1e83202401c86e0b2e","src/extensio
ns/unicode/value.rs":"786ac19494618a3d45e6c0d0f85311c57812ba0e7a286a2ea903c3d358811e47","src/helpers.rs":"aae44d20013596d872d99e9db811f0dede093cc2d33d52104e11f45897185490","src/langid.rs":"314ef5d6764e35d5a30bd53e43a8d547c81fe7ee875fe52e54076215a89a64f1","src/lib.rs":"ba7bec8f041418942ccc36693d26c733d18d5d30dc06355c86870ca52b1b4a33","src/locale.rs":"bb59685345580711b9b1510a99db64f14080fbc176a461d2a6322ffda1bdb96c","src/macros.rs":"f7154fc103ea1120a55bb5898540b20df80de6eec42e70ce15f339d997f2bf52","src/ordering.rs":"d76c6f26ffb5eb1e24646b70ce532985af326c46c488abda52c89074387f1dcc","src/parser/errors.rs":"8af937e67d197272c0f4806cc40cb191c878085b8687f987e358ee01ac2b5004","src/parser/langid.rs":"749ac36945e7b5e24cbc82f04900f10f770fc24f7ce007af4c3be7a325ccc631","src/parser/locale.rs":"075c74803891894ad50bbedc69366931b8e76c0992b3caa1a5632f0a6816ccfd","src/parser/mod.rs":"5182392624876a419b1469d135d175aba680bb13d14e4f6ea0cfc4e071fbc743","src/serde.rs":"06e940e4f2d15f02d313b4e2b233aea3e74c93c6c43076f5ffe52d49c133608f","src/subtags/language.rs":"2ebc98952bd4a6b4077c77da1895225faacc17020af8a47675b8b41b05b9e7eb","src/subtags/mod.rs":"0257f746ed368ea3fa675054c9e7e40d972ec31cd7cc525be655a16a83c9d17b","src/subtags/region.rs":"4f4120f4910d0a4496f29c193d00313e71be4c646867d97ebd0e9a7438693847","src/subtags/script.rs":"6b1a68783cb90409bdd39b0184dfb2cb1c302fdee7202e3b6f7c7c8941bc7dfe","src/subtags/variant.rs":"956f1ea3d98172b6ead333411f010cf4e84404584a3051cb775d148d79beb4f8","src/subtags/variants.rs":"d312ec8994f25d0c0285f71f4bcdff5a06bdcfa45da3e3db31478d6f0293c741","src/zerovec.rs":"9d01a235d18296fbf0c2e89d188459e9446df0e63aaedc7e150165604af885b9","tests/fixtures/canonicalize.json":"9f2b7cbef72c24944cd4dc50de368c6e3ef69949f29c9ce1aa8807de767a4d0a","tests/fixtures/invalid-extensions.json":"4b7888006360b216030597257de8c301e22877e75216818967bbd8c83b6dbb0b","tests/fixtures/invalid.json":"5247849a6eb805619b8e70254c855227f7bdaf71431b071c91c6cc378ae9766e","tests/fixtures/langid.json":"960fd01722217ef1ea9077e2e0821d7089fe318a241bd7fb7918f50bf8f3f5c3","tests/fixtures/locale.json":"8606e0569fc6ea0e50a1fecb9295b911fbef7d8dbfde3c585476284a751baccf","tests/fixtures/mod.rs":"aea619960540b92199345cbd20ff03d2cb451aa2ce9aa6cf7915223ee9f812a3","tests/helpers/mod.rs":"d3bf59e7eed6230f340bef6c87a7b8de3a387ec391f60afc1b15a0d001cbfb67","tests/langid.rs":"82da0497c4b7d7c5d416ddb96bad0d13d4e51b735b5ed3164a25861de28e2118","tests/locale.rs":"bfbb137e6183e71e6e05076fd3b1caea98a70e69992c8626bb8f316eebc3a23c"},"package":"3003f85dccfc0e238ff567693248c59153a46f4e6125ba4020b973cef4d1d335"} \ No newline at end of file 
+{"files":{"Cargo.lock":"6214b473d2f714c579f3bb23814b478fb419ee5318c614e7ea77ee2b3c8c8ae3","Cargo.toml":"b73d1db3dab5e1607843117edff14504ddecbe381f7ad64ea1544bc726779e2d","LICENSE":"853f87c96f3d249f200fec6db1114427bc8bdf4afddc93c576956d78152ce978","README.md":"27c4ba4df8933825ab38b7da3de3b678bee7f1b4a011c6268861da643ac52de2","benches/fixtures/langid.json":"373c11527653c63c685c9e229a8de5ae2b557c25b686a9d891c59e1f603232d8","benches/fixtures/locale.json":"669b19db933094290a45bf856559920f4e92401072e364ac82c482119dc9233a","benches/fixtures/mod.rs":"9a9671eddcf38a6faa10cb814949f8abc15d89f5e70f3ad6f684f1bc3ffe72ea","benches/fixtures/subtags.json":"28be3a639e452d713e807d5779b6819e06277e2dbbf67801ef34964fb9b074b6","benches/helpers/macros.rs":"bba0945a826bc083156bc302507c48c0c99c4d965e2a84352644d768591b0339","benches/helpers/mod.rs":"c98167d866fdb7f66c8cab41e8d57b5aab9e9707dfc66c37ef136e088dac6fef","benches/iai_langid.rs":"8e8f93e4b4e2e70771f86eccfaec8c38f2f8a79f569d72eef29a64bb730f3e0d","benches/langid.rs":"4e3d307d48fd9071308a567a0ef927b229814978abd2ba29f57c65edd51f38e4","benches/locale.rs":"b8d5b1e3f8b5578c549a5149229656fb60de26b76a1bf66b6c1abce75042d674","benches/subtags.rs":"e7e80dabaf31bf031779456614f139cafcdadb805986e71b49133ac964928432","examples/filter_langids.rs":"e000b860432b1646c74709177e4e4df1cfdc9620f41a677d15a5016bd7eb9b29","examples/syntatically_canonicalize_locales.rs":"49184e6e52e2264c61a479c0df88c01e7f7079f3da991445faecca3844594079","src/databake.rs":"894d0f73836d99915c5726d8984e7833498e841e2985cedfd5893aeb2bdcc958","src/extensions/mod.rs":"684cb51f3edc0385f6f7703e81a83c3455bd035d0016bd8e9024b26546037cd1","src/extensions/other/mod.rs":"b126de24198275f847737f8d18715dc7276417f1028b14c84f970275d231c014","src/extensions/other/subtag.rs":"41e9e943d67d5940c1fa01d027e8713c5e95819e4542c4a7f8fbadc9fc47b085","src/extensions/private/mod.rs":"eedc5265a36dde934b69ef08a9f61666f138efbc4c5fd04eb88d20b58f2f6036","src/extensions/private/other.rs":"604341d3888b946102bbc1f9d6d1cc146a1aed26c55158385641f7a80a434b72","src/extensions/transform/fields.rs":"208f7783a49f53e0e120c51b8cdf8df43387e69d0f8cca0621802097b17c9094","src/extensions/transform/key.rs":"05ef426db886862257b4d8e11d0d7762694e858ed8e6e683e40a765be1d7f05b","src/extensions/transform/mod.rs":"31dd61ae69096bef3c8c9948e9a812fdfcc846c30623a645f221fdcde5f89236","src/extensions/transform/value.rs":"31f596b2f70fe19e42992e08dd0ca1130a4b89a41719983609ebf486fe8e0985","src/extensions/unicode/attribute.rs":"021115b6b1880048411dc6a983039dbf4cfce8eabf6895afc008356f13ced960","src/extensions/unicode/attributes.rs":"3b6c10548f78c5a1735d2c6b6c6b5cd9a11c7195843b2f3d71241e2931286412","src/extensions/unicode/key.rs":"3822a2710eeb6d8569666a0f4097cc0a85e5e490d8b7ff0b75a983e686cb26d3","src/extensions/unicode/keywords.rs":"bc33ab96f39d5c0d0b94ed076aec778ebb296a6ac14d141aee0ee3785c442c6d","src/extensions/unicode/mod.rs":"9aaa6e8a3b243d8398bc30d95be7eb003a82d64937979544e52287663196452b","src/extensions/unicode/value.rs":"b25db7ee38e42aa8473bdb7ee7b6ae48339f8f6de2a7f6bddc7d93804df91f39","src/helpers.rs":"8860167ebd2de94a977241efb0a3b60699db537fc64633318fba71c659adcce8","src/langid.rs":"7ced139493bf508aaf2ff8655ff78449194a2745642106b7f80fb9f604ace54a","src/lib.rs":"b0086f71477baa14abe327aece797f3a305ebc0b7cfc0fb7105f1a1dd64108ca","src/locale.rs":"51a28b67ac5df4261fee82501d5bd42e2621b328b84cf85cdddd7c5f1dadc707","src/macros.rs":"f7154fc103ea1120a55bb5898540b20df80de6eec42e70ce15f339d997f2bf52","src/ordering.rs":"d76c6f26ffb5eb1e24646b70ce532985af326c46c488abda52c89074387f1
dcc","src/parser/errors.rs":"8af937e67d197272c0f4806cc40cb191c878085b8687f987e358ee01ac2b5004","src/parser/langid.rs":"282678684bf1530a92d5070cd02caef0e5a2797eeebb2a793febe5c74cb15d23","src/parser/locale.rs":"075c74803891894ad50bbedc69366931b8e76c0992b3caa1a5632f0a6816ccfd","src/parser/mod.rs":"5182392624876a419b1469d135d175aba680bb13d14e4f6ea0cfc4e071fbc743","src/serde.rs":"06e940e4f2d15f02d313b4e2b233aea3e74c93c6c43076f5ffe52d49c133608f","src/subtags/language.rs":"9d256e02908b57afdec69a453862af1a1267b04323b9522080e5dafc891a7a63","src/subtags/mod.rs":"a31350b679598b7452849ee6f8f56aefb2f2e370575ffe34dd643b62a0ca3fec","src/subtags/region.rs":"22a6dbe130899ebaab5482ca4f512de931eda1c5194639302995f696082344a2","src/subtags/script.rs":"1d57233bd238af03d4c347adb06b238bc108649da2bd395c03d6c1e879725f8a","src/subtags/variant.rs":"a0a14e1f26f1438b47e9419e696247f197c4a1042070a6e4a41620be720b5ac5","src/subtags/variants.rs":"33c9a8f979078461ae27b1812358a1c984594f4b5e66d2e5ba3ff649f89da7ba","src/zerovec.rs":"a34cfbce609b9ca711d22098a73f3cf17eec6ea73fd00f3106dca698b7dee29e","tests/fixtures/canonicalize.json":"9f2b7cbef72c24944cd4dc50de368c6e3ef69949f29c9ce1aa8807de767a4d0a","tests/fixtures/invalid-extensions.json":"4b7888006360b216030597257de8c301e22877e75216818967bbd8c83b6dbb0b","tests/fixtures/invalid.json":"5247849a6eb805619b8e70254c855227f7bdaf71431b071c91c6cc378ae9766e","tests/fixtures/langid.json":"960fd01722217ef1ea9077e2e0821d7089fe318a241bd7fb7918f50bf8f3f5c3","tests/fixtures/locale.json":"8606e0569fc6ea0e50a1fecb9295b911fbef7d8dbfde3c585476284a751baccf","tests/fixtures/mod.rs":"aea619960540b92199345cbd20ff03d2cb451aa2ce9aa6cf7915223ee9f812a3","tests/helpers/mod.rs":"d3bf59e7eed6230f340bef6c87a7b8de3a387ec391f60afc1b15a0d001cbfb67","tests/langid.rs":"82da0497c4b7d7c5d416ddb96bad0d13d4e51b735b5ed3164a25861de28e2118","tests/locale.rs":"bfbb137e6183e71e6e05076fd3b1caea98a70e69992c8626bb8f316eebc3a23c"},"package":"f284eb342dc49d3e9d9f3b188489d76b5d22dfb1d1a5e0d1941811253bac625c"} \ No newline at end of file diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/Cargo.lock rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/Cargo.lock --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/Cargo.lock 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/Cargo.lock 2023-12-21 18:27:08.000000000 +0000 @@ -3,6 +3,15 @@ version = 3 [[package]] +name = "aho-corasick" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea5d730647d4fadd988536d06fecce94b7b4f2a7efdae548f1cf4b63205518ab" +dependencies = [ + "memchr", +] + +[[package]] name = "anes" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -14,7 +23,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ - "hermit-abi 0.1.19", + "hermit-abi", "libc", "winapi", ] @@ -33,9 +42,9 @@ [[package]] name = "bumpalo" -version = "3.12.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" [[package]] name = "cast" @@ -51,9 +60,9 @@ [[package]] name = "ciborium" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0c137568cc60b904a7724001b35ce2630fd00d5d84805fbb608ab89509d788f" +checksum = 
"effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" dependencies = [ "ciborium-io", "ciborium-ll", @@ -62,15 +71,15 @@ [[package]] name = "ciborium-io" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "346de753af073cc87b52b2083a506b38ac176a44cfb05497b622e27be899b369" +checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" [[package]] name = "ciborium-ll" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213030a2b5a4e0c0892b6652260cf6ccac84827b83a85a534e178e3906c4cf1b" +checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" dependencies = [ "ciborium-io", "half", @@ -78,9 +87,9 @@ [[package]] name = "clap" -version = "3.2.23" +version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ "bitflags", "clap_lex", @@ -140,16 +149,6 @@ ] [[package]] -name = "crossbeam-channel" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" -dependencies = [ - "cfg-if", - "crossbeam-utils", -] - -[[package]] name = "crossbeam-deque" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -162,9 +161,9 @@ [[package]] name = "crossbeam-epoch" -version = "0.9.14" +version = "0.9.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", "cfg-if", @@ -175,18 +174,18 @@ [[package]] name = "crossbeam-utils" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = [ "cfg-if", ] [[package]] name = "databake" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3feab871d671ecc2e912e83e3a64b9b3343a224c86b999334c4c99b07cf07f52" +checksum = "959b676312ba1aaafb2219c475560082e6b20c3bc572ec1483f93cecd748cf3d" dependencies = [ "databake-derive", "proc-macro2", @@ -196,9 +195,9 @@ [[package]] name = "databake-derive" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b3072996668063c5fe00fcef85ee766fc01113f2fdd5e8a31d5ebae260c091" +checksum = "5f0694dfe255f1af0289d3d1b40787bb955e8603d96e96a6b14b225926e108fb" dependencies = [ "proc-macro2", "quote", @@ -208,9 +207,9 @@ [[package]] name = "displaydoc" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bf95dc3f046b9da4f2d51833c0d3547d8564ef6910f5c1ed130306a75b92886" +checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2", "quote", @@ -219,9 +218,9 @@ [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = 
"a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "half" @@ -245,15 +244,6 @@ ] [[package]] -name = "hermit-abi" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] - -[[package]] name = "iai" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -261,7 +251,7 @@ [[package]] name = "icu_locid" -version = "1.2.0" +version = "1.3.2" dependencies = [ "criterion", "databake", @@ -297,15 +287,15 @@ [[package]] name = "itoa" -version = "1.0.6" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "js-sys" -version = "0.3.61" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" dependencies = [ "wasm-bindgen", ] @@ -318,58 +308,51 @@ [[package]] name = "libc" -version = "0.2.141" +version = "0.2.148" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3304a64d199bb964be99741b7a14d26972741915b3649639149b2479bb46f4b5" +checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" [[package]] name = "litemap" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a04a5b2b6f54acba899926491d0a6c59d98012938ca2ab5befb281c034e8f94" +checksum = "77a1a2647d5b7134127971a6de0d533c49de2159167e7f259c427195f87168a1" [[package]] name = "log" -version = "0.4.17" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] -name = "memoffset" -version = "0.8.0" +name = "memchr" +version = "2.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" -dependencies = [ - "autocfg", -] +checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" [[package]] -name = "num-traits" -version = "0.2.15" +name = "memoffset" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" dependencies = [ "autocfg", ] [[package]] -name = "num_cpus" -version = "1.15.0" +name = "num-traits" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ - "hermit-abi 0.2.6", - "libc", + "autocfg", ] [[package]] name = "once_cell" -version = "1.17.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "oorandom" @@ -379,15 
+362,15 @@ [[package]] name = "os_str_bytes" -version = "6.5.0" +version = "6.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ceedf44fb00f2d1984b0bc98102627ce622e083e49a5bacdb3e514fa4238e267" +checksum = "4d5d9eb14b174ee9aa2ef96dc2b94637a2d4b6e7cb873c7e171f0c20c6cf3eac" [[package]] name = "plotters" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" dependencies = [ "num-traits", "plotters-backend", @@ -398,24 +381,24 @@ [[package]] name = "plotters-backend" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" [[package]] name = "plotters-svg" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" dependencies = [ "plotters-backend", ] [[package]] name = "postcard" -version = "1.0.4" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfa512cd0d087cc9f99ad30a1bf64795b67871edbead083ffc3a4dfafa59aa00" +checksum = "d534c6e61df1c7166e636ca612d9820d486fe96ddad37f7abc671517b297488e" dependencies = [ "cobs", "serde", @@ -423,27 +406,27 @@ [[package]] name = "proc-macro2" -version = "1.0.56" +version = "1.0.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" +checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.26" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" dependencies = [ "proc-macro2", ] [[package]] name = "rayon" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" dependencies = [ "either", "rayon-core", @@ -451,36 +434,48 @@ [[package]] name = "rayon-core" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" dependencies = [ - "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus", ] [[package]] name = "regex" -version = "1.7.3" +version = "1.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "697061221ea1b4a94a624f67d0ae2bfe4e22b8a17b6a192afb11046542cc8c47" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" 
+checksum = "c2f401f4955220693b56f8ec66ee9c78abffd8d1c4f23dc41a23839eb88f0795" dependencies = [ + "aho-corasick", + "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" -version = "0.6.29" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" +checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" [[package]] name = "ryu" -version = "1.0.13" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "same-file" @@ -493,24 +488,24 @@ [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" -version = "1.0.156" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "314b5b092c0ade17c00142951e50ced110ec27cea304b1037c6969246c2469a4" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.156" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7e29c4601e36bcec74a223228dce795f4cd3616341a4af93520ca1a837c087d" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", @@ -519,9 +514,9 @@ [[package]] name = "serde_json" -version = "1.0.96" +version = "1.0.107" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" dependencies = [ "itoa", "ryu", @@ -530,9 +525,9 @@ [[package]] name = "syn" -version = "1.0.109" +version = "2.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8" dependencies = [ "proc-macro2", "quote", @@ -541,9 +536,9 @@ [[package]] name = "synstructure" -version = "0.12.6" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" +checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06" dependencies = [ "proc-macro2", "quote", @@ -559,9 +554,9 @@ [[package]] name = "tinystr" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ac3f5b6856e931e15e07b478e98c8045239829a65f9156d4fa7e7788197a5ef" +checksum = "d5d0e245e80bdc9b4e5356fc45a72184abbc3861992603f515270e9340f5a219" dependencies = [ "displaydoc", "serde", @@ -579,9 +574,9 @@ [[package]] name = "unicode-ident" -version = "1.0.8" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-xid" @@ -591,9 +586,9 @@ [[package]] name = "walkdir" -version 
= "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" dependencies = [ "same-file", "winapi-util", @@ -601,9 +596,9 @@ [[package]] name = "wasm-bindgen" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -611,9 +606,9 @@ [[package]] name = "wasm-bindgen-backend" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", @@ -626,9 +621,9 @@ [[package]] name = "wasm-bindgen-macro" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -636,9 +631,9 @@ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", @@ -649,15 +644,15 @@ [[package]] name = "wasm-bindgen-shared" -version = "0.2.84" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "web-sys" -version = "0.3.61" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" dependencies = [ "js-sys", "wasm-bindgen", @@ -681,9 +676,9 @@ [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ "winapi", ] @@ -696,21 +691,21 @@ [[package]] name = "writeable" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60e49e42bdb1d5dc76f4cd78102f8f0714d32edfa3efb82286eb0f0b1fc0da0f" +checksum = "c0af0c3d13faebf8dda0b5256fa7096a2d5ccb662f7b9f54a40fe201077ab1c2" [[package]] name = "zerofrom" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df54d76c3251de27615dfcce21e636c172dafb2549cd7fd93e21c66f6ca6bea2" +checksum = "655b0814c5c0b19ade497851070c640773304939a6c0fd5f5fb43da0696d05b7" [[package]] name = "zerovec" -version = "0.9.4" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"198f54134cd865f437820aa3b43d0ad518af4e68ee161b444cdd15d8e567c8ea" +checksum = "1194130c5b155bf8ae50ab16c86ab758cd695cf9ad176d2f870b744cbdbb572e" dependencies = [ "zerofrom", ] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/Cargo.toml 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/Cargo.toml 2023-12-21 18:27:08.000000000 +0000 @@ -11,10 +11,12 @@ [package] edition = "2021" +rust-version = "1.66" name = "icu_locid" -version = "1.2.0" +version = "1.3.2" authors = ["The ICU4X Project Developers"] include = [ + "data/**/*", "src/**/*", "examples/**/*", "benches/**/*", @@ -24,19 +26,19 @@ "README.md", ] description = "API for managing Unicode Language and Locale Identifiers" +homepage = "https://icu4x.unicode.org" readme = "README.md" categories = ["internationalization"] -license = "Unicode-DFS-2016" +license-file = "LICENSE" repository = "https://github.com/unicode-org/icu4x" -[package.metadata.docs.rs] -all-features = true - [package.metadata.cargo-all-features] denylist = ["bench"] +[package.metadata.docs.rs] +all-features = true + [lib] -path = "src/lib.rs" bench = false [[example]] @@ -66,16 +68,19 @@ required-features = ["bench"] [dependencies.databake] -version = "0.1.3" +version = "0.1.6" features = ["derive"] optional = true +default-features = false [dependencies.displaydoc] version = "0.2.3" default-features = false [dependencies.litemap] -version = "0.7.0" +version = "0.7.1" +features = ["alloc"] +default-features = false [dependencies.serde] version = "1.0" @@ -87,19 +92,18 @@ default-features = false [dependencies.tinystr] -version = "0.7.1" +version = "0.7.3" features = ["alloc"] default-features = false [dependencies.writeable] -version = "0.5.1" +version = "0.5.3" +default-features = false [dependencies.zerovec] -version = "0.9.4" +version = "0.10.0" optional = true - -[dev-dependencies.criterion] -version = "0.4" +default-features = false [dev-dependencies.iai] version = "0.1.1" @@ -125,3 +129,6 @@ ] std = [] zerovec = ["dep:zerovec"] + +[target."cfg(not(target_arch = \"wasm32\"))".dev-dependencies.criterion] +version = "0.4" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/LICENSE rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/LICENSE --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/LICENSE 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/LICENSE 2023-12-21 18:27:08.000000000 +0000 @@ -1,49 +1,42 @@ -UNICODE, INC. LICENSE AGREEMENT - DATA FILES AND SOFTWARE - -See Terms of Use -for definitions of Unicode Inc.’s Data Files and Software. - -NOTICE TO USER: Carefully read the following legal agreement. -BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S -DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"), -YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE -TERMS AND CONDITIONS OF THIS AGREEMENT. -IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE -THE DATA FILES OR SOFTWARE. +UNICODE LICENSE V3 COPYRIGHT AND PERMISSION NOTICE -Copyright © 1991-2022 Unicode, Inc. All rights reserved. -Distributed under the Terms of Use in https://www.unicode.org/copyright.html. +Copyright © 2020-2023 Unicode, Inc. 
-Permission is hereby granted, free of charge, to any person obtaining -a copy of the Unicode data files and any associated documentation -(the "Data Files") or Unicode software and any associated documentation -(the "Software") to deal in the Data Files or Software -without restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, and/or sell copies of -the Data Files or Software, and to permit persons to whom the Data Files -or Software are furnished to do so, provided that either -(a) this copyright and permission notice appear with all copies -of the Data Files or Software, or -(b) this copyright and permission notice appear in associated -Documentation. - -THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT OF THIRD PARTY RIGHTS. -IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS -NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL -DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, -DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THE DATA FILES OR SOFTWARE. - -Except as contained in this notice, the name of a copyright holder -shall not be used in advertising or otherwise to promote the sale, -use or other dealings in these Data Files or Software without prior -written authorization of the copyright holder. +NOTICE TO USER: Carefully read the following legal agreement. BY +DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING DATA FILES, AND/OR +SOFTWARE, YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE +TERMS AND CONDITIONS OF THIS AGREEMENT. IF YOU DO NOT AGREE, DO NOT +DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE THE DATA FILES OR SOFTWARE. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of data files and any associated documentation (the "Data Files") or +software and any associated documentation (the "Software") to deal in the +Data Files or Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, and/or sell +copies of the Data Files or Software, and to permit persons to whom the +Data Files or Software are furnished to do so, provided that either (a) +this copyright and permission notice appear with all copies of the Data +Files or Software, or (b) this copyright and permission notice appear in +associated Documentation. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF +THIRD PARTY RIGHTS. + +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE +BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THE DATA +FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall +not be used in advertising or otherwise to promote the sale, use or other +dealings in these Data Files or Software without prior written +authorization of the copyright holder. 
— diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/README.md rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/README.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/README.md 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/README.md 2023-12-21 18:27:08.000000000 +0000 @@ -1,5 +1,7 @@ # icu_locid [![crates.io](https://img.shields.io/crates/v/icu_locid)](https://crates.io/crates/icu_locid) + + Parsing, manipulating, and serializing Unicode Language and Locale Identifiers. This module is published as its own crate ([`icu_locid`](https://docs.rs/icu_locid/latest/icu_locid/)) @@ -22,7 +24,8 @@ ```rust use icu::locid::Locale; use icu::locid::{ - locale, subtags_language as language, subtags_region as region, + locale, + subtags::{language, region}, }; let mut loc: Locale = locale!("en-US"); @@ -43,6 +46,8 @@ [`ICU4X`]: ../icu/index.html [`Unicode Extensions`]: extensions + + ## More Information For more information on development, authorship, contributing etc. please visit [`ICU4X home page`](https://github.com/unicode-org/icu4x). diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/benches/iai_langid.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/benches/iai_langid.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/benches/iai_langid.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/benches/iai_langid.rs 2023-12-21 18:27:08.000000000 +0000 @@ -2,9 +2,7 @@ // called LICENSE at the top level of the ICU4X source tree // (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ). -use icu_locid::{ - langid, subtags_language as language, subtags_region as region, LanguageIdentifier, -}; +use icu_locid::{langid, subtags::language, subtags::region, LanguageIdentifier}; use writeable::Writeable; const LIDS: &[LanguageIdentifier] = &[ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/other/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/other/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/other/mod.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/other/mod.rs 2023-12-21 18:27:08.000000000 +0000 @@ -21,10 +21,12 @@ mod subtag; +use crate::helpers::ShortSlice; use crate::parser::ParserError; use crate::parser::SubtagIterator; use alloc::vec::Vec; -pub use subtag::Subtag; +#[doc(inline)] +pub use subtag::{subtag, Subtag}; /// A list of [`Other Use Extensions`] as defined in [`Unicode Locale /// Identifier`] specification. 
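The README and benchmark hunks above reflect a macro relocation in icu_locid 1.3.2: the crate-root aliases (`subtags_language as language`, `subtags_region as region`) give way to paths under `icu::locid::subtags`. A minimal sketch of the new import style, assuming the `icu` meta-crate re-export used throughout these docs; it is an illustration, not code from the package:

```rust
// Hedged illustration of the relocated subtag macros (icu_locid >= 1.3.x).
use icu::locid::subtags::{language, region};
use icu::locid::{langid, LanguageIdentifier};

fn main() {
    // `langid!` still lives at the crate root; only the subtag macros moved.
    let li: LanguageIdentifier = langid!("en-US");
    assert_eq!(li.language, language!("en"));
    assert_eq!(li.region, Some(region!("US")));
}
```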
@@ -49,7 +51,7 @@ #[derive(Clone, PartialEq, Eq, Debug, Default, Hash, PartialOrd, Ord)] pub struct Other { ext: u8, - keys: Vec, + keys: ShortSlice, } impl Other { @@ -71,6 +73,10 @@ /// assert_eq!(&other.to_string(), "a-foo-bar"); /// ``` pub fn from_vec_unchecked(ext: u8, keys: Vec) -> Self { + Self::from_short_slice_unchecked(ext, keys.into()) + } + + pub(crate) fn from_short_slice_unchecked(ext: u8, keys: ShortSlice) -> Self { assert!(ext.is_ascii_alphabetic()); Self { ext, keys } } @@ -78,7 +84,7 @@ pub(crate) fn try_from_iter(ext: u8, iter: &mut SubtagIterator) -> Result { debug_assert!(ext.is_ascii_alphabetic()); - let mut keys = Vec::new(); + let mut keys = ShortSlice::new(); while let Some(subtag) = iter.peek() { if !Subtag::valid_key(subtag) { break; @@ -89,7 +95,7 @@ iter.next(); } - Ok(Self::from_vec_unchecked(ext, keys)) + Ok(Self::from_short_slice_unchecked(ext, keys)) } /// Gets the tag character for this extension as a &str. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/other/subtag.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/other/subtag.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/other/subtag.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/other/subtag.rs 2023-12-21 18:27:08.000000000 +0000 @@ -11,12 +11,13 @@ /// # Examples /// /// ``` - /// use icu::locid::extensions_other_subtag as subtag; + /// use icu::locid::extensions::other::subtag; /// /// assert_eq!(subtag!("Foo").as_str(), "foo"); /// ``` Subtag, - extensions::other::Subtag, + extensions::other, + subtag, extensions_other_subtag, 2..=8, s, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/private/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/private/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/private/mod.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/private/mod.rs 2023-12-21 18:27:08.000000000 +0000 @@ -13,7 +13,7 @@ //! # Examples //! //! ``` -//! use icu::locid::extensions_private_subtag as subtag; +//! use icu::locid::extensions::private::subtag; //! use icu::locid::{locale, Locale}; //! //! let mut loc: Locale = "en-US-x-foo-faa".parse().expect("Parsing failed."); @@ -32,8 +32,10 @@ use alloc::vec::Vec; use core::ops::Deref; -pub use other::Subtag; +#[doc(inline)] +pub use other::{subtag, Subtag}; +use crate::helpers::ShortSlice; use crate::parser::ParserError; use crate::parser::SubtagIterator; @@ -58,7 +60,7 @@ /// [`Private Use Extensions`]: https://unicode.org/reports/tr35/#pu_extensions /// [`Unicode Locale Identifier`]: https://unicode.org/reports/tr35/#Unicode_locale_identifier #[derive(Clone, PartialEq, Eq, Debug, Default, Hash, PartialOrd, Ord)] -pub struct Private(Vec); +pub struct Private(ShortSlice); impl Private { /// Returns a new empty list of private-use extensions. Same as [`default()`](Default::default()), but is `const`. @@ -72,7 +74,7 @@ /// ``` #[inline] pub const fn new() -> Self { - Self(Vec::new()) + Self(ShortSlice::new()) } /// A constructor which takes a pre-sorted list of [`Subtag`]. @@ -89,7 +91,7 @@ /// assert_eq!(&private.to_string(), "x-foo-bar"); /// ``` pub fn from_vec_unchecked(input: Vec) -> Self { - Self(input) + Self(input.into()) } /// Empties the [`Private`] list. 
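The private-use extension hunks above switch the internal storage to `ShortSlice` and re-export the `subtag!` macro under `extensions::private`. A small usage sketch based on the "en-US-x-foo-faa" doc example; the `contains` call is assumed to resolve through `Private`'s deref to a subtag slice:

```rust
// Hedged sketch: the relocated private-use `subtag!` macro in action.
use icu::locid::extensions::private::subtag;
use icu::locid::Locale;

fn main() {
    let loc: Locale = "en-US-x-foo-faa".parse().expect("Parsing failed.");
    // `Private` derefs to `[Subtag]`, so slice methods such as `contains` apply.
    assert!(loc.extensions.private.contains(&subtag!("foo")));
    assert!(!loc.extensions.private.contains(&subtag!("bar")));
}
```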
@@ -116,9 +118,9 @@ pub(crate) fn try_from_iter(iter: &mut SubtagIterator) -> Result { let keys = iter .map(Subtag::try_from_bytes) - .collect::, _>>()?; + .collect::, _>>()?; - Ok(Self::from_vec_unchecked(keys)) + Ok(Self(keys)) } pub(crate) fn for_each_subtag_str(&self, f: &mut F) -> Result<(), E> diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/private/other.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/private/other.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/private/other.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/private/other.rs 2023-12-21 18:27:08.000000000 +0000 @@ -18,7 +18,8 @@ /// assert_eq!(subtag1.as_str(), "foo"); /// ``` Subtag, - extensions::private::Subtag, + extensions::private, + subtag, extensions_private_subtag, 1..=8, s, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/fields.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/fields.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/fields.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/fields.rs 2023-12-21 18:27:08.000000000 +0000 @@ -24,11 +24,10 @@ /// # Examples /// /// ``` -/// use icu::locid::extensions::transform::{Fields, Key, Value}; -/// use icu::locid::extensions_transform_key as key; +/// use icu::locid::extensions::transform::{key, Fields, Key, Value}; /// /// let value = "hybrid".parse::().expect("Failed to parse a Value."); -/// let fields = vec![(key!("h0"), value)].into_iter().collect::(); +/// let fields = [(key!("h0"), value)].into_iter().collect::(); /// /// assert_eq!(&fields.to_string(), "h0-hybrid"); /// ``` @@ -76,11 +75,10 @@ /// # Examples /// /// ``` - /// use icu::locid::extensions::transform::{Fields, Value}; - /// use icu::locid::extensions_transform_key as key; + /// use icu::locid::extensions::transform::{key, Fields, Value}; /// /// let value = "hybrid".parse::().expect("Failed to parse a Value."); - /// let mut fields = vec![(key!("h0"), value)].into_iter().collect::(); + /// let mut fields = [(key!("h0"), value)].into_iter().collect::(); /// /// assert_eq!(&fields.to_string(), "h0-hybrid"); /// @@ -102,7 +100,7 @@ /// /// let key: Key = "h0".parse().expect("Failed to parse a Key."); /// let value: Value = "hybrid".parse().expect("Failed to parse a Value."); - /// let mut fields: Fields = vec![(key, value)].into_iter().collect(); + /// let mut fields = [(key, value)].into_iter().collect::(); /// /// let key: Key = "h0".parse().expect("Failed to parse a Key."); /// assert!(&fields.contains_key(&key)); @@ -121,11 +119,10 @@ /// # Examples /// /// ``` - /// use icu::locid::extensions::transform::{Fields, Key, Value}; - /// use icu::locid::extensions_transform_key as key; + /// use icu::locid::extensions::transform::{key, Fields, Key, Value}; /// /// let value = "hybrid".parse::().unwrap(); - /// let fields = vec![(key!("h0"), value.clone())] + /// let fields = [(key!("h0"), value.clone())] /// .into_iter() /// .collect::(); /// @@ -144,9 +141,7 @@ /// # Examples /// /// ``` - /// use icu::locid::extensions::transform::Key; - /// use icu::locid::extensions::transform::Value; - /// use icu::locid::extensions_transform_key as key; + /// use icu::locid::extensions::transform::{key, Key, Value}; /// use icu::locid::Locale; /// /// let lower = "lower".parse::().expect("valid extension subtag"); @@ 
-169,7 +164,7 @@ /// # Examples /// /// ``` - /// use icu::locid::extensions_transform_key as key; + /// use icu::locid::extensions::transform::key; /// use icu::locid::Locale; /// /// let mut loc: Locale = "und-t-h0-hybrid-d0-hex-m0-xml".parse().unwrap(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/key.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/key.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/key.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/key.rs 2023-12-21 18:27:08.000000000 +0000 @@ -18,7 +18,8 @@ /// assert_eq!(key1.as_str(), "k0"); /// ``` Key, - extensions::transform::Key, + extensions::transform, + key, extensions_transform_key, 2..=2, s, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/mod.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/mod.rs 2023-12-21 18:27:08.000000000 +0000 @@ -35,14 +35,15 @@ mod value; pub use fields::Fields; -pub use key::Key; +#[doc(inline)] +pub use key::{key, Key}; pub use value::Value; +use crate::helpers::ShortSlice; use crate::parser::SubtagIterator; use crate::parser::{parse_language_identifier_from_iter, ParserError, ParserMode}; use crate::subtags::Language; use crate::LanguageIdentifier; -use alloc::vec; use litemap::LiteMap; /// A list of [`Unicode BCP47 T Extensions`] as defined in [`Unicode Locale @@ -144,21 +145,24 @@ } let mut current_tkey = None; - let mut current_tvalue = vec![]; + let mut current_tvalue = ShortSlice::new(); + let mut has_current_tvalue = false; while let Some(subtag) = iter.peek() { if let Some(tkey) = current_tkey { if let Ok(val) = Value::parse_subtag(subtag) { - current_tvalue.push(val); + has_current_tvalue = true; + if let Some(val) = val { + current_tvalue.push(val); + } } else { - if current_tvalue.is_empty() { + if !has_current_tvalue { return Err(ParserError::InvalidExtension); } - tfields.try_insert( - tkey, - Value::from_vec_unchecked(current_tvalue.drain(..).flatten().collect()), - ); + tfields.try_insert(tkey, Value::from_short_slice_unchecked(current_tvalue)); current_tkey = None; + current_tvalue = ShortSlice::new(); + has_current_tvalue = false; continue; } } else if let Ok(tkey) = Key::try_from_bytes(subtag) { @@ -171,13 +175,10 @@ } if let Some(tkey) = current_tkey { - if current_tvalue.is_empty() { + if !has_current_tvalue { return Err(ParserError::InvalidExtension); } - tfields.try_insert( - tkey, - Value::from_vec_unchecked(current_tvalue.into_iter().flatten().collect()), - ); + tfields.try_insert(tkey, Value::from_short_slice_unchecked(current_tvalue)); } Ok(Self { diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/value.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/value.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/value.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/transform/value.rs 2023-12-21 18:27:08.000000000 +0000 @@ -2,9 +2,8 @@ // called LICENSE at the top level of the ICU4X source tree // (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ). 
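The transform-extension hunks above replace `vec![...]` with array iterators in the doc examples and move `key!` under `extensions::transform`. A short sketch in that updated style, mirroring the `Fields` example shown in the hunks:

```rust
// Hedged sketch of the updated transform-extension doc style.
use icu::locid::extensions::transform::{key, Fields, Value};

fn main() {
    let value = "hybrid".parse::<Value>().expect("Failed to parse a Value.");
    // An array of pairs collects into `Fields` just like the former `vec![...]` form.
    let fields = [(key!("h0"), value)].into_iter().collect::<Fields>();
    assert_eq!(&fields.to_string(), "h0-hybrid");
}
```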
+use crate::helpers::ShortSlice; use crate::parser::{ParserError, SubtagIterator}; -use alloc::vec; -use alloc::vec::Vec; use core::ops::RangeInclusive; use core::str::FromStr; use tinystr::TinyAsciiStr; @@ -28,7 +27,7 @@ /// "no".parse::().expect_err("Invalid Value."); /// ``` #[derive(Debug, PartialEq, Eq, Clone, Hash, PartialOrd, Ord, Default)] -pub struct Value(Vec>); +pub struct Value(ShortSlice>); const TYPE_LENGTH: RangeInclusive = 3..=8; const TRUE_TVALUE: TinyAsciiStr<8> = tinystr::tinystr!(8, "true"); @@ -45,7 +44,7 @@ /// let value = Value::try_from_bytes(b"hybrid").expect("Parsing failed."); /// ``` pub fn try_from_bytes(input: &[u8]) -> Result { - let mut v = vec![]; + let mut v = ShortSlice::default(); let mut has_value = false; for subtag in SubtagIterator::new(input) { @@ -66,12 +65,14 @@ Ok(Self(v)) } - pub(crate) fn from_vec_unchecked(input: Vec>) -> Self { + pub(crate) fn from_short_slice_unchecked( + input: ShortSlice>, + ) -> Self { Self(input) } pub(crate) fn is_type_subtag(t: &[u8]) -> bool { - TYPE_LENGTH.contains(&t.len()) && !t.iter().any(|c: &u8| !c.is_ascii_alphanumeric()) + TYPE_LENGTH.contains(&t.len()) && t.iter().all(u8::is_ascii_alphanumeric) } pub(crate) fn parse_subtag( @@ -122,9 +123,12 @@ let foobar = "foobar".parse().unwrap(); assert_writeable_eq!(Value::default(), "true"); - assert_writeable_eq!(Value::from_vec_unchecked(vec![hybrid]), "hybrid"); assert_writeable_eq!( - Value::from_vec_unchecked(vec![hybrid, foobar]), + Value::from_short_slice_unchecked(vec![hybrid].into()), + "hybrid" + ); + assert_writeable_eq!( + Value::from_short_slice_unchecked(vec![hybrid, foobar].into()), "hybrid-foobar" ); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/attribute.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/attribute.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/attribute.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/attribute.rs 2023-12-21 18:27:08.000000000 +0000 @@ -12,8 +12,7 @@ /// # Examples /// /// ``` - /// use icu::locid::extensions::unicode::Attribute; - /// use icu::locid::extensions_unicode_attribute as attribute; + /// use icu::locid::extensions::unicode::{attribute, Attribute}; /// /// let attr: Attribute = /// "buddhist".parse().expect("Failed to parse an Attribute."); @@ -21,7 +20,8 @@ /// assert_eq!(attr, attribute!("buddhist")); /// ``` Attribute, - extensions::unicode::Attribute, + extensions::unicode, + attribute, extensions_unicode_attribute, 3..=8, s, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/attributes.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/attributes.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/attributes.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/attributes.rs 2023-12-21 18:27:08.000000000 +0000 @@ -4,6 +4,7 @@ use super::Attribute; +use crate::helpers::ShortSlice; use alloc::vec::Vec; use core::ops::Deref; @@ -30,7 +31,7 @@ /// assert_eq!(attributes.to_string(), "foobar-testing"); /// ``` #[derive(Default, Debug, PartialEq, Eq, Clone, Hash, PartialOrd, Ord)] -pub struct Attributes(Vec); +pub struct Attributes(ShortSlice); impl Attributes { /// Returns a new empty set of attributes. Same as [`default()`](Default::default()), but is `const`. 
@@ -44,7 +45,7 @@ /// ``` #[inline] pub const fn new() -> Self { - Self(Vec::new()) + Self(ShortSlice::new()) } /// A constructor which takes a pre-sorted list of [`Attribute`] elements. @@ -68,6 +69,10 @@ /// for the caller to use [`binary_search`](slice::binary_search) instead of [`sort`](slice::sort) /// and [`dedup`](Vec::dedup()). pub fn from_vec_unchecked(input: Vec) -> Self { + Self(input.into()) + } + + pub(crate) fn from_short_slice_unchecked(input: ShortSlice) -> Self { Self(input) } @@ -78,8 +83,7 @@ /// # Examples /// /// ``` - /// use icu::locid::extensions::unicode::{Attribute, Attributes}; - /// use icu::locid::extensions_unicode_attribute as attribute; + /// use icu::locid::extensions::unicode::{attribute, Attribute, Attributes}; /// use writeable::assert_writeable_eq; /// /// let mut attributes = Attributes::from_vec_unchecked(vec![ diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/key.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/key.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/key.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/key.rs 2023-12-21 18:27:08.000000000 +0000 @@ -17,7 +17,8 @@ /// assert!("ca".parse::().is_ok()); /// ``` Key, - extensions::unicode::Key, + extensions::unicode, + key, extensions_unicode_key, 2..=2, s, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/keywords.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/keywords.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/keywords.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/keywords.rs 2023-12-21 18:27:08.000000000 +0000 @@ -13,7 +13,7 @@ use crate::ordering::SubtagOrderingResult; /// A list of [`Key`]-[`Value`] pairs representing functional information -/// about locale's internationnalization preferences. +/// about locale's internationalization preferences. 
/// /// Here are examples of fields used in Unicode: /// - `hc` - Hour Cycle (`h11`, `h12`, `h23`, `h24`) @@ -30,11 +30,11 @@ /// /// ``` /// use icu::locid::{ -/// extensions::unicode::Keywords, extensions_unicode_key as key, -/// extensions_unicode_value as value, locale, +/// extensions::unicode::{key, value, Keywords}, +/// locale, /// }; /// -/// let keywords = vec![(key!("hc"), value!("h23"))] +/// let keywords = [(key!("hc"), value!("h23"))] /// .into_iter() /// .collect::(); /// @@ -45,7 +45,7 @@ /// /// ``` /// use icu::locid::{ -/// extensions_unicode_key as key, extensions_unicode_value as value, +/// extensions::unicode::{key, value}, /// Locale, /// }; /// @@ -116,12 +116,9 @@ /// # Examples /// /// ``` - /// use icu::locid::{ - /// extensions::unicode::Keywords, extensions_unicode_key as key, - /// extensions_unicode_value as value, - /// }; + /// use icu::locid::extensions::unicode::{key, value, Keywords}; /// - /// let keywords = vec![(key!("ca"), value!("gregory"))] + /// let keywords = [(key!("ca"), value!("gregory"))] /// .into_iter() /// .collect::(); /// @@ -141,12 +138,9 @@ /// # Examples /// /// ``` - /// use icu::locid::{ - /// extensions::unicode::Keywords, extensions_unicode_key as key, - /// extensions_unicode_value as value, - /// }; + /// use icu::locid::extensions::unicode::{key, value, Keywords}; /// - /// let keywords = vec![(key!("ca"), value!("buddhist"))] + /// let keywords = [(key!("ca"), value!("buddhist"))] /// .into_iter() /// .collect::(); /// @@ -167,12 +161,9 @@ /// # Examples /// /// ``` - /// use icu::locid::{ - /// extensions::unicode::Keywords, extensions_unicode_key as key, - /// extensions_unicode_value as value, - /// }; + /// use icu::locid::extensions::unicode::{key, value, Keywords}; /// - /// let mut keywords = vec![(key!("ca"), value!("buddhist"))] + /// let mut keywords = [(key!("ca"), value!("buddhist"))] /// .into_iter() /// .collect::(); /// @@ -196,10 +187,8 @@ /// ``` /// use icu::locid::extensions::unicode::Key; /// use icu::locid::extensions::unicode::Value; + /// use icu::locid::extensions::unicode::{key, value}; /// use icu::locid::Locale; - /// use icu::locid::{ - /// extensions_unicode_key as key, extensions_unicode_value as value, - /// }; /// /// let mut loc: Locale = "und-u-hello-ca-buddhist-hc-h12" /// .parse() @@ -222,8 +211,7 @@ /// # Examples /// /// ``` - /// use icu::locid::extensions::unicode::Key; - /// use icu::locid::extensions_unicode_key as key; + /// use icu::locid::extensions::unicode::{key, Key}; /// use icu::locid::Locale; /// /// let mut loc: Locale = "und-u-hello-ca-buddhist-hc-h12" @@ -258,7 +246,7 @@ /// # Examples /// /// ``` - /// use icu::locid::extensions_unicode_key as key; + /// use icu::locid::extensions::unicode::key; /// use icu::locid::Locale; /// /// let mut loc: Locale = "und-u-ca-buddhist-hc-h12-ms-metric".parse().unwrap(); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/mod.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/mod.rs 2023-12-21 18:27:08.000000000 +0000 @@ -11,12 +11,8 @@ //! # Examples //! //! ``` +//! use icu::locid::extensions::unicode::{attribute, key, value, Unicode}; //! use icu::locid::Locale; -//! use icu::locid::{ -//! extensions::unicode::Unicode, -//! extensions_unicode_attribute as attribute, -//! 
extensions_unicode_key as key, extensions_unicode_value as value, -//! }; //! //! let loc: Locale = "en-US-u-foobar-hc-h12".parse().expect("Parsing failed."); //! @@ -36,13 +32,16 @@ mod keywords; mod value; -use alloc::vec; -pub use attribute::Attribute; +#[doc(inline)] +pub use attribute::{attribute, Attribute}; pub use attributes::Attributes; -pub use key::Key; +#[doc(inline)] +pub use key::{key, Key}; pub use keywords::Keywords; -pub use value::Value; +#[doc(inline)] +pub use value::{value, Value}; +use crate::helpers::ShortSlice; use crate::parser::ParserError; use crate::parser::SubtagIterator; use litemap::LiteMap; @@ -63,10 +62,8 @@ /// # Examples /// /// ``` +/// use icu::locid::extensions::unicode::{key, value}; /// use icu::locid::Locale; -/// use icu::locid::{ -/// extensions_unicode_key as key, extensions_unicode_value as value, -/// }; /// /// let loc: Locale = /// "de-u-hc-h12-ca-buddhist".parse().expect("Parsing failed."); @@ -138,11 +135,7 @@ } pub(crate) fn try_from_iter(iter: &mut SubtagIterator) -> Result { - let mut attributes = vec![]; - let mut keywords = LiteMap::new(); - - let mut current_keyword = None; - let mut current_type = vec![]; + let mut attributes = ShortSlice::new(); while let Some(subtag) = iter.peek() { if let Ok(attr) = Attribute::try_from_bytes(subtag) { @@ -155,17 +148,22 @@ iter.next(); } + let mut keywords = LiteMap::new(); + + let mut current_keyword = None; + let mut current_value = ShortSlice::new(); + while let Some(subtag) = iter.peek() { let slen = subtag.len(); if slen == 2 { if let Some(kw) = current_keyword.take() { - keywords.try_insert(kw, Value::from_vec_unchecked(current_type)); - current_type = vec![]; + keywords.try_insert(kw, Value::from_short_slice_unchecked(current_value)); + current_value = ShortSlice::new(); } current_keyword = Some(Key::try_from_bytes(subtag)?); } else if current_keyword.is_some() { match Value::parse_subtag(subtag) { - Ok(Some(t)) => current_type.push(t), + Ok(Some(t)) => current_value.push(t), Ok(None) => {} Err(_) => break, } @@ -176,7 +174,7 @@ } if let Some(kw) = current_keyword.take() { - keywords.try_insert(kw, Value::from_vec_unchecked(current_type)); + keywords.try_insert(kw, Value::from_short_slice_unchecked(current_value)); } // Ensure we've defined at least one attribute or keyword @@ -186,7 +184,7 @@ Ok(Self { keywords: keywords.into(), - attributes: Attributes::from_vec_unchecked(attributes), + attributes: Attributes::from_short_slice_unchecked(attributes), }) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/value.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/value.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/value.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/extensions/unicode/value.rs 2023-12-21 18:27:08.000000000 +0000 @@ -4,7 +4,6 @@ use crate::helpers::ShortSlice; use crate::parser::{ParserError, SubtagIterator}; -use alloc::vec::Vec; use core::ops::RangeInclusive; use core::str::FromStr; use tinystr::TinyAsciiStr; @@ -20,9 +19,7 @@ /// # Examples /// /// ``` -/// use icu::locid::{ -/// extensions::unicode::Value, extensions_unicode_value as value, -/// }; +/// use icu::locid::extensions::unicode::{value, Value}; /// use writeable::assert_writeable_eq; /// /// assert_writeable_eq!(value!("gregory"), "gregory"); @@ -52,7 +49,7 @@ /// Value::try_from_bytes(b"buddhist").expect("Parsing failed."); /// ``` pub fn try_from_bytes(input: &[u8]) -> 
Result { - let mut v = Vec::new(); + let mut v = ShortSlice::new(); if !input.is_empty() { for subtag in SubtagIterator::new(input) { @@ -62,7 +59,7 @@ } } } - Ok(Self(v.into())) + Ok(Self(v)) } /// Const constructor for when the value contains only a single subtag. @@ -85,7 +82,7 @@ #[doc(hidden)] pub fn as_tinystr_slice(&self) -> &[TinyAsciiStr<8>] { - self.0.as_slice() + &self.0 } #[doc(hidden)] @@ -105,8 +102,8 @@ } } - pub(crate) fn from_vec_unchecked(input: Vec>) -> Self { - Self(input.into()) + pub(crate) fn from_short_slice_unchecked(input: ShortSlice>) -> Self { + Self(input) } #[doc(hidden)] @@ -140,7 +137,7 @@ where F: FnMut(&str) -> Result<(), E>, { - self.0.as_slice().iter().map(|t| t.as_str()).try_for_each(f) + self.0.iter().map(TinyAsciiStr::as_str).try_for_each(f) } } @@ -161,10 +158,8 @@ /// # Examples /// /// ``` +/// use icu::locid::extensions::unicode::{key, value}; /// use icu::locid::Locale; -/// use icu::locid::{ -/// extensions_unicode_key as key, extensions_unicode_value as value, -/// }; /// /// let loc: Locale = "de-u-ca-buddhist".parse().unwrap(); /// @@ -176,6 +171,7 @@ /// /// [`Value`]: crate::extensions::unicode::Value #[macro_export] +#[doc(hidden)] macro_rules! extensions_unicode_value { ($value:literal) => {{ // What we want: @@ -196,3 +192,5 @@ R }}; } +#[doc(inline)] +pub use extensions_unicode_value as value; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/helpers.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/helpers.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/helpers.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/helpers.rs 2023-12-21 18:27:08.000000000 +0000 @@ -7,6 +7,7 @@ use alloc::boxed::Box; use alloc::vec; use alloc::vec::Vec; +use core::ops::{Deref, DerefMut}; use litemap::store::*; /// Internal: A vector that supports no-allocation, constant values if length 0 or 1. 
@@ -43,24 +44,6 @@ } #[inline] - pub fn as_slice(&self) -> &[T] { - match self { - ShortSlice::ZeroOne(None) => &[], - ShortSlice::ZeroOne(Some(v)) => core::slice::from_ref(v), - ShortSlice::Multi(v) => v, - } - } - - #[inline] - pub fn as_mut_slice(&mut self) -> &mut [T] { - match self { - ShortSlice::ZeroOne(None) => &mut [], - ShortSlice::ZeroOne(Some(v)) => core::slice::from_mut(v), - ShortSlice::Multi(v) => v, - } - } - - #[inline] pub const fn single(&self) -> Option<&T> { match self { ShortSlice::ZeroOne(Some(v)) => Some(v), @@ -134,6 +117,43 @@ pub fn clear(&mut self) { let _ = core::mem::replace(self, ShortSlice::ZeroOne(None)); } + + pub fn retain(&mut self, mut f: F) + where + F: FnMut(&T) -> bool, + { + *self = match core::mem::take(self) { + Self::ZeroOne(Some(one)) if f(&one) => Self::ZeroOne(Some(one)), + Self::ZeroOne(_) => Self::ZeroOne(None), + Self::Multi(slice) => { + let mut vec = slice.into_vec(); + vec.retain(f); + Self::from(vec) + } + }; + } +} + +impl Deref for ShortSlice { + type Target = [T]; + + fn deref(&self) -> &Self::Target { + match self { + ShortSlice::ZeroOne(None) => &[], + ShortSlice::ZeroOne(Some(v)) => core::slice::from_ref(v), + ShortSlice::Multi(v) => v, + } + } +} + +impl DerefMut for ShortSlice { + fn deref_mut(&mut self) -> &mut Self::Target { + match self { + ShortSlice::ZeroOne(None) => &mut [], + ShortSlice::ZeroOne(Some(v)) => core::slice::from_mut(v), + ShortSlice::Multi(v) => v, + } + } } impl From> for ShortSlice { @@ -155,7 +175,18 @@ impl FromIterator for ShortSlice { fn from_iter>(iter: I) -> Self { - iter.into_iter().collect::>().into() + let mut iter = iter.into_iter(); + match (iter.next(), iter.next()) { + (Some(first), Some(second)) => { + // Size hint behaviour same as `Vec::extend` + 2 + let mut vec = Vec::with_capacity(iter.size_hint().0.saturating_add(3)); + vec.push(first); + vec.push(second); + vec.extend(iter); + Self::Multi(vec.into_boxed_slice()) + } + (first, _) => Self::ZeroOne(first), + } } } @@ -176,7 +207,7 @@ #[inline] fn lm_get(&self, index: usize) -> Option<(&K, &V)> { - self.as_slice().get(index).map(|elt| (&elt.0, &elt.1)) + self.get(index).map(|elt| (&elt.0, &elt.1)) } #[inline] @@ -193,7 +224,7 @@ where F: FnMut(&K) -> core::cmp::Ordering, { - self.as_slice().binary_search_by(|(k, _)| cmp(k)) + self.binary_search_by(|(k, _)| cmp(k)) } } @@ -212,9 +243,7 @@ fn lm_reserve(&mut self, _additional: usize) {} fn lm_get_mut(&mut self, index: usize) -> Option<(&K, &mut V)> { - self.as_mut_slice() - .get_mut(index) - .map(|elt| (&elt.0, &mut elt.1)) + self.get_mut(index).map(|elt| (&elt.0, &mut elt.1)) } fn lm_push(&mut self, key: K, value: V) { @@ -232,6 +261,13 @@ fn lm_clear(&mut self) { self.clear(); } + + fn lm_retain(&mut self, mut predicate: F) + where + F: FnMut(&K, &V) -> bool, + { + self.retain(|(k, v)| predicate(k, v)) + } } impl<'a, K: 'a, V: 'a> StoreIterable<'a, K, V> for ShortSlice<(K, V)> { @@ -239,14 +275,14 @@ core::iter::Map, for<'r> fn(&'r (K, V)) -> (&'r K, &'r V)>; fn lm_iter(&'a self) -> Self::KeyValueIter { - self.as_slice().iter().map(|elt| (&elt.0, &elt.1)) + self.iter().map(|elt| (&elt.0, &elt.1)) } } impl StoreFromIterator for ShortSlice<(K, V)> {} #[test] -fn test_shortvec_impl() { +fn test_short_slice_impl() { litemap::testing::check_store::>(); } @@ -254,8 +290,9 @@ ( $(#[$doc:meta])* $name:ident, - $($full_name:ident)::+, + $($path:ident)::+, $macro_name:ident, + $legacy_macro_name:ident, $len_start:literal..=$len_end:literal, $tinystr_ident:ident, $validate:expr, @@ -278,7 +315,7 @@ /// 
# Examples /// /// ``` - #[doc = concat!("use icu_locid::", stringify!($($full_name)::+), ";")] + #[doc = concat!("use icu_locid::", stringify!($($path::)+), stringify!($name), ";")] /// #[doc = concat!("assert!(", stringify!($name), "::try_from_bytes(b", stringify!($good_example), ").is_ok());")] #[doc = concat!("assert!(", stringify!($name), "::try_from_bytes(b", stringify!($bad_example), ").is_err());")] @@ -349,6 +386,11 @@ self.0.as_str() } + #[doc(hidden)] + pub const fn into_tinystr(&self) -> tinystr::TinyAsciiStr<$len_end> { + self.0 + } + /// Compare with BCP-47 bytes. /// /// The return value is equivalent to what would happen if you first converted @@ -389,7 +431,7 @@ impl From<$name> for tinystr::TinyAsciiStr<$len_end> { fn from(input: $name) -> Self { - input.0 + input.into_tinystr() } } @@ -417,37 +459,40 @@ /// Parsing errors don't have to be handled at runtime: /// ``` /// assert_eq!( - #[doc = concat!(" icu_locid::", stringify!($macro_name), "!(", stringify!($good_example) ,"),")] - #[doc = concat!(" ", stringify!($good_example), ".parse::().unwrap()")] + #[doc = concat!(" icu_locid::", $(stringify!($path), "::",)+ stringify!($macro_name), "!(", stringify!($good_example) ,"),")] + #[doc = concat!(" ", stringify!($good_example), ".parse::().unwrap()")] /// ); /// ``` /// /// Invalid input is a compile failure: /// ```compile_fail,E0080 - #[doc = concat!("icu_locid::", stringify!($macro_name), "!(", stringify!($bad_example) ,");")] + #[doc = concat!("icu_locid::", $(stringify!($path), "::",)+ stringify!($macro_name), "!(", stringify!($bad_example) ,");")] /// ``` /// - #[doc = concat!("[`", stringify!($name), "`]: crate::", stringify!($($full_name)::+))] + #[doc = concat!("[`", stringify!($name), "`]: crate::", $(stringify!($path), "::",)+ stringify!($name))] #[macro_export] - macro_rules! $macro_name { + #[doc(hidden)] + macro_rules! $legacy_macro_name { ($string:literal) => {{ - use $crate::$($full_name)::+; + use $crate::$($path ::)+ $name; const R: $name = match $name::try_from_bytes($string.as_bytes()) { Ok(r) => r, #[allow(clippy::panic)] // const context - _ => panic!(concat!("Invalid ", stringify!($name), ": ", $string)), + _ => panic!(concat!("Invalid ", $(stringify!($path), "::",)+ stringify!($name), ": ", $string)), }; R }}; } + #[doc(inline)] + pub use $legacy_macro_name as $macro_name; #[cfg(feature = "databake")] impl databake::Bake for $name { fn bake(&self, env: &databake::CrateEnv) -> databake::TokenStream { env.insert("icu_locid"); let string = self.as_str(); - databake::quote! {::icu_locid::$macro_name!(#string) } + databake::quote! { icu_locid::$($path::)+ $macro_name!(#string) } } } @@ -610,20 +655,20 @@ macro_rules! 
impl_writeable_for_subtag_list { ($type:tt, $sample1:literal, $sample2:literal) => { - impl_writeable_for_each_subtag_str_no_test!($type, selff, selff.0.len() == 1 => alloc::borrow::Cow::Borrowed(selff.0.as_slice().get(0).unwrap().as_str())); + impl_writeable_for_each_subtag_str_no_test!($type, selff, selff.0.len() == 1 => alloc::borrow::Cow::Borrowed(selff.0.get(0).unwrap().as_str())); #[test] fn test_writeable() { writeable::assert_writeable_eq!(&$type::default(), ""); writeable::assert_writeable_eq!( - &$type::from_vec_unchecked(alloc::vec![$sample1.parse().unwrap()]), + &$type::from_short_slice_unchecked(alloc::vec![$sample1.parse().unwrap()].into()), $sample1, ); writeable::assert_writeable_eq!( - &$type::from_vec_unchecked(vec![ + &$type::from_short_slice_unchecked(vec![ $sample1.parse().unwrap(), $sample2.parse().unwrap() - ]), + ].into()), core::concat!($sample1, "-", $sample2), ); } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/langid.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/langid.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/langid.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/langid.rs 2023-12-21 18:27:08.000000000 +0000 @@ -20,7 +20,8 @@ /// /// ``` /// use icu::locid::{ -/// langid, subtags_language as language, subtags_region as region, +/// langid, +/// subtags::{language, region}, /// }; /// /// let li = langid!("en-US"); @@ -49,8 +50,8 @@ /// /// ``` /// use icu::locid::{ -/// langid, subtags_language as language, subtags_region as region, -/// subtags_script as script, subtags_variant as variant, +/// langid, +/// subtags::{language, region, script, variant}, /// }; /// /// let li = langid!("eN_latn_Us-Valencia"); @@ -257,7 +258,7 @@ /// Compare this `LanguageIdentifier` with a potentially unnormalized BCP-47 string. /// /// The return value is equivalent to what would happen if you first parsed the - /// BCP-47 string to a `LanguageIdentifier` and then performed a structucal comparison. + /// BCP-47 string to a `LanguageIdentifier` and then performed a structural comparison. 
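The helpers.rs hunks above drop the dedicated `as_slice`/`as_mut_slice` accessors in favor of `Deref`/`DerefMut` impls and give `ShortSlice` a specialized `FromIterator` that only allocates for two or more elements. A self-contained sketch of that shape (an illustration of the technique, not the vendored type, which also carries `litemap` store impls and other methods):

```rust
// Hedged sketch: a ShortSlice-style small-size optimization with Deref.
use core::ops::Deref;

#[derive(Debug)]
enum ShortSlice<T> {
    ZeroOne(Option<T>),     // zero or one element stored inline
    Multi(Box<[T]>),        // two or more elements spill to the heap
}

impl<T> ShortSlice<T> {
    const fn new() -> Self {
        Self::ZeroOne(None)
    }
}

impl<T> Deref for ShortSlice<T> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        match self {
            Self::ZeroOne(None) => &[],
            Self::ZeroOne(Some(v)) => core::slice::from_ref(v),
            Self::Multi(v) => v,
        }
    }
}

impl<T> FromIterator<T> for ShortSlice<T> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let mut iter = iter.into_iter();
        match (iter.next(), iter.next()) {
            (Some(first), Some(second)) => {
                // Size-hint handling follows the pattern in the hunk above.
                let mut vec = Vec::with_capacity(iter.size_hint().0.saturating_add(3));
                vec.push(first);
                vec.push(second);
                vec.extend(iter);
                Self::Multi(vec.into_boxed_slice())
            }
            (first, _) => Self::ZeroOne(first),
        }
    }
}

fn main() {
    let empty: ShortSlice<u8> = ShortSlice::new();
    assert!(empty.is_empty()); // slice methods come through Deref
    let many: ShortSlice<u8> = (1u8..=4).collect();
    assert_eq!(many.len(), 4);
}
```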
/// /// # Examples /// @@ -382,9 +383,7 @@ /// # Examples /// /// ``` -/// use icu::locid::{ -/// langid, subtags_language as language, LanguageIdentifier, -/// }; +/// use icu::locid::{langid, subtags::language, LanguageIdentifier}; /// /// assert_eq!(LanguageIdentifier::from(language!("en")), langid!("en")); /// ``` @@ -400,7 +399,7 @@ /// # Examples /// /// ``` -/// use icu::locid::{langid, subtags_script as script, LanguageIdentifier}; +/// use icu::locid::{langid, subtags::script, LanguageIdentifier}; /// /// assert_eq!( /// LanguageIdentifier::from(Some(script!("latn"))), @@ -419,7 +418,7 @@ /// # Examples /// /// ``` -/// use icu::locid::{langid, subtags_region as region, LanguageIdentifier}; +/// use icu::locid::{langid, subtags::region, LanguageIdentifier}; /// /// assert_eq!( /// LanguageIdentifier::from(Some(region!("US"))), @@ -441,8 +440,9 @@ /// /// ``` /// use icu::locid::{ -/// langid, subtags_language as language, subtags_region as region, -/// subtags_script as script, LanguageIdentifier, +/// langid, +/// subtags::{language, region, script}, +/// LanguageIdentifier, /// }; /// /// let lang = language!("en"); @@ -482,8 +482,8 @@ /// /// ``` /// use icu::locid::{ -/// langid, subtags_language as language, subtags_region as region, -/// subtags_script as script, +/// langid, +/// subtags::{language, region, script}, /// }; /// /// let lid = langid!("en-Latn-US"); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/lib.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/lib.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/lib.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/lib.rs 2023-12-21 18:27:08.000000000 +0000 @@ -24,7 +24,8 @@ //! ``` //! use icu::locid::Locale; //! use icu::locid::{ -//! locale, subtags_language as language, subtags_region as region, +//! locale, +//! subtags::{language, region}, //! }; //! //! let mut loc: Locale = locale!("en-US"); @@ -81,6 +82,7 @@ pub use ParserError as Error; pub mod extensions; +#[macro_use] pub mod subtags; pub mod zerovec; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/locale.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/locale.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/locale.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/locale.rs 2023-12-21 18:27:08.000000000 +0000 @@ -28,8 +28,9 @@ /// /// ``` /// use icu_locid::{ -/// extensions_unicode_key as key, extensions_unicode_value as value, -/// locale, subtags_language as language, subtags_region as region, +/// extensions::unicode::{key, value}, +/// locale, +/// subtags::{language, region}, /// }; /// /// let loc = locale!("en-US-u-ca-buddhist"); @@ -56,7 +57,8 @@ /// `_` separators to `-` and adjusting casing to conform to the Unicode standard. /// /// Any bogus subtags will cause the parsing to fail with an error. -/// No subtag validation or canonicalization is performed. +/// +/// No subtag validation or alias resolution is performed. 
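The locale.rs examples above now import `key!` and `value!` from `extensions::unicode`. A brief sketch of looking up a Unicode extension keyword through those paths, reusing the "de-u-hc-h12-ca-buddhist" string from the extension docs updated earlier; the `keywords.get` pattern follows those doc examples:

```rust
// Hedged sketch: Unicode extension keyword lookup with the relocated macros.
use icu::locid::extensions::unicode::{key, value};
use icu::locid::Locale;

fn main() {
    let loc: Locale = "de-u-hc-h12-ca-buddhist".parse().expect("Parsing failed.");
    assert_eq!(
        loc.extensions.unicode.keywords.get(&key!("ca")),
        Some(&value!("buddhist"))
    );
    assert_eq!(
        loc.extensions.unicode.keywords.get(&key!("hc")),
        Some(&value!("h12"))
    );
}
```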
/// /// # Examples /// @@ -98,13 +100,13 @@ assert_eq!(core::mem::size_of::>(), 32); assert_eq!(core::mem::size_of::(), 24); - assert_eq!(core::mem::size_of::(), 24); + assert_eq!(core::mem::size_of::(), 16); assert_eq!(core::mem::size_of::(), 24); assert_eq!(core::mem::size_of::>(), 24); - assert_eq!(core::mem::size_of::(), 24); - assert_eq!(core::mem::size_of::(), 152); + assert_eq!(core::mem::size_of::(), 16); + assert_eq!(core::mem::size_of::(), 136); - assert_eq!(core::mem::size_of::(), 184); + assert_eq!(core::mem::size_of::(), 168); } impl Locale { @@ -250,7 +252,7 @@ /// Compare this `Locale` with a potentially unnormalized BCP-47 string. /// /// The return value is equivalent to what would happen if you first parsed the - /// BCP-47 string to a `Locale` and then performed a structucal comparison. + /// BCP-47 string to a `Locale` and then performed a structural comparison. /// /// # Examples /// @@ -422,7 +424,7 @@ /// /// ``` /// use icu::locid::Locale; -/// use icu::locid::{locale, subtags_language as language}; +/// use icu::locid::{locale, subtags::language}; /// /// assert_eq!(Locale::from(language!("en")), locale!("en")); /// ``` @@ -439,7 +441,7 @@ /// /// ``` /// use icu::locid::Locale; -/// use icu::locid::{locale, subtags_script as script}; +/// use icu::locid::{locale, subtags::script}; /// /// assert_eq!(Locale::from(Some(script!("latn"))), locale!("und-Latn")); /// ``` @@ -456,7 +458,7 @@ /// /// ``` /// use icu::locid::Locale; -/// use icu::locid::{locale, subtags_region as region}; +/// use icu::locid::{locale, subtags::region}; /// /// assert_eq!(Locale::from(Some(region!("US"))), locale!("und-US")); /// ``` @@ -474,8 +476,8 @@ /// ``` /// use icu::locid::Locale; /// use icu::locid::{ -/// locale, subtags_language as language, subtags_region as region, -/// subtags_script as script, +/// locale, +/// subtags::{language, region, script}, /// }; /// /// assert_eq!( diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/parser/langid.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/parser/langid.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/parser/langid.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/parser/langid.rs 2023-12-21 18:27:08.000000000 +0000 @@ -5,10 +5,10 @@ pub use super::errors::ParserError; use crate::extensions::unicode::{Attribute, Key, Value}; use crate::extensions::ExtensionType; +use crate::helpers::ShortSlice; use crate::parser::SubtagIterator; use crate::LanguageIdentifier; use crate::{extensions, subtags}; -use alloc::vec::Vec; use tinystr::TinyAsciiStr; #[derive(PartialEq, Clone, Copy)] @@ -31,7 +31,7 @@ ) -> Result { let mut script = None; let mut region = None; - let mut variants = Vec::new(); + let mut variants = ShortSlice::new(); let language = if let Some(subtag) = iter.next() { subtags::Language::try_from_bytes(subtag)? 
@@ -95,7 +95,7 @@ language, script, region, - variants: subtags::Variants::from_vec_unchecked(variants), + variants: subtags::Variants::from_short_slice_unchecked(variants), }) } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/language.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/language.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/language.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/language.rs 2023-12-21 18:27:08.000000000 +0000 @@ -5,7 +5,7 @@ impl_tinystr_subtag!( /// A language subtag (examples: `"en"`, `"csb"`, `"zh"`, `"und"`, etc.) /// - /// [`Language`] represents a Unicode base language code conformat to the + /// [`Language`] represents a Unicode base language code conformant to the /// [`unicode_language_id`] field of the Language and Locale Identifier. /// /// # Examples @@ -34,7 +34,8 @@ /// /// [`unicode_language_id`]: https://unicode.org/reports/tr35/#unicode_language_id Language, - subtags::Language, + subtags, + language, subtags_language, 2..=3, s, @@ -63,7 +64,7 @@ /// # Examples /// /// ``` - /// use icu::locid::{subtags::Language, subtags_language as language}; + /// use icu::locid::subtags::{language, Language}; /// /// let mut lang = language!("csb"); /// diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/mod.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/mod.rs 2023-12-21 18:27:08.000000000 +0000 @@ -51,8 +51,12 @@ mod variant; mod variants; -pub use language::Language; -pub use region::Region; -pub use script::Script; -pub use variant::Variant; +#[doc(inline)] +pub use language::{language, Language}; +#[doc(inline)] +pub use region::{region, Region}; +#[doc(inline)] +pub use script::{script, Script}; +#[doc(inline)] +pub use variant::{variant, Variant}; pub use variants::Variants; diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/region.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/region.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/region.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/region.rs 2023-12-21 18:27:08.000000000 +0000 @@ -5,7 +5,7 @@ impl_tinystr_subtag!( /// A region subtag (examples: `"US"`, `"CN"`, `"AR"` etc.) /// - /// [`Region`] represents a Unicode base language code conformat to the + /// [`Region`] represents a Unicode base language code conformant to the /// [`unicode_region_id`] field of the Language and Locale Identifier. /// /// # Examples @@ -19,7 +19,8 @@ /// /// [`unicode_region_id`]: https://unicode.org/reports/tr35/#unicode_region_id Region, - subtags::Region, + subtags, + region, subtags_region, 2..=3, s, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/script.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/script.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/script.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/script.rs 2023-12-21 18:27:08.000000000 +0000 @@ -5,7 +5,7 @@ impl_tinystr_subtag!( /// A script subtag (examples: `"Latn"`, `"Arab"`, etc.) 
/// - /// [`Script`] represents a Unicode base language code conformat to the + /// [`Script`] represents a Unicode base language code conformant to the /// [`unicode_script_id`] field of the Language and Locale Identifier. /// /// # Examples @@ -19,7 +19,8 @@ /// /// [`unicode_script_id`]: https://unicode.org/reports/tr35/#unicode_script_id Script, - subtags::Script, + subtags, + script, subtags_script, 4..=4, s, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/variant.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/variant.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/variant.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/variant.rs 2023-12-21 18:27:08.000000000 +0000 @@ -5,7 +5,7 @@ impl_tinystr_subtag!( /// A variant subtag (examples: `"macos"`, `"posix"`, `"1996"` etc.) /// - /// [`Variant`] represents a Unicode base language code conformat to the + /// [`Variant`] represents a Unicode base language code conformant to the /// [`unicode_variant_id`] field of the Language and Locale Identifier. /// /// # Examples @@ -19,7 +19,8 @@ /// /// [`unicode_variant_id`]: https://unicode.org/reports/tr35/#unicode_variant_id Variant, - subtags::Variant, + subtags, + variant, subtags_variant, 4..=8, s, diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/variants.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/variants.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/variants.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/subtags/variants.rs 2023-12-21 18:27:08.000000000 +0000 @@ -16,7 +16,7 @@ /// # Examples /// /// ``` -/// use icu::locid::{subtags::Variants, subtags_variant as variant}; +/// use icu::locid::subtags::{variant, Variants}; /// /// let mut v = vec![variant!("posix"), variant!("macos")]; /// v.sort(); @@ -48,7 +48,7 @@ /// # Examples /// /// ``` - /// use icu::locid::{subtags::Variants, subtags_variant as variant}; + /// use icu::locid::subtags::{variant, Variants}; /// /// let variants = Variants::from_variant(variant!("posix")); /// ``` @@ -64,7 +64,7 @@ /// # Examples /// /// ``` - /// use icu::locid::{subtags::Variants, subtags_variant as variant}; + /// use icu::locid::subtags::{variant, Variants}; /// /// let mut v = vec![variant!("posix"), variant!("macos")]; /// v.sort(); @@ -77,7 +77,11 @@ /// for the caller to use [`binary_search`](slice::binary_search) instead of [`sort`](slice::sort) /// and [`dedup`](Vec::dedup()). pub fn from_vec_unchecked(input: Vec) -> Self { - Self(ShortSlice::from(input)) + Self(input.into()) + } + + pub(crate) fn from_short_slice_unchecked(input: ShortSlice) -> Self { + Self(input) } /// Empties the [`Variants`] list. 
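The subtags hunks above relocate `variant!` (alongside `language!`, `region!`, and `script!`) under `icu::locid::subtags`. A compact sketch matching the updated `Variants` doc example; the pre-sorted, deduplicated input is what `from_vec_unchecked` expects:

```rust
// Hedged sketch of building `Variants` with the relocated `variant!` macro.
use icu::locid::subtags::{variant, Variants};

fn main() {
    let mut v = vec![variant!("posix"), variant!("macos")];
    v.sort();
    v.dedup();
    let variants = Variants::from_vec_unchecked(v);
    assert_eq!(&variants.to_string(), "macos-posix");
}
```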
@@ -87,7 +91,7 @@ /// # Examples /// /// ``` - /// use icu::locid::{subtags::Variants, subtags_variant as variant}; + /// use icu::locid::subtags::{variant, Variants}; /// /// let mut v = vec![variant!("posix"), variant!("macos")]; /// v.sort(); @@ -119,6 +123,6 @@ type Target = [Variant]; fn deref(&self) -> &[Variant] { - self.0.as_slice() + self.0.deref() } } diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/zerovec.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/zerovec.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/zerovec.rs 2023-12-04 21:32:19.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid/src/zerovec.rs 2023-12-21 18:27:08.000000000 +0000 @@ -60,8 +60,8 @@ //! use icu_locid::subtags::{Language, Region, Script}; //! use icu_locid::LanguageIdentifier; //! use icu_locid::{ -//! langid, subtags_language as language, subtags_region as region, -//! subtags_script as script, +//! langid, +//! subtags::{language, region, script}, //! }; //! use zerovec::ZeroMap; //! diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/.cargo-checksum.json rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/.cargo-checksum.json --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/.cargo-checksum.json 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/.cargo-checksum.json 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1 @@ +{"files":{"Cargo.toml":"334f54b489b88e4808da4fa355ddf773b86971570d4bb0360a876e3437cb962d","LICENSE":"853f87c96f3d249f200fec6db1114427bc8bdf4afddc93c576956d78152ce978","README.md":"3ff3f2e2f9e5d4c5786132838576edef42a12c5529d5f080370f24aa6246bb92","benches/fixtures/locales.json":"9846601a29874baf140cac1252d4624fadc30182fec106d17f008ece886b9185","benches/fixtures/uncanonicalized-locales.json":"a866ed318b92f79d8853567e79b373c02984967023f5f39161140544e71b0c72","benches/helpers/mod.rs":"d3bf59e7eed6230f340bef6c87a7b8de3a387ec391f60afc1b15a0d001cbfb67","benches/locale_canonicalizer.rs":"477af27aa35385d107e19b8e8a0452466a69d20e147a63631b78634c17712fed","src/canonicalizer.rs":"7770767fad20f38aaae19382430451573293ccdeac587b2d063163b870781086","src/directionality.rs":"a031a9d55ffe827c86400637b7302dc424c708dcc52ea667504a33a16db822c2","src/error.rs":"486fda8a0e9b7bb5822bbb0defb51145364d6053b8d60b88ef71e4b2bcd6699d","src/expander.rs":"d3ef487a416425ea6fb2ce7bf08b7487e180a580002e54ce30d5524cfd7514e2","src/fallback/algorithms.rs":"47625130cd5a04cf085dd0494591e117ba204a9d2eb649788b0ff96773cc6e21","src/fallback/mod.rs":"71ca2f23e410863010a62a48bba8a943763f7d49c12bf80b451a1b9295484e44","src/lib.rs":"5390facdc3df7e5ec5ab842bf59d4d13383d77d93a722685231a1d271cfba944","src/provider/canonicalizer.rs":"f848dbbc906b5f3be0b6384f5a2f26178898822a5c37334a57b12db8e1af0ed9","src/provider/directionality.rs":"fc516f501254af444cfa010d3c87aeea032dd6eccf5f82301c050ed3df2e05b1","src/provider/expander.rs":"6903d16138ada8216e0341d984126dcc1f6fac21468144e8140fc217b164572e","src/provider/fallback.rs":"d567e3d49261cac9de35825b3d57204d49068558f10579121f0bf0c42090c9cc","src/provider/mod.rs":"ce8e29eda7128747d489371118d1cf2c0e2740662eb8c6a55310dff86c5641cc","tests/fixtures/canonicalize.json":"3dc2f661b04e4c9ecced70fc1b98a504eb5f5a0067b38665b10e50c25174bc4a","tests/fixtures/maximize.json":"8137359060218572bcaf5e56825346fdcb600e2189378ef4be836ba0a7295b66","tests/fixtures/minimize.json":"3bb6f19c5525818212388dcbf778064e7f73d2c32a8a7e8c58d618583a77121a","tests/fixtures/mod.rs":"18a900aa4f74120b7e7e64fcb09ea
e38a16504d66e23f752e743dcd9b1ad6530","tests/helpers/mod.rs":"d3bf59e7eed6230f340bef6c87a7b8de3a387ec391f60afc1b15a0d001cbfb67","tests/locale_canonicalizer.rs":"1ebf7320f422b65cc3cc50468abdc8f08128feba85d936f5beb456b0b052a91d"},"package":"6551daf80882d8e68eee186cc19e132d8bde1b1f059a79b93384a5ca0e8fc5e7"} \ No newline at end of file diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/Cargo.toml rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/Cargo.toml --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/Cargo.toml 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,128 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +rust-version = "1.66" +name = "icu_locid_transform" +version = "1.3.2" +authors = ["The ICU4X Project Developers"] +include = [ + "data/**/*", + "src/**/*", + "examples/**/*", + "benches/**/*", + "tests/**/*", + "Cargo.toml", + "LICENSE", + "README.md", +] +description = "API for Unicode Language and Locale Identifiers canonicalization" +homepage = "https://icu4x.unicode.org" +readme = "README.md" +categories = ["internationalization"] +license-file = "LICENSE" +repository = "https://github.com/unicode-org/icu4x" + +[package.metadata.cargo-all-features] +denylist = ["bench"] +skip_optional_dependencies = true + +[package.metadata.docs.rs] +all-features = true + +[lib] +bench = false + +[[test]] +name = "locale_canonicalizer" +required-features = ["serde"] + +[[bench]] +name = "locale_canonicalizer" +harness = false + +[dependencies.databake] +version = "0.1.6" +features = ["derive"] +optional = true +default-features = false + +[dependencies.displaydoc] +version = "0.2.3" +default-features = false + +[dependencies.icu_locid] +version = "~1.3.2" +features = ["zerovec"] +default-features = false + +[dependencies.icu_locid_transform_data] +version = "~1.3.2" +optional = true +default-features = false + +[dependencies.icu_provider] +version = "~1.3.2" +features = ["macros"] +default-features = false + +[dependencies.serde] +version = "1.0" +features = [ + "derive", + "alloc", +] +optional = true +default-features = false + +[dependencies.tinystr] +version = "0.7.3" +features = [ + "alloc", + "zerovec", +] +default-features = false + +[dependencies.zerovec] +version = "0.10.0" +features = ["yoke"] +default-features = false + +[dev-dependencies.serde] +version = "1.0" +features = ["derive"] + +[dev-dependencies.serde_json] +version = "1.0" + +[features] +bench = ["serde"] +compiled_data = ["dep:icu_locid_transform_data"] +datagen = [ + "serde", + "dep:databake", + "zerovec/databake", + "icu_locid/databake", + "tinystr/databake", +] +default = ["compiled_data"] +serde = [ + "dep:serde", + "icu_locid/serde", + "tinystr/serde", + "zerovec/serde", + "icu_provider/serde", +] +std = [] + +[target."cfg(not(target_arch = \"wasm32\"))".dev-dependencies.criterion] +version = "0.4" diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/LICENSE 
rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/LICENSE --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/LICENSE 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,44 @@ +UNICODE LICENSE V3 + +COPYRIGHT AND PERMISSION NOTICE + +Copyright © 2020-2023 Unicode, Inc. + +NOTICE TO USER: Carefully read the following legal agreement. BY +DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING DATA FILES, AND/OR +SOFTWARE, YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE +TERMS AND CONDITIONS OF THIS AGREEMENT. IF YOU DO NOT AGREE, DO NOT +DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE THE DATA FILES OR SOFTWARE. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of data files and any associated documentation (the "Data Files") or +software and any associated documentation (the "Software") to deal in the +Data Files or Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, and/or sell +copies of the Data Files or Software, and to permit persons to whom the +Data Files or Software are furnished to do so, provided that either (a) +this copyright and permission notice appear with all copies of the Data +Files or Software, or (b) this copyright and permission notice appear in +associated Documentation. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF +THIRD PARTY RIGHTS. + +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE +BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THE DATA +FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall +not be used in advertising or otherwise to promote the sale, use or other +dealings in these Data Files or Software without prior written +authorization of the copyright holder. + +— + +Portions of ICU4X may have been adapted from ICU4C and/or ICU4J. +ICU 1.8.1 to ICU 57.1 © 1995-2016 International Business Machines Corporation and others. diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/README.md rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/README.md --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/README.md 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/README.md 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,77 @@ +# icu_locid_transform [![crates.io](https://img.shields.io/crates/v/icu_locid_transform)](https://crates.io/crates/icu_locid_transform) + + + +Canonicalization of locale identifiers based on [`CLDR`] data. + +This module is published as its own crate ([`icu_locid_transform`](https://docs.rs/icu_locid_transform/latest/icu_locid_transform/)) +and as part of the [`icu`](https://docs.rs/icu/latest/icu/) crate. See the latter for more details on the ICU4X project. + +It currently supports locale canonicalization based upon the canonicalization +algorithm from [`UTS #35: Unicode LDML 3. 
LocaleId Canonicalization`], +as well as the minimize and maximize likely subtags algorithms +as described in [`UTS #35: Unicode LDML 3. Likely Subtags`]. + +The maximize method potentially updates a passed in locale in place +depending up the results of running the 'Add Likely Subtags' algorithm +from [`UTS #35: Unicode LDML 3. Likely Subtags`]. + +This minimize method returns a new Locale that is the result of running the +'Remove Likely Subtags' algorithm from [`UTS #35: Unicode LDML 3. Likely Subtags`]. + +## Examples + +```rust +use icu::locid::Locale; +use icu::locid_transform::{LocaleCanonicalizer, TransformResult}; + +let lc = LocaleCanonicalizer::new(); + +let mut locale: Locale = "ja-Latn-fonipa-hepburn-heploc" + .parse() + .expect("parse failed"); +assert_eq!(lc.canonicalize(&mut locale), TransformResult::Modified); +assert_eq!(locale, "ja-Latn-alalc97-fonipa".parse::<Locale>().unwrap()); +``` + +```rust +use icu::locid::locale; +use icu::locid_transform::{LocaleExpander, TransformResult}; + +let lc = LocaleExpander::new(); + +let mut locale = locale!("zh-CN"); +assert_eq!(lc.maximize(&mut locale), TransformResult::Modified); +assert_eq!(locale, locale!("zh-Hans-CN")); + +let mut locale = locale!("zh-Hant-TW"); +assert_eq!(lc.maximize(&mut locale), TransformResult::Unmodified); +assert_eq!(locale, locale!("zh-Hant-TW")); +``` + +```rust +use icu::locid::locale; +use icu::locid_transform::{LocaleExpander, TransformResult}; +use writeable::assert_writeable_eq; + +let lc = LocaleExpander::new(); + +let mut locale = locale!("zh-Hans-CN"); +assert_eq!(lc.minimize(&mut locale), TransformResult::Modified); +assert_eq!(locale, locale!("zh")); + +let mut locale = locale!("zh"); +assert_eq!(lc.minimize(&mut locale), TransformResult::Unmodified); +assert_eq!(locale, locale!("zh")); +``` + +[`ICU4X`]: ../icu/index.html +[`CLDR`]: http://cldr.unicode.org/ +[`UTS #35: Unicode LDML 3. Likely Subtags`]: https://www.unicode.org/reports/tr35/#Likely_Subtags. +[`UTS #35: Unicode LDML 3. LocaleId Canonicalization`]: http://unicode.org/reports/tr35/#LocaleId_Canonicalization, + + + +## More Information + +For more information on development, authorship, contributing etc. please visit [`ICU4X home page`](https://github.com/unicode-org/icu4x).
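The README examples above each exercise a single operation. Below is a minimal sketch of how the two types compose when both canonicalization and likely-subtags expansion are wanted, assuming only the `LocaleCanonicalizer` and `LocaleExpander` APIs shown in that README; the `canonicalizer`/`expander` names and the final region check are illustrative and not part of the vendored crate.

```rust
use icu::locid::Locale;
use icu::locid_transform::{LocaleCanonicalizer, LocaleExpander, TransformResult};

let canonicalizer = LocaleCanonicalizer::new();
let expander = LocaleExpander::new();

// Same legacy tag as the first README example above.
let mut locale: Locale = "ja-Latn-fonipa-hepburn-heploc"
    .parse()
    .expect("parse failed");

// Canonicalization rewrites deprecated subtags in place using CLDR alias data.
assert_eq!(canonicalizer.canonicalize(&mut locale), TransformResult::Modified);
assert_eq!(locale, "ja-Latn-alalc97-fonipa".parse::<Locale>().unwrap());

// Maximization then fills in the likely missing subtags (here, a region) in place.
expander.maximize(&mut locale);
assert!(locale.id.region.is_some());
```

Running the canonicalizer first keeps the alias rewrites applied to the tag as supplied, before any inferred subtags are added.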
diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/fixtures/locales.json rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/fixtures/locales.json --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/fixtures/locales.json 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/fixtures/locales.json 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,41 @@ +[ + "en-US", + "en-GB", + "es-AR", + "it", + "zh-Hans-CN", + "de-AT", + "pl", + "fr-FR", + "de-AT", + "sr-Cyrl-SR", + "nb-NO", + "fr-FR", + "mk", + "uk", + "und-PL", + "und-Latn-AM", + "ug-Cyrl", + "sr-ME", + "mn-Mong", + "lif-Limb", + "gan", + "zh-Hant", + "yue-Hans", + "unr", + "unr-Deva", + "und-Thai-CN", + "ug-Cyrl", + "en-Latn-DE", + "pl-FR", + "de-CH", + "tuq", + "sr-ME", + "ng", + "klx", + "kk-Arab", + "en-Cyrl", + "und-Cyrl-UK", + "und-Arab", + "und-Arab-FO" +] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/fixtures/uncanonicalized-locales.json rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/fixtures/uncanonicalized-locales.json --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/fixtures/uncanonicalized-locales.json 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/fixtures/uncanonicalized-locales.json 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,88 @@ +[ + "cka", + "cze", + "gfx", + "sgn-BR", + "sgn-DD", + "tam", + "und-aaland", + "nob-bokmal", + "no-nynorsk", + "und-Qaai", + "en-554", + "en-084", + "art-lojban", + "zh-guoyu", + "zh-hakka", + "zh-xiang", + "aar-x-private", + "heb-x-private", + "ces", + "hy-arevela", + "hy-arevmda", + "cel-gaulish", + "ja-latn-hepburn-heploc", + "ja-Latn-fonipa-hepburn-heploc", + "und-Armn-SU", + "sh", + "sh-Cyrl", + "cnr", + "cnr-BA", + "ru-SU", + "ru-810", + "en-SU", + "en-810", + "und-SU", + "und-810", + "und-Latn-SU", + "und-Latn-810", + "hy-SU", + "hy-810", + "und-Armn-SU", + "und-Armn-810", + "sr-CS", + "sr-Latn-CS", + "sr-Cyrl-CS", + "az-NT", + "sl-t-sl-rozaj-biske-1994", + "DE-T-M0-DIN-K0-QWERTZ", + "en-t-m0-true", + "en-t-iw", + "und-u-rg-no23", + "und-u-rg-cn11", + "und-u-rg-cz10a", + "und-u-rg-fra", + "und-u-rg-frg", + "und-u-rg-lud", + "und-NO-u-rg-no23", + "und-CN-u-rg-cn11", + "und-CZ-u-rg-cz10a", + "und-FR-u-rg-fra", + "und-FR-u-rg-frg", + "und-u-rg-lud", + "und-u-sd-no23", + "und-u-sd-cn11", + "und-u-sd-cz10a", + "und-u-sd-fra", + "hy-arevela", + "hy-Armn-arevela", + "hy-AM-arevela", + "hy-arevela-fonipa", + "hy-fonipa-arevela", + "hy-arevmda", + "hy-Armn-arevmda", + "hy-AM-arevmda", + "hy-arevmda-fonipa", + "hy-fonipa-arevmda", + "ja-Latn-hepburn-heploc", + "ja-Latn-JP-hepburn-heploc", + "sv-aaland", + "el-polytoni", + "ja-Latn-alalc97-hepburn-heploc", + "ja-Latn-hepburn-alalc97-heploc", + "ja-Latn-hepburn-heploc-alalc97", + "ja-Latn-heploc-hepburn", + "ja-Latn-heploc", + "ja-Latn-aaland-heploc", + "ja-Latn-heploc-polytoni" +] diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/helpers/mod.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/helpers/mod.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/helpers/mod.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/helpers/mod.rs 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,15 @@ +// This file is part of ICU4X. 
For terms of use, please see the file +// called LICENSE at the top level of the ICU4X source tree +// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ). + +use std::fs::File; +use std::io::{BufReader, Error}; + +pub fn read_fixture<T>(path: &str) -> Result<T, Error> +where + T: serde::de::DeserializeOwned, +{ + let file = File::open(path)?; + let reader = BufReader::new(file); + Ok(serde_json::from_reader(reader)?) +} diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/locale_canonicalizer.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/locale_canonicalizer.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/locale_canonicalizer.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/benches/locale_canonicalizer.rs 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,99 @@ +// This file is part of ICU4X. For terms of use, please see the file +// called LICENSE at the top level of the ICU4X source tree +// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ). + +mod helpers; + +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use icu_locid::Locale; +use icu_locid_transform::LocaleCanonicalizer; +use icu_locid_transform::LocaleExpander; + +fn canonicalize_bench(c: &mut Criterion) { + let lc = LocaleCanonicalizer::new(); + + let mut group = c.benchmark_group("uncanonicalized"); + + let path = "./benches/fixtures/uncanonicalized-locales.json"; + let data: Vec<String> = helpers::read_fixture(path).expect("Failed to read a fixture"); + let locales: Vec<Locale> = data.iter().map(|s| s.parse().unwrap()).collect(); + + group.bench_function("clone", |b| { + b.iter(|| { + for locale in &locales { + let _ = black_box(locale).clone(); + } + }) + }); + + group.bench_function("canonicalize", |b| { + b.iter(|| { + for locale in &locales { + let mut locale = black_box(locale).clone(); + lc.canonicalize(&mut locale); + } + }) + }); + + group.finish(); +} + +fn canonicalize_noop_bench(c: &mut Criterion) { + let lc = LocaleCanonicalizer::new(); + + let mut group = c.benchmark_group("canonicalized"); + + // None of these locales require canonicalization, so this measures the cost of calling + // the canonicalizer on locales that will not be modified.
+ let path = "./benches/fixtures/locales.json"; + let data: Vec<String> = helpers::read_fixture(path).expect("Failed to read a fixture"); + let locales: Vec<Locale> = data.iter().map(|s| s.parse().unwrap()).collect(); + + group.bench_function("clone", |b| { + b.iter(|| { + for locale in &locales { + let _ = black_box(locale).clone(); + } + }) + }); + + group.bench_function("canonicalize", |b| { + b.iter(|| { + for locale in &locales { + let mut locale = black_box(locale).clone(); + lc.canonicalize(&mut locale); + } + }) + }); + + group.finish(); +} + +fn maximize_bench(c: &mut Criterion) { + let lc = LocaleExpander::new(); + + let mut group = c.benchmark_group("likelysubtags"); + + let path = "./benches/fixtures/locales.json"; + let data: Vec<String> = helpers::read_fixture(path).expect("Failed to read a fixture"); + let locales: Vec<Locale> = data.iter().map(|s| s.parse().unwrap()).collect(); + + group.bench_function("maximize", |b| { + b.iter(|| { + for locale in &locales { + let mut locale = locale.clone(); + lc.maximize(black_box(&mut locale)); + } + }) + }); + + group.finish(); +} + +criterion_group!( + benches, + canonicalize_bench, + canonicalize_noop_bench, + maximize_bench +); +criterion_main!(benches); diff -Nru rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/src/canonicalizer.rs rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/src/canonicalizer.rs --- rustc-1.74.1+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/src/canonicalizer.rs 1970-01-01 00:00:00.000000000 +0000 +++ rustc-1.75.0+dfsg0ubuntu1~bpo10/vendor/icu_locid_transform/src/canonicalizer.rs 2023-12-21 18:27:08.000000000 +0000 @@ -0,0 +1,618 @@ +// This file is part of ICU4X. For terms of use, please see the file +// called LICENSE at the top level of the ICU4X source tree +// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ). + +//! The collection of code for locale canonicalization. + +use crate::provider::*; +use crate::LocaleTransformError; +use alloc::vec::Vec; +use core::cmp::Ordering; + +use crate::LocaleExpander; +use crate::TransformResult; +use icu_locid::subtags::{Language, Region, Script}; +use icu_locid::{ + extensions::unicode::key, + subtags::{language, Variant, Variants}, + LanguageIdentifier, Locale, +}; +use icu_provider::prelude::*; +use tinystr::TinyAsciiStr; + +/// Implements the algorithm defined in *[UTS #35: Annex C, LocaleId Canonicalization]*. +/// +/// # Examples +/// +/// ``` +/// use icu_locid::Locale; +/// use icu_locid_transform::{LocaleCanonicalizer, TransformResult}; +/// +/// let lc = LocaleCanonicalizer::new(); +/// +/// let mut locale: Locale = "ja-Latn-fonipa-hepburn-heploc".parse().unwrap(); +/// assert_eq!(lc.canonicalize(&mut locale), TransformResult::Modified); +/// assert_eq!(locale, "ja-Latn-alalc97-fonipa".parse().unwrap()); +/// ``` +/// +/// [UTS #35: Annex C, LocaleId Canonicalization]: http://unicode.org/reports/tr35/#LocaleId_Canonicalization +#[derive(Debug)] +pub struct LocaleCanonicalizer { + /// Data to support canonicalization. + aliases: DataPayload, + /// Likely subtags implementation for delegation. + expander: LocaleExpander, +} + +#[inline] +fn uts35_rule_matches<'a, I>( + source: &Locale, + language: Language, + script: Option